commit f889edb9bd
Merge remote-tracking branch 'origin/master' into 3336.py36-off-travis
.github/workflows/ci.yml (vendored)

@@ -72,6 +72,7 @@ jobs:
       matrix:
         os:
           - macos-latest
           - windows-latest
         python-version:
           - 2.7
@@ -358,7 +358,10 @@ def bob(reactor, temp_dir, introducer_furl, flog_gatherer, storage_nodes, request):

 @pytest.fixture(scope='session')
+@pytest.mark.skipif(sys.platform.startswith('win'),
+                    'Tor tests are unstable on Windows')
 def chutney(reactor, temp_dir):

     chutney_dir = join(temp_dir, 'chutney')
     mkdir(chutney_dir)
@@ -377,18 +380,39 @@ def chutney(reactor, temp_dir):
         proto,
         'git',
         (
-            'git', 'clone', '--depth=1',
+            'git', 'clone',
             'https://git.torproject.org/chutney.git',
             chutney_dir,
         ),
         env=environ,
     )
     pytest_twisted.blockon(proto.done)

+    # XXX: Here we reset Chutney to the last revision known to work
+    # with Python 2, as a workaround for Chutney moving to Python 3.
+    # When this is no longer necessary, we will have to drop this and
+    # add '--depth=1' back to the above 'git clone' subprocess.
+    proto = _DumpOutputProtocol(None)
+    reactor.spawnProcess(
+        proto,
+        'git',
+        (
+            'git', '-C', chutney_dir,
+            'reset', '--hard',
+            '99bd06c7554b9113af8c0877b6eca4ceb95dcbaa'
+        ),
+        env=environ,
+    )
+    pytest_twisted.blockon(proto.done)

     return chutney_dir


 @pytest.fixture(scope='session')
+@pytest.mark.skipif(sys.platform.startswith('win'),
+                    reason='Tor tests are unstable on Windows')
 def tor_network(reactor, temp_dir, chutney, request):

     # this is the actual "chutney" script at the root of a chutney checkout
     chutney_dir = chutney
     chut = join(chutney_dir, 'chutney')
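Note: the fixture above clones Chutney and then pins the checkout to a fixed revision, which is why '--depth=1' had to go (a shallow clone cannot reset to older history). A minimal sketch of the same clone-then-pin pattern using only the standard library, rather than Twisted's spawnProcess (the URL and revision are the ones from the diff; the destination path is hypothetical):

    import subprocess

    def clone_pinned(url, dest, revision):
        """Clone a repository and hard-reset it to a known-good revision.

        Cloning with --depth=1 would be incompatible with resetting to an
        older revision, hence the full clone.
        """
        subprocess.check_call(["git", "clone", url, dest])
        subprocess.check_call(["git", "-C", dest, "reset", "--hard", revision])

    clone_pinned(
        "https://git.torproject.org/chutney.git",
        "/tmp/chutney",
        "99bd06c7554b9113af8c0877b6eca4ceb95dcbaa",
    )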
@@ -10,12 +10,21 @@ from six.moves import StringIO
 from twisted.internet.protocol import ProcessProtocol
 from twisted.internet.error import ProcessExitedAlready, ProcessDone
 from twisted.internet.defer import inlineCallbacks, Deferred

+import pytest
 import pytest_twisted

 import util

 # see "conftest.py" for the fixtures (e.g. "tor_network")

+# XXX: Integration tests that involve Tor do not run reliably on
+# Windows. They are skipped for now, in order to reduce CI noise.
+#
+# https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3347
+if sys.platform.startswith('win'):
+    pytest.skip('Skipping Tor tests on Windows', allow_module_level=True)

 @pytest_twisted.inlineCallbacks
 def test_onion_service_storage(reactor, request, temp_dir, flog_gatherer, tor_network, tor_introducer_furl):
     yield _create_anonymous_node(reactor, 'carol', 8008, request, temp_dir, flog_gatherer, tor_network, tor_introducer_furl)
@@ -219,23 +219,21 @@ def test_status(alice):
     found_upload = False
     found_download = False
     for href in hrefs:
-        if href.startswith(u"/") or not href:
+        if href == u"/" or not href:
             continue
-        resp = requests.get(
-            util.node_url(alice.node_dir, u"status/{}".format(href)),
-        )
-        if href.startswith(u'up'):
+        resp = requests.get(util.node_url(alice.node_dir, href))
+        if href.startswith(u"/status/up"):
             assert "File Upload Status" in resp.content
             if "Total Size: {}".format(len(FILE_CONTENTS)) in resp.content:
                 found_upload = True
-        elif href.startswith(u'down'):
+        elif href.startswith(u"/status/down"):
             assert "File Download Status" in resp.content
             if "Total Size: {}".format(len(FILE_CONTENTS)) in resp.content:
                 found_download = True

                 # download the specialized event information
                 resp = requests.get(
-                    util.node_url(alice.node_dir, u"status/{}/event_json".format(href)),
+                    util.node_url(alice.node_dir, u"{}/event_json".format(href)),
                 )
                 js = json.loads(resp.content)
                 # there's usually just one "read" operation, but this can handle many ..
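The rewrite reflects a change in the hrefs scraped from the status page: they are now absolute paths such as /status/up-0 rather than relative names such as up-0, so the old "prefix with status/" step is gone. A tiny self-contained sketch of the new filtering, with hypothetical sample hrefs:

    # Hypothetical hrefs as they might be scraped from the status page.
    hrefs = [u"/", u"/status/up-0", u"/status/down-0", u""]

    for href in hrefs:
        # Skip the bare root link and empty hrefs; everything else is
        # already an absolute path that can be fetched directly.
        if href == u"/" or not href:
            continue
        if href.startswith(u"/status/up"):
            print(href, "-> upload status page")
        elif href.startswith(u"/status/down"):
            print(href, "-> download status page")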
@@ -1,13 +1,105 @@
allmydata.test.mutable.test_exceptions.Exceptions.test_repr
allmydata.test.test_abbreviate.Abbreviate.test_abbrev_time_1s
allmydata.test.test_abbreviate.Abbreviate.test_abbrev_time_25s
allmydata.test.test_abbreviate.Abbreviate.test_abbrev_time_day
allmydata.test.test_abbreviate.Abbreviate.test_abbrev_time_future_5_minutes
allmydata.test.test_abbreviate.Abbreviate.test_abbrev_time_hours
allmydata.test.test_abbreviate.Abbreviate.test_abbrev_time_month
allmydata.test.test_abbreviate.Abbreviate.test_abbrev_time_year
allmydata.test.test_abbreviate.Abbreviate.test_parse_space
allmydata.test.test_abbreviate.Abbreviate.test_space
allmydata.test.test_abbreviate.Abbreviate.test_time
allmydata.test.test_base32.Base32.test_a2b
allmydata.test.test_base32.Base32.test_a2b_b2a_match_Pythons
allmydata.test.test_base32.Base32.test_b2a
allmydata.test.test_base32.Base32.test_b2a_or_none
allmydata.test.test_base62.Base62.test_ende_0x00
allmydata.test.test_base62.Base62.test_ende_0x000000
allmydata.test.test_base62.Base62.test_ende_0x01
allmydata.test.test_base62.Base62.test_ende_0x0100
allmydata.test.test_base62.Base62.test_ende_0x010000
allmydata.test.test_base62.Base62.test_ende_longrandstr
allmydata.test.test_base62.Base62.test_ende_randstr
allmydata.test.test_base62.Base62.test_known_values
allmydata.test.test_base62.Base62.test_num_octets_that_encode_to_this_many_chars
allmydata.test.test_base62.Base62.test_odd_sizes
allmydata.test.test_base62.Base62.test_roundtrip
allmydata.test.test_deferredutil.DeferredUtilTests.test_failure
allmydata.test.test_deferredutil.DeferredUtilTests.test_gather_results
allmydata.test.test_deferredutil.DeferredUtilTests.test_success
allmydata.test.test_deferredutil.DeferredUtilTests.test_wait_for_delayed_calls
allmydata.test.test_dictutil.DictUtil.test_auxdict
allmydata.test.test_dictutil.DictUtil.test_dict_of_sets
allmydata.test.test_hashtree.Complete.test_create
allmydata.test.test_hashtree.Complete.test_dump
allmydata.test.test_hashtree.Complete.test_needed_hashes
allmydata.test.test_hashtree.Incomplete.test_check
allmydata.test.test_hashtree.Incomplete.test_create
allmydata.test.test_hashtree.Incomplete.test_depth_of
allmydata.test.test_hashtree.Incomplete.test_large
allmydata.test.test_hashtree.Incomplete.test_needed_hashes
allmydata.test.test_hashutil.HashUtilTests.test_chk
allmydata.test.test_hashutil.HashUtilTests.test_hashers
allmydata.test.test_hashutil.HashUtilTests.test_known_answers
allmydata.test.test_hashutil.HashUtilTests.test_random_key
allmydata.test.test_hashutil.HashUtilTests.test_sha256d
allmydata.test.test_hashutil.HashUtilTests.test_sha256d_truncated
allmydata.test.test_hashutil.HashUtilTests.test_timing_safe_compare
allmydata.test.test_humanreadable.HumanReadable.test_repr
allmydata.test.test_iputil.ListAddresses.test_get_local_ip_for
allmydata.test.test_iputil.ListAddresses.test_list_async
allmydata.test.test_iputil.ListAddresses.test_list_async_mock_cygwin
allmydata.test.test_iputil.ListAddresses.test_list_async_mock_ifconfig
allmydata.test.test_iputil.ListAddresses.test_list_async_mock_ip_addr
allmydata.test.test_iputil.ListAddresses.test_list_async_mock_route
allmydata.test.test_iputil.ListenOnUsed.test_random_port
allmydata.test.test_iputil.ListenOnUsed.test_specific_port
allmydata.test.test_netstring.Netstring.test_encode
allmydata.test.test_netstring.Netstring.test_extra
allmydata.test.test_netstring.Netstring.test_nested
allmydata.test.test_netstring.Netstring.test_split
allmydata.test.test_observer.Observer.test_lazy_oneshot
allmydata.test.test_observer.Observer.test_observerlist
allmydata.test.test_observer.Observer.test_oneshot
allmydata.test.test_observer.Observer.test_oneshot_fireagain
allmydata.test.test_pipeline.Pipeline.test_basic
allmydata.test.test_pipeline.Pipeline.test_errors
allmydata.test.test_pipeline.Pipeline.test_errors2
allmydata.test.test_python3.Python3PortingEffortTests.test_finished_porting
allmydata.test.test_python3.Python3PortingEffortTests.test_ported_modules_distinct
allmydata.test.test_python3.Python3PortingEffortTests.test_ported_modules_exist
allmydata.test.test_spans.ByteSpans.test_basic
allmydata.test.test_spans.ByteSpans.test_large
allmydata.test.test_spans.ByteSpans.test_math
allmydata.test.test_spans.ByteSpans.test_overlap
allmydata.test.test_spans.ByteSpans.test_random
allmydata.test.test_spans.StringSpans.test_basic
allmydata.test.test_spans.StringSpans.test_random
allmydata.test.test_spans.StringSpans.test_test
allmydata.test.test_statistics.Statistics.test_binomial_coeff
allmydata.test.test_statistics.Statistics.test_binomial_distribution_pmf
allmydata.test.test_statistics.Statistics.test_convolve
allmydata.test.test_statistics.Statistics.test_find_k
allmydata.test.test_statistics.Statistics.test_pr_backup_file_loss
allmydata.test.test_statistics.Statistics.test_pr_file_loss
allmydata.test.test_statistics.Statistics.test_repair_cost
allmydata.test.test_statistics.Statistics.test_repair_count_pmf
allmydata.test.test_statistics.Statistics.test_survival_pmf
allmydata.test.test_time_format.TimeFormat.test_epoch
allmydata.test.test_time_format.TimeFormat.test_epoch_in_London
allmydata.test.test_time_format.TimeFormat.test_format_delta
allmydata.test.test_time_format.TimeFormat.test_format_time
allmydata.test.test_time_format.TimeFormat.test_format_time_y2038
allmydata.test.test_time_format.TimeFormat.test_iso_utc
allmydata.test.test_time_format.TimeFormat.test_parse_date
allmydata.test.test_time_format.TimeFormat.test_parse_duration
allmydata.test.test_version.CheckRequirement.test_cross_check
allmydata.test.test_version.CheckRequirement.test_cross_check_unparseable_versions
allmydata.test.test_version.CheckRequirement.test_extract_openssl_version
allmydata.test.test_version.CheckRequirement.test_packages_from_pkg_resources
allmydata.test.test_version.T.test_report_import_error
allmydata.test.test_version.VersionTestCase.test_basic_versions
allmydata.test.test_version.VersionTestCase.test_comparison
allmydata.test.test_version.VersionTestCase.test_from_parts
allmydata.test.test_version.VersionTestCase.test_irrational_versions
allmydata.test.test_version.VersionTestCase.test_suggest_normalized_version
newsfragments/3313.minor (new file):
  Replace nevow with twisted.web in web.operations.OphandleTable

newsfragments/3340.minor, 3341.minor, 3342.minor, 3343.minor, 3344.minor, 3346.minor (new empty files)

newsfragments/3348.bugfix (new file):
  Use last known revision of Chutney that is known to work with Python 2 for Tor integration tests.

newsfragments/3349.bugfix (new file):
  Mutable files now use RSA exponent 65537

newsfragments/3351.minor, 3353.minor (new empty files)

newsfragments/3354.minor (new file, single blank line)

newsfragments/3356.minor (new empty file)

newsfragments/3357.minor (new file, single blank line)

newsfragments/3359.minor (new empty file)
@@ -4,7 +4,7 @@
 , setuptools, setuptoolsTrial, pyasn1, zope_interface
 , service-identity, pyyaml, magic-wormhole, treq, appdirs
 , beautifulsoup4, eliot, autobahn, cryptography
-, html5lib, pyutil
+, html5lib, pyutil, distro
 }:
 python.pkgs.buildPythonPackage rec {
   version = "1.14.0.dev";
@@ -50,7 +50,7 @@ python.pkgs.buildPythonPackage rec {
     setuptoolsTrial pyasn1 zope_interface
     service-identity pyyaml magic-wormhole treq
     eliot autobahn cryptography setuptools
-    future pyutil
+    future pyutil distro
   ];

   checkInputs = with python.pkgs; [
@@ -59,6 +59,7 @@ python.pkgs.buildPythonPackage rec {
     fixtures
     beautifulsoup4
     html5lib
+    tenacity
   ];

   checkPhase = ''
setup.py

@@ -127,6 +127,9 @@ install_requires = [

     # Utility code:
     "pyutil >= 3.3.0",
+
+    # Linux distribution detection:
+    "distro >= 1.4.0",
 ]

 setup_requires = [
@@ -387,6 +390,7 @@ setup(name="tahoe-lafs", # also set in __init__.py
         "beautifulsoup4",
         "html5lib",
         "junitxml",
+        "tenacity",
     ] + tor_requires + i2p_requires,
     "tor": tor_requires,
     "i2p": i2p_requires,
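The new "distro" dependency is the usual replacement for platform.linux_distribution(), which was removed in Python 3.8. A quick usage sketch (the printed values depend entirely on the host system):

    import distro  # the new dependency: "distro >= 1.4.0"

    # Basic Linux distribution detection; each call returns an empty
    # string on platforms with no distribution information.
    print(distro.id())                # e.g. "ubuntu"
    print(distro.version())           # e.g. "20.04"
    print(distro.name(pretty=True))   # e.g. "Ubuntu 20.04.1 LTS"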
@@ -46,18 +46,8 @@ def create_signing_keypair(key_size):

     :returns: 2-tuple of (private_key, public_key)
     """
-    # Tahoe's original use of pycryptopp would use cryptopp's default
-    # public_exponent, which is 17
-    #
-    # Thus, we are using 17 here as well. However, there are other
-    # choices; see this for more discussion:
-    # https://security.stackexchange.com/questions/2335/should-rsa-public-exponent-be-only-in-3-5-17-257-or-65537-due-to-security-c
-    #
-    # Another popular choice is 65537. See:
-    # https://cryptography.io/en/latest/hazmat/primitives/asymmetric/rsa/#cryptography.hazmat.primitives.asymmetric.rsa.generate_private_key
-    # https://www.daemonology.net/blog/2009-06-11-cryptographic-right-answers.html
     priv_key = rsa.generate_private_key(
-        public_exponent=17,
+        public_exponent=65537,
         key_size=key_size,
         backend=default_backend()
     )
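A minimal sketch checking the new exponent on a freshly generated key, using the same pyca/cryptography API the diff touches (the 2048-bit key size is an arbitrary choice for the sketch):

    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives.asymmetric import rsa

    # Generate a key the same way create_signing_keypair() now does.
    priv_key = rsa.generate_private_key(
        public_exponent=65537,
        key_size=2048,
        backend=default_backend(),
    )

    # The public exponent e is part of the key's public numbers.
    assert priv_key.public_key().public_numbers().e == 65537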
@@ -1,7 +1,4 @@
 # -*- test-case-name: allmydata.test.test_hashtree -*-

-from allmydata.util import mathutil # from the pyutil library
-
 """
 Read and write chunks from files.

@@ -50,6 +47,17 @@ or implied. It probably won't make your computer catch on fire,
 or eat your children, but it might. Use at your own risk.
 """

+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min  # noqa: F401
+
+from allmydata.util import mathutil # from the pyutil library
+
 from allmydata.util import base32
 from allmydata.util.hashutil import tagged_hash, tagged_pair_hash

@@ -170,9 +178,10 @@ def depth_of(i):
     return mathutil.log_floor(i+1, 2)

 def empty_leaf_hash(i):
-    return tagged_hash('Merkle tree empty leaf', "%d" % i)
+    return tagged_hash(b'Merkle tree empty leaf', b"%d" % i)

 def pair_hash(a, b):
-    return tagged_pair_hash('Merkle tree internal node', a, b)
+    return tagged_pair_hash(b'Merkle tree internal node', a, b)

 class HashTree(CompleteBinaryTreeMixin, list):
     """
@@ -215,7 +224,7 @@ class HashTree(CompleteBinaryTreeMixin, list):
         while len(rows[-1]) != 1:
             last = rows[-1]
             rows += [[pair_hash(last[2*i], last[2*i+1])
-                      for i in xrange(len(last)//2)]]
+                      for i in range(len(last)//2)]]
         # Flatten the list of rows into a single list.
         rows.reverse()
         self[:] = sum(rows, [])
@@ -289,7 +298,7 @@ class IncompleteHashTree(CompleteBinaryTreeMixin, list):
         rows = [L]
         while len(rows[-1]) != 1:
             last = rows[-1]
-            rows += [[None for i in xrange(len(last)//2)]]
+            rows += [[None for i in range(len(last)//2)]]
         # Flatten the list of rows into a single list.
         rows.reverse()
         self[:] = sum(rows, [])
@@ -372,12 +381,12 @@ class IncompleteHashTree(CompleteBinaryTreeMixin, list):

         assert isinstance(hashes, dict)
         for h in hashes.values():
-            assert isinstance(h, str)
+            assert isinstance(h, bytes)
         assert isinstance(leaves, dict)
         for h in leaves.values():
-            assert isinstance(h, str)
+            assert isinstance(h, bytes)
         new_hashes = hashes.copy()
-        for leafnum,leafhash in leaves.iteritems():
+        for leafnum,leafhash in leaves.items():
             hashnum = self.first_leaf_num + leafnum
             if hashnum in new_hashes:
                 if new_hashes[hashnum] != leafhash:
@@ -416,7 +425,7 @@ class IncompleteHashTree(CompleteBinaryTreeMixin, list):

         # first we provisionally add all hashes to the tree, comparing
         # any duplicates
-        for i,h in new_hashes.iteritems():
+        for i,h in new_hashes.items():
             if self[i]:
                 if self[i] != h:
                     raise BadHashError("new hash %s does not match "
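The row-building loop in HashTree is the usual bottom-up Merkle construction: each row pairs adjacent hashes until a single root remains. A standalone sketch of the same idea, with plain SHA-256 standing in for Tahoe's tagged_pair_hash (so the digests will not match Tahoe's):

    import hashlib

    def pair_hash(a, b):
        # Stand-in for tagged_pair_hash; Tahoe tags its hashes, this
        # sketch simply concatenates the two children.
        return hashlib.sha256(a + b).digest()

    def merkle_root(leaf_hashes):
        """Reduce a power-of-two list of leaf hashes to a single root."""
        row = list(leaf_hashes)
        assert row and (len(row) & (len(row) - 1)) == 0, "need 2**k leaves"
        while len(row) > 1:
            row = [pair_hash(row[2*i], row[2*i+1]) for i in range(len(row)//2)]
        return row[0]

    leaves = [hashlib.sha256(b"%d" % i).digest() for i in range(8)]
    print(merkle_root(leaves).hex())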
src/allmydata/test/common_py3.py (new file, 67 lines)

@@ -0,0 +1,67 @@
"""
Common utilities that have been ported to Python 3.

Ported to Python 3.
"""

from __future__ import unicode_literals
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from future.utils import PY2
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min  # noqa: F401

import os
import time
import signal

from twisted.internet import reactor


class TimezoneMixin(object):

    def setTimezone(self, timezone):
        def tzset_if_possible():
            # Windows doesn't have time.tzset().
            if hasattr(time, 'tzset'):
                time.tzset()

        unset = object()
        originalTimezone = os.environ.get('TZ', unset)
        def restoreTimezone():
            if originalTimezone is unset:
                del os.environ['TZ']
            else:
                os.environ['TZ'] = originalTimezone
            tzset_if_possible()

        os.environ['TZ'] = timezone
        self.addCleanup(restoreTimezone)
        tzset_if_possible()

    def have_working_tzset(self):
        return hasattr(time, 'tzset')


class SignalMixin(object):
    # This class is necessary for any code which wants to use Processes
    # outside the usual reactor.run() environment. It is copied from
    # Twisted's twisted.test.test_process . Note that Twisted-8.2.0 uses
    # something rather different.
    sigchldHandler = None

    def setUp(self):
        # make sure SIGCHLD handler is installed, as it should be on
        # reactor.run(). problem is reactor may not have been run when this
        # test runs.
        if hasattr(reactor, "_handleSigchld") and hasattr(signal, "SIGCHLD"):
            self.sigchldHandler = signal.signal(signal.SIGCHLD,
                                                reactor._handleSigchld)
        return super(SignalMixin, self).setUp()

    def tearDown(self):
        if self.sigchldHandler:
            signal.signal(signal.SIGCHLD, self.sigchldHandler)
        return super(SignalMixin, self).tearDown()
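A sketch of how a test case might use the mixins above; the test body here is hypothetical, but the mixin API is the one defined in the new file:

    from twisted.trial import unittest

    from allmydata.test.common_py3 import TimezoneMixin

    class TimeDisplayTests(TimezoneMixin, unittest.TestCase):
        def test_in_london(self):
            if not self.have_working_tzset():
                raise unittest.SkipTest("time.tzset is not available")
            # Switch the process to London time for the duration of this
            # test; the mixin's cleanup restores the original TZ after.
            self.setTimezone("Europe/London")
            ...  # assertions about local-time formatting would go here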
@@ -1,6 +1,6 @@
 from __future__ import print_function

-import os, signal, time
+import os
 from random import randrange
 from six.moves import StringIO

@@ -12,6 +12,8 @@ from ..util.assertutil import precondition
 from allmydata.util.encodingutil import (unicode_platform, get_filesystem_encoding,
                                          get_io_encoding)
 from ..scripts import runner
+from .common_py3 import SignalMixin


 def skip_if_cannot_represent_filename(u):
     precondition(isinstance(u, unicode))
@@ -88,27 +90,6 @@ class ReallyEqualMixin(object):
         self.assertEqual(type(a), type(b), "a :: %r, b :: %r, %r" % (a, b, msg))


-class SignalMixin(object):
-    # This class is necessary for any code which wants to use Processes
-    # outside the usual reactor.run() environment. It is copied from
-    # Twisted's twisted.test.test_process . Note that Twisted-8.2.0 uses
-    # something rather different.
-    sigchldHandler = None
-
-    def setUp(self):
-        # make sure SIGCHLD handler is installed, as it should be on
-        # reactor.run(). problem is reactor may not have been run when this
-        # test runs.
-        if hasattr(reactor, "_handleSigchld") and hasattr(signal, "SIGCHLD"):
-            self.sigchldHandler = signal.signal(signal.SIGCHLD,
-                                                reactor._handleSigchld)
-        return super(SignalMixin, self).setUp()
-
-    def tearDown(self):
-        if self.sigchldHandler:
-            signal.signal(signal.SIGCHLD, self.sigchldHandler)
-        return super(SignalMixin, self).tearDown()
-
 class StallMixin(object):
     def stall(self, res=None, delay=1):
         d = defer.Deferred()
@@ -185,31 +166,6 @@ class TestMixin(SignalMixin):
         self.fail("Reactor was still active when it was required to be quiescent.")


-class TimezoneMixin(object):
-
-    def setTimezone(self, timezone):
-        def tzset_if_possible():
-            # Windows doesn't have time.tzset().
-            if hasattr(time, 'tzset'):
-                time.tzset()
-
-        unset = object()
-        originalTimezone = os.environ.get('TZ', unset)
-        def restoreTimezone():
-            if originalTimezone is unset:
-                del os.environ['TZ']
-            else:
-                os.environ['TZ'] = originalTimezone
-            tzset_if_possible()
-
-        os.environ['TZ'] = timezone
-        self.addCleanup(restoreTimezone)
-        tzset_if_possible()
-
-    def have_working_tzset(self):
-        return hasattr(time, 'tzset')
-
-
 try:
     import win32file
     import win32con
src/allmydata/test/test_abbreviate.py (new file, 149 lines)

@@ -0,0 +1,149 @@
"""
Tests for allmydata.util.abbreviate.

Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from future.utils import PY2
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min  # noqa: F401

from datetime import timedelta

from twisted.trial import unittest

from allmydata.util import abbreviate


class Abbreviate(unittest.TestCase):
    def test_abbrev_time_1s(self):
        diff = timedelta(seconds=1)
        s = abbreviate.abbreviate_time(diff)
        self.assertEqual('1 second ago', s)

    def test_abbrev_time_25s(self):
        diff = timedelta(seconds=25)
        s = abbreviate.abbreviate_time(diff)
        self.assertEqual('25 seconds ago', s)

    def test_abbrev_time_future_5_minutes(self):
        diff = timedelta(minutes=-5)
        s = abbreviate.abbreviate_time(diff)
        self.assertEqual('5 minutes in the future', s)

    def test_abbrev_time_hours(self):
        diff = timedelta(hours=4)
        s = abbreviate.abbreviate_time(diff)
        self.assertEqual('4 hours ago', s)

    def test_abbrev_time_day(self):
        diff = timedelta(hours=49)  # must be more than 2 days
        s = abbreviate.abbreviate_time(diff)
        self.assertEqual('2 days ago', s)

    def test_abbrev_time_month(self):
        diff = timedelta(days=91)
        s = abbreviate.abbreviate_time(diff)
        self.assertEqual('3 months ago', s)

    def test_abbrev_time_year(self):
        diff = timedelta(weeks=(5 * 52) + 1)
        s = abbreviate.abbreviate_time(diff)
        self.assertEqual('5 years ago', s)

    def test_time(self):
        a = abbreviate.abbreviate_time
        self.failUnlessEqual(a(None), "unknown")
        self.failUnlessEqual(a(0), "0 seconds")
        self.failUnlessEqual(a(1), "1 second")
        self.failUnlessEqual(a(2), "2 seconds")
        self.failUnlessEqual(a(119), "119 seconds")
        MIN = 60
        self.failUnlessEqual(a(2*MIN), "2 minutes")
        self.failUnlessEqual(a(60*MIN), "60 minutes")
        self.failUnlessEqual(a(179*MIN), "179 minutes")
        HOUR = 60*MIN
        self.failUnlessEqual(a(180*MIN), "3 hours")
        self.failUnlessEqual(a(4*HOUR), "4 hours")
        DAY = 24*HOUR
        MONTH = 30*DAY
        self.failUnlessEqual(a(2*DAY), "2 days")
        self.failUnlessEqual(a(2*MONTH), "2 months")
        YEAR = 365*DAY
        self.failUnlessEqual(a(5*YEAR), "5 years")

    def test_space(self):
        tests_si = [(None, "unknown"),
                    (0, "0 B"),
                    (1, "1 B"),
                    (999, "999 B"),
                    (1000, "1000 B"),
                    (1023, "1023 B"),
                    (1024, "1.02 kB"),
                    (20*1000, "20.00 kB"),
                    (1024*1024, "1.05 MB"),
                    (1000*1000, "1.00 MB"),
                    (1000*1000*1000, "1.00 GB"),
                    (1000*1000*1000*1000, "1.00 TB"),
                    (1000*1000*1000*1000*1000, "1.00 PB"),
                    (1000*1000*1000*1000*1000*1000, "1.00 EB"),
                    (1234567890123456789, "1.23 EB"),
                    ]
        for (x, expected) in tests_si:
            got = abbreviate.abbreviate_space(x, SI=True)
            self.failUnlessEqual(got, expected)

        tests_base1024 = [(None, "unknown"),
                          (0, "0 B"),
                          (1, "1 B"),
                          (999, "999 B"),
                          (1000, "1000 B"),
                          (1023, "1023 B"),
                          (1024, "1.00 kiB"),
                          (20*1024, "20.00 kiB"),
                          (1000*1000, "976.56 kiB"),
                          (1024*1024, "1.00 MiB"),
                          (1024*1024*1024, "1.00 GiB"),
                          (1024*1024*1024*1024, "1.00 TiB"),
                          (1000*1000*1000*1000*1000, "909.49 TiB"),
                          (1024*1024*1024*1024*1024, "1.00 PiB"),
                          (1024*1024*1024*1024*1024*1024, "1.00 EiB"),
                          (1234567890123456789, "1.07 EiB"),
                          ]
        for (x, expected) in tests_base1024:
            got = abbreviate.abbreviate_space(x, SI=False)
            self.failUnlessEqual(got, expected)

        self.failUnlessEqual(abbreviate.abbreviate_space_both(1234567),
                             "(1.23 MB, 1.18 MiB)")

    def test_parse_space(self):
        p = abbreviate.parse_abbreviated_size
        self.failUnlessEqual(p(""), None)
        self.failUnlessEqual(p(None), None)
        self.failUnlessEqual(p("123"), 123)
        self.failUnlessEqual(p("123B"), 123)
        self.failUnlessEqual(p("2K"), 2000)
        self.failUnlessEqual(p("2kb"), 2000)
        self.failUnlessEqual(p("2KiB"), 2048)
        self.failUnlessEqual(p("10MB"), 10*1000*1000)
        self.failUnlessEqual(p("10MiB"), 10*1024*1024)
        self.failUnlessEqual(p("5G"), 5*1000*1000*1000)
        self.failUnlessEqual(p("4GiB"), 4*1024*1024*1024)
        self.failUnlessEqual(p("3TB"), 3*1000*1000*1000*1000)
        self.failUnlessEqual(p("3TiB"), 3*1024*1024*1024*1024)
        self.failUnlessEqual(p("6PB"), 6*1000*1000*1000*1000*1000)
        self.failUnlessEqual(p("6PiB"), 6*1024*1024*1024*1024*1024)
        self.failUnlessEqual(p("9EB"), 9*1000*1000*1000*1000*1000*1000)
        self.failUnlessEqual(p("9EiB"), 9*1024*1024*1024*1024*1024*1024)

        e = self.failUnlessRaises(ValueError, p, "12 cubits")
        self.failUnlessIn("12 cubits", str(e))
        e = self.failUnlessRaises(ValueError, p, "1 BB")
        self.failUnlessIn("1 BB", str(e))
        e = self.failUnlessRaises(ValueError, p, "fhtagn")
        self.failUnlessIn("fhtagn", str(e))
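A quick summary of the behavior these tests pin down, with expected values taken directly from the assertions above (requires tahoe-lafs installed):

    from datetime import timedelta

    from allmydata.util import abbreviate

    abbreviate.abbreviate_time(timedelta(seconds=25))   # '25 seconds ago'
    abbreviate.abbreviate_time(timedelta(minutes=-5))   # '5 minutes in the future'
    abbreviate.abbreviate_space(1024, SI=True)          # '1.02 kB'  (decimal units)
    abbreviate.abbreviate_space(1024, SI=False)         # '1.00 kiB' (binary units)
    abbreviate.parse_abbreviated_size("10MiB")          # 10485760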
@@ -1,6 +1,16 @@
 """
 Tests for allmydata.util.base32.
+
+Ported to Python 3.
 """
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min  # noqa: F401

 import base64

@@ -22,7 +32,9 @@ class Base32(unittest.TestCase):
         self.failUnlessEqual(encoded, x)
         self.assertIsInstance(encoded, bytes)
         self.assertTrue(base32.could_be_base32_encoded(encoded))
-        self.assertEqual(base32.a2b(encoded), input_bytes)
+        decoded = base32.a2b(encoded)
+        self.assertEqual(decoded, input_bytes)
+        self.assertIsInstance(decoded, bytes)

     def test_b2a(self):
         self.failUnlessEqual(base32.b2a(b"\x12\x34"), b"ci2a")
@@ -1,7 +1,22 @@
-import random, unittest
+"""
+Tests for allmydata.util.base62.
+
+Ported to Python 3.
+"""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min  # noqa: F401
+
+from past.builtins import chr as byteschr
+
+import random, unittest

 from hypothesis import (
     strategies as st,
     given,
@@ -10,20 +25,41 @@ from hypothesis import (
 from allmydata.util import base62, mathutil

 def insecurerandstr(n):
-    return b''.join(map(byteschr, map(random.randrange, [0]*n, [256]*n)))
+    return bytes(list(map(random.randrange, [0]*n, [256]*n)))

-class T(unittest.TestCase):
+class Base62(unittest.TestCase):
     def _test_num_octets_that_encode_to_this_many_chars(self, chars, octets):
         assert base62.num_octets_that_encode_to_this_many_chars(chars) == octets, "%s != %s <- %s" % (octets, base62.num_octets_that_encode_to_this_many_chars(chars), chars)

-    def _test_ende(self, bs):
-        ascii=base62.b2a(bs)
-        bs2=base62.a2b(ascii)
-        assert bs2 == bs, "bs2: %s:%s, bs: %s:%s, ascii: %s:%s" % (len(bs2), repr(bs2), len(bs), repr(bs), len(ascii), repr(ascii))
+    def _test_roundtrip(self, bs):
+        encoded = base62.b2a(bs)
+        decoded = base62.a2b(encoded)
+        self.assertEqual(decoded, bs)
+        self.assertIsInstance(encoded, bytes)
+        self.assertIsInstance(bs, bytes)
+        self.assertIsInstance(decoded, bytes)
+        # Encoded string only uses values from the base62 allowed characters:
+        self.assertFalse(set(encoded) - set(base62.chars))

     @given(input_bytes=st.binary(max_size=100))
     def test_roundtrip(self, input_bytes):
-        self._test_ende(input_bytes)
+        self._test_roundtrip(input_bytes)

+    def test_known_values(self):
+        """Known values to ensure the algorithm hasn't changed."""
+
+        def check_expected(plaintext, encoded):
+            result1 = base62.b2a(plaintext)
+            self.assertEqual(encoded, result1)
+            result2 = base62.a2b(encoded)
+            self.assertEqual(plaintext, result2)
+
+        check_expected(b"hello", b'7tQLFHz')
+        check_expected(b"", b'0')
+        check_expected(b"zzz", b'0Xg7e')
+        check_expected(b"\x36\xffWAT", b'49pq4mq')
+        check_expected(b"1234 22323", b'1A0afZe9mxSZpz')
+        check_expected(b"______", b'0TmAuCHJX')

     def test_num_octets_that_encode_to_this_many_chars(self):
         return self._test_num_octets_that_encode_to_this_many_chars(2, 1)
@@ -32,25 +68,25 @@ class T(unittest.TestCase):
         return self._test_num_octets_that_encode_to_this_many_chars(6, 4)

     def test_ende_0x00(self):
-        return self._test_ende(b'\x00')
+        return self._test_roundtrip(b'\x00')

     def test_ende_0x01(self):
-        return self._test_ende(b'\x01')
+        return self._test_roundtrip(b'\x01')

     def test_ende_0x0100(self):
-        return self._test_ende(b'\x01\x00')
+        return self._test_roundtrip(b'\x01\x00')

     def test_ende_0x000000(self):
-        return self._test_ende(b'\x00\x00\x00')
+        return self._test_roundtrip(b'\x00\x00\x00')

    def test_ende_0x010000(self):
-        return self._test_ende(b'\x01\x00\x00')
+        return self._test_roundtrip(b'\x01\x00\x00')

     def test_ende_randstr(self):
-        return self._test_ende(insecurerandstr(2**4))
+        return self._test_roundtrip(insecurerandstr(2**4))

     def test_ende_longrandstr(self):
-        return self._test_ende(insecurerandstr(random.randrange(0, 2**10)))
+        return self._test_roundtrip(insecurerandstr(random.randrange(0, 2**10)))

     def test_odd_sizes(self):
         for j in range(2**6):
@@ -59,19 +95,12 @@ class T(unittest.TestCase):
             bs = insecurerandstr(numos)
             # zero-out unused least-sig bits
             if lib%8:
-                b=ord(bs[-1])
+                b = ord(bs[-1:])
                 b = b >> (8 - (lib%8))
                 b = b << (8 - (lib%8))
-                bs = bs[:-1] + chr(b)
+                bs = bs[:-1] + byteschr(b)
             asl = base62.b2a_l(bs, lib)
             assert len(asl) == base62.num_chars_that_this_many_octets_encode_to(numos) # the size of the base-62 encoding must be just right
             bs2l = base62.a2b_l(asl, lib)
             assert len(bs2l) == numos # the size of the result must be just right
             assert bs == bs2l

-def suite():
-    suite = unittest.makeSuite(T, 'test')
-    return suite
-
-if __name__ == "__main__":
-    unittest.main()
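The switch to hypothesis's @given is the standard property-based pattern for codecs: generate arbitrary bytes and assert decode(encode(x)) == x. A self-contained sketch of the pattern, using base64 from the standard library as the codec so the example runs without tahoe-lafs installed:

    import base64
    import unittest

    from hypothesis import given, strategies as st

    class RoundtripTests(unittest.TestCase):
        @given(input_bytes=st.binary(max_size=100))
        def test_roundtrip(self, input_bytes):
            # Any bytes value must survive an encode/decode cycle unchanged.
            encoded = base64.b64encode(input_bytes)
            self.assertEqual(base64.b64decode(encoded), input_bytes)
            self.assertIsInstance(encoded, bytes)

    if __name__ == "__main__":
        unittest.main()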
src/allmydata/test/test_dictutil.py (new file, 90 lines)

@@ -0,0 +1,90 @@
"""
Tests for allmydata.util.dictutil.

Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from future.utils import PY2
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min  # noqa: F401

from twisted.trial import unittest

from allmydata.util import dictutil


class DictUtil(unittest.TestCase):
    def test_dict_of_sets(self):
        ds = dictutil.DictOfSets()
        ds.add(1, "a")
        ds.add(2, "b")
        ds.add(2, "b")
        ds.add(2, "c")
        self.failUnlessEqual(ds[1], set(["a"]))
        self.failUnlessEqual(ds[2], set(["b", "c"]))
        ds.discard(3, "d")  # should not raise an exception
        ds.discard(2, "b")
        self.failUnlessEqual(ds[2], set(["c"]))
        ds.discard(2, "c")
        self.failIf(2 in ds)

        ds.add(3, "f")
        ds2 = dictutil.DictOfSets()
        ds2.add(3, "f")
        ds2.add(3, "g")
        ds2.add(4, "h")
        ds.update(ds2)
        self.failUnlessEqual(ds[1], set(["a"]))
        self.failUnlessEqual(ds[3], set(["f", "g"]))
        self.failUnlessEqual(ds[4], set(["h"]))

    def test_auxdict(self):
        d = dictutil.AuxValueDict()
        # we put the serialized form in the auxdata
        d.set_with_aux("key", ("filecap", "metadata"), "serialized")

        self.failUnlessEqual(list(d.keys()), ["key"])
        self.failUnlessEqual(d["key"], ("filecap", "metadata"))
        self.failUnlessEqual(d.get_aux("key"), "serialized")
        def _get_missing(key):
            return d[key]
        self.failUnlessRaises(KeyError, _get_missing, "nonkey")
        self.failUnlessEqual(d.get("nonkey"), None)
        self.failUnlessEqual(d.get("nonkey", "nonvalue"), "nonvalue")
        self.failUnlessEqual(d.get_aux("nonkey"), None)
        self.failUnlessEqual(d.get_aux("nonkey", "nonvalue"), "nonvalue")

        d["key"] = ("filecap2", "metadata2")
        self.failUnlessEqual(d["key"], ("filecap2", "metadata2"))
        self.failUnlessEqual(d.get_aux("key"), None)

        d.set_with_aux("key2", "value2", "aux2")
        self.failUnlessEqual(sorted(d.keys()), ["key", "key2"])
        del d["key2"]
        self.failUnlessEqual(list(d.keys()), ["key"])
        self.failIf("key2" in d)
        self.failUnlessRaises(KeyError, _get_missing, "key2")
        self.failUnlessEqual(d.get("key2"), None)
        self.failUnlessEqual(d.get_aux("key2"), None)
        d["key2"] = "newvalue2"
        self.failUnlessEqual(d.get("key2"), "newvalue2")
        self.failUnlessEqual(d.get_aux("key2"), None)

        d = dictutil.AuxValueDict({1:2,3:4})
        self.failUnlessEqual(sorted(d.keys()), [1,3])
        self.failUnlessEqual(d[1], 2)
        self.failUnlessEqual(d.get_aux(1), None)

        d = dictutil.AuxValueDict([ (1,2), (3,4) ])
        self.failUnlessEqual(sorted(d.keys()), [1,3])
        self.failUnlessEqual(d[1], 2)
        self.failUnlessEqual(d.get_aux(1), None)

        d = dictutil.AuxValueDict(one=1, two=2)
        self.failUnlessEqual(sorted(d.keys()), ["one","two"])
        self.failUnlessEqual(d["one"], 1)
        self.failUnlessEqual(d.get_aux("one"), None)
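test_dict_of_sets fully specifies DictOfSets' small API: add, discard (with the key vanishing once its set is empty), and update. A hedged re-implementation matching exactly the behavior the test checks, not Tahoe's actual code:

    class DictOfSets(dict):
        """A dict mapping keys to sets, per the behavior test_dict_of_sets checks."""

        def add(self, key, value):
            # Create the set on first use, then insert (duplicates collapse).
            self.setdefault(key, set()).add(value)

        def discard(self, key, value):
            # Silently ignore unknown keys; drop the key once its set empties.
            if key in self:
                self[key].discard(value)
                if not self[key]:
                    del self[key]

        def update(self, other):
            # Merge another DictOfSets, unioning sets for shared keys.
            for key, values in other.items():
                self.setdefault(key, set()).update(values)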
@@ -1,4 +1,18 @@
 # -*- test-case-name: allmydata.test.test_hashtree -*-
+"""
+Tests for allmydata.hashtree.
+
+Ported to Python 3.
+"""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min  # noqa: F401


 from twisted.trial import unittest

@@ -7,8 +21,8 @@ from allmydata import hashtree


 def make_tree(numleaves):
-    leaves = ["%d" % i for i in range(numleaves)]
-    leaf_hashes = [tagged_hash("tag", leaf) for leaf in leaves]
+    leaves = [b"%d" % i for i in range(numleaves)]
+    leaf_hashes = [tagged_hash(b"tag", leaf) for leaf in leaves]
     ht = hashtree.HashTree(leaf_hashes)
     return ht

@@ -20,7 +34,7 @@ class Complete(unittest.TestCase):
         ht = make_tree(8)
         root = ht[0]
         self.failUnlessEqual(len(root), 32)
-        self.failUnlessEqual(ht.get_leaf(0), tagged_hash("tag", "0"))
+        self.failUnlessEqual(ht.get_leaf(0), tagged_hash(b"tag", b"0"))
         self.failUnlessRaises(IndexError, ht.get_leaf, 8)
         self.failUnlessEqual(ht.get_leaf_index(0), 7)
         self.failUnlessRaises(IndexError, ht.parent, 0)
@@ -143,7 +157,7 @@ class Incomplete(unittest.TestCase):
         current_hashes = list(iht)
         # this should fail because there aren't enough hashes known
         try:
-            iht.set_hashes(leaves={0: tagged_hash("tag", "0")})
+            iht.set_hashes(leaves={0: tagged_hash(b"tag", b"0")})
         except hashtree.NotEnoughHashesError:
             pass
         else:
@@ -157,7 +171,7 @@ class Incomplete(unittest.TestCase):
         chain = {0: ht[0], 2: ht[2], 4: ht[4], 8: ht[8]}
         # this should fail because the leaf hash is just plain wrong
         try:
-            iht.set_hashes(chain, leaves={0: tagged_hash("bad tag", "0")})
+            iht.set_hashes(chain, leaves={0: tagged_hash(b"bad tag", b"0")})
         except hashtree.BadHashError:
             pass
         else:
@@ -166,18 +180,18 @@ class Incomplete(unittest.TestCase):
         # this should fail because we give it conflicting hashes: one as an
         # internal node, another as a leaf
         try:
-            iht.set_hashes(chain, leaves={1: tagged_hash("bad tag", "1")})
+            iht.set_hashes(chain, leaves={1: tagged_hash(b"bad tag", b"1")})
         except hashtree.BadHashError:
             pass
         else:
             self.fail("didn't catch bad hash")

         bad_chain = chain.copy()
-        bad_chain[2] = ht[2] + "BOGUS"
+        bad_chain[2] = ht[2] + b"BOGUS"

         # this should fail because the internal hash is wrong
         try:
-            iht.set_hashes(bad_chain, leaves={0: tagged_hash("tag", "0")})
+            iht.set_hashes(bad_chain, leaves={0: tagged_hash(b"tag", b"0")})
         except hashtree.BadHashError:
             pass
         else:
@@ -185,23 +199,23 @@ class Incomplete(unittest.TestCase):

         # this should succeed
         try:
-            iht.set_hashes(chain, leaves={0: tagged_hash("tag", "0")})
+            iht.set_hashes(chain, leaves={0: tagged_hash(b"tag", b"0")})
         except hashtree.BadHashError as e:
             self.fail("bad hash: %s" % e)

-        self.failUnlessEqual(ht.get_leaf(0), tagged_hash("tag", "0"))
+        self.failUnlessEqual(ht.get_leaf(0), tagged_hash(b"tag", b"0"))
         self.failUnlessRaises(IndexError, ht.get_leaf, 8)

         # this should succeed too
         try:
-            iht.set_hashes(leaves={1: tagged_hash("tag", "1")})
+            iht.set_hashes(leaves={1: tagged_hash(b"tag", b"1")})
         except hashtree.BadHashError:
             self.fail("bad hash")

         # this should fail because we give it hashes that conflict with some
         # that we added successfully before
         try:
-            iht.set_hashes(leaves={1: tagged_hash("bad tag", "1")})
+            iht.set_hashes(leaves={1: tagged_hash(b"bad tag", b"1")})
         except hashtree.BadHashError:
             pass
         else:
@@ -214,6 +228,6 @@ class Incomplete(unittest.TestCase):

         # this should succeed
         try:
-            iht.set_hashes(chain, leaves={4: tagged_hash("tag", "4")})
+            iht.set_hashes(chain, leaves={4: tagged_hash(b"tag", b"4")})
         except hashtree.BadHashError as e:
             self.fail("bad hash: %s" % e)
src/allmydata/test/test_hashutil.py (new file, 135 lines)

@@ -0,0 +1,135 @@
"""
Tests for allmydata.util.hashutil.

Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from future.utils import PY2
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min  # noqa: F401

from twisted.trial import unittest

from allmydata.util import hashutil, base32


class HashUtilTests(unittest.TestCase):

    def test_random_key(self):
        k = hashutil.random_key()
        self.failUnlessEqual(len(k), hashutil.KEYLEN)
        self.assertIsInstance(k, bytes)

    def test_sha256d(self):
        h1 = hashutil.tagged_hash(b"tag1", b"value")
        self.assertIsInstance(h1, bytes)
        h2 = hashutil.tagged_hasher(b"tag1")
        h2.update(b"value")
        h2a = h2.digest()
        h2b = h2.digest()
        self.assertIsInstance(h2a, bytes)
        self.failUnlessEqual(h1, h2a)
        self.failUnlessEqual(h2a, h2b)

    def test_sha256d_truncated(self):
        h1 = hashutil.tagged_hash(b"tag1", b"value", 16)
        h2 = hashutil.tagged_hasher(b"tag1", 16)
        h2.update(b"value")
        h2 = h2.digest()
        self.failUnlessEqual(len(h1), 16)
        self.failUnlessEqual(len(h2), 16)
        self.failUnlessEqual(h1, h2)

    def test_chk(self):
        h1 = hashutil.convergence_hash(3, 10, 1000, b"data", b"secret")
        h2 = hashutil.convergence_hasher(3, 10, 1000, b"secret")
        h2.update(b"data")
        h2 = h2.digest()
        self.failUnlessEqual(h1, h2)
        self.assertIsInstance(h1, bytes)
        self.assertIsInstance(h2, bytes)

    def test_hashers(self):
        h1 = hashutil.block_hash(b"foo")
        h2 = hashutil.block_hasher()
        h2.update(b"foo")
        self.failUnlessEqual(h1, h2.digest())
        self.assertIsInstance(h1, bytes)

        h1 = hashutil.uri_extension_hash(b"foo")
        h2 = hashutil.uri_extension_hasher()
        h2.update(b"foo")
        self.failUnlessEqual(h1, h2.digest())
        self.assertIsInstance(h1, bytes)

        h1 = hashutil.plaintext_hash(b"foo")
        h2 = hashutil.plaintext_hasher()
        h2.update(b"foo")
        self.failUnlessEqual(h1, h2.digest())
        self.assertIsInstance(h1, bytes)

        h1 = hashutil.crypttext_hash(b"foo")
        h2 = hashutil.crypttext_hasher()
        h2.update(b"foo")
        self.failUnlessEqual(h1, h2.digest())
        self.assertIsInstance(h1, bytes)

        h1 = hashutil.crypttext_segment_hash(b"foo")
        h2 = hashutil.crypttext_segment_hasher()
        h2.update(b"foo")
        self.failUnlessEqual(h1, h2.digest())
        self.assertIsInstance(h1, bytes)

        h1 = hashutil.plaintext_segment_hash(b"foo")
        h2 = hashutil.plaintext_segment_hasher()
        h2.update(b"foo")
        self.failUnlessEqual(h1, h2.digest())
        self.assertIsInstance(h1, bytes)

    def test_timing_safe_compare(self):
        self.failUnless(hashutil.timing_safe_compare(b"a", b"a"))
        self.failUnless(hashutil.timing_safe_compare(b"ab", b"ab"))
        self.failIf(hashutil.timing_safe_compare(b"a", b"b"))
        self.failIf(hashutil.timing_safe_compare(b"a", b"aa"))

    def _testknown(self, hashf, expected_a, *args):
        got = hashf(*args)
        self.assertIsInstance(got, bytes)
        got_a = base32.b2a(got)
        self.failUnlessEqual(got_a, expected_a)

    def test_known_answers(self):
        # assert backwards compatibility
        self._testknown(hashutil.storage_index_hash, b"qb5igbhcc5esa6lwqorsy7e6am", b"")
        self._testknown(hashutil.block_hash, b"msjr5bh4evuh7fa3zw7uovixfbvlnstr5b65mrerwfnvjxig2jvq", b"")
        self._testknown(hashutil.uri_extension_hash, b"wthsu45q7zewac2mnivoaa4ulh5xvbzdmsbuyztq2a5fzxdrnkka", b"")
        self._testknown(hashutil.plaintext_hash, b"5lz5hwz3qj3af7n6e3arblw7xzutvnd3p3fjsngqjcb7utf3x3da", b"")
        self._testknown(hashutil.crypttext_hash, b"itdj6e4njtkoiavlrmxkvpreosscssklunhwtvxn6ggho4rkqwga", b"")
        self._testknown(hashutil.crypttext_segment_hash, b"aovy5aa7jej6ym5ikgwyoi4pxawnoj3wtaludjz7e2nb5xijb7aa", b"")
        self._testknown(hashutil.plaintext_segment_hash, b"4fdgf6qruaisyukhqcmoth4t3li6bkolbxvjy4awwcpprdtva7za", b"")
        self._testknown(hashutil.convergence_hash, b"3mo6ni7xweplycin6nowynw2we", 3, 10, 100, b"", b"converge")
        self._testknown(hashutil.my_renewal_secret_hash, b"ujhr5k5f7ypkp67jkpx6jl4p47pyta7hu5m527cpcgvkafsefm6q", b"")
        self._testknown(hashutil.my_cancel_secret_hash, b"rjwzmafe2duixvqy6h47f5wfrokdziry6zhx4smew4cj6iocsfaa", b"")
        self._testknown(hashutil.file_renewal_secret_hash, b"hzshk2kf33gzbd5n3a6eszkf6q6o6kixmnag25pniusyaulqjnia", b"", b"si")
        self._testknown(hashutil.file_cancel_secret_hash, b"bfciwvr6w7wcavsngxzxsxxaszj72dej54n4tu2idzp6b74g255q", b"", b"si")
        self._testknown(hashutil.bucket_renewal_secret_hash, b"e7imrzgzaoashsncacvy3oysdd2m5yvtooo4gmj4mjlopsazmvuq", b"", b"\x00"*20)
        self._testknown(hashutil.bucket_cancel_secret_hash, b"dvdujeyxeirj6uux6g7xcf4lvesk632aulwkzjar7srildvtqwma", b"", b"\x00"*20)
        self._testknown(hashutil.hmac, b"c54ypfi6pevb3nvo6ba42jtglpkry2kbdopqsi7dgrm4r7tw5sra", b"tag", b"")
        self._testknown(hashutil.mutable_rwcap_key_hash, b"6rvn2iqrghii5n4jbbwwqqsnqu", b"iv", b"wk")
        self._testknown(hashutil.ssk_writekey_hash, b"ykpgmdbpgbb6yqz5oluw2q26ye", b"")
        self._testknown(hashutil.ssk_write_enabler_master_hash, b"izbfbfkoait4dummruol3gy2bnixrrrslgye6ycmkuyujnenzpia", b"")
        self._testknown(hashutil.ssk_write_enabler_hash, b"fuu2dvx7g6gqu5x22vfhtyed7p4pd47y5hgxbqzgrlyvxoev62tq", b"wk", b"\x00"*20)
        self._testknown(hashutil.ssk_pubkey_fingerprint_hash, b"3opzw4hhm2sgncjx224qmt5ipqgagn7h5zivnfzqycvgqgmgz35q", b"")
        self._testknown(hashutil.ssk_readkey_hash, b"vugid4as6qbqgeq2xczvvcedai", b"")
        self._testknown(hashutil.ssk_readkey_data_hash, b"73wsaldnvdzqaf7v4pzbr2ae5a", b"iv", b"rk")
        self._testknown(hashutil.ssk_storage_index_hash, b"j7icz6kigb6hxrej3tv4z7ayym", b"")

        self._testknown(hashutil.permute_server_hash,
                        b"kb4354zeeurpo3ze5e275wzbynm6hlap", # b32(expected)
                        b"SI", # peer selection index == storage_index
                        base32.a2b(b"u33m4y7klhz3bypswqkozwetvabelhxt"), # seed
                        )
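Every hash-vs-hasher pair exercised above follows one pattern: a one-shot function and an incremental hasher that must agree on the digest. A sketch of that pattern with hashlib; the length-prefixed tagging shown here is illustrative only, not Tahoe's exact construction (Tahoe's real functions are SHA-256d based, per the test names):

    import hashlib

    def netstring(s):
        return b"%d:%s," % (len(s), s)

    def tagged_hash(tag, val):
        # One-shot: hash the tag (length-prefixed so distinct tags cannot
        # collide by concatenation) followed by the value.
        h = hashlib.sha256()
        h.update(netstring(tag))
        h.update(val)
        return h.digest()

    class tagged_hasher(object):
        # Incremental variant; must produce the same digest as tagged_hash.
        def __init__(self, tag):
            self._h = hashlib.sha256()
            self._h.update(netstring(tag))

        def update(self, data):
            self._h.update(data)

        def digest(self):
            return self._h.digest()

    h = tagged_hasher(b"tag1")
    h.update(b"value")
    assert h.digest() == tagged_hash(b"tag1", b"value")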
@@ -1,16 +1,36 @@
+"""
+Tests for allmydata.util.iputil.
+
+Ported to Python 3.
+"""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2, native_str
+if PY2:
+    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min  # noqa: F401
+
-import re, errno, subprocess, os
+import re, errno, subprocess, os, socket

 from twisted.trial import unittest

+from tenacity import retry, stop_after_attempt
+
 from foolscap.api import Tub

 from allmydata.util import iputil
-import allmydata.test.common_util as testutil
+import allmydata.test.common_py3 as testutil
 from allmydata.util.namespace import Namespace


-DOTTED_QUAD_RE=re.compile("^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$")
+DOTTED_QUAD_RE=re.compile(r"^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$")

-MOCK_IPADDR_OUTPUT = """\
+# Mock output from subprocesses should be bytes, that's what happens on both
+# Python 2 and Python 3:
+MOCK_IPADDR_OUTPUT = b"""\
 1: lo: <LOOPBACK,UP,LOWER_UP> mtu 16436 qdisc noqueue state UNKNOWN \n\
     link/loopback 00:00:00:00:00:00 brd 00:00:00:00:00:00
     inet 127.0.0.1/8 scope host lo
@@ -28,7 +48,7 @@ MOCK_IPADDR_OUTPUT = """\
     valid_lft forever preferred_lft forever
 """

-MOCK_IFCONFIG_OUTPUT = """\
+MOCK_IFCONFIG_OUTPUT = b"""\
 eth1      Link encap:Ethernet  HWaddr d4:3d:7e:01:b4:3e \n\
           inet addr:192.168.0.6  Bcast:192.168.0.255  Mask:255.255.255.0
           inet6 addr: fe80::d63d:7eff:fe01:b43e/64 Scope:Link
@@ -59,7 +79,7 @@ wlan0     Link encap:Ethernet  HWaddr 90:f6:52:27:15:0a \n\
 """

 # This is actually from a VirtualBox VM running XP.
-MOCK_ROUTE_OUTPUT = """\
+MOCK_ROUTE_OUTPUT = b"""\
 ===========================================================================
 Interface List
 0x1 ........................... MS TCP Loopback interface
@@ -98,6 +118,11 @@ class ListAddresses(testutil.SignalMixin, unittest.TestCase):
     def test_get_local_ip_for(self):
         addr = iputil.get_local_ip_for('127.0.0.1')
         self.failUnless(DOTTED_QUAD_RE.match(addr))
+        # Bytes can be taken as input:
+        bytes_addr = iputil.get_local_ip_for(b'127.0.0.1')
+        self.assertEqual(addr, bytes_addr)
+        # The output is a native string:
+        self.assertIsInstance(addr, native_str)

     def test_list_async(self):
         d = iputil.get_local_addresses_async()
@@ -162,3 +187,44 @@ class ListAddresses(testutil.SignalMixin, unittest.TestCase):
     def test_list_async_mock_cygwin(self):
         self.patch(iputil, 'platform', "cygwin")
         return self._test_list_async_mock(None, None, CYGWIN_TEST_ADDRESSES)
+
+
+class ListenOnUsed(unittest.TestCase):
+    """Tests for listenOnUnused."""
+
+    def create_tub(self, basedir):
+        os.makedirs(basedir)
+        tubfile = os.path.join(basedir, "tub.pem")
+        tub = Tub(certFile=tubfile)
+        tub.setOption("expose-remote-exception-types", False)
+        tub.startService()
+        self.addCleanup(tub.stopService)
+        return tub
+
+    @retry(stop=stop_after_attempt(7))
+    def test_random_port(self):
+        """A random port is selected if none is given."""
+        tub = self.create_tub("utils/ListenOnUsed/test_randomport")
+        self.assertEqual(len(tub.getListeners()), 0)
+        portnum = iputil.listenOnUnused(tub)
+        # We can connect to this port:
+        s = socket.socket()
+        s.connect(("127.0.0.1", portnum))
+        s.close()
+        self.assertEqual(len(tub.getListeners()), 1)
+
+        # Listen on another port:
+        tub2 = self.create_tub("utils/ListenOnUsed/test_randomport_2")
+        portnum2 = iputil.listenOnUnused(tub2)
+        self.assertNotEqual(portnum, portnum2)
+
+    @retry(stop=stop_after_attempt(7))
+    def test_specific_port(self):
+        """The given port is used."""
+        tub = self.create_tub("utils/ListenOnUsed/test_givenport")
+        s = socket.socket()
+        s.bind(("127.0.0.1", 0))
+        port = s.getsockname()[1]
+        s.close()
+        port2 = iputil.listenOnUnused(tub, port)
+        self.assertEqual(port, port2)
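The @retry(stop=stop_after_attempt(7)) decorator from tenacity (the dependency this commit adds) reruns a test up to seven times before letting its exception propagate, which papers over transient port races. A minimal sketch of the same decorator on an ordinary function, with the flaky operation simulated:

    import random

    from tenacity import retry, stop_after_attempt

    @retry(stop=stop_after_attempt(7))
    def fetch_flaky_value():
        # Simulate an operation that fails transiently, as a port bind can.
        if random.random() < 0.5:
            raise ConnectionError("transient failure")
        return 42

    # tenacity re-invokes the function on each failure; after seven
    # consecutive failures the last ConnectionError would propagate.
    print(fetch_flaky_value())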
@ -1,36 +1,59 @@
"""
Tests for allmydata.util.netstring.

Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from future.utils import PY2
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min  # noqa: F401

from twisted.trial import unittest

from allmydata.util.netstring import netstring, split_netstring


class Netstring(unittest.TestCase):
    def test_encode(self):
        """netstring() correctly encodes the given bytes."""
        result = netstring(b"abc")
        self.assertEqual(result, b"3:abc,")
        self.assertIsInstance(result, bytes)

    def test_split(self):
        a = netstring("hello") + netstring("world")
        self.failUnlessEqual(split_netstring(a, 2), (["hello", "world"], len(a)))
        self.failUnlessEqual(split_netstring(a, 2, required_trailer=""), (["hello", "world"], len(a)))
        a = netstring(b"hello") + netstring(b"world")
        for s in split_netstring(a, 2)[0]:
            self.assertIsInstance(s, bytes)
        self.failUnlessEqual(split_netstring(a, 2), ([b"hello", b"world"], len(a)))
        self.failUnlessEqual(split_netstring(a, 2, required_trailer=b""), ([b"hello", b"world"], len(a)))
        self.failUnlessRaises(ValueError, split_netstring, a, 3)
        self.failUnlessRaises(ValueError, split_netstring, a+" extra", 2, required_trailer="")
        self.failUnlessEqual(split_netstring(a+" extra", 2), (["hello", "world"], len(a)))
        self.failUnlessEqual(split_netstring(a+"++", 2, required_trailer="++"),
                             (["hello", "world"], len(a)+2))
        self.failUnlessRaises(ValueError, split_netstring, a+b" extra", 2, required_trailer=b"")
        self.failUnlessEqual(split_netstring(a+b" extra", 2), ([b"hello", b"world"], len(a)))
        self.failUnlessEqual(split_netstring(a+b"++", 2, required_trailer=b"++"),
                             ([b"hello", b"world"], len(a)+2))
        self.failUnlessRaises(ValueError,
                              split_netstring, a+"+", 2, required_trailer="not")
                              split_netstring, a+b"+", 2, required_trailer=b"not")

    def test_extra(self):
        a = netstring("hello")
        self.failUnlessEqual(split_netstring(a, 1), (["hello"], len(a)))
        b = netstring("hello") + "extra stuff"
        a = netstring(b"hello")
        self.failUnlessEqual(split_netstring(a, 1), ([b"hello"], len(a)))
        b = netstring(b"hello") + b"extra stuff"
        self.failUnlessEqual(split_netstring(b, 1),
                             (["hello"], len(a)))
                             ([b"hello"], len(a)))

    def test_nested(self):
        a = netstring("hello") + netstring("world") + "extra stuff"
        b = netstring("a") + netstring("is") + netstring(a) + netstring(".")
        a = netstring(b"hello") + netstring(b"world") + b"extra stuff"
        b = netstring(b"a") + netstring(b"is") + netstring(a) + netstring(b".")
        (top, pos) = split_netstring(b, 4)
        self.failUnlessEqual(len(top), 4)
        self.failUnlessEqual(top[0], "a")
        self.failUnlessEqual(top[1], "is")
        self.failUnlessEqual(top[0], b"a")
        self.failUnlessEqual(top[1], b"is")
        self.failUnlessEqual(top[2], a)
        self.failUnlessEqual(top[3], ".")
        self.failUnlessRaises(ValueError, split_netstring, a, 2, required_trailer="")
        self.failUnlessEqual(top[3], b".")
        self.failUnlessRaises(ValueError, split_netstring, a, 2, required_trailer=b"")
        bottom = split_netstring(a, 2)
        self.failUnlessEqual(bottom, (["hello", "world"], len(netstring("hello")+netstring("world"))))
        self.failUnlessEqual(bottom, ([b"hello", b"world"], len(netstring(b"hello")+netstring(b"world"))))

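For reference, a minimal sketch of the framing these tests exercise, derived only from the assertions above (netstring(b"abc") == b"3:abc,") and not part of the patch:

def encode_netstring(data):
    # frame the payload as b"<decimal length>:<payload>,"
    assert isinstance(data, bytes)
    return b"%d:%s," % (len(data), data)

assert encode_netstring(b"abc") == b"3:abc,"
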
@ -1,3 +1,17 @@
"""
Tests for allmydata.util.observer.

Ported to Python 3.
"""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from future.utils import PY2
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min  # noqa: F401

from twisted.trial import unittest
from twisted.internet import defer, reactor

198
src/allmydata/test/test_pipeline.py
Normal file
@ -0,0 +1,198 @@
"""
Tests for allmydata.util.pipeline.

Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from future.utils import PY2
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min  # noqa: F401

import gc

from twisted.internet import defer
from twisted.trial import unittest
from twisted.python import log
from twisted.python.failure import Failure

from allmydata.util import pipeline


class Pipeline(unittest.TestCase):
    def pause(self, *args, **kwargs):
        d = defer.Deferred()
        self.calls.append( (d, args, kwargs) )
        return d

    def failUnlessCallsAre(self, expected):
        #print self.calls
        #print expected
        self.failUnlessEqual(len(self.calls), len(expected), self.calls)
        for i,c in enumerate(self.calls):
            self.failUnlessEqual(c[1:], expected[i], str(i))

    def test_basic(self):
        self.calls = []
        finished = []
        p = pipeline.Pipeline(100)

        d = p.flush() # fires immediately
        d.addCallbacks(finished.append, log.err)
        self.failUnlessEqual(len(finished), 1)
        finished = []

        d = p.add(10, self.pause, "one")
        # the call should start right away, and our return Deferred should
        # fire right away
        d.addCallbacks(finished.append, log.err)
        self.failUnlessEqual(len(finished), 1)
        self.failUnlessEqual(finished[0], None)
        self.failUnlessCallsAre([ ( ("one",) , {} ) ])
        self.failUnlessEqual(p.gauge, 10)

        # pipeline: [one]

        finished = []
        d = p.add(20, self.pause, "two", kw=2)
        # pipeline: [one, two]

        # the call and the Deferred should fire right away
        d.addCallbacks(finished.append, log.err)
        self.failUnlessEqual(len(finished), 1)
        self.failUnlessEqual(finished[0], None)
        self.failUnlessCallsAre([ ( ("one",) , {} ),
                                  ( ("two",) , {"kw": 2} ),
                                  ])
        self.failUnlessEqual(p.gauge, 30)

        self.calls[0][0].callback("one-result")
        # pipeline: [two]
        self.failUnlessEqual(p.gauge, 20)

        finished = []
        d = p.add(90, self.pause, "three", "posarg1")
        # pipeline: [two, three]
        flushed = []
        fd = p.flush()
        fd.addCallbacks(flushed.append, log.err)
        self.failUnlessEqual(flushed, [])

        # the call will be made right away, but the return Deferred will not,
        # because the pipeline is now full.
        d.addCallbacks(finished.append, log.err)
        self.failUnlessEqual(len(finished), 0)
        self.failUnlessCallsAre([ ( ("one",) , {} ),
                                  ( ("two",) , {"kw": 2} ),
                                  ( ("three", "posarg1"), {} ),
                                  ])
        self.failUnlessEqual(p.gauge, 110)

        self.failUnlessRaises(pipeline.SingleFileError, p.add, 10, self.pause)

        # retiring either call will unblock the pipeline, causing the #3
        # Deferred to fire
        self.calls[2][0].callback("three-result")
        # pipeline: [two]

        self.failUnlessEqual(len(finished), 1)
        self.failUnlessEqual(finished[0], None)
        self.failUnlessEqual(flushed, [])

        # retiring call#2 will finally allow the flush() Deferred to fire
        self.calls[1][0].callback("two-result")
        self.failUnlessEqual(len(flushed), 1)

    def test_errors(self):
        self.calls = []
        p = pipeline.Pipeline(100)

        d1 = p.add(200, self.pause, "one")
        d2 = p.flush()

        finished = []
        d1.addBoth(finished.append)
        self.failUnlessEqual(finished, [])

        flushed = []
        d2.addBoth(flushed.append)
        self.failUnlessEqual(flushed, [])

        self.calls[0][0].errback(ValueError("oops"))

        self.failUnlessEqual(len(finished), 1)
        f = finished[0]
        self.failUnless(isinstance(f, Failure))
        self.failUnless(f.check(pipeline.PipelineError))
        self.failUnlessIn("PipelineError", str(f.value))
        self.failUnlessIn("ValueError", str(f.value))
        r = repr(f.value)
        self.failUnless("ValueError" in r, r)
        f2 = f.value.error
        self.failUnless(f2.check(ValueError))

        self.failUnlessEqual(len(flushed), 1)
        f = flushed[0]
        self.failUnless(isinstance(f, Failure))
        self.failUnless(f.check(pipeline.PipelineError))
        f2 = f.value.error
        self.failUnless(f2.check(ValueError))

        # now that the pipeline is in the failed state, any new calls will
        # fail immediately

        d3 = p.add(20, self.pause, "two")

        finished = []
        d3.addBoth(finished.append)
        self.failUnlessEqual(len(finished), 1)
        f = finished[0]
        self.failUnless(isinstance(f, Failure))
        self.failUnless(f.check(pipeline.PipelineError))
        r = repr(f.value)
        self.failUnless("ValueError" in r, r)
        f2 = f.value.error
        self.failUnless(f2.check(ValueError))

        d4 = p.flush()
        flushed = []
        d4.addBoth(flushed.append)
        self.failUnlessEqual(len(flushed), 1)
        f = flushed[0]
        self.failUnless(isinstance(f, Failure))
        self.failUnless(f.check(pipeline.PipelineError))
        f2 = f.value.error
        self.failUnless(f2.check(ValueError))

    def test_errors2(self):
        self.calls = []
        p = pipeline.Pipeline(100)

        d1 = p.add(10, self.pause, "one")
        d2 = p.add(20, self.pause, "two")
        d3 = p.add(30, self.pause, "three")
        d4 = p.flush()

        # one call fails, then the second one succeeds: make sure
        # ExpandableDeferredList tolerates the second one

        flushed = []
        d4.addBoth(flushed.append)
        self.failUnlessEqual(flushed, [])

        self.calls[0][0].errback(ValueError("oops"))
        self.failUnlessEqual(len(flushed), 1)
        f = flushed[0]
        self.failUnless(isinstance(f, Failure))
        self.failUnless(f.check(pipeline.PipelineError))
        f2 = f.value.error
        self.failUnless(f2.check(ValueError))

        self.calls[1][0].callback("two-result")
        self.calls[2][0].errback(ValueError("three-error"))

        del d1,d2,d3,d4
        gc.collect() # for PyPy
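
A reader's aid, not part of the patch: a minimal usage sketch of the API
these tests exercise, assuming only the calls demonstrated above
(Pipeline(capacity), add(size, callable, ...) -> Deferred, and
flush() -> Deferred).

from twisted.internet import defer
from allmydata.util import pipeline

def example():
    p = pipeline.Pipeline(100)        # capacity gauge of 100
    # add() invokes the callable at once; since the gauge (10) stays at or
    # below the capacity, the returned Deferred also fires at once.
    d = p.add(10, lambda: defer.succeed(None))
    # flush() fires only after every queued call has retired.
    return d.addCallback(lambda _: p.flush())
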
617
src/allmydata/test/test_spans.py
Normal file
@ -0,0 +1,617 @@
"""
Tests for allmydata.util.spans.
"""

from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals

from future.utils import PY2
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min  # noqa: F401

from past.builtins import long

import binascii
import hashlib

from twisted.trial import unittest

from allmydata.util.spans import Spans, overlap, DataSpans


def sha256(data):
    """
    :param bytes data: data to hash

    :returns: a hex-encoded SHA256 hash of the data
    """
    return binascii.hexlify(hashlib.sha256(data).digest())


class SimpleSpans(object):
    # this is a simple+inefficient form of util.spans.Spans . We compare the
    # behavior of this reference model against the real (efficient) form.

    def __init__(self, _span_or_start=None, length=None):
        self._have = set()
        if length is not None:
            for i in range(_span_or_start, _span_or_start+length):
                self._have.add(i)
        elif _span_or_start:
            for (start,length) in _span_or_start:
                self.add(start, length)

    def add(self, start, length):
        for i in range(start, start+length):
            self._have.add(i)
        return self

    def remove(self, start, length):
        for i in range(start, start+length):
            self._have.discard(i)
        return self

    def each(self):
        return sorted(self._have)

    def __iter__(self):
        items = sorted(self._have)
        prevstart = None
        prevend = None
        for i in items:
            if prevstart is None:
                prevstart = prevend = i
                continue
            if i == prevend+1:
                prevend = i
                continue
            yield (prevstart, prevend-prevstart+1)
            prevstart = prevend = i
        if prevstart is not None:
            yield (prevstart, prevend-prevstart+1)

    def __bool__(self): # this gets us bool()
        return bool(self.len())

    def len(self):
        return len(self._have)

    def __add__(self, other):
        s = self.__class__(self)
        for (start, length) in other:
            s.add(start, length)
        return s

    def __sub__(self, other):
        s = self.__class__(self)
        for (start, length) in other:
            s.remove(start, length)
        return s

    def __iadd__(self, other):
        for (start, length) in other:
            self.add(start, length)
        return self

    def __isub__(self, other):
        for (start, length) in other:
            self.remove(start, length)
        return self

    def __and__(self, other):
        s = self.__class__()
        for i in other.each():
            if i in self._have:
                s.add(i, 1)
        return s

    def __contains__(self, start_and_length):
        (start, length) = start_and_length
        for i in range(start, start+length):
            if i not in self._have:
                return False
        return True

class ByteSpans(unittest.TestCase):
    def test_basic(self):
        s = Spans()
        self.failUnlessEqual(list(s), [])
        self.failIf(s)
        self.failIf((0,1) in s)
        self.failUnlessEqual(s.len(), 0)

        s1 = Spans(3, 4) # 3,4,5,6
        self._check1(s1)

        s1 = Spans(long(3), long(4)) # 3,4,5,6
        self._check1(s1)

        s2 = Spans(s1)
        self._check1(s2)

        s2.add(10,2) # 10,11
        self._check1(s1)
        self.failUnless((10,1) in s2)
        self.failIf((10,1) in s1)
        self.failUnlessEqual(list(s2.each()), [3,4,5,6,10,11])
        self.failUnlessEqual(s2.len(), 6)

        s2.add(15,2).add(20,2)
        self.failUnlessEqual(list(s2.each()), [3,4,5,6,10,11,15,16,20,21])
        self.failUnlessEqual(s2.len(), 10)

        s2.remove(4,3).remove(15,1)
        self.failUnlessEqual(list(s2.each()), [3,10,11,16,20,21])
        self.failUnlessEqual(s2.len(), 6)

        s1 = SimpleSpans(3, 4) # 3 4 5 6
        s2 = SimpleSpans(5, 4) # 5 6 7 8
        i = s1 & s2
        self.failUnlessEqual(list(i.each()), [5, 6])

    def _check1(self, s):
        self.failUnlessEqual(list(s), [(3,4)])
        self.failUnless(s)
        self.failUnlessEqual(s.len(), 4)
        self.failIf((0,1) in s)
        self.failUnless((3,4) in s)
        self.failUnless((3,1) in s)
        self.failUnless((5,2) in s)
        self.failUnless((6,1) in s)
        self.failIf((6,2) in s)
        self.failIf((7,1) in s)
        self.failUnlessEqual(list(s.each()), [3,4,5,6])

    def test_large(self):
        s = Spans(4, 2**65) # don't do this with a SimpleSpans
        self.failUnlessEqual(list(s), [(4, 2**65)])
        self.failUnless(s)
        self.failUnlessEqual(s.len(), 2**65)
        self.failIf((0,1) in s)
        self.failUnless((4,2) in s)
        self.failUnless((2**65,2) in s)

    def test_math(self):
        s1 = Spans(0, 10) # 0,1,2,3,4,5,6,7,8,9
        s2 = Spans(5, 3) # 5,6,7
        s3 = Spans(8, 4) # 8,9,10,11

        s = s1 - s2
        self.failUnlessEqual(list(s.each()), [0,1,2,3,4,8,9])
        s = s1 - s3
        self.failUnlessEqual(list(s.each()), [0,1,2,3,4,5,6,7])
        s = s2 - s3
        self.failUnlessEqual(list(s.each()), [5,6,7])
        s = s1 & s2
        self.failUnlessEqual(list(s.each()), [5,6,7])
        s = s2 & s1
        self.failUnlessEqual(list(s.each()), [5,6,7])
        s = s1 & s3
        self.failUnlessEqual(list(s.each()), [8,9])
        s = s3 & s1
        self.failUnlessEqual(list(s.each()), [8,9])
        s = s2 & s3
        self.failUnlessEqual(list(s.each()), [])
        s = s3 & s2
        self.failUnlessEqual(list(s.each()), [])
        s = Spans() & s3
        self.failUnlessEqual(list(s.each()), [])
        s = s3 & Spans()
        self.failUnlessEqual(list(s.each()), [])

        s = s1 + s2
        self.failUnlessEqual(list(s.each()), [0,1,2,3,4,5,6,7,8,9])
        s = s1 + s3
        self.failUnlessEqual(list(s.each()), [0,1,2,3,4,5,6,7,8,9,10,11])
        s = s2 + s3
        self.failUnlessEqual(list(s.each()), [5,6,7,8,9,10,11])

        s = Spans(s1)
        s -= s2
        self.failUnlessEqual(list(s.each()), [0,1,2,3,4,8,9])
        s = Spans(s1)
        s -= s3
        self.failUnlessEqual(list(s.each()), [0,1,2,3,4,5,6,7])
        s = Spans(s2)
        s -= s3
        self.failUnlessEqual(list(s.each()), [5,6,7])

        s = Spans(s1)
        s += s2
        self.failUnlessEqual(list(s.each()), [0,1,2,3,4,5,6,7,8,9])
        s = Spans(s1)
        s += s3
        self.failUnlessEqual(list(s.each()), [0,1,2,3,4,5,6,7,8,9,10,11])
        s = Spans(s2)
        s += s3
        self.failUnlessEqual(list(s.each()), [5,6,7,8,9,10,11])

    def test_random(self):
        # attempt to increase coverage of corner cases by comparing behavior
        # of a simple-but-slow model implementation against the
        # complex-but-fast actual implementation, in a large number of random
        # operations
        S1 = SimpleSpans
        S2 = Spans
        s1 = S1(); s2 = S2()
        seed = b""
        def _create(subseed):
            ns1 = S1(); ns2 = S2()
            for i in range(10):
                what = sha256(subseed+bytes(i))
                start = int(what[2:4], 16)
                length = max(1,int(what[5:6], 16))
                ns1.add(start, length); ns2.add(start, length)
            return ns1, ns2

        #print
        for i in range(1000):
            what = sha256(seed+bytes(i))
            op = what[0:1]
            subop = what[1:2]
            start = int(what[2:4], 16)
            length = max(1,int(what[5:6], 16))
            #print what
            if op in b"0":
                if subop in b"01234":
                    s1 = S1(); s2 = S2()
                elif subop in b"5678":
                    s1 = S1(start, length); s2 = S2(start, length)
                else:
                    s1 = S1(s1); s2 = S2(s2)
                #print "s2 = %s" % s2.dump()
            elif op in b"123":
                #print "s2.add(%d,%d)" % (start, length)
                s1.add(start, length); s2.add(start, length)
            elif op in b"456":
                #print "s2.remove(%d,%d)" % (start, length)
                s1.remove(start, length); s2.remove(start, length)
            elif op in b"78":
                ns1, ns2 = _create(what[7:11])
                #print "s2 + %s" % ns2.dump()
                s1 = s1 + ns1; s2 = s2 + ns2
            elif op in b"9a":
                ns1, ns2 = _create(what[7:11])
                #print "%s - %s" % (s2.dump(), ns2.dump())
                s1 = s1 - ns1; s2 = s2 - ns2
            elif op in b"bc":
                ns1, ns2 = _create(what[7:11])
                #print "s2 += %s" % ns2.dump()
                s1 += ns1; s2 += ns2
            elif op in b"de":
                ns1, ns2 = _create(what[7:11])
                #print "%s -= %s" % (s2.dump(), ns2.dump())
                s1 -= ns1; s2 -= ns2
            else:
                ns1, ns2 = _create(what[7:11])
                #print "%s &= %s" % (s2.dump(), ns2.dump())
                s1 = s1 & ns1; s2 = s2 & ns2
            #print "s2 now %s" % s2.dump()
            self.failUnlessEqual(list(s1.each()), list(s2.each()))
            self.failUnlessEqual(s1.len(), s2.len())
            self.failUnlessEqual(bool(s1), bool(s2))
            self.failUnlessEqual(list(s1), list(s2))
            for j in range(10):
                what = sha256(what[12:14]+bytes(j))
                start = int(what[2:4], 16)
                length = max(1, int(what[5:6], 16))
                span = (start, length)
                self.failUnlessEqual(bool(span in s1), bool(span in s2))

    # s()
    # s(start,length)
    # s(s0)
    # s.add(start,length) : returns s
    # s.remove(start,length)
    # s.each() -> list of byte offsets, mostly for testing
    # list(s) -> list of (start,length) tuples, one per span
    # (start,length) in s -> True if (start..start+length-1) are all members
    #                        NOT equivalent to x in list(s)
    # s.len() -> number of bytes, for testing, bool(), and accounting/limiting
    # bool(s)  (__nonzero__)
    # s = s1+s2, s1-s2, +=s1, -=s1

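    # A concrete illustration of the summary above, derived from
    # test_basic/test_math (added for this writeup, not part of the patch):
    #     s = Spans(3, 4)     # covers offsets 3,4,5,6
    #     s.add(10, 2)        # now also covers 10,11
    #     (3, 4) in s         # True: all of 3..6 are present
    #     list(s)             # [(3, 4), (10, 2)]
    #     s.len()             # 6
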
    def test_overlap(self):
        for a in range(20):
            for b in range(10):
                for c in range(20):
                    for d in range(10):
                        self._test_overlap(a,b,c,d)

    def _test_overlap(self, a, b, c, d):
        s1 = set(range(a,a+b))
        s2 = set(range(c,c+d))
        #print "---"
        #self._show_overlap(s1, "1")
        #self._show_overlap(s2, "2")
        o = overlap(a,b,c,d)
        expected = s1.intersection(s2)
        if not expected:
            self.failUnlessEqual(o, None)
        else:
            start,length = o
            so = set(range(start,start+length))
            #self._show(so, "o")
            self.failUnlessEqual(so, expected)

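    # For reference (consistent with the brute-force check above):
    # overlap() returns the intersection of [a, a+b) and [c, c+d) as a
    # (start, length) pair, or None when the ranges are disjoint, e.g.
    #     overlap(0, 10, 5, 3)  ->  (5, 3)
    #     overlap(0, 4, 8, 2)   ->  None
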
    def _show_overlap(self, s, c):
        import sys
        out = sys.stdout
        if s:
            for i in range(max(s)):
                if i in s:
                    out.write(c)
                else:
                    out.write(" ")
        out.write("\n")

def extend(s, start, length, fill):
    if len(s) >= start+length:
        return s
    assert len(fill) == 1
    return s + fill*(start+length-len(s))

def replace(s, start, data):
    assert len(s) >= start+len(data)
    return s[:start] + data + s[start+len(data):]

class SimpleDataSpans(object):
    def __init__(self, other=None):
        self.missing = "" # "1" where missing, "0" where found
        self.data = b""
        if other:
            for (start, data) in other.get_chunks():
                self.add(start, data)

    def __bool__(self): # this gets us bool()
        return bool(self.len())

    def len(self):
        return len(self.missing.replace("1", ""))

    def _dump(self):
        return [i for (i,c) in enumerate(self.missing) if c == "0"]

    def _have(self, start, length):
        m = self.missing[start:start+length]
        if not m or len(m)<length or int(m):
            return False
        return True
    def get_chunks(self):
        for i in self._dump():
            yield (i, self.data[i:i+1])
    def get_spans(self):
        return SimpleSpans([(start,len(data))
                            for (start,data) in self.get_chunks()])
    def get(self, start, length):
        if self._have(start, length):
            return self.data[start:start+length]
        return None
    def pop(self, start, length):
        data = self.get(start, length)
        if data:
            self.remove(start, length)
        return data
    def remove(self, start, length):
        self.missing = replace(extend(self.missing, start, length, "1"),
                               start, "1"*length)
    def add(self, start, data):
        self.missing = replace(extend(self.missing, start, len(data), "1"),
                               start, "0"*len(data))
        self.data = replace(extend(self.data, start, len(data), b" "),
                            start, data)

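# A worked example of the missing-string encoding noted in __init__
# (added for this writeup, not part of the patch):
#     sds = SimpleDataSpans(); sds.add(2, b"ab")
# leaves sds.missing == "1100", sds.data == b"  ab", and
# sds._dump() == [2, 3].
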
class StringSpans(unittest.TestCase):
    def do_basic(self, klass):
        ds = klass()
        self.failUnlessEqual(ds.len(), 0)
        self.failUnlessEqual(list(ds._dump()), [])
        self.failUnlessEqual(sum([len(d) for (s,d) in ds.get_chunks()]), 0)
        s1 = ds.get_spans()
        self.failUnlessEqual(ds.get(0, 4), None)
        self.failUnlessEqual(ds.pop(0, 4), None)
        ds.remove(0, 4)

        ds.add(2, b"four")
        self.failUnlessEqual(ds.len(), 4)
        self.failUnlessEqual(list(ds._dump()), [2,3,4,5])
        self.failUnlessEqual(sum([len(d) for (s,d) in ds.get_chunks()]), 4)
        s1 = ds.get_spans()
        self.failUnless((2,2) in s1)
        self.failUnlessEqual(ds.get(0, 4), None)
        self.failUnlessEqual(ds.pop(0, 4), None)
        self.failUnlessEqual(ds.get(4, 4), None)

        ds2 = klass(ds)
        self.failUnlessEqual(ds2.len(), 4)
        self.failUnlessEqual(list(ds2._dump()), [2,3,4,5])
        self.failUnlessEqual(sum([len(d) for (s,d) in ds2.get_chunks()]), 4)
        self.failUnlessEqual(ds2.get(0, 4), None)
        self.failUnlessEqual(ds2.pop(0, 4), None)
        self.failUnlessEqual(ds2.pop(2, 3), b"fou")
        self.failUnlessEqual(sum([len(d) for (s,d) in ds2.get_chunks()]), 1)
        self.failUnlessEqual(ds2.get(2, 3), None)
        self.failUnlessEqual(ds2.get(5, 1), b"r")
        self.failUnlessEqual(ds.get(2, 3), b"fou")
        self.failUnlessEqual(sum([len(d) for (s,d) in ds.get_chunks()]), 4)

        ds.add(0, b"23")
        self.failUnlessEqual(ds.len(), 6)
        self.failUnlessEqual(list(ds._dump()), [0,1,2,3,4,5])
        self.failUnlessEqual(sum([len(d) for (s,d) in ds.get_chunks()]), 6)
        self.failUnlessEqual(ds.get(0, 4), b"23fo")
        self.failUnlessEqual(ds.pop(0, 4), b"23fo")
        self.failUnlessEqual(sum([len(d) for (s,d) in ds.get_chunks()]), 2)
        self.failUnlessEqual(ds.get(0, 4), None)
        self.failUnlessEqual(ds.pop(0, 4), None)

        ds = klass()
        ds.add(2, b"four")
        ds.add(3, b"ea")
        self.failUnlessEqual(ds.get(2, 4), b"fear")

        ds = klass()
        ds.add(long(2), b"four")
        ds.add(long(3), b"ea")
        self.failUnlessEqual(ds.get(long(2), long(4)), b"fear")


    def do_scan(self, klass):
        # do a test with gaps and spans of size 1 and 2
        #  left=(1,11) * right=(1,11) * gapsize=(1,2)
        #  111, 112, 121, 122, 211, 212, 221, 222
        #  211
        #     121
        #        112
        #          212
        #             222
        #                 221
        #                     111
        #                       122
        #  11 1  1 11 11  11  1 1  111
        # 0123456789012345678901234567
        # abcdefghijklmnopqrstuvwxyz-=
        pieces = [(1, b"bc"),
                  (4, b"e"),
                  (7, b"h"),
                  (9, b"jk"),
                  (12, b"mn"),
                  (16, b"qr"),
                  (20, b"u"),
                  (22, b"w"),
                  (25, b"z-="),
                  ]
        p_elements = set([1,2,4,7,9,10,12,13,16,17,20,22,25,26,27])
        S = b"abcdefghijklmnopqrstuvwxyz-="
        # TODO: when adding data, add capital letters, to make sure we aren't
        # just leaving the old data in place
        l = len(S)
        def base():
            ds = klass()
            for start, data in pieces:
                ds.add(start, data)
            return ds
        def dump(s):
            p = set(s._dump())
            d = b"".join([((i not in p) and b" " or S[i]) for i in range(l)])
            assert len(d) == l
            return d
        DEBUG = False
        for start in range(0, l):
            for end in range(start+1, l):
                # add [start-end) to the baseline
                which = "%d-%d" % (start, end-1)
                p_added = set(range(start, end))
                b = base()
                if DEBUG:
                    print()
                    print(dump(b), which)
                    add = klass(); add.add(start, S[start:end])
                    print(dump(add))
                b.add(start, S[start:end])
                if DEBUG:
                    print(dump(b))
                # check that the new span is there
                d = b.get(start, end-start)
                self.failUnlessEqual(d, S[start:end], which)
                # check that all the original pieces are still there
                for t_start, t_data in pieces:
                    t_len = len(t_data)
                    self.failUnlessEqual(b.get(t_start, t_len),
                                         S[t_start:t_start+t_len],
                                         "%s %d+%d" % (which, t_start, t_len))
                # check that a lot of subspans are mostly correct
                for t_start in range(l):
                    for t_len in range(1,4):
                        d = b.get(t_start, t_len)
                        if d is not None:
                            which2 = "%s+(%d-%d)" % (which, t_start,
                                                     t_start+t_len-1)
                            self.failUnlessEqual(d, S[t_start:t_start+t_len],
                                                 which2)
                        # check that removing a subspan gives the right value
                        b2 = klass(b)
                        b2.remove(t_start, t_len)
                        removed = set(range(t_start, t_start+t_len))
                        for i in range(l):
                            exp = (((i in p_elements) or (i in p_added))
                                   and (i not in removed))
                            which2 = "%s-(%d-%d)" % (which, t_start,
                                                     t_start+t_len-1)
                            self.failUnlessEqual(bool(b2.get(i, 1)), exp,
                                                 which2+" %d" % i)

    def test_test(self):
        self.do_basic(SimpleDataSpans)
        self.do_scan(SimpleDataSpans)

    def test_basic(self):
        self.do_basic(DataSpans)
        self.do_scan(DataSpans)

    def test_random(self):
        # attempt to increase coverage of corner cases by comparing behavior
        # of a simple-but-slow model implementation against the
        # complex-but-fast actual implementation, in a large number of random
        # operations
        S1 = SimpleDataSpans
        S2 = DataSpans
        s1 = S1(); s2 = S2()
        seed = b""
        def _randstr(length, seed):
            created = 0
            pieces = []
            while created < length:
                piece = sha256(seed + bytes(created))
                pieces.append(piece)
                created += len(piece)
            return b"".join(pieces)[:length]
        def _create(subseed):
            ns1 = S1(); ns2 = S2()
            for i in range(10):
                what = sha256(subseed+bytes(i))
                start = int(what[2:4], 16)
                length = max(1,int(what[5:6], 16))
                ns1.add(start, _randstr(length, what[7:9]))
                ns2.add(start, _randstr(length, what[7:9]))
            return ns1, ns2

        #print
        for i in range(1000):
            what = sha256(seed+bytes(i))
            op = what[0:1]
            subop = what[1:2]
            start = int(what[2:4], 16)
            length = max(1,int(what[5:6], 16))
            #print what
            if op in b"0":
                if subop in b"0123456":
                    s1 = S1(); s2 = S2()
                else:
                    s1, s2 = _create(what[7:11])
                #print "s2 = %s" % list(s2._dump())
            elif op in b"123456":
                #print "s2.add(%d,%d)" % (start, length)
                s1.add(start, _randstr(length, what[7:9]))
                s2.add(start, _randstr(length, what[7:9]))
            elif op in b"789abc":
                #print "s2.remove(%d,%d)" % (start, length)
                s1.remove(start, length); s2.remove(start, length)
            else:
                #print "s2.pop(%d,%d)" % (start, length)
                d1 = s1.pop(start, length); d2 = s2.pop(start, length)
                self.failUnlessEqual(d1, d2)
            #print "s1 now %s" % list(s1._dump())
            #print "s2 now %s" % list(s2._dump())
            self.failUnlessEqual(s1.len(), s2.len())
            self.failUnlessEqual(list(s1._dump()), list(s2._dump()))
            for j in range(100):
                what = sha256(what[12:14]+bytes(j))
                start = int(what[2:4], 16)
                length = max(1, int(what[5:6], 16))
                d1 = s1.get(start, length); d2 = s2.get(start, length)
                self.failUnlessEqual(d1, d2, "%d+%d" % (start, length))
158
src/allmydata/test/test_statistics.py
Normal file
@ -0,0 +1,158 @@
"""
Tests for allmydata.util.statistics.

Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from future.utils import PY2
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min  # noqa: F401

from six.moves import StringIO  # native string StringIO

from twisted.trial import unittest

from allmydata.util import statistics


class Statistics(unittest.TestCase):
    def should_assert(self, msg, func, *args, **kwargs):
        try:
            func(*args, **kwargs)
            self.fail(msg)
        except AssertionError:
            pass

    def failUnlessListEqual(self, a, b, msg = None):
        self.failUnlessEqual(len(a), len(b))
        for i in range(len(a)):
            self.failUnlessEqual(a[i], b[i], msg)

    def failUnlessListAlmostEqual(self, a, b, places = 7, msg = None):
        self.failUnlessEqual(len(a), len(b))
        for i in range(len(a)):
            self.failUnlessAlmostEqual(a[i], b[i], places, msg)

    def test_binomial_coeff(self):
        f = statistics.binomial_coeff
        self.failUnlessEqual(f(20, 0), 1)
        self.failUnlessEqual(f(20, 1), 20)
        self.failUnlessEqual(f(20, 2), 190)
        self.failUnlessEqual(f(20, 8), f(20, 12))
        self.should_assert("Should assert if n < k", f, 2, 3)
        self.assertEqual(f(5, 3), f(5, 2))

    def test_binomial_distribution_pmf(self):
        f = statistics.binomial_distribution_pmf

        pmf_comp = f(2, .1)
        pmf_stat = [0.81, 0.18, 0.01]
        self.failUnlessListAlmostEqual(pmf_comp, pmf_stat)
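        # These values follow the binomial pmf
        # P(k) = C(n,k) * p**k * (1-p)**(n-k); for n=2, p=0.1:
        # P(0) = 0.81, P(1) = 2*0.1*0.9 = 0.18, P(2) = 0.01.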

        # Summing across a PMF should give the total probability 1
        self.failUnlessAlmostEqual(sum(pmf_comp), 1)
        self.should_assert("Should assert if not 0<=p<=1", f, 1, -1)
        self.should_assert("Should assert if n < 1", f, 0, .1)

        out = StringIO()
        statistics.print_pmf(pmf_comp, out=out)
        lines = out.getvalue().splitlines()
        self.failUnlessEqual(lines[0], "i=0: 0.81")
        self.failUnlessEqual(lines[1], "i=1: 0.18")
        self.failUnlessEqual(lines[2], "i=2: 0.01")

    def test_survival_pmf(self):
        f = statistics.survival_pmf
        # Cross-check binomial-distribution method against convolution
        # method.
        p_list = [.9999] * 100 + [.99] * 50 + [.8] * 20
        pmf1 = statistics.survival_pmf_via_conv(p_list)
        pmf2 = statistics.survival_pmf_via_bd(p_list)
        self.failUnlessListAlmostEqual(pmf1, pmf2)
        self.failUnlessTrue(statistics.valid_pmf(pmf1))
        self.should_assert("Should assert if p_i > 1", f, [1.1])
        self.should_assert("Should assert if p_i < 0", f, [-.1])

    def test_repair_count_pmf(self):
        survival_pmf = statistics.binomial_distribution_pmf(5, .9)
        repair_pmf = statistics.repair_count_pmf(survival_pmf, 3)
        # repair_pmf[0] == sum(survival_pmf[0,1,2,5])
        # repair_pmf[1] == survival_pmf[4]
        # repair_pmf[2] == survival_pmf[3]
        self.failUnlessListAlmostEqual(repair_pmf,
                                       [0.00001 + 0.00045 + 0.0081 + 0.59049,
                                        .32805,
                                        .0729,
                                        0, 0, 0])

    def test_repair_cost(self):
        survival_pmf = statistics.binomial_distribution_pmf(5, .9)
        bwcost = statistics.bandwidth_cost_function
        cost = statistics.mean_repair_cost(bwcost, 1000,
                                           survival_pmf, 3, ul_dl_ratio=1.0)
        self.failUnlessAlmostEqual(cost, 558.90)
        cost = statistics.mean_repair_cost(bwcost, 1000,
                                           survival_pmf, 3, ul_dl_ratio=8.0)
        self.failUnlessAlmostEqual(cost, 1664.55)

        # I haven't manually checked the math beyond here -warner
        cost = statistics.eternal_repair_cost(bwcost, 1000,
                                              survival_pmf, 3,
                                              discount_rate=0, ul_dl_ratio=1.0)
        self.failUnlessAlmostEqual(cost, 65292.056074766246)
        cost = statistics.eternal_repair_cost(bwcost, 1000,
                                              survival_pmf, 3,
                                              discount_rate=0.05,
                                              ul_dl_ratio=1.0)
        self.failUnlessAlmostEqual(cost, 9133.6097158191551)

    def test_convolve(self):
        f = statistics.convolve
        v1 = [ 1, 2, 3 ]
        v2 = [ 4, 5, 6 ]
        v3 = [ 7, 8 ]
        v1v2result = [ 4, 13, 28, 27, 18 ]
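        # spot-check of the known result: the middle coefficient is
        # 1*6 + 2*5 + 3*4 = 28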
        # Convolution is commutative
        r1 = f(v1, v2)
        r2 = f(v2, v1)
        self.failUnlessListEqual(r1, r2, "Convolution should be commutative")
        self.failUnlessListEqual(r1, v1v2result, "Didn't match known result")
        # Convolution is associative
        r1 = f(f(v1, v2), v3)
        r2 = f(v1, f(v2, v3))
        self.failUnlessListEqual(r1, r2, "Convolution should be associative")
        # Convolution is distributive
        r1 = f(v3, [ a + b for a, b in zip(v1, v2) ])
        tmp1 = f(v3, v1)
        tmp2 = f(v3, v2)
        r2 = [ a + b for a, b in zip(tmp1, tmp2) ]
        self.failUnlessListEqual(r1, r2, "Convolution should be distributive")
        # Convolution is scalar multiplication associative
        tmp1 = f(v1, v2)
        r1 = [ a * 4 for a in tmp1 ]
        tmp2 = [ a * 4 for a in v1 ]
        r2 = f(tmp2, v2)
        self.failUnlessListEqual(r1, r2, "Convolution should be scalar multiplication associative")

    def test_find_k(self):
        f = statistics.find_k
        g = statistics.pr_file_loss
        plist = [.9] * 10 + [.8] * 10 # N=20
        t = .0001
        k = f(plist, t)
        self.failUnlessEqual(k, 10)
        self.failUnless(g(plist, k) < t)

    def test_pr_file_loss(self):
        f = statistics.pr_file_loss
        plist = [.5] * 10
        self.failUnlessEqual(f(plist, 3), .0546875)
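        # Sanity check: with 10 shares at p=.5, the file is lost when fewer
        # than k=3 shares survive, i.e.
        # (C(10,0) + C(10,1) + C(10,2)) / 2**10 = 56/1024 = .0546875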

    def test_pr_backup_file_loss(self):
        f = statistics.pr_backup_file_loss
        plist = [.5] * 10
        self.failUnlessEqual(f(plist, .5, 3), .02734375)
@ -26,7 +26,7 @@ from zope.interface import implementer
from foolscap.api import fireEventually
import itertools
from allmydata import interfaces
from allmydata.util import fileutil, hashutil, base32, pollmixin, time_format
from allmydata.util import fileutil, hashutil, base32, pollmixin
from allmydata.storage.server import StorageServer
from allmydata.storage.mutable import MutableShareFile
from allmydata.storage.immutable import BucketWriter, BucketReader
@ -3852,25 +3852,6 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
                                   expiration_mode="bogus")
        self.failUnlessIn("GC mode 'bogus' must be 'age' or 'cutoff-date'", str(e))

    def test_parse_duration(self):
        DAY = 24*60*60
        MONTH = 31*DAY
        YEAR = 365*DAY
        p = time_format.parse_duration
        self.failUnlessEqual(p("7days"), 7*DAY)
        self.failUnlessEqual(p("31day"), 31*DAY)
        self.failUnlessEqual(p("60 days"), 60*DAY)
        self.failUnlessEqual(p("2mo"), 2*MONTH)
        self.failUnlessEqual(p("3 month"), 3*MONTH)
        self.failUnlessEqual(p("2years"), 2*YEAR)
        e = self.failUnlessRaises(ValueError, p, "2kumquats")
        self.failUnlessIn("no unit (like day, month, or year) in '2kumquats'", str(e))

    def test_parse_date(self):
        p = time_format.parse_date
        self.failUnless(isinstance(p("2009-03-18"), int), p("2009-03-18"))
        self.failUnlessEqual(p("2009-03-18"), 1237334400)

    def test_limited_history(self):
        basedir = "storage/LeaseCrawler/limited_history"
        fileutil.make_dirs(basedir)

169
src/allmydata/test/test_time_format.py
Normal file
@ -0,0 +1,169 @@
"""
Tests for allmydata.util.time_format.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from future.utils import PY2
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min  # noqa: F401

from past.builtins import long

import time

from twisted.trial import unittest

from allmydata.test.common_py3 import TimezoneMixin
from allmydata.util import time_format


class TimeFormat(unittest.TestCase, TimezoneMixin):
    def test_epoch(self):
        return self._help_test_epoch()

    def test_epoch_in_London(self):
        # Europe/London is a particularly troublesome timezone. Nowadays, its
        # offset from GMT is 0. But in 1970, its offset from GMT was 1.
        # (Apparently in 1970 Britain had redefined standard time to be GMT+1
        # and stayed in standard time all year round, whereas today
        # Europe/London standard time is GMT and Europe/London Daylight
        # Savings Time is GMT+1.) The current implementation of
        # time_format.iso_utc_time_to_localseconds() breaks if the timezone is
        # Europe/London. (As soon as this unit test is done then I'll change
        # that implementation to something that works even in this case...)

        if not self.have_working_tzset():
            raise unittest.SkipTest("This test can't be run on a platform without time.tzset().")

        self.setTimezone("Europe/London")
        return self._help_test_epoch()

    def _help_test_epoch(self):
        origtzname = time.tzname
        s = time_format.iso_utc_time_to_seconds("1970-01-01T00:00:01")
        self.failUnlessEqual(s, 1.0)
        s = time_format.iso_utc_time_to_seconds("1970-01-01_00:00:01")
        self.failUnlessEqual(s, 1.0)
        s = time_format.iso_utc_time_to_seconds("1970-01-01 00:00:01")
        self.failUnlessEqual(s, 1.0)

        self.failUnlessEqual(time_format.iso_utc(1.0), "1970-01-01_00:00:01")
        self.failUnlessEqual(time_format.iso_utc(1.0, sep=" "),
                             "1970-01-01 00:00:01")

        now = time.time()
        isostr = time_format.iso_utc(now)
        timestamp = time_format.iso_utc_time_to_seconds(isostr)
        self.failUnlessEqual(int(timestamp), int(now))

        def my_time():
            return 1.0
        self.failUnlessEqual(time_format.iso_utc(t=my_time),
                             "1970-01-01_00:00:01")
        e = self.failUnlessRaises(ValueError,
                                  time_format.iso_utc_time_to_seconds,
                                  "invalid timestring")
        self.failUnless("not a complete ISO8601 timestamp" in str(e))
        s = time_format.iso_utc_time_to_seconds("1970-01-01_00:00:01.500")
        self.failUnlessEqual(s, 1.5)

        # Look for daylight-savings-related errors.
        thatmomentinmarch = time_format.iso_utc_time_to_seconds("2009-03-20 21:49:02.226536")
        self.failUnlessEqual(thatmomentinmarch, 1237585742.226536)
        self.failUnlessEqual(origtzname, time.tzname)

    def test_iso_utc(self):
        when = 1266760143.7841301
        out = time_format.iso_utc_date(when)
        self.failUnlessEqual(out, "2010-02-21")
        out = time_format.iso_utc_date(t=lambda: when)
        self.failUnlessEqual(out, "2010-02-21")
        out = time_format.iso_utc(when)
        self.failUnlessEqual(out, "2010-02-21_13:49:03.784130")
        out = time_format.iso_utc(when, sep="-")
        self.failUnlessEqual(out, "2010-02-21-13:49:03.784130")

    def test_parse_duration(self):
        p = time_format.parse_duration
        DAY = 24*60*60
        MONTH = 31*DAY
        YEAR = 365*DAY
        self.failUnlessEqual(p("1 day"), DAY)
        self.failUnlessEqual(p("2 days"), 2*DAY)
        self.failUnlessEqual(p("3 months"), 3*MONTH)
        self.failUnlessEqual(p("4 mo"), 4*MONTH)
        self.failUnlessEqual(p("5 years"), 5*YEAR)
        e = self.failUnlessRaises(ValueError, p, "123")
        self.failUnlessIn("no unit (like day, month, or year) in '123'",
                          str(e))
        self.failUnlessEqual(p("7days"), 7*DAY)
        self.failUnlessEqual(p("31day"), 31*DAY)
        self.failUnlessEqual(p("60 days"), 60*DAY)
        self.failUnlessEqual(p("2mo"), 2*MONTH)
        self.failUnlessEqual(p("3 month"), 3*MONTH)
        self.failUnlessEqual(p("2years"), 2*YEAR)
        e = self.failUnlessRaises(ValueError, p, "2kumquats")
        self.failUnlessIn("no unit (like day, month, or year) in '2kumquats'", str(e))

    def test_parse_date(self):
        p = time_format.parse_date
        self.failUnlessEqual(p("2010-02-21"), 1266710400)
        self.failUnless(isinstance(p("2009-03-18"), (int, long)), p("2009-03-18"))
        self.failUnlessEqual(p("2009-03-18"), 1237334400)

    def test_format_time(self):
        self.failUnlessEqual(time_format.format_time(time.gmtime(0)), '1970-01-01 00:00:00')
        self.failUnlessEqual(time_format.format_time(time.gmtime(60)), '1970-01-01 00:01:00')
        self.failUnlessEqual(time_format.format_time(time.gmtime(60*60)), '1970-01-01 01:00:00')
        seconds_per_day = 60*60*24
        leap_years_1970_to_2014_inclusive = ((2012 - 1968) // 4)
        self.failUnlessEqual(time_format.format_time(time.gmtime(seconds_per_day*((2015 - 1970)*365+leap_years_1970_to_2014_inclusive))), '2015-01-01 00:00:00')
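        # (2012 - 1968) // 4 == 11 counts the leap days in 1972..2012, so
        # 45*365 + 11 days after the epoch lands on 2015-01-01.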

    def test_format_time_y2038(self):
        seconds_per_day = 60*60*24
        leap_years_1970_to_2047_inclusive = ((2044 - 1968) // 4)
        t = (seconds_per_day*
             ((2048 - 1970)*365 + leap_years_1970_to_2047_inclusive))
        try:
            gm_t = time.gmtime(t)
        except ValueError:
            raise unittest.SkipTest("Note: this system cannot handle dates after 2037.")
        self.failUnlessEqual(time_format.format_time(gm_t),
                             '2048-01-01 00:00:00')

    def test_format_delta(self):
        time_1 = 1389812723
        time_5s_delta = 1389812728
        time_28m7s_delta = 1389814410
        time_1h_delta = 1389816323
        time_1d21h46m49s_delta = 1389977532

        self.failUnlessEqual(
            time_format.format_delta(time_1, time_1), '0s')

        self.failUnlessEqual(
            time_format.format_delta(time_1, time_5s_delta), '5s')
        self.failUnlessEqual(
            time_format.format_delta(time_1, time_28m7s_delta), '28m 7s')
        self.failUnlessEqual(
            time_format.format_delta(time_1, time_1h_delta), '1h 0m 0s')
        self.failUnlessEqual(
            time_format.format_delta(time_1, time_1d21h46m49s_delta), '1d 21h 46m 49s')

        self.failUnlessEqual(
            time_format.format_delta(time_1d21h46m49s_delta, time_1), '-')

        # time_1 with a decimal fraction will make the delta 1s less
        time_1decimal = 1389812723.383963
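        # e.g. 1389812728 - 1389812723.383963 = 4.616..., which is
        # reported as '4s' (the fractional second is dropped)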

        self.failUnlessEqual(
            time_format.format_delta(time_1decimal, time_5s_delta), '4s')
        self.failUnlessEqual(
            time_format.format_delta(time_1decimal, time_28m7s_delta), '28m 6s')
        self.failUnlessEqual(
            time_format.format_delta(time_1decimal, time_1h_delta), '59m 59s')
        self.failUnlessEqual(
            time_format.format_delta(time_1decimal, time_1d21h46m49s_delta), '1d 21h 46m 48s')
File diff suppressed because it is too large
@ -1,3 +1,16 @@
"""
Tests for allmydata.util.verlib and allmydata.version_checks.

Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from future.utils import PY2
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min  # noqa: F401

import sys
import pkg_resources
@ -79,7 +92,7 @@ class CheckRequirement(unittest.TestCase):

        res = cross_check({}, [("foo", ("unparseable", "", None))])
        self.failUnlessEqual(len(res), 1)
        self.failUnlessIn("version 'unparseable'", res[0])
        self.assertTrue(("version 'unparseable'" in res[0]) or ("version u'unparseable'" in res[0]))
        self.failUnlessIn("was not found by pkg_resources", res[0])

        res = cross_check({"distribute": ("1.0", "/somewhere")}, [("setuptools", ("2.0", "/somewhere", "distribute"))])
@ -120,7 +133,7 @@ class CheckRequirement(unittest.TestCase):

        res = cross_check({"foo": ("1.0", "/somewhere")}, [("foo", ("2.0", "/somewhere_different", None))])
        self.failUnlessEqual(len(res), 1)
        self.failUnlessIn("but version '2.0'", res[0])
        self.assertTrue(("but version '2.0'" in res[0]) or ("but version u'2.0'" in res[0]))

    def test_extract_openssl_version(self):
        self.failUnlessEqual(extract_openssl_version(MockSSL("")),

230
src/allmydata/test/web/test_status.py
Normal file
@ -0,0 +1,230 @@
"""
Tests for ```allmydata.web.status```.
"""

from bs4 import BeautifulSoup
from twisted.web.template import flattenString

from allmydata.web.status import (
    Status,
    StatusElement,
)

from zope.interface import implementer

from allmydata.interfaces import IDownloadResults
from allmydata.web.status import DownloadStatusElement
from allmydata.immutable.downloader.status import DownloadStatus

from .common import (
    assert_soup_has_favicon,
    assert_soup_has_tag_with_content,
)
from ..common import TrialTestCase

from .test_web import FakeHistory

# Test that status.StatusElement can render HTML.
class StatusTests(TrialTestCase):

    def _render_status_page(self, active, recent):
        elem = StatusElement(active, recent)
        d = flattenString(None, elem)
        return self.successResultOf(d)

    def test_status_page(self):
        status = Status(FakeHistory())
        doc = self._render_status_page(
            status._get_active_operations(),
            status._get_recent_operations()
        )
        soup = BeautifulSoup(doc, 'html5lib')

        assert_soup_has_favicon(self, soup)

        assert_soup_has_tag_with_content(
            self, soup, u"title",
            u"Tahoe-LAFS - Recent and Active Operations"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"h2",
            u"Active Operations:"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"td",
            u"retrieve"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"td",
            u"publish"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"td",
            u"download"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"td",
            u"upload"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"h2",
            "Recent Operations:"
        )


@implementer(IDownloadResults)
class FakeDownloadResults(object):

    def __init__(self,
                 file_size=0,
                 servers_used=None,
                 server_problems=None,
                 servermap=None,
                 timings=None):
        """
        See IDownloadResults for parameters.
        """
        self.file_size = file_size
        self.servers_used = servers_used
        self.server_problems = server_problems
        self.servermap = servermap
        self.timings = timings


class FakeDownloadStatus(DownloadStatus):

    def __init__(self,
                 storage_index = None,
                 file_size = 0,
                 servers_used = None,
                 server_problems = None,
                 servermap = None,
                 timings = None):
        """
        See IDownloadStatus and IDownloadResults for parameters.
        """
        super(FakeDownloadStatus, self).__init__(storage_index, file_size)

        self.servers_used = servers_used
        self.server_problems = server_problems
        self.servermap = servermap
        self.timings = timings

    def get_results(self):
        return FakeDownloadResults(self.size,
                                   self.servers_used,
                                   self.server_problems,
                                   self.servermap,
                                   self.timings)


class DownloadStatusElementTests(TrialTestCase):
    """
    Tests for ```allmydata.web.status.DownloadStatusElement```.
    """

    def _render_download_status_element(self, status):
        """
        :param IDownloadStatus status:
        :return: HTML string rendered by DownloadStatusElement
        """
        elem = DownloadStatusElement(status)
        d = flattenString(None, elem)
        return self.successResultOf(d)

    def test_download_status_element(self):
        """
        See if we can render the page almost fully.
        """
        status = FakeDownloadStatus(
            "si-1", 123,
            ["s-1", "s-2", "s-3"],
            {"s-1": "unknown problem"},
            {"s-1": [1], "s-2": [1,2], "s-3": [2,3]},
            {"fetch_per_server":
             {"s-1": [1], "s-2": [2,3], "s-3": [3,2]}}
        )

        result = self._render_download_status_element(status)
        soup = BeautifulSoup(result, 'html5lib')

        assert_soup_has_favicon(self, soup)

        assert_soup_has_tag_with_content(
            self, soup, u"title", u"Tahoe-LAFS - File Download Status"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"li", u"File Size: 123 bytes"
        )
        assert_soup_has_tag_with_content(
            self, soup, u"li", u"Progress: 0.0%"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"li", u"Servers Used: [omwtc], [omwte], [omwtg]"
        )
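
        # Note: "[omwtc]", "[omwte]" and "[omwtg]" are the rendered
        # abbreviations of the fake server ids "s-1", "s-2" and "s-3"
        # (Tahoe's base32 encoding of the id bytes).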

        assert_soup_has_tag_with_content(
            self, soup, u"li", u"Server Problems:"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"li", u"[omwtc]: unknown problem"
        )

        assert_soup_has_tag_with_content(self, soup, u"li", u"Servermap:")

        assert_soup_has_tag_with_content(
            self, soup, u"li", u"[omwtc] has share: #1"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"li", u"[omwte] has shares: #1,#2"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"li", u"[omwtg] has shares: #2,#3"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"li", u"Per-Server Segment Fetch Response Times:"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"li", u"[omwtc]: 1.00s"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"li", u"[omwte]: 2.00s, 3.00s"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"li", u"[omwtg]: 3.00s, 2.00s"
        )

    def test_download_status_element_partial(self):
        """
        See if we can render the page with incomplete download status.
        """
        status = FakeDownloadStatus()
        result = self._render_download_status_element(status)
        soup = BeautifulSoup(result, 'html5lib')

        assert_soup_has_tag_with_content(
            self, soup, u"li", u"Servermap: None"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"li", u"File Size: 0 bytes"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"li", u"Total: None (None)"
        )
@ -22,6 +22,9 @@ class Util(ShouldFailMixin, testutil.ReallyEqualMixin, unittest.TestCase):
        self.failUnlessReallyEqual(common.abbreviate_time(0.00123), "1.2ms")
        self.failUnlessReallyEqual(common.abbreviate_time(0.000123), "123us")
        self.failUnlessReallyEqual(common.abbreviate_time(-123000), "-123000000000us")
        self.failUnlessReallyEqual(common.abbreviate_time(2.5), "2.50s")
        self.failUnlessReallyEqual(common.abbreviate_time(0.25), "250ms")
        self.failUnlessReallyEqual(common.abbreviate_time(0.0021), "2.1ms")

        self.failUnlessReallyEqual(common.abbreviate_time(None), "")
        self.failUnlessReallyEqual(common.abbreviate_time(2.5), "2.50s")
@ -54,6 +57,9 @@ class Util(ShouldFailMixin, testutil.ReallyEqualMixin, unittest.TestCase):
        self.failUnlessReallyEqual(common.abbreviate_rate(1234000), "1.23MBps")
        self.failUnlessReallyEqual(common.abbreviate_rate(12340), "12.3kBps")
        self.failUnlessReallyEqual(common.abbreviate_rate(123), "123Bps")
        self.failUnlessReallyEqual(common.abbreviate_rate(2500000), "2.50MBps")
        self.failUnlessReallyEqual(common.abbreviate_rate(30100), "30.1kBps")
        self.failUnlessReallyEqual(common.abbreviate_rate(123), "123Bps")

    def test_abbreviate_size(self):
        self.failUnlessReallyEqual(common.abbreviate_size(None), "")

@ -33,7 +33,6 @@ from allmydata.immutable import upload
|
||||
from allmydata.immutable.downloader.status import DownloadStatus
|
||||
from allmydata.dirnode import DirectoryNode
|
||||
from allmydata.nodemaker import NodeMaker
|
||||
from allmydata.web import status
|
||||
from allmydata.web.common import WebError, MultiFormatPage
|
||||
from allmydata.util import fileutil, base32, hashutil
|
||||
from allmydata.util.consumer import download_to_data
|
||||
@ -60,6 +59,7 @@ from .common import (
|
||||
from allmydata.interfaces import IMutableFileNode, SDMF_VERSION, MDMF_VERSION
|
||||
from allmydata.mutable import servermap, publish, retrieve
|
||||
from .. import common_util as testutil
|
||||
from ..common_py3 import TimezoneMixin
|
||||
from ..common_web import (
|
||||
do_http,
|
||||
Error,
|
||||
@ -311,7 +311,7 @@ class FakeClient(_Client):
|
||||
|
||||
MUTABLE_SIZELIMIT = FakeMutableFileNode.MUTABLE_SIZELIMIT
|
||||
|
||||
class WebMixin(testutil.TimezoneMixin):
|
||||
class WebMixin(TimezoneMixin):
|
||||
def setUp(self):
|
||||
self.setTimezone('UTC-13:00')
|
||||
self.s = FakeClient()
|
||||
@ -972,11 +972,11 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
|
||||
d = self.GET("/status", followRedirect=True)
|
||||
def _check(res):
|
||||
self.failUnlessIn('Recent and Active Operations', res)
|
||||
self.failUnlessIn('"down-%d"' % dl_num, res)
|
||||
self.failUnlessIn('"up-%d"' % ul_num, res)
|
||||
self.failUnlessIn('"mapupdate-%d"' % mu_num, res)
|
||||
self.failUnlessIn('"publish-%d"' % pub_num, res)
|
||||
self.failUnlessIn('"retrieve-%d"' % ret_num, res)
|
||||
self.failUnlessIn('"/status/down-%d"' % dl_num, res)
|
||||
self.failUnlessIn('"/status/up-%d"' % ul_num, res)
|
||||
self.failUnlessIn('"/status/mapupdate-%d"' % mu_num, res)
|
||||
self.failUnlessIn('"/status/publish-%d"' % pub_num, res)
|
||||
self.failUnlessIn('"/status/retrieve-%d"' % ret_num, res)
|
||||
d.addCallback(_check)
|
||||
d.addCallback(lambda res: self.GET("/status/?t=json"))
|
||||
def _check_json(res):
|
||||
@ -1035,17 +1035,209 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
|
||||
|
||||
return d
|
||||
|
||||
def test_status_numbers(self):
|
||||
drrm = status.DownloadResultsRendererMixin()
|
||||
self.failUnlessReallyEqual(drrm.render_time(None, None), "")
|
||||
self.failUnlessReallyEqual(drrm.render_time(None, 2.5), "2.50s")
|
||||
self.failUnlessReallyEqual(drrm.render_time(None, 0.25), "250ms")
|
||||
self.failUnlessReallyEqual(drrm.render_time(None, 0.0021), "2.1ms")
|
||||
self.failUnlessReallyEqual(drrm.render_time(None, 0.000123), "123us")
|
||||
self.failUnlessReallyEqual(drrm.render_rate(None, None), "")
|
||||
self.failUnlessReallyEqual(drrm.render_rate(None, 2500000), "2.50MBps")
|
||||
self.failUnlessReallyEqual(drrm.render_rate(None, 30100), "30.1kBps")
|
||||
self.failUnlessReallyEqual(drrm.render_rate(None, 123), "123Bps")
|
||||
def test_status_path_nodash_error(self):
|
||||
"""
|
||||
Expect an error, because path is expected to be of the form
|
||||
"/status/{up,down,..}-%number", with a hyphen.
|
||||
"""
|
||||
return self.shouldFail2(error.Error,
|
||||
"test_status_path_nodash",
|
||||
"400 Bad Request",
|
||||
"no '-' in 'nodash'",
|
||||
self.GET,
|
||||
"/status/nodash")
|
||||
|
||||
def test_status_page_contains_links(self):
|
||||
"""
|
||||
Check that the rendered `/status` page contains all the
|
||||
expected links.
|
||||
"""
|
||||
def _check_status_page_links(response):
|
||||
(body, status, _) = response
|
||||
|
||||
self.failUnlessReallyEqual(int(status), 200)
|
||||
|
||||
soup = BeautifulSoup(body, 'html5lib')
|
||||
h = self.s.get_history()
|
||||
|
||||
# Check for `<a href="/status/retrieve-0">Not started</a>`
|
||||
ret_num = h.list_all_retrieve_statuses()[0].get_counter()
|
||||
assert_soup_has_tag_with_attributes_and_content(
|
||||
self, soup, u"a",
|
||||
u"Not started",
|
||||
{u"href": u"/status/retrieve-{}".format(ret_num)}
|
||||
)
|
||||
|
||||
# Check for `<a href="/status/publish-0">Not started</a></td>`
|
||||
pub_num = h.list_all_publish_statuses()[0].get_counter()
|
||||
assert_soup_has_tag_with_attributes_and_content(
|
||||
self, soup, u"a",
|
||||
u"Not started",
|
||||
{u"href": u"/status/publish-{}".format(pub_num)}
|
||||
)
|
||||
|
||||
# Check for `<a href="/status/mapupdate-0">Not started</a>`
|
||||
mu_num = h.list_all_mapupdate_statuses()[0].get_counter()
|
||||
assert_soup_has_tag_with_attributes_and_content(
|
||||
self, soup, u"a",
|
||||
u"Not started",
|
||||
{u"href": u"/status/mapupdate-{}".format(mu_num)}
|
||||
)
|
||||
|
||||
# Check for `<a href="/status/down-0">fetching segments
|
||||
# 2,3; errors on segment 1</a>`: see build_one_ds() above.
|
||||
dl_num = h.list_all_download_statuses()[0].get_counter()
|
||||
assert_soup_has_tag_with_attributes_and_content(
|
||||
self, soup, u"a",
|
||||
u"fetching segments 2,3; errors on segment 1",
|
||||
{u"href": u"/status/down-{}".format(dl_num)}
|
||||
)
|
||||
|
||||
# Check for `<a href="/status/up-0">Not started</a>`
|
||||
ul_num = h.list_all_upload_statuses()[0].get_counter()
|
||||
assert_soup_has_tag_with_attributes_and_content(
|
||||
self, soup, u"a",
|
||||
u"Not started",
|
||||
{u"href": u"/status/up-{}".format(ul_num)}
|
||||
)
|
||||
|
||||
d = self.GET("/status", return_response=True)
|
||||
d.addCallback(_check_status_page_links)
|
||||
return d
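For readers unfamiliar with the soup helpers used above, each check boils down to a plain BeautifulSoup query. A minimal sketch (the html snippet is a hypothetical fragment of the /status response, and the real assert_soup_has_tag_with_attributes_and_content helper lives in the test support code and reports nicer failures):

    from bs4 import BeautifulSoup

    html = '<a href="/status/up-0">Not started</a>'  # hypothetical response snippet
    soup = BeautifulSoup(html, 'html5lib')
    # find_all() with an attrs dict filters tags by attribute value
    links = soup.find_all(u"a", attrs={u"href": u"/status/up-0"})
    assert any(tag.get_text() == u"Not started" for tag in links)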

def test_status_path_trailing_slashes(self):
"""
Test that both `GET /status` and `GET /status/` are treated
alike, but reject any additional trailing slashes and other
non-existent child nodes.
"""
def _check_status(response):
(body, status, _) = response

self.failUnlessReallyEqual(int(status), 200)

soup = BeautifulSoup(body, 'html5lib')
assert_soup_has_favicon(self, soup)
assert_soup_has_tag_with_content(
self, soup, u"title",
u"Tahoe-LAFS - Recent and Active Operations"
)

d = self.GET("/status", return_response=True)
d.addCallback(_check_status)

d = self.GET("/status/", return_response=True)
d.addCallback(_check_status)

d = self.shouldFail2(error.Error,
"test_status_path_trailing_slashes",
"400 Bad Request",
"no '-' in ''",
self.GET,
"/status//")

d = self.shouldFail2(error.Error,
"test_status_path_trailing_slashes",
"400 Bad Request",
"no '-' in ''",
self.GET,
"/status////////")

return d

def test_status_path_404_error(self):
"""
Looking for non-existent statuses under child paths should
exercise all the iterators in web.status.Status.getChild().

The test suite (hopefully!) would not have done any setup for
a very large number of statuses at this point, now or in the
future, so these all should always return 404.
"""
d = self.GET("/status/up-9999999")
d.addBoth(self.should404, "test_status_path_404_error (up)")

d = self.GET("/status/down-9999999")
d.addBoth(self.should404, "test_status_path_404_error (down)")

d = self.GET("/status/mapupdate-9999999")
d.addBoth(self.should404, "test_status_path_404_error (mapupdate)")

d = self.GET("/status/publish-9999999")
d.addBoth(self.should404, "test_status_path_404_error (publish)")

d = self.GET("/status/retrieve-9999999")
d.addBoth(self.should404, "test_status_path_404_error (retrieve)")

return d

def _check_status_subpath_result(self, result, expected_title):
"""
Helper to verify that results of "GET /status/up-0" and
similar are as expected.
"""
body, status, _ = result
self.failUnlessReallyEqual(int(status), 200)
soup = BeautifulSoup(body, 'html5lib')
assert_soup_has_favicon(self, soup)
assert_soup_has_tag_with_content(
self, soup, u"title", expected_title
)

def test_status_up_subpath(self):
"""
See that "GET /status/up-0" works.
"""
h = self.s.get_history()
ul_num = h.list_all_upload_statuses()[0].get_counter()
d = self.GET("/status/up-{}".format(ul_num), return_response=True)
d.addCallback(self._check_status_subpath_result,
u"Tahoe-LAFS - File Upload Status")
return d

def test_status_down_subpath(self):
"""
See that "GET /status/down-0" works.
"""
h = self.s.get_history()
dl_num = h.list_all_download_statuses()[0].get_counter()
d = self.GET("/status/down-{}".format(dl_num), return_response=True)
d.addCallback(self._check_status_subpath_result,
u"Tahoe-LAFS - File Download Status")
return d

def test_status_mapupdate_subpath(self):
"""
See that "GET /status/mapupdate-0" works.
"""
h = self.s.get_history()
mu_num = h.list_all_mapupdate_statuses()[0].get_counter()
d = self.GET("/status/mapupdate-{}".format(mu_num), return_response=True)
d.addCallback(self._check_status_subpath_result,
u"Tahoe-LAFS - Mutable File Servermap Update Status")
return d

def test_status_publish_subpath(self):
"""
See that "GET /status/publish-0" works.
"""
h = self.s.get_history()
pub_num = h.list_all_publish_statuses()[0].get_counter()
d = self.GET("/status/publish-{}".format(pub_num), return_response=True)
d.addCallback(self._check_status_subpath_result,
u"Tahoe-LAFS - Mutable File Publish Status")
return d

def test_status_retrieve_subpath(self):
"""
See that "GET /status/retrieve-0" works.
"""
h = self.s.get_history()
ret_num = h.list_all_retrieve_statuses()[0].get_counter()
d = self.GET("/status/retrieve-{}".format(ret_num), return_response=True)
d.addCallback(self._check_status_subpath_result,
u"Tahoe-LAFS - Mutable File Retrieve Status")
return d

def test_GET_FILEURL(self):
d = self.GET(self.public_url + "/foo/bar.txt")

@ -15,17 +15,50 @@ if PY2:

# Keep these sorted alphabetically, to reduce merge conflicts:
PORTED_MODULES = [
"allmydata.hashtree",
"allmydata.util.abbreviate",
"allmydata.util.assertutil",
"allmydata.util.base32",
"allmydata.util.base62",
"allmydata.util.deferredutil",
"allmydata.util.dictutil",
"allmydata.util.hashutil",
"allmydata.util.humanreadable",
"allmydata.util.iputil",
"allmydata.util.mathutil",
"allmydata.util.namespace",
"allmydata.util.netstring",
"allmydata.util.observer",
"allmydata.util.pipeline",
"allmydata.util.pollmixin",
"allmydata.util._python3",
"allmydata.util.spans",
"allmydata.util.statistics",
"allmydata.util.time_format",
"allmydata.test.common_py3",
]

PORTED_TEST_MODULES = [
"allmydata.test.test_abbreviate",
"allmydata.test.test_base32",
"allmydata.test.test_base62",
"allmydata.test.test_deferredutil",
"allmydata.test.test_dictutil",
"allmydata.test.test_hashtree",
"allmydata.test.test_hashutil",
"allmydata.test.test_humanreadable",
"allmydata.test.test_iputil",
"allmydata.test.test_netstring",
"allmydata.test.test_observer",
"allmydata.test.test_pipeline",
"allmydata.test.test_python3",
"allmydata.test.test_spans",
"allmydata.test.test_statistics",
"allmydata.test.test_time_format",
"allmydata.test.test_version",
]


if __name__ == '__main__':
from subprocess import check_call
check_call(["trial"] + PORTED_TEST_MODULES)

@ -1,3 +1,16 @@
"""
Convert timestamps to abbreviated English text.

Ported to Python 3.
"""
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals

from future.utils import PY2
if PY2:
from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401

import re
from datetime import timedelta
@ -9,6 +22,10 @@ MONTH = 30*DAY
YEAR = 365*DAY

def abbreviate_time(s):
"""
Given time in seconds (float or int) or timedelta, summarize as English by
returning unicode string.
"""
postfix = ''
if isinstance(s, timedelta):
# this feels counter-intuitive that positive numbers in a
@ -45,6 +62,9 @@ def abbreviate_time(s):
return _plural(s / YEAR, "year")

def abbreviate_space(s, SI=True):
"""
Given size in bytes summarize as English by returning unicode string.
"""
if s is None:
return "unknown"
if SI:
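The hunk cuts off here, but the shape of these helpers is a simple threshold ladder. A minimal sketch of the same pattern, with my own illustrative function; the cutoffs and wording here are assumptions, not the module's exact output:

    # Illustrative threshold ladder in the style of abbreviate_time/
    # abbreviate_space above; not the library's actual formatting.
    MINUTE, HOUR, DAY = 60, 3600, 24 * 3600

    def sketch_abbreviate_time(s):
        if s is None:
            return ""
        if s < MINUTE:
            return "%d seconds" % (s,)
        if s < HOUR:
            return "%d minutes" % (s // MINUTE,)
        if s < DAY:
            return "%d hours" % (s // HOUR,)
        return "%d days" % (s // DAY,)

    print(sketch_abbreviate_time(7384))  # -> "2 hours"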

@ -1,17 +1,42 @@
# from the Python Standard Library
import six
import string
"""
Base32 encoding.

Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from future.utils import PY2
if PY2:
from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401

if PY2:
def backwardscompat_bytes(b):
"""
Replace Future bytes with native Python 2 bytes, so % works
consistently until other modules are ported.
"""
return getattr(b, "__native__", lambda: b)()
import string
maketrans = string.maketrans
else:
def backwardscompat_bytes(b):
return b
maketrans = bytes.maketrans

import base64

from allmydata.util.assertutil import precondition

z_base_32_alphabet = "ybndrfg8ejkmcpqxot1uwisza345h769" # Zooko's choice, rationale in "DESIGN" doc
rfc3548_alphabet = "abcdefghijklmnopqrstuvwxyz234567" # RFC3548 standard used by Gnutella, Content-Addressable Web, THEX, Bitzi, Web-Calculus...
rfc3548_alphabet = b"abcdefghijklmnopqrstuvwxyz234567" # RFC3548 standard used by Gnutella, Content-Addressable Web, THEX, Bitzi, Web-Calculus...
chars = rfc3548_alphabet

vals = ''.join(map(chr, range(32)))
c2vtranstable = string.maketrans(chars, vals)
v2ctranstable = string.maketrans(vals, chars)
identitytranstable = string.maketrans('', '')
vals = backwardscompat_bytes(bytes(range(32)))
c2vtranstable = maketrans(chars, vals)
v2ctranstable = maketrans(vals, chars)
identitytranstable = maketrans(b'', b'')

def _get_trailing_chars_without_lsbs(N, d):
"""
@ -22,9 +47,9 @@ def _get_trailing_chars_without_lsbs(N, d):
s.extend(_get_trailing_chars_without_lsbs(N+1, d=d))
i = 0
while i < len(chars):
if not d.has_key(i):
if i not in d:
d[i] = None
s.append(chars[i])
s.append(chars[i:i+1])
i = i + 2**N
return s

@ -33,103 +58,31 @@ def get_trailing_chars_without_lsbs(N):
if N == 0:
return chars
d = {}
return ''.join(_get_trailing_chars_without_lsbs(N, d=d))
return b''.join(_get_trailing_chars_without_lsbs(N, d=d))

BASE32CHAR = '['+get_trailing_chars_without_lsbs(0)+']'
BASE32CHAR_4bits = '['+get_trailing_chars_without_lsbs(1)+']'
BASE32CHAR_3bits = '['+get_trailing_chars_without_lsbs(2)+']'
BASE32CHAR_2bits = '['+get_trailing_chars_without_lsbs(3)+']'
BASE32CHAR_1bits = '['+get_trailing_chars_without_lsbs(4)+']'
BASE32STR_1byte = BASE32CHAR+BASE32CHAR_3bits
BASE32STR_2bytes = BASE32CHAR+'{3}'+BASE32CHAR_1bits
BASE32STR_3bytes = BASE32CHAR+'{4}'+BASE32CHAR_4bits
BASE32STR_4bytes = BASE32CHAR+'{6}'+BASE32CHAR_2bits
BASE32STR_anybytes = '((?:%s{8})*' % (BASE32CHAR,) + "(?:|%s|%s|%s|%s))" % (BASE32STR_1byte, BASE32STR_2bytes, BASE32STR_3bytes, BASE32STR_4bytes)
BASE32CHAR = backwardscompat_bytes(b'['+get_trailing_chars_without_lsbs(0)+b']')
BASE32CHAR_4bits = backwardscompat_bytes(b'['+get_trailing_chars_without_lsbs(1)+b']')
BASE32CHAR_3bits = backwardscompat_bytes(b'['+get_trailing_chars_without_lsbs(2)+b']')
BASE32CHAR_2bits = backwardscompat_bytes(b'['+get_trailing_chars_without_lsbs(3)+b']')
BASE32CHAR_1bits = backwardscompat_bytes(b'['+get_trailing_chars_without_lsbs(4)+b']')
BASE32STR_1byte = backwardscompat_bytes(BASE32CHAR+BASE32CHAR_3bits)
BASE32STR_2bytes = backwardscompat_bytes(BASE32CHAR+b'{3}'+BASE32CHAR_1bits)
BASE32STR_3bytes = backwardscompat_bytes(BASE32CHAR+b'{4}'+BASE32CHAR_4bits)
BASE32STR_4bytes = backwardscompat_bytes(BASE32CHAR+b'{6}'+BASE32CHAR_2bits)
BASE32STR_anybytes = backwardscompat_bytes(bytes(b'((?:%s{8})*') % (BASE32CHAR,) + bytes(b"(?:|%s|%s|%s|%s))") % (BASE32STR_1byte, BASE32STR_2bytes, BASE32STR_3bytes, BASE32STR_4bytes))

def b2a(os):
"""
@param os the data to be encoded (a string)
@param os the data to be encoded (as bytes)

@return the contents of os in base-32 encoded form
@return the contents of os in base-32 encoded form, as bytes
"""
return _b2a_l(os, len(os)*8)
return base64.b32encode(os).rstrip(b"=").lower()

def b2a_or_none(os):
if os is not None:
return b2a(os)

def _b2a_l(os, lengthinbits):
"""
@param os the data to be encoded (a string)
@param lengthinbits the number of bits of data in os to be encoded

b2a_l() will generate a base-32 encoded string big enough to encode lengthinbits bits. So for
example if os is 2 bytes long and lengthinbits is 15, then b2a_l() will generate a 3-character-
long base-32 encoded string (since 3 quintets is sufficient to encode 15 bits). If os is
2 bytes long and lengthinbits is 16 (or None), then b2a_l() will generate a 4-character string.
Note that b2a_l() does not mask off unused least-significant bits, so for example if os is
2 bytes long and lengthinbits is 15, then you must ensure that the unused least-significant bit
of os is a zero bit or you will get the wrong result. This precondition is tested by assertions
if assertions are enabled.

Warning: if you generate a base-32 encoded string with b2a_l(), and then someone else tries to
decode it by calling a2b() instead of a2b_l(), then they will (probably) get a different
string than the one you encoded! So only use b2a_l() when you are sure that the encoding and
decoding sides know exactly which lengthinbits to use. If you do not have a way for the
encoder and the decoder to agree upon the lengthinbits, then it is best to use b2a() and
a2b(). The only drawback to using b2a() over b2a_l() is that when you have a number of
bits to encode that is not a multiple of 8, b2a() can sometimes generate a base-32 encoded
string that is one or two characters longer than necessary.

@return the contents of os in base-32 encoded form
"""
precondition(isinstance(lengthinbits, (int, long,)), "lengthinbits is required to be an integer.", lengthinbits=lengthinbits)
precondition((lengthinbits+7)/8 == len(os), "lengthinbits is required to specify a number of bits storable in exactly len(os) octets.", lengthinbits=lengthinbits, lenos=len(os))

os = map(ord, os)

numquintets = (lengthinbits+4)/5
numoctetsofdata = (lengthinbits+7)/8
# print "numoctetsofdata: %s, len(os): %s, lengthinbits: %s, numquintets: %s" % (numoctetsofdata, len(os), lengthinbits, numquintets,)
# strip trailing octets that won't be used
del os[numoctetsofdata:]
# zero out any unused bits in the final octet
if lengthinbits % 8 != 0:
os[-1] = os[-1] >> (8-(lengthinbits % 8))
os[-1] = os[-1] << (8-(lengthinbits % 8))
# append zero octets for padding if needed
numoctetsneeded = (numquintets*5+7)/8 + 1
os.extend([0]*(numoctetsneeded-len(os)))

quintets = []
cutoff = 256
num = os[0]
i = 0
while len(quintets) < numquintets:
i = i + 1
assert len(os) > i, "len(os): %s, i: %s, len(quintets): %s, numquintets: %s, lengthinbits: %s, numoctetsofdata: %s, numoctetsneeded: %s, os: %s" % (len(os), i, len(quintets), numquintets, lengthinbits, numoctetsofdata, numoctetsneeded, os,)
num = num * 256
num = num + os[i]
if cutoff == 1:
cutoff = 256
continue
cutoff = cutoff * 8
quintet = num / cutoff
quintets.append(quintet)
num = num - (quintet * cutoff)

cutoff = cutoff / 32
quintet = num / cutoff
quintets.append(quintet)
num = num - (quintet * cutoff)

if len(quintets) > numquintets:
assert len(quintets) == (numquintets+1), "len(quintets): %s, numquintets: %s, quintets: %s" % (len(quintets), numquintets, quintets,)
quintets = quintets[:numquintets]
res = string.translate(string.join(map(chr, quintets), ''), v2ctranstable)
assert could_be_base32_encoded_l(res, lengthinbits), "lengthinbits: %s, res: %s" % (lengthinbits, res,)
return res

# b2a() uses the minimal number of quintets sufficient to encode the binary
# input. It just so happens that the relation is like this (everything is
# modulo 40 bits).
@ -144,7 +97,9 @@ NUM_OS_TO_NUM_QS=(0, 2, 4, 5, 7,)

NUM_QS_TO_NUM_OS=(0, 1, 1, 2, 2, 3, 3, 4)
NUM_QS_LEGIT=(1, 0, 1, 0, 1, 1, 0, 1,)
NUM_QS_TO_NUM_BITS=tuple(map(lambda x: x*8, NUM_QS_TO_NUM_OS))
NUM_QS_TO_NUM_BITS=tuple([_x*8 for _x in NUM_QS_TO_NUM_OS])
if PY2:
del _x

# A fast way to determine whether a given string *could* be base-32 encoded data, assuming that the
# original data had 8K bits for a positive integer K.
@ -152,8 +107,8 @@ NUM_QS_TO_NUM_BITS=tuple(map(lambda x: x*8, NUM_QS_TO_NUM_OS))
# tells whether the final character is reasonable.
def add_check_array(cs, sfmap):
checka=[0] * 256
for c in cs:
checka[ord(c)] = 1
for c in bytes(cs):
checka[c] = 1
sfmap.append(tuple(checka))

def init_s8():
@ -163,106 +118,29 @@ def init_s8():
if NUM_QS_LEGIT[lenmod8]:
add_check_array(get_trailing_chars_without_lsbs(4-(NUM_QS_TO_NUM_BITS[lenmod8]%5)), s8)
else:
add_check_array('', s8)
add_check_array(b'', s8)
return tuple(s8)
s8 = init_s8()

# A somewhat fast way to determine whether a given string *could* be base-32 encoded data, given a
# lengthinbits.
# The boolean value of s5[lengthinbits%5][ord(s[-1])], where s is the possibly base-32 encoded
# string tells whether the final character is reasonable.
def init_s5():
s5 = []
add_check_array(get_trailing_chars_without_lsbs(0), s5)
for lenmod5 in [1,2,3,4]:
add_check_array(get_trailing_chars_without_lsbs(5-lenmod5), s5)
return tuple(s5)
s5 = init_s5()

def could_be_base32_encoded(s, s8=s8, tr=string.translate, identitytranstable=identitytranstable, chars=chars):
precondition(isinstance(s, six.binary_type), s)
if s == '':
def could_be_base32_encoded(s, s8=s8, tr=bytes.translate, identitytranstable=identitytranstable, chars=chars):
precondition(isinstance(s, bytes), s)
if s == b'':
return True
return s8[len(s)%8][ord(s[-1])] and not tr(s, identitytranstable, chars)

def could_be_base32_encoded_l(s, lengthinbits, s5=s5, tr=string.translate, identitytranstable=identitytranstable, chars=chars):
precondition(isinstance(s, six.binary_type), s)
if s == '':
return True
assert lengthinbits%5 < len(s5), lengthinbits
assert ord(s[-1]) < s5[lengthinbits%5]
return (((lengthinbits+4)/5) == len(s)) and s5[lengthinbits%5][ord(s[-1])] and not string.translate(s, identitytranstable, chars)

def num_octets_that_encode_to_this_many_quintets(numqs):
# Here is a computation that conveniently expresses this:
return (numqs*5+3)/8
s = bytes(s) # On Python 2, make sure we're using modern bytes
return s8[len(s)%8][s[-1]] and not tr(s, identitytranstable, chars)
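A quick way to see the two halves of that check: the s8 table vets the final character for the string's length class, and the translate call vets the alphabet by deleting every legal character and insisting nothing is left over. A small standalone illustration of the alphabet half, using only the stdlib:

    # Deleting every character of the base32 alphabet from a candidate
    # leaves b"" exactly when the candidate is alphabet-clean.
    alphabet = b"abcdefghijklmnopqrstuvwxyz234567"
    assert b"nbswy3dp".translate(None, alphabet) == b""   # clean
    assert b"NBSWY3DP".translate(None, alphabet) != b""   # wrong case survives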

def a2b(cs):
"""
@param cs the base-32 encoded data (a string)
@param cs the base-32 encoded data (as bytes)
"""
precondition(could_be_base32_encoded(cs), "cs is required to be possibly base32 encoded data.", cs=cs)
precondition(isinstance(cs, six.binary_type), cs)
precondition(isinstance(cs, bytes), cs)

return _a2b_l(cs, num_octets_that_encode_to_this_many_quintets(len(cs))*8)

def _a2b_l(cs, lengthinbits):
"""
@param lengthinbits the number of bits of data encoded into cs

a2b_l() will return a result big enough to hold lengthinbits bits. So for example if cs is
4 characters long (encoding at least 15 and up to 20 bits) and lengthinbits is 16, then a2b_l()
will return a string of length 2 (since 2 bytes is sufficient to store 16 bits). If cs is 4
characters long and lengthinbits is 20, then a2b_l() will return a string of length 3 (since
3 bytes is sufficient to store 20 bits). Note that b2a_l() does not mask off unused least-
significant bits, so for example if cs is 4 characters long and lengthinbits is 17, then you
must ensure that all three of the unused least-significant bits of cs are zero bits or you will
get the wrong result. This precondition is tested by assertions if assertions are enabled.
(Generally you just require the encoder to ensure this consistency property between the least
significant zero bits and value of lengthinbits, and reject strings that have a length-in-bits
which isn't a multiple of 8 and yet don't have trailing zero bits, as improperly encoded.)

Please see the warning in the docstring of b2a_l() regarding the use of b2a() versus b2a_l().

@return the data encoded in cs
"""
precondition(could_be_base32_encoded_l(cs, lengthinbits), "cs is required to be possibly base32 encoded data.", cs=cs, lengthinbits=lengthinbits)
precondition(isinstance(cs, six.binary_type), cs)
if cs == '':
return ''

qs = map(ord, string.translate(cs, c2vtranstable))

numoctets = (lengthinbits+7)/8
numquintetsofdata = (lengthinbits+4)/5
# strip trailing quintets that won't be used
del qs[numquintetsofdata:]
# zero out any unused bits in the final quintet
if lengthinbits % 5 != 0:
qs[-1] = qs[-1] >> (5-(lengthinbits % 5))
qs[-1] = qs[-1] << (5-(lengthinbits % 5))
# append zero quintets for padding if needed
numquintetsneeded = (numoctets*8+4)/5
qs.extend([0]*(numquintetsneeded-len(qs)))

octets = []
pos = 2048
num = qs[0] * pos
i = 1
while len(octets) < numoctets:
while pos > 256:
pos = pos / 32
num = num + (qs[i] * pos)
i = i + 1
octet = num / 256
octets.append(octet)
num = num - (octet * 256)
num = num * 256
pos = pos * 256
assert len(octets) == numoctets, "len(octets): %s, numoctets: %s, octets: %s" % (len(octets), numoctets, octets,)
res = ''.join(map(chr, octets))
precondition(_b2a_l(res, lengthinbits) == cs, "cs is required to be the canonical base-32 encoding of some data.", b2a(res), res=res, cs=cs)
return res
cs = cs.upper()
# Add padding back, to make Python's base64 module happy:
while (len(cs) * 5) % 8 != 0:
cs += b"="
return base64.b32decode(cs)

__all__ = ["b2a", "a2b", "b2a_or_none", "BASE32CHAR_3bits", "BASE32CHAR_1bits", "BASE32CHAR", "BASE32STR_anybytes", "could_be_base32_encoded"]
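Since b2a()/a2b() are now thin wrappers over the stdlib, the new behavior is easy to check by hand. A minimal round trip; the expected values below are standard RFC 3548 base32 and follow directly from the code shown above:

    import base64

    data = b"hello"
    encoded = base64.b32encode(data).rstrip(b"=").lower()  # what b2a() now does
    assert encoded == b"nbswy3dp"

    # a2b() re-pads to a multiple of 8 characters and upper-cases before decoding:
    cs = encoded.upper()
    while (len(cs) * 5) % 8 != 0:
        cs += b"="
    assert base64.b32decode(cs) == data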

@ -1,22 +1,43 @@
# from the Python Standard Library
import string
"""
Base62 encoding.

Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from future.utils import PY2
if PY2:
from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401

if PY2:
import string
maketrans = string.maketrans
translate = string.translate
else:
maketrans = bytes.maketrans
translate = bytes.translate

from past.builtins import chr as byteschr

from allmydata.util.mathutil import log_ceil, log_floor

chars = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
chars = b"0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"

BASE62CHAR = '[' + chars + ']'
BASE62CHAR = b'[' + chars + b']'

vals = ''.join([chr(i) for i in range(62)])
c2vtranstable = string.maketrans(chars, vals)
v2ctranstable = string.maketrans(vals, chars)
identitytranstable = string.maketrans(chars, chars)
vals = b''.join([byteschr(i) for i in range(62)])
c2vtranstable = maketrans(chars, vals)
v2ctranstable = maketrans(vals, chars)
identitytranstable = maketrans(chars, chars)

def b2a(os):
"""
@param os the data to be encoded (a string)
@param os the data to be encoded (as bytes)

@return the contents of os in base-62 encoded form
@return the contents of os in base-62 encoded form, as bytes
"""
cs = b2a_l(os, len(os)*8)
assert num_octets_that_encode_to_this_many_chars(len(cs)) == len(os), "%s != %s, numchars: %s" % (num_octets_that_encode_to_this_many_chars(len(cs)), len(os), len(cs))
@ -24,7 +45,7 @@ def b2a(os):

def b2a_l(os, lengthinbits):
"""
@param os the data to be encoded (a string)
@param os the data to be encoded (as bytes)
@param lengthinbits the number of bits of data in os to be encoded

b2a_l() will generate a base-62 encoded string big enough to encode
@ -45,9 +66,11 @@ def b2a_l(os, lengthinbits):
bits to encode that is not a multiple of 8, b2a() can sometimes generate a base-62 encoded
string that is one or two characters longer than necessary.

@return the contents of os in base-62 encoded form
@return the contents of os in base-62 encoded form, as bytes
"""
os = [ord(o) for o in reversed(os)] # treat os as big-endian -- and we want to process the least-significant o first
# We call bytes() again for Python 2, to ensure literals are using future's
# Python 3-compatible variant.
os = [o for o in reversed(bytes(os))] # treat os as big-endian -- and we want to process the least-significant o first

value = 0
numvalues = 1 # the number of possible values that value could be
@ -62,7 +85,7 @@ def b2a_l(os, lengthinbits):
value //= 62
numvalues //= 62

return string.translate(''.join([chr(c) for c in reversed(chars)]), v2ctranstable) # make it big-endian
return translate(bytes([c for c in reversed(chars)]), v2ctranstable) # make it big-endian

def num_octets_that_encode_to_this_many_chars(numcs):
return log_floor(62**numcs, 256)
@ -89,9 +112,11 @@ def a2b_l(cs, lengthinbits):
Please see the warning in the docstring of b2a_l() regarding the use of
b2a() versus b2a_l().

@return the data encoded in cs
@return the data encoded in cs, as bytes
"""
cs = [ord(c) for c in reversed(string.translate(cs, c2vtranstable))] # treat cs as big-endian -- and we want to process the least-significant c first
# We call bytes() again for Python 2, to ensure literals are using future's
# Python 3-compatible variant.
cs = [c for c in reversed(bytes(translate(cs, c2vtranstable)))] # treat cs as big-endian -- and we want to process the least-significant c first

value = 0
numvalues = 1 # the number of possible values that value could be
@ -101,10 +126,10 @@ def a2b_l(cs, lengthinbits):
numvalues *= 62

numvalues = 2**lengthinbits
bytes = []
result_bytes = []
while numvalues > 1:
bytes.append(value % 256)
result_bytes.append(value % 256)
value //= 256
numvalues //= 256

return ''.join([chr(b) for b in reversed(bytes)]) # make it big-endian
return bytes([b for b in reversed(result_bytes)]) # make it big-endian
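The core of this port is swapping string.maketrans/string.translate for their bytes equivalents. A standalone illustration of the Python 3 idioms the module now relies on:

    # bytes.maketrans builds a 256-entry table; bytes.translate applies it.
    table = bytes.maketrans(b"abc", b"xyz")
    assert b"aabbcc".translate(table) == b"xxyyzz"

    # Iterating bytes on Python 3 yields ints, which is why the ported code
    # writes bytes([c for c in ...]) instead of ''.join(chr(c) for ...).
    assert list(b"abc") == [97, 98, 99]
    assert bytes([97, 98, 99]) == b"abc"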

@ -1,6 +1,20 @@
"""
Tools to mess with dicts.

Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from future.utils import PY2
if PY2:
# IMPORTANT: We deliberately don't import dict. The issue is that we're
# subclassing dict, so we'd end up exposing Python 3 dict APIs to lots of
# code that doesn't support it.
from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, int, list, object, range, str, max, min # noqa: F401


class DictOfSets(dict):
def add(self, key, value):
@ -10,7 +24,7 @@ class DictOfSets(dict):
self[key] = set([value])

def update(self, otherdictofsets):
for key, values in otherdictofsets.iteritems():
for key, values in otherdictofsets.items():
if key in self:
self[key].update(values)
else:

@ -4,7 +4,6 @@ unicode and back.
"""

import sys, os, re, locale
from types import NoneType

from allmydata.util.assertutil import precondition, _assert
from twisted.python import usage
@ -12,6 +11,8 @@ from twisted.python.filepath import FilePath
from allmydata.util import log
from allmydata.util.fileutil import abspath_expanduser_unicode

NoneType = type(None)


def canonical_encoding(encoding):
if encoding is None:

@ -4,7 +4,7 @@ from __future__ import print_function
Futz with files like a pro.
"""

import sys, exceptions, os, stat, tempfile, time, binascii
import sys, os, stat, tempfile, time, binascii
import six
from collections import namedtuple
from errno import ENOENT
@ -190,7 +190,7 @@ def make_dirs(dirname, mode=0o777):
if not os.path.isdir(dirname):
if tx:
raise tx
raise exceptions.IOError("unknown error prevented creation of directory, or deleted the directory immediately after creation: %s" % dirname) # careful not to construct an IOError with a 2-tuple, as that has a special meaning...
raise IOError("unknown error prevented creation of directory, or deleted the directory immediately after creation: %s" % dirname) # careful not to construct an IOError with a 2-tuple, as that has a special meaning...

def rm_dir(dirname):
"""

@ -1,3 +1,19 @@
"""
Hashing utilities.

Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from future.utils import PY2
if PY2:
from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401

from past.builtins import chr as byteschr

import os
import hashlib
from allmydata.util.netstring import netstring
@ -60,34 +76,34 @@ def tagged_pair_hash(tag, val1, val2, truncate_to=None):


# immutable
STORAGE_INDEX_TAG = "allmydata_immutable_key_to_storage_index_v1"
BLOCK_TAG = "allmydata_encoded_subshare_v1"
UEB_TAG = "allmydata_uri_extension_v1"
PLAINTEXT_TAG = "allmydata_plaintext_v1"
CIPHERTEXT_TAG = "allmydata_crypttext_v1"
CIPHERTEXT_SEGMENT_TAG = "allmydata_crypttext_segment_v1"
PLAINTEXT_SEGMENT_TAG = "allmydata_plaintext_segment_v1"
CONVERGENT_ENCRYPTION_TAG = "allmydata_immutable_content_to_key_with_added_secret_v1+"
STORAGE_INDEX_TAG = b"allmydata_immutable_key_to_storage_index_v1"
BLOCK_TAG = b"allmydata_encoded_subshare_v1"
UEB_TAG = b"allmydata_uri_extension_v1"
PLAINTEXT_TAG = b"allmydata_plaintext_v1"
CIPHERTEXT_TAG = b"allmydata_crypttext_v1"
CIPHERTEXT_SEGMENT_TAG = b"allmydata_crypttext_segment_v1"
PLAINTEXT_SEGMENT_TAG = b"allmydata_plaintext_segment_v1"
CONVERGENT_ENCRYPTION_TAG = b"allmydata_immutable_content_to_key_with_added_secret_v1+"

CLIENT_RENEWAL_TAG = "allmydata_client_renewal_secret_v1"
CLIENT_CANCEL_TAG = "allmydata_client_cancel_secret_v1"
FILE_RENEWAL_TAG = "allmydata_file_renewal_secret_v1"
FILE_CANCEL_TAG = "allmydata_file_cancel_secret_v1"
BUCKET_RENEWAL_TAG = "allmydata_bucket_renewal_secret_v1"
BUCKET_CANCEL_TAG = "allmydata_bucket_cancel_secret_v1"
CLIENT_RENEWAL_TAG = b"allmydata_client_renewal_secret_v1"
CLIENT_CANCEL_TAG = b"allmydata_client_cancel_secret_v1"
FILE_RENEWAL_TAG = b"allmydata_file_renewal_secret_v1"
FILE_CANCEL_TAG = b"allmydata_file_cancel_secret_v1"
BUCKET_RENEWAL_TAG = b"allmydata_bucket_renewal_secret_v1"
BUCKET_CANCEL_TAG = b"allmydata_bucket_cancel_secret_v1"

# mutable
MUTABLE_WRITEKEY_TAG = "allmydata_mutable_privkey_to_writekey_v1"
MUTABLE_WRITE_ENABLER_MASTER_TAG = "allmydata_mutable_writekey_to_write_enabler_master_v1"
MUTABLE_WRITE_ENABLER_TAG = "allmydata_mutable_write_enabler_master_and_nodeid_to_write_enabler_v1"
MUTABLE_PUBKEY_TAG = "allmydata_mutable_pubkey_to_fingerprint_v1"
MUTABLE_READKEY_TAG = "allmydata_mutable_writekey_to_readkey_v1"
MUTABLE_DATAKEY_TAG = "allmydata_mutable_readkey_to_datakey_v1"
MUTABLE_STORAGEINDEX_TAG = "allmydata_mutable_readkey_to_storage_index_v1"
MUTABLE_WRITEKEY_TAG = b"allmydata_mutable_privkey_to_writekey_v1"
MUTABLE_WRITE_ENABLER_MASTER_TAG = b"allmydata_mutable_writekey_to_write_enabler_master_v1"
MUTABLE_WRITE_ENABLER_TAG = b"allmydata_mutable_write_enabler_master_and_nodeid_to_write_enabler_v1"
MUTABLE_PUBKEY_TAG = b"allmydata_mutable_pubkey_to_fingerprint_v1"
MUTABLE_READKEY_TAG = b"allmydata_mutable_writekey_to_readkey_v1"
MUTABLE_DATAKEY_TAG = b"allmydata_mutable_readkey_to_datakey_v1"
MUTABLE_STORAGEINDEX_TAG = b"allmydata_mutable_readkey_to_storage_index_v1"

# dirnodes
DIRNODE_CHILD_WRITECAP_TAG = "allmydata_mutable_writekey_and_salt_to_dirnode_child_capkey_v1"
DIRNODE_CHILD_SALT_TAG = "allmydata_dirnode_child_rwcap_to_salt_v1"
DIRNODE_CHILD_WRITECAP_TAG = b"allmydata_mutable_writekey_and_salt_to_dirnode_child_capkey_v1"
DIRNODE_CHILD_SALT_TAG = b"allmydata_dirnode_child_rwcap_to_salt_v1"


def storage_index_hash(key):
@ -158,8 +174,8 @@ def convergence_hash(k, n, segsize, data, convergence):


def convergence_hasher(k, n, segsize, convergence):
assert isinstance(convergence, str)
param_tag = netstring("%d,%d,%d" % (k, n, segsize))
assert isinstance(convergence, bytes)
param_tag = netstring(b"%d,%d,%d" % (k, n, segsize))
tag = CONVERGENT_ENCRYPTION_TAG + netstring(convergence) + param_tag
return tagged_hasher(tag, KEYLEN)

@ -197,12 +213,13 @@ def bucket_cancel_secret_hash(file_cancel_secret, peerid):


def _xor(a, b):
return "".join([chr(ord(c) ^ ord(b)) for c in a])
return b"".join([byteschr(c ^ b) for c in a])


def hmac(tag, data):
ikey = _xor(tag, "\x36")
okey = _xor(tag, "\x5c")
tag = bytes(tag) # Make sure it matches Python 3 behavior
ikey = _xor(tag, 0x36)
okey = _xor(tag, 0x5c)
h1 = hashlib.sha256(ikey + data).digest()
h2 = hashlib.sha256(okey + h1).digest()
return h2
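The _xor() change works because iterating bytes yields ints on Python 3, so the mask can now be a plain integer rather than a one-character string. A standalone sketch of the same idea (the diff uses byteschr from past.builtins; bytes([...]) below is the pure Python 3 equivalent):

    def xor_mask(data, mask):
        # data is bytes; each c is already an int on Python 3
        return bytes([c ^ mask for c in data])

    assert xor_mask(b"\x00\xff", 0x36) == b"\x36\xc9"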
@ -251,7 +268,7 @@ def timing_safe_compare(a, b):
return bool(tagged_hash(n, a) == tagged_hash(n, b))


BACKUPDB_DIRHASH_TAG = "allmydata_backupdb_dirhash_v1"
BACKUPDB_DIRHASH_TAG = b"allmydata_backupdb_dirhash_v1"


def backupdb_dirhash(contents):

@ -1,4 +1,18 @@
# from the Python Standard Library
"""
Utilities for getting IP addresses.

Ported to Python 3.
"""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from future.utils import PY2, native_str
if PY2:
from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401

import os, re, socket, subprocess, errno
from sys import platform

@ -88,13 +102,18 @@ except ImportError:
increase_rlimits = _increase_rlimits

def get_local_addresses_sync():
return _synchronously_find_addresses_via_config()
"""
Return a list of IPv4 addresses (as dotted-quad native strings) that are
currently configured on this host, sorted in descending order of how likely
we think they are to work.
"""
return [native_str(a) for a in _synchronously_find_addresses_via_config()]

def get_local_addresses_async(target="198.41.0.4"): # A.ROOT-SERVERS.NET
"""
Return a Deferred that fires with a list of IPv4 addresses (as dotted-quad
strings) that are currently configured on this host, sorted in descending
order of how likely we think they are to work.
native strings) that are currently configured on this host, sorted in
descending order of how likely we think they are to work.

@param target: we want to learn an IP address they could try using to
connect to us; The default value is fine, but it might help if you
@ -117,13 +136,13 @@ def get_local_addresses_async(target="198.41.0.4"): # A.ROOT-SERVERS.NET
addresses.append(addr)
return addresses
d.addCallback(_collect)

d.addCallback(lambda addresses: [native_str(s) for s in addresses])
return d

def get_local_ip_for(target):
"""Find out what our IP address is for use by a given target.

@return: the IP address as a dotted-quad string which could be used by
@return: the IP address as a dotted-quad native string which could be used
to connect to us. It might work for them, it might not. If
there is no suitable address (perhaps we don't currently have an
externally-visible interface), this will return None.
@ -162,7 +181,7 @@ def get_local_ip_for(target):
except (socket.error, CannotListenError):
# no route to that host
localip = None
return localip
return native_str(localip)


# Wow, I'm really amazed at how much mileage we've gotten out of calling
@ -171,11 +190,11 @@ def get_local_ip_for(target):
# ... thus wrote Greg Smith in time immemorial...
# Also, the Win32 APIs for this are really klunky and error-prone. --Daira

_win32_re = re.compile(r'^\s*\d+\.\d+\.\d+\.\d+\s.+\s(?P<address>\d+\.\d+\.\d+\.\d+)\s+(?P<metric>\d+)\s*$', flags=re.M|re.I|re.S)
_win32_re = re.compile(br'^\s*\d+\.\d+\.\d+\.\d+\s.+\s(?P<address>\d+\.\d+\.\d+\.\d+)\s+(?P<metric>\d+)\s*$', flags=re.M|re.I|re.S)
_win32_commands = (('route.exe', ('print',), _win32_re),)

# These work in most Unices.
_addr_re = re.compile(r'^\s*inet [a-zA-Z]*:?(?P<address>\d+\.\d+\.\d+\.\d+)[\s/].+$', flags=re.M|re.I|re.S)
_addr_re = re.compile(br'^\s*inet [a-zA-Z]*:?(?P<address>\d+\.\d+\.\d+\.\d+)[\s/].+$', flags=re.M|re.I|re.S)
_unix_commands = (('/bin/ip', ('addr',), _addr_re),
('/sbin/ip', ('addr',), _addr_re),
('/sbin/ifconfig', ('-a',), _addr_re),
@ -209,10 +228,13 @@ def _synchronously_find_addresses_via_config():
else:
exes_to_try = which(pathtotool)

subprocess_error = getattr(
subprocess, "SubprocessError", subprocess.CalledProcessError
)
for exe in exes_to_try:
try:
addresses = _query(exe, args, regex)
except Exception:
except (IOError, OSError, ValueError, subprocess_error):
addresses = []
if addresses:
return addresses
@ -222,9 +244,9 @@ def _synchronously_find_addresses_via_config():
def _query(path, args, regex):
if not os.path.isfile(path):
return []
env = {'LANG': 'en_US.UTF-8'}
env = {native_str('LANG'): native_str('en_US.UTF-8')}
TRIES = 5
for trial in xrange(TRIES):
for trial in range(TRIES):
try:
p = subprocess.Popen([path] + list(args), stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env)
(output, err) = p.communicate()
@ -235,13 +257,13 @@ def _query(path, args, regex):
raise

addresses = []
outputsplit = output.split('\n')
outputsplit = output.split(b'\n')
for outline in outputsplit:
m = regex.match(outline)
if m:
addr = m.group('address')
if addr not in addresses:
addresses.append(addr)
addresses.append(addr.decode("utf-8"))

return addresses

@ -304,7 +326,7 @@ def _foolscapEndpointForPortNumber(portnum):
# approach is error prone for the reasons described on
# https://tahoe-lafs.org/trac/tahoe-lafs/ticket/2787
portnum = allocate_tcp_port()
return (portnum, "tcp:%d" % (portnum,))
return (portnum, native_str("tcp:%d" % (portnum,)))


@implementer(IStreamServerEndpoint)
@ -353,7 +375,7 @@ def listenOnUnused(tub, portnum=None):
"""
portnum, endpoint = _foolscapEndpointForPortNumber(portnum)
tub.listenOn(endpoint)
tub.setLocation("localhost:%d" % (portnum,))
tub.setLocation(native_str("localhost:%d" % (portnum,)))
return portnum


@ -362,4 +384,5 @@ __all__ = ["allocate_tcp_port",
"get_local_addresses_sync",
"get_local_addresses_async",
"get_local_ip_for",
"listenOnUnused",
]
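native_str shows up throughout this file because unicode_literals makes every bare literal unicode, while some consumers (Foolscap endpoint and location strings, subprocess environments on Python 2) want the platform's native str type. A small sketch of the distinction; the Python 2 behavior noted in the comment is an assumption based on future's documented semantics:

    from future.utils import native_str

    endpoint = native_str("tcp:%d" % (12345,))
    # On Python 3 this is the ordinary str "tcp:12345"; on Python 2 it is a
    # native byte-string, which is what APIs like tub.setLocation() expected.
    assert endpoint == "tcp:12345"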

@ -1,8 +1,23 @@
"""
Netstring encoding and decoding.

Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from future.utils import PY2
if PY2:
from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401

from past.builtins import long


def netstring(s):
assert isinstance(s, str), s # no unicode here
return "%d:%s," % (len(s), s,)
assert isinstance(s, bytes), s # no unicode here
return b"%d:%s," % (len(s), s,)

def split_netstring(data, numstrings,
position=0,
@ -13,18 +28,19 @@ def split_netstring(data, numstrings,
byte which was not consumed (the 'required_trailer', if any, counts as
consumed). If 'required_trailer' is not None, throw ValueError if leftover
data does not exactly equal 'required_trailer'."""

assert type(position) in (int, long), (repr(position), type(position))
assert isinstance(data, bytes)
assert required_trailer is None or isinstance(required_trailer, bytes)
assert isinstance(position, (int, long)), (repr(position), type(position))
elements = []
assert numstrings >= 0
while position < len(data):
colon = data.index(":", position)
colon = data.index(b":", position)
length = int(data[position:colon])
string = data[colon+1:colon+1+length]
assert len(string) == length, (len(string), length)
elements.append(string)
position = colon+1+length
assert data[position] == ",", position
assert data[position] == b","[0], position
position += 1
if len(elements) == numstrings:
break
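Two Python 3 details are doing the work here: %-formatting works on bytes (Python 3.5 and later), and indexing bytes yields an int, which is why the trailing-comma check compares against b","[0]. A standalone illustration built from the function shown above:

    def netstring(s):
        assert isinstance(s, bytes), s
        return b"%d:%s," % (len(s), s)

    data = netstring(b"hello")
    assert data == b"5:hello,"

    # Indexing bytes gives an int on Python 3, so compare int to int:
    assert data[-1] == b","[0] == 44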

@ -1,4 +1,17 @@
# -*- test-case-name: allmydata.test.test_observer -*-
"""
Observer for Twisted code.

Ported to Python 3.
"""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from future.utils import PY2
if PY2:
from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401

import weakref
from twisted.internet import defer

@ -1,9 +1,24 @@
"""
A pipeline of Deferreds.

Ported to Python 3.
"""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from future.utils import PY2
if PY2:
from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401

from twisted.internet import defer
from twisted.python.failure import Failure
from twisted.python import log
from allmydata.util.assertutil import precondition


class PipelineError(Exception):
"""One of the pipelined messages returned an error. The received Failure
object is stored in my .error attribute."""

@ -1,4 +1,11 @@
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals

from future.utils import PY2
if PY2:
from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401


class Spans(object):
@ -155,9 +162,11 @@ class Spans(object):
for s in self._spans:
yield s

def __nonzero__(self): # this gets us bool()
def __bool__(self): # this gets us bool()
return bool(self.len())

#__nonzero__ = __bool__ # Python 2 backwards compatibility

def len(self):
# guess what! python doesn't allow __len__ to return a long, only an
# int. So we stop using len(spans), use spans.len() instead.
@ -235,7 +244,7 @@ class DataSpans(object):
for (start, data) in other.get_chunks():
self.add(start, data)

def __nonzero__(self): # this gets us bool()
def __bool__(self): # this gets us bool()
return bool(self.len())

def len(self):
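The truthiness rename is the classic Python 2/3 dunder split: Python 2 consults __nonzero__, Python 3 consults __bool__. A minimal sketch of the portable pattern (the commented-out alias in the diff above would serve the same purpose if re-enabled):

    class Box(object):
        def __init__(self, items):
            self._items = items

        def __bool__(self):          # consulted by bool() on Python 3
            return bool(self._items)

        __nonzero__ = __bool__       # alias consulted by Python 2

    assert not Box([])
    assert Box([1])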

@ -1,3 +1,8 @@
"""
Statistical utilities.

Ported to Python 3.
"""
# Copyright (c) 2009 Shawn Willden
# mailto:shawn@willden.org
# I hereby license all patches I have contributed or will contribute to the
@ -5,7 +10,18 @@
# either the GNU General Public License, version 2 or later, or under the
# Transitive Grace Period Public License, version 1 or later.

from __future__ import division, print_function

from __future__ import unicode_literals
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from future.utils import PY2
if PY2:
from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401

from functools import reduce

from allmydata.util.mathutil import round_sigfigs
import math
import sys
@ -78,7 +94,7 @@ def survival_pmf_via_bd(p_list):
"""
pmf_list = [ binomial_distribution_pmf(p_list.count(p), p)
for p in set(p_list) ]
return reduce(convolve, pmf_list)
return list(reduce(convolve, pmf_list))

def survival_pmf_via_conv(p_list):
"""
@ -89,7 +105,7 @@ def survival_pmf_via_conv(p_list):
intended for internal use and testing only.
"""
pmf_list = [ [1 - p, p] for p in p_list ];
return reduce(convolve, pmf_list)
return list(reduce(convolve, pmf_list))
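For readers who haven't met survival PMFs: each server contributes the two-point PMF [1-p, p] (dead or alive), and convolving those polynomials yields the distribution of how many servers survive. A self-contained sketch with an assumed convolve helper (the module supplies its own implementation):

    from functools import reduce

    def convolve(a, b):
        # polynomial multiplication == PMF of the sum of two independent counts
        out = [0.0] * (len(a) + len(b) - 1)
        for i, x in enumerate(a):
            for j, y in enumerate(b):
                out[i + j] += x * y
        return out

    pmfs = [[0.1, 0.9], [0.1, 0.9]]       # two servers, each alive with p=0.9
    pmf = list(reduce(convolve, pmfs))
    assert [round(v, 2) for v in pmf] == [0.01, 0.18, 0.81]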

def print_pmf(pmf, n=4, out=sys.stdout):
"""

@ -1,5 +1,18 @@
# ISO-8601:
# http://www.cl.cam.ac.uk/~mgk25/iso-time.html
"""
Time formatting utilities.

ISO-8601:
http://www.cl.cam.ac.uk/~mgk25/iso-time.html
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from future.utils import PY2
if PY2:
from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401
from future.utils import native_str

import calendar, datetime, re, time

@ -14,6 +27,7 @@ def iso_utc_date(now=None, t=time.time):
def iso_utc(now=None, sep='_', t=time.time):
if now is None:
now = t()
sep = native_str(sep) # Python 2 doesn't allow unicode input to isoformat
return datetime.datetime.utcfromtimestamp(now).isoformat(sep)

def iso_utc_time_to_seconds(isotime, _conversion_re=re.compile(r"(?P<year>\d{4})-(?P<month>\d{2})-(?P<day>\d{2})[T_ ](?P<hour>\d{2}):(?P<minute>\d{2}):(?P<second>\d{2})(?P<subsecond>\.\d+)?")):
@ -74,11 +88,11 @@ def format_delta(time_1, time_2):
delta = int(time_2 - time_1)
seconds = delta % 60
delta -= seconds
minutes = (delta / 60) % 60
minutes = (delta // 60) % 60
delta -= minutes * 60
hours = delta / (60*60) % 24
hours = delta // (60*60) % 24
delta -= hours * 24
days = delta / (24*60*60)
days = delta // (24*60*60)
if not days:
if not hours:
if not minutes:
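The / to // swap matters because unadorned division on Python 3 (and under from __future__ import division) returns floats, while decomposing an integer delta needs floor division. A quick check of the decomposition above:

    delta = 93784                   # 1 day, 2 hours, 3 minutes, 4 seconds
    seconds = delta % 60            # 4
    minutes = (delta // 60) % 60    # 3
    hours = (delta // 3600) % 24    # 2
    days = delta // 86400           # 1
    assert (days, hours, minutes, seconds) == (1, 2, 3, 4)
    # With plain "/" these would come back as floats, e.g. 1563.066... minutes.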

@ -1,11 +1,21 @@
"""
"Rational" version definition and parsing for DistutilsVersionFight
discussion at PyCon 2009.
"""

Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from future.utils import PY2
if PY2:
from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401

import re


class IrrationalVersionError(Exception):
"""This is an irrational version."""
pass

@ -1,7 +1,17 @@
"""
Produce reports about the versions of Python software in use by Tahoe-LAFS
for debugging and auditing purposes.

Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from future.utils import PY2
if PY2:
from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401

__all__ = [
"PackagingError",
@ -10,10 +20,12 @@ __all__ = [
"normalized_version",
]

import os, platform, re, subprocess, sys, traceback, pkg_resources
import os, platform, re, sys, traceback, pkg_resources

import six

import distro

from . import (
__appname__,
full_version,
@ -80,7 +92,7 @@ def normalized_version(verstr, what=None):
return verlib.NormalizedVersion(suggested)
except verlib.IrrationalVersionError:
raise
except StandardError:
except Exception:
cls, value, trace = sys.exc_info()
new_exc = PackagingError("could not parse %s due to %s: %s"
% (what or repr(verstr), cls.__name__, value))
@ -109,7 +121,7 @@ def _get_error_string(errors, debug=False):
def _cross_check(pkg_resources_vers_and_locs, imported_vers_and_locs_list):
"""This function returns a list of errors due to any failed cross-checks."""

from _auto_deps import not_import_versionable
from ._auto_deps import not_import_versionable

errors = []
not_pkg_resourceable = ['python', 'platform', __appname__.lower(), 'openssl']
@ -201,83 +213,6 @@ def _extract_openssl_version(ssl_module):

return (version, None, comment if comment else None)

def _get_linux_distro():
""" Tries to determine the name of the Linux OS distribution name.
|
||||
|
||||
First, try to parse a file named "/etc/lsb-release". If it exists, and
|
||||
contains the "DISTRIB_ID=" line and the "DISTRIB_RELEASE=" line, then return
|
||||
the strings parsed from that file.
|
||||
|
||||
If that doesn't work, then invoke platform.dist().
|
||||
|
||||
If that doesn't work, then try to execute "lsb_release", as standardized in
|
||||
2001:
|
||||
|
||||
http://refspecs.freestandards.org/LSB_1.0.0/gLSB/lsbrelease.html
|
||||
|
||||
The current version of the standard is here:
|
||||
|
||||
http://refspecs.freestandards.org/LSB_3.2.0/LSB-Core-generic/LSB-Core-generic/lsbrelease.html
|
||||
|
||||
that lsb_release emitted, as strings.
|
||||
|
||||
Returns a tuple (distname,version). Distname is what LSB calls a
|
||||
"distributor id", e.g. "Ubuntu". Version is what LSB calls a "release",
|
||||
e.g. "8.04".
|
||||
|
||||
A version of this has been submitted to python as a patch for the standard
|
||||
library module "platform":
|
||||
|
||||
http://bugs.python.org/issue3937
|
||||
"""
|
||||
global _distname,_version
|
||||
if _distname and _version:
|
||||
return (_distname, _version)
|
||||
|
||||
try:
|
||||
with open("/etc/lsb-release", "rU") as etclsbrel:
|
||||
for line in etclsbrel:
|
||||
m = _distributor_id_file_re.search(line)
|
||||
if m:
|
||||
_distname = m.group(1).strip()
|
||||
if _distname and _version:
|
||||
return (_distname, _version)
|
||||
m = _release_file_re.search(line)
|
||||
if m:
|
||||
_version = m.group(1).strip()
|
||||
if _distname and _version:
|
||||
return (_distname, _version)
|
||||
except EnvironmentError:
|
||||
pass
|
||||
|
||||
(_distname, _version) = platform.dist()[:2]
|
||||
if _distname and _version:
|
||||
return (_distname, _version)
|
||||
|
||||
if os.path.isfile("/usr/bin/lsb_release") or os.path.isfile("/bin/lsb_release"):
|
||||
try:
|
||||
p = subprocess.Popen(["lsb_release", "--all"], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||
rc = p.wait()
|
||||
if rc == 0:
|
||||
for line in p.stdout.readlines():
|
||||
m = _distributor_id_cmdline_re.search(line)
|
||||
if m:
|
||||
_distname = m.group(1).strip()
|
||||
if _distname and _version:
|
||||
return (_distname, _version)
|
||||
|
||||
m = _release_cmdline_re.search(p.stdout.read())
|
||||
if m:
|
||||
_version = m.group(1).strip()
|
||||
if _distname and _version:
|
||||
return (_distname, _version)
|
||||
except EnvironmentError:
|
||||
pass
|
||||
|
||||
if os.path.exists("/etc/arch-release"):
|
||||
return ("Arch_Linux", "")
|
||||
|
||||
return (_distname,_version)
|
||||
|
||||
def _get_platform():
|
||||
# Our version of platform.platform(), telling us both less and more than the
|
||||
@ -288,7 +223,7 @@ def _get_platform():
|
||||
if "linux" in platform.system().lower():
|
||||
return (
|
||||
platform.system() + "-" +
|
||||
"_".join(_get_linux_distro()) + "-" +
|
||||
"_".join(distro.linux_distribution()[:2]) + "-" +
|
||||
platform.machine() + "-" +
|
||||
"_".join([x for x in platform.architecture() if x])
|
||||
)
|
||||
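
`platform.dist()` was deprecated and eventually removed (in Python 3.8), hence the switch above to the third-party `distro` package; a minimal sketch (output values are examples only):

    import distro

    # distro.linux_distribution() returns the same shape of tuple that
    # the old platform.dist() did: (name, version, codename).
    name, version, _codename = distro.linux_distribution()
    print("_".join((name, version)))   # e.g. "Ubuntu_20.04"
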
@ -321,7 +256,7 @@ def _get_package_versions_and_locations():
    for modulename in warning_imports:
        try:
            __import__(modulename)
        except ImportError:
        except (ImportError, SyntaxError):
            pass
    finally:
        # Leave suppressions for UserWarnings and global_deprecation_messages active.
@ -355,7 +290,7 @@ def _get_package_versions_and_locations():
        try:
            __import__(modulename)
            module = sys.modules[modulename]
        except ImportError:
        except (ImportError, SyntaxError):
            etype, emsg, etrace = sys.exc_info()
            trace_info = (etype, str(emsg), ([None] + traceback.extract_tb(etrace))[-1])
            packages.append( (pkgname, (None, None, trace_info)) )
@ -386,7 +321,7 @@ def _get_package_versions_and_locations():
    imported_packages = set([p.lower() for (p, _) in packages])
    extra_packages = []

    for pr_name, (pr_ver, pr_loc) in pkg_resources_vers_and_locs.iteritems():
    for pr_name, (pr_ver, pr_loc) in pkg_resources_vers_and_locs.items():
        if pr_name not in imported_packages and pr_name not in ignorable:
            extra_packages.append( (pr_name, (pr_ver, pr_loc, "according to pkg_resources")) )

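`dict.iteritems()` does not exist on Python 3; `.items()` runs on both (on Python 2 it materializes a list, which is harmless for a mapping of package versions). A tiny sketch with made-up data:

    vers_and_locs = {"foo": ("1.0", "/site-packages/foo")}   # illustrative

    for name, (version, location) in vers_and_locs.items():
        print(name, version, location)
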
@ -1,58 +1,62 @@
<html xmlns:n="http://nevow.com/ns/nevow/0.1">
<html xmlns:t="http://twistedmatrix.com/ns/twisted.web.template/0.1">

  <head>
    <title>Tahoe-LAFS - File Download Status</title>
    <link href="/tahoe.css" rel="stylesheet" type="text/css"/>
    <link href="/icon.png" rel="shortcut icon" />
    <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
  </head>

  <body>

    <h1>File Download Status</h1>
    <h1>File Download Status</h1>

    <ul>
      <li>Started: <span n:render="started"/></li>
      <li>Storage Index: <span n:render="si"/></li>
      <li>Helper?: <span n:render="helper"/></li>
      <li>Total Size: <span n:render="total_size"/></li>
      <li>Progress: <span n:render="progress"/></li>
      <li>Status: <span n:render="status"/></li>
    </ul>

    <div n:render="events"></div>

    <div n:render="results">
      <h2>Download Results</h2>
      <ul>
        <li n:render="servers_used" />
        <li>Servermap: <span n:render="servermap" /></li>
        <li n:render="problems" />
        <li>Timings:</li>
        <ul>
          <li>File Size: <span n:render="string" n:data="file_size" /> bytes</li>
          <li>Total: <span n:render="time" n:data="time_total" />
              (<span n:render="rate" n:data="rate_total" />)</li>
          <ul>
            <li>Peer Selection: <span n:render="time" n:data="time_peer_selection" /></li>
            <li>UEB Fetch: <span n:render="time" n:data="time_uri_extension" /></li>
            <li>Hashtree Fetch: <span n:render="time" n:data="time_hashtrees" /></li>
            <li>Segment Fetch: <span n:render="time" n:data="time_segments" />
                (<span n:render="rate" n:data="rate_segments" />)</li>
            <ul>
              <li>Cumulative Fetching: <span n:render="time" n:data="time_cumulative_fetch" />
                  (<span n:render="rate" n:data="rate_fetch" />)</li>
              <li>Cumulative Decoding: <span n:render="time" n:data="time_cumulative_decode" />
                  (<span n:render="rate" n:data="rate_decode" />)</li>
              <li>Cumulative Decrypting: <span n:render="time" n:data="time_cumulative_decrypt" />
                  (<span n:render="rate" n:data="rate_decrypt" />)</li>
            </ul>
            <li>Paused by client: <span n:render="time" n:data="time_paused" /></li>
          </ul>
          <li n:render="server_timings" />
      <li>Started: <t:transparent t:render="started"/></li>
      <li>Storage Index: <t:transparent t:render="si"/></li>
      <li>Helper?: <t:transparent t:render="helper"/></li>
      <li>Total Size: <t:transparent t:render="total_size"/></li>
      <li>Progress: <t:transparent t:render="progress"/></li>
      <li>Status: <t:transparent t:render="status"/></li>
    </ul>
    </ul>
    </div>

    <div>Return to the <a href="/">Welcome Page</a></div>
    <div t:render="events"></div>

    <div t:render="results">

      <h2>Download Results</h2>

      <ul>
        <li t:render="servers_used" />
        <li>Servermap: <t:transparent t:render="servermap" /></li>
        <li t:render="problems" />
        <li>Timings:</li>
        <ul>
          <li>File Size: <t:transparent t:render="file_size" /> bytes</li>
          <li>Total: <t:transparent t:render="time_total" />
              (<t:transparent t:render="rate_total" />)</li>
          <ul>
            <li>Peer Selection: <t:transparent t:render="time_peer_selection" /></li>
            <li>UEB Fetch: <t:transparent t:render="time_uri_extension" /></li>
            <li>Hashtree Fetch: <t:transparent t:render="time_hashtrees" /></li>
            <li>Segment Fetch: <t:transparent t:render="time_segments" />
                (<t:transparent t:render="rate_segments" />)</li>
            <ul>
              <li>Cumulative Fetching: <t:transparent t:render="time_cumulative_fetch" />
                  (<t:transparent t:render="rate_fetch" />)</li>
              <li>Cumulative Decoding: <t:transparent t:render="time_cumulative_decode" />
                  (<t:transparent t:render="rate_decode" />)</li>
              <li>Cumulative Decrypting: <t:transparent t:render="time_cumulative_decrypt" />
                  (<t:transparent t:render="rate_decrypt" />)</li>
            </ul>
            <li>Paused by client: <t:transparent t:render="time_paused" /></li>
          </ul>
          <li t:render="server_timings" />
        </ul>
      </ul>
    </div>

    <div>Return to the <a href="/">Welcome Page</a></div>

  </body>
</html>

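In the rewritten template, every `t:render` attribute is matched by a `@renderer` method on a `twisted.web.template.Element`. A minimal self-contained sketch of that pairing (names are illustrative, not Tahoe's):

    from twisted.web.template import (
        Element, XMLString, renderer, flattenString,
    )

    template = XMLString(
        '<div xmlns:t="http://twistedmatrix.com/ns/twisted.web.template/0.1">'
        'Status: <t:transparent t:render="status" /></div>')

    class StatusElement(Element):
        loader = template

        @renderer
        def status(self, request, tag):
            # Returning tag("...") fills the node in place; t:transparent
            # then drops its own wrapper element from the output.
            return tag("idle")

    flattenString(None, StatusElement()).addCallback(print)
    # prints (roughly): b'<div>Status: idle</div>'
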
@ -1,19 +1,23 @@

import time
from nevow import rend, url
from nevow.inevow import IRequest
from nevow import url
from twisted.web.template import (
    renderer,
    tags as T,
)
from twisted.python.failure import Failure
from twisted.internet import reactor, defer
from twisted.web import resource
from twisted.web.http import NOT_FOUND
from twisted.web.html import escape
from twisted.application import service

from allmydata.web.common import WebError, \
     get_root, get_arg, boolean_of_arg
from allmydata.web.common import (
    WebError,
    get_root,
    get_arg,
    boolean_of_arg,
)

MINUTE = 60
HOUR = 60*MINUTE
@ -21,13 +25,16 @@ DAY = 24*HOUR

(MONITOR, RENDERER, WHEN_ADDED) = range(3)

class OphandleTable(rend.Page, service.Service):
class OphandleTable(resource.Resource, service.Service):
    """Renders /operations/%d."""

    name = "operations"

    UNCOLLECTED_HANDLE_LIFETIME = 4*DAY
    COLLECTED_HANDLE_LIFETIME = 1*DAY

    def __init__(self, clock=None):
        super(OphandleTable, self).__init__()
        # both of these are indexed by ophandle
        self.handles = {} # tuple of (monitor, renderer, when_added)
        self.timers = {}
@ -45,12 +52,17 @@ class OphandleTable(rend.Page, service.Service):
        del self.timers
        return service.Service.stopService(self)

    def add_monitor(self, ctx, monitor, renderer):
        ophandle = get_arg(ctx, "ophandle")
    def add_monitor(self, req, monitor, renderer):
        """
        :param allmydata.webish.MyRequest req:
        :param allmydata.monitor.Monitor monitor:
        :param allmydata.web.directory.ManifestResults renderer:
        """
        ophandle = get_arg(req, "ophandle")
        assert ophandle
        now = time.time()
        self.handles[ophandle] = (monitor, renderer, now)
        retain_for = get_arg(ctx, "retain-for", None)
        retain_for = get_arg(req, "retain-for", None)
        if retain_for is not None:
            self._set_timer(ophandle, int(retain_for))
        monitor.when_done().addBoth(self._operation_complete, ophandle)
@ -67,36 +79,42 @@ class OphandleTable(rend.Page, service.Service):
        # if we already have a timer, the client must have provided the
        # retain-for= value, so don't touch it.

    def redirect_to(self, ctx):
        ophandle = get_arg(ctx, "ophandle")
    def redirect_to(self, req):
        """
        :param allmydata.webish.MyRequest req:
        """
        ophandle = get_arg(req, "ophandle")
        assert ophandle
        target = get_root(ctx) + "/operations/" + ophandle
        output = get_arg(ctx, "output")
        target = get_root(req) + "/operations/" + ophandle
        output = get_arg(req, "output")
        if output:
            target = target + "?output=%s" % output

        # XXX: We have to use nevow.url here because nevow.appserver
        # is unhappy with anything else; so this gets its own ticket.
        # https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3314
        return url.URL.fromString(target)

    def childFactory(self, ctx, name):
    def getChild(self, name, req):
        ophandle = name
        if ophandle not in self.handles:
            raise WebError("unknown/expired handle '%s'" % escape(ophandle),
                           NOT_FOUND)
        (monitor, renderer, when_added) = self.handles[ophandle]

        request = IRequest(ctx)
        t = get_arg(ctx, "t", "status")
        if t == "cancel" and request.method == "POST":
        t = get_arg(req, "t", "status")
        if t == "cancel" and req.method == "POST":
            monitor.cancel()
            # return the status anyways, but release the handle
            self._release_ophandle(ophandle)

        else:
            retain_for = get_arg(ctx, "retain-for", None)
            retain_for = get_arg(req, "retain-for", None)
            if retain_for is not None:
                self._set_timer(ophandle, int(retain_for))

        if monitor.is_finished():
            if boolean_of_arg(get_arg(ctx, "release-after-complete", "false")):
            if boolean_of_arg(get_arg(req, "release-after-complete", "false")):
                self._release_ophandle(ophandle)
            if retain_for is None:
                # this GET is collecting the ophandle, so change its timer
@ -123,6 +141,7 @@ class OphandleTable(rend.Page, service.Service):
        self.timers.pop(ophandle, None)
        self.handles.pop(ophandle, None)


class ReloadMixin(object):
    REFRESH_TIME = 1*MINUTE

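The `childFactory(ctx, name)` to `getChild(name, req)` move above is the standard nevow-to-Twisted port; a minimal sketch of the Twisted side, independent of Tahoe's classes:

    from twisted.web.resource import Resource, NoResource

    class HandleTable(Resource):
        """Illustrative stand-in for OphandleTable."""

        def __init__(self):
            Resource.__init__(self)
            self.handles = {b"123": b"finished"}   # made-up state

        def getChild(self, name, request):
            # Twisted calls this with each unmatched path segment
            # (bytes on Python 3) plus the live request object.
            if name not in self.handles:
                return NoResource("unknown/expired handle")
            return HandleStatus(self.handles[name])

    class HandleStatus(Resource):
        isLeaf = True

        def __init__(self, state):
            Resource.__init__(self)
            self._state = state

        def render_GET(self, request):
            return self._state
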
@ -1,5 +1,7 @@

import pprint, itertools, hashlib
import pprint
import itertools
import hashlib
import json
from twisted.internet import defer
from twisted.python.filepath import FilePath
@ -11,29 +13,26 @@ from twisted.web.template import (
    renderElement,
    tags,
)
from nevow import rend, tags as T
from allmydata.util import base32, idlib
from allmydata.web.common import (
    getxmlfile,
    abbreviate_time,
    abbreviate_rate,
    abbreviate_size,
    plural,
    compute_rate,
    render_time,
    MultiFormatPage,
    MultiFormatResource,
    SlotsSequenceElement,
    WebError,
)
from allmydata.interfaces import IUploadStatus, IDownloadStatus, \
    IPublishStatus, IRetrieveStatus, IServermapUpdaterStatus

class RateAndTimeMixin(object):

    def render_time(self, ctx, data):
        return abbreviate_time(data)

    def render_rate(self, ctx, data):
        return abbreviate_rate(data)
from allmydata.interfaces import (
    IUploadStatus,
    IDownloadStatus,
    IPublishStatus,
    IRetrieveStatus,
    IServermapUpdaterStatus,
)


class UploadResultsRendererMixin(Element):
@ -266,130 +265,6 @@ class UploadStatusElement(UploadResultsRendererMixin):
        return tag(self._upload_status.get_status())


class DownloadResultsRendererMixin(RateAndTimeMixin):
    # this requires a method named 'download_results'

    def render_servermap(self, ctx, data):
        d = self.download_results()
        d.addCallback(lambda res: res.servermap)
        def _render(servermap):
            if servermap is None:
                return "None"
            l = T.ul()
            for peerid in sorted(servermap.keys()):
                peerid_s = idlib.shortnodeid_b2a(peerid)
                shares_s = ",".join(["#%d" % shnum
                                     for shnum in servermap[peerid]])
                l[T.li["[%s] has share%s: %s" % (peerid_s,
                                                 plural(servermap[peerid]),
                                                 shares_s)]]
            return l
        d.addCallback(_render)
        return d

    def render_servers_used(self, ctx, data):
        d = self.download_results()
        d.addCallback(lambda res: res.servers_used)
        def _got(servers_used):
            if not servers_used:
                return ""
            peerids_s = ", ".join(["[%s]" % idlib.shortnodeid_b2a(peerid)
                                   for peerid in servers_used])
            return T.li["Servers Used: ", peerids_s]
        d.addCallback(_got)
        return d

    def render_problems(self, ctx, data):
        d = self.download_results()
        d.addCallback(lambda res: res.server_problems)
        def _got(server_problems):
            if not server_problems:
                return ""
            l = T.ul()
            for peerid in sorted(server_problems.keys()):
                peerid_s = idlib.shortnodeid_b2a(peerid)
                l[T.li["[%s]: %s" % (peerid_s, server_problems[peerid])]]
            return T.li["Server Problems:", l]
        d.addCallback(_got)
        return d

    def data_file_size(self, ctx, data):
        d = self.download_results()
        d.addCallback(lambda res: res.file_size)
        return d

    def _get_time(self, name):
        d = self.download_results()
        d.addCallback(lambda res: res.timings.get(name))
        return d

    def data_time_total(self, ctx, data):
        return self._get_time("total")

    def data_time_peer_selection(self, ctx, data):
        return self._get_time("peer_selection")

    def data_time_uri_extension(self, ctx, data):
        return self._get_time("uri_extension")

    def data_time_hashtrees(self, ctx, data):
        return self._get_time("hashtrees")

    def data_time_segments(self, ctx, data):
        return self._get_time("segments")

    def data_time_cumulative_fetch(self, ctx, data):
        return self._get_time("cumulative_fetch")

    def data_time_cumulative_decode(self, ctx, data):
        return self._get_time("cumulative_decode")

    def data_time_cumulative_decrypt(self, ctx, data):
        return self._get_time("cumulative_decrypt")

    def data_time_paused(self, ctx, data):
        return self._get_time("paused")

    def _get_rate(self, name):
        d = self.download_results()
        def _convert(r):
            file_size = r.file_size
            duration = r.timings.get(name)
            return compute_rate(file_size, duration)
        d.addCallback(_convert)
        return d

    def data_rate_total(self, ctx, data):
        return self._get_rate("total")

    def data_rate_segments(self, ctx, data):
        return self._get_rate("segments")

    def data_rate_fetch(self, ctx, data):
        return self._get_rate("cumulative_fetch")

    def data_rate_decode(self, ctx, data):
        return self._get_rate("cumulative_decode")

    def data_rate_decrypt(self, ctx, data):
        return self._get_rate("cumulative_decrypt")

    def render_server_timings(self, ctx, data):
        d = self.download_results()
        d.addCallback(lambda res: res.timings.get("fetch_per_server"))
        def _render(per_server):
            if per_server is None:
                return ""
            l = T.ul()
            for peerid in sorted(per_server.keys()):
                peerid_s = idlib.shortnodeid_b2a(peerid)
                times_s = ", ".join([abbreviate_time(t)
                                     for t in per_server[peerid]])
                l[T.li["[%s]: %s" % (peerid_s, times_s)]]
            return T.li["Per-Server Segment Fetch Response Times: ", l]
        d.addCallback(_render)
        return d

def _find_overlap(events, start_key, end_key):
    """
    given a list of event dicts, return a new list in which each event
@ -538,50 +413,85 @@ class _EventJson(Resource, object):
        return json.dumps(data, indent=1) + "\n"


class DownloadStatusPage(DownloadResultsRendererMixin, rend.Page):
    docFactory = getxmlfile("download-status.xhtml")
class DownloadStatusPage(Resource, object):
    """Renders /status/down-%d."""

    def __init__(self, data):
        rend.Page.__init__(self, data)
        self.download_status = data
        self.putChild("event_json", _EventJson(self.download_status))
    def __init__(self, download_status):
        """
        :param IDownloadStatus download_status: stats provider
        """
        super(DownloadStatusPage, self).__init__()
        self._download_status = download_status
        self.putChild("event_json", _EventJson(self._download_status))

    def render_GET(self, req):
        elem = DownloadStatusElement(self._download_status)
        return renderElement(req, elem)


class DownloadStatusElement(Element):

    loader = XMLFile(FilePath(__file__).sibling("download-status.xhtml"))

    def __init__(self, download_status):
        super(DownloadStatusElement, self).__init__()
        self._download_status = download_status

    # XXX: fun fact: the `get_results()` method which we wind up
    # invoking here (see immutable.downloader.status.DownloadStatus)
    # is unimplemented, and simply returns `None`. As a result, the
    # `results()` renderer returns an empty tag and does not invoke
    # any of the subsequent renderers. Thus we end up not displaying
    # download results on the download status page.
    #
    # See #3310: https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3310
    def download_results(self):
        return defer.maybeDeferred(self.download_status.get_results)
        return self._download_status.get_results()

    def relative_time(self, t):
    def _relative_time(self, t):
        if t is None:
            return t
        if self.download_status.first_timestamp is not None:
            return t - self.download_status.first_timestamp
        if self._download_status.first_timestamp is not None:
            return t - self._download_status.first_timestamp
        return t

    def short_relative_time(self, t):
        t = self.relative_time(t)

    def _short_relative_time(self, t):
        t = self._relative_time(t)
        if t is None:
            return ""
        return "+%.6fs" % t

    def render_timeline_link(self, ctx, data):
        from nevow import url
        return T.a(href=url.URL.fromContext(ctx).child("timeline"))["timeline"]

    def _rate_and_time(self, bytes, seconds):
        time_s = self.render_time(None, seconds)
        time_s = abbreviate_time(seconds)
        if seconds != 0:
            rate = self.render_rate(None, 1.0 * bytes / seconds)
            return T.span(title=rate)[time_s]
        return T.span[time_s]
            rate = abbreviate_rate(1.0 * bytes / seconds)
            return tags.span(time_s, title=rate)
        return tags.span(time_s)

    def render_events(self, ctx, data):
        if not self.download_status.storage_index:
            return
        srt = self.short_relative_time
        l = T.div()
    # XXX: This method is a candidate for refactoring. It renders
    # four tables from this function. Layout part of those tables
    # could be moved to download-status.xhtml.
    #
    # See #3311: https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3311
    @renderer
    def events(self, req, tag):
        if not self._download_status.get_storage_index():
            return tag

        t = T.table(align="left", class_="status-download-events")
        t[T.tr[T.th["serverid"], T.th["sent"], T.th["received"],
               T.th["shnums"], T.th["RTT"]]]
        for d_ev in self.download_status.dyhb_requests:
        srt = self._short_relative_time

        evtag = tags.div()

        # "DYHB Requests" table.
        dyhbtag = tags.table(align="left", class_="status-download-events")

        dyhbtag(tags.tr(tags.th("serverid"),
                        tags.th("sent"),
                        tags.th("received"),
                        tags.th("shnums"),
                        tags.th("RTT")))

        for d_ev in self._download_status.dyhb_requests:
            server = d_ev["server"]
            sent = d_ev["start_time"]
            shnums = d_ev["response_shnums"]
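
The `dyhbtag(...)` calls above rely on `twisted.web.template.Tag` being callable: each call appends children (or sets keyword attributes), which is what replaces nevow's `T.table[...]` indexing. A tiny sketch with made-up row data:

    from twisted.web.template import tags, flattenString

    table = tags.table(class_="status-download-events")   # attribute
    table(tags.tr(tags.th("serverid"), tags.th("RTT")))   # header row

    for name, rtt in [("xgru5adv", "+1.5s")]:             # made-up row
        table(tags.tr(tags.td(name), tags.td(rtt)))

    flattenString(None, table).addCallback(print)
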
@ -591,20 +501,32 @@ class DownloadStatusPage(DownloadResultsRendererMixin, rend.Page):
            rtt = received - sent
            if not shnums:
                shnums = ["-"]
            t[T.tr(style="background: %s" % _color(server))[
                [T.td[server.get_name()], T.td[srt(sent)], T.td[srt(received)],
                 T.td[",".join([str(shnum) for shnum in shnums])],
                 T.td[self.render_time(None, rtt)],
                 ]]]

        l[T.h2["DYHB Requests:"], t]
        l[T.br(clear="all")]
            dyhbtag(tags.tr(style="background: %s" % _color(server))(
                (tags.td(server.get_name()),
                 tags.td(srt(sent)),
                 tags.td(srt(received)),
                 tags.td(",".join([str(shnum) for shnum in shnums])),
                 tags.td(abbreviate_time(rtt)),
                 )))

        t = T.table(align="left",class_="status-download-events")
        t[T.tr[T.th["range"], T.th["start"], T.th["finish"], T.th["got"],
               T.th["time"], T.th["decrypttime"], T.th["pausedtime"],
               T.th["speed"]]]
        for r_ev in self.download_status.read_events:
        evtag(tags.h2("DYHB Requests:"), dyhbtag)
        evtag(tags.br(clear="all"))

        # "Read Events" table.
        readtag = tags.table(align="left",class_="status-download-events")

        readtag(tags.tr((
            tags.th("range"),
            tags.th("start"),
            tags.th("finish"),
            tags.th("got"),
            tags.th("time"),
            tags.th("decrypttime"),
            tags.th("pausedtime"),
            tags.th("speed"))))

        for r_ev in self._download_status.read_events:
            start = r_ev["start"]
            length = r_ev["length"]
            bytes = r_ev["bytes_returned"]
@ -614,25 +536,38 @@ class DownloadStatusPage(DownloadResultsRendererMixin, rend.Page):
            speed, rtt = "",""
            if r_ev["finish_time"] is not None:
                rtt = r_ev["finish_time"] - r_ev["start_time"] - r_ev["paused_time"]
                speed = self.render_rate(None, compute_rate(bytes, rtt))
                rtt = self.render_time(None, rtt)
            paused = self.render_time(None, r_ev["paused_time"])
                speed = abbreviate_rate(compute_rate(bytes, rtt))
                rtt = abbreviate_time(rtt)
            paused = abbreviate_time(r_ev["paused_time"])

            t[T.tr[T.td["[%d:+%d]" % (start, length)],
                   T.td[srt(r_ev["start_time"])], T.td[srt(r_ev["finish_time"])],
                   T.td[bytes], T.td[rtt],
                   T.td[decrypt_time], T.td[paused],
                   T.td[speed],
                   ]]
            readtag(tags.tr(
                tags.td("[%d:+%d]" % (start, length)),
                tags.td(srt(r_ev["start_time"])),
                tags.td(srt(r_ev["finish_time"])),
                tags.td(str(bytes)),
                tags.td(rtt),
                tags.td(decrypt_time),
                tags.td(paused),
                tags.td(speed),
            ))

        l[T.h2["Read Events:"], t]
        l[T.br(clear="all")]
        evtag(tags.h2("Read Events:"), readtag)
        evtag(tags.br(clear="all"))

        t = T.table(align="left",class_="status-download-events")
        t[T.tr[T.th["segnum"], T.th["start"], T.th["active"], T.th["finish"],
               T.th["range"],
               T.th["decodetime"], T.th["segtime"], T.th["speed"]]]
        for s_ev in self.download_status.segment_events:
        # "Segment Events" table.
        segtag = tags.table(align="left",class_="status-download-events")

        segtag(tags.tr(
            tags.th("segnum"),
            tags.th("start"),
            tags.th("active"),
            tags.th("finish"),
            tags.th("range"),
            tags.th("decodetime"),
            tags.th("segtime"),
            tags.th("speed")))

        for s_ev in self._download_status.segment_events:
            range_s = "-"
            segtime_s = "-"
            speed = "-"
@ -640,10 +575,10 @@ class DownloadStatusPage(DownloadResultsRendererMixin, rend.Page):
            if s_ev["finish_time"] is not None:
                if s_ev["success"]:
                    segtime = s_ev["finish_time"] - s_ev["active_time"]
                    segtime_s = self.render_time(None, segtime)
                    segtime_s = abbreviate_time(segtime)
                    seglen = s_ev["segment_length"]
                    range_s = "[%d:+%d]" % (s_ev["segment_start"], seglen)
                    speed = self.render_rate(None, compute_rate(seglen, segtime))
                    speed = abbreviate_rate(compute_rate(seglen, segtime))
                    decode_time = self._rate_and_time(seglen, s_ev["decode_time"])
                else:
                    # error
@ -652,76 +587,213 @@ class DownloadStatusPage(DownloadResultsRendererMixin, rend.Page):
                # not finished yet
                pass

            t[T.tr[T.td["seg%d" % s_ev["segment_number"]],
                   T.td[srt(s_ev["start_time"])],
                   T.td[srt(s_ev["active_time"])],
                   T.td[srt(s_ev["finish_time"])],
                   T.td[range_s],
                   T.td[decode_time],
                   T.td[segtime_s], T.td[speed]]]
            segtag(tags.tr(
                tags.td("seg%d" % s_ev["segment_number"]),
                tags.td(srt(s_ev["start_time"])),
                tags.td(srt(s_ev["active_time"])),
                tags.td(srt(s_ev["finish_time"])),
                tags.td(range_s),
                tags.td(decode_time),
                tags.td(segtime_s),
                tags.td(speed)))

        l[T.h2["Segment Events:"], t]
        l[T.br(clear="all")]
        t = T.table(align="left",class_="status-download-events")
        t[T.tr[T.th["serverid"], T.th["shnum"], T.th["range"],
               T.th["txtime"], T.th["rxtime"],
               T.th["received"], T.th["RTT"]]]
        for r_ev in self.download_status.block_requests:
        evtag(tags.h2("Segment Events:"), segtag)
        evtag(tags.br(clear="all"))

        # "Requests" table.
        reqtab = tags.table(align="left",class_="status-download-events")

        reqtab(tags.tr(
            tags.th("serverid"),
            tags.th("shnum"),
            tags.th("range"),
            tags.th("txtime"),
            tags.th("rxtime"),
            tags.th("received"),
            tags.th("RTT")))

        for r_ev in self._download_status.block_requests:
            server = r_ev["server"]
            rtt = None
            if r_ev["finish_time"] is not None:
                rtt = r_ev["finish_time"] - r_ev["start_time"]
            color = _color(server)
            t[T.tr(style="background: %s" % color)[
                T.td[server.get_name()], T.td[r_ev["shnum"]],
                T.td["[%d:+%d]" % (r_ev["start"], r_ev["length"])],
                T.td[srt(r_ev["start_time"])], T.td[srt(r_ev["finish_time"])],
                T.td[r_ev["response_length"] or ""],
                T.td[self.render_time(None, rtt)],
            ]]
            reqtab(tags.tr(style="background: %s" % color)
                   (
                       tags.td(server.get_name()),
                       tags.td(str(r_ev["shnum"])),
                       tags.td("[%d:+%d]" % (r_ev["start"], r_ev["length"])),
                       tags.td(srt(r_ev["start_time"])),
                       tags.td(srt(r_ev["finish_time"])),
                       tags.td(str(r_ev["response_length"]) or ""),
                       tags.td(abbreviate_time(rtt)),
                   ))

        l[T.h2["Requests:"], t]
        l[T.br(clear="all")]
        evtag(tags.h2("Requests:"), reqtab)
        evtag(tags.br(clear="all"))

        return l
        return evtag

    def render_results(self, ctx, data):
        d = self.download_results()
        def _got_results(results):
            if results:
                return ctx.tag
            return ""
        d.addCallback(_got_results)
        return d
    @renderer
    def results(self, req, tag):
        if self.download_results():
            return tag
        return ""

    def render_started(self, ctx, data):
        started_s = render_time(data.get_started())
        return started_s + " (%s)" % data.get_started()
    @renderer
    def started(self, req, tag):
        started_s = render_time(self._download_status.get_started())
        return tag(started_s + " (%s)" % self._download_status.get_started())

    def render_si(self, ctx, data):
        si_s = base32.b2a_or_none(data.get_storage_index())
    @renderer
    def si(self, req, tag):
        si_s = base32.b2a_or_none(self._download_status.get_storage_index())
        if si_s is None:
            si_s = "(None)"
        return si_s
        return tag(si_s)

    def render_helper(self, ctx, data):
        return {True: "Yes",
                False: "No"}[data.using_helper()]
    @renderer
    def helper(self, req, tag):
        return tag({True: "Yes",
                    False: "No"}[self._download_status.using_helper()])

    def render_total_size(self, ctx, data):
        size = data.get_size()
    @renderer
    def total_size(self, req, tag):
        size = self._download_status.get_size()
        if size is None:
            return "(unknown)"
        return size
        return tag(str(size))

    def render_progress(self, ctx, data):
        progress = data.get_progress()
    @renderer
    def progress(self, req, tag):
        progress = self._download_status.get_progress()
        # TODO: make an ascii-art bar
        return "%.1f%%" % (100.0 * progress)
        return tag("%.1f%%" % (100.0 * progress))

    def render_status(self, ctx, data):
        return data.get_status()
    @renderer
    def status(self, req, tag):
        return tag(self._download_status.get_status())

    @renderer
    def servers_used(self, req, tag):
        servers_used = self.download_results().servers_used
        if not servers_used:
            return ""
        peerids_s = ", ".join(["[%s]" % idlib.shortnodeid_b2a(peerid)
                               for peerid in servers_used])
        return tags.li("Servers Used: ", peerids_s)

    @renderer
    def servermap(self, req, tag):
        servermap = self.download_results().servermap
        if not servermap:
            return tag("None")
        ul = tags.ul()
        for peerid in sorted(servermap.keys()):
            peerid_s = idlib.shortnodeid_b2a(peerid)
            shares_s = ",".join(["#%d" % shnum
                                 for shnum in servermap[peerid]])
            ul(tags.li("[%s] has share%s: %s" % (peerid_s,
                                                 plural(servermap[peerid]),
                                                 shares_s)))
        return ul

    @renderer
    def problems(self, req, tag):
        server_problems = self.download_results().server_problems
        if not server_problems:
            return ""
        ul = tags.ul()
        for peerid in sorted(server_problems.keys()):
            peerid_s = idlib.shortnodeid_b2a(peerid)
            ul(tags.li("[%s]: %s" % (peerid_s, server_problems[peerid])))
        return tags.li("Server Problems:", ul)

    @renderer
    def file_size(self, req, tag):
        return tag(str(self.download_results().file_size))

    def _get_time(self, name):
        if self.download_results().timings:
            return self.download_results().timings.get(name)
        return None

    @renderer
    def time_total(self, req, tag):
        return tag(str(self._get_time("total")))

    @renderer
    def time_peer_selection(self, req, tag):
        return tag(str(self._get_time("peer_selection")))

    @renderer
    def time_uri_extension(self, req, tag):
        return tag(str(self._get_time("uri_extension")))

    @renderer
    def time_hashtrees(self, req, tag):
        return tag(str(self._get_time("hashtrees")))

    @renderer
    def time_segments(self, req, tag):
        return tag(str(self._get_time("segments")))

    @renderer
    def time_cumulative_fetch(self, req, tag):
        return tag(str(self._get_time("cumulative_fetch")))

    @renderer
    def time_cumulative_decode(self, req, tag):
        return tag(str(self._get_time("cumulative_decode")))

    @renderer
    def time_cumulative_decrypt(self, req, tag):
        return tag(str(self._get_time("cumulative_decrypt")))

    @renderer
    def time_paused(self, req, tag):
        return tag(str(self._get_time("paused")))

    def _get_rate(self, name):
        r = self.download_results()
        file_size = r.file_size
        duration = None
        if r.timings:
            duration = r.timings.get(name)
        return compute_rate(file_size, duration)

    @renderer
    def rate_total(self, req, tag):
        return tag(str(self._get_rate("total")))

    @renderer
    def rate_segments(self, req, tag):
        return tag(str(self._get_rate("segments")))

    @renderer
    def rate_fetch(self, req, tag):
        return tag(str(self._get_rate("cumulative_fetch")))

    @renderer
    def rate_decode(self, req, tag):
        return tag(str(self._get_rate("cumulative_decode")))

    @renderer
    def rate_decrypt(self, req, tag):
        return tag(str(self._get_rate("cumulative_decrypt")))

    @renderer
    def server_timings(self, req, tag):
        per_server = self._get_time("fetch_per_server")
        if per_server is None:
            return ""
        ul = tags.ul()
        for peerid in sorted(per_server.keys()):
            peerid_s = idlib.shortnodeid_b2a(peerid)
            times_s = ", ".join([abbreviate_time(t)
                                 for t in per_server[peerid]])
            ul(tags.li("[%s]: %s" % (peerid_s, times_s)))
        return tags.li("Per-Server Segment Fetch Response Times: ", ul)


class RetrieveStatusPage(MultiFormatResource):
@ -1166,14 +1238,21 @@ def marshal_json(s):
        return item


class Status(MultiFormatPage):
    docFactory = getxmlfile("status.xhtml")
    addSlash = True
class Status(MultiFormatResource):
    """Renders /status page."""

    def __init__(self, history):
        rend.Page.__init__(self, history)
        """
        :param allmydata.history.History history: provides operation statuses.
        """
        super(Status, self).__init__()
        self.history = history

    def render_HTML(self, req):
        elem = StatusElement(self._get_active_operations(),
                             self._get_recent_operations())
        return renderElement(req, elem)

    def render_JSON(self, req):
        # modern browsers now render this instead of forcing downloads
        req.setHeader("content-type", "application/json")
@ -1189,97 +1268,23 @@ class Status(MultiFormatPage):

        return json.dumps(data, indent=1) + "\n"

    def _get_all_statuses(self):
        h = self.history
        return itertools.chain(h.list_all_upload_statuses(),
                               h.list_all_download_statuses(),
                               h.list_all_mapupdate_statuses(),
                               h.list_all_publish_statuses(),
                               h.list_all_retrieve_statuses(),
                               h.list_all_helper_statuses(),
                               )
    def getChild(self, path, request):
        # The "if (path is empty) return self" line should handle
        # trailing slash in request path.
        #
        # Twisted Web's documentation says this: "If the URL ends in a
        # slash, for example ``http://example.com/foo/bar/`` , the
        # final URL segment will be an empty string. Resources can
        # thus know if they were requested with or without a final
        # slash."
        if not path and request.postpath != ['']:
            return self

    def data_active_operations(self, ctx, data):
        return self._get_active_operations()

    def _get_active_operations(self):
        active = [s
                  for s in self._get_all_statuses()
                  if s.get_active()]
        active.sort(lambda a, b: cmp(a.get_started(), b.get_started()))
        active.reverse()
        return active

    def data_recent_operations(self, ctx, data):
        return self._get_recent_operations()

    def _get_recent_operations(self):
        recent = [s
                  for s in self._get_all_statuses()
                  if not s.get_active()]
        recent.sort(lambda a, b: cmp(a.get_started(), b.get_started()))
        recent.reverse()
        return recent

    def render_row(self, ctx, data):
        s = data

        started_s = render_time(s.get_started())
        ctx.fillSlots("started", started_s)

        si_s = base32.b2a_or_none(s.get_storage_index())
        if si_s is None:
            si_s = "(None)"
        ctx.fillSlots("si", si_s)
        ctx.fillSlots("helper", {True: "Yes",
                                 False: "No"}[s.using_helper()])

        size = s.get_size()
        if size is None:
            size = "(unknown)"
        elif isinstance(size, (int, long, float)):
            size = abbreviate_size(size)
        ctx.fillSlots("total_size", size)

        progress = data.get_progress()
        if IUploadStatus.providedBy(data):
            link = "up-%d" % data.get_counter()
            ctx.fillSlots("type", "upload")
            # TODO: make an ascii-art bar
            (chk, ciphertext, encandpush) = progress
            progress_s = ("hash: %.1f%%, ciphertext: %.1f%%, encode: %.1f%%" %
                          ( (100.0 * chk),
                            (100.0 * ciphertext),
                            (100.0 * encandpush) ))
            ctx.fillSlots("progress", progress_s)
        elif IDownloadStatus.providedBy(data):
            link = "down-%d" % data.get_counter()
            ctx.fillSlots("type", "download")
            ctx.fillSlots("progress", "%.1f%%" % (100.0 * progress))
        elif IPublishStatus.providedBy(data):
            link = "publish-%d" % data.get_counter()
            ctx.fillSlots("type", "publish")
            ctx.fillSlots("progress", "%.1f%%" % (100.0 * progress))
        elif IRetrieveStatus.providedBy(data):
            ctx.fillSlots("type", "retrieve")
            link = "retrieve-%d" % data.get_counter()
            ctx.fillSlots("progress", "%.1f%%" % (100.0 * progress))
        else:
            assert IServermapUpdaterStatus.providedBy(data)
            ctx.fillSlots("type", "mapupdate %s" % data.get_mode())
            link = "mapupdate-%d" % data.get_counter()
            ctx.fillSlots("progress", "%.1f%%" % (100.0 * progress))
        ctx.fillSlots("status", T.a(href=link)[s.get_status()])
        return ctx.tag

    def childFactory(self, ctx, name):
        h = self.history
        try:
            stype, count_s = name.split("-")
            stype, count_s = path.split("-")
        except ValueError:
            raise RuntimeError(
                "no - in '{}'".format(name)
            )
            raise WebError("no '-' in '{}'".format(path))
        count = int(count_s)
        if stype == "up":
            for s in itertools.chain(h.list_all_upload_statuses(),
@ -1305,6 +1310,109 @@ class Status(MultiFormatPage):
                if s.get_counter() == count:
                    return RetrieveStatusPage(s)

    def _get_all_statuses(self):
        h = self.history
        return itertools.chain(h.list_all_upload_statuses(),
                               h.list_all_download_statuses(),
                               h.list_all_mapupdate_statuses(),
                               h.list_all_publish_statuses(),
                               h.list_all_retrieve_statuses(),
                               h.list_all_helper_statuses(),
                               )

    def _get_active_operations(self):
        active = [s
                  for s in self._get_all_statuses()
                  if s.get_active()]
        active.sort(lambda a, b: cmp(a.get_started(), b.get_started()))
        active.reverse()
        return active

    def _get_recent_operations(self):
        recent = [s
                  for s in self._get_all_statuses()
                  if not s.get_active()]
        recent.sort(lambda a, b: cmp(a.get_started(), b.get_started()))
        recent.reverse()
        return recent


class StatusElement(Element):

    loader = XMLFile(FilePath(__file__).sibling("status.xhtml"))

    def __init__(self, active, recent):
        super(StatusElement, self).__init__()
        self._active = active
        self._recent = recent

    @renderer
    def active_operations(self, req, tag):
        active = [self.get_op_state(op) for op in self._active]
        return SlotsSequenceElement(tag, active)

    @renderer
    def recent_operations(self, req, tag):
        recent = [self.get_op_state(op) for op in self._recent]
        return SlotsSequenceElement(tag, recent)

    @staticmethod
    def get_op_state(op):
        result = dict()

        started_s = render_time(op.get_started())
        result["started"] = started_s

        si_s = base32.b2a_or_none(op.get_storage_index())
        if si_s is None:
            si_s = "(None)"

        result["si"] = si_s
        result["helper"] = {True: "Yes", False: "No"}[op.using_helper()]

        size = op.get_size()
        if size is None:
            size = "(unknown)"
        elif isinstance(size, (int, long, float)):
            size = abbreviate_size(size)

        result["total_size"] = size

        progress = op.get_progress()
        if IUploadStatus.providedBy(op):
            link = "up-%d" % op.get_counter()
            result["type"] = "upload"
            # TODO: make an ascii-art bar
            (chk, ciphertext, encandpush) = progress
            progress_s = ("hash: %.1f%%, ciphertext: %.1f%%, encode: %.1f%%" %
                          ((100.0 * chk),
                           (100.0 * ciphertext),
                           (100.0 * encandpush)))
            result["progress"] = progress_s
        elif IDownloadStatus.providedBy(op):
            link = "down-%d" % op.get_counter()
            result["type"] = "download"
            result["progress"] = "%.1f%%" % (100.0 * progress)
        elif IPublishStatus.providedBy(op):
            link = "publish-%d" % op.get_counter()
            result["type"] = "publish"
            result["progress"] = "%.1f%%" % (100.0 * progress)
        elif IRetrieveStatus.providedBy(op):
            result["type"] = "retrieve"
            link = "retrieve-%d" % op.get_counter()
            result["progress"] = "%.1f%%" % (100.0 * progress)
        else:
            assert IServermapUpdaterStatus.providedBy(op)
            result["type"] = "mapupdate %s" % op.get_mode()
            link = "mapupdate-%d" % op.get_counter()
            result["progress"] = "%.1f%%" % (100.0 * progress)

        result["status"] = tags.a(op.get_status(),
                                  href="/status/{}".format(link))

        return result


# Render "/helper_status" page.
class HelperStatus(MultiFormatResource):

|
||||
<html xmlns:n="http://nevow.com/ns/nevow/0.1">
|
||||
<html xmlns:t="http://twistedmatrix.com/ns/twisted.web.template/0.1">
|
||||
<head>
|
||||
<title>Tahoe-LAFS - Recent and Active Operations</title>
|
||||
<link href="/tahoe.css" rel="stylesheet" type="text/css"/>
|
||||
@ -11,8 +11,8 @@
|
||||
|
||||
|
||||
<h2>Active Operations:</h2>
|
||||
<table align="left" class="table-headings-top" n:render="sequence" n:data="active_operations">
|
||||
<tr n:pattern="header">
|
||||
<table align="left" class="table-headings-top" t:render="active_operations">
|
||||
<tr t:render="header">
|
||||
<th>Type</th>
|
||||
<th>Storage Index</th>
|
||||
<th>Helper?</th>
|
||||
@ -20,21 +20,21 @@
|
||||
<th>Progress</th>
|
||||
<th>Status</th>
|
||||
</tr>
|
||||
<tr n:pattern="item" n:render="row">
|
||||
<td><n:slot name="type"/></td>
|
||||
<td><n:slot name="si"/></td>
|
||||
<td><n:slot name="helper"/></td>
|
||||
<td><n:slot name="total_size"/></td>
|
||||
<td><n:slot name="progress"/></td>
|
||||
<td><n:slot name="status"/></td>
|
||||
<tr t:render="item">
|
||||
<td><t:slot name="type"/></td>
|
||||
<td><t:slot name="si"/></td>
|
||||
<td><t:slot name="helper"/></td>
|
||||
<td><t:slot name="total_size"/></td>
|
||||
<td><t:slot name="progress"/></td>
|
||||
<td><t:slot name="status"/></td>
|
||||
</tr>
|
||||
<tr n:pattern="empty"><td>No active operations!</td></tr>
|
||||
<tr t:render="empty"><td>No active operations!</td></tr>
|
||||
</table>
|
||||
<br clear="all" />
|
||||
|
||||
<h2>Recent Operations:</h2>
|
||||
<table align="left" class="table-headings-top" n:render="sequence" n:data="recent_operations">
|
||||
<tr n:pattern="header">
|
||||
<table align="left" class="table-headings-top" t:render="recent_operations">
|
||||
<tr t:render="header">
|
||||
<th>Started</th>
|
||||
<th>Type</th>
|
||||
<th>Storage Index</th>
|
||||
@ -43,16 +43,16 @@
|
||||
<th>Progress</th>
|
||||
<th>Status</th>
|
||||
</tr>
|
||||
<tr n:pattern="item" n:render="row">
|
||||
<td><n:slot name="started"/></td>
|
||||
<td><n:slot name="type"/></td>
|
||||
<td><n:slot name="si"/></td>
|
||||
<td><n:slot name="helper"/></td>
|
||||
<td><n:slot name="total_size"/></td>
|
||||
<td><n:slot name="progress"/></td>
|
||||
<td><n:slot name="status"/></td>
|
||||
<tr t:render="item">
|
||||
<td><t:slot name="started"/></td>
|
||||
<td><t:slot name="type"/></td>
|
||||
<td><t:slot name="si"/></td>
|
||||
<td><t:slot name="helper"/></td>
|
||||
<td><t:slot name="total_size"/></td>
|
||||
<td><t:slot name="progress"/></td>
|
||||
<td><t:slot name="status"/></td>
|
||||
</tr>
|
||||
<tr n:pattern="empty"><td>No recent operations!</td></tr>
|
||||
<tr t:render="empty"><td>No recent operations!</td></tr>
|
||||
</table>
|
||||
<br clear="all" />
|
||||
|
||||
|
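Each `t:slot` above is filled from the per-operation dicts built by `get_op_state()`; `SlotsSequenceElement` is a Tahoe helper, so here is a minimal sketch of the same slot-filling idea using only `twisted.web.template` (made-up row data):

    from twisted.web.template import (
        Element, XMLString, renderer, flattenString,
    )

    template = XMLString(
        '<table xmlns:t="http://twistedmatrix.com/ns/twisted.web.template/0.1">'
        '<tr t:render="rows"><td><t:slot name="type"/></td>'
        '<td><t:slot name="progress"/></td></tr></table>')

    class OperationsTable(Element):
        loader = template

        @renderer
        def rows(self, request, tag):
            # One cloned row per operation; fillSlots() plugs each dict
            # value into the matching t:slot.
            for op in [{"type": "upload", "progress": "12.5%"}]:
                yield tag.clone().fillSlots(**op)

    flattenString(None, OperationsTable()).addCallback(print)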