mirror of https://github.com/tahoe-lafs/tahoe-lafs.git
synced 2024-12-19 04:57:54 +00:00

Merge remote-tracking branch 'origin/master' into 3351.spans-python-3

commit e0d3e46683

1 .github/workflows/ci.yml vendored
@@ -72,6 +72,7 @@ jobs:
       matrix:
         os:
           - macos-latest
           - windows-latest
         python-version:
           - 2.7
@@ -358,7 +358,10 @@ def bob(reactor, temp_dir, introducer_furl, flog_gatherer, storage_nodes, request):

 @pytest.fixture(scope='session')
+@pytest.mark.skipif(sys.platform.startswith('win'),
+                    reason='Tor tests are unstable on Windows')
 def chutney(reactor, temp_dir):

     chutney_dir = join(temp_dir, 'chutney')
     mkdir(chutney_dir)
@@ -406,7 +409,10 @@ def chutney(reactor, temp_dir):

 @pytest.fixture(scope='session')
+@pytest.mark.skipif(sys.platform.startswith('win'),
+                    reason='Tor tests are unstable on Windows')
 def tor_network(reactor, temp_dir, chutney, request):

     # this is the actual "chutney" script at the root of a chutney checkout
     chutney_dir = chutney
     chut = join(chutney_dir, 'chutney')
@@ -10,12 +10,21 @@ from six.moves import StringIO
 from twisted.internet.protocol import ProcessProtocol
 from twisted.internet.error import ProcessExitedAlready, ProcessDone
 from twisted.internet.defer import inlineCallbacks, Deferred

 import pytest
 import pytest_twisted

 import util

 # see "conftest.py" for the fixtures (e.g. "tor_network")

+# XXX: Integration tests that involve Tor do not run reliably on
+# Windows. They are skipped for now, in order to reduce CI noise.
+#
+# https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3347
+if sys.platform.startswith('win'):
+    pytest.skip('Skipping Tor tests on Windows', allow_module_level=True)

 @pytest_twisted.inlineCallbacks
 def test_onion_service_storage(reactor, request, temp_dir, flog_gatherer, tor_network, tor_introducer_furl):
     yield _create_anonymous_node(reactor, 'carol', 8008, request, temp_dir, flog_gatherer, tor_network, tor_introducer_furl)
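The two pytest skip idioms used in this change, sketched standalone (hypothetical module and test names, not part of the commit):

    # sketch of the skip idioms above; illustrative only
    import sys

    import pytest

    # Idiom 1: skip every test in the module at collection time (test_tor.py).
    if sys.platform.startswith('win'):
        pytest.skip('Skipping Tor tests on Windows', allow_module_level=True)

    # Idiom 2: skip one test or fixture via a decorator; skipif takes the
    # explanation as the `reason` keyword argument.
    @pytest.mark.skipif(sys.platform.startswith('win'),
                        reason='Tor tests are unstable on Windows')
    def test_example():
        pass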
@@ -1,4 +1,14 @@
 allmydata.test.mutable.test_exceptions.Exceptions.test_repr
+allmydata.test.test_abbreviate.Abbreviate.test_abbrev_time_1s
+allmydata.test.test_abbreviate.Abbreviate.test_abbrev_time_25s
+allmydata.test.test_abbreviate.Abbreviate.test_abbrev_time_day
+allmydata.test.test_abbreviate.Abbreviate.test_abbrev_time_future_5_minutes
+allmydata.test.test_abbreviate.Abbreviate.test_abbrev_time_hours
+allmydata.test.test_abbreviate.Abbreviate.test_abbrev_time_month
+allmydata.test.test_abbreviate.Abbreviate.test_abbrev_time_year
+allmydata.test.test_abbreviate.Abbreviate.test_parse_space
+allmydata.test.test_abbreviate.Abbreviate.test_space
+allmydata.test.test_abbreviate.Abbreviate.test_time
 allmydata.test.test_base32.Base32.test_a2b
 allmydata.test.test_base32.Base32.test_a2b_b2a_match_Pythons
 allmydata.test.test_base32.Base32.test_b2a
@@ -18,11 +28,33 @@ allmydata.test.test_deferredutil.DeferredUtilTests.test_failure
 allmydata.test.test_deferredutil.DeferredUtilTests.test_gather_results
 allmydata.test.test_deferredutil.DeferredUtilTests.test_success
 allmydata.test.test_deferredutil.DeferredUtilTests.test_wait_for_delayed_calls
+allmydata.test.test_hashtree.Complete.test_create
+allmydata.test.test_hashtree.Complete.test_dump
+allmydata.test.test_hashtree.Complete.test_needed_hashes
+allmydata.test.test_hashtree.Incomplete.test_check
+allmydata.test.test_hashtree.Incomplete.test_create
+allmydata.test.test_hashtree.Incomplete.test_depth_of
+allmydata.test.test_hashtree.Incomplete.test_large
+allmydata.test.test_hashtree.Incomplete.test_needed_hashes
+allmydata.test.test_hashutil.HashUtilTests.test_chk
+allmydata.test.test_hashutil.HashUtilTests.test_hashers
+allmydata.test.test_hashutil.HashUtilTests.test_known_answers
+allmydata.test.test_hashutil.HashUtilTests.test_random_key
+allmydata.test.test_hashutil.HashUtilTests.test_sha256d
+allmydata.test.test_hashutil.HashUtilTests.test_sha256d_truncated
+allmydata.test.test_hashutil.HashUtilTests.test_timing_safe_compare
 allmydata.test.test_humanreadable.HumanReadable.test_repr
+allmydata.test.test_netstring.Netstring.test_encode
+allmydata.test.test_netstring.Netstring.test_extra
+allmydata.test.test_netstring.Netstring.test_nested
+allmydata.test.test_netstring.Netstring.test_split
 allmydata.test.test_observer.Observer.test_lazy_oneshot
 allmydata.test.test_observer.Observer.test_observerlist
 allmydata.test.test_observer.Observer.test_oneshot
 allmydata.test.test_observer.Observer.test_oneshot_fireagain
+allmydata.test.test_pipeline.Pipeline.test_basic
+allmydata.test.test_pipeline.Pipeline.test_errors
+allmydata.test.test_pipeline.Pipeline.test_errors2
 allmydata.test.test_python3.Python3PortingEffortTests.test_finished_porting
 allmydata.test.test_python3.Python3PortingEffortTests.test_ported_modules_distinct
 allmydata.test.test_python3.Python3PortingEffortTests.test_ported_modules_exist
@@ -34,3 +66,11 @@ allmydata.test.test_spans.ByteSpans.test_random
 allmydata.test.test_spans.StringSpans.test_basic
 allmydata.test.test_spans.StringSpans.test_random
 allmydata.test.test_spans.StringSpans.test_test
+allmydata.test.test_time_format.TimeFormat.test_epoch
+allmydata.test.test_time_format.TimeFormat.test_epoch_in_London
+allmydata.test.test_time_format.TimeFormat.test_format_delta
+allmydata.test.test_time_format.TimeFormat.test_format_time
+allmydata.test.test_time_format.TimeFormat.test_format_time_y2038
+allmydata.test.test_time_format.TimeFormat.test_iso_utc
+allmydata.test.test_time_format.TimeFormat.test_parse_date
+allmydata.test.test_time_format.TimeFormat.test_parse_duration
0 newsfragments/3320.minor Normal file
0 newsfragments/3344.minor Normal file
0 newsfragments/3346.minor Normal file
0 newsfragments/3353.minor Normal file
1 newsfragments/3354.minor Normal file
@@ -0,0 +1 @@
@@ -1,7 +1,4 @@
 # -*- test-case-name: allmydata.test.test_hashtree -*-

-from allmydata.util import mathutil # from the pyutil library
-
 """
 Read and write chunks from files.
@@ -50,6 +47,17 @@ or implied. It probably won't make your computer catch on fire,
 or eat your children, but it might. Use at your own risk.
 """

+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min  # noqa: F401
+
+from allmydata.util import mathutil # from the pyutil library
+
 from allmydata.util import base32
 from allmydata.util.hashutil import tagged_hash, tagged_pair_hash
@@ -170,9 +178,10 @@ def depth_of(i):
     return mathutil.log_floor(i+1, 2)

 def empty_leaf_hash(i):
-    return tagged_hash('Merkle tree empty leaf', "%d" % i)
+    return tagged_hash(b'Merkle tree empty leaf', b"%d" % i)

 def pair_hash(a, b):
-    return tagged_pair_hash('Merkle tree internal node', a, b)
+    return tagged_pair_hash(b'Merkle tree internal node', a, b)

 class HashTree(CompleteBinaryTreeMixin, list):
     """
@@ -215,7 +224,7 @@ class HashTree(CompleteBinaryTreeMixin, list):
         while len(rows[-1]) != 1:
             last = rows[-1]
             rows += [[pair_hash(last[2*i], last[2*i+1])
-                      for i in xrange(len(last)//2)]]
+                      for i in range(len(last)//2)]]
         # Flatten the list of rows into a single list.
         rows.reverse()
         self[:] = sum(rows, [])
@@ -289,7 +298,7 @@ class IncompleteHashTree(CompleteBinaryTreeMixin, list):
         rows = [L]
         while len(rows[-1]) != 1:
             last = rows[-1]
-            rows += [[None for i in xrange(len(last)//2)]]
+            rows += [[None for i in range(len(last)//2)]]
         # Flatten the list of rows into a single list.
         rows.reverse()
         self[:] = sum(rows, [])
@@ -372,12 +381,12 @@ class IncompleteHashTree(CompleteBinaryTreeMixin, list):

         assert isinstance(hashes, dict)
         for h in hashes.values():
-            assert isinstance(h, str)
+            assert isinstance(h, bytes)
         assert isinstance(leaves, dict)
         for h in leaves.values():
-            assert isinstance(h, str)
+            assert isinstance(h, bytes)
         new_hashes = hashes.copy()
-        for leafnum,leafhash in leaves.iteritems():
+        for leafnum,leafhash in leaves.items():
             hashnum = self.first_leaf_num + leafnum
             if hashnum in new_hashes:
                 if new_hashes[hashnum] != leafhash:
@@ -416,7 +425,7 @@ class IncompleteHashTree(CompleteBinaryTreeMixin, list):

         # first we provisionally add all hashes to the tree, comparing
         # any duplicates
-        for i,h in new_hashes.iteritems():
+        for i,h in new_hashes.items():
             if self[i]:
                 if self[i] != h:
                     raise BadHashError("new hash %s does not match "
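The ported helpers above depend on bytes interpolation (b"%d" % i), which Python 2 has natively and Python 3 regained in 3.5 via PEP 461. A quick illustrative check:

    # Illustrative check of PEP 461 bytes interpolation (Python 2 and 3.5+).
    for i in range(3):
        leaf = b"%d" % i                       # b'0', b'1', b'2'
        assert isinstance(leaf, bytes)
        assert leaf == str(i).encode('ascii')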
42 src/allmydata/test/common_py3.py Normal file
@@ -0,0 +1,42 @@
+"""
+Common utilities that have been ported to Python 3.
+
+Ported to Python 3.
+"""
+
+from __future__ import unicode_literals
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+from future.utils import PY2
+if PY2:
+    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min  # noqa: F401
+
+import os
+import time
+
+
+class TimezoneMixin(object):
+
+    def setTimezone(self, timezone):
+        def tzset_if_possible():
+            # Windows doesn't have time.tzset().
+            if hasattr(time, 'tzset'):
+                time.tzset()
+
+        unset = object()
+        originalTimezone = os.environ.get('TZ', unset)
+        def restoreTimezone():
+            if originalTimezone is unset:
+                del os.environ['TZ']
+            else:
+                os.environ['TZ'] = originalTimezone
+            tzset_if_possible()
+
+        os.environ['TZ'] = timezone
+        self.addCleanup(restoreTimezone)
+        tzset_if_possible()
+
+    def have_working_tzset(self):
+        return hasattr(time, 'tzset')
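Roughly how a trial test case consumes this mixin (a hypothetical example; setTimezone registers a cleanup, so the original TZ is restored automatically):

    from twisted.trial import unittest

    from allmydata.test.common_py3 import TimezoneMixin


    class ExampleTimezoneTest(unittest.TestCase, TimezoneMixin):
        def test_under_utc(self):
            if not self.have_working_tzset():
                raise unittest.SkipTest("no time.tzset() on this platform")
            self.setTimezone("UTC")
            # ... assertions that depend on the process-wide timezone ...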
@@ -1,6 +1,6 @@
 from __future__ import print_function

-import os, signal, time
+import os, signal
 from random import randrange
 from six.moves import StringIO
@@ -185,31 +185,6 @@ class TestMixin(SignalMixin):
             self.fail("Reactor was still active when it was required to be quiescent.")


-class TimezoneMixin(object):
-
-    def setTimezone(self, timezone):
-        def tzset_if_possible():
-            # Windows doesn't have time.tzset().
-            if hasattr(time, 'tzset'):
-                time.tzset()
-
-        unset = object()
-        originalTimezone = os.environ.get('TZ', unset)
-        def restoreTimezone():
-            if originalTimezone is unset:
-                del os.environ['TZ']
-            else:
-                os.environ['TZ'] = originalTimezone
-            tzset_if_possible()
-
-        os.environ['TZ'] = timezone
-        self.addCleanup(restoreTimezone)
-        tzset_if_possible()
-
-    def have_working_tzset(self):
-        return hasattr(time, 'tzset')
-
-
 try:
     import win32file
     import win32con
149 src/allmydata/test/test_abbreviate.py Normal file
@@ -0,0 +1,149 @@
+"""
+Tests for allmydata.util.abbreviate.
+
+Ported to Python 3.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min  # noqa: F401
+
+from datetime import timedelta
+
+from twisted.trial import unittest
+
+from allmydata.util import abbreviate
+
+
+class Abbreviate(unittest.TestCase):
+    def test_abbrev_time_1s(self):
+        diff = timedelta(seconds=1)
+        s = abbreviate.abbreviate_time(diff)
+        self.assertEqual('1 second ago', s)
+
+    def test_abbrev_time_25s(self):
+        diff = timedelta(seconds=25)
+        s = abbreviate.abbreviate_time(diff)
+        self.assertEqual('25 seconds ago', s)
+
+    def test_abbrev_time_future_5_minutes(self):
+        diff = timedelta(minutes=-5)
+        s = abbreviate.abbreviate_time(diff)
+        self.assertEqual('5 minutes in the future', s)
+
+    def test_abbrev_time_hours(self):
+        diff = timedelta(hours=4)
+        s = abbreviate.abbreviate_time(diff)
+        self.assertEqual('4 hours ago', s)
+
+    def test_abbrev_time_day(self):
+        diff = timedelta(hours=49)  # must be more than 2 days
+        s = abbreviate.abbreviate_time(diff)
+        self.assertEqual('2 days ago', s)
+
+    def test_abbrev_time_month(self):
+        diff = timedelta(days=91)
+        s = abbreviate.abbreviate_time(diff)
+        self.assertEqual('3 months ago', s)
+
+    def test_abbrev_time_year(self):
+        diff = timedelta(weeks=(5 * 52) + 1)
+        s = abbreviate.abbreviate_time(diff)
+        self.assertEqual('5 years ago', s)
+
+    def test_time(self):
+        a = abbreviate.abbreviate_time
+        self.failUnlessEqual(a(None), "unknown")
+        self.failUnlessEqual(a(0), "0 seconds")
+        self.failUnlessEqual(a(1), "1 second")
+        self.failUnlessEqual(a(2), "2 seconds")
+        self.failUnlessEqual(a(119), "119 seconds")
+        MIN = 60
+        self.failUnlessEqual(a(2*MIN), "2 minutes")
+        self.failUnlessEqual(a(60*MIN), "60 minutes")
+        self.failUnlessEqual(a(179*MIN), "179 minutes")
+        HOUR = 60*MIN
+        self.failUnlessEqual(a(180*MIN), "3 hours")
+        self.failUnlessEqual(a(4*HOUR), "4 hours")
+        DAY = 24*HOUR
+        MONTH = 30*DAY
+        self.failUnlessEqual(a(2*DAY), "2 days")
+        self.failUnlessEqual(a(2*MONTH), "2 months")
+        YEAR = 365*DAY
+        self.failUnlessEqual(a(5*YEAR), "5 years")
+
+    def test_space(self):
+        tests_si = [(None, "unknown"),
+                    (0, "0 B"),
+                    (1, "1 B"),
+                    (999, "999 B"),
+                    (1000, "1000 B"),
+                    (1023, "1023 B"),
+                    (1024, "1.02 kB"),
+                    (20*1000, "20.00 kB"),
+                    (1024*1024, "1.05 MB"),
+                    (1000*1000, "1.00 MB"),
+                    (1000*1000*1000, "1.00 GB"),
+                    (1000*1000*1000*1000, "1.00 TB"),
+                    (1000*1000*1000*1000*1000, "1.00 PB"),
+                    (1000*1000*1000*1000*1000*1000, "1.00 EB"),
+                    (1234567890123456789, "1.23 EB"),
+                    ]
+        for (x, expected) in tests_si:
+            got = abbreviate.abbreviate_space(x, SI=True)
+            self.failUnlessEqual(got, expected)
+
+        tests_base1024 = [(None, "unknown"),
+                          (0, "0 B"),
+                          (1, "1 B"),
+                          (999, "999 B"),
+                          (1000, "1000 B"),
+                          (1023, "1023 B"),
+                          (1024, "1.00 kiB"),
+                          (20*1024, "20.00 kiB"),
+                          (1000*1000, "976.56 kiB"),
+                          (1024*1024, "1.00 MiB"),
+                          (1024*1024*1024, "1.00 GiB"),
+                          (1024*1024*1024*1024, "1.00 TiB"),
+                          (1000*1000*1000*1000*1000, "909.49 TiB"),
+                          (1024*1024*1024*1024*1024, "1.00 PiB"),
+                          (1024*1024*1024*1024*1024*1024, "1.00 EiB"),
+                          (1234567890123456789, "1.07 EiB"),
+                          ]
+        for (x, expected) in tests_base1024:
+            got = abbreviate.abbreviate_space(x, SI=False)
+            self.failUnlessEqual(got, expected)
+
+        self.failUnlessEqual(abbreviate.abbreviate_space_both(1234567),
+                             "(1.23 MB, 1.18 MiB)")
+
+    def test_parse_space(self):
+        p = abbreviate.parse_abbreviated_size
+        self.failUnlessEqual(p(""), None)
+        self.failUnlessEqual(p(None), None)
+        self.failUnlessEqual(p("123"), 123)
+        self.failUnlessEqual(p("123B"), 123)
+        self.failUnlessEqual(p("2K"), 2000)
+        self.failUnlessEqual(p("2kb"), 2000)
+        self.failUnlessEqual(p("2KiB"), 2048)
+        self.failUnlessEqual(p("10MB"), 10*1000*1000)
+        self.failUnlessEqual(p("10MiB"), 10*1024*1024)
+        self.failUnlessEqual(p("5G"), 5*1000*1000*1000)
+        self.failUnlessEqual(p("4GiB"), 4*1024*1024*1024)
+        self.failUnlessEqual(p("3TB"), 3*1000*1000*1000*1000)
+        self.failUnlessEqual(p("3TiB"), 3*1024*1024*1024*1024)
+        self.failUnlessEqual(p("6PB"), 6*1000*1000*1000*1000*1000)
+        self.failUnlessEqual(p("6PiB"), 6*1024*1024*1024*1024*1024)
+        self.failUnlessEqual(p("9EB"), 9*1000*1000*1000*1000*1000*1000)
+        self.failUnlessEqual(p("9EiB"), 9*1024*1024*1024*1024*1024*1024)
+
+        e = self.failUnlessRaises(ValueError, p, "12 cubits")
+        self.failUnlessIn("12 cubits", str(e))
+        e = self.failUnlessRaises(ValueError, p, "1 BB")
+        self.failUnlessIn("1 BB", str(e))
+        e = self.failUnlessRaises(ValueError, p, "fhtagn")
+        self.failUnlessIn("fhtagn", str(e))
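The expectation tables above pin down the formatting rule: sizes below 1024 print as whole bytes, larger sizes divide repeatedly by 1000 (SI, "kB") or 1024 ("kiB") and print two decimals. A sketch of that rule (an illustration, not the actual allmydata.util.abbreviate code):

    def abbreviate_space_sketch(size, SI=True):
        # Illustration of the rule the tests pin down; the real implementation
        # lives in allmydata.util.abbreviate.abbreviate_space.
        if size is None:
            return "unknown"
        if size < 1024:
            return "%d B" % size          # whole bytes, even in SI mode
        base = 1000.0 if SI else 1024.0
        infix = "" if SI else "i"
        value = float(size)
        for prefix in ("k", "M", "G", "T", "P", "E"):
            value /= base
            if value < base or prefix == "E":
                return "%.2f %s%sB" % (value, prefix, infix)

    assert abbreviate_space_sketch(1024, SI=True) == "1.02 kB"
    assert abbreviate_space_sketch(1000*1000, SI=False) == "976.56 kiB"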
@@ -1,4 +1,18 @@
 # -*- test-case-name: allmydata.test.test_hashtree -*-
+"""
+Tests for allmydata.hashtree.
+
+Ported to Python 3.
+"""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min  # noqa: F401
+

 from twisted.trial import unittest
@@ -7,8 +21,8 @@ from allmydata import hashtree

 def make_tree(numleaves):
-    leaves = ["%d" % i for i in range(numleaves)]
-    leaf_hashes = [tagged_hash("tag", leaf) for leaf in leaves]
+    leaves = [b"%d" % i for i in range(numleaves)]
+    leaf_hashes = [tagged_hash(b"tag", leaf) for leaf in leaves]
     ht = hashtree.HashTree(leaf_hashes)
     return ht
@@ -20,7 +34,7 @@ class Complete(unittest.TestCase):
         ht = make_tree(8)
         root = ht[0]
         self.failUnlessEqual(len(root), 32)
-        self.failUnlessEqual(ht.get_leaf(0), tagged_hash("tag", "0"))
+        self.failUnlessEqual(ht.get_leaf(0), tagged_hash(b"tag", b"0"))
         self.failUnlessRaises(IndexError, ht.get_leaf, 8)
         self.failUnlessEqual(ht.get_leaf_index(0), 7)
         self.failUnlessRaises(IndexError, ht.parent, 0)
@@ -143,7 +157,7 @@ class Incomplete(unittest.TestCase):
         current_hashes = list(iht)
         # this should fail because there aren't enough hashes known
         try:
-            iht.set_hashes(leaves={0: tagged_hash("tag", "0")})
+            iht.set_hashes(leaves={0: tagged_hash(b"tag", b"0")})
         except hashtree.NotEnoughHashesError:
             pass
         else:
@@ -157,7 +171,7 @@ class Incomplete(unittest.TestCase):
         chain = {0: ht[0], 2: ht[2], 4: ht[4], 8: ht[8]}
         # this should fail because the leaf hash is just plain wrong
         try:
-            iht.set_hashes(chain, leaves={0: tagged_hash("bad tag", "0")})
+            iht.set_hashes(chain, leaves={0: tagged_hash(b"bad tag", b"0")})
         except hashtree.BadHashError:
             pass
         else:
@@ -166,18 +180,18 @@ class Incomplete(unittest.TestCase):
         # this should fail because we give it conflicting hashes: one as an
         # internal node, another as a leaf
         try:
-            iht.set_hashes(chain, leaves={1: tagged_hash("bad tag", "1")})
+            iht.set_hashes(chain, leaves={1: tagged_hash(b"bad tag", b"1")})
         except hashtree.BadHashError:
             pass
         else:
             self.fail("didn't catch bad hash")

         bad_chain = chain.copy()
-        bad_chain[2] = ht[2] + "BOGUS"
+        bad_chain[2] = ht[2] + b"BOGUS"

         # this should fail because the internal hash is wrong
         try:
-            iht.set_hashes(bad_chain, leaves={0: tagged_hash("tag", "0")})
+            iht.set_hashes(bad_chain, leaves={0: tagged_hash(b"tag", b"0")})
         except hashtree.BadHashError:
             pass
         else:
@@ -185,23 +199,23 @@ class Incomplete(unittest.TestCase):

         # this should succeed
         try:
-            iht.set_hashes(chain, leaves={0: tagged_hash("tag", "0")})
+            iht.set_hashes(chain, leaves={0: tagged_hash(b"tag", b"0")})
         except hashtree.BadHashError as e:
             self.fail("bad hash: %s" % e)

-        self.failUnlessEqual(ht.get_leaf(0), tagged_hash("tag", "0"))
+        self.failUnlessEqual(ht.get_leaf(0), tagged_hash(b"tag", b"0"))
         self.failUnlessRaises(IndexError, ht.get_leaf, 8)

         # this should succeed too
         try:
-            iht.set_hashes(leaves={1: tagged_hash("tag", "1")})
+            iht.set_hashes(leaves={1: tagged_hash(b"tag", b"1")})
         except hashtree.BadHashError:
             self.fail("bad hash")

         # this should fail because we give it hashes that conflict with some
         # that we added successfully before
         try:
-            iht.set_hashes(leaves={1: tagged_hash("bad tag", "1")})
+            iht.set_hashes(leaves={1: tagged_hash(b"bad tag", b"1")})
         except hashtree.BadHashError:
             pass
         else:
@@ -214,6 +228,6 @@ class Incomplete(unittest.TestCase):

         # this should succeed
         try:
-            iht.set_hashes(chain, leaves={4: tagged_hash("tag", "4")})
+            iht.set_hashes(chain, leaves={4: tagged_hash(b"tag", b"4")})
         except hashtree.BadHashError as e:
             self.fail("bad hash: %s" % e)
135 src/allmydata/test/test_hashutil.py Normal file
@@ -0,0 +1,135 @@
+"""
+Tests for allmydata.util.hashutil.
+
+Ported to Python 3.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min  # noqa: F401
+
+from twisted.trial import unittest
+
+from allmydata.util import hashutil, base32
+
+
+class HashUtilTests(unittest.TestCase):
+
+    def test_random_key(self):
+        k = hashutil.random_key()
+        self.failUnlessEqual(len(k), hashutil.KEYLEN)
+        self.assertIsInstance(k, bytes)
+
+    def test_sha256d(self):
+        h1 = hashutil.tagged_hash(b"tag1", b"value")
+        self.assertIsInstance(h1, bytes)
+        h2 = hashutil.tagged_hasher(b"tag1")
+        h2.update(b"value")
+        h2a = h2.digest()
+        h2b = h2.digest()
+        self.assertIsInstance(h2a, bytes)
+        self.failUnlessEqual(h1, h2a)
+        self.failUnlessEqual(h2a, h2b)
+
+    def test_sha256d_truncated(self):
+        h1 = hashutil.tagged_hash(b"tag1", b"value", 16)
+        h2 = hashutil.tagged_hasher(b"tag1", 16)
+        h2.update(b"value")
+        h2 = h2.digest()
+        self.failUnlessEqual(len(h1), 16)
+        self.failUnlessEqual(len(h2), 16)
+        self.failUnlessEqual(h1, h2)
+
+    def test_chk(self):
+        h1 = hashutil.convergence_hash(3, 10, 1000, b"data", b"secret")
+        h2 = hashutil.convergence_hasher(3, 10, 1000, b"secret")
+        h2.update(b"data")
+        h2 = h2.digest()
+        self.failUnlessEqual(h1, h2)
+        self.assertIsInstance(h1, bytes)
+        self.assertIsInstance(h2, bytes)
+
+    def test_hashers(self):
+        h1 = hashutil.block_hash(b"foo")
+        h2 = hashutil.block_hasher()
+        h2.update(b"foo")
+        self.failUnlessEqual(h1, h2.digest())
+        self.assertIsInstance(h1, bytes)
+
+        h1 = hashutil.uri_extension_hash(b"foo")
+        h2 = hashutil.uri_extension_hasher()
+        h2.update(b"foo")
+        self.failUnlessEqual(h1, h2.digest())
+        self.assertIsInstance(h1, bytes)
+
+        h1 = hashutil.plaintext_hash(b"foo")
+        h2 = hashutil.plaintext_hasher()
+        h2.update(b"foo")
+        self.failUnlessEqual(h1, h2.digest())
+        self.assertIsInstance(h1, bytes)
+
+        h1 = hashutil.crypttext_hash(b"foo")
+        h2 = hashutil.crypttext_hasher()
+        h2.update(b"foo")
+        self.failUnlessEqual(h1, h2.digest())
+        self.assertIsInstance(h1, bytes)
+
+        h1 = hashutil.crypttext_segment_hash(b"foo")
+        h2 = hashutil.crypttext_segment_hasher()
+        h2.update(b"foo")
+        self.failUnlessEqual(h1, h2.digest())
+        self.assertIsInstance(h1, bytes)
+
+        h1 = hashutil.plaintext_segment_hash(b"foo")
+        h2 = hashutil.plaintext_segment_hasher()
+        h2.update(b"foo")
+        self.failUnlessEqual(h1, h2.digest())
+        self.assertIsInstance(h1, bytes)
+
+    def test_timing_safe_compare(self):
+        self.failUnless(hashutil.timing_safe_compare(b"a", b"a"))
+        self.failUnless(hashutil.timing_safe_compare(b"ab", b"ab"))
+        self.failIf(hashutil.timing_safe_compare(b"a", b"b"))
+        self.failIf(hashutil.timing_safe_compare(b"a", b"aa"))
+
+    def _testknown(self, hashf, expected_a, *args):
+        got = hashf(*args)
+        self.assertIsInstance(got, bytes)
+        got_a = base32.b2a(got)
+        self.failUnlessEqual(got_a, expected_a)
+
+    def test_known_answers(self):
+        # assert backwards compatibility
+        self._testknown(hashutil.storage_index_hash, b"qb5igbhcc5esa6lwqorsy7e6am", b"")
+        self._testknown(hashutil.block_hash, b"msjr5bh4evuh7fa3zw7uovixfbvlnstr5b65mrerwfnvjxig2jvq", b"")
+        self._testknown(hashutil.uri_extension_hash, b"wthsu45q7zewac2mnivoaa4ulh5xvbzdmsbuyztq2a5fzxdrnkka", b"")
+        self._testknown(hashutil.plaintext_hash, b"5lz5hwz3qj3af7n6e3arblw7xzutvnd3p3fjsngqjcb7utf3x3da", b"")
+        self._testknown(hashutil.crypttext_hash, b"itdj6e4njtkoiavlrmxkvpreosscssklunhwtvxn6ggho4rkqwga", b"")
+        self._testknown(hashutil.crypttext_segment_hash, b"aovy5aa7jej6ym5ikgwyoi4pxawnoj3wtaludjz7e2nb5xijb7aa", b"")
+        self._testknown(hashutil.plaintext_segment_hash, b"4fdgf6qruaisyukhqcmoth4t3li6bkolbxvjy4awwcpprdtva7za", b"")
+        self._testknown(hashutil.convergence_hash, b"3mo6ni7xweplycin6nowynw2we", 3, 10, 100, b"", b"converge")
+        self._testknown(hashutil.my_renewal_secret_hash, b"ujhr5k5f7ypkp67jkpx6jl4p47pyta7hu5m527cpcgvkafsefm6q", b"")
+        self._testknown(hashutil.my_cancel_secret_hash, b"rjwzmafe2duixvqy6h47f5wfrokdziry6zhx4smew4cj6iocsfaa", b"")
+        self._testknown(hashutil.file_renewal_secret_hash, b"hzshk2kf33gzbd5n3a6eszkf6q6o6kixmnag25pniusyaulqjnia", b"", b"si")
+        self._testknown(hashutil.file_cancel_secret_hash, b"bfciwvr6w7wcavsngxzxsxxaszj72dej54n4tu2idzp6b74g255q", b"", b"si")
+        self._testknown(hashutil.bucket_renewal_secret_hash, b"e7imrzgzaoashsncacvy3oysdd2m5yvtooo4gmj4mjlopsazmvuq", b"", b"\x00"*20)
+        self._testknown(hashutil.bucket_cancel_secret_hash, b"dvdujeyxeirj6uux6g7xcf4lvesk632aulwkzjar7srildvtqwma", b"", b"\x00"*20)
+        self._testknown(hashutil.hmac, b"c54ypfi6pevb3nvo6ba42jtglpkry2kbdopqsi7dgrm4r7tw5sra", b"tag", b"")
+        self._testknown(hashutil.mutable_rwcap_key_hash, b"6rvn2iqrghii5n4jbbwwqqsnqu", b"iv", b"wk")
+        self._testknown(hashutil.ssk_writekey_hash, b"ykpgmdbpgbb6yqz5oluw2q26ye", b"")
+        self._testknown(hashutil.ssk_write_enabler_master_hash, b"izbfbfkoait4dummruol3gy2bnixrrrslgye6ycmkuyujnenzpia", b"")
+        self._testknown(hashutil.ssk_write_enabler_hash, b"fuu2dvx7g6gqu5x22vfhtyed7p4pd47y5hgxbqzgrlyvxoev62tq", b"wk", b"\x00"*20)
+        self._testknown(hashutil.ssk_pubkey_fingerprint_hash, b"3opzw4hhm2sgncjx224qmt5ipqgagn7h5zivnfzqycvgqgmgz35q", b"")
+        self._testknown(hashutil.ssk_readkey_hash, b"vugid4as6qbqgeq2xczvvcedai", b"")
+        self._testknown(hashutil.ssk_readkey_data_hash, b"73wsaldnvdzqaf7v4pzbr2ae5a", b"iv", b"rk")
+        self._testknown(hashutil.ssk_storage_index_hash, b"j7icz6kigb6hxrej3tv4z7ayym", b"")
+
+        self._testknown(hashutil.permute_server_hash,
+                        b"kb4354zeeurpo3ze5e275wzbynm6hlap", # b32(expected)
+                        b"SI", # peer selection index == storage_index
+                        base32.a2b(b"u33m4y7klhz3bypswqkozwetvabelhxt"), # seed
+                        )
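These known-answer vectors exercise Tahoe's tagged SHA-256d hashes. Conceptually the construction double-hashes the value with the tag mixed in as a netstring prefix — a sketch under that assumption (illustrative only; the authoritative code is allmydata.util.hashutil):

    import hashlib


    def _netstring(s):
        return b"%d:%s," % (len(s), s)  # b"tag1" -> b"4:tag1,"


    def tagged_hash_sketch(tag, val, truncate_to=None):
        # SHA-256d (SHA-256 applied twice) over netstring(tag) + value; the
        # netstring framing keeps differently tagged hashes in disjoint domains.
        h = hashlib.sha256(_netstring(tag) + val).digest()
        h = hashlib.sha256(h).digest()
        return h[:truncate_to] if truncate_to else h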
@@ -1,36 +1,59 @@
+"""
+Tests for allmydata.util.netstring.
+
+Ported to Python 3.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min  # noqa: F401
+
 from twisted.trial import unittest

 from allmydata.util.netstring import netstring, split_netstring


 class Netstring(unittest.TestCase):
+    def test_encode(self):
+        """netstring() correctly encodes the given bytes."""
+        result = netstring(b"abc")
+        self.assertEqual(result, b"3:abc,")
+        self.assertIsInstance(result, bytes)
+
     def test_split(self):
-        a = netstring("hello") + netstring("world")
-        self.failUnlessEqual(split_netstring(a, 2), (["hello", "world"], len(a)))
-        self.failUnlessEqual(split_netstring(a, 2, required_trailer=""), (["hello", "world"], len(a)))
+        a = netstring(b"hello") + netstring(b"world")
+        for s in split_netstring(a, 2)[0]:
+            self.assertIsInstance(s, bytes)
+        self.failUnlessEqual(split_netstring(a, 2), ([b"hello", b"world"], len(a)))
+        self.failUnlessEqual(split_netstring(a, 2, required_trailer=b""), ([b"hello", b"world"], len(a)))
         self.failUnlessRaises(ValueError, split_netstring, a, 3)
-        self.failUnlessRaises(ValueError, split_netstring, a+" extra", 2, required_trailer="")
-        self.failUnlessEqual(split_netstring(a+" extra", 2), (["hello", "world"], len(a)))
-        self.failUnlessEqual(split_netstring(a+"++", 2, required_trailer="++"),
-                             (["hello", "world"], len(a)+2))
+        self.failUnlessRaises(ValueError, split_netstring, a+b" extra", 2, required_trailer=b"")
+        self.failUnlessEqual(split_netstring(a+b" extra", 2), ([b"hello", b"world"], len(a)))
+        self.failUnlessEqual(split_netstring(a+b"++", 2, required_trailer=b"++"),
+                             ([b"hello", b"world"], len(a)+2))
         self.failUnlessRaises(ValueError,
-                              split_netstring, a+"+", 2, required_trailer="not")
+                              split_netstring, a+b"+", 2, required_trailer=b"not")

     def test_extra(self):
-        a = netstring("hello")
-        self.failUnlessEqual(split_netstring(a, 1), (["hello"], len(a)))
-        b = netstring("hello") + "extra stuff"
+        a = netstring(b"hello")
+        self.failUnlessEqual(split_netstring(a, 1), ([b"hello"], len(a)))
+        b = netstring(b"hello") + b"extra stuff"
         self.failUnlessEqual(split_netstring(b, 1),
-                             (["hello"], len(a)))
+                             ([b"hello"], len(a)))

     def test_nested(self):
-        a = netstring("hello") + netstring("world") + "extra stuff"
-        b = netstring("a") + netstring("is") + netstring(a) + netstring(".")
+        a = netstring(b"hello") + netstring(b"world") + b"extra stuff"
+        b = netstring(b"a") + netstring(b"is") + netstring(a) + netstring(b".")
         (top, pos) = split_netstring(b, 4)
         self.failUnlessEqual(len(top), 4)
-        self.failUnlessEqual(top[0], "a")
-        self.failUnlessEqual(top[1], "is")
+        self.failUnlessEqual(top[0], b"a")
+        self.failUnlessEqual(top[1], b"is")
         self.failUnlessEqual(top[2], a)
-        self.failUnlessEqual(top[3], ".")
-        self.failUnlessRaises(ValueError, split_netstring, a, 2, required_trailer="")
+        self.failUnlessEqual(top[3], b".")
+        self.failUnlessRaises(ValueError, split_netstring, a, 2, required_trailer=b"")
         bottom = split_netstring(a, 2)
-        self.failUnlessEqual(bottom, (["hello", "world"], len(netstring("hello")+netstring("world"))))
+        self.failUnlessEqual(bottom, ([b"hello", b"world"], len(netstring(b"hello")+netstring(b"world"))))
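The wire format exercised here is the classic netstring — ASCII length, ':', payload, ','. A minimal decoder consistent with these tests (illustrative; the real parser is allmydata.util.netstring.split_netstring, which also enforces required_trailer):

    def split_netstring_sketch(data, numstrings):
        """Parse `numstrings` netstrings from `data`; return (payloads, end position)."""
        results, position = [], 0
        for _ in range(numstrings):
            colon = data.index(b":", position)   # raises ValueError if missing
            length = int(data[position:colon])
            payload = data[colon + 1:colon + 1 + length]
            if data[colon + 1 + length:colon + 2 + length] != b",":
                raise ValueError("netstring missing trailing comma")
            results.append(payload)
            position = colon + 2 + length
        return results, position

    assert split_netstring_sketch(b"5:hello,5:world,", 2) == ([b"hello", b"world"], 16)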
@@ -1,3 +1,17 @@
+"""
+Tests for allmydata.util.observer.
+
+Ported to Python 3.
+"""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min  # noqa: F401
+
 from twisted.trial import unittest
 from twisted.internet import defer, reactor
198 src/allmydata/test/test_pipeline.py Normal file
@@ -0,0 +1,198 @@
+"""
+Tests for allmydata.util.pipeline.
+
+Ported to Python 3.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min  # noqa: F401
+
+import gc
+
+from twisted.internet import defer
+from twisted.trial import unittest
+from twisted.python import log
+from twisted.python.failure import Failure
+
+from allmydata.util import pipeline
+
+
+class Pipeline(unittest.TestCase):
+    def pause(self, *args, **kwargs):
+        d = defer.Deferred()
+        self.calls.append( (d, args, kwargs) )
+        return d
+
+    def failUnlessCallsAre(self, expected):
+        #print self.calls
+        #print expected
+        self.failUnlessEqual(len(self.calls), len(expected), self.calls)
+        for i,c in enumerate(self.calls):
+            self.failUnlessEqual(c[1:], expected[i], str(i))
+
+    def test_basic(self):
+        self.calls = []
+        finished = []
+        p = pipeline.Pipeline(100)
+
+        d = p.flush() # fires immediately
+        d.addCallbacks(finished.append, log.err)
+        self.failUnlessEqual(len(finished), 1)
+        finished = []
+
+        d = p.add(10, self.pause, "one")
+        # the call should start right away, and our return Deferred should
+        # fire right away
+        d.addCallbacks(finished.append, log.err)
+        self.failUnlessEqual(len(finished), 1)
+        self.failUnlessEqual(finished[0], None)
+        self.failUnlessCallsAre([ ( ("one",) , {} ) ])
+        self.failUnlessEqual(p.gauge, 10)
+
+        # pipeline: [one]
+
+        finished = []
+        d = p.add(20, self.pause, "two", kw=2)
+        # pipeline: [one, two]
+
+        # the call and the Deferred should fire right away
+        d.addCallbacks(finished.append, log.err)
+        self.failUnlessEqual(len(finished), 1)
+        self.failUnlessEqual(finished[0], None)
+        self.failUnlessCallsAre([ ( ("one",) , {} ),
+                                  ( ("two",) , {"kw": 2} ),
+                                  ])
+        self.failUnlessEqual(p.gauge, 30)
+
+        self.calls[0][0].callback("one-result")
+        # pipeline: [two]
+        self.failUnlessEqual(p.gauge, 20)
+
+        finished = []
+        d = p.add(90, self.pause, "three", "posarg1")
+        # pipeline: [two, three]
+        flushed = []
+        fd = p.flush()
+        fd.addCallbacks(flushed.append, log.err)
+        self.failUnlessEqual(flushed, [])
+
+        # the call will be made right away, but the return Deferred will not,
+        # because the pipeline is now full.
+        d.addCallbacks(finished.append, log.err)
+        self.failUnlessEqual(len(finished), 0)
+        self.failUnlessCallsAre([ ( ("one",) , {} ),
+                                  ( ("two",) , {"kw": 2} ),
+                                  ( ("three", "posarg1"), {} ),
+                                  ])
+        self.failUnlessEqual(p.gauge, 110)
+
+        self.failUnlessRaises(pipeline.SingleFileError, p.add, 10, self.pause)
+
+        # retiring either call will unblock the pipeline, causing the #3
+        # Deferred to fire
+        self.calls[2][0].callback("three-result")
+        # pipeline: [two]
+
+        self.failUnlessEqual(len(finished), 1)
+        self.failUnlessEqual(finished[0], None)
+        self.failUnlessEqual(flushed, [])
+
+        # retiring call#2 will finally allow the flush() Deferred to fire
+        self.calls[1][0].callback("two-result")
+        self.failUnlessEqual(len(flushed), 1)
+
+    def test_errors(self):
+        self.calls = []
+        p = pipeline.Pipeline(100)
+
+        d1 = p.add(200, self.pause, "one")
+        d2 = p.flush()
+
+        finished = []
+        d1.addBoth(finished.append)
+        self.failUnlessEqual(finished, [])
+
+        flushed = []
+        d2.addBoth(flushed.append)
+        self.failUnlessEqual(flushed, [])
+
+        self.calls[0][0].errback(ValueError("oops"))
+
+        self.failUnlessEqual(len(finished), 1)
+        f = finished[0]
+        self.failUnless(isinstance(f, Failure))
+        self.failUnless(f.check(pipeline.PipelineError))
+        self.failUnlessIn("PipelineError", str(f.value))
+        self.failUnlessIn("ValueError", str(f.value))
+        r = repr(f.value)
+        self.failUnless("ValueError" in r, r)
+        f2 = f.value.error
+        self.failUnless(f2.check(ValueError))
+
+        self.failUnlessEqual(len(flushed), 1)
+        f = flushed[0]
+        self.failUnless(isinstance(f, Failure))
+        self.failUnless(f.check(pipeline.PipelineError))
+        f2 = f.value.error
+        self.failUnless(f2.check(ValueError))
+
+        # now that the pipeline is in the failed state, any new calls will
+        # fail immediately
+
+        d3 = p.add(20, self.pause, "two")
+
+        finished = []
+        d3.addBoth(finished.append)
+        self.failUnlessEqual(len(finished), 1)
+        f = finished[0]
+        self.failUnless(isinstance(f, Failure))
+        self.failUnless(f.check(pipeline.PipelineError))
+        r = repr(f.value)
+        self.failUnless("ValueError" in r, r)
+        f2 = f.value.error
+        self.failUnless(f2.check(ValueError))
+
+        d4 = p.flush()
+        flushed = []
+        d4.addBoth(flushed.append)
+        self.failUnlessEqual(len(flushed), 1)
+        f = flushed[0]
+        self.failUnless(isinstance(f, Failure))
+        self.failUnless(f.check(pipeline.PipelineError))
+        f2 = f.value.error
+        self.failUnless(f2.check(ValueError))
+
+    def test_errors2(self):
+        self.calls = []
+        p = pipeline.Pipeline(100)
+
+        d1 = p.add(10, self.pause, "one")
+        d2 = p.add(20, self.pause, "two")
+        d3 = p.add(30, self.pause, "three")
+        d4 = p.flush()
+
+        # one call fails, then the second one succeeds: make sure
+        # ExpandableDeferredList tolerates the second one
+
+        flushed = []
+        d4.addBoth(flushed.append)
+        self.failUnlessEqual(flushed, [])
+
+        self.calls[0][0].errback(ValueError("oops"))
+        self.failUnlessEqual(len(flushed), 1)
+        f = flushed[0]
+        self.failUnless(isinstance(f, Failure))
+        self.failUnless(f.check(pipeline.PipelineError))
+        f2 = f.value.error
+        self.failUnless(f2.check(ValueError))
+
+        self.calls[1][0].callback("two-result")
+        self.calls[2][0].errback(ValueError("three-error"))
+
+        del d1,d2,d3,d4
+        gc.collect() # for PyPy
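Taken together these tests pin down the Pipeline contract: add(size, callable) always invokes the callable immediately; the returned Deferred fires at once until the outstanding size (the "gauge") exceeds the capacity, and only then applies backpressure; flush() fires once everything has retired; and any failure poisons later add()/flush() calls with a PipelineError. A usage sketch (hypothetical send_one function; yielding each add() before the next matters, since adding again while a previous add() is still unfired raises SingleFileError, as test_basic shows):

    from twisted.internet import defer

    from allmydata.util import pipeline


    @defer.inlineCallbacks
    def write_all(chunks, send_one):
        # Tolerate ~100 bytes of unacknowledged writes before blocking the producer.
        p = pipeline.Pipeline(100)
        for offset, chunk in chunks:
            # Fires immediately while the pipeline has room; waits when it is full.
            yield p.add(len(chunk), send_one, offset, chunk)
        yield p.flush()  # resolves once every queued call has retired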
@@ -26,7 +26,7 @@ from zope.interface import implementer
 from foolscap.api import fireEventually
 import itertools
 from allmydata import interfaces
-from allmydata.util import fileutil, hashutil, base32, pollmixin, time_format
+from allmydata.util import fileutil, hashutil, base32, pollmixin
 from allmydata.storage.server import StorageServer
 from allmydata.storage.mutable import MutableShareFile
 from allmydata.storage.immutable import BucketWriter, BucketReader
@@ -3852,25 +3852,6 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
                                  expiration_mode="bogus")
         self.failUnlessIn("GC mode 'bogus' must be 'age' or 'cutoff-date'", str(e))

-    def test_parse_duration(self):
-        DAY = 24*60*60
-        MONTH = 31*DAY
-        YEAR = 365*DAY
-        p = time_format.parse_duration
-        self.failUnlessEqual(p("7days"), 7*DAY)
-        self.failUnlessEqual(p("31day"), 31*DAY)
-        self.failUnlessEqual(p("60 days"), 60*DAY)
-        self.failUnlessEqual(p("2mo"), 2*MONTH)
-        self.failUnlessEqual(p("3 month"), 3*MONTH)
-        self.failUnlessEqual(p("2years"), 2*YEAR)
-        e = self.failUnlessRaises(ValueError, p, "2kumquats")
-        self.failUnlessIn("no unit (like day, month, or year) in '2kumquats'", str(e))
-
-    def test_parse_date(self):
-        p = time_format.parse_date
-        self.failUnless(isinstance(p("2009-03-18"), int), p("2009-03-18"))
-        self.failUnlessEqual(p("2009-03-18"), 1237334400)
-
     def test_limited_history(self):
         basedir = "storage/LeaseCrawler/limited_history"
         fileutil.make_dirs(basedir)
169 src/allmydata/test/test_time_format.py Normal file
@@ -0,0 +1,169 @@
+"""
+Tests for allmydata.util.time_format.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min  # noqa: F401
+
+from past.builtins import long
+
+import time
+
+from twisted.trial import unittest
+
+from allmydata.test.common_py3 import TimezoneMixin
+from allmydata.util import time_format
+
+
+class TimeFormat(unittest.TestCase, TimezoneMixin):
+    def test_epoch(self):
+        return self._help_test_epoch()
+
+    def test_epoch_in_London(self):
+        # Europe/London is a particularly troublesome timezone. Nowadays, its
+        # offset from GMT is 0. But in 1970, its offset from GMT was 1.
+        # (Apparently in 1970 Britain had redefined standard time to be GMT+1
+        # and stayed in standard time all year round, whereas today
+        # Europe/London standard time is GMT and Europe/London Daylight
+        # Savings Time is GMT+1.) The current implementation of
+        # time_format.iso_utc_time_to_localseconds() breaks if the timezone is
+        # Europe/London. (As soon as this unit test is done then I'll change
+        # that implementation to something that works even in this case...)
+
+        if not self.have_working_tzset():
+            raise unittest.SkipTest("This test can't be run on a platform without time.tzset().")
+
+        self.setTimezone("Europe/London")
+        return self._help_test_epoch()
+
+    def _help_test_epoch(self):
+        origtzname = time.tzname
+        s = time_format.iso_utc_time_to_seconds("1970-01-01T00:00:01")
+        self.failUnlessEqual(s, 1.0)
+        s = time_format.iso_utc_time_to_seconds("1970-01-01_00:00:01")
+        self.failUnlessEqual(s, 1.0)
+        s = time_format.iso_utc_time_to_seconds("1970-01-01 00:00:01")
+        self.failUnlessEqual(s, 1.0)
+
+        self.failUnlessEqual(time_format.iso_utc(1.0), "1970-01-01_00:00:01")
+        self.failUnlessEqual(time_format.iso_utc(1.0, sep=" "),
+                             "1970-01-01 00:00:01")
+
+        now = time.time()
+        isostr = time_format.iso_utc(now)
+        timestamp = time_format.iso_utc_time_to_seconds(isostr)
+        self.failUnlessEqual(int(timestamp), int(now))
+
+        def my_time():
+            return 1.0
+        self.failUnlessEqual(time_format.iso_utc(t=my_time),
+                             "1970-01-01_00:00:01")
+        e = self.failUnlessRaises(ValueError,
+                                  time_format.iso_utc_time_to_seconds,
+                                  "invalid timestring")
+        self.failUnless("not a complete ISO8601 timestamp" in str(e))
+        s = time_format.iso_utc_time_to_seconds("1970-01-01_00:00:01.500")
+        self.failUnlessEqual(s, 1.5)
+
+        # Look for daylight-savings-related errors.
+        thatmomentinmarch = time_format.iso_utc_time_to_seconds("2009-03-20 21:49:02.226536")
+        self.failUnlessEqual(thatmomentinmarch, 1237585742.226536)
+        self.failUnlessEqual(origtzname, time.tzname)
+
+    def test_iso_utc(self):
+        when = 1266760143.7841301
+        out = time_format.iso_utc_date(when)
+        self.failUnlessEqual(out, "2010-02-21")
+        out = time_format.iso_utc_date(t=lambda: when)
+        self.failUnlessEqual(out, "2010-02-21")
+        out = time_format.iso_utc(when)
+        self.failUnlessEqual(out, "2010-02-21_13:49:03.784130")
+        out = time_format.iso_utc(when, sep="-")
+        self.failUnlessEqual(out, "2010-02-21-13:49:03.784130")
+
+    def test_parse_duration(self):
+        p = time_format.parse_duration
+        DAY = 24*60*60
+        MONTH = 31*DAY
+        YEAR = 365*DAY
+        self.failUnlessEqual(p("1 day"), DAY)
+        self.failUnlessEqual(p("2 days"), 2*DAY)
+        self.failUnlessEqual(p("3 months"), 3*MONTH)
+        self.failUnlessEqual(p("4 mo"), 4*MONTH)
+        self.failUnlessEqual(p("5 years"), 5*YEAR)
+        e = self.failUnlessRaises(ValueError, p, "123")
+        self.failUnlessIn("no unit (like day, month, or year) in '123'",
+                          str(e))
+        self.failUnlessEqual(p("7days"), 7*DAY)
+        self.failUnlessEqual(p("31day"), 31*DAY)
+        self.failUnlessEqual(p("60 days"), 60*DAY)
+        self.failUnlessEqual(p("2mo"), 2*MONTH)
+        self.failUnlessEqual(p("3 month"), 3*MONTH)
+        self.failUnlessEqual(p("2years"), 2*YEAR)
+        e = self.failUnlessRaises(ValueError, p, "2kumquats")
+        self.failUnlessIn("no unit (like day, month, or year) in '2kumquats'", str(e))
+
+    def test_parse_date(self):
+        p = time_format.parse_date
+        self.failUnlessEqual(p("2010-02-21"), 1266710400)
+        self.failUnless(isinstance(p("2009-03-18"), (int, long)), p("2009-03-18"))
+        self.failUnlessEqual(p("2009-03-18"), 1237334400)
+
+    def test_format_time(self):
+        self.failUnlessEqual(time_format.format_time(time.gmtime(0)), '1970-01-01 00:00:00')
+        self.failUnlessEqual(time_format.format_time(time.gmtime(60)), '1970-01-01 00:01:00')
+        self.failUnlessEqual(time_format.format_time(time.gmtime(60*60)), '1970-01-01 01:00:00')
+        seconds_per_day = 60*60*24
+        leap_years_1970_to_2014_inclusive = ((2012 - 1968) // 4)
+        self.failUnlessEqual(time_format.format_time(time.gmtime(seconds_per_day*((2015 - 1970)*365+leap_years_1970_to_2014_inclusive))), '2015-01-01 00:00:00')
+
+    def test_format_time_y2038(self):
+        seconds_per_day = 60*60*24
+        leap_years_1970_to_2047_inclusive = ((2044 - 1968) // 4)
+        t = (seconds_per_day*
+             ((2048 - 1970)*365 + leap_years_1970_to_2047_inclusive))
+        try:
+            gm_t = time.gmtime(t)
+        except ValueError:
+            raise unittest.SkipTest("Note: this system cannot handle dates after 2037.")
+        self.failUnlessEqual(time_format.format_time(gm_t),
+                             '2048-01-01 00:00:00')
+
+    def test_format_delta(self):
+        time_1 = 1389812723
+        time_5s_delta = 1389812728
+        time_28m7s_delta = 1389814410
+        time_1h_delta = 1389816323
+        time_1d21h46m49s_delta = 1389977532
+
+        self.failUnlessEqual(
+            time_format.format_delta(time_1, time_1), '0s')
+
+        self.failUnlessEqual(
+            time_format.format_delta(time_1, time_5s_delta), '5s')
+        self.failUnlessEqual(
+            time_format.format_delta(time_1, time_28m7s_delta), '28m 7s')
+        self.failUnlessEqual(
+            time_format.format_delta(time_1, time_1h_delta), '1h 0m 0s')
+        self.failUnlessEqual(
+            time_format.format_delta(time_1, time_1d21h46m49s_delta), '1d 21h 46m 49s')
+
+        self.failUnlessEqual(
+            time_format.format_delta(time_1d21h46m49s_delta, time_1), '-')
+
+        # time_1 with a decimal fraction will make the delta 1s less
+        time_1decimal = 1389812723.383963
+
+        self.failUnlessEqual(
+            time_format.format_delta(time_1decimal, time_5s_delta), '4s')
+        self.failUnlessEqual(
+            time_format.format_delta(time_1decimal, time_28m7s_delta), '28m 6s')
+        self.failUnlessEqual(
+            time_format.format_delta(time_1decimal, time_1h_delta), '59m 59s')
+        self.failUnlessEqual(
+            time_format.format_delta(time_1decimal, time_1d21h46m49s_delta), '1d 21h 46m 48s')
@@ -3,22 +3,19 @@ from __future__ import print_function

 import six
 import os, time, sys
 import yaml
 import gc # support PyPy

 from six.moves import StringIO
-from datetime import timedelta
 from twisted.trial import unittest
 from twisted.internet import defer, reactor
 from twisted.python.failure import Failure
 from twisted.python import log

-from allmydata.util import base32, idlib, mathutil, hashutil
-from allmydata.util import fileutil, abbreviate
-from allmydata.util import limiter, time_format, pollmixin
-from allmydata.util import statistics, dictutil, pipeline, yamlutil
+from allmydata.util import idlib, mathutil
+from allmydata.util import fileutil
+from allmydata.util import limiter, pollmixin
+from allmydata.util import statistics, dictutil, yamlutil
 from allmydata.util import log as tahoe_log
 from allmydata.util.fileutil import EncryptedTemporaryFile
-from allmydata.test.common_util import ReallyEqualMixin, TimezoneMixin
+from allmydata.test.common_util import ReallyEqualMixin

 if six.PY3:
     long = int
@ -570,240 +567,6 @@ class PollMixinTests(unittest.TestCase):
|
||||
return d
|
||||
|
||||
|
||||
class HashUtilTests(unittest.TestCase):
|
||||
|
||||
def test_random_key(self):
|
||||
k = hashutil.random_key()
|
||||
self.failUnlessEqual(len(k), hashutil.KEYLEN)
|
||||
|
||||
def test_sha256d(self):
|
||||
h1 = hashutil.tagged_hash("tag1", "value")
|
||||
h2 = hashutil.tagged_hasher("tag1")
|
||||
h2.update("value")
|
||||
h2a = h2.digest()
|
||||
h2b = h2.digest()
|
||||
self.failUnlessEqual(h1, h2a)
|
||||
self.failUnlessEqual(h2a, h2b)
|
||||
|
||||
def test_sha256d_truncated(self):
|
||||
h1 = hashutil.tagged_hash("tag1", "value", 16)
|
||||
h2 = hashutil.tagged_hasher("tag1", 16)
|
||||
h2.update("value")
|
||||
h2 = h2.digest()
|
||||
self.failUnlessEqual(len(h1), 16)
|
||||
self.failUnlessEqual(len(h2), 16)
|
||||
self.failUnlessEqual(h1, h2)
|
||||
|
||||
def test_chk(self):
|
||||
h1 = hashutil.convergence_hash(3, 10, 1000, "data", "secret")
|
||||
h2 = hashutil.convergence_hasher(3, 10, 1000, "secret")
|
||||
h2.update("data")
|
||||
h2 = h2.digest()
|
||||
self.failUnlessEqual(h1, h2)
|
||||
|
||||
def test_hashers(self):
|
||||
h1 = hashutil.block_hash("foo")
|
||||
h2 = hashutil.block_hasher()
|
||||
h2.update("foo")
|
||||
self.failUnlessEqual(h1, h2.digest())
|
||||
|
||||
h1 = hashutil.uri_extension_hash("foo")
|
||||
h2 = hashutil.uri_extension_hasher()
|
||||
h2.update("foo")
|
||||
self.failUnlessEqual(h1, h2.digest())
|
||||
|
||||
h1 = hashutil.plaintext_hash("foo")
|
||||
h2 = hashutil.plaintext_hasher()
|
||||
h2.update("foo")
|
||||
self.failUnlessEqual(h1, h2.digest())
|
||||
|
||||
h1 = hashutil.crypttext_hash("foo")
|
||||
h2 = hashutil.crypttext_hasher()
|
||||
h2.update("foo")
|
||||
self.failUnlessEqual(h1, h2.digest())
|
||||
|
||||
h1 = hashutil.crypttext_segment_hash("foo")
|
||||
h2 = hashutil.crypttext_segment_hasher()
|
||||
h2.update("foo")
|
||||
self.failUnlessEqual(h1, h2.digest())
|
||||
|
||||
h1 = hashutil.plaintext_segment_hash("foo")
|
||||
h2 = hashutil.plaintext_segment_hasher()
|
||||
h2.update("foo")
|
||||
self.failUnlessEqual(h1, h2.digest())
|
||||
|
||||
def test_timing_safe_compare(self):
|
||||
self.failUnless(hashutil.timing_safe_compare("a", "a"))
|
||||
self.failUnless(hashutil.timing_safe_compare("ab", "ab"))
|
||||
self.failIf(hashutil.timing_safe_compare("a", "b"))
|
||||
self.failIf(hashutil.timing_safe_compare("a", "aa"))
|
||||
|
||||
def _testknown(self, hashf, expected_a, *args):
|
||||
got = hashf(*args)
|
||||
got_a = base32.b2a(got)
|
||||
self.failUnlessEqual(got_a, expected_a)
|
||||
|
||||
def test_known_answers(self):
|
||||
# assert backwards compatibility
|
||||
self._testknown(hashutil.storage_index_hash, "qb5igbhcc5esa6lwqorsy7e6am", "")
|
||||
self._testknown(hashutil.block_hash, "msjr5bh4evuh7fa3zw7uovixfbvlnstr5b65mrerwfnvjxig2jvq", "")
|
||||
self._testknown(hashutil.uri_extension_hash, "wthsu45q7zewac2mnivoaa4ulh5xvbzdmsbuyztq2a5fzxdrnkka", "")
|
||||
self._testknown(hashutil.plaintext_hash, "5lz5hwz3qj3af7n6e3arblw7xzutvnd3p3fjsngqjcb7utf3x3da", "")
|
||||
self._testknown(hashutil.crypttext_hash, "itdj6e4njtkoiavlrmxkvpreosscssklunhwtvxn6ggho4rkqwga", "")
|
||||
self._testknown(hashutil.crypttext_segment_hash, "aovy5aa7jej6ym5ikgwyoi4pxawnoj3wtaludjz7e2nb5xijb7aa", "")
|
||||
self._testknown(hashutil.plaintext_segment_hash, "4fdgf6qruaisyukhqcmoth4t3li6bkolbxvjy4awwcpprdtva7za", "")
|
||||
self._testknown(hashutil.convergence_hash, "3mo6ni7xweplycin6nowynw2we", 3, 10, 100, "", "converge")
|
||||
self._testknown(hashutil.my_renewal_secret_hash, "ujhr5k5f7ypkp67jkpx6jl4p47pyta7hu5m527cpcgvkafsefm6q", "")
|
||||
self._testknown(hashutil.my_cancel_secret_hash, "rjwzmafe2duixvqy6h47f5wfrokdziry6zhx4smew4cj6iocsfaa", "")
|
||||
self._testknown(hashutil.file_renewal_secret_hash, "hzshk2kf33gzbd5n3a6eszkf6q6o6kixmnag25pniusyaulqjnia", "", "si")
|
||||
self._testknown(hashutil.file_cancel_secret_hash, "bfciwvr6w7wcavsngxzxsxxaszj72dej54n4tu2idzp6b74g255q", "", "si")
|
||||
self._testknown(hashutil.bucket_renewal_secret_hash, "e7imrzgzaoashsncacvy3oysdd2m5yvtooo4gmj4mjlopsazmvuq", "", "\x00"*20)
|
||||
self._testknown(hashutil.bucket_cancel_secret_hash, "dvdujeyxeirj6uux6g7xcf4lvesk632aulwkzjar7srildvtqwma", "", "\x00"*20)
|
||||
self._testknown(hashutil.hmac, "c54ypfi6pevb3nvo6ba42jtglpkry2kbdopqsi7dgrm4r7tw5sra", "tag", "")
|
||||
self._testknown(hashutil.mutable_rwcap_key_hash, "6rvn2iqrghii5n4jbbwwqqsnqu", "iv", "wk")
|
||||
self._testknown(hashutil.ssk_writekey_hash, "ykpgmdbpgbb6yqz5oluw2q26ye", "")
|
||||
self._testknown(hashutil.ssk_write_enabler_master_hash, "izbfbfkoait4dummruol3gy2bnixrrrslgye6ycmkuyujnenzpia", "")
|
||||
self._testknown(hashutil.ssk_write_enabler_hash, "fuu2dvx7g6gqu5x22vfhtyed7p4pd47y5hgxbqzgrlyvxoev62tq", "wk", "\x00"*20)
|
||||
self._testknown(hashutil.ssk_pubkey_fingerprint_hash, "3opzw4hhm2sgncjx224qmt5ipqgagn7h5zivnfzqycvgqgmgz35q", "")
|
||||
self._testknown(hashutil.ssk_readkey_hash, "vugid4as6qbqgeq2xczvvcedai", "")
|
||||
self._testknown(hashutil.ssk_readkey_data_hash, "73wsaldnvdzqaf7v4pzbr2ae5a", "iv", "rk")
|
||||
self._testknown(hashutil.ssk_storage_index_hash, "j7icz6kigb6hxrej3tv4z7ayym", "")
|
||||
|
||||
self._testknown(hashutil.permute_server_hash,
|
||||
"kb4354zeeurpo3ze5e275wzbynm6hlap", # b32(expected)
|
||||
"SI", # peer selection index == storage_index
|
||||
base32.a2b("u33m4y7klhz3bypswqkozwetvabelhxt"), # seed
|
||||
)
|
||||
|
class Abbreviate(unittest.TestCase):
    def test_abbrev_time_1s(self):
        diff = timedelta(seconds=1)
        s = abbreviate.abbreviate_time(diff)
        self.assertEqual('1 second ago', s)

    def test_abbrev_time_25s(self):
        diff = timedelta(seconds=25)
        s = abbreviate.abbreviate_time(diff)
        self.assertEqual('25 seconds ago', s)

    def test_abbrev_time_future_5_minutes(self):
        diff = timedelta(minutes=-5)
        s = abbreviate.abbreviate_time(diff)
        self.assertEqual('5 minutes in the future', s)

    def test_abbrev_time_hours(self):
        diff = timedelta(hours=4)
        s = abbreviate.abbreviate_time(diff)
        self.assertEqual('4 hours ago', s)

    def test_abbrev_time_day(self):
        diff = timedelta(hours=49)  # must be more than 2 days
        s = abbreviate.abbreviate_time(diff)
        self.assertEqual('2 days ago', s)

    def test_abbrev_time_month(self):
        diff = timedelta(days=91)
        s = abbreviate.abbreviate_time(diff)
        self.assertEqual('3 months ago', s)

    def test_abbrev_time_year(self):
        diff = timedelta(weeks=(5 * 52) + 1)
        s = abbreviate.abbreviate_time(diff)
        self.assertEqual('5 years ago', s)

    def test_time(self):
        a = abbreviate.abbreviate_time
        self.failUnlessEqual(a(None), "unknown")
        self.failUnlessEqual(a(0), "0 seconds")
        self.failUnlessEqual(a(1), "1 second")
        self.failUnlessEqual(a(2), "2 seconds")
        self.failUnlessEqual(a(119), "119 seconds")
        MIN = 60
        self.failUnlessEqual(a(2*MIN), "2 minutes")
        self.failUnlessEqual(a(60*MIN), "60 minutes")
        self.failUnlessEqual(a(179*MIN), "179 minutes")
        HOUR = 60*MIN
        self.failUnlessEqual(a(180*MIN), "3 hours")
        self.failUnlessEqual(a(4*HOUR), "4 hours")
        DAY = 24*HOUR
        MONTH = 30*DAY
        self.failUnlessEqual(a(2*DAY), "2 days")
        self.failUnlessEqual(a(2*MONTH), "2 months")
        YEAR = 365*DAY
        self.failUnlessEqual(a(5*YEAR), "5 years")

    def test_space(self):
        tests_si = [(None, "unknown"),
                    (0, "0 B"),
                    (1, "1 B"),
                    (999, "999 B"),
                    (1000, "1000 B"),
                    (1023, "1023 B"),
                    (1024, "1.02 kB"),
                    (20*1000, "20.00 kB"),
                    (1024*1024, "1.05 MB"),
                    (1000*1000, "1.00 MB"),
                    (1000*1000*1000, "1.00 GB"),
                    (1000*1000*1000*1000, "1.00 TB"),
                    (1000*1000*1000*1000*1000, "1.00 PB"),
                    (1000*1000*1000*1000*1000*1000, "1.00 EB"),
                    (1234567890123456789, "1.23 EB"),
                    ]
        for (x, expected) in tests_si:
            got = abbreviate.abbreviate_space(x, SI=True)
            self.failUnlessEqual(got, expected)

        tests_base1024 = [(None, "unknown"),
                          (0, "0 B"),
                          (1, "1 B"),
                          (999, "999 B"),
                          (1000, "1000 B"),
                          (1023, "1023 B"),
                          (1024, "1.00 kiB"),
                          (20*1024, "20.00 kiB"),
                          (1000*1000, "976.56 kiB"),
                          (1024*1024, "1.00 MiB"),
                          (1024*1024*1024, "1.00 GiB"),
                          (1024*1024*1024*1024, "1.00 TiB"),
                          (1000*1000*1000*1000*1000, "909.49 TiB"),
                          (1024*1024*1024*1024*1024, "1.00 PiB"),
                          (1024*1024*1024*1024*1024*1024, "1.00 EiB"),
                          (1234567890123456789, "1.07 EiB"),
                          ]
        for (x, expected) in tests_base1024:
            got = abbreviate.abbreviate_space(x, SI=False)
            self.failUnlessEqual(got, expected)

        self.failUnlessEqual(abbreviate.abbreviate_space_both(1234567),
                             "(1.23 MB, 1.18 MiB)")

    def test_parse_space(self):
        p = abbreviate.parse_abbreviated_size
        self.failUnlessEqual(p(""), None)
        self.failUnlessEqual(p(None), None)
        self.failUnlessEqual(p("123"), 123)
        self.failUnlessEqual(p("123B"), 123)
        self.failUnlessEqual(p("2K"), 2000)
        self.failUnlessEqual(p("2kb"), 2000)
        self.failUnlessEqual(p("2KiB"), 2048)
        self.failUnlessEqual(p("10MB"), 10*1000*1000)
        self.failUnlessEqual(p("10MiB"), 10*1024*1024)
        self.failUnlessEqual(p("5G"), 5*1000*1000*1000)
        self.failUnlessEqual(p("4GiB"), 4*1024*1024*1024)
        self.failUnlessEqual(p("3TB"), 3*1000*1000*1000*1000)
        self.failUnlessEqual(p("3TiB"), 3*1024*1024*1024*1024)
        self.failUnlessEqual(p("6PB"), 6*1000*1000*1000*1000*1000)
        self.failUnlessEqual(p("6PiB"), 6*1024*1024*1024*1024*1024)
        self.failUnlessEqual(p("9EB"), 9*1000*1000*1000*1000*1000*1000)
        self.failUnlessEqual(p("9EiB"), 9*1024*1024*1024*1024*1024*1024)

        e = self.failUnlessRaises(ValueError, p, "12 cubits")
        self.failUnlessIn("12 cubits", str(e))
        e = self.failUnlessRaises(ValueError, p, "1 BB")
        self.failUnlessIn("1 BB", str(e))
        e = self.failUnlessRaises(ValueError, p, "fhtagn")
        self.failUnlessIn("fhtagn", str(e))

class Limiter(unittest.TestCase):

    def job(self, i, foo):
@ -878,141 +641,6 @@ class Limiter(unittest.TestCase):
        d.addCallback(_all_done)
        return d

class TimeFormat(unittest.TestCase, TimezoneMixin):
    def test_epoch(self):
        return self._help_test_epoch()

    def test_epoch_in_London(self):
        # Europe/London is a particularly troublesome timezone. Nowadays, its
        # offset from GMT is 0. But in 1970, its offset from GMT was +1 hour.
        # (Apparently in 1970 Britain had redefined standard time to be GMT+1
        # and stayed in standard time all year round, whereas today
        # Europe/London standard time is GMT and Europe/London Daylight
        # Savings Time is GMT+1.) The current implementation of
        # time_format.iso_utc_time_to_localseconds() breaks if the timezone is
        # Europe/London. (As soon as this unit test is done then I'll change
        # that implementation to something that works even in this case...)

        if not self.have_working_tzset():
            raise unittest.SkipTest("This test can't be run on a platform without time.tzset().")

        self.setTimezone("Europe/London")
        return self._help_test_epoch()
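
    # A standalone sketch of the pitfall described above (hypothetical code,
    # not part of the suite): parsing a timestamp with mktime() applies the
    # *local* zone, so under Europe/London the 1970 epoch parses an hour off.
    #
    #   import os, time
    #   os.environ['TZ'] = 'Europe/London'
    #   time.tzset()
    #   time.mktime(time.strptime("1970-01-01 00:00:01", "%Y-%m-%d %H:%M:%S"))
    #   # -> -3599.0 (local 1970 time was GMT+1), not the expected 1.0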

    def _help_test_epoch(self):
        origtzname = time.tzname
        s = time_format.iso_utc_time_to_seconds("1970-01-01T00:00:01")
        self.failUnlessEqual(s, 1.0)
        s = time_format.iso_utc_time_to_seconds("1970-01-01_00:00:01")
        self.failUnlessEqual(s, 1.0)
        s = time_format.iso_utc_time_to_seconds("1970-01-01 00:00:01")
        self.failUnlessEqual(s, 1.0)

        self.failUnlessEqual(time_format.iso_utc(1.0), "1970-01-01_00:00:01")
        self.failUnlessEqual(time_format.iso_utc(1.0, sep=" "),
                             "1970-01-01 00:00:01")

        now = time.time()
        isostr = time_format.iso_utc(now)
        timestamp = time_format.iso_utc_time_to_seconds(isostr)
        self.failUnlessEqual(int(timestamp), int(now))

        def my_time():
            return 1.0
        self.failUnlessEqual(time_format.iso_utc(t=my_time),
                             "1970-01-01_00:00:01")
        e = self.failUnlessRaises(ValueError,
                                  time_format.iso_utc_time_to_seconds,
                                  "invalid timestring")
        self.failUnless("not a complete ISO8601 timestamp" in str(e))
        s = time_format.iso_utc_time_to_seconds("1970-01-01_00:00:01.500")
        self.failUnlessEqual(s, 1.5)

        # Look for daylight-savings-related errors.
        thatmomentinmarch = time_format.iso_utc_time_to_seconds("2009-03-20 21:49:02.226536")
        self.failUnlessEqual(thatmomentinmarch, 1237585742.226536)
        self.failUnlessEqual(origtzname, time.tzname)

    def test_iso_utc(self):
        when = 1266760143.7841301
        out = time_format.iso_utc_date(when)
        self.failUnlessEqual(out, "2010-02-21")
        out = time_format.iso_utc_date(t=lambda: when)
        self.failUnlessEqual(out, "2010-02-21")
        out = time_format.iso_utc(when)
        self.failUnlessEqual(out, "2010-02-21_13:49:03.784130")
        out = time_format.iso_utc(when, sep="-")
        self.failUnlessEqual(out, "2010-02-21-13:49:03.784130")

    def test_parse_duration(self):
        p = time_format.parse_duration
        DAY = 24*60*60
        self.failUnlessEqual(p("1 day"), DAY)
        self.failUnlessEqual(p("2 days"), 2*DAY)
        self.failUnlessEqual(p("3 months"), 3*31*DAY)
        self.failUnlessEqual(p("4 mo"), 4*31*DAY)
        self.failUnlessEqual(p("5 years"), 5*365*DAY)
        e = self.failUnlessRaises(ValueError, p, "123")
        self.failUnlessIn("no unit (like day, month, or year) in '123'",
                          str(e))

    def test_parse_date(self):
        self.failUnlessEqual(time_format.parse_date("2010-02-21"), 1266710400)

    def test_format_time(self):
        self.failUnlessEqual(time_format.format_time(time.gmtime(0)), '1970-01-01 00:00:00')
        self.failUnlessEqual(time_format.format_time(time.gmtime(60)), '1970-01-01 00:01:00')
        self.failUnlessEqual(time_format.format_time(time.gmtime(60*60)), '1970-01-01 01:00:00')
        seconds_per_day = 60*60*24
        leap_years_1970_to_2014_inclusive = ((2012 - 1968) // 4)
        self.failUnlessEqual(time_format.format_time(time.gmtime(seconds_per_day*((2015 - 1970)*365+leap_years_1970_to_2014_inclusive))), '2015-01-01 00:00:00')

    def test_format_time_y2038(self):
        seconds_per_day = 60*60*24
        leap_years_1970_to_2047_inclusive = ((2044 - 1968) // 4)
        t = (seconds_per_day*
             ((2048 - 1970)*365 + leap_years_1970_to_2047_inclusive))
        try:
            gm_t = time.gmtime(t)
        except ValueError:
            raise unittest.SkipTest("Note: this system cannot handle dates after 2037.")
        self.failUnlessEqual(time_format.format_time(gm_t),
                             '2048-01-01 00:00:00')

    def test_format_delta(self):
        time_1 = 1389812723
        time_5s_delta = 1389812728
        time_28m7s_delta = 1389814410
        time_1h_delta = 1389816323
        time_1d21h46m49s_delta = 1389977532

        self.failUnlessEqual(
            time_format.format_delta(time_1, time_1), '0s')

        self.failUnlessEqual(
            time_format.format_delta(time_1, time_5s_delta), '5s')
        self.failUnlessEqual(
            time_format.format_delta(time_1, time_28m7s_delta), '28m 7s')
        self.failUnlessEqual(
            time_format.format_delta(time_1, time_1h_delta), '1h 0m 0s')
        self.failUnlessEqual(
            time_format.format_delta(time_1, time_1d21h46m49s_delta), '1d 21h 46m 49s')

        self.failUnlessEqual(
            time_format.format_delta(time_1d21h46m49s_delta, time_1), '-')

        # time_1 with a decimal fraction will make the delta 1s less
        time_1decimal = 1389812723.383963

        self.failUnlessEqual(
            time_format.format_delta(time_1decimal, time_5s_delta), '4s')
        self.failUnlessEqual(
            time_format.format_delta(time_1decimal, time_28m7s_delta), '28m 6s')
        self.failUnlessEqual(
            time_format.format_delta(time_1decimal, time_1h_delta), '59m 59s')
        self.failUnlessEqual(
            time_format.format_delta(time_1decimal, time_1d21h46m49s_delta), '1d 21h 46m 48s')


ctr = [0]
class EqButNotIs(object):
@ -1109,180 +737,6 @@ class DictUtil(unittest.TestCase):
        self.failUnlessEqual(d["one"], 1)
        self.failUnlessEqual(d.get_aux("one"), None)

class Pipeline(unittest.TestCase):
    def pause(self, *args, **kwargs):
        d = defer.Deferred()
        self.calls.append( (d, args, kwargs) )
        return d

    def failUnlessCallsAre(self, expected):
        #print self.calls
        #print expected
        self.failUnlessEqual(len(self.calls), len(expected), self.calls)
        for i,c in enumerate(self.calls):
            self.failUnlessEqual(c[1:], expected[i], str(i))

    def test_basic(self):
        self.calls = []
        finished = []
        p = pipeline.Pipeline(100)

        d = p.flush() # fires immediately
        d.addCallbacks(finished.append, log.err)
        self.failUnlessEqual(len(finished), 1)
        finished = []

        d = p.add(10, self.pause, "one")
        # the call should start right away, and our return Deferred should
        # fire right away
        d.addCallbacks(finished.append, log.err)
        self.failUnlessEqual(len(finished), 1)
        self.failUnlessEqual(finished[0], None)
        self.failUnlessCallsAre([ ( ("one",) , {} ) ])
        self.failUnlessEqual(p.gauge, 10)

        # pipeline: [one]

        finished = []
        d = p.add(20, self.pause, "two", kw=2)
        # pipeline: [one, two]

        # the call and the Deferred should fire right away
        d.addCallbacks(finished.append, log.err)
        self.failUnlessEqual(len(finished), 1)
        self.failUnlessEqual(finished[0], None)
        self.failUnlessCallsAre([ ( ("one",) , {} ),
                                  ( ("two",) , {"kw": 2} ),
                                  ])
        self.failUnlessEqual(p.gauge, 30)

        self.calls[0][0].callback("one-result")
        # pipeline: [two]
        self.failUnlessEqual(p.gauge, 20)

        finished = []
        d = p.add(90, self.pause, "three", "posarg1")
        # pipeline: [two, three]
        flushed = []
        fd = p.flush()
        fd.addCallbacks(flushed.append, log.err)
        self.failUnlessEqual(flushed, [])

        # the call will be made right away, but the return Deferred will not,
        # because the pipeline is now full.
        d.addCallbacks(finished.append, log.err)
        self.failUnlessEqual(len(finished), 0)
        self.failUnlessCallsAre([ ( ("one",) , {} ),
                                  ( ("two",) , {"kw": 2} ),
                                  ( ("three", "posarg1"), {} ),
                                  ])
        self.failUnlessEqual(p.gauge, 110)

        self.failUnlessRaises(pipeline.SingleFileError, p.add, 10, self.pause)

        # retiring either call will unblock the pipeline, causing the #3
        # Deferred to fire
        self.calls[2][0].callback("three-result")
        # pipeline: [two]

        self.failUnlessEqual(len(finished), 1)
        self.failUnlessEqual(finished[0], None)
        self.failUnlessEqual(flushed, [])

        # retiring call#2 will finally allow the flush() Deferred to fire
        self.calls[1][0].callback("two-result")
        self.failUnlessEqual(len(flushed), 1)
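
    # Behavior recap (informal, derived from the assertions above): add()
    # always invokes its callable immediately, but the Deferred it returns
    # only fires at once while the gauge (the sum of outstanding sizes)
    # stays at or below the pipeline's capacity, and flush() fires once
    # every outstanding call has been retired.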

    def test_errors(self):
        self.calls = []
        p = pipeline.Pipeline(100)

        d1 = p.add(200, self.pause, "one")
        d2 = p.flush()

        finished = []
        d1.addBoth(finished.append)
        self.failUnlessEqual(finished, [])

        flushed = []
        d2.addBoth(flushed.append)
        self.failUnlessEqual(flushed, [])

        self.calls[0][0].errback(ValueError("oops"))

        self.failUnlessEqual(len(finished), 1)
        f = finished[0]
        self.failUnless(isinstance(f, Failure))
        self.failUnless(f.check(pipeline.PipelineError))
        self.failUnlessIn("PipelineError", str(f.value))
        self.failUnlessIn("ValueError", str(f.value))
        r = repr(f.value)
        self.failUnless("ValueError" in r, r)
        f2 = f.value.error
        self.failUnless(f2.check(ValueError))

        self.failUnlessEqual(len(flushed), 1)
        f = flushed[0]
        self.failUnless(isinstance(f, Failure))
        self.failUnless(f.check(pipeline.PipelineError))
        f2 = f.value.error
        self.failUnless(f2.check(ValueError))

        # now that the pipeline is in the failed state, any new calls will
        # fail immediately

        d3 = p.add(20, self.pause, "two")

        finished = []
        d3.addBoth(finished.append)
        self.failUnlessEqual(len(finished), 1)
        f = finished[0]
        self.failUnless(isinstance(f, Failure))
        self.failUnless(f.check(pipeline.PipelineError))
        r = repr(f.value)
        self.failUnless("ValueError" in r, r)
        f2 = f.value.error
        self.failUnless(f2.check(ValueError))

        d4 = p.flush()
        flushed = []
        d4.addBoth(flushed.append)
        self.failUnlessEqual(len(flushed), 1)
        f = flushed[0]
        self.failUnless(isinstance(f, Failure))
        self.failUnless(f.check(pipeline.PipelineError))
        f2 = f.value.error
        self.failUnless(f2.check(ValueError))

    def test_errors2(self):
        self.calls = []
        p = pipeline.Pipeline(100)

        d1 = p.add(10, self.pause, "one")
        d2 = p.add(20, self.pause, "two")
        d3 = p.add(30, self.pause, "three")
        d4 = p.flush()

        # one call fails, then the second one succeeds: make sure
        # ExpandableDeferredList tolerates the second one

        flushed = []
        d4.addBoth(flushed.append)
        self.failUnlessEqual(flushed, [])

        self.calls[0][0].errback(ValueError("oops"))
        self.failUnlessEqual(len(flushed), 1)
        f = flushed[0]
        self.failUnless(isinstance(f, Failure))
        self.failUnless(f.check(pipeline.PipelineError))
        f2 = f.value.error
        self.failUnless(f2.check(ValueError))

        self.calls[1][0].callback("two-result")
        self.calls[2][0].errback(ValueError("three-error"))

        del d1,d2,d3,d4
        gc.collect() # for PyPy

class SampleError(Exception):
    pass

@ -59,6 +59,7 @@ from .common import (
from allmydata.interfaces import IMutableFileNode, SDMF_VERSION, MDMF_VERSION
from allmydata.mutable import servermap, publish, retrieve
from .. import common_util as testutil
from ..common_py3 import TimezoneMixin
from ..common_web import (
    do_http,
    Error,
@ -310,7 +311,7 @@ class FakeClient(_Client):

MUTABLE_SIZELIMIT = FakeMutableFileNode.MUTABLE_SIZELIMIT

class WebMixin(testutil.TimezoneMixin):
class WebMixin(TimezoneMixin):
    def setUp(self):
        self.setTimezone('UTC-13:00')
        self.s = FakeClient()

@ -15,25 +15,40 @@ if PY2:

# Keep these sorted alphabetically, to reduce merge conflicts:
PORTED_MODULES = [
    "allmydata.hashtree",
    "allmydata.util.abbreviate",
    "allmydata.util.assertutil",
    "allmydata.util.base32",
    "allmydata.util.base62",
    "allmydata.util.deferredutil",
    "allmydata.util.hashutil",
    "allmydata.util.humanreadable",
    "allmydata.util.mathutil",
    "allmydata.util.namespace",
    "allmydata.util.netstring",
    "allmydata.util.observer",
    "allmydata.util.pipeline",
    "allmydata.util.pollmixin",
    "allmydata.util._python3",
    "allmydata.util.spans",
    "allmydata.util.time_format",
    "allmydata.test.common_py3",
]

PORTED_TEST_MODULES = [
    "allmydata.test.test_abbreviate",
    "allmydata.test.test_base32",
    "allmydata.test.test_base62",
    "allmydata.test.test_deferredutil",
    "allmydata.test.test_hashtree",
    "allmydata.test.test_hashutil",
    "allmydata.test.test_humanreadable",
    "allmydata.test.test_netstring",
    "allmydata.test.test_observer",
    "allmydata.test.test_pipeline",
    "allmydata.test.test_python3",
    "allmydata.test.test_spans",
    "allmydata.test.test_time_format",
]


@ -1,3 +1,16 @@
"""
Convert timestamps to abbreviated English text.

Ported to Python 3.
"""
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals

from future.utils import PY2
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min  # noqa: F401

import re
from datetime import timedelta
@ -9,6 +22,10 @@ MONTH = 30*DAY
YEAR = 365*DAY

def abbreviate_time(s):
    """
    Given time in seconds (float or int) or timedelta, summarize as English by
    returning unicode string.
    """
    postfix = ''
    if isinstance(s, timedelta):
        # this feels counter-intuitive that positive numbers in a
@ -45,6 +62,9 @@ def abbreviate_time(s):
    return _plural(s / YEAR, "year")
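
# Usage sketch (values taken from the Abbreviate tests above): bare numbers
# are summarized as durations, timedeltas as relative times.
#
#   abbreviate_time(25)                     -> '25 seconds'
#   abbreviate_time(timedelta(seconds=25))  -> '25 seconds ago'
#   abbreviate_time(timedelta(minutes=-5))  -> '5 minutes in the future'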

def abbreviate_space(s, SI=True):
    """
    Given size in bytes summarize as English by returning unicode string.
    """
    if s is None:
        return "unknown"
    if SI:

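# Usage sketch (vectors from the Abbreviate.test_space assertions above):
#
#   abbreviate_space(1234567, SI=True)   -> '1.23 MB'
#   abbreviate_space(1234567, SI=False)  -> '1.18 MiB'
#   abbreviate_space_both(1234567)       -> '(1.23 MB, 1.18 MiB)'
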
@ -1,3 +1,19 @@
"""
Hashing utilities.

Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from future.utils import PY2
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min  # noqa: F401

from past.builtins import chr as byteschr

import os
import hashlib
from allmydata.util.netstring import netstring
@ -60,34 +76,34 @@ def tagged_pair_hash(tag, val1, val2, truncate_to=None):


# immutable
STORAGE_INDEX_TAG = "allmydata_immutable_key_to_storage_index_v1"
BLOCK_TAG = "allmydata_encoded_subshare_v1"
UEB_TAG = "allmydata_uri_extension_v1"
PLAINTEXT_TAG = "allmydata_plaintext_v1"
CIPHERTEXT_TAG = "allmydata_crypttext_v1"
CIPHERTEXT_SEGMENT_TAG = "allmydata_crypttext_segment_v1"
PLAINTEXT_SEGMENT_TAG = "allmydata_plaintext_segment_v1"
CONVERGENT_ENCRYPTION_TAG = "allmydata_immutable_content_to_key_with_added_secret_v1+"
STORAGE_INDEX_TAG = b"allmydata_immutable_key_to_storage_index_v1"
BLOCK_TAG = b"allmydata_encoded_subshare_v1"
UEB_TAG = b"allmydata_uri_extension_v1"
PLAINTEXT_TAG = b"allmydata_plaintext_v1"
CIPHERTEXT_TAG = b"allmydata_crypttext_v1"
CIPHERTEXT_SEGMENT_TAG = b"allmydata_crypttext_segment_v1"
PLAINTEXT_SEGMENT_TAG = b"allmydata_plaintext_segment_v1"
CONVERGENT_ENCRYPTION_TAG = b"allmydata_immutable_content_to_key_with_added_secret_v1+"

CLIENT_RENEWAL_TAG = "allmydata_client_renewal_secret_v1"
CLIENT_CANCEL_TAG = "allmydata_client_cancel_secret_v1"
FILE_RENEWAL_TAG = "allmydata_file_renewal_secret_v1"
FILE_CANCEL_TAG = "allmydata_file_cancel_secret_v1"
BUCKET_RENEWAL_TAG = "allmydata_bucket_renewal_secret_v1"
BUCKET_CANCEL_TAG = "allmydata_bucket_cancel_secret_v1"
CLIENT_RENEWAL_TAG = b"allmydata_client_renewal_secret_v1"
CLIENT_CANCEL_TAG = b"allmydata_client_cancel_secret_v1"
FILE_RENEWAL_TAG = b"allmydata_file_renewal_secret_v1"
FILE_CANCEL_TAG = b"allmydata_file_cancel_secret_v1"
BUCKET_RENEWAL_TAG = b"allmydata_bucket_renewal_secret_v1"
BUCKET_CANCEL_TAG = b"allmydata_bucket_cancel_secret_v1"

# mutable
MUTABLE_WRITEKEY_TAG = "allmydata_mutable_privkey_to_writekey_v1"
MUTABLE_WRITE_ENABLER_MASTER_TAG = "allmydata_mutable_writekey_to_write_enabler_master_v1"
MUTABLE_WRITE_ENABLER_TAG = "allmydata_mutable_write_enabler_master_and_nodeid_to_write_enabler_v1"
MUTABLE_PUBKEY_TAG = "allmydata_mutable_pubkey_to_fingerprint_v1"
MUTABLE_READKEY_TAG = "allmydata_mutable_writekey_to_readkey_v1"
MUTABLE_DATAKEY_TAG = "allmydata_mutable_readkey_to_datakey_v1"
MUTABLE_STORAGEINDEX_TAG = "allmydata_mutable_readkey_to_storage_index_v1"
MUTABLE_WRITEKEY_TAG = b"allmydata_mutable_privkey_to_writekey_v1"
MUTABLE_WRITE_ENABLER_MASTER_TAG = b"allmydata_mutable_writekey_to_write_enabler_master_v1"
MUTABLE_WRITE_ENABLER_TAG = b"allmydata_mutable_write_enabler_master_and_nodeid_to_write_enabler_v1"
MUTABLE_PUBKEY_TAG = b"allmydata_mutable_pubkey_to_fingerprint_v1"
MUTABLE_READKEY_TAG = b"allmydata_mutable_writekey_to_readkey_v1"
MUTABLE_DATAKEY_TAG = b"allmydata_mutable_readkey_to_datakey_v1"
MUTABLE_STORAGEINDEX_TAG = b"allmydata_mutable_readkey_to_storage_index_v1"

# dirnodes
DIRNODE_CHILD_WRITECAP_TAG = "allmydata_mutable_writekey_and_salt_to_dirnode_child_capkey_v1"
DIRNODE_CHILD_SALT_TAG = "allmydata_dirnode_child_rwcap_to_salt_v1"
DIRNODE_CHILD_WRITECAP_TAG = b"allmydata_mutable_writekey_and_salt_to_dirnode_child_capkey_v1"
DIRNODE_CHILD_SALT_TAG = b"allmydata_dirnode_child_rwcap_to_salt_v1"


def storage_index_hash(key):
@ -158,8 +174,8 @@ def convergence_hash(k, n, segsize, data, convergence):


def convergence_hasher(k, n, segsize, convergence):
    assert isinstance(convergence, str)
    param_tag = netstring("%d,%d,%d" % (k, n, segsize))
    assert isinstance(convergence, bytes)
    param_tag = netstring(b"%d,%d,%d" % (k, n, segsize))
    tag = CONVERGENT_ENCRYPTION_TAG + netstring(convergence) + param_tag
    return tagged_hasher(tag, KEYLEN)

@ -197,12 +213,13 @@ def bucket_cancel_secret_hash(file_cancel_secret, peerid):


def _xor(a, b):
    return "".join([chr(ord(c) ^ ord(b)) for c in a])
    return b"".join([byteschr(c ^ b) for c in a])


def hmac(tag, data):
    ikey = _xor(tag, "\x36")
    okey = _xor(tag, "\x5c")
    tag = bytes(tag)  # Make sure it matches Python 3 behavior
    ikey = _xor(tag, 0x36)
    okey = _xor(tag, 0x5c)
    h1 = hashlib.sha256(ikey + data).digest()
    h2 = hashlib.sha256(okey + h1).digest()
    return h2
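
# Sanity sketch for the bytes-only API after the port (informal): both
# arguments must be bytes, and the known-answer test above pins the output.
#
#   base32.b2a(hmac(b"tag", b""))
#   # -> c54ypfi6pevb3nvo6ba42jtglpkry2kbdopqsi7dgrm4r7tw5sra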

@ -251,7 +268,7 @@ def timing_safe_compare(a, b):
    return bool(tagged_hash(n, a) == tagged_hash(n, b))


BACKUPDB_DIRHASH_TAG = "allmydata_backupdb_dirhash_v1"
BACKUPDB_DIRHASH_TAG = b"allmydata_backupdb_dirhash_v1"


def backupdb_dirhash(contents):

@ -1,8 +1,23 @@
"""
Netstring encoding and decoding.

Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from future.utils import PY2
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min  # noqa: F401

from past.builtins import long


def netstring(s):
    assert isinstance(s, str), s # no unicode here
    return "%d:%s," % (len(s), s,)
    assert isinstance(s, bytes), s # no unicode here
    return b"%d:%s," % (len(s), s,)

def split_netstring(data, numstrings,
                    position=0,
@ -13,18 +28,19 @@ def split_netstring(data, numstrings,
    byte which was not consumed (the 'required_trailer', if any, counts as
    consumed). If 'required_trailer' is not None, throw ValueError if leftover
    data does not exactly equal 'required_trailer'."""

    assert type(position) in (int, long), (repr(position), type(position))
    assert isinstance(data, bytes)
    assert required_trailer is None or isinstance(required_trailer, bytes)
    assert isinstance(position, (int, long)), (repr(position), type(position))
    elements = []
    assert numstrings >= 0
    while position < len(data):
        colon = data.index(":", position)
        colon = data.index(b":", position)
        length = int(data[position:colon])
        string = data[colon+1:colon+1+length]
        assert len(string) == length, (len(string), length)
        elements.append(string)
        position = colon+1+length
        assert data[position] == ",", position
        assert data[position] == b","[0], position
        position += 1
        if len(elements) == numstrings:
            break

@ -1,4 +1,17 @@
# -*- test-case-name: allmydata.test.test_observer -*-
"""
Observer for Twisted code.

Ported to Python 3.
"""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from future.utils import PY2
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min  # noqa: F401

import weakref
from twisted.internet import defer

@ -1,9 +1,24 @@
"""
A pipeline of Deferreds.

Ported to Python 3.
"""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from future.utils import PY2
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min  # noqa: F401

from twisted.internet import defer
from twisted.python.failure import Failure
from twisted.python import log
from allmydata.util.assertutil import precondition


class PipelineError(Exception):
    """One of the pipelined messages returned an error. The received Failure
    object is stored in my .error attribute."""

@ -1,5 +1,18 @@
# ISO-8601:
# http://www.cl.cam.ac.uk/~mgk25/iso-time.html
"""
Time formatting utilities.

ISO-8601:
http://www.cl.cam.ac.uk/~mgk25/iso-time.html
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from future.utils import PY2
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min  # noqa: F401
from future.utils import native_str

import calendar, datetime, re, time

@ -14,6 +27,7 @@ def iso_utc_date(now=None, t=time.time):
def iso_utc(now=None, sep='_', t=time.time):
    if now is None:
        now = t()
    sep = native_str(sep)  # Python 2 doesn't allow unicode input to isoformat
    return datetime.datetime.utcfromtimestamp(now).isoformat(sep)

def iso_utc_time_to_seconds(isotime, _conversion_re=re.compile(r"(?P<year>\d{4})-(?P<month>\d{2})-(?P<day>\d{2})[T_ ](?P<hour>\d{2}):(?P<minute>\d{2}):(?P<second>\d{2})(?P<subsecond>\.\d+)?")):
@ -74,11 +88,11 @@ def format_delta(time_1, time_2):
    delta = int(time_2 - time_1)
    seconds = delta % 60
    delta -= seconds
    minutes = (delta / 60) % 60
    minutes = (delta // 60) % 60
    delta -= minutes * 60
    hours = delta / (60*60) % 24
    hours = delta // (60*60) % 24
    delta -= hours * 24
    days = delta / (24*60*60)
    days = delta // (24*60*60)
    if not days:
        if not hours:
            if not minutes:
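
# Why the / -> // changes above matter: on Python 3, / is true division, so
# the old expressions would yield floats (e.g. 3600 / 60 == 60.0), whereas
# // keeps the arithmetic integral on both Python 2 and 3, matching the
# '1h 0m 0s'-style strings asserted in test_format_delta.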