mirror of https://github.com/tahoe-lafs/tahoe-lafs.git
synced 2024-12-23 14:52:26 +00:00

Merge branch 'master' into 3353.observer-and-pipeline-py3

This commit is contained in: dd4737f20c

.github/workflows/ci.yml (vendored; 1 line changed)
@@ -72,6 +72,7 @@ jobs:
       matrix:
         os:
           - macos-latest
           - windows-latest
         python-version:
           - 2.7
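A job matrix expands to the cross product of its axes, one CI job per
combination; the hunk above shows only part of the real matrix. A standalone
sketch of that expansion (illustration only, not repo code):

    from itertools import product

    # One CI job is created per (os, python-version) pair.
    oses = ["macos-latest", "windows-latest"]
    pythons = ["2.7"]
    print(list(product(oses, pythons)))
    # [('macos-latest', '2.7'), ('windows-latest', '2.7')]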
integration/conftest.py

@@ -358,7 +358,10 @@ def bob(reactor, temp_dir, introducer_furl, flog_gatherer, storage_nodes, reques

 @pytest.fixture(scope='session')
+@pytest.mark.skipif(sys.platform.startswith('win'),
+                    'Tor tests are unstable on Windows')
 def chutney(reactor, temp_dir):

     chutney_dir = join(temp_dir, 'chutney')
     mkdir(chutney_dir)
@@ -406,7 +409,10 @@ def chutney(reactor, temp_dir):

 @pytest.fixture(scope='session')
+@pytest.mark.skipif(sys.platform.startswith('win'),
+                    reason='Tor tests are unstable on Windows')
 def tor_network(reactor, temp_dir, chutney, request):

     # this is the actual "chutney" script at the root of a chutney checkout
     chutney_dir = chutney
     chut = join(chutney_dir, 'chutney')
integration/test_tor.py

@@ -10,12 +10,21 @@ from six.moves import StringIO
 from twisted.internet.protocol import ProcessProtocol
 from twisted.internet.error import ProcessExitedAlready, ProcessDone
 from twisted.internet.defer import inlineCallbacks, Deferred

+import pytest
 import pytest_twisted

 import util

 # see "conftest.py" for the fixtures (e.g. "tor_network")

+# XXX: Integration tests that involve Tor do not run reliably on
+# Windows. They are skipped for now, in order to reduce CI noise.
+#
+# https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3347
+if sys.platform.startswith('win'):
+    pytest.skip('Skipping Tor tests on Windows', allow_module_level=True)
+
 @pytest_twisted.inlineCallbacks
 def test_onion_service_storage(reactor, request, temp_dir, flog_gatherer, tor_network, tor_introducer_furl):
     yield _create_anonymous_node(reactor, 'carol', 8008, request, temp_dir, flog_gatherer, tor_network, tor_introducer_furl)
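Two skip mechanisms appear in this commit: @pytest.mark.skipif decorating the
fixtures in conftest.py, and the module-level pytest.skip(...,
allow_module_level=True) above, which aborts collection of the whole module so
every test in it is reported as skipped. A minimal standalone sketch of the
module-level idiom (hypothetical file, not repo code):

    # test_skip_sketch.py
    import sys

    import pytest

    # With allow_module_level=True, pytest.skip() may be called at import
    # time; pytest then skips the entire module instead of raising an error.
    if sys.platform.startswith('win'):
        pytest.skip('skipping on Windows', allow_module_level=True)

    def test_something():
        assert True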
misc/python3/ratchet-passing

@@ -18,7 +18,26 @@ allmydata.test.test_deferredutil.DeferredUtilTests.test_failure
 allmydata.test.test_deferredutil.DeferredUtilTests.test_gather_results
 allmydata.test.test_deferredutil.DeferredUtilTests.test_success
 allmydata.test.test_deferredutil.DeferredUtilTests.test_wait_for_delayed_calls
+allmydata.test.test_hashtree.Complete.test_create
+allmydata.test.test_hashtree.Complete.test_dump
+allmydata.test.test_hashtree.Complete.test_needed_hashes
+allmydata.test.test_hashtree.Incomplete.test_check
+allmydata.test.test_hashtree.Incomplete.test_create
+allmydata.test.test_hashtree.Incomplete.test_depth_of
+allmydata.test.test_hashtree.Incomplete.test_large
+allmydata.test.test_hashtree.Incomplete.test_needed_hashes
+allmydata.test.test_hashutil.HashUtilTests.test_chk
+allmydata.test.test_hashutil.HashUtilTests.test_hashers
+allmydata.test.test_hashutil.HashUtilTests.test_known_answers
+allmydata.test.test_hashutil.HashUtilTests.test_random_key
+allmydata.test.test_hashutil.HashUtilTests.test_sha256d
+allmydata.test.test_hashutil.HashUtilTests.test_sha256d_truncated
+allmydata.test.test_hashutil.HashUtilTests.test_timing_safe_compare
 allmydata.test.test_humanreadable.HumanReadable.test_repr
+allmydata.test.test_netstring.Netstring.test_encode
+allmydata.test.test_netstring.Netstring.test_extra
+allmydata.test.test_netstring.Netstring.test_nested
+allmydata.test.test_netstring.Netstring.test_split
 allmydata.test.test_observer.Observer.test_lazy_oneshot
 allmydata.test.test_observer.Observer.test_observerlist
 allmydata.test.test_observer.Observer.test_oneshot
newsfragments/3320.minor (new file; empty)
newsfragments/3344.minor (new file; empty)
newsfragments/3354.minor (new file; 1 line)

@@ -0,0 +1 @@
src/allmydata/hashtree.py

@@ -1,7 +1,4 @@
 # -*- test-case-name: allmydata.test.test_hashtree -*-

-from allmydata.util import mathutil # from the pyutil library
-
 """
 Read and write chunks from files.
@@ -50,6 +47,17 @@ or implied. It probably won't make your computer catch on fire,
 or eat your children, but it might. Use at your own risk.
 """

+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min  # noqa: F401
+
+from allmydata.util import mathutil # from the pyutil library
+
 from allmydata.util import base32
 from allmydata.util.hashutil import tagged_hash, tagged_pair_hash
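The __future__ / future.utils preamble repeated in the ported modules is the
python-future compatibility idiom: on Python 2 the "builtins" imports replace
the builtin types and functions with backports that behave like their Python 3
counterparts (for example, bytes indexing yields ints and range is lazy), so
the module body can be written in Python 3 style. A small sketch of the
effect, assuming the python-future package is installed:

    from future.utils import PY2
    if PY2:
        from builtins import bytes, range  # Python-3-style backports

    data = bytes(b"hi")
    assert data[0] == 104              # an int on both Pythons (ord('h'))
    assert list(range(3)) == [0, 1, 2]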
@@ -170,9 +178,10 @@ def depth_of(i):
     return mathutil.log_floor(i+1, 2)

 def empty_leaf_hash(i):
-    return tagged_hash('Merkle tree empty leaf', "%d" % i)
+    return tagged_hash(b'Merkle tree empty leaf', b"%d" % i)

 def pair_hash(a, b):
-    return tagged_pair_hash('Merkle tree internal node', a, b)
+    return tagged_pair_hash(b'Merkle tree internal node', a, b)

 class HashTree(CompleteBinaryTreeMixin, list):
     """
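For context: the tree is built bottom-up, each internal node hashing its two
children, with empty_leaf_hash filling unused leaf slots so the leaf count can
be padded to a power of two. A rough standalone sketch of one such build,
using plain SHA-256 in place of the repo's tagged_pair_hash (an assumption for
illustration, not the real construction):

    import hashlib

    def pair_hash_sketch(a, b):
        # Stand-in for tagged_pair_hash: hash the two children together.
        return hashlib.sha256(a + b).digest()

    leaves = [hashlib.sha256(b"%d" % i).digest() for i in range(4)]
    row = leaves
    while len(row) > 1:
        row = [pair_hash_sketch(row[2*i], row[2*i+1])
               for i in range(len(row)//2)]
    root = row[0]
    assert len(root) == 32  # a 32-byte Merkle root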
@@ -215,7 +224,7 @@ class HashTree(CompleteBinaryTreeMixin, list):
         while len(rows[-1]) != 1:
             last = rows[-1]
             rows += [[pair_hash(last[2*i], last[2*i+1])
-                      for i in xrange(len(last)//2)]]
+                      for i in range(len(last)//2)]]
         # Flatten the list of rows into a single list.
         rows.reverse()
         self[:] = sum(rows, [])
@@ -289,7 +298,7 @@ class IncompleteHashTree(CompleteBinaryTreeMixin, list):
         rows = [L]
         while len(rows[-1]) != 1:
             last = rows[-1]
-            rows += [[None for i in xrange(len(last)//2)]]
+            rows += [[None for i in range(len(last)//2)]]
         # Flatten the list of rows into a single list.
         rows.reverse()
         self[:] = sum(rows, [])
@@ -372,12 +381,12 @@ class IncompleteHashTree(CompleteBinaryTreeMixin, list):

         assert isinstance(hashes, dict)
         for h in hashes.values():
-            assert isinstance(h, str)
+            assert isinstance(h, bytes)
         assert isinstance(leaves, dict)
         for h in leaves.values():
-            assert isinstance(h, str)
+            assert isinstance(h, bytes)
         new_hashes = hashes.copy()
-        for leafnum,leafhash in leaves.iteritems():
+        for leafnum,leafhash in leaves.items():
             hashnum = self.first_leaf_num + leafnum
             if hashnum in new_hashes:
                 if new_hashes[hashnum] != leafhash:
@@ -416,7 +425,7 @@ class IncompleteHashTree(CompleteBinaryTreeMixin, list):

         # first we provisionally add all hashes to the tree, comparing
         # any duplicates
-        for i,h in new_hashes.iteritems():
+        for i,h in new_hashes.items():
             if self[i]:
                 if self[i] != h:
                     raise BadHashError("new hash %s does not match "
src/allmydata/test/test_hashtree.py

@@ -1,4 +1,18 @@
 # -*- test-case-name: allmydata.test.test_hashtree -*-
+"""
+Tests for allmydata.hashtree.
+
+Ported to Python 3.
+"""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min  # noqa: F401

 from twisted.trial import unittest
@@ -7,8 +21,8 @@ from allmydata import hashtree

 def make_tree(numleaves):
-    leaves = ["%d" % i for i in range(numleaves)]
-    leaf_hashes = [tagged_hash("tag", leaf) for leaf in leaves]
+    leaves = [b"%d" % i for i in range(numleaves)]
+    leaf_hashes = [tagged_hash(b"tag", leaf) for leaf in leaves]
     ht = hashtree.HashTree(leaf_hashes)
     return ht
@@ -20,7 +34,7 @@ class Complete(unittest.TestCase):
         ht = make_tree(8)
         root = ht[0]
         self.failUnlessEqual(len(root), 32)
-        self.failUnlessEqual(ht.get_leaf(0), tagged_hash("tag", "0"))
+        self.failUnlessEqual(ht.get_leaf(0), tagged_hash(b"tag", b"0"))
         self.failUnlessRaises(IndexError, ht.get_leaf, 8)
         self.failUnlessEqual(ht.get_leaf_index(0), 7)
         self.failUnlessRaises(IndexError, ht.parent, 0)
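The assertions pin down the flat array layout used for the complete binary
tree: breadth-first, root at index 0, and for a perfect tree with n leaves the
leaves sit at indices n-1 through 2n-2, which is why get_leaf_index(0) is 7
when there are 8 leaves. A quick check of that arithmetic (sketch, not repo
code):

    def leaf_index(leafnum, numleaves):
        # Breadth-first layout: parent(i) = (i-1)//2,
        # children(i) = 2i+1 and 2i+2, first leaf at numleaves-1.
        return (numleaves - 1) + leafnum

    assert leaf_index(0, 8) == 7
    assert leaf_index(7, 8) == 14   # last of the 2n-1 == 15 nodes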
@@ -143,7 +157,7 @@ class Incomplete(unittest.TestCase):
         current_hashes = list(iht)
         # this should fail because there aren't enough hashes known
         try:
-            iht.set_hashes(leaves={0: tagged_hash("tag", "0")})
+            iht.set_hashes(leaves={0: tagged_hash(b"tag", b"0")})
         except hashtree.NotEnoughHashesError:
             pass
         else:
@@ -157,7 +171,7 @@ class Incomplete(unittest.TestCase):
         chain = {0: ht[0], 2: ht[2], 4: ht[4], 8: ht[8]}
         # this should fail because the leaf hash is just plain wrong
         try:
-            iht.set_hashes(chain, leaves={0: tagged_hash("bad tag", "0")})
+            iht.set_hashes(chain, leaves={0: tagged_hash(b"bad tag", b"0")})
         except hashtree.BadHashError:
             pass
         else:
@@ -166,18 +180,18 @@ class Incomplete(unittest.TestCase):
         # this should fail because we give it conflicting hashes: one as an
         # internal node, another as a leaf
         try:
-            iht.set_hashes(chain, leaves={1: tagged_hash("bad tag", "1")})
+            iht.set_hashes(chain, leaves={1: tagged_hash(b"bad tag", b"1")})
         except hashtree.BadHashError:
             pass
         else:
             self.fail("didn't catch bad hash")

         bad_chain = chain.copy()
-        bad_chain[2] = ht[2] + "BOGUS"
+        bad_chain[2] = ht[2] + b"BOGUS"

         # this should fail because the internal hash is wrong
         try:
-            iht.set_hashes(bad_chain, leaves={0: tagged_hash("tag", "0")})
+            iht.set_hashes(bad_chain, leaves={0: tagged_hash(b"tag", b"0")})
         except hashtree.BadHashError:
             pass
         else:
@@ -185,23 +199,23 @@ class Incomplete(unittest.TestCase):

         # this should succeed
         try:
-            iht.set_hashes(chain, leaves={0: tagged_hash("tag", "0")})
+            iht.set_hashes(chain, leaves={0: tagged_hash(b"tag", b"0")})
         except hashtree.BadHashError as e:
             self.fail("bad hash: %s" % e)

-        self.failUnlessEqual(ht.get_leaf(0), tagged_hash("tag", "0"))
+        self.failUnlessEqual(ht.get_leaf(0), tagged_hash(b"tag", b"0"))
         self.failUnlessRaises(IndexError, ht.get_leaf, 8)

         # this should succeed too
         try:
-            iht.set_hashes(leaves={1: tagged_hash("tag", "1")})
+            iht.set_hashes(leaves={1: tagged_hash(b"tag", b"1")})
         except hashtree.BadHashError:
             self.fail("bad hash")

         # this should fail because we give it hashes that conflict with some
         # that we added successfully before
         try:
-            iht.set_hashes(leaves={1: tagged_hash("bad tag", "1")})
+            iht.set_hashes(leaves={1: tagged_hash(b"bad tag", b"1")})
         except hashtree.BadHashError:
             pass
         else:
@@ -214,6 +228,6 @@ class Incomplete(unittest.TestCase):

         # this should succeed
         try:
-            iht.set_hashes(chain, leaves={4: tagged_hash("tag", "4")})
+            iht.set_hashes(chain, leaves={4: tagged_hash(b"tag", b"4")})
         except hashtree.BadHashError as e:
             self.fail("bad hash: %s" % e)
src/allmydata/test/test_hashutil.py (new file; 135 lines)

@@ -0,0 +1,135 @@
"""
|
||||
Tests for allmydata.util.hashutil.
|
||||
|
||||
Ported to Python 3.
|
||||
"""
|
||||
from __future__ import absolute_import
|
||||
from __future__ import division
|
||||
from __future__ import print_function
|
||||
from __future__ import unicode_literals
|
||||
|
||||
from future.utils import PY2
|
||||
if PY2:
|
||||
from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401
|
||||
|
||||
from twisted.trial import unittest
|
||||
|
||||
from allmydata.util import hashutil, base32
|
||||
|
||||
|
||||
class HashUtilTests(unittest.TestCase):
|
||||
|
||||
def test_random_key(self):
|
||||
k = hashutil.random_key()
|
||||
self.failUnlessEqual(len(k), hashutil.KEYLEN)
|
||||
self.assertIsInstance(k, bytes)
|
||||
|
||||
def test_sha256d(self):
|
||||
h1 = hashutil.tagged_hash(b"tag1", b"value")
|
||||
self.assertIsInstance(h1, bytes)
|
||||
h2 = hashutil.tagged_hasher(b"tag1")
|
||||
h2.update(b"value")
|
||||
h2a = h2.digest()
|
||||
h2b = h2.digest()
|
||||
self.assertIsInstance(h2a, bytes)
|
||||
self.failUnlessEqual(h1, h2a)
|
||||
self.failUnlessEqual(h2a, h2b)
|
||||
|
||||
def test_sha256d_truncated(self):
|
||||
h1 = hashutil.tagged_hash(b"tag1", b"value", 16)
|
||||
h2 = hashutil.tagged_hasher(b"tag1", 16)
|
||||
h2.update(b"value")
|
||||
h2 = h2.digest()
|
||||
self.failUnlessEqual(len(h1), 16)
|
||||
self.failUnlessEqual(len(h2), 16)
|
||||
self.failUnlessEqual(h1, h2)
|
||||
|
||||
def test_chk(self):
|
||||
h1 = hashutil.convergence_hash(3, 10, 1000, b"data", b"secret")
|
||||
h2 = hashutil.convergence_hasher(3, 10, 1000, b"secret")
|
||||
h2.update(b"data")
|
||||
h2 = h2.digest()
|
||||
self.failUnlessEqual(h1, h2)
|
||||
self.assertIsInstance(h1, bytes)
|
||||
self.assertIsInstance(h2, bytes)
|
||||
|
||||
def test_hashers(self):
|
||||
h1 = hashutil.block_hash(b"foo")
|
||||
h2 = hashutil.block_hasher()
|
||||
h2.update(b"foo")
|
||||
self.failUnlessEqual(h1, h2.digest())
|
||||
self.assertIsInstance(h1, bytes)
|
||||
|
||||
h1 = hashutil.uri_extension_hash(b"foo")
|
||||
h2 = hashutil.uri_extension_hasher()
|
||||
h2.update(b"foo")
|
||||
self.failUnlessEqual(h1, h2.digest())
|
||||
self.assertIsInstance(h1, bytes)
|
||||
|
||||
h1 = hashutil.plaintext_hash(b"foo")
|
||||
h2 = hashutil.plaintext_hasher()
|
||||
h2.update(b"foo")
|
||||
self.failUnlessEqual(h1, h2.digest())
|
||||
self.assertIsInstance(h1, bytes)
|
||||
|
||||
h1 = hashutil.crypttext_hash(b"foo")
|
||||
h2 = hashutil.crypttext_hasher()
|
||||
h2.update(b"foo")
|
||||
self.failUnlessEqual(h1, h2.digest())
|
||||
self.assertIsInstance(h1, bytes)
|
||||
|
||||
h1 = hashutil.crypttext_segment_hash(b"foo")
|
||||
h2 = hashutil.crypttext_segment_hasher()
|
||||
h2.update(b"foo")
|
||||
self.failUnlessEqual(h1, h2.digest())
|
||||
self.assertIsInstance(h1, bytes)
|
||||
|
||||
h1 = hashutil.plaintext_segment_hash(b"foo")
|
||||
h2 = hashutil.plaintext_segment_hasher()
|
||||
h2.update(b"foo")
|
||||
self.failUnlessEqual(h1, h2.digest())
|
||||
self.assertIsInstance(h1, bytes)
|
||||
|
||||
def test_timing_safe_compare(self):
|
||||
self.failUnless(hashutil.timing_safe_compare(b"a", b"a"))
|
||||
self.failUnless(hashutil.timing_safe_compare(b"ab", b"ab"))
|
||||
self.failIf(hashutil.timing_safe_compare(b"a", b"b"))
|
||||
self.failIf(hashutil.timing_safe_compare(b"a", b"aa"))
|
||||
|
||||
def _testknown(self, hashf, expected_a, *args):
|
||||
got = hashf(*args)
|
||||
self.assertIsInstance(got, bytes)
|
||||
got_a = base32.b2a(got)
|
||||
self.failUnlessEqual(got_a, expected_a)
|
||||
|
||||
def test_known_answers(self):
|
||||
# assert backwards compatibility
|
||||
self._testknown(hashutil.storage_index_hash, b"qb5igbhcc5esa6lwqorsy7e6am", b"")
|
||||
self._testknown(hashutil.block_hash, b"msjr5bh4evuh7fa3zw7uovixfbvlnstr5b65mrerwfnvjxig2jvq", b"")
|
||||
self._testknown(hashutil.uri_extension_hash, b"wthsu45q7zewac2mnivoaa4ulh5xvbzdmsbuyztq2a5fzxdrnkka", b"")
|
||||
self._testknown(hashutil.plaintext_hash, b"5lz5hwz3qj3af7n6e3arblw7xzutvnd3p3fjsngqjcb7utf3x3da", b"")
|
||||
self._testknown(hashutil.crypttext_hash, b"itdj6e4njtkoiavlrmxkvpreosscssklunhwtvxn6ggho4rkqwga", b"")
|
||||
self._testknown(hashutil.crypttext_segment_hash, b"aovy5aa7jej6ym5ikgwyoi4pxawnoj3wtaludjz7e2nb5xijb7aa", b"")
|
||||
self._testknown(hashutil.plaintext_segment_hash, b"4fdgf6qruaisyukhqcmoth4t3li6bkolbxvjy4awwcpprdtva7za", b"")
|
||||
self._testknown(hashutil.convergence_hash, b"3mo6ni7xweplycin6nowynw2we", 3, 10, 100, b"", b"converge")
|
||||
self._testknown(hashutil.my_renewal_secret_hash, b"ujhr5k5f7ypkp67jkpx6jl4p47pyta7hu5m527cpcgvkafsefm6q", b"")
|
||||
self._testknown(hashutil.my_cancel_secret_hash, b"rjwzmafe2duixvqy6h47f5wfrokdziry6zhx4smew4cj6iocsfaa", b"")
|
||||
self._testknown(hashutil.file_renewal_secret_hash, b"hzshk2kf33gzbd5n3a6eszkf6q6o6kixmnag25pniusyaulqjnia", b"", b"si")
|
||||
self._testknown(hashutil.file_cancel_secret_hash, b"bfciwvr6w7wcavsngxzxsxxaszj72dej54n4tu2idzp6b74g255q", b"", b"si")
|
||||
self._testknown(hashutil.bucket_renewal_secret_hash, b"e7imrzgzaoashsncacvy3oysdd2m5yvtooo4gmj4mjlopsazmvuq", b"", b"\x00"*20)
|
||||
self._testknown(hashutil.bucket_cancel_secret_hash, b"dvdujeyxeirj6uux6g7xcf4lvesk632aulwkzjar7srildvtqwma", b"", b"\x00"*20)
|
||||
self._testknown(hashutil.hmac, b"c54ypfi6pevb3nvo6ba42jtglpkry2kbdopqsi7dgrm4r7tw5sra", b"tag", b"")
|
||||
self._testknown(hashutil.mutable_rwcap_key_hash, b"6rvn2iqrghii5n4jbbwwqqsnqu", b"iv", b"wk")
|
||||
self._testknown(hashutil.ssk_writekey_hash, b"ykpgmdbpgbb6yqz5oluw2q26ye", b"")
|
||||
self._testknown(hashutil.ssk_write_enabler_master_hash, b"izbfbfkoait4dummruol3gy2bnixrrrslgye6ycmkuyujnenzpia", b"")
|
||||
self._testknown(hashutil.ssk_write_enabler_hash, b"fuu2dvx7g6gqu5x22vfhtyed7p4pd47y5hgxbqzgrlyvxoev62tq", b"wk", b"\x00"*20)
|
||||
self._testknown(hashutil.ssk_pubkey_fingerprint_hash, b"3opzw4hhm2sgncjx224qmt5ipqgagn7h5zivnfzqycvgqgmgz35q", b"")
|
||||
self._testknown(hashutil.ssk_readkey_hash, b"vugid4as6qbqgeq2xczvvcedai", b"")
|
||||
self._testknown(hashutil.ssk_readkey_data_hash, b"73wsaldnvdzqaf7v4pzbr2ae5a", b"iv", b"rk")
|
||||
self._testknown(hashutil.ssk_storage_index_hash, b"j7icz6kigb6hxrej3tv4z7ayym", b"")
|
||||
|
||||
self._testknown(hashutil.permute_server_hash,
|
||||
b"kb4354zeeurpo3ze5e275wzbynm6hlap", # b32(expected)
|
||||
b"SI", # peer selection index == storage_index
|
||||
base32.a2b(b"u33m4y7klhz3bypswqkozwetvabelhxt"), # seed
|
||||
)
|
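The test names here (test_sha256d, test_sha256d_truncated) and hashutil's
netstring import suggest the shape of tagged_hash: a double SHA-256
("SHA-256d") over the netstring-framed tag followed by the value, optionally
truncated. A rough reconstruction under that assumption (not the repo's exact
code; the known-answer vectors above are the authoritative definition):

    import hashlib

    def netstring(s):
        return b"%d:%s," % (len(s), s)

    def tagged_hash_sketch(tag, val, truncate_to=None):
        # SHA-256d: hash the hash, which blunts length-extension attacks.
        inner = hashlib.sha256(netstring(tag) + val).digest()
        outer = hashlib.sha256(inner).digest()
        return outer[:truncate_to] if truncate_to else outer

    assert len(tagged_hash_sketch(b"tag1", b"value")) == 32
    assert len(tagged_hash_sketch(b"tag1", b"value", 16)) == 16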
src/allmydata/test/test_netstring.py

@@ -1,36 +1,59 @@
+"""
+Tests for allmydata.util.netstring.
+
+Ported to Python 3.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min  # noqa: F401

 from twisted.trial import unittest

 from allmydata.util.netstring import netstring, split_netstring


 class Netstring(unittest.TestCase):
+    def test_encode(self):
+        """netstring() correctly encodes the given bytes."""
+        result = netstring(b"abc")
+        self.assertEqual(result, b"3:abc,")
+        self.assertIsInstance(result, bytes)
+
     def test_split(self):
-        a = netstring("hello") + netstring("world")
-        self.failUnlessEqual(split_netstring(a, 2), (["hello", "world"], len(a)))
-        self.failUnlessEqual(split_netstring(a, 2, required_trailer=""), (["hello", "world"], len(a)))
+        a = netstring(b"hello") + netstring(b"world")
+        for s in split_netstring(a, 2)[0]:
+            self.assertIsInstance(s, bytes)
+        self.failUnlessEqual(split_netstring(a, 2), ([b"hello", b"world"], len(a)))
+        self.failUnlessEqual(split_netstring(a, 2, required_trailer=b""), ([b"hello", b"world"], len(a)))
         self.failUnlessRaises(ValueError, split_netstring, a, 3)
-        self.failUnlessRaises(ValueError, split_netstring, a+" extra", 2, required_trailer="")
-        self.failUnlessEqual(split_netstring(a+" extra", 2), (["hello", "world"], len(a)))
-        self.failUnlessEqual(split_netstring(a+"++", 2, required_trailer="++"),
-                             (["hello", "world"], len(a)+2))
+        self.failUnlessRaises(ValueError, split_netstring, a+b" extra", 2, required_trailer=b"")
+        self.failUnlessEqual(split_netstring(a+b" extra", 2), ([b"hello", b"world"], len(a)))
+        self.failUnlessEqual(split_netstring(a+b"++", 2, required_trailer=b"++"),
+                             ([b"hello", b"world"], len(a)+2))
         self.failUnlessRaises(ValueError,
-                              split_netstring, a+"+", 2, required_trailer="not")
+                              split_netstring, a+b"+", 2, required_trailer=b"not")

     def test_extra(self):
-        a = netstring("hello")
-        self.failUnlessEqual(split_netstring(a, 1), (["hello"], len(a)))
-        b = netstring("hello") + "extra stuff"
+        a = netstring(b"hello")
+        self.failUnlessEqual(split_netstring(a, 1), ([b"hello"], len(a)))
+        b = netstring(b"hello") + b"extra stuff"
         self.failUnlessEqual(split_netstring(b, 1),
-                             (["hello"], len(a)))
+                             ([b"hello"], len(a)))

     def test_nested(self):
-        a = netstring("hello") + netstring("world") + "extra stuff"
-        b = netstring("a") + netstring("is") + netstring(a) + netstring(".")
+        a = netstring(b"hello") + netstring(b"world") + b"extra stuff"
+        b = netstring(b"a") + netstring(b"is") + netstring(a) + netstring(b".")
         (top, pos) = split_netstring(b, 4)
         self.failUnlessEqual(len(top), 4)
-        self.failUnlessEqual(top[0], "a")
-        self.failUnlessEqual(top[1], "is")
+        self.failUnlessEqual(top[0], b"a")
+        self.failUnlessEqual(top[1], b"is")
         self.failUnlessEqual(top[2], a)
-        self.failUnlessEqual(top[3], ".")
-        self.failUnlessRaises(ValueError, split_netstring, a, 2, required_trailer="")
+        self.failUnlessEqual(top[3], b".")
+        self.failUnlessRaises(ValueError, split_netstring, a, 2, required_trailer=b"")
         bottom = split_netstring(a, 2)
-        self.failUnlessEqual(bottom, (["hello", "world"], len(netstring("hello")+netstring("world"))))
+        self.failUnlessEqual(bottom, ([b"hello", b"world"], len(netstring(b"hello")+netstring(b"world"))))
src/allmydata/test/test_util.py

@@ -12,7 +12,7 @@ from twisted.trial import unittest
 from twisted.internet import defer, reactor
 from twisted.python.failure import Failure

-from allmydata.util import base32, idlib, mathutil, hashutil
+from allmydata.util import idlib, mathutil
 from allmydata.util import fileutil, abbreviate
 from allmydata.util import limiter, time_format, pollmixin
 from allmydata.util import statistics, dictutil, yamlutil
@@ -580,111 +580,6 @@ class PollMixinTests(unittest.TestCase):
         return d


-class HashUtilTests(unittest.TestCase):
-
-    def test_random_key(self):
-        k = hashutil.random_key()
-        self.failUnlessEqual(len(k), hashutil.KEYLEN)
-
-    def test_sha256d(self):
-        h1 = hashutil.tagged_hash("tag1", "value")
-        h2 = hashutil.tagged_hasher("tag1")
-        h2.update("value")
-        h2a = h2.digest()
-        h2b = h2.digest()
-        self.failUnlessEqual(h1, h2a)
-        self.failUnlessEqual(h2a, h2b)
-
-    def test_sha256d_truncated(self):
-        h1 = hashutil.tagged_hash("tag1", "value", 16)
-        h2 = hashutil.tagged_hasher("tag1", 16)
-        h2.update("value")
-        h2 = h2.digest()
-        self.failUnlessEqual(len(h1), 16)
-        self.failUnlessEqual(len(h2), 16)
-        self.failUnlessEqual(h1, h2)
-
-    def test_chk(self):
-        h1 = hashutil.convergence_hash(3, 10, 1000, "data", "secret")
-        h2 = hashutil.convergence_hasher(3, 10, 1000, "secret")
-        h2.update("data")
-        h2 = h2.digest()
-        self.failUnlessEqual(h1, h2)
-
-    def test_hashers(self):
-        h1 = hashutil.block_hash("foo")
-        h2 = hashutil.block_hasher()
-        h2.update("foo")
-        self.failUnlessEqual(h1, h2.digest())
-
-        h1 = hashutil.uri_extension_hash("foo")
-        h2 = hashutil.uri_extension_hasher()
-        h2.update("foo")
-        self.failUnlessEqual(h1, h2.digest())
-
-        h1 = hashutil.plaintext_hash("foo")
-        h2 = hashutil.plaintext_hasher()
-        h2.update("foo")
-        self.failUnlessEqual(h1, h2.digest())
-
-        h1 = hashutil.crypttext_hash("foo")
-        h2 = hashutil.crypttext_hasher()
-        h2.update("foo")
-        self.failUnlessEqual(h1, h2.digest())
-
-        h1 = hashutil.crypttext_segment_hash("foo")
-        h2 = hashutil.crypttext_segment_hasher()
-        h2.update("foo")
-        self.failUnlessEqual(h1, h2.digest())
-
-        h1 = hashutil.plaintext_segment_hash("foo")
-        h2 = hashutil.plaintext_segment_hasher()
-        h2.update("foo")
-        self.failUnlessEqual(h1, h2.digest())
-
-    def test_timing_safe_compare(self):
-        self.failUnless(hashutil.timing_safe_compare("a", "a"))
-        self.failUnless(hashutil.timing_safe_compare("ab", "ab"))
-        self.failIf(hashutil.timing_safe_compare("a", "b"))
-        self.failIf(hashutil.timing_safe_compare("a", "aa"))
-
-    def _testknown(self, hashf, expected_a, *args):
-        got = hashf(*args)
-        got_a = base32.b2a(got)
-        self.failUnlessEqual(got_a, expected_a)
-
-    def test_known_answers(self):
-        # assert backwards compatibility
-        self._testknown(hashutil.storage_index_hash, "qb5igbhcc5esa6lwqorsy7e6am", "")
-        self._testknown(hashutil.block_hash, "msjr5bh4evuh7fa3zw7uovixfbvlnstr5b65mrerwfnvjxig2jvq", "")
-        self._testknown(hashutil.uri_extension_hash, "wthsu45q7zewac2mnivoaa4ulh5xvbzdmsbuyztq2a5fzxdrnkka", "")
-        self._testknown(hashutil.plaintext_hash, "5lz5hwz3qj3af7n6e3arblw7xzutvnd3p3fjsngqjcb7utf3x3da", "")
-        self._testknown(hashutil.crypttext_hash, "itdj6e4njtkoiavlrmxkvpreosscssklunhwtvxn6ggho4rkqwga", "")
-        self._testknown(hashutil.crypttext_segment_hash, "aovy5aa7jej6ym5ikgwyoi4pxawnoj3wtaludjz7e2nb5xijb7aa", "")
-        self._testknown(hashutil.plaintext_segment_hash, "4fdgf6qruaisyukhqcmoth4t3li6bkolbxvjy4awwcpprdtva7za", "")
-        self._testknown(hashutil.convergence_hash, "3mo6ni7xweplycin6nowynw2we", 3, 10, 100, "", "converge")
-        self._testknown(hashutil.my_renewal_secret_hash, "ujhr5k5f7ypkp67jkpx6jl4p47pyta7hu5m527cpcgvkafsefm6q", "")
-        self._testknown(hashutil.my_cancel_secret_hash, "rjwzmafe2duixvqy6h47f5wfrokdziry6zhx4smew4cj6iocsfaa", "")
-        self._testknown(hashutil.file_renewal_secret_hash, "hzshk2kf33gzbd5n3a6eszkf6q6o6kixmnag25pniusyaulqjnia", "", "si")
-        self._testknown(hashutil.file_cancel_secret_hash, "bfciwvr6w7wcavsngxzxsxxaszj72dej54n4tu2idzp6b74g255q", "", "si")
-        self._testknown(hashutil.bucket_renewal_secret_hash, "e7imrzgzaoashsncacvy3oysdd2m5yvtooo4gmj4mjlopsazmvuq", "", "\x00"*20)
-        self._testknown(hashutil.bucket_cancel_secret_hash, "dvdujeyxeirj6uux6g7xcf4lvesk632aulwkzjar7srildvtqwma", "", "\x00"*20)
-        self._testknown(hashutil.hmac, "c54ypfi6pevb3nvo6ba42jtglpkry2kbdopqsi7dgrm4r7tw5sra", "tag", "")
-        self._testknown(hashutil.mutable_rwcap_key_hash, "6rvn2iqrghii5n4jbbwwqqsnqu", "iv", "wk")
-        self._testknown(hashutil.ssk_writekey_hash, "ykpgmdbpgbb6yqz5oluw2q26ye", "")
-        self._testknown(hashutil.ssk_write_enabler_master_hash, "izbfbfkoait4dummruol3gy2bnixrrrslgye6ycmkuyujnenzpia", "")
-        self._testknown(hashutil.ssk_write_enabler_hash, "fuu2dvx7g6gqu5x22vfhtyed7p4pd47y5hgxbqzgrlyvxoev62tq", "wk", "\x00"*20)
-        self._testknown(hashutil.ssk_pubkey_fingerprint_hash, "3opzw4hhm2sgncjx224qmt5ipqgagn7h5zivnfzqycvgqgmgz35q", "")
-        self._testknown(hashutil.ssk_readkey_hash, "vugid4as6qbqgeq2xczvvcedai", "")
-        self._testknown(hashutil.ssk_readkey_data_hash, "73wsaldnvdzqaf7v4pzbr2ae5a", "iv", "rk")
-        self._testknown(hashutil.ssk_storage_index_hash, "j7icz6kigb6hxrej3tv4z7ayym", "")
-
-        self._testknown(hashutil.permute_server_hash,
-                        "kb4354zeeurpo3ze5e275wzbynm6hlap", # b32(expected)
-                        "SI", # peer selection index == storage_index
-                        base32.a2b("u33m4y7klhz3bypswqkozwetvabelhxt"), # seed
-                        )
-
-
 class Abbreviate(unittest.TestCase):
     def test_abbrev_time_1s(self):
         diff = timedelta(seconds=1)
src/allmydata/util/_python3.py

@@ -15,13 +15,16 @@ if PY2:

 # Keep these sorted alphabetically, to reduce merge conflicts:
 PORTED_MODULES = [
+    "allmydata.hashtree",
     "allmydata.util.assertutil",
     "allmydata.util.base32",
     "allmydata.util.base62",
     "allmydata.util.deferredutil",
+    "allmydata.util.hashutil",
     "allmydata.util.humanreadable",
     "allmydata.util.mathutil",
     "allmydata.util.namespace",
+    "allmydata.util.netstring",
     "allmydata.util.observer",
     "allmydata.util.pipeline",
     "allmydata.util.pollmixin",
@@ -32,7 +35,10 @@ PORTED_TEST_MODULES = [
     "allmydata.test.test_base32",
     "allmydata.test.test_base62",
     "allmydata.test.test_deferredutil",
+    "allmydata.test.test_hashtree",
+    "allmydata.test.test_hashutil",
     "allmydata.test.test_humanreadable",
+    "allmydata.test.test_netstring",
    "allmydata.test.test_observer",
    "allmydata.test.test_pipeline",
    "allmydata.test.test_python3",
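These registries drive the Python 3 porting ratchet: modules and test modules
are listed once ported, so CI can exercise just that subset (the
ratchet-passing list earlier in this diff tracks individual passing tests). A
hypothetical one-liner, not from the repo, showing how such a registry can
feed Twisted's trial runner:

    import subprocess

    from allmydata.util._python3 import PORTED_TEST_MODULES

    # Run only the test modules that have been ported so far.
    subprocess.check_call(["trial"] + PORTED_TEST_MODULES)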
src/allmydata/util/hashutil.py

@@ -1,3 +1,19 @@
+"""
+Hashing utilities.
+
+Ported to Python 3.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min  # noqa: F401
+
+from past.builtins import chr as byteschr
+
 import os
 import hashlib
 from allmydata.util.netstring import netstring
@@ -60,34 +76,34 @@ def tagged_pair_hash(tag, val1, val2, truncate_to=None):


 # immutable
-STORAGE_INDEX_TAG = "allmydata_immutable_key_to_storage_index_v1"
-BLOCK_TAG = "allmydata_encoded_subshare_v1"
-UEB_TAG = "allmydata_uri_extension_v1"
-PLAINTEXT_TAG = "allmydata_plaintext_v1"
-CIPHERTEXT_TAG = "allmydata_crypttext_v1"
-CIPHERTEXT_SEGMENT_TAG = "allmydata_crypttext_segment_v1"
-PLAINTEXT_SEGMENT_TAG = "allmydata_plaintext_segment_v1"
-CONVERGENT_ENCRYPTION_TAG = "allmydata_immutable_content_to_key_with_added_secret_v1+"
+STORAGE_INDEX_TAG = b"allmydata_immutable_key_to_storage_index_v1"
+BLOCK_TAG = b"allmydata_encoded_subshare_v1"
+UEB_TAG = b"allmydata_uri_extension_v1"
+PLAINTEXT_TAG = b"allmydata_plaintext_v1"
+CIPHERTEXT_TAG = b"allmydata_crypttext_v1"
+CIPHERTEXT_SEGMENT_TAG = b"allmydata_crypttext_segment_v1"
+PLAINTEXT_SEGMENT_TAG = b"allmydata_plaintext_segment_v1"
+CONVERGENT_ENCRYPTION_TAG = b"allmydata_immutable_content_to_key_with_added_secret_v1+"

-CLIENT_RENEWAL_TAG = "allmydata_client_renewal_secret_v1"
-CLIENT_CANCEL_TAG = "allmydata_client_cancel_secret_v1"
-FILE_RENEWAL_TAG = "allmydata_file_renewal_secret_v1"
-FILE_CANCEL_TAG = "allmydata_file_cancel_secret_v1"
-BUCKET_RENEWAL_TAG = "allmydata_bucket_renewal_secret_v1"
-BUCKET_CANCEL_TAG = "allmydata_bucket_cancel_secret_v1"
+CLIENT_RENEWAL_TAG = b"allmydata_client_renewal_secret_v1"
+CLIENT_CANCEL_TAG = b"allmydata_client_cancel_secret_v1"
+FILE_RENEWAL_TAG = b"allmydata_file_renewal_secret_v1"
+FILE_CANCEL_TAG = b"allmydata_file_cancel_secret_v1"
+BUCKET_RENEWAL_TAG = b"allmydata_bucket_renewal_secret_v1"
+BUCKET_CANCEL_TAG = b"allmydata_bucket_cancel_secret_v1"

 # mutable
-MUTABLE_WRITEKEY_TAG = "allmydata_mutable_privkey_to_writekey_v1"
-MUTABLE_WRITE_ENABLER_MASTER_TAG = "allmydata_mutable_writekey_to_write_enabler_master_v1"
-MUTABLE_WRITE_ENABLER_TAG = "allmydata_mutable_write_enabler_master_and_nodeid_to_write_enabler_v1"
-MUTABLE_PUBKEY_TAG = "allmydata_mutable_pubkey_to_fingerprint_v1"
-MUTABLE_READKEY_TAG = "allmydata_mutable_writekey_to_readkey_v1"
-MUTABLE_DATAKEY_TAG = "allmydata_mutable_readkey_to_datakey_v1"
-MUTABLE_STORAGEINDEX_TAG = "allmydata_mutable_readkey_to_storage_index_v1"
+MUTABLE_WRITEKEY_TAG = b"allmydata_mutable_privkey_to_writekey_v1"
+MUTABLE_WRITE_ENABLER_MASTER_TAG = b"allmydata_mutable_writekey_to_write_enabler_master_v1"
+MUTABLE_WRITE_ENABLER_TAG = b"allmydata_mutable_write_enabler_master_and_nodeid_to_write_enabler_v1"
+MUTABLE_PUBKEY_TAG = b"allmydata_mutable_pubkey_to_fingerprint_v1"
+MUTABLE_READKEY_TAG = b"allmydata_mutable_writekey_to_readkey_v1"
+MUTABLE_DATAKEY_TAG = b"allmydata_mutable_readkey_to_datakey_v1"
+MUTABLE_STORAGEINDEX_TAG = b"allmydata_mutable_readkey_to_storage_index_v1"

 # dirnodes
-DIRNODE_CHILD_WRITECAP_TAG = "allmydata_mutable_writekey_and_salt_to_dirnode_child_capkey_v1"
-DIRNODE_CHILD_SALT_TAG = "allmydata_dirnode_child_rwcap_to_salt_v1"
+DIRNODE_CHILD_WRITECAP_TAG = b"allmydata_mutable_writekey_and_salt_to_dirnode_child_capkey_v1"
+DIRNODE_CHILD_SALT_TAG = b"allmydata_dirnode_child_rwcap_to_salt_v1"


 def storage_index_hash(key):
@@ -158,8 +174,8 @@ def convergence_hash(k, n, segsize, data, convergence):


 def convergence_hasher(k, n, segsize, convergence):
-    assert isinstance(convergence, str)
-    param_tag = netstring("%d,%d,%d" % (k, n, segsize))
+    assert isinstance(convergence, bytes)
+    param_tag = netstring(b"%d,%d,%d" % (k, n, segsize))
     tag = CONVERGENT_ENCRYPTION_TAG + netstring(convergence) + param_tag
     return tagged_hasher(tag, KEYLEN)
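The hasher's tag mixes the convergence secret with the encoding parameters, so
identical plaintext converges to the same key only under the same (k, n,
segsize). A worked example of the tag bytes for k=3, n=10, segsize=1000
(netstring framing as defined in netstring.py below; the secret value is made
up for illustration):

    def netstring(s):
        return b"%d:%s," % (len(s), s)

    CONVERGENT_ENCRYPTION_TAG = b"allmydata_immutable_content_to_key_with_added_secret_v1+"

    param_tag = netstring(b"%d,%d,%d" % (3, 10, 1000))      # b"9:3,10,1000,"
    tag = CONVERGENT_ENCRYPTION_TAG + netstring(b"secret") + param_tag
    assert tag.endswith(b"6:secret,9:3,10,1000,")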
@@ -197,12 +213,13 @@ def bucket_cancel_secret_hash(file_cancel_secret, peerid):


 def _xor(a, b):
-    return "".join([chr(ord(c) ^ ord(b)) for c in a])
+    return b"".join([byteschr(c ^ b) for c in a])


 def hmac(tag, data):
-    ikey = _xor(tag, "\x36")
-    okey = _xor(tag, "\x5c")
+    tag = bytes(tag)  # Make sure it matches Python 3 behavior
+    ikey = _xor(tag, 0x36)
+    okey = _xor(tag, 0x5c)
     h1 = hashlib.sha256(ikey + data).digest()
     h2 = hashlib.sha256(okey + h1).digest()
     return h2
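The _xor/hmac rewrite hinges on an iteration difference between the two
Pythons: iterating over bytes yields ints on Python 3 but one-character
strings on Python 2, which is why the old code needed ord() and the new code
first coerces with bytes(tag) (future's bytes gives Python 2 the Python 3
behaviour). A standalone sketch of the new-style XOR, assuming python-future
is installed:

    from past.builtins import chr as byteschr  # chr() that returns bytes

    def xor_with(data, key_byte):
        # Each element of data is an int, so it XORs directly with key_byte.
        return b"".join([byteschr(c ^ key_byte) for c in data])

    assert xor_with(b"\x00\xff", 0x36) == b"\x36\xc9"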
@@ -251,7 +268,7 @@ def timing_safe_compare(a, b):
     return bool(tagged_hash(n, a) == tagged_hash(n, b))


-BACKUPDB_DIRHASH_TAG = "allmydata_backupdb_dirhash_v1"
+BACKUPDB_DIRHASH_TAG = b"allmydata_backupdb_dirhash_v1"


 def backupdb_dirhash(contents):
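Aside: the timing_safe_compare context line above gets its constant-time
property by comparing tagged_hash(n, a) against tagged_hash(n, b) under a tag
n that is presumably random (its definition is outside this hunk), so an
attacker cannot steer which bytes differ first. The standard library has
offered the same guarantee since Python 2.7.7/3.3; a sketch of that
alternative:

    import hmac

    def timing_safe_compare_alt(a, b):
        # Constant-time equality check for bytes, from the stdlib.
        return hmac.compare_digest(a, b)

    assert timing_safe_compare_alt(b"a", b"a")
    assert not timing_safe_compare_alt(b"a", b"aa")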
src/allmydata/util/netstring.py

@@ -1,8 +1,23 @@
+"""
+Netstring encoding and decoding.
+
+Ported to Python 3.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min  # noqa: F401
+
+from past.builtins import long
+

 def netstring(s):
-    assert isinstance(s, str), s # no unicode here
-    return "%d:%s," % (len(s), s,)
+    assert isinstance(s, bytes), s # no unicode here
+    return b"%d:%s," % (len(s), s,)

 def split_netstring(data, numstrings,
                     position=0,
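A netstring frames a byte string as <decimal length>:<payload>, so
concatenated netstrings are self-delimiting. Quick examples (the bytes
%-formatting used here requires Python 3.5+ or python-future on Python 2):

    def netstring(s):
        assert isinstance(s, bytes), s  # no unicode here
        return b"%d:%s," % (len(s), s)

    assert netstring(b"abc") == b"3:abc,"   # matches test_encode above
    assert netstring(b"hello") + netstring(b"world") == b"5:hello,5:world,"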
@@ -13,18 +28,19 @@ def split_netstring(data, numstrings,
     byte which was not consumed (the 'required_trailer', if any, counts as
     consumed). If 'required_trailer' is not None, throw ValueError if leftover
     data does not exactly equal 'required_trailer'."""

-    assert type(position) in (int, long), (repr(position), type(position))
+    assert isinstance(data, bytes)
+    assert required_trailer is None or isinstance(required_trailer, bytes)
+    assert isinstance(position, (int, long)), (repr(position), type(position))
     elements = []
     assert numstrings >= 0
     while position < len(data):
-        colon = data.index(":", position)
+        colon = data.index(b":", position)
         length = int(data[position:colon])
         string = data[colon+1:colon+1+length]
         assert len(string) == length, (len(string), length)
         elements.append(string)
         position = colon+1+length
-        assert data[position] == ",", position
+        assert data[position] == b","[0], position
         position += 1
         if len(elements) == numstrings:
             break
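The changed assertion is another bytes-indexing subtlety: on Python 3,
data[position] is an int, so it must be compared with b","[0] (the integer 44)
rather than with the one-byte string b",". Illustration:

    data = b"5:hello,"
    assert data[7] == b","[0] == 44   # indexing bytes yields an int
    assert data[7:8] == b","          # slicing yields bytes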