Merge branch '3344.netstring-and-hashutil-to-python-3' into 3354.hashtree-to-python-3

This commit is contained in:
Itamar Turner-Trauring 2020-07-22 13:09:58 -04:00
commit 85c8ca6d77
8 changed files with 261 additions and 160 deletions

View File

@@ -18,7 +18,18 @@ allmydata.test.test_deferredutil.DeferredUtilTests.test_failure
allmydata.test.test_deferredutil.DeferredUtilTests.test_gather_results allmydata.test.test_deferredutil.DeferredUtilTests.test_gather_results
allmydata.test.test_deferredutil.DeferredUtilTests.test_success allmydata.test.test_deferredutil.DeferredUtilTests.test_success
allmydata.test.test_deferredutil.DeferredUtilTests.test_wait_for_delayed_calls allmydata.test.test_deferredutil.DeferredUtilTests.test_wait_for_delayed_calls
allmydata.test.test_hashutil.HashUtilTests.test_chk
allmydata.test.test_hashutil.HashUtilTests.test_hashers
allmydata.test.test_hashutil.HashUtilTests.test_known_answers
allmydata.test.test_hashutil.HashUtilTests.test_random_key
allmydata.test.test_hashutil.HashUtilTests.test_sha256d
allmydata.test.test_hashutil.HashUtilTests.test_sha256d_truncated
allmydata.test.test_hashutil.HashUtilTests.test_timing_safe_compare
allmydata.test.test_humanreadable.HumanReadable.test_repr allmydata.test.test_humanreadable.HumanReadable.test_repr
allmydata.test.test_netstring.Netstring.test_encode
allmydata.test.test_netstring.Netstring.test_extra
allmydata.test.test_netstring.Netstring.test_nested
allmydata.test.test_netstring.Netstring.test_split
allmydata.test.test_observer.Observer.test_lazy_oneshot allmydata.test.test_observer.Observer.test_lazy_oneshot
allmydata.test.test_observer.Observer.test_observerlist allmydata.test.test_observer.Observer.test_observerlist
allmydata.test.test_observer.Observer.test_oneshot allmydata.test.test_observer.Observer.test_oneshot

0
newsfragments/3344.minor Normal file
View File

View File

@@ -0,0 +1,135 @@
"""
Tests for allmydata.util.hashutil.
Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from future.utils import PY2
if PY2:
from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401
from twisted.trial import unittest
from allmydata.util import hashutil, base32
class HashUtilTests(unittest.TestCase):
    """Tests for allmydata.util.hashutil.

    Besides checking the hash values themselves, these tests assert that
    every hash function returns ``bytes``, so the module behaves identically
    on Python 2 and Python 3.
    """

    def test_random_key(self):
        """random_key() returns KEYLEN bytes."""
        k = hashutil.random_key()
        self.failUnlessEqual(len(k), hashutil.KEYLEN)
        self.assertIsInstance(k, bytes)

    def test_sha256d(self):
        """tagged_hash() agrees with tagged_hasher(), and digest() is stable."""
        h1 = hashutil.tagged_hash(b"tag1", b"value")
        self.assertIsInstance(h1, bytes)
        h2 = hashutil.tagged_hasher(b"tag1")
        h2.update(b"value")
        h2a = h2.digest()
        # A second digest() call must return the same value (no state mutation).
        h2b = h2.digest()
        self.assertIsInstance(h2a, bytes)
        self.failUnlessEqual(h1, h2a)
        self.failUnlessEqual(h2a, h2b)

    def test_sha256d_truncated(self):
        """A truncate_to argument limits both hash and hasher output length."""
        h1 = hashutil.tagged_hash(b"tag1", b"value", 16)
        h2 = hashutil.tagged_hasher(b"tag1", 16)
        h2.update(b"value")
        h2 = h2.digest()
        self.failUnlessEqual(len(h1), 16)
        self.failUnlessEqual(len(h2), 16)
        self.failUnlessEqual(h1, h2)

    def test_chk(self):
        """convergence_hash() agrees with the incremental convergence_hasher()."""
        h1 = hashutil.convergence_hash(3, 10, 1000, b"data", b"secret")
        h2 = hashutil.convergence_hasher(3, 10, 1000, b"secret")
        h2.update(b"data")
        h2 = h2.digest()
        self.failUnlessEqual(h1, h2)
        self.assertIsInstance(h1, bytes)
        self.assertIsInstance(h2, bytes)

    def _check_hash_matches_hasher(self, hash_fn, hasher_fn):
        # The one-shot hash function and its incremental hasher must agree
        # on the same input, and the result must be bytes.
        h1 = hash_fn(b"foo")
        h2 = hasher_fn()
        h2.update(b"foo")
        self.failUnlessEqual(h1, h2.digest())
        self.assertIsInstance(h1, bytes)

    def test_hashers(self):
        """Each one-shot hash function matches its incremental hasher."""
        pairs = [
            (hashutil.block_hash, hashutil.block_hasher),
            (hashutil.uri_extension_hash, hashutil.uri_extension_hasher),
            (hashutil.plaintext_hash, hashutil.plaintext_hasher),
            (hashutil.crypttext_hash, hashutil.crypttext_hasher),
            (hashutil.crypttext_segment_hash, hashutil.crypttext_segment_hasher),
            (hashutil.plaintext_segment_hash, hashutil.plaintext_segment_hasher),
        ]
        for hash_fn, hasher_fn in pairs:
            self._check_hash_matches_hasher(hash_fn, hasher_fn)

    def test_timing_safe_compare(self):
        """timing_safe_compare() reports equality of its byte-string arguments."""
        self.failUnless(hashutil.timing_safe_compare(b"a", b"a"))
        self.failUnless(hashutil.timing_safe_compare(b"ab", b"ab"))
        self.failIf(hashutil.timing_safe_compare(b"a", b"b"))
        self.failIf(hashutil.timing_safe_compare(b"a", b"aa"))

    def _testknown(self, hashf, expected_a, *args):
        # Apply hashf to *args and compare the base32 encoding of the result
        # against the known-answer vector expected_a.
        got = hashf(*args)
        self.assertIsInstance(got, bytes)
        got_a = base32.b2a(got)
        self.failUnlessEqual(got_a, expected_a)

    def test_known_answers(self):
        """Known-answer vectors: assert backwards compatibility of every hash."""
        self._testknown(hashutil.storage_index_hash, b"qb5igbhcc5esa6lwqorsy7e6am", b"")
        self._testknown(hashutil.block_hash, b"msjr5bh4evuh7fa3zw7uovixfbvlnstr5b65mrerwfnvjxig2jvq", b"")
        self._testknown(hashutil.uri_extension_hash, b"wthsu45q7zewac2mnivoaa4ulh5xvbzdmsbuyztq2a5fzxdrnkka", b"")
        self._testknown(hashutil.plaintext_hash, b"5lz5hwz3qj3af7n6e3arblw7xzutvnd3p3fjsngqjcb7utf3x3da", b"")
        self._testknown(hashutil.crypttext_hash, b"itdj6e4njtkoiavlrmxkvpreosscssklunhwtvxn6ggho4rkqwga", b"")
        self._testknown(hashutil.crypttext_segment_hash, b"aovy5aa7jej6ym5ikgwyoi4pxawnoj3wtaludjz7e2nb5xijb7aa", b"")
        self._testknown(hashutil.plaintext_segment_hash, b"4fdgf6qruaisyukhqcmoth4t3li6bkolbxvjy4awwcpprdtva7za", b"")
        self._testknown(hashutil.convergence_hash, b"3mo6ni7xweplycin6nowynw2we", 3, 10, 100, b"", b"converge")
        self._testknown(hashutil.my_renewal_secret_hash, b"ujhr5k5f7ypkp67jkpx6jl4p47pyta7hu5m527cpcgvkafsefm6q", b"")
        self._testknown(hashutil.my_cancel_secret_hash, b"rjwzmafe2duixvqy6h47f5wfrokdziry6zhx4smew4cj6iocsfaa", b"")
        self._testknown(hashutil.file_renewal_secret_hash, b"hzshk2kf33gzbd5n3a6eszkf6q6o6kixmnag25pniusyaulqjnia", b"", b"si")
        self._testknown(hashutil.file_cancel_secret_hash, b"bfciwvr6w7wcavsngxzxsxxaszj72dej54n4tu2idzp6b74g255q", b"", b"si")
        self._testknown(hashutil.bucket_renewal_secret_hash, b"e7imrzgzaoashsncacvy3oysdd2m5yvtooo4gmj4mjlopsazmvuq", b"", b"\x00"*20)
        self._testknown(hashutil.bucket_cancel_secret_hash, b"dvdujeyxeirj6uux6g7xcf4lvesk632aulwkzjar7srildvtqwma", b"", b"\x00"*20)
        self._testknown(hashutil.hmac, b"c54ypfi6pevb3nvo6ba42jtglpkry2kbdopqsi7dgrm4r7tw5sra", b"tag", b"")
        self._testknown(hashutil.mutable_rwcap_key_hash, b"6rvn2iqrghii5n4jbbwwqqsnqu", b"iv", b"wk")
        self._testknown(hashutil.ssk_writekey_hash, b"ykpgmdbpgbb6yqz5oluw2q26ye", b"")
        self._testknown(hashutil.ssk_write_enabler_master_hash, b"izbfbfkoait4dummruol3gy2bnixrrrslgye6ycmkuyujnenzpia", b"")
        self._testknown(hashutil.ssk_write_enabler_hash, b"fuu2dvx7g6gqu5x22vfhtyed7p4pd47y5hgxbqzgrlyvxoev62tq", b"wk", b"\x00"*20)
        self._testknown(hashutil.ssk_pubkey_fingerprint_hash, b"3opzw4hhm2sgncjx224qmt5ipqgagn7h5zivnfzqycvgqgmgz35q", b"")
        self._testknown(hashutil.ssk_readkey_hash, b"vugid4as6qbqgeq2xczvvcedai", b"")
        self._testknown(hashutil.ssk_readkey_data_hash, b"73wsaldnvdzqaf7v4pzbr2ae5a", b"iv", b"rk")
        self._testknown(hashutil.ssk_storage_index_hash, b"j7icz6kigb6hxrej3tv4z7ayym", b"")
        self._testknown(hashutil.permute_server_hash,
                        b"kb4354zeeurpo3ze5e275wzbynm6hlap",  # b32(expected)
                        b"SI",  # peer selection index == storage_index
                        base32.a2b(b"u33m4y7klhz3bypswqkozwetvabelhxt"),  # seed
                        )

View File

@@ -1,36 +1,59 @@
"""
Tests for allmydata.util.netstring.
Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from future.utils import PY2
if PY2:
from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401
from twisted.trial import unittest from twisted.trial import unittest
from allmydata.util.netstring import netstring, split_netstring from allmydata.util.netstring import netstring, split_netstring
class Netstring(unittest.TestCase): class Netstring(unittest.TestCase):
def test_encode(self):
"""netstring() correctly encodes the given bytes."""
result = netstring(b"abc")
self.assertEqual(result, b"3:abc,")
self.assertIsInstance(result, bytes)
def test_split(self): def test_split(self):
a = netstring("hello") + netstring("world") a = netstring(b"hello") + netstring(b"world")
self.failUnlessEqual(split_netstring(a, 2), (["hello", "world"], len(a))) for s in split_netstring(a, 2)[0]:
self.failUnlessEqual(split_netstring(a, 2, required_trailer=""), (["hello", "world"], len(a))) self.assertIsInstance(s, bytes)
self.failUnlessEqual(split_netstring(a, 2), ([b"hello", b"world"], len(a)))
self.failUnlessEqual(split_netstring(a, 2, required_trailer=b""), ([b"hello", b"world"], len(a)))
self.failUnlessRaises(ValueError, split_netstring, a, 3) self.failUnlessRaises(ValueError, split_netstring, a, 3)
self.failUnlessRaises(ValueError, split_netstring, a+" extra", 2, required_trailer="") self.failUnlessRaises(ValueError, split_netstring, a+b" extra", 2, required_trailer=b"")
self.failUnlessEqual(split_netstring(a+" extra", 2), (["hello", "world"], len(a))) self.failUnlessEqual(split_netstring(a+b" extra", 2), ([b"hello", b"world"], len(a)))
self.failUnlessEqual(split_netstring(a+"++", 2, required_trailer="++"), self.failUnlessEqual(split_netstring(a+b"++", 2, required_trailer=b"++"),
(["hello", "world"], len(a)+2)) ([b"hello", b"world"], len(a)+2))
self.failUnlessRaises(ValueError, self.failUnlessRaises(ValueError,
split_netstring, a+"+", 2, required_trailer="not") split_netstring, a+b"+", 2, required_trailer=b"not")
def test_extra(self): def test_extra(self):
a = netstring("hello") a = netstring(b"hello")
self.failUnlessEqual(split_netstring(a, 1), (["hello"], len(a))) self.failUnlessEqual(split_netstring(a, 1), ([b"hello"], len(a)))
b = netstring("hello") + "extra stuff" b = netstring(b"hello") + b"extra stuff"
self.failUnlessEqual(split_netstring(b, 1), self.failUnlessEqual(split_netstring(b, 1),
(["hello"], len(a))) ([b"hello"], len(a)))
def test_nested(self): def test_nested(self):
a = netstring("hello") + netstring("world") + "extra stuff" a = netstring(b"hello") + netstring(b"world") + b"extra stuff"
b = netstring("a") + netstring("is") + netstring(a) + netstring(".") b = netstring(b"a") + netstring(b"is") + netstring(a) + netstring(b".")
(top, pos) = split_netstring(b, 4) (top, pos) = split_netstring(b, 4)
self.failUnlessEqual(len(top), 4) self.failUnlessEqual(len(top), 4)
self.failUnlessEqual(top[0], "a") self.failUnlessEqual(top[0], b"a")
self.failUnlessEqual(top[1], "is") self.failUnlessEqual(top[1], b"is")
self.failUnlessEqual(top[2], a) self.failUnlessEqual(top[2], a)
self.failUnlessEqual(top[3], ".") self.failUnlessEqual(top[3], b".")
self.failUnlessRaises(ValueError, split_netstring, a, 2, required_trailer="") self.failUnlessRaises(ValueError, split_netstring, a, 2, required_trailer=b"")
bottom = split_netstring(a, 2) bottom = split_netstring(a, 2)
self.failUnlessEqual(bottom, (["hello", "world"], len(netstring("hello")+netstring("world")))) self.failUnlessEqual(bottom, ([b"hello", b"world"], len(netstring(b"hello")+netstring(b"world"))))

View File

@@ -14,7 +14,7 @@ from twisted.internet import defer, reactor
from twisted.python.failure import Failure from twisted.python.failure import Failure
from twisted.python import log from twisted.python import log
from allmydata.util import base32, idlib, mathutil, hashutil from allmydata.util import idlib, mathutil
from allmydata.util import fileutil, abbreviate from allmydata.util import fileutil, abbreviate
from allmydata.util import limiter, time_format, pollmixin from allmydata.util import limiter, time_format, pollmixin
from allmydata.util import statistics, dictutil, pipeline, yamlutil from allmydata.util import statistics, dictutil, pipeline, yamlutil
@@ -582,111 +582,6 @@ class PollMixinTests(unittest.TestCase):
return d return d
class HashUtilTests(unittest.TestCase):
def test_random_key(self):
k = hashutil.random_key()
self.failUnlessEqual(len(k), hashutil.KEYLEN)
def test_sha256d(self):
h1 = hashutil.tagged_hash("tag1", "value")
h2 = hashutil.tagged_hasher("tag1")
h2.update("value")
h2a = h2.digest()
h2b = h2.digest()
self.failUnlessEqual(h1, h2a)
self.failUnlessEqual(h2a, h2b)
def test_sha256d_truncated(self):
h1 = hashutil.tagged_hash("tag1", "value", 16)
h2 = hashutil.tagged_hasher("tag1", 16)
h2.update("value")
h2 = h2.digest()
self.failUnlessEqual(len(h1), 16)
self.failUnlessEqual(len(h2), 16)
self.failUnlessEqual(h1, h2)
def test_chk(self):
h1 = hashutil.convergence_hash(3, 10, 1000, "data", "secret")
h2 = hashutil.convergence_hasher(3, 10, 1000, "secret")
h2.update("data")
h2 = h2.digest()
self.failUnlessEqual(h1, h2)
def test_hashers(self):
h1 = hashutil.block_hash("foo")
h2 = hashutil.block_hasher()
h2.update("foo")
self.failUnlessEqual(h1, h2.digest())
h1 = hashutil.uri_extension_hash("foo")
h2 = hashutil.uri_extension_hasher()
h2.update("foo")
self.failUnlessEqual(h1, h2.digest())
h1 = hashutil.plaintext_hash("foo")
h2 = hashutil.plaintext_hasher()
h2.update("foo")
self.failUnlessEqual(h1, h2.digest())
h1 = hashutil.crypttext_hash("foo")
h2 = hashutil.crypttext_hasher()
h2.update("foo")
self.failUnlessEqual(h1, h2.digest())
h1 = hashutil.crypttext_segment_hash("foo")
h2 = hashutil.crypttext_segment_hasher()
h2.update("foo")
self.failUnlessEqual(h1, h2.digest())
h1 = hashutil.plaintext_segment_hash("foo")
h2 = hashutil.plaintext_segment_hasher()
h2.update("foo")
self.failUnlessEqual(h1, h2.digest())
def test_timing_safe_compare(self):
self.failUnless(hashutil.timing_safe_compare("a", "a"))
self.failUnless(hashutil.timing_safe_compare("ab", "ab"))
self.failIf(hashutil.timing_safe_compare("a", "b"))
self.failIf(hashutil.timing_safe_compare("a", "aa"))
def _testknown(self, hashf, expected_a, *args):
got = hashf(*args)
got_a = base32.b2a(got)
self.failUnlessEqual(got_a, expected_a)
def test_known_answers(self):
# assert backwards compatibility
self._testknown(hashutil.storage_index_hash, "qb5igbhcc5esa6lwqorsy7e6am", "")
self._testknown(hashutil.block_hash, "msjr5bh4evuh7fa3zw7uovixfbvlnstr5b65mrerwfnvjxig2jvq", "")
self._testknown(hashutil.uri_extension_hash, "wthsu45q7zewac2mnivoaa4ulh5xvbzdmsbuyztq2a5fzxdrnkka", "")
self._testknown(hashutil.plaintext_hash, "5lz5hwz3qj3af7n6e3arblw7xzutvnd3p3fjsngqjcb7utf3x3da", "")
self._testknown(hashutil.crypttext_hash, "itdj6e4njtkoiavlrmxkvpreosscssklunhwtvxn6ggho4rkqwga", "")
self._testknown(hashutil.crypttext_segment_hash, "aovy5aa7jej6ym5ikgwyoi4pxawnoj3wtaludjz7e2nb5xijb7aa", "")
self._testknown(hashutil.plaintext_segment_hash, "4fdgf6qruaisyukhqcmoth4t3li6bkolbxvjy4awwcpprdtva7za", "")
self._testknown(hashutil.convergence_hash, "3mo6ni7xweplycin6nowynw2we", 3, 10, 100, "", "converge")
self._testknown(hashutil.my_renewal_secret_hash, "ujhr5k5f7ypkp67jkpx6jl4p47pyta7hu5m527cpcgvkafsefm6q", "")
self._testknown(hashutil.my_cancel_secret_hash, "rjwzmafe2duixvqy6h47f5wfrokdziry6zhx4smew4cj6iocsfaa", "")
self._testknown(hashutil.file_renewal_secret_hash, "hzshk2kf33gzbd5n3a6eszkf6q6o6kixmnag25pniusyaulqjnia", "", "si")
self._testknown(hashutil.file_cancel_secret_hash, "bfciwvr6w7wcavsngxzxsxxaszj72dej54n4tu2idzp6b74g255q", "", "si")
self._testknown(hashutil.bucket_renewal_secret_hash, "e7imrzgzaoashsncacvy3oysdd2m5yvtooo4gmj4mjlopsazmvuq", "", "\x00"*20)
self._testknown(hashutil.bucket_cancel_secret_hash, "dvdujeyxeirj6uux6g7xcf4lvesk632aulwkzjar7srildvtqwma", "", "\x00"*20)
self._testknown(hashutil.hmac, "c54ypfi6pevb3nvo6ba42jtglpkry2kbdopqsi7dgrm4r7tw5sra", "tag", "")
self._testknown(hashutil.mutable_rwcap_key_hash, "6rvn2iqrghii5n4jbbwwqqsnqu", "iv", "wk")
self._testknown(hashutil.ssk_writekey_hash, "ykpgmdbpgbb6yqz5oluw2q26ye", "")
self._testknown(hashutil.ssk_write_enabler_master_hash, "izbfbfkoait4dummruol3gy2bnixrrrslgye6ycmkuyujnenzpia", "")
self._testknown(hashutil.ssk_write_enabler_hash, "fuu2dvx7g6gqu5x22vfhtyed7p4pd47y5hgxbqzgrlyvxoev62tq", "wk", "\x00"*20)
self._testknown(hashutil.ssk_pubkey_fingerprint_hash, "3opzw4hhm2sgncjx224qmt5ipqgagn7h5zivnfzqycvgqgmgz35q", "")
self._testknown(hashutil.ssk_readkey_hash, "vugid4as6qbqgeq2xczvvcedai", "")
self._testknown(hashutil.ssk_readkey_data_hash, "73wsaldnvdzqaf7v4pzbr2ae5a", "iv", "rk")
self._testknown(hashutil.ssk_storage_index_hash, "j7icz6kigb6hxrej3tv4z7ayym", "")
self._testknown(hashutil.permute_server_hash,
"kb4354zeeurpo3ze5e275wzbynm6hlap", # b32(expected)
"SI", # peer selection index == storage_index
base32.a2b("u33m4y7klhz3bypswqkozwetvabelhxt"), # seed
)
class Abbreviate(unittest.TestCase): class Abbreviate(unittest.TestCase):
def test_abbrev_time_1s(self): def test_abbrev_time_1s(self):
diff = timedelta(seconds=1) diff = timedelta(seconds=1)

View File

@@ -19,9 +19,11 @@ PORTED_MODULES = [
"allmydata.util.base32", "allmydata.util.base32",
"allmydata.util.base62", "allmydata.util.base62",
"allmydata.util.deferredutil", "allmydata.util.deferredutil",
"allmydata.util.hashutil",
"allmydata.util.humanreadable", "allmydata.util.humanreadable",
"allmydata.util.mathutil", "allmydata.util.mathutil",
"allmydata.util.namespace", "allmydata.util.namespace",
"allmydata.util.netstring",
"allmydata.util.pollmixin", "allmydata.util.pollmixin",
"allmydata.util._python3", "allmydata.util._python3",
] ]
@@ -30,7 +32,9 @@ PORTED_TEST_MODULES = [
"allmydata.test.test_base32", "allmydata.test.test_base32",
"allmydata.test.test_base62", "allmydata.test.test_base62",
"allmydata.test.test_deferredutil", "allmydata.test.test_deferredutil",
"allmydata.test.test_hashutil",
"allmydata.test.test_humanreadable", "allmydata.test.test_humanreadable",
"allmydata.test.test_netstring",
"allmydata.test.test_python3", "allmydata.test.test_python3",
] ]

View File

@@ -1,3 +1,19 @@
"""
Hashing utilities.
Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from future.utils import PY2
if PY2:
from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401
from past.builtins import chr as byteschr
import os import os
import hashlib import hashlib
from allmydata.util.netstring import netstring from allmydata.util.netstring import netstring
@@ -60,34 +76,34 @@ def tagged_pair_hash(tag, val1, val2, truncate_to=None):
# immutable # immutable
STORAGE_INDEX_TAG = "allmydata_immutable_key_to_storage_index_v1" STORAGE_INDEX_TAG = b"allmydata_immutable_key_to_storage_index_v1"
BLOCK_TAG = "allmydata_encoded_subshare_v1" BLOCK_TAG = b"allmydata_encoded_subshare_v1"
UEB_TAG = "allmydata_uri_extension_v1" UEB_TAG = b"allmydata_uri_extension_v1"
PLAINTEXT_TAG = "allmydata_plaintext_v1" PLAINTEXT_TAG = b"allmydata_plaintext_v1"
CIPHERTEXT_TAG = "allmydata_crypttext_v1" CIPHERTEXT_TAG = b"allmydata_crypttext_v1"
CIPHERTEXT_SEGMENT_TAG = "allmydata_crypttext_segment_v1" CIPHERTEXT_SEGMENT_TAG = b"allmydata_crypttext_segment_v1"
PLAINTEXT_SEGMENT_TAG = "allmydata_plaintext_segment_v1" PLAINTEXT_SEGMENT_TAG = b"allmydata_plaintext_segment_v1"
CONVERGENT_ENCRYPTION_TAG = "allmydata_immutable_content_to_key_with_added_secret_v1+" CONVERGENT_ENCRYPTION_TAG = b"allmydata_immutable_content_to_key_with_added_secret_v1+"
CLIENT_RENEWAL_TAG = "allmydata_client_renewal_secret_v1" CLIENT_RENEWAL_TAG = b"allmydata_client_renewal_secret_v1"
CLIENT_CANCEL_TAG = "allmydata_client_cancel_secret_v1" CLIENT_CANCEL_TAG = b"allmydata_client_cancel_secret_v1"
FILE_RENEWAL_TAG = "allmydata_file_renewal_secret_v1" FILE_RENEWAL_TAG = b"allmydata_file_renewal_secret_v1"
FILE_CANCEL_TAG = "allmydata_file_cancel_secret_v1" FILE_CANCEL_TAG = b"allmydata_file_cancel_secret_v1"
BUCKET_RENEWAL_TAG = "allmydata_bucket_renewal_secret_v1" BUCKET_RENEWAL_TAG = b"allmydata_bucket_renewal_secret_v1"
BUCKET_CANCEL_TAG = "allmydata_bucket_cancel_secret_v1" BUCKET_CANCEL_TAG = b"allmydata_bucket_cancel_secret_v1"
# mutable # mutable
MUTABLE_WRITEKEY_TAG = "allmydata_mutable_privkey_to_writekey_v1" MUTABLE_WRITEKEY_TAG = b"allmydata_mutable_privkey_to_writekey_v1"
MUTABLE_WRITE_ENABLER_MASTER_TAG = "allmydata_mutable_writekey_to_write_enabler_master_v1" MUTABLE_WRITE_ENABLER_MASTER_TAG = b"allmydata_mutable_writekey_to_write_enabler_master_v1"
MUTABLE_WRITE_ENABLER_TAG = "allmydata_mutable_write_enabler_master_and_nodeid_to_write_enabler_v1" MUTABLE_WRITE_ENABLER_TAG = b"allmydata_mutable_write_enabler_master_and_nodeid_to_write_enabler_v1"
MUTABLE_PUBKEY_TAG = "allmydata_mutable_pubkey_to_fingerprint_v1" MUTABLE_PUBKEY_TAG = b"allmydata_mutable_pubkey_to_fingerprint_v1"
MUTABLE_READKEY_TAG = "allmydata_mutable_writekey_to_readkey_v1" MUTABLE_READKEY_TAG = b"allmydata_mutable_writekey_to_readkey_v1"
MUTABLE_DATAKEY_TAG = "allmydata_mutable_readkey_to_datakey_v1" MUTABLE_DATAKEY_TAG = b"allmydata_mutable_readkey_to_datakey_v1"
MUTABLE_STORAGEINDEX_TAG = "allmydata_mutable_readkey_to_storage_index_v1" MUTABLE_STORAGEINDEX_TAG = b"allmydata_mutable_readkey_to_storage_index_v1"
# dirnodes # dirnodes
DIRNODE_CHILD_WRITECAP_TAG = "allmydata_mutable_writekey_and_salt_to_dirnode_child_capkey_v1" DIRNODE_CHILD_WRITECAP_TAG = b"allmydata_mutable_writekey_and_salt_to_dirnode_child_capkey_v1"
DIRNODE_CHILD_SALT_TAG = "allmydata_dirnode_child_rwcap_to_salt_v1" DIRNODE_CHILD_SALT_TAG = b"allmydata_dirnode_child_rwcap_to_salt_v1"
def storage_index_hash(key): def storage_index_hash(key):
@@ -158,8 +174,8 @@ def convergence_hash(k, n, segsize, data, convergence):
def convergence_hasher(k, n, segsize, convergence): def convergence_hasher(k, n, segsize, convergence):
assert isinstance(convergence, str) assert isinstance(convergence, bytes)
param_tag = netstring("%d,%d,%d" % (k, n, segsize)) param_tag = netstring(b"%d,%d,%d" % (k, n, segsize))
tag = CONVERGENT_ENCRYPTION_TAG + netstring(convergence) + param_tag tag = CONVERGENT_ENCRYPTION_TAG + netstring(convergence) + param_tag
return tagged_hasher(tag, KEYLEN) return tagged_hasher(tag, KEYLEN)
@@ -197,12 +213,13 @@ def bucket_cancel_secret_hash(file_cancel_secret, peerid):
def _xor(a, b): def _xor(a, b):
return "".join([chr(ord(c) ^ ord(b)) for c in a]) return b"".join([byteschr(c ^ b) for c in a])
def hmac(tag, data): def hmac(tag, data):
ikey = _xor(tag, "\x36") tag = bytes(tag) # Make sure it matches Python 3 behavior
okey = _xor(tag, "\x5c") ikey = _xor(tag, 0x36)
okey = _xor(tag, 0x5c)
h1 = hashlib.sha256(ikey + data).digest() h1 = hashlib.sha256(ikey + data).digest()
h2 = hashlib.sha256(okey + h1).digest() h2 = hashlib.sha256(okey + h1).digest()
return h2 return h2
@@ -251,7 +268,7 @@ def timing_safe_compare(a, b):
return bool(tagged_hash(n, a) == tagged_hash(n, b)) return bool(tagged_hash(n, a) == tagged_hash(n, b))
BACKUPDB_DIRHASH_TAG = "allmydata_backupdb_dirhash_v1" BACKUPDB_DIRHASH_TAG = b"allmydata_backupdb_dirhash_v1"
def backupdb_dirhash(contents): def backupdb_dirhash(contents):

View File

@@ -1,8 +1,23 @@
"""
Netstring encoding and decoding.
Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from future.utils import PY2
if PY2:
from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401
from past.builtins import long
def netstring(s): def netstring(s):
assert isinstance(s, str), s # no unicode here assert isinstance(s, bytes), s # no unicode here
return "%d:%s," % (len(s), s,) return b"%d:%s," % (len(s), s,)
def split_netstring(data, numstrings, def split_netstring(data, numstrings,
position=0, position=0,
@ -13,18 +28,19 @@ def split_netstring(data, numstrings,
byte which was not consumed (the 'required_trailer', if any, counts as byte which was not consumed (the 'required_trailer', if any, counts as
consumed). If 'required_trailer' is not None, throw ValueError if leftover consumed). If 'required_trailer' is not None, throw ValueError if leftover
data does not exactly equal 'required_trailer'.""" data does not exactly equal 'required_trailer'."""
assert isinstance(data, bytes)
assert type(position) in (int, long), (repr(position), type(position)) assert required_trailer is None or isinstance(required_trailer, bytes)
assert isinstance(position, (int, long)), (repr(position), type(position))
elements = [] elements = []
assert numstrings >= 0 assert numstrings >= 0
while position < len(data): while position < len(data):
colon = data.index(":", position) colon = data.index(b":", position)
length = int(data[position:colon]) length = int(data[position:colon])
string = data[colon+1:colon+1+length] string = data[colon+1:colon+1+length]
assert len(string) == length, (len(string), length) assert len(string) == length, (len(string), length)
elements.append(string) elements.append(string)
position = colon+1+length position = colon+1+length
assert data[position] == ",", position assert data[position] == b","[0], position
position += 1 position += 1
if len(elements) == numstrings: if len(elements) == numstrings:
break break