Merge pull request #812 from tahoe-lafs/3416.test-encode-python-3

Port allmydata.test.no_network to Python 3

Fixes ticket:3416
Itamar Turner-Trauring 2020-09-21 11:51:34 -04:00 committed by GitHub
commit 7c6e3104ac
16 changed files with 98 additions and 65 deletions

newsfragments/3416.minor (new empty file)

@ -147,7 +147,7 @@ def _make_secret():
Returns a base32-encoded random secret of hashutil.CRYPTO_VAL_SIZE
bytes.
"""
-return base32.b2a(os.urandom(hashutil.CRYPTO_VAL_SIZE)) + "\n"
+return base32.b2a(os.urandom(hashutil.CRYPTO_VAL_SIZE)) + b"\n"
class SecretHolder(object):
@ -739,12 +739,12 @@ class _Client(node.Node, pollmixin.PollMixin):
# existing key
def _make_key():
private_key, _ = ed25519.create_signing_keypair()
-return ed25519.string_from_signing_key(private_key) + "\n"
+return ed25519.string_from_signing_key(private_key) + b"\n"
private_key_str = self.config.get_or_create_private_config("node.privkey", _make_key)
private_key, public_key = ed25519.signing_keypair_from_string(private_key_str)
public_key_str = ed25519.string_from_verifying_key(public_key)
-self.config.write_config_file("node.pubkey", public_key_str + "\n", "w")
+self.config.write_config_file("node.pubkey", public_key_str + b"\n", "wb")
self._node_private_key = private_key
self._node_public_key = public_key
@ -971,7 +971,7 @@ class _Client(node.Node, pollmixin.PollMixin):
"""
self.config.write_private_config(
'api_auth_token',
-urlsafe_b64encode(os.urandom(32)) + '\n',
+urlsafe_b64encode(os.urandom(32)) + b'\n',
)
def get_storage_broker(self):
@ -1021,7 +1021,7 @@ class _Client(node.Node, pollmixin.PollMixin):
c = ControlServer()
c.setServiceParent(self)
control_url = self.control_tub.registerReference(c)
-self.config.write_private_config("control.furl", control_url + "\n")
+self.config.write_private_config("control.furl", control_url + b"\n")
def init_helper(self):
self.helper = Helper(self.config.get_config_path("helper"),
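
Note: every hunk in this file is the same fix. On Python 3 these values are bytes, so the appended newline must be a bytes literal too. A minimal sketch of the failure mode (stdlib b32encode stands in for allmydata.util.base32.b2a):

```python
import os
from base64 import b32encode  # stand-in for allmydata.util.base32.b2a

secret = b32encode(os.urandom(32)) + b"\n"  # bytes + bytes: fine everywhere
# b32encode(os.urandom(32)) + "\n"          # bytes + str: TypeError on Python 3
```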

@ -106,7 +106,7 @@ class ShareFinder(object):
server = None
try:
if self._servers:
-server = self._servers.next()
+server = next(self._servers)
except StopIteration:
self._servers = None
@ -175,7 +175,7 @@ class ShareFinder(object):
shnums=shnums_s, name=server.get_name(),
level=log.NOISY, parent=lp, umid="0fcEZw")
shares = []
-for shnum, bucket in buckets.iteritems():
+for shnum, bucket in buckets.items():
s = self._create_share(shnum, bucket, server, dyhb_rtt)
shares.append(s)
self._deliver_shares(shares)
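
Note: both hunks here are mechanical Python 3 idiom changes: the .next() method became the next() builtin, and dict.iteritems() became dict.items(). Illustrative sketch, not from the patch:

```python
servers = iter(["server-a", "server-b"])
server = next(servers)                 # iterator.next() no longer exists on Py3

buckets = {0: "bucket-0", 1: "bucket-1"}
for shnum, bucket in buckets.items():  # dict.iteritems() is Py2-only
    print(shnum, bucket)
```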

@ -353,14 +353,14 @@ class DownloadNode(object):
# each segment is turned into N blocks. All but the last are of size
# block_size, and the last is of size tail_block_size
-block_size = segment_size / k
-tail_block_size = tail_segment_padded / k
+block_size = segment_size // k
+tail_block_size = tail_segment_padded // k
return { "tail_segment_size": tail_segment_size,
"tail_segment_padded": tail_segment_padded,
"num_segments": num_segments,
"block_size": block_size,
"tail_block_size": tail_block_size,
"tail_block_size": tail_block_size
}
@ -455,7 +455,7 @@ class DownloadNode(object):
shares = []
shareids = []
-for (shareid, share) in blocks.iteritems():
+for (shareid, share) in blocks.items():
assert len(share) == block_size
shareids.append(shareid)
shares.append(share)
@ -465,7 +465,7 @@ class DownloadNode(object):
del shares
def _process(buffers):
decodetime = now() - start
-segment = "".join(buffers)
+segment = b"".join(buffers)
assert len(segment) == decoded_size
del buffers
if tail:
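
Note: the division change is the substantive one in this file. On Python 3, int / int always yields a float, and block sizes must stay integers, hence //. For example:

```python
segment_size, k = 131072, 3
print(segment_size / k)   # 43690.666... -- a float on Python 3
print(segment_size // k)  # 43690 -- floor division keeps it an integer
```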

@ -85,8 +85,8 @@ class Share(object):
self._requested_blocks = [] # (segnum, set(observer2..))
v = server.get_version()
-ver = v["http://allmydata.org/tahoe/protocols/storage/v1"]
-self._overrun_ok = ver["tolerates-immutable-read-overrun"]
+ver = v[b"http://allmydata.org/tahoe/protocols/storage/v1"]
+self._overrun_ok = ver[b"tolerates-immutable-read-overrun"]
# If _overrun_ok and we guess the offsets correctly, we can get
# everything in one RTT. If _overrun_ok and we guess wrong, we might
# need two RTT (but we could get lucky and do it in one). If overrun
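
Note: under Python 3 the version dictionary comes back with bytes keys, so the lookups switch to bytes literals. A sketch, with a made-up dict in place of server.get_version():

```python
# Hypothetical stand-in for the value returned by server.get_version().
v = {b"http://allmydata.org/tahoe/protocols/storage/v1":
     {b"tolerates-immutable-read-overrun": True}}
ver = v[b"http://allmydata.org/tahoe/protocols/storage/v1"]
overrun_ok = ver[b"tolerates-immutable-read-overrun"]
```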

@ -89,7 +89,7 @@ class DownloadStatus(object):
def __init__(self, storage_index, size):
self.storage_index = storage_index
self.size = size
-self.counter = self.statusid_counter.next()
+self.counter = next(self.statusid_counter)
self.helper = False
self.first_timestamp = None

@ -205,7 +205,7 @@ class Encoder(object):
assert IStorageBucketWriter.providedBy(landlords[k])
self.landlords = landlords.copy()
assert isinstance(servermap, dict)
-for v in servermap.itervalues():
+for v in servermap.values():
assert isinstance(v, set)
self.servermap = servermap.copy()
@ -410,7 +410,7 @@ class Encoder(object):
assert isinstance(data, (list,tuple))
if self._aborted:
raise UploadAborted()
-data = "".join(data)
+data = b"".join(data)
precondition(len(data) <= read_size, len(data), read_size)
if not allow_short:
precondition(len(data) == read_size, len(data), read_size)
@ -418,7 +418,7 @@ class Encoder(object):
self._crypttext_hasher.update(data)
if allow_short and len(data) < read_size:
# padding
-data += "\x00" * (read_size - len(data))
+data += b"\x00" * (read_size - len(data))
encrypted_pieces = [data[i:i+input_chunk_size]
for i in range(0, len(data), input_chunk_size)]
return encrypted_pieces
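
Note: ciphertext handling must stay in bytes end to end; joining bytes pieces with a str separator (or padding with a str) raises TypeError on Python 3. Roughly:

```python
read_size = 16
data = b"".join([b"abc", b"defgh"])        # bytes pieces need a bytes separator
data += b"\x00" * (read_size - len(data))  # zero-pad a short tail segment
assert len(data) == read_size
```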

@ -27,7 +27,7 @@ from allmydata.interfaces import IUploadable, IUploader, IUploadResults, \
DEFAULT_MAX_SEGMENT_SIZE, IProgress, IPeerSelector
from allmydata.immutable import layout
-from six.moves import cStringIO as StringIO
+from io import BytesIO
from .happiness_upload import share_placement, calculate_happiness
from ..util.eliotutil import (
@ -226,7 +226,7 @@ EXTENSION_SIZE = 1000
# this.
def pretty_print_shnum_to_servers(s):
-return ', '.join([ "sh%s: %s" % (k, '+'.join([idlib.shortnodeid_b2a(x) for x in v])) for k, v in s.iteritems() ])
+return ', '.join([ "sh%s: %s" % (k, '+'.join([idlib.shortnodeid_b2a(x) for x in v])) for k, v in s.items() ])
class ServerTracker(object):
def __init__(self, server,
@ -283,7 +283,7 @@ class ServerTracker(object):
#log.msg("%s._got_reply(%s)" % (self, (alreadygot, buckets)))
(alreadygot, buckets) = alreadygot_and_buckets
b = {}
-for sharenum, rref in buckets.iteritems():
+for sharenum, rref in buckets.items():
bp = self.wbp_class(rref, self._server, self.sharesize,
self.blocksize,
self.num_segments,
@ -352,7 +352,7 @@ class PeerSelector(object):
def get_sharemap_of_preexisting_shares(self):
preexisting = dictutil.DictOfSets()
-for server, shares in self.existing_shares.iteritems():
+for server, shares in self.existing_shares.items():
for share in shares:
preexisting.add(share, server)
return preexisting
@ -419,8 +419,8 @@ class Tahoe2ServerSelector(log.PrefixingLogMixin):
# 12GiB). See #439 for details.
def _get_maxsize(server):
v0 = server.get_version()
-v1 = v0["http://allmydata.org/tahoe/protocols/storage/v1"]
-return v1["maximum-immutable-share-size"]
+v1 = v0[b"http://allmydata.org/tahoe/protocols/storage/v1"]
+return v1[b"maximum-immutable-share-size"]
for server in candidate_servers:
self.peer_selector.add_peer(server.get_serverid())
@ -700,7 +700,7 @@ class Tahoe2ServerSelector(log.PrefixingLogMixin):
% (self, self._get_progress_message(),
pretty_print_shnum_to_servers(merged),
[', '.join([str_shareloc(k,v)
-for k,v in st.buckets.iteritems()])
+for k,v in st.buckets.items()])
for st in self.use_trackers],
pretty_print_shnum_to_servers(self.preexisting_shares))
self.log(msg, level=log.OPERATIONAL)
@ -951,7 +951,7 @@ class EncryptAnUploadable(object):
self._encryptor = aes.create_encryptor(key)
storage_index = storage_index_hash(key)
-assert isinstance(storage_index, str)
+assert isinstance(storage_index, bytes)
# There's no point to having the SI be longer than the key, so we
# specify that it is truncated to the same 128 bits as the AES key.
assert len(storage_index) == 16 # SHA-256 truncated to 128b
@ -1120,7 +1120,7 @@ class UploadStatus(object):
self.progress = [0.0, 0.0, 0.0]
self.active = True
self.results = None
-self.counter = self.statusid_counter.next()
+self.counter = next(self.statusid_counter)
self.started = time.time()
def get_started(self):
@ -1281,7 +1281,7 @@ class CHKUploader(object):
"""
msgtempl = "set_shareholders; upload_trackers is %s, already_serverids is %s"
values = ([', '.join([str_shareloc(k,v)
-for k,v in st.buckets.iteritems()])
+for k,v in st.buckets.items()])
for st in upload_trackers], already_serverids)
self.log(msgtempl % values, level=log.OPERATIONAL)
# record already-present shares in self._results
@ -1697,7 +1697,7 @@ class FileHandle(BaseUploadable):
then the hash will be hashed together with the string in the
"convergence" argument to form the encryption key.
"""
-assert convergence is None or isinstance(convergence, str), (convergence, type(convergence))
+assert convergence is None or isinstance(convergence, bytes), (convergence, type(convergence))
self._filehandle = filehandle
self._key = None
self.convergence = convergence
@ -1787,8 +1787,8 @@ class Data(FileHandle):
then the hash will be hashed together with the string in the
"convergence" argument to form the encryption key.
"""
-assert convergence is None or isinstance(convergence, str), (convergence, type(convergence))
-FileHandle.__init__(self, StringIO(data), convergence=convergence)
+assert convergence is None or isinstance(convergence, bytes), (convergence, type(convergence))
+FileHandle.__init__(self, BytesIO(data), convergence=convergence)
@implementer(IUploader)
class Uploader(service.MultiService, log.PrefixingLogMixin):
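
Note: two related fixes in this file. six.moves.cStringIO is the text-only StringIO on Python 3 and cannot hold a bytes payload, so Data() now wraps it in io.BytesIO; and convergence secrets are checked as bytes. A sketch of the new invariants:

```python
from io import BytesIO

data = b"file contents"
filehandle = BytesIO(data)  # replaces six.moves.cStringIO for bytes data
convergence = b""           # convergence secret is bytes (or None)
assert convergence is None or isinstance(convergence, bytes)
```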

@ -362,7 +362,7 @@ class _Config(object):
if default is _None:
raise MissingConfigEntry("The required configuration file %s is missing."
% (quote_output(privname),))
-if isinstance(default, basestring):
+if isinstance(default, (bytes, unicode)):
value = default
else:
value = default()
@ -375,7 +375,7 @@ class _Config(object):
return it.
"""
privname = os.path.join(self._basedir, "private", name)
-with open(privname, "w") as f:
+with open(privname, "wb") as f:
f.write(value)
def get_private_config(self, name, default=_None):
@ -759,7 +759,9 @@ class Node(service.MultiService):
"""
Initialize/create a directory for temporary files.
"""
-tempdir_config = self.config.get_config("node", "tempdir", "tmp").decode('utf-8')
+tempdir_config = self.config.get_config("node", "tempdir", "tmp")
+if isinstance(tempdir_config, bytes):
+tempdir_config = tempdir_config.decode('utf-8')
tempdir = self.config.get_config_path(tempdir_config)
if not os.path.exists(tempdir):
fileutil.make_dirs(tempdir)
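
Note: private config files are now opened in binary mode so bytes values round-trip, and get_config() can evidently return either bytes or text, hence the defensive decode. The same pattern in miniature (paths are illustrative):

```python
import os, tempfile

basedir = tempfile.mkdtemp()
with open(os.path.join(basedir, "api_auth_token"), "wb") as f:
    f.write(b"secret\n")  # the value is bytes, so the mode must be "wb"

tempdir_config = b"tmp"   # could arrive as bytes or as text
if isinstance(tempdir_config, bytes):
    tempdir_config = tempdir_config.decode("utf-8")
```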

@ -50,8 +50,8 @@ class NodeMaker(object):
def create_from_cap(self, writecap, readcap=None, deep_immutable=False, name=u"<unknown name>"):
# this returns synchronously. It starts with a "cap string".
-assert isinstance(writecap, (str, type(None))), type(writecap)
-assert isinstance(readcap, (str, type(None))), type(readcap)
+assert isinstance(writecap, (bytes, type(None))), type(writecap)
+assert isinstance(readcap, (bytes, type(None))), type(readcap)
bigcap = writecap or readcap
if not bigcap:
@ -63,9 +63,9 @@ class NodeMaker(object):
# The name doesn't matter for caching since it's only used in the error
# attribute of an UnknownNode, and we don't cache those.
if deep_immutable:
memokey = "I" + bigcap
memokey = b"I" + bigcap
else:
-memokey = "M" + bigcap
+memokey = b"M" + bigcap
if memokey in self._node_cache:
node = self._node_cache[memokey]
else:
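
Note: capability strings are bytes on Python 3, so the type checks and the "I"/"M" cache-key prefixes both move to bytes; prefixing a bytes cap with a str would raise TypeError. Sketch with a made-up cap:

```python
writecap = b"URI:SSK:aaaa:bbbb"  # hypothetical capability string, always bytes
assert isinstance(writecap, (bytes, type(None)))
memokey = b"I" + writecap        # bytes + bytes makes a valid cache key
```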

@ -1,3 +1,10 @@
"""
Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
# This contains a test harness that creates a full Tahoe grid in a single
# process (actually in a single MultiService) which does not use the network.
@ -13,6 +20,11 @@
# Tubs, so it is not useful for tests that involve a Helper or the
# control.furl .
+from future.utils import PY2, PY3
+if PY2:
+from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+from past.builtins import unicode
import os
from zope.interface import implementer
from twisted.application import service
@ -257,6 +269,11 @@ class _NoNetworkClient(_Client):
pass
#._servers will be set by the NoNetworkGrid which creates us
+if PY3:
+def init_web(self, *args, **kwargs):
+print("Web service is temporarily disabled until nevow is gone.")
class SimpleStats(object):
def __init__(self):
self.counters = {}
@ -323,7 +340,7 @@ class NoNetworkGrid(service.MultiService):
@defer.inlineCallbacks
def make_client(self, i, write_config=True):
-clientid = hashutil.tagged_hash("clientid", str(i))[:20]
+clientid = hashutil.tagged_hash(b"clientid", b"%d" % i)[:20]
clientdir = os.path.join(self.basedir, "clients",
idlib.shortnodeid_b2a(clientid))
fileutil.make_dirs(clientdir)
@ -358,7 +375,7 @@ class NoNetworkGrid(service.MultiService):
defer.returnValue(c)
def make_server(self, i, readonly=False):
-serverid = hashutil.tagged_hash("serverid", str(i))[:20]
+serverid = hashutil.tagged_hash(b"serverid", b"%d" % i)[:20]
serverdir = os.path.join(self.basedir, "servers",
idlib.shortnodeid_b2a(serverid), "storage")
fileutil.make_dirs(serverdir)
@ -381,18 +398,18 @@ class NoNetworkGrid(service.MultiService):
self.rebuild_serverlist()
def get_all_serverids(self):
-return self.proxies_by_id.keys()
+return list(self.proxies_by_id.keys())
def rebuild_serverlist(self):
self._check_clients()
-self.all_servers = frozenset(self.proxies_by_id.values())
+self.all_servers = frozenset(list(self.proxies_by_id.values()))
for c in self.clients:
c._servers = self.all_servers
def remove_server(self, serverid):
# it's enough to remove the server from c._servers (we don't actually
# have to detach and stopService it)
-for i,ss in self.servers_by_number.items():
+for i,ss in list(self.servers_by_number.items()):
if ss.my_nodeid == serverid:
del self.servers_by_number[i]
break
@ -422,7 +439,7 @@ class NoNetworkGrid(service.MultiService):
def nuke_from_orbit(self):
""" Empty all share directories in this grid. It's the only way to be sure ;-) """
-for server in self.servers_by_number.values():
+for server in list(self.servers_by_number.values()):
for prefixdir in os.listdir(server.sharedir):
if prefixdir != 'incoming':
fileutil.rm_dir(os.path.join(server.sharedir, prefixdir))
@ -506,7 +523,7 @@ class GridTestMixin(object):
si = tahoe_uri.from_string(uri).get_storage_index()
prefixdir = storage_index_to_dir(si)
shares = []
-for i,ss in self.g.servers_by_number.items():
+for i,ss in list(self.g.servers_by_number.items()):
serverid = ss.my_nodeid
basedir = os.path.join(ss.sharedir, prefixdir)
if not os.path.exists(basedir):
@ -527,7 +544,7 @@ class GridTestMixin(object):
return shares
def restore_all_shares(self, shares):
-for sharefile, data in shares.items():
+for sharefile, data in list(shares.items()):
with open(sharefile, "wb") as f:
f.write(data)
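
Note: two details of this port. b"%d" % i works because printf-style formatting for bytes was added in Python 3.5, and the new list(...) wrappers are needed because dict views on Python 3 are lazy and cannot be consumed while the dict is mutated. Illustrative sketch:

```python
i = 3
seed = b"%d" % i  # bytes %-formatting, Python 3.5+
assert seed == b"3"

servers = {1: "s1", 2: "s2"}
ids = list(servers.keys())           # .keys() is a view on Py3, not a list
for n, ss in list(servers.items()):  # copy items so deletion mid-loop is safe
    if n == 1:
        del servers[n]
```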

@ -1,5 +1,16 @@
"""
Test the NoNetworkGrid test harness.
# Test the NoNetworkGrid test harness
Ported to Python 3.
"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+from future.utils import PY2
+if PY2:
+from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
from twisted.trial import unittest
from twisted.application import service
@ -41,8 +52,8 @@ class Harness(unittest.TestCase):
g.setServiceParent(self.s)
c0 = g.clients[0]
-DATA = "Data to upload" * 100
-data = Data(DATA, "")
+DATA = b"Data to upload" * 100
+data = Data(DATA, b"")
d = c0.upload(data)
def _uploaded(res):
n = c0.create_node_from_uri(res.get_uri())

@ -210,17 +210,17 @@ class Extension(testutil.ReallyEqualMixin, unittest.TestCase):
}
ext = uri.pack_extension(data)
d = uri.unpack_extension(ext)
-self.failUnlessReallyEqual(d[b"stuff"], b"value")
-self.failUnlessReallyEqual(d[b"size"], 12)
-self.failUnlessReallyEqual(d[b"big_hash"], hashutil.tagged_hash(b"foo", b"bar"))
+self.failUnlessReallyEqual(d["stuff"], b"value")
+self.failUnlessReallyEqual(d["size"], 12)
+self.failUnlessReallyEqual(d["big_hash"], hashutil.tagged_hash(b"foo", b"bar"))
readable = uri.unpack_extension_readable(ext)
-self.failUnlessReallyEqual(readable[b"needed_shares"], 3)
-self.failUnlessReallyEqual(readable[b"stuff"], b"value")
-self.failUnlessReallyEqual(readable[b"size"], 12)
-self.failUnlessReallyEqual(readable[b"big_hash"],
+self.failUnlessReallyEqual(readable["needed_shares"], 3)
+self.failUnlessReallyEqual(readable["stuff"], b"value")
+self.failUnlessReallyEqual(readable["size"], 12)
+self.failUnlessReallyEqual(readable["big_hash"],
base32.b2a(hashutil.tagged_hash(b"foo", b"bar")))
-self.failUnlessReallyEqual(readable[b"UEB_hash"],
+self.failUnlessReallyEqual(readable["UEB_hash"],
base32.b2a(hashutil.uri_extension_hash(ext)))
class Unknown(testutil.ReallyEqualMixin, unittest.TestCase):

@ -13,8 +13,9 @@ from __future__ import unicode_literals
from future.utils import PY2
if PY2:
-# Don't import bytes, to prevent leaks.
-from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, dict, list, object, range, str, max, min  # noqa: F401
+# Don't import bytes or str, to prevent leaks.
+from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, dict, list, object, range, max, min # noqa: F401
+str = unicode
from past.builtins import unicode, long
@ -928,11 +929,11 @@ def unpack_extension(data):
assert data[length:length+1] == b','
data = data[length+1:]
-d[key] = value
+d[str(key, "utf-8")] = value
# convert certain things to numbers
-for intkey in (b'size', b'segment_size', b'num_segments',
-b'needed_shares', b'total_shares'):
+for intkey in ('size', 'segment_size', 'num_segments',
+'needed_shares', 'total_shares'):
if intkey in d:
d[intkey] = int(d[intkey])
return d
@ -940,9 +941,9 @@ def unpack_extension(data):
def unpack_extension_readable(data):
unpacked = unpack_extension(data)
unpacked[b"UEB_hash"] = hashutil.uri_extension_hash(data)
unpacked["UEB_hash"] = hashutil.uri_extension_hash(data)
for k in sorted(unpacked.keys()):
-if b'hash' in k:
+if 'hash' in k:
unpacked[k] = base32.b2a(unpacked[k])
return unpacked
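
Note: the net effect here is that unpack_extension returns text keys on both Pythons, which is why the tests above drop their b"..." key prefixes, and why the PY2 header re-aliases str to unicode (so str(key, "utf-8") decodes on Python 2 as well). A sketch of the key normalization:

```python
raw_items = [(b"size", b"12"), (b"needed_shares", b"3")]
d = {}
for key, value in raw_items:
    d[str(key, "utf-8")] = value  # bytes key -> native text key
for intkey in ('size', 'needed_shares'):
    if intkey in d:
        d[intkey] = int(d[intkey])  # int() accepts bytes digits on Python 3
assert d == {"size": 12, "needed_shares": 3}
```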

@ -44,6 +44,7 @@ PORTED_MODULES = [
"allmydata.storage.server",
"allmydata.storage.shares",
"allmydata.test.common_py3",
"allmydata.test.no_network",
"allmydata.uri",
"allmydata.util._python3",
"allmydata.util.abbreviate",
@ -93,6 +94,7 @@ PORTED_TEST_MODULES = [
"allmydata.test.test_log",
"allmydata.test.test_monitor",
"allmydata.test.test_netstring",
"allmydata.test.test_no_network",
"allmydata.test.test_observer",
"allmydata.test.test_pipeline",
"allmydata.test.test_python3",

@ -36,5 +36,5 @@ def download_to_data(n, offset=0, size=None, progress=None):
:param progress: None or an IProgress implementer
"""
d = n.read(MemoryConsumer(progress=progress), offset, size)
-d.addCallback(lambda mc: "".join(mc.chunks))
+d.addCallback(lambda mc: b"".join(mc.chunks))
return d