Merge remote-tracking branch 'origin/master' into 3621.port-testing

Itamar Turner-Trauring 2021-03-17 15:08:07 -04:00
commit 95ba731214
14 changed files with 171 additions and 58 deletions

newsfragments/3625.minor Normal file
View File

View File

@ -0,0 +1 @@
Tahoe-LAFS now uses a forked version of txi2p (named txi2p-tahoe) with Python 3 support.

newsfragments/3635.minor Normal file
View File

View File

@ -24,11 +24,17 @@ python.pkgs.buildPythonPackage rec {
# tests within a module.
# Many of these tests don't properly skip when i2p or tor dependencies are
# not supplied (and we are not supplying them).
# not supplied (and we are not supplying them). test_client.py fails because
# version is "unknown" on Nix.
# see https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3629 for the latter.
rm src/allmydata/test/test_i2p_provider.py
rm src/allmydata/test/test_connections.py
rm src/allmydata/test/cli/test_create.py
rm src/allmydata/test/test_client.py
# Since we're deleting files, this complains they're missing. For now Nix
# is Python 2-only, anyway, so these tests don't add anything yet.
rm src/allmydata/test/test_python3.py
'';
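
The comment above notes that several of the removed test modules do not skip cleanly when the optional i2p or tor dependencies are absent. For context, a minimal sketch of the skip pattern such tests could use (module, class, and test names here are hypothetical, not part of this commit):

# Illustrative only: skip i2p-dependent cases when txi2p is not installed.
import unittest

try:
    import txi2p  # optional dependency; absent in this Nix build
except ImportError:
    txi2p = None

class I2PProviderTests(unittest.TestCase):
    @unittest.skipIf(txi2p is None, "txi2p is not installed")
    def test_handler_created(self):
        # Would exercise the i2p provider here if txi2p were available.
        self.assertIsNotNone(txi2p)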

View File

@ -151,8 +151,13 @@ tor_requires = [
]
i2p_requires = [
# txi2p has Python 3 support, but it's unreleased: https://github.com/str4d/txi2p/issues/10.
# txi2p has Python 3 support in master branch, but it has not been
# released -- see https://github.com/str4d/txi2p/issues/10. We
# could use a fork for Python 3 until txi2p's maintainers are back
# in action. For Python 2, we could continue using the txi2p
# version about which no one has complained to us so far.
"txi2p; python_version < '3.0'",
"txi2p-tahoe >= 0.3.5; python_version > '3.0'",
]
if len(sys.argv) > 1 and sys.argv[1] == '--fakedependency':
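
The i2p_requires entries above use PEP 508 environment markers so that pip selects txi2p on Python 2 and txi2p-tahoe on Python 3. A minimal sketch of how such markers evaluate, assuming the packaging library (bundled with modern pip/setuptools) is available:

# Illustrative only: evaluate the markers used in i2p_requires against the
# running interpreter.
from packaging.markers import Marker

py2_marker = Marker("python_version < '3.0'")
py3_marker = Marker("python_version > '3.0'")

# On a Python 3 interpreter this prints False then True, so only the
# txi2p-tahoe requirement applies when the i2p extra is installed.
print(py2_marker.evaluate())
print(py3_marker.evaluate())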

View File

@ -1,4 +1,16 @@
from past.builtins import unicode
"""
Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from future.utils import PY2
if PY2:
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, max, min # noqa: F401
# Don't use future str to prevent leaking future's newbytes into foolscap, which they break.
from past.builtins import unicode as str
import os, stat, time, weakref
from base64 import urlsafe_b64encode
@ -364,8 +376,8 @@ class _StoragePlugins(object):
"""
return set(
config.get_config(
"storage", "plugins", b""
).decode("ascii").split(u",")
"storage", "plugins", ""
).split(u",")
) - {u""}
@classmethod
@ -460,7 +472,7 @@ def create_introducer_clients(config, main_tub, _introducer_factory=None):
introducers = config.get_introducer_configuration()
for petname, (furl, cache_path) in introducers.items():
for petname, (furl, cache_path) in list(introducers.items()):
ic = _introducer_factory(
main_tub,
furl.encode("ascii"),
@ -679,7 +691,7 @@ class _Client(node.Node, pollmixin.PollMixin):
def init_secrets(self):
# configs are always unicode
def _unicode_make_secret():
return unicode(_make_secret(), "ascii")
return str(_make_secret(), "ascii")
lease_s = self.config.get_or_create_private_config(
"secret", _unicode_make_secret).encode("utf-8")
lease_secret = base32.a2b(lease_s)
@ -694,7 +706,7 @@ class _Client(node.Node, pollmixin.PollMixin):
def _make_key():
private_key, _ = ed25519.create_signing_keypair()
# Config values are always unicode:
return unicode(ed25519.string_from_signing_key(private_key) + b"\n", "utf-8")
return str(ed25519.string_from_signing_key(private_key) + b"\n", "utf-8")
private_key_str = self.config.get_or_create_private_config(
"node.privkey", _make_key).encode("utf-8")
@ -870,7 +882,7 @@ class _Client(node.Node, pollmixin.PollMixin):
"""
Register a storage server.
"""
config_key = b"storage-plugin.{}.furl".format(
config_key = "storage-plugin.{}.furl".format(
# Oops, why don't I have a better handle on this value?
announceable_storage_server.announcement[u"name"],
)
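
Several hunks above replace unicode(x, "ascii") with str(x, "ascii"). That works on both interpreters because of the shim in the file header: under PY2 the module-level str is rebound to past.builtins.unicode. A minimal sketch of the pattern, assuming the future/past packages this codebase already depends on:

# Illustrative only: the same call decodes bytes to text on both Pythons.
from future.utils import PY2
if PY2:
    from past.builtins import unicode as str  # noqa: F401

secret_bytes = b"abc123"
secret_text = str(secret_bytes, "ascii")  # u"abc123" on PY2, "abc123" on PY3
assert secret_text == u"abc123"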

View File

@ -7,6 +7,7 @@ from twisted.cred import error, checkers, credentials
from twisted.conch.ssh import keys
from twisted.conch.checkers import SSHPublicKeyChecker, InMemorySSHKeyDB
from allmydata.util.dictutil import BytesKeyDict
from allmydata.util import base32
from allmydata.util.fileutil import abspath_expanduser_unicode
@ -28,18 +29,18 @@ class AccountFileChecker(object):
credentials.ISSHPrivateKey)
def __init__(self, client, accountfile):
self.client = client
self.passwords = {}
pubkeys = {}
self.rootcaps = {}
with open(abspath_expanduser_unicode(accountfile), "r") as f:
self.passwords = BytesKeyDict()
pubkeys = BytesKeyDict()
self.rootcaps = BytesKeyDict()
with open(abspath_expanduser_unicode(accountfile), "rb") as f:
for line in f:
line = line.strip()
if line.startswith("#") or not line:
if line.startswith(b"#") or not line:
continue
name, passwd, rest = line.split(None, 2)
if passwd.startswith("ssh-"):
if passwd.startswith(b"ssh-"):
bits = rest.split()
keystring = " ".join([passwd] + bits[:-1])
keystring = b" ".join([passwd] + bits[:-1])
key = keys.Key.fromString(keystring)
rootcap = bits[-1]
pubkeys[name] = [key]
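
The account-file checker above now reads the file in binary mode and compares against byte literals. A minimal sketch of how one line is parsed under that scheme (the key and cap values here are fake placeholders):

# Illustrative only: parsing a single accounts line as bytes.
line = b"alice ssh-rsa AAAAB3Nza URI:DIR2:aaaa:1111"
name, passwd, rest = line.strip().split(None, 2)
if passwd.startswith(b"ssh-"):
    bits = rest.split()
    keystring = b" ".join([passwd] + bits[:-1])  # b"ssh-rsa AAAAB3Nza"
    rootcap = bits[-1]                           # b"URI:DIR2:aaaa:1111"
else:
    rootcap = rest                               # plain password entry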

View File

@ -37,6 +37,7 @@ from __future__ import unicode_literals
from future.utils import PY2
if PY2:
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
from six import ensure_text
import re, time, hashlib
@ -198,6 +199,7 @@ class StorageFarmBroker(service.MultiService):
# doesn't really matter but it makes the logging behavior more
# predictable and easier to test (and at least one test does depend on
# this sorted order).
servers = {ensure_text(key): value for (key, value) in servers.items()}
for (server_id, server) in sorted(servers.items()):
try:
storage_server = self._make_storage_server(
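
The ensure_text normalization above makes the dict keys uniformly text before sorting, so iteration order is the same whether announcements arrive with bytes or unicode server IDs. A minimal sketch, assuming six (already imported in this module):

# Illustrative only: bytes and text IDs normalize to the same text key.
from six import ensure_text

servers = {b"v0-bbb": {"ann": {}}, u"v0-aaa": {"ann": {}}}
normalized = {ensure_text(key): value for (key, value) in servers.items()}
assert sorted(normalized) == [u"v0-aaa", u"v0-bbb"]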

View File

@ -1,6 +1,16 @@
"""
Testtools-style matchers useful to the Tahoe-LAFS test suite.
Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from future.utils import PY2
if PY2:
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
import attr
@ -51,7 +61,7 @@ class MatchesNodePublicKey(object):
:return Mismatch: If the keys don't match.
"""
config = read_config(self.basedir, u"tub.port")
privkey_bytes = config.get_private_config("node.privkey")
privkey_bytes = config.get_private_config("node.privkey").encode("utf-8")
private_key = ed25519.signing_keypair_from_string(privkey_bytes)[0]
signature = ed25519.sign_data(private_key, b"")
other_public_key = ed25519.verifying_key_from_signing_key(other)
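
The matcher above reads node.privkey as text, re-encodes it to bytes, and rebuilds a signing key from it. A minimal round-trip sketch using the same allmydata.crypto.ed25519 helpers the diff relies on (behavior assumed from their use above, not defined by this commit):

# Illustrative only: serialize a signing key, reload it, and sign with it.
from allmydata.crypto import ed25519

private_key, public_key = ed25519.create_signing_keypair()
privkey_bytes = ed25519.string_from_signing_key(private_key)  # what node.privkey stores
reloaded_key = ed25519.signing_keypair_from_string(privkey_bytes)[0]
signature = ed25519.sign_data(reloaded_key, b"")
derived_public = ed25519.verifying_key_from_signing_key(reloaded_key)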

View File

@ -1,4 +1,11 @@
# Python 2 compatibility
"""
Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from future.utils import PY2
if PY2:
from future.builtins import str # noqa: F401
@ -35,7 +42,7 @@ DUMMY_ACCOUNTS = u"""\
alice password URI:DIR2:aaaaaaaaaaaaaaaaaaaaaaaaaa:1111111111111111111111111111111111111111111111111111
bob sekrit URI:DIR2:bbbbbbbbbbbbbbbbbbbbbbbbbb:2222222222222222222222222222222222222222222222222222
carol {key} URI:DIR2:cccccccccccccccccccccccccc:3333333333333333333333333333333333333333333333333333
""".format(key=DUMMY_KEY.public().toString("openssh")).encode("ascii")
""".format(key=str(DUMMY_KEY.public().toString("openssh"), "ascii")).encode("ascii")
class AccountFileCheckerKeyTests(unittest.TestCase):
"""

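The DUMMY_ACCOUNTS change above decodes the OpenSSH key before interpolating it, because on Python 3 formatting a bytes value into a text template embeds its repr. A minimal sketch of the pitfall (the key material here is a fake placeholder):

# Illustrative only: why the bytes key must be decoded before .format().
key_bytes = b"ssh-rsa AAAAB3Nza"

broken = u"carol {key} rootcap".format(key=key_bytes)
assert broken == u"carol b'ssh-rsa AAAAB3Nza' rootcap"  # on Python 3 the repr leaks in

fixed = u"carol {key} rootcap".format(key=str(key_bytes, "ascii"))
assert fixed == u"carol ssh-rsa AAAAB3Nza rootcap"
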
View File

@ -1,3 +1,15 @@
"""
Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from future.utils import PY2
if PY2:
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
import os, sys
from functools import (
partial,
@ -21,7 +33,6 @@ from hypothesis.strategies import (
)
from eliot.testing import (
capture_logging,
assertHasAction,
)
from twisted.trial import unittest
@ -62,6 +73,7 @@ from allmydata.util import (
encodingutil,
configutil,
)
from allmydata.util.eliotutil import capture_logging
from allmydata.util.fileutil import abspath_expanduser_unicode
from allmydata.interfaces import IFilesystemNode, IFileNode, \
IImmutableFileNode, IMutableFileNode, IDirectoryNode
@ -186,7 +198,7 @@ class Basic(testutil.ReallyEqualMixin, unittest.TestCase):
basedir,
"client.port",
)
abs_basedir = fileutil.abspath_expanduser_unicode(unicode(basedir)).encode(sys.getfilesystemencoding())
abs_basedir = fileutil.abspath_expanduser_unicode(str(basedir))
self.failUnlessIn(os.path.join(abs_basedir, "introducer.furl"), e.args[0])
self.failUnlessIn(os.path.join(abs_basedir, "no_storage"), e.args[0])
self.failUnlessIn(os.path.join(abs_basedir, "readonly_storage"), e.args[0])
@ -234,7 +246,7 @@ class Basic(testutil.ReallyEqualMixin, unittest.TestCase):
fileutil.write(os.path.join(basedir, "tahoe.cfg"),
BASECONFIG)
c = yield client.create_client(basedir)
self.failUnless(c.get_long_nodeid().startswith("v0-"))
self.failUnless(c.get_long_nodeid().startswith(b"v0-"))
@defer.inlineCallbacks
def test_nodekey_no_storage(self):
@ -246,7 +258,7 @@ class Basic(testutil.ReallyEqualMixin, unittest.TestCase):
fileutil.write(os.path.join(basedir, "tahoe.cfg"),
BASECONFIG + "[storage]\n" + "enabled = false\n")
c = yield client.create_client(basedir)
self.failUnless(c.get_long_nodeid().startswith("v0-"))
self.failUnless(c.get_long_nodeid().startswith(b"v0-"))
def test_storage_anonymous_enabled_by_default(self):
"""
@ -431,6 +443,9 @@ class Basic(testutil.ReallyEqualMixin, unittest.TestCase):
"""
generic helper for following storage_dir tests
"""
assert isinstance(basedir, str)
assert isinstance(storage_path, (str, type(None)))
assert isinstance(expected_path, str)
os.mkdir(basedir)
cfg_path = os.path.join(basedir, "tahoe.cfg")
fileutil.write(
@ -477,7 +492,7 @@ class Basic(testutil.ReallyEqualMixin, unittest.TestCase):
the node's basedir.
"""
basedir = u"client.Basic.test_relative_storage_dir"
config_path = b"myowndir"
config_path = u"myowndir"
expected_path = os.path.join(
abspath_expanduser_unicode(basedir),
u"myowndir",
@ -504,7 +519,7 @@ class Basic(testutil.ReallyEqualMixin, unittest.TestCase):
expected_path = abspath_expanduser_unicode(
u"client.Basic.test_absolute_storage_dir_myowndir/" + base
)
config_path = expected_path.encode("utf-8")
config_path = expected_path
return self._storage_dir_test(
basedir,
config_path,
@ -515,33 +530,62 @@ class Basic(testutil.ReallyEqualMixin, unittest.TestCase):
return [ s.get_longname() for s in sb.get_servers_for_psi(key) ]
def test_permute(self):
"""
Permutations need to be stable across Tahoe releases, which is why we
hardcode a specific expected order.
This is because the order of these results determines which servers a
client will choose to place shares on and which servers it will consult
(and in what order) when trying to retrieve those shares. If the order
ever changes, all already-placed shares become (at best) harder to find
or (at worst) impossible to find.
"""
sb = StorageFarmBroker(True, None, EMPTY_CLIENT_CONFIG)
for k in ["%d" % i for i in range(5)]:
ks = [b"%d" % i for i in range(5)]
for k in ks:
ann = {"anonymous-storage-FURL": SOME_FURL,
"permutation-seed-base32": base32.b2a(k) }
sb.test_add_rref(k, "rref", ann)
self.failUnlessReallyEqual(self._permute(sb, "one"), ['3','1','0','4','2'])
self.failUnlessReallyEqual(self._permute(sb, "two"), ['0','4','2','1','3'])
one = self._permute(sb, b"one")
two = self._permute(sb, b"two")
self.failUnlessReallyEqual(one, [b'3',b'1',b'0',b'4',b'2'])
self.failUnlessReallyEqual(two, [b'0',b'4',b'2',b'1',b'3'])
self.assertEqual(sorted(one), ks)
self.assertEqual(sorted(two), ks)
self.assertNotEqual(one, two)
sb.servers.clear()
self.failUnlessReallyEqual(self._permute(sb, "one"), [])
self.failUnlessReallyEqual(self._permute(sb, b"one"), [])
def test_permute_with_preferred(self):
"""
Permutations need to be stable across Tahoe releases, which is why we
hardcode a specific expected order. In this case, two values are
preferred and should come first.
"""
sb = StorageFarmBroker(
True,
None,
EMPTY_CLIENT_CONFIG,
StorageClientConfig(preferred_peers=['1','4']),
StorageClientConfig(preferred_peers=[b'1',b'4']),
)
for k in ["%d" % i for i in range(5)]:
ks = [b"%d" % i for i in range(5)]
for k in [b"%d" % i for i in range(5)]:
ann = {"anonymous-storage-FURL": SOME_FURL,
"permutation-seed-base32": base32.b2a(k) }
sb.test_add_rref(k, "rref", ann)
self.failUnlessReallyEqual(self._permute(sb, "one"), ['1','4','3','0','2'])
self.failUnlessReallyEqual(self._permute(sb, "two"), ['4','1','0','2','3'])
one = self._permute(sb, b"one")
two = self._permute(sb, b"two")
self.failUnlessReallyEqual(b"".join(one), b'14302')
self.failUnlessReallyEqual(b"".join(two), b'41023')
self.assertEqual(sorted(one), ks)
self.assertEqual(sorted(one[:2]), [b"1", b"4"])
self.assertEqual(sorted(two), ks)
self.assertEqual(sorted(two[:2]), [b"1", b"4"])
self.assertNotEqual(one, two)
sb.servers.clear()
self.failUnlessReallyEqual(self._permute(sb, "one"), [])
self.failUnlessReallyEqual(self._permute(sb, b"one"), [])
@defer.inlineCallbacks
def test_versions(self):
@ -557,8 +601,8 @@ class Basic(testutil.ReallyEqualMixin, unittest.TestCase):
c = yield client.create_client(basedir)
ss = c.getServiceNamed("storage")
verdict = ss.remote_get_version()
self.failUnlessReallyEqual(verdict["application-version"],
str(allmydata.__full_version__))
self.failUnlessReallyEqual(verdict[b"application-version"],
allmydata.__full_version__.encode("ascii"))
self.failIfEqual(str(allmydata.__version__), "unknown")
self.failUnless("." in str(allmydata.__full_version__),
"non-numeric version in '%s'" % allmydata.__version__)
@ -783,7 +827,7 @@ class StaticServers(Fixture):
for (serverid, announcement)
in self._server_details
},
}))
}).encode("utf-8"))
class StorageClients(SyncTestCase):
@ -832,7 +876,7 @@ class StorageClients(SyncTestCase):
succeeded(
AfterPreprocessing(
get_known_server_details,
Equals([(serverid, announcement)]),
Equals([(serverid.encode("utf-8"), announcement)]),
),
),
)
@ -859,7 +903,7 @@ class StorageClients(SyncTestCase):
self.useFixture(
StaticServers(
self.basedir,
[(serverid, announcement),
[(serverid.encode("ascii"), announcement),
# Along with a "bad" server announcement. Order in this list
# doesn't matter, yaml serializer and Python dicts are going
# to shuffle everything around kind of randomly.
@ -876,7 +920,7 @@ class StorageClients(SyncTestCase):
AfterPreprocessing(
get_known_server_details,
# It should have the good server details.
Equals([(serverid, announcement)]),
Equals([(serverid.encode("utf-8"), announcement)]),
),
),
)
@ -903,7 +947,7 @@ class Run(unittest.TestCase, testutil.StallMixin):
private.makedirs()
dummy = "pb://wl74cyahejagspqgy4x5ukrvfnevlknt@127.0.0.1:58889/bogus"
write_introducer(basedir, "someintroducer", dummy)
basedir.child("tahoe.cfg").setContent(BASECONFIG)
basedir.child("tahoe.cfg").setContent(BASECONFIG.encode("ascii"))
basedir.child(client._Client.EXIT_TRIGGER_FILE).touch()
yield client.create_client(basedir.path)
@ -914,7 +958,7 @@ class Run(unittest.TestCase, testutil.StallMixin):
private.makedirs()
dummy = "pb://wl74cyahejagspqgy4x5ukrvfnevlknt@127.0.0.1:58889/bogus"
write_introducer(basedir, "someintroducer", dummy)
basedir.child("tahoe.cfg").setContent(BASECONFIG)
basedir.child("tahoe.cfg").setContent(BASECONFIG.encode("ascii"))
c1 = yield client.create_client(basedir.path)
c1.setServiceParent(self.sparent)
@ -1041,7 +1085,7 @@ class NodeMakerTests(testutil.ReallyEqualMixin, AsyncBrokenTestCase):
fileutil.write(os.path.join(basedir, "tahoe.cfg"), BASECONFIG)
c = yield client.create_client(basedir)
n = c.create_node_from_uri("URI:CHK:6nmrpsubgbe57udnexlkiwzmlu:bjt7j6hshrlmadjyr7otq3dc24end5meo5xcr5xe5r663po6itmq:3:10:7277")
n = c.create_node_from_uri(b"URI:CHK:6nmrpsubgbe57udnexlkiwzmlu:bjt7j6hshrlmadjyr7otq3dc24end5meo5xcr5xe5r663po6itmq:3:10:7277")
self.failUnless(IFilesystemNode.providedBy(n))
self.failUnless(IFileNode.providedBy(n))
self.failUnless(IImmutableFileNode.providedBy(n))
@ -1059,10 +1103,10 @@ class NodeMakerTests(testutil.ReallyEqualMixin, AsyncBrokenTestCase):
# current fix for this (hopefully to be superseded by a better fix
# eventually) is to prevent re-use of filenodes, so the NodeMaker is
# hereby required *not* to cache and re-use filenodes for CHKs.
other_n = c.create_node_from_uri("URI:CHK:6nmrpsubgbe57udnexlkiwzmlu:bjt7j6hshrlmadjyr7otq3dc24end5meo5xcr5xe5r663po6itmq:3:10:7277")
other_n = c.create_node_from_uri(b"URI:CHK:6nmrpsubgbe57udnexlkiwzmlu:bjt7j6hshrlmadjyr7otq3dc24end5meo5xcr5xe5r663po6itmq:3:10:7277")
self.failIf(n is other_n, (n, other_n))
n = c.create_node_from_uri("URI:LIT:n5xgk")
n = c.create_node_from_uri(b"URI:LIT:n5xgk")
self.failUnless(IFilesystemNode.providedBy(n))
self.failUnless(IFileNode.providedBy(n))
self.failUnless(IImmutableFileNode.providedBy(n))
@ -1071,7 +1115,7 @@ class NodeMakerTests(testutil.ReallyEqualMixin, AsyncBrokenTestCase):
self.failUnless(n.is_readonly())
self.failIf(n.is_mutable())
n = c.create_node_from_uri("URI:SSK:n6x24zd3seu725yluj75q5boaa:mm6yoqjhl6ueh7iereldqxue4nene4wl7rqfjfybqrehdqmqskvq")
n = c.create_node_from_uri(b"URI:SSK:n6x24zd3seu725yluj75q5boaa:mm6yoqjhl6ueh7iereldqxue4nene4wl7rqfjfybqrehdqmqskvq")
self.failUnless(IFilesystemNode.providedBy(n))
self.failUnless(IFileNode.providedBy(n))
self.failIf(IImmutableFileNode.providedBy(n))
@ -1080,7 +1124,7 @@ class NodeMakerTests(testutil.ReallyEqualMixin, AsyncBrokenTestCase):
self.failIf(n.is_readonly())
self.failUnless(n.is_mutable())
n = c.create_node_from_uri("URI:SSK-RO:b7sr5qsifnicca7cbk3rhrhbvq:mm6yoqjhl6ueh7iereldqxue4nene4wl7rqfjfybqrehdqmqskvq")
n = c.create_node_from_uri(b"URI:SSK-RO:b7sr5qsifnicca7cbk3rhrhbvq:mm6yoqjhl6ueh7iereldqxue4nene4wl7rqfjfybqrehdqmqskvq")
self.failUnless(IFilesystemNode.providedBy(n))
self.failUnless(IFileNode.providedBy(n))
self.failIf(IImmutableFileNode.providedBy(n))
@ -1089,7 +1133,7 @@ class NodeMakerTests(testutil.ReallyEqualMixin, AsyncBrokenTestCase):
self.failUnless(n.is_readonly())
self.failUnless(n.is_mutable())
n = c.create_node_from_uri("URI:DIR2:n6x24zd3seu725yluj75q5boaa:mm6yoqjhl6ueh7iereldqxue4nene4wl7rqfjfybqrehdqmqskvq")
n = c.create_node_from_uri(b"URI:DIR2:n6x24zd3seu725yluj75q5boaa:mm6yoqjhl6ueh7iereldqxue4nene4wl7rqfjfybqrehdqmqskvq")
self.failUnless(IFilesystemNode.providedBy(n))
self.failIf(IFileNode.providedBy(n))
self.failIf(IImmutableFileNode.providedBy(n))
@ -1098,7 +1142,7 @@ class NodeMakerTests(testutil.ReallyEqualMixin, AsyncBrokenTestCase):
self.failIf(n.is_readonly())
self.failUnless(n.is_mutable())
n = c.create_node_from_uri("URI:DIR2-RO:b7sr5qsifnicca7cbk3rhrhbvq:mm6yoqjhl6ueh7iereldqxue4nene4wl7rqfjfybqrehdqmqskvq")
n = c.create_node_from_uri(b"URI:DIR2-RO:b7sr5qsifnicca7cbk3rhrhbvq:mm6yoqjhl6ueh7iereldqxue4nene4wl7rqfjfybqrehdqmqskvq")
self.failUnless(IFilesystemNode.providedBy(n))
self.failIf(IFileNode.providedBy(n))
self.failIf(IImmutableFileNode.providedBy(n))
@ -1107,8 +1151,8 @@ class NodeMakerTests(testutil.ReallyEqualMixin, AsyncBrokenTestCase):
self.failUnless(n.is_readonly())
self.failUnless(n.is_mutable())
unknown_rw = "lafs://from_the_future"
unknown_ro = "lafs://readonly_from_the_future"
unknown_rw = b"lafs://from_the_future"
unknown_ro = b"lafs://readonly_from_the_future"
n = c.create_node_from_uri(unknown_rw, unknown_ro)
self.failUnless(IFilesystemNode.providedBy(n))
self.failIf(IFileNode.providedBy(n))
@ -1118,7 +1162,7 @@ class NodeMakerTests(testutil.ReallyEqualMixin, AsyncBrokenTestCase):
self.failUnless(n.is_unknown())
self.failUnlessReallyEqual(n.get_uri(), unknown_rw)
self.failUnlessReallyEqual(n.get_write_uri(), unknown_rw)
self.failUnlessReallyEqual(n.get_readonly_uri(), "ro." + unknown_ro)
self.failUnlessReallyEqual(n.get_readonly_uri(), b"ro." + unknown_ro)
# Note: it isn't that we *intend* to deploy non-ASCII caps in
# the future, it is that we want to make sure older Tahoe-LAFS
@ -1135,7 +1179,7 @@ class NodeMakerTests(testutil.ReallyEqualMixin, AsyncBrokenTestCase):
self.failUnless(n.is_unknown())
self.failUnlessReallyEqual(n.get_uri(), unknown_rw)
self.failUnlessReallyEqual(n.get_write_uri(), unknown_rw)
self.failUnlessReallyEqual(n.get_readonly_uri(), "ro." + unknown_ro)
self.failUnlessReallyEqual(n.get_readonly_uri(), b"ro." + unknown_ro)
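
The new docstrings above explain why the permutation must stay stable: it decides where shares are placed and where a later download looks for them. Conceptually the broker orders servers by a hash that mixes each server's permutation seed with the peer-selection index; the following is only a generic sketch of that idea, not Tahoe's actual construction:

# Illustrative only: a stable, key-dependent ordering of servers.
from hashlib import sha256

def permute(server_seeds, peer_selection_index):
    # Deterministic for a given index, but different indexes give
    # different orderings.
    return sorted(
        server_seeds,
        key=lambda seed: sha256(seed + peer_selection_index).digest(),
    )

servers = [b"0", b"1", b"2", b"3", b"4"]
assert permute(servers, b"one") == permute(servers, b"one")  # stable
assert sorted(permute(servers, b"one")) == servers           # a permutation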

View File

@ -1,5 +1,17 @@
# -*- coding: utf-8 -*-
"""
Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from future.utils import PY2
if PY2:
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
import os, shutil
from twisted.trial import unittest
from twisted.internet import defer
@ -14,8 +26,8 @@ from allmydata.test.common import ShouldFailMixin
from allmydata.util.pollmixin import PollMixin
from allmydata.interfaces import NotEnoughSharesError
immutable_plaintext = "data" * 10000
mutable_plaintext = "muta" * 10000
immutable_plaintext = b"data" * 10000
mutable_plaintext = b"muta" * 10000
class HungServerDownloadTest(GridTestMixin, ShouldFailMixin, PollMixin,
unittest.TestCase):
@ -105,7 +117,7 @@ class HungServerDownloadTest(GridTestMixin, ShouldFailMixin, PollMixin,
self.shares = self.find_uri_shares(self.uri)
d.addCallback(_uploaded_mutable)
else:
data = upload.Data(immutable_plaintext, convergence="")
data = upload.Data(immutable_plaintext, convergence=b"")
d = self.c0.upload(data)
def _uploaded_immutable(upload_res):
self.uri = upload_res.get_uri()
@ -262,7 +274,7 @@ class HungServerDownloadTest(GridTestMixin, ShouldFailMixin, PollMixin,
# is shut off. That will leave 4 OVERDUE and 1
# stuck-but-not-overdue, for a total of 5 requests in
# _sf.pending_requests
for t in self._sf.overdue_timers.values()[:4]:
for t in list(self._sf.overdue_timers.values())[:4]:
t.reset(-1.0)
# the timers ought to fire before the eventual-send does
return fireEventually()
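
The list() wrapper added above is needed because on Python 3 dict.values() returns a view object that cannot be sliced. A minimal sketch of the difference:

# Illustrative only: slicing a values() view fails on Python 3.
timers = {"a": 1, "b": 2, "c": 3, "d": 4, "e": 5}
try:
    first_four = timers.values()[:4]         # TypeError on Python 3
except TypeError:
    first_four = list(timers.values())[:4]   # works on both Pythons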

View File

@ -29,6 +29,7 @@ PORTED_MODULES = [
"allmydata._monkeypatch",
"allmydata.blacklist",
"allmydata.check_results",
"allmydata.client",
"allmydata.codec",
"allmydata.control",
"allmydata.crypto",
@ -86,6 +87,7 @@ PORTED_MODULES = [
"allmydata.storage.server",
"allmydata.storage.shares",
"allmydata.test.no_network",
"allmydata.test.matchers",
"allmydata.test.mutable.util",
"allmydata.testing",
"allmydata.unknown",
@ -153,9 +155,11 @@ PORTED_TEST_MODULES = [
"allmydata.test.mutable.test_update",
"allmydata.test.mutable.test_version",
"allmydata.test.test_abbreviate",
"allmydata.test.test_auth",
"allmydata.test.test_base32",
"allmydata.test.test_base62",
"allmydata.test.test_checker",
"allmydata.test.test_client",
"allmydata.test.test_codec",
"allmydata.test.test_common_util",
"allmydata.test.test_configutil",
@ -180,6 +184,7 @@ PORTED_TEST_MODULES = [
"allmydata.test.test_hashutil",
"allmydata.test.test_helper",
"allmydata.test.test_humanreadable",
"allmydata.test.test_hung_server",
"allmydata.test.test_immutable",
"allmydata.test.test_introducer",
"allmydata.test.test_iputil",

View File

@ -32,7 +32,7 @@ from six import ensure_text
from sys import (
stdout,
)
from functools import wraps
from functools import wraps, partial
from logging import (
INFO,
Handler,
@ -66,6 +66,7 @@ from eliot.twisted import (
DeferredContext,
inline_callbacks,
)
from eliot.testing import capture_logging as eliot_capture_logging
from twisted.python.usage import (
UsageError,
@ -326,3 +327,10 @@ def log_call_deferred(action_type):
return DeferredContext(d).addActionFinish()
return logged_f
return decorate_log_call_deferred
# On Python 3, encoding bytes to JSON doesn't work, so we have a custom JSON
# encoder we want to use when validating messages.
if PY2:
capture_logging = eliot_capture_logging
else:
capture_logging = partial(eliot_capture_logging, encoder_=BytesJSONEncoder)
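
The partial() above pre-binds eliot's encoder_ keyword, so every test that imports capture_logging from this module validates messages with the bytes-aware encoder on Python 3. A hedged usage sketch (the test class and method names are hypothetical):

# Illustrative only: how a test module would use the wrapped decorator.
from twisted.trial import unittest
from allmydata.util.eliotutil import capture_logging

class LoggingTests(unittest.TestCase):
    @capture_logging(None)  # no extra assertion function
    def test_messages_validate(self, logger):
        # eliot passes in a MemoryLogger; messages logged during the test
        # are serialized with BytesJSONEncoder, so bytes fields validate.
        pass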