diff --git a/misc/operations_helpers/provisioning/provisioning.py b/misc/operations_helpers/provisioning/provisioning.py
index 37acd16d2..d6dfc4cd7 100644
--- a/misc/operations_helpers/provisioning/provisioning.py
+++ b/misc/operations_helpers/provisioning/provisioning.py
@@ -46,7 +46,7 @@ class ProvisioningTool(rend.Page):
         req = inevow.IRequest(ctx)

         def getarg(name, astype=int):
-            if req.method != "POST":
+            if req.method != b"POST":
                 return None
             if name in req.fields:
                 return astype(req.fields[name].value)
diff --git a/newsfragments/3596.minor b/newsfragments/3596.minor
new file mode 100644
index 000000000..e69de29bb
diff --git a/src/allmydata/test/common.py b/src/allmydata/test/common.py
index fde92fb59..8d50390e4 100644
--- a/src/allmydata/test/common.py
+++ b/src/allmydata/test/common.py
@@ -432,7 +432,7 @@ class FakeCHKFileNode(object): # type: ignore # incomplete implementation
         return self.storage_index

     def check(self, monitor, verify=False, add_lease=False):
-        s = StubServer("\x00"*20)
+        s = StubServer(b"\x00"*20)
         r = CheckResults(self.my_uri, self.storage_index,
                          healthy=True, recoverable=True,
                          count_happiness=10,
@@ -566,12 +566,12 @@ class FakeMutableFileNode(object): # type: ignore # incomplete implementation
         self.file_types[self.storage_index] = version
         initial_contents = self._get_initial_contents(contents)
         data = initial_contents.read(initial_contents.get_size())
-        data = "".join(data)
+        data = b"".join(data)
         self.all_contents[self.storage_index] = data
         return defer.succeed(self)
     def _get_initial_contents(self, contents):
         if contents is None:
-            return MutableData("")
+            return MutableData(b"")

         if IMutableUploadable.providedBy(contents):
             return contents
@@ -625,7 +625,7 @@ class FakeMutableFileNode(object): # type: ignore # incomplete implementation
     def raise_error(self):
         pass
     def get_writekey(self):
-        return "\x00"*16
+        return b"\x00"*16
     def get_size(self):
         return len(self.all_contents[self.storage_index])
     def get_current_size(self):
@@ -644,7 +644,7 @@ class FakeMutableFileNode(object): # type: ignore # incomplete implementation
         return self.file_types[self.storage_index]

     def check(self, monitor, verify=False, add_lease=False):
-        s = StubServer("\x00"*20)
+        s = StubServer(b"\x00"*20)
         r = CheckResults(self.my_uri, self.storage_index,
                          healthy=True, recoverable=True,
                          count_happiness=10,
@@ -655,7 +655,7 @@ class FakeMutableFileNode(object): # type: ignore # incomplete implementation
                          count_recoverable_versions=1,
                          count_unrecoverable_versions=0,
                          servers_responding=[s],
-                         sharemap={"seq1-abcd-sh0": [s]},
+                         sharemap={b"seq1-abcd-sh0": [s]},
                          count_wrong_shares=0,
                          list_corrupt_shares=[],
                          count_corrupt_shares=0,
@@ -709,7 +709,7 @@ class FakeMutableFileNode(object): # type: ignore # incomplete implementation
     def overwrite(self, new_contents):
         assert not self.is_readonly()
         new_data = new_contents.read(new_contents.get_size())
-        new_data = "".join(new_data)
+        new_data = b"".join(new_data)
         self.all_contents[self.storage_index] = new_data
         return defer.succeed(None)
     def modify(self, modifier):
@@ -740,7 +740,7 @@ class FakeMutableFileNode(object): # type: ignore # incomplete implementation
     def update(self, data, offset):
         assert not self.is_readonly()
         def modifier(old, servermap, first_time):
-            new = old[:offset] + "".join(data.read(data.get_size()))
+            new = old[:offset] + b"".join(data.read(data.get_size()))
             new += old[len(new):]
             return new
         return self.modify(modifier)
@@ -859,6 +859,8 @@ class WebErrorMixin(object):
         body = yield response.content()
         self.assertEquals(response.code, code)
         if response_substring is not None:
+            if isinstance(response_substring, unicode):
+                response_substring = response_substring.encode("utf-8")
             self.assertIn(response_substring, body)
         returnValue(body)

diff --git a/src/allmydata/test/common_util.py b/src/allmydata/test/common_util.py
index 2a70cff3a..f898e75b2 100644
--- a/src/allmydata/test/common_util.py
+++ b/src/allmydata/test/common_util.py
@@ -203,6 +203,14 @@ def flip_one_bit(s, offset=0, size=None):

 class ReallyEqualMixin(object):
     def failUnlessReallyEqual(self, a, b, msg=None):
         self.assertEqual(a, b, msg)
+        # Make sure unicode strings are a consistent type. Specifically there's
+        # Future newstr (backported Unicode type) vs. Python 2 native unicode
+        # type. They're equal, and _logically_ the same type, but have
+        # different types in practice.
+        if a.__class__ == future_str:
+            a = unicode(a)
+        if b.__class__ == future_str:
+            b = unicode(b)
         self.assertEqual(type(a), type(b),
                          "a :: %r (%s), b :: %r (%s), %r" % (a, type(a), b, type(b), msg))
diff --git a/src/allmydata/test/test_util.py b/src/allmydata/test/test_util.py
index c556eb4b9..5f5db82bd 100644
--- a/src/allmydata/test/test_util.py
+++ b/src/allmydata/test/test_util.py
@@ -491,12 +491,16 @@ class JSONBytes(unittest.TestCase):
     """Tests for BytesJSONEncoder."""

     def test_encode_bytes(self):
-        """BytesJSONEncoder can encode bytes."""
+        """BytesJSONEncoder can encode bytes.
+
+        Bytes are presumed to be UTF-8 encoded.
+        """
+        snowman = u"def\N{SNOWMAN}\uFF00"
         data = {
-            b"hello": [1, b"cd"],
+            b"hello": [1, b"cd", {b"abc": [123, snowman.encode("utf-8")]}],
         }
         expected = {
-            u"hello": [1, u"cd"],
+            u"hello": [1, u"cd", {u"abc": [123, snowman]}],
         }
         # Bytes get passed through as if they were UTF-8 Unicode:
         encoded = jsonbytes.dumps(data)
diff --git a/src/allmydata/test/web/test_web.py b/src/allmydata/test/web/test_web.py
index 2f000b7a1..cebe709c1 100644
--- a/src/allmydata/test/web/test_web.py
+++ b/src/allmydata/test/web/test_web.py
@@ -1,8 +1,19 @@
+"""
+Ported to Python 3.
+""" from __future__ import print_function +from __future__ import absolute_import +from __future__ import division +from __future__ import unicode_literals -import os.path, re, urllib, time -import json +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 +from six import ensure_binary + +import os.path, re, time import treq +from urllib.parse import quote as urlquote, unquote as urlunquote from bs4 import BeautifulSoup @@ -23,7 +34,7 @@ from allmydata.immutable.downloader.status import DownloadStatus from allmydata.dirnode import DirectoryNode from allmydata.nodemaker import NodeMaker from allmydata.web.common import MultiFormatResource -from allmydata.util import fileutil, base32, hashutil +from allmydata.util import fileutil, base32, hashutil, jsonbytes as json from allmydata.util.consumer import download_to_data from allmydata.util.encodingutil import to_bytes from ...util.connection_status import ConnectionStatus @@ -115,8 +126,8 @@ class FakeUploader(service.Service): servermap={}, timings={}, uri_extension_data={}, - uri_extension_hash="fake", - verifycapstr="fakevcap") + uri_extension_hash=b"fake", + verifycapstr=b"fakevcap") ur.set_uri(n.get_uri()) return ur d.addCallback(_got_data) @@ -210,7 +221,7 @@ class FakeDisplayableServer(StubServer): # type: ignore # tahoe-lafs/ticket/35 "application-version": "1.0" } def get_permutation_seed(self): - return "" + return b"" def get_announcement(self): return self.announcement def get_nickname(self): @@ -266,22 +277,22 @@ class FakeClient(_Client): # type: ignore # tahoe-lafs/ticket/3573 self.introducer_furls = [] self.introducer_clients = [] self.stats_provider = FakeStatsProvider() - self._secret_holder = SecretHolder("lease secret", "convergence secret") + self._secret_holder = SecretHolder(b"lease secret", b"convergence secret") self.helper = None - self.convergence = "some random string" + self.convergence = b"some random string" self.storage_broker = StorageFarmBroker( permute_peers=True, tub_maker=None, node_config=EMPTY_CLIENT_CONFIG, ) # fake knowledge of another server - self.storage_broker.test_add_server("other_nodeid", + self.storage_broker.test_add_server(b"other_nodeid", FakeDisplayableServer( serverid=b"other_nodeid", nickname=u"other_nickname \u263B", connected = True, last_connect_time = 10, last_loss_time = 20, last_rx_time = 30)) - self.storage_broker.test_add_server("disconnected_nodeid", + self.storage_broker.test_add_server(b"disconnected_nodeid", FakeDisplayableServer( - serverid="disconnected_nodeid", nickname=u"disconnected_nickname \u263B", connected = False, + serverid=b"disconnected_nodeid", nickname=u"disconnected_nickname \u263B", connected = False, last_connect_time = None, last_loss_time = 25, last_rx_time = 35)) self.introducer_client = None self.history = FakeHistory() @@ -297,12 +308,12 @@ class FakeClient(_Client): # type: ignore # tahoe-lafs/ticket/3573 self.addService(FakeStorageServer(self.nodeid, self.nickname)) def get_long_nodeid(self): - return "v0-nodeid" + return b"v0-nodeid" def get_long_tubid(self): - return "tubid" + return u"tubid" def get_auth_token(self): - return 'a fake debug auth token' + return b'a fake debug auth token' def startService(self): return service.MultiService.startService(self) @@ -340,7 +351,7 @@ class WebMixin(TimezoneMixin): def _then(res): self.public_root = res[0][1] assert 
interfaces.IDirectoryNode.providedBy(self.public_root), res - self.public_url = "/uri/" + self.public_root.get_uri() + self.public_url = "/uri/" + str(self.public_root.get_uri(), "ascii") self.private_root = res[1][1] foo = res[2][1] @@ -365,7 +376,7 @@ class WebMixin(TimezoneMixin): # mdmf self.QUUX_CONTENTS, n, self._quux_txt_uri, self._quux_txt_readonly_uri = self.makefile_mutable(0, mdmf=True) - assert self._quux_txt_uri.startswith("URI:MDMF") + assert self._quux_txt_uri.startswith(b"URI:MDMF") foo.set_uri(u"quux.txt", self._quux_txt_uri, self._quux_txt_readonly_uri) foo.set_uri(u"empty", res[3][1].get_uri(), @@ -382,7 +393,7 @@ class WebMixin(TimezoneMixin): # filenode to test for html encoding issues self._htmlname_unicode = u"<&weirdly'named\"file>>>_