From 9f0034347823e01152a4f4a1b555f99361f32776 Mon Sep 17 00:00:00 2001
From: Itamar Turner-Trauring <itamar@itamarst.org>
Date: Mon, 21 Sep 2020 10:45:05 -0400
Subject: [PATCH 1/6] Some progress towards test_upload running on Python 3.

---
 src/allmydata/storage_client.py   |  5 ++--
 src/allmydata/test/test_upload.py | 47 ++++++++++++++++---------------
 2 files changed, 27 insertions(+), 25 deletions(-)
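
Notes, kept below the cut so git am drops them: the common thread in this
commit is that FURLs, server IDs and uploadable payloads are handled as
bytes on Python 3, so regex patterns, literals and in-memory file objects
have to follow suit. A minimal sketch of the pattern, using a made-up FURL
rather than anything produced by the test suite:

    import re
    from io import BytesIO

    furl = b"pb://abcdefg@tcp:example.invalid:1234/fake"  # hypothetical FURL
    m = re.match(br'pb://(\w+)@', furl)   # bytes pattern for bytes input
    assert m is not None
    assert m.group(1).lower() == b"abcdefg"

    f = BytesIO(b"a" * 41)                # BytesIO replaces cStringIO for binary data
    assert f.read(1) == b"a"
    assert f.read(80) == b"a" * 40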

diff --git a/src/allmydata/storage_client.py b/src/allmydata/storage_client.py
index cfc3bc83f..cdce28467 100644
--- a/src/allmydata/storage_client.py
+++ b/src/allmydata/storage_client.py
@@ -28,6 +28,7 @@ the foolscap-based server implemented in src/allmydata/storage/*.py .
 #
 # 6: implement other sorts of IStorageClient classes: S3, etc
 
+from past.builtins import unicode
 
 import re, time, hashlib
 try:
@@ -489,7 +490,7 @@ class _FoolscapStorage(object):
 
         *nickname* is optional.
         """
-        m = re.match(r'pb://(\w+)@', furl)
+        m = re.match(br'pb://(\w+)@', furl)
         assert m, furl
         tubid_s = m.group(1).lower()
         tubid = base32.a2b(tubid_s)
@@ -633,7 +634,7 @@ class NativeStorageServer(service.MultiService):
 
     def __init__(self, server_id, ann, tub_maker, handler_overrides, node_config, config=StorageClientConfig()):
         service.MultiService.__init__(self)
-        assert isinstance(server_id, str)
+        assert isinstance(server_id, bytes)
         self._server_id = server_id
         self.announcement = ann
         self._tub_maker = tub_maker
diff --git a/src/allmydata/test/test_upload.py b/src/allmydata/test/test_upload.py
index 6f35d57d3..5d68447ff 100644
--- a/src/allmydata/test/test_upload.py
+++ b/src/allmydata/test/test_upload.py
@@ -1,7 +1,8 @@
 # -*- coding: utf-8 -*-
 
 import os, shutil
-from six.moves import cStringIO as StringIO
+from io import BytesIO
+
 from twisted.trial import unittest
 from twisted.python.failure import Failure
 from twisted.internet import defer, task
@@ -33,25 +34,25 @@ class Uploadable(unittest.TestCase):
     def shouldEqual(self, data, expected):
         self.failUnless(isinstance(data, list))
         for e in data:
-            self.failUnless(isinstance(e, str))
-        s = "".join(data)
+            self.failUnless(isinstance(e, bytes))
+        s = b"".join(data)
         self.failUnlessEqual(s, expected)
 
     def test_filehandle_random_key(self):
         return self._test_filehandle(convergence=None)
 
     def test_filehandle_convergent_encryption(self):
-        return self._test_filehandle(convergence="some convergence string")
+        return self._test_filehandle(convergence=b"some convergence string")
 
     def _test_filehandle(self, convergence):
-        s = StringIO("a"*41)
+        s = BytesIO(b"a"*41)
         u = upload.FileHandle(s, convergence=convergence)
         d = u.get_size()
         d.addCallback(self.failUnlessEqual, 41)
         d.addCallback(lambda res: u.read(1))
-        d.addCallback(self.shouldEqual, "a")
+        d.addCallback(self.shouldEqual, b"a")
         d.addCallback(lambda res: u.read(80))
-        d.addCallback(self.shouldEqual, "a"*40)
+        d.addCallback(self.shouldEqual, b"a"*40)
         d.addCallback(lambda res: u.close()) # this doesn't close the filehandle
         d.addCallback(lambda res: s.close()) # that privilege is reserved for us
         return d
@@ -60,28 +61,28 @@ class Uploadable(unittest.TestCase):
         basedir = "upload/Uploadable/test_filename"
         os.makedirs(basedir)
         fn = os.path.join(basedir, "file")
-        f = open(fn, "w")
-        f.write("a"*41)
+        f = open(fn, "wb")
+        f.write(b"a"*41)
         f.close()
         u = upload.FileName(fn, convergence=None)
         d = u.get_size()
         d.addCallback(self.failUnlessEqual, 41)
         d.addCallback(lambda res: u.read(1))
-        d.addCallback(self.shouldEqual, "a")
+        d.addCallback(self.shouldEqual, b"a")
         d.addCallback(lambda res: u.read(80))
-        d.addCallback(self.shouldEqual, "a"*40)
+        d.addCallback(self.shouldEqual, b"a"*40)
         d.addCallback(lambda res: u.close())
         return d
 
     def test_data(self):
-        s = "a"*41
+        s = b"a"*41
         u = upload.Data(s, convergence=None)
         d = u.get_size()
         d.addCallback(self.failUnlessEqual, 41)
         d.addCallback(lambda res: u.read(1))
-        d.addCallback(self.shouldEqual, "a")
+        d.addCallback(self.shouldEqual, b"a")
         d.addCallback(lambda res: u.read(80))
-        d.addCallback(self.shouldEqual, "a"*40)
+        d.addCallback(self.shouldEqual, b"a"*40)
         d.addCallback(lambda res: u.close())
         return d
 
@@ -167,7 +168,7 @@ class FakeStorageServer(object):
 class FakeBucketWriter(object):
     # a diagnostic version of storageserver.BucketWriter
     def __init__(self, size):
-        self.data = StringIO()
+        self.data = BytesIO()
         self.closed = False
         self._size = size
 
@@ -216,7 +217,7 @@ class FakeClient(object):
         if type(mode) is str:
             mode = dict([i,mode] for i in range(num_servers))
         servers = [
-            ("%20d" % fakeid, FakeStorageServer(mode[fakeid], reactor=reactor))
+            (b"%20d" % fakeid, FakeStorageServer(mode[fakeid], reactor=reactor))
             for fakeid in range(self.num_servers)
         ]
         self.storage_broker = StorageFarmBroker(
@@ -225,7 +226,7 @@ class FakeClient(object):
             node_config=EMPTY_CLIENT_CONFIG,
         )
         for (serverid, rref) in servers:
-            ann = {"anonymous-storage-FURL": "pb://%s@nowhere/fake" % base32.b2a(serverid),
+            ann = {"anonymous-storage-FURL": b"pb://%s@nowhere/fake" % base32.b2a(serverid),
                    "permutation-seed-base32": base32.b2a(serverid) }
             self.storage_broker.test_add_rref(serverid, rref, ann)
         self.last_servers = [s[1] for s in servers]
@@ -236,7 +237,7 @@ class FakeClient(object):
         return self.encoding_params
     def get_storage_broker(self):
         return self.storage_broker
-    _secret_holder = client.SecretHolder("lease secret", "convergence secret")
+    _secret_holder = client.SecretHolder(b"lease secret", b"convergence secret")
 
 class GotTooFarError(Exception):
     pass
@@ -247,7 +248,7 @@ class GiganticUploadable(upload.FileHandle):
         self._fp = 0
 
     def get_encryption_key(self):
-        return defer.succeed("\x00" * 16)
+        return defer.succeed(b"\x00" * 16)
     def get_size(self):
         return defer.succeed(self._size)
     def read(self, length):
@@ -257,7 +258,7 @@ class GiganticUploadable(upload.FileHandle):
         if self._fp > 1000000:
             # terminate the test early.
             raise GotTooFarError("we shouldn't be allowed to get this far")
-        return defer.succeed(["\x00" * length])
+        return defer.succeed([b"\x00" * length])
     def close(self):
         pass
 
@@ -367,21 +368,21 @@ class GoodServer(unittest.TestCase, ShouldFailMixin, SetDEPMixin):
 
     def test_filehandle_zero(self):
         data = self.get_data(SIZE_ZERO)
-        d = upload_filehandle(self.u, StringIO(data))
+        d = upload_filehandle(self.u, BytesIO(data))
         d.addCallback(extract_uri)
         d.addCallback(self._check_small, SIZE_ZERO)
         return d
 
     def test_filehandle_small(self):
         data = self.get_data(SIZE_SMALL)
-        d = upload_filehandle(self.u, StringIO(data))
+        d = upload_filehandle(self.u, BytesIO(data))
         d.addCallback(extract_uri)
         d.addCallback(self._check_small, SIZE_SMALL)
         return d
 
     def test_filehandle_large(self):
         data = self.get_data(SIZE_LARGE)
-        d = upload_filehandle(self.u, StringIO(data))
+        d = upload_filehandle(self.u, BytesIO(data))
         d.addCallback(extract_uri)
         d.addCallback(self._check_large, SIZE_LARGE)
         return d

From 125a6855d6cb0c059a98249025b8def7efdfd5c6 Mon Sep 17 00:00:00 2001
From: Itamar Turner-Trauring <itamar@itamarst.org>
Date: Mon, 21 Sep 2020 10:52:58 -0400
Subject: [PATCH 2/6] More progress towards test_upload on Python 3.

---
 src/allmydata/immutable/upload.py |  2 +-
 src/allmydata/test/test_upload.py | 92 +++++++++++++++----------------
 2 files changed, 47 insertions(+), 47 deletions(-)
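
Notes, kept below the cut so git am drops them: besides the bytes literals in
the tests, the upload.py hunk matters because iterating a live dict.keys()
view while the dictionary shrinks raises RuntimeError on Python 3; if
abort_some_buckets removes entries from self.buckets as it goes, snapshotting
the keys with list() first keeps abort() safe. A toy illustration (the dict
below is a stand-in, not the real ServerTracker state):

    buckets = {0: "writer-0", 1: "writer-1", 2: "writer-2"}

    # Deleting while looping over buckets.keys() directly would raise
    # "RuntimeError: dictionary changed size during iteration" on Python 3.
    for sharenum in list(buckets.keys()):
        del buckets[sharenum]

    assert buckets == {}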

diff --git a/src/allmydata/immutable/upload.py b/src/allmydata/immutable/upload.py
index 884b2cf5e..a6fc02f30 100644
--- a/src/allmydata/immutable/upload.py
+++ b/src/allmydata/immutable/upload.py
@@ -299,7 +299,7 @@ class ServerTracker(object):
         I abort the remote bucket writers for all shares. This is a good idea
         to conserve space on the storage server.
         """
-        self.abort_some_buckets(self.buckets.keys())
+        self.abort_some_buckets(list(self.buckets.keys()))
 
     def abort_some_buckets(self, sharenums):
         """
diff --git a/src/allmydata/test/test_upload.py b/src/allmydata/test/test_upload.py
index 5d68447ff..0dbb39a4f 100644
--- a/src/allmydata/test/test_upload.py
+++ b/src/allmydata/test/test_upload.py
@@ -262,7 +262,7 @@ class GiganticUploadable(upload.FileHandle):
     def close(self):
         pass
 
-DATA = """
+DATA = b"""
 Once upon a time, there was a beautiful princess named Buttercup. She lived
 in a magical land where every file was stored securely among millions of
 machines, and nobody ever worried about their data being lost ever again.
@@ -765,40 +765,40 @@ class ServerSelection(unittest.TestCase):
 
 class StorageIndex(unittest.TestCase):
     def test_params_must_matter(self):
-        DATA = "I am some data"
+        DATA = b"I am some data"
         PARAMS = _Client.DEFAULT_ENCODING_PARAMETERS
 
-        u = upload.Data(DATA, convergence="")
+        u = upload.Data(DATA, convergence=b"")
         u.set_default_encoding_parameters(PARAMS)
         eu = upload.EncryptAnUploadable(u)
         d1 = eu.get_storage_index()
 
         # CHK means the same data should encrypt the same way
-        u = upload.Data(DATA, convergence="")
+        u = upload.Data(DATA, convergence=b"")
         u.set_default_encoding_parameters(PARAMS)
         eu = upload.EncryptAnUploadable(u)
         d1a = eu.get_storage_index()
 
         # but if we use a different convergence string it should be different
-        u = upload.Data(DATA, convergence="wheee!")
+        u = upload.Data(DATA, convergence=b"wheee!")
         u.set_default_encoding_parameters(PARAMS)
         eu = upload.EncryptAnUploadable(u)
         d1salt1 = eu.get_storage_index()
 
         # and if we add yet a different convergence it should be different again
-        u = upload.Data(DATA, convergence="NOT wheee!")
+        u = upload.Data(DATA, convergence=b"NOT wheee!")
         u.set_default_encoding_parameters(PARAMS)
         eu = upload.EncryptAnUploadable(u)
         d1salt2 = eu.get_storage_index()
 
         # and if we use the first string again it should be the same as last time
-        u = upload.Data(DATA, convergence="wheee!")
+        u = upload.Data(DATA, convergence=b"wheee!")
         u.set_default_encoding_parameters(PARAMS)
         eu = upload.EncryptAnUploadable(u)
         d1salt1a = eu.get_storage_index()
 
         # and if we change the encoding parameters, it should be different (from the same convergence string with different encoding parameters)
-        u = upload.Data(DATA, convergence="")
+        u = upload.Data(DATA, convergence=b"")
         u.set_default_encoding_parameters(PARAMS)
         u.encoding_param_k = u.default_encoding_param_k + 1
         eu = upload.EncryptAnUploadable(u)
@@ -910,7 +910,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
         assert self.g, "I tried to find a grid at self.g, but failed"
         broker = self.g.clients[0].storage_broker
         sh     = self.g.clients[0]._secret_holder
-        data = upload.Data("data" * 10000, convergence="")
+        data = upload.Data(b"data" * 10000, convergence=b"")
         data.set_default_encoding_parameters({'k': 3, 'happy': 4, 'n': 10})
         uploadable = upload.EncryptAnUploadable(data)
         encoder = encode.Encoder()
@@ -1003,7 +1003,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
         if "n" in kwargs and "k" in kwargs:
             client.encoding_params['k'] = kwargs['k']
             client.encoding_params['n'] = kwargs['n']
-        data = upload.Data("data" * 10000, convergence="")
+        data = upload.Data(b"data" * 10000, convergence=b"")
         self.data = data
         d = client.upload(data)
         def _store_uri(ur):
@@ -1022,8 +1022,8 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
         self.set_up_grid(client_config_hooks=hooks)
         c0 = self.g.clients[0]
 
-        DATA = "data" * 100
-        u = upload.Data(DATA, convergence="")
+        DATA = b"data" * 100
+        u = upload.Data(DATA, convergence=b"")
         d = c0.upload(u)
         d.addCallback(lambda ur: c0.create_node_from_uri(ur.get_uri()))
         m = monitor.Monitor()
@@ -1046,7 +1046,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
 
     def test_happy_semantics(self):
         self._setUp(2)
-        DATA = upload.Data("kittens" * 10000, convergence="")
+        DATA = upload.Data(b"kittens" * 10000, convergence=b"")
         # These parameters are unsatisfiable with only 2 servers.
         self.set_encoding_parameters(k=3, happy=5, n=10)
         d = self.shouldFail(UploadUnhappinessError, "test_happy_semantics",
@@ -1078,7 +1078,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
         self.basedir = "upload/EncodingParameters/aborted_shares"
         self.set_up_grid(num_servers=4)
         c = self.g.clients[0]
-        DATA = upload.Data(100 * "kittens", convergence="")
+        DATA = upload.Data(100 * b"kittens", convergence=b"")
         # These parameters are unsatisfiable with only 4 servers, but should
         # work with 5, as long as the original 4 are not stuck in the open
         # BucketWriter state (open() but not
@@ -1156,8 +1156,8 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
                             "We were asked to place shares on at "
                             "least 4 servers such that any 3 of them have "
                             "enough shares to recover the file",
-                            client.upload, upload.Data("data" * 10000,
-                                                       convergence="")))
+                            client.upload, upload.Data(b"data" * 10000,
+                                                       convergence=b"")))
 
         # Do comment:52, but like this:
         # server 2: empty
@@ -1189,8 +1189,8 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
                             "that any 3 of them have enough shares to recover "
                             "the file, but we were asked to place shares on "
                             "at least 4 such servers.",
-                            client.upload, upload.Data("data" * 10000,
-                                                       convergence="")))
+                            client.upload, upload.Data(b"data" * 10000,
+                                                       convergence=b"")))
         return d
 
 
@@ -1231,7 +1231,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
             return client
         d.addCallback(_reset_encoding_parameters)
         d.addCallback(lambda client:
-            client.upload(upload.Data("data" * 10000, convergence="")))
+            client.upload(upload.Data(b"data" * 10000, convergence=b"")))
         d.addCallback(lambda ign:
             self.failUnless(self._has_happy_share_distribution()))
 
@@ -1271,7 +1271,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
         # Now try uploading.
         d.addCallback(_reset_encoding_parameters)
         d.addCallback(lambda client:
-            client.upload(upload.Data("data" * 10000, convergence="")))
+            client.upload(upload.Data(b"data" * 10000, convergence=b"")))
         d.addCallback(lambda ign:
             self.failUnless(self._has_happy_share_distribution()))
 
@@ -1300,7 +1300,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
             self.g.remove_server(self.g.servers_by_number[0].my_nodeid))
         d.addCallback(_reset_encoding_parameters)
         d.addCallback(lambda client:
-            client.upload(upload.Data("data" * 10000, convergence="")))
+            client.upload(upload.Data(b"data" * 10000, convergence=b"")))
         # Make sure that only as many shares as necessary to satisfy
         # servers of happiness were pushed.
         d.addCallback(lambda results:
@@ -1331,7 +1331,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
 
         d.addCallback(_setup)
         d.addCallback(lambda client:
-            client.upload(upload.Data("data" * 10000, convergence="")))
+            client.upload(upload.Data(b"data" * 10000, convergence=b"")))
         d.addCallback(lambda ign:
             self.failUnless(self._has_happy_share_distribution()))
         return d
@@ -1369,7 +1369,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
             return client
         d.addCallback(_reset_encoding_parameters)
         d.addCallback(lambda client:
-            client.upload(upload.Data("data" * 10000, convergence="")))
+            client.upload(upload.Data(b"data" * 10000, convergence=b"")))
         d.addCallback(lambda ign:
             self.failUnless(self._has_happy_share_distribution()))
         return d
@@ -1408,7 +1408,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
             return client
         d.addCallback(_reset_encoding_parameters)
         d.addCallback(lambda client:
-            client.upload(upload.Data("data" * 10000, convergence="")))
+            client.upload(upload.Data(b"data" * 10000, convergence=b"")))
         d.addCallback(lambda ign:
             self.failUnless(self._has_happy_share_distribution()))
         return d
@@ -1524,7 +1524,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
             return client
         d.addCallback(_prepare_client)
         d.addCallback(lambda client:
-            client.upload(upload.Data("data" * 10000, convergence="")))
+            client.upload(upload.Data(b"data" * 10000, convergence=b"")))
         d.addCallback(lambda ign:
             self.failUnless(self._has_happy_share_distribution()))
         return d
@@ -1551,8 +1551,8 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
         d.addCallback(lambda c:
             self.shouldFail(UploadUnhappinessError, "test_query_counting",
                             "0 queries placed some shares",
-                            c.upload, upload.Data("data" * 10000,
-                                                  convergence="")))
+                            c.upload, upload.Data(b"data" * 10000,
+                                                  convergence=b"")))
         # Now try with some readonly servers. We want to make sure that
         # the readonly server share discovery phase is counted correctly.
         def _reset(ign):
@@ -1575,8 +1575,8 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
             self.shouldFail(UploadUnhappinessError, "test_query_counting",
                             "4 placed none (of which 4 placed none due to "
                             "the server being full",
-                            c.upload, upload.Data("data" * 10000,
-                                                  convergence="")))
+                            c.upload, upload.Data(b"data" * 10000,
+                                                  convergence=b"")))
         # Now try the case where the upload process finds a bunch of the
         # shares that it wants to place on the first server, including
         # the one that it wanted to allocate there. Though no shares will
@@ -1604,8 +1604,8 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
         d.addCallback(lambda c:
             self.shouldFail(UploadUnhappinessError, "test_query_counting",
                             "0 queries placed some shares",
-                            c.upload, upload.Data("data" * 10000,
-                                                  convergence="")))
+                            c.upload, upload.Data(b"data" * 10000,
+                                                  convergence=b"")))
         return d
 
 
@@ -1627,7 +1627,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
                             "test_upper_limit_on_readonly_queries",
                             "sent 8 queries to 8 servers",
                             client.upload,
-                            upload.Data('data' * 10000, convergence="")))
+                            upload.Data('data' * 10000, convergence=b"")))
         return d
 
 
@@ -1669,7 +1669,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
                             "(of which 5 placed none due to the server being "
                             "full and 0 placed none due to an error)",
                             client.upload,
-                            upload.Data("data" * 10000, convergence="")))
+                            upload.Data(b"data" * 10000, convergence=b"")))
 
 
         # server 1: read-only, no shares
@@ -1710,7 +1710,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
                             "(of which 4 placed none due to the server being "
                             "full and 1 placed none due to an error)",
                             client.upload,
-                            upload.Data("data" * 10000, convergence="")))
+                            upload.Data(b"data" * 10000, convergence=b"")))
         # server 0, server 1 = empty, accepting shares
         # This should place all of the shares, but still fail with happy=4.
         # We want to make sure that the exception message is worded correctly.
@@ -1726,8 +1726,8 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
                             "server(s). We were asked to place shares on at "
                             "least 4 server(s) such that any 3 of them have "
                             "enough shares to recover the file.",
-                            client.upload, upload.Data("data" * 10000,
-                                                       convergence="")))
+                            client.upload, upload.Data(b"data" * 10000,
+                                                       convergence=b"")))
         # servers 0 - 4 = empty, accepting shares
         # This too should place all the shares, and this too should fail,
         # but since the effective happiness is more than the k encoding
@@ -1751,8 +1751,8 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
                             "that any 3 of them have enough shares to recover "
                             "the file, but we were asked to place shares on "
                             "at least 7 such servers.",
-                            client.upload, upload.Data("data" * 10000,
-                                                       convergence="")))
+                            client.upload, upload.Data(b"data" * 10000,
+                                                       convergence=b"")))
         # server 0: shares 0 - 9
         # server 1: share 0, read-only
         # server 2: share 0, read-only
@@ -1783,8 +1783,8 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
                             "to place shares on at least 7 servers such that "
                             "any 3 of them have enough shares to recover the "
                             "file",
-                            client.upload, upload.Data("data" * 10000,
-                                                       convergence="")))
+                            client.upload, upload.Data(b"data" * 10000,
+                                                       convergence=b"")))
         return d
 
 
@@ -1816,7 +1816,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
 
         d.addCallback(_setup)
         d.addCallback(lambda client:
-            client.upload(upload.Data("data" * 10000, convergence="")))
+            client.upload(upload.Data(b"data" * 10000, convergence=b"")))
         d.addCallback(lambda ign:
             self.failUnless(self._has_happy_share_distribution()))
         return d
@@ -1874,7 +1874,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
 
         d.addCallback(_setup)
         d.addCallback(lambda client:
-                          client.upload(upload.Data("data" * 10000, convergence="")))
+                          client.upload(upload.Data(b"data" * 10000, convergence=b"")))
         d.addCallback(lambda ign:
             self.failUnless(self._has_happy_share_distribution()))
         return d
@@ -1912,7 +1912,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
             return c
         d.addCallback(_server_setup)
         d.addCallback(lambda client:
-            client.upload(upload.Data("data" * 10000, convergence="")))
+            client.upload(upload.Data(b"data" * 10000, convergence=b"")))
         d.addCallback(lambda ign:
             self.failUnless(self._has_happy_share_distribution()))
         return d
@@ -1941,7 +1941,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
             return self.g.clients[0]
         d.addCallback(_server_setup)
         d.addCallback(lambda client:
-            client.upload(upload.Data("data" * 10000, convergence="")))
+            client.upload(upload.Data(b"data" * 10000, convergence=b"")))
         d.addCallback(lambda ign:
             self.failUnless(self._has_happy_share_distribution()))
         return d
@@ -1964,8 +1964,8 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
             self.shouldFail(UploadUnhappinessError,
                             "test_server_selection_bucket_abort",
                             "",
-                            client.upload, upload.Data("data" * 10000,
-                                                       convergence="")))
+                            client.upload, upload.Data(b"data" * 10000,
+                                                       convergence=b"")))
         # wait for the abort messages to get there.
         def _turn_barrier(res):
             return fireEventually(res)

From 0cee40c00948db0c548ca557d0e84df21c07a8e9 Mon Sep 17 00:00:00 2001
From: Itamar Turner-Trauring <itamar@itamarst.org>
Date: Mon, 21 Sep 2020 11:01:51 -0400
Subject: [PATCH 3/6] Even more progress towards test_upload on Python 3.

---
 src/allmydata/storage_client.py   | 11 ++++++---
 src/allmydata/test/test_upload.py | 37 ++++++++++++++++---------------
 2 files changed, 27 insertions(+), 21 deletions(-)
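
Notes, kept below the cut so git am drops them: announcement fields such as
the FURL and the permutation seed may arrive as text (e.g. from JSON) or as
bytes, while the base32 and regex code downstream wants bytes, so this commit
normalizes text to UTF-8 bytes before using it. A sketch of that
normalization; the helper name to_bytes is invented here, the patch inlines
the check instead:

    from past.builtins import unicode   # str on Python 3, unicode on Python 2

    def to_bytes(value):
        """Hypothetical helper: normalize an announcement field to bytes."""
        if isinstance(value, unicode):
            return value.encode("utf-8")
        return value

    assert to_bytes(u"pb://abc@nowhere/fake") == b"pb://abc@nowhere/fake"
    assert to_bytes(b"pb://abc@nowhere/fake") == b"pb://abc@nowhere/fake"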

diff --git a/src/allmydata/storage_client.py b/src/allmydata/storage_client.py
index cdce28467..4e06e28fb 100644
--- a/src/allmydata/storage_client.py
+++ b/src/allmydata/storage_client.py
@@ -495,7 +495,10 @@ class _FoolscapStorage(object):
         tubid_s = m.group(1).lower()
         tubid = base32.a2b(tubid_s)
         if "permutation-seed-base32" in ann:
-            ps = base32.a2b(str(ann["permutation-seed-base32"]))
+            seed = ann["permutation-seed-base32"]
+            if isinstance(seed, unicode):
+                seed = seed.encode("utf-8")
+            ps = base32.a2b(seed)
         elif re.search(r'^v0-[0-9a-zA-Z]{52}$', server_id):
             ps = base32.a2b(server_id[3:])
         else:
@@ -510,7 +513,7 @@ class _FoolscapStorage(object):
 
         assert server_id
         long_description = server_id
-        if server_id.startswith("v0-"):
+        if server_id.startswith(b"v0-"):
             # remove v0- prefix from abbreviated name
             short_description = server_id[3:3+8]
         else:
@@ -695,12 +698,14 @@ class NativeStorageServer(service.MultiService):
             # Nope
             pass
         else:
+            if isinstance(furl, unicode):
+                furl = furl.encode("utf-8")
             # See comment above for the _storage_from_foolscap_plugin case
             # about passing in get_rref.
             storage_server = _StorageServer(get_rref=self.get_rref)
             return _FoolscapStorage.from_announcement(
                 self._server_id,
-                furl.encode("utf-8"),
+                furl,
                 ann,
                 storage_server,
             )
diff --git a/src/allmydata/test/test_upload.py b/src/allmydata/test/test_upload.py
index 0dbb39a4f..740c6815d 100644
--- a/src/allmydata/test/test_upload.py
+++ b/src/allmydata/test/test_upload.py
@@ -23,6 +23,7 @@ from allmydata.client import _Client
 from .common import (
     EMPTY_CLIENT_CONFIG,
 )
+from functools import reduce
 
 
 MiB = 1024*1024
@@ -839,10 +840,10 @@ def combinations(iterable, r):
     n = len(pool)
     if r > n:
         return
-    indices = range(r)
+    indices = list(range(r))
     yield tuple(pool[i] for i in indices)
     while True:
-        for i in reversed(range(r)):
+        for i in reversed(list(range(r))):
             if indices[i] != i + n - r:
                 break
         else:
@@ -856,7 +857,7 @@ def is_happy_enough(servertoshnums, h, k):
     """ I calculate whether servertoshnums achieves happiness level h. I do this with a naïve "brute force search" approach. (See src/allmydata/util/happinessutil.py for a better algorithm.) """
     if len(servertoshnums) < h:
         return False
-    for happysetcombo in combinations(servertoshnums.iterkeys(), h):
+    for happysetcombo in combinations(iter(servertoshnums.keys()), h):
         for subsetcombo in combinations(happysetcombo, k):
             shnums = reduce(set.union, [ servertoshnums[s] for s in subsetcombo ])
             if len(shnums) < k:
@@ -887,7 +888,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
         assert self.g, "I tried to find a grid at self.g, but failed"
         servertoshnums = {} # k: server, v: set(shnum)
 
-        for i, c in self.g.servers_by_number.iteritems():
+        for i, c in self.g.servers_by_number.items():
             for (dirp, dirns, fns) in os.walk(c.sharedir):
                 for fn in fns:
                     try:
@@ -927,9 +928,9 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
         def _have_shareholders(upload_trackers_and_already_servers):
             (upload_trackers, already_servers) = upload_trackers_and_already_servers
             assert servers_to_break <= len(upload_trackers)
-            for index in xrange(servers_to_break):
+            for index in range(servers_to_break):
                 tracker = list(upload_trackers)[index]
-                for share in tracker.buckets.keys():
+                for share in list(tracker.buckets.keys()):
                     tracker.buckets[share].abort()
             buckets = {}
             servermap = already_servers.copy()
@@ -1260,7 +1261,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
             self._add_server_with_share(server_number=1, share_number=2))
         # Copy all of the other shares to server number 2
         def _copy_shares(ign):
-            for i in xrange(0, 10):
+            for i in range(0, 10):
                 self._copy_share_to_server(i, 2)
         d.addCallback(_copy_shares)
         # Remove the first server, and add a placeholder with share 0
@@ -1354,7 +1355,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
                                         readonly=True))
         # Copy all of the other shares to server number 2
         def _copy_shares(ign):
-            for i in xrange(1, 10):
+            for i in range(1, 10):
                 self._copy_share_to_server(i, 2)
         d.addCallback(_copy_shares)
         # Remove server 0, and add another in its place
@@ -1397,7 +1398,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
             self._add_server_with_share(server_number=2, share_number=0,
                                         readonly=True))
         def _copy_shares(ign):
-            for i in xrange(1, 10):
+            for i in range(1, 10):
                 self._copy_share_to_server(i, 2)
         d.addCallback(_copy_shares)
         d.addCallback(lambda ign:
@@ -1513,7 +1514,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
         d.addCallback(lambda ign:
             self._add_server(4))
         def _copy_shares(ign):
-            for i in xrange(1, 10):
+            for i in range(1, 10):
                 self._copy_share_to_server(i, 1)
         d.addCallback(_copy_shares)
         d.addCallback(lambda ign:
@@ -1537,7 +1538,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
         self.basedir = self.mktemp()
         d = self._setup_and_upload()
         def _setup(ign):
-            for i in xrange(1, 11):
+            for i in range(1, 11):
                 self._add_server(server_number=i)
             self.g.remove_server(self.g.servers_by_number[0].my_nodeid)
             c = self.g.clients[0]
@@ -1562,7 +1563,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
         d.addCallback(lambda ign:
             self._setup_and_upload())
         def _then(ign):
-            for i in xrange(1, 11):
+            for i in range(1, 11):
                 self._add_server(server_number=i)
             self._add_server(server_number=11, readonly=True)
             self._add_server(server_number=12, readonly=True)
@@ -1588,11 +1589,11 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
             self._setup_and_upload())
 
         def _next(ign):
-            for i in xrange(1, 11):
+            for i in range(1, 11):
                 self._add_server(server_number=i)
             # Copy all of the shares to server 9, since that will be
             # the first one that the selector sees.
-            for i in xrange(10):
+            for i in range(10):
                 self._copy_share_to_server(i, 9)
             # Remove server 0, and its contents
             self.g.remove_server(self.g.servers_by_number[0].my_nodeid)
@@ -1613,7 +1614,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
         self.basedir = self.mktemp()
         d = self._setup_and_upload()
         def _then(ign):
-            for i in xrange(1, 11):
+            for i in range(1, 11):
                 self._add_server(server_number=i, readonly=True)
             self.g.remove_server(self.g.servers_by_number[0].my_nodeid)
             c = self.g.clients[0]
@@ -1936,7 +1937,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
             self._add_server_with_share(server_number=8, share_number=4)
             self._add_server_with_share(server_number=5, share_number=5)
             self._add_server_with_share(server_number=10, share_number=7)
-            for i in xrange(4):
+            for i in range(4):
                 self._copy_share_to_server(i, 2)
             return self.g.clients[0]
         d.addCallback(_server_setup)
@@ -1971,7 +1972,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
             return fireEventually(res)
         d.addCallback(_turn_barrier)
         def _then(ignored):
-            for server in self.g.servers_by_number.values():
+            for server in list(self.g.servers_by_number.values()):
                 self.failUnlessEqual(server.allocated_size(), 0)
         d.addCallback(_then)
         return d
@@ -1997,7 +1998,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
             return fireEventually(res)
         d.addCallback(_turn_barrier)
         def _then(ignored):
-            for server in self.g.servers_by_number.values():
+            for server in list(self.g.servers_by_number.values()):
                 self.failUnlessEqual(server.allocated_size(), 0)
         d.addCallback(_then)
         return d

From 6fe68c792c3df395e4cd330e51bf578f14d51312 Mon Sep 17 00:00:00 2001
From: Itamar Turner-Trauring <itamar@itamarst.org>
Date: Mon, 21 Sep 2020 13:03:29 -0400
Subject: [PATCH 4/6] Tests pass on Python 2 and 3.

---
 src/allmydata/immutable/upload.py |  2 +-
 src/allmydata/storage_client.py   | 16 ++++++++--------
 src/allmydata/test/test_upload.py | 26 +++++++++++++-------------
 3 files changed, 22 insertions(+), 22 deletions(-)
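
Notes, kept below the cut so git am drops them: the version dictionaries are
now keyed by bytes, presumably to match the byte keys that come back over
foolscap, so lookups must use bytes keys as well or they silently miss. A toy
demonstration of why the key type matters on Python 3:

    version = {
        b"http://allmydata.org/tahoe/protocols/storage/v1": {
            b"maximum-immutable-share-size": 2**32 - 1,
        },
    }

    v1 = version.get(b"http://allmydata.org/tahoe/protocols/storage/v1", {})
    assert v1[b"maximum-immutable-share-size"] == 2**32 - 1

    # A str key is a different object on Python 3, so it finds nothing:
    assert version.get("http://allmydata.org/tahoe/protocols/storage/v1") is None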

diff --git a/src/allmydata/immutable/upload.py b/src/allmydata/immutable/upload.py
index a6fc02f30..1ab312ab6 100644
--- a/src/allmydata/immutable/upload.py
+++ b/src/allmydata/immutable/upload.py
@@ -1818,7 +1818,7 @@ class Uploader(service.MultiService, log.PrefixingLogMixin):
         self.log("got helper connection, getting versions")
         default = { "http://allmydata.org/tahoe/protocols/helper/v1" :
                     { },
-                    "application-version": "unknown: no get_version()",
+                    "application-version": b"unknown: no get_version()",
                     }
         d = add_version_to_remote_reference(helper, default)
         d.addCallback(self._got_versioned_helper)
diff --git a/src/allmydata/storage_client.py b/src/allmydata/storage_client.py
index 4e06e28fb..df1e4573e 100644
--- a/src/allmydata/storage_client.py
+++ b/src/allmydata/storage_client.py
@@ -625,14 +625,14 @@ class NativeStorageServer(service.MultiService):
     """
 
     VERSION_DEFAULTS = {
-        "http://allmydata.org/tahoe/protocols/storage/v1" :
-        { "maximum-immutable-share-size": 2**32 - 1,
-          "maximum-mutable-share-size": 2*1000*1000*1000, # maximum prior to v1.9.2
-          "tolerates-immutable-read-overrun": False,
-          "delete-mutable-shares-with-zero-length-writev": False,
-          "available-space": None,
+        b"http://allmydata.org/tahoe/protocols/storage/v1" :
+        { b"maximum-immutable-share-size": 2**32 - 1,
+          b"maximum-mutable-share-size": 2*1000*1000*1000, # maximum prior to v1.9.2
+          b"tolerates-immutable-read-overrun": False,
+          b"delete-mutable-shares-with-zero-length-writev": False,
+          b"available-space": None,
           },
-        "application-version": "unknown: no get_version()",
+        b"application-version": "unknown: no get_version()",
         }
 
     def __init__(self, server_id, ann, tub_maker, handler_overrides, node_config, config=StorageClientConfig()):
@@ -773,7 +773,7 @@ class NativeStorageServer(service.MultiService):
         version = self.get_version()
         if version is None:
             return None
-        protocol_v1_version = version.get('http://allmydata.org/tahoe/protocols/storage/v1', {})
+        protocol_v1_version = version.get(b'http://allmydata.org/tahoe/protocols/storage/v1', {})
         available_space = protocol_v1_version.get('available-space')
         if available_space is None:
             available_space = protocol_v1_version.get('maximum-immutable-share-size', None)
diff --git a/src/allmydata/test/test_upload.py b/src/allmydata/test/test_upload.py
index 740c6815d..ca4825990 100644
--- a/src/allmydata/test/test_upload.py
+++ b/src/allmydata/test/test_upload.py
@@ -106,19 +106,19 @@ class FakeStorageServer(object):
         self._alloc_queries = 0
         self._get_queries = 0
         self.version = {
-            "http://allmydata.org/tahoe/protocols/storage/v1" :
+            b"http://allmydata.org/tahoe/protocols/storage/v1" :
             {
-                "maximum-immutable-share-size": 2**32 - 1,
+                b"maximum-immutable-share-size": 2**32 - 1,
             },
-            "application-version": str(allmydata.__full_version__),
+            b"application-version": str(allmydata.__full_version__),
         }
         if mode == "small":
             self.version = {
-                "http://allmydata.org/tahoe/protocols/storage/v1" :
+                b"http://allmydata.org/tahoe/protocols/storage/v1" :
                 {
-                    "maximum-immutable-share-size": 10,
+                    b"maximum-immutable-share-size": 10,
                 },
-                "application-version": str(allmydata.__full_version__),
+                b"application-version": str(allmydata.__full_version__),
             }
 
 
@@ -306,9 +306,9 @@ class GoodServer(unittest.TestCase, ShouldFailMixin, SetDEPMixin):
     def _check_large(self, newuri, size):
         u = uri.from_string(newuri)
         self.failUnless(isinstance(u, uri.CHKFileURI))
-        self.failUnless(isinstance(u.get_storage_index(), str))
+        self.failUnless(isinstance(u.get_storage_index(), bytes))
         self.failUnlessEqual(len(u.get_storage_index()), 16)
-        self.failUnless(isinstance(u.key, str))
+        self.failUnless(isinstance(u.key, bytes))
         self.failUnlessEqual(len(u.key), 16)
         self.failUnlessEqual(u.size, size)
 
@@ -431,9 +431,9 @@ class ServerErrors(unittest.TestCase, ShouldFailMixin, SetDEPMixin):
     def _check_large(self, newuri, size):
         u = uri.from_string(newuri)
         self.failUnless(isinstance(u, uri.CHKFileURI))
-        self.failUnless(isinstance(u.get_storage_index(), str))
+        self.failUnless(isinstance(u.get_storage_index(), bytes))
         self.failUnlessEqual(len(u.get_storage_index()), 16)
-        self.failUnless(isinstance(u.key, str))
+        self.failUnless(isinstance(u.key, bytes))
         self.failUnlessEqual(len(u.key), 16)
         self.failUnlessEqual(u.size, size)
 
@@ -601,9 +601,9 @@ class ServerSelection(unittest.TestCase):
     def _check_large(self, newuri, size):
         u = uri.from_string(newuri)
         self.failUnless(isinstance(u, uri.CHKFileURI))
-        self.failUnless(isinstance(u.get_storage_index(), str))
+        self.failUnless(isinstance(u.get_storage_index(), bytes))
         self.failUnlessEqual(len(u.get_storage_index()), 16)
-        self.failUnless(isinstance(u.key, str))
+        self.failUnless(isinstance(u.key, bytes))
         self.failUnlessEqual(len(u.key), 16)
         self.failUnlessEqual(u.size, size)
 
@@ -1628,7 +1628,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
                             "test_upper_limit_on_readonly_queries",
                             "sent 8 queries to 8 servers",
                             client.upload,
-                            upload.Data('data' * 10000, convergence=b"")))
+                            upload.Data(b'data' * 10000, convergence=b"")))
         return d
 
 

From 40b42441463151465b9c8dee64d7fd8cf5c720b7 Mon Sep 17 00:00:00 2001
From: Itamar Turner-Trauring <itamar@itamarst.org>
Date: Mon, 21 Sep 2020 13:13:26 -0400
Subject: [PATCH 5/6] Port to Python 3.

---
 src/allmydata/test/test_upload.py | 18 +++++++++++++++---
 src/allmydata/util/_python3.py    |  1 +
 2 files changed, 16 insertions(+), 3 deletions(-)
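
Notes, kept below the cut so git am drops them: with the future boilerplate in
place, str(allmydata.__full_version__) yields text, so the fake servers now
encode the advertised version explicitly to keep it as bytes on both Python
versions. A sketch under Python 3 semantics (on Python 2 it relies on the
future.builtins bytes imported at the top of the test module); the version
string below is made up:

    full_version = "allmydata-tahoe/9.9.9"      # hypothetical version string

    as_text = str(full_version)                 # still text
    as_bytes = bytes(full_version, "ascii")     # what the fake servers advertise

    assert isinstance(as_text, str)
    assert as_bytes == b"allmydata-tahoe/9.9.9"

The type(mode) is str check likewise becomes isinstance(mode, str), which
still matches the text literals that unicode_literals and the future str
backport produce.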

diff --git a/src/allmydata/test/test_upload.py b/src/allmydata/test/test_upload.py
index ca4825990..5ce2a29fb 100644
--- a/src/allmydata/test/test_upload.py
+++ b/src/allmydata/test/test_upload.py
@@ -1,5 +1,17 @@
 # -*- coding: utf-8 -*-
 
+"""
+Ported to Python 3.
+"""
+from __future__ import unicode_literals
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
 import os, shutil
 from io import BytesIO
 
@@ -110,7 +122,7 @@ class FakeStorageServer(object):
             {
                 b"maximum-immutable-share-size": 2**32 - 1,
             },
-            b"application-version": str(allmydata.__full_version__),
+            b"application-version": bytes(allmydata.__full_version__, "ascii"),
         }
         if mode == "small":
             self.version = {
@@ -118,7 +130,7 @@ class FakeStorageServer(object):
                 {
                     b"maximum-immutable-share-size": 10,
                 },
-                b"application-version": str(allmydata.__full_version__),
+                b"application-version": bytes(allmydata.__full_version__, "ascii"),
             }
 
 
@@ -215,7 +227,7 @@ class FakeClient(object):
     def __init__(self, mode="good", num_servers=50, reactor=None):
         self.num_servers = num_servers
         self.encoding_params = self.DEFAULT_ENCODING_PARAMETERS.copy()
-        if type(mode) is str:
+        if isinstance(mode, str):
             mode = dict([i,mode] for i in range(num_servers))
         servers = [
             (b"%20d" % fakeid, FakeStorageServer(mode[fakeid], reactor=reactor))
diff --git a/src/allmydata/util/_python3.py b/src/allmydata/util/_python3.py
index d16f05a9a..fc2530f15 100644
--- a/src/allmydata/util/_python3.py
+++ b/src/allmydata/util/_python3.py
@@ -104,6 +104,7 @@ PORTED_TEST_MODULES = [
     "allmydata.test.test_storage",
     "allmydata.test.test_storage_web",
     "allmydata.test.test_time_format",
+    "allmydata.test.test_upload",
     "allmydata.test.test_uri",
     "allmydata.test.test_util",
     "allmydata.test.test_version",

From 5fbbb5be0f8fa0c95d004e950e4d78e256d8acdd Mon Sep 17 00:00:00 2001
From: Itamar Turner-Trauring <itamar@itamarst.org>
Date: Wed, 23 Sep 2020 15:21:47 -0400
Subject: [PATCH 6/6] News file.

---
 newsfragments/3430.minor | 0
 1 file changed, 0 insertions(+), 0 deletions(-)
 create mode 100644 newsfragments/3430.minor

diff --git a/newsfragments/3430.minor b/newsfragments/3430.minor
new file mode 100644
index 000000000..e69de29bb