From 5a40bf47f4b0a81cd695d99ce3523661ba7f44ca Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" <jaraco@jaraco.com>
Date: Fri, 11 Sep 2020 10:28:22 -0400
Subject: [PATCH 01/68] Convert print statements to print functions, even when
 commented. Fixes #3408.

---
 misc/build_helpers/gen-package-table.py       |  4 +-
 .../provisioning/reliability.py               | 34 +++++------
 .../provisioning/test_provisioning.py         |  3 +-
 misc/simulators/bench_spans.py                |  2 +-
 misc/simulators/count_dirs.py                 |  6 +-
 misc/simulators/hashbasedsig.py               |  6 +-
 misc/simulators/ringsim.py                    | 10 ++--
 misc/simulators/simulate_load.py              |  2 +-
 misc/simulators/sizes.py                      |  6 +-
 src/allmydata/immutable/downloader/fetcher.py |  3 +-
 src/allmydata/immutable/happiness_upload.py   | 10 ++--
 src/allmydata/scripts/debug.py                |  2 +-
 src/allmydata/scripts/tahoe_cp.py             |  2 +-
 src/allmydata/scripts/tahoe_manifest.py       |  2 +-
 src/allmydata/test/check_memory.py            | 14 ++---
 src/allmydata/test/cli/test_check.py          |  2 +-
 src/allmydata/test/cli/test_cp.py             |  4 +-
 src/allmydata/test/mutable/test_update.py     |  2 +-
 src/allmydata/test/test_backupdb.py           |  6 +-
 src/allmydata/test/test_checker.py            |  2 +-
 src/allmydata/test/test_deferredutil.py       |  2 +-
 src/allmydata/test/test_happiness.py          |  4 +-
 src/allmydata/test/test_hashtree.py           |  2 +-
 src/allmydata/test/test_immutable.py          |  6 +-
 src/allmydata/test/test_pipeline.py           |  4 +-
 src/allmydata/test/test_repairer.py           |  2 +-
 src/allmydata/test/test_spans.py              | 40 ++++++-------
 src/allmydata/test/test_system.py             |  8 +--
 src/allmydata/test/test_upload.py             |  2 +-
 src/allmydata/test/web/test_grid.py           |  2 +-
 src/allmydata/test/web/test_web.py            |  2 +-
 src/allmydata/util/spans.py                   | 56 +++++++++----------
 32 files changed, 129 insertions(+), 123 deletions(-)

diff --git a/misc/build_helpers/gen-package-table.py b/misc/build_helpers/gen-package-table.py
index a1190820d..ebcfd1ecd 100644
--- a/misc/build_helpers/gen-package-table.py
+++ b/misc/build_helpers/gen-package-table.py
@@ -156,6 +156,6 @@ for pkg in sorted(platform_independent_pkgs):
 print('</table>')
 
 # The document does validate, but not when it is included at the bottom of a directory listing.
-#print '<hr>'
-#print '<a href="http://validator.w3.org/check?uri=referer" target="_blank"><img border="0" src="http://www.w3.org/Icons/valid-html401-blue" alt="Valid HTML 4.01 Transitional" height="31" width="88"></a>'
+#print('<hr>')
+#print('<a href="http://validator.w3.org/check?uri=referer" target="_blank"><img border="0" src="http://www.w3.org/Icons/valid-html401-blue" alt="Valid HTML 4.01 Transitional" height="31" width="88"></a>')
 print('</body></html>')
diff --git a/misc/operations_helpers/provisioning/reliability.py b/misc/operations_helpers/provisioning/reliability.py
index dc241b9d1..fe274c875 100644
--- a/misc/operations_helpers/provisioning/reliability.py
+++ b/misc/operations_helpers/provisioning/reliability.py
@@ -1,5 +1,7 @@
 #! /usr/bin/python
 
+from __future__ import print_function
+
 import math
 from allmydata.util import statistics
 from numpy import array, matrix, dot
@@ -72,11 +74,11 @@ class ReliabilityModel(object):
 
         repair = self.build_repair_matrix(k, N, R)
 
-        #print "DECAY:", decay
-        #print "OLD-POST-REPAIR:", old_post_repair
-        #print "NEW-POST-REPAIR:", decay * repair
-        #print "REPAIR:", repair
-        #print "DIFF:", (old_post_repair - decay * repair)
+        #print("DECAY:", decay)
+        #print("OLD-POST-REPAIR:", old_post_repair)
+        #print("NEW-POST-REPAIR:", decay * repair)
+        #print("REPAIR:", repair)
+        #print("DIFF:", (old_post_repair - decay * repair))
 
         START = array([0]*N + [1])
         DEAD = array([1]*k + [0]*(1+N-k))
@@ -85,9 +87,9 @@ class ReliabilityModel(object):
                                  [N-i for i in range(k, R)] +
                                  [0]*(1+N-R))
         assert REPAIR_newshares.shape[0] == N+1
-        #print "START", START
-        #print "REPAIRp", REPAIRp
-        #print "REPAIR_newshares", REPAIR_newshares
+        #print("START", START)
+        #print("REPAIRp", REPAIRp)
+        #print("REPAIR_newshares", REPAIR_newshares)
 
         unmaintained_state = START
         maintained_state = START
@@ -141,15 +143,15 @@ class ReliabilityModel(object):
         #    return "%dy.%dm" % (int(seconds/YEAR), int( (seconds%YEAR)/MONTH))
         #needed_repairs_total = sum(needed_repairs)
         #needed_new_shares_total = sum(needed_new_shares)
-        #print "at 2y:"
-        #print " unmaintained", unmaintained_state
-        #print " maintained", maintained_state
-        #print " number of repairs", needed_repairs_total
-        #print " new shares generated", needed_new_shares_total
+        #print("at 2y:")
+        #print(" unmaintained", unmaintained_state)
+        #print(" maintained", maintained_state)
+        #print(" number of repairs", needed_repairs_total)
+        #print(" new shares generated", needed_new_shares_total)
         #repair_rate_inv = report_span / needed_repairs_total
-        #print "  avg repair rate: once every %s" % yandm(repair_rate_inv)
-        #print "  avg repair download: one share every %s" % yandm(repair_rate_inv/k)
-        #print "  avg repair upload: one share every %s" % yandm(report_span / needed_new_shares_total)
+        #print("  avg repair rate: once every %s" % yandm(repair_rate_inv))
+        #print("  avg repair download: one share every %s" % yandm(repair_rate_inv/k))
+        #print("  avg repair upload: one share every %s" % yandm(report_span / needed_new_shares_total))
 
         return report
 
diff --git a/misc/operations_helpers/provisioning/test_provisioning.py b/misc/operations_helpers/provisioning/test_provisioning.py
index 8835b79db..2b71c8566 100644
--- a/misc/operations_helpers/provisioning/test_provisioning.py
+++ b/misc/operations_helpers/provisioning/test_provisioning.py
@@ -1,3 +1,4 @@
+from __future__ import print_function
 
 import unittest
 from allmydata import provisioning
@@ -99,7 +100,7 @@ class Reliability(unittest.TestCase):
         self.failUnlessEqual(len(r.samples), 20)
 
         last_row = r.samples[-1]
-        #print last_row
+        #print(last_row)
         (when, unmaintained_shareprobs, maintained_shareprobs,
          P_repaired_last_check_period,
          cumulative_number_of_repairs,
diff --git a/misc/simulators/bench_spans.py b/misc/simulators/bench_spans.py
index d6dc12d7a..c696dac1e 100644
--- a/misc/simulators/bench_spans.py
+++ b/misc/simulators/bench_spans.py
@@ -74,7 +74,7 @@ class B(object):
             count += 1
             inline = self.inf.readline()
 
-        # print self.stats
+        # print(self.stats)
 
 benchutil.print_bench_footer(UNITS_PER_SECOND=1000000)
 print("(microseconds)")
diff --git a/misc/simulators/count_dirs.py b/misc/simulators/count_dirs.py
index 6b52ba96a..22eda8917 100644
--- a/misc/simulators/count_dirs.py
+++ b/misc/simulators/count_dirs.py
@@ -89,9 +89,9 @@ def scan(root):
     num_files = 0
     num_dirs = 0
     for absroot, dirs, files in os.walk(root):
-        #print absroot
-        #print " %d files" % len(files)
-        #print " %d subdirs" % len(dirs)
+        #print(absroot)
+        #print(" %d files" % len(files))
+        #print(" %d subdirs" % len(dirs))
         num_files += len(files)
         num_dirs += len(dirs)
         stringsize = len(''.join(files) + ''.join(dirs))
diff --git a/misc/simulators/hashbasedsig.py b/misc/simulators/hashbasedsig.py
index 5c135adf6..dc141744f 100644
--- a/misc/simulators/hashbasedsig.py
+++ b/misc/simulators/hashbasedsig.py
@@ -146,8 +146,8 @@ def calculate(K, K1, K2, q_max, L_hash, trees):
             lg_q = lg(q_cand)
             lg_pforge = [lg_px[x] + (lg_q*x - lg_K2)*q_cand for x in xrange(1, j)]
             if max(lg_pforge) < -L_hash + lg(j) and lg_px[j-1] + 1.0 < -L_hash:
-                #print "K = %d, K1 = %d, K2 = %d, L_hash = %d, lg_K2 = %.3f, q = %d, lg_pforge_1 = %.3f, lg_pforge_2 = %.3f, lg_pforge_3 = %.3f" \
-                #      % (K, K1, K2, L_hash, lg_K2, q, lg_pforge_1, lg_pforge_2, lg_pforge_3)
+                #print("K = %d, K1 = %d, K2 = %d, L_hash = %d, lg_K2 = %.3f, q = %d, lg_pforge_1 = %.3f, lg_pforge_2 = %.3f, lg_pforge_3 = %.3f"
+                #      % (K, K1, K2, L_hash, lg_K2, q, lg_pforge_1, lg_pforge_2, lg_pforge_3))
                 q = q_cand
                 break
 
@@ -268,7 +268,7 @@ def search():
                         trees[y] = (h, c_y, (dau, tri))
 
         #for x in xrange(1, K_max+1):
-        #    print x, trees[x]
+        #    print(x, trees[x])
 
         candidates = []
         progress = 0
diff --git a/misc/simulators/ringsim.py b/misc/simulators/ringsim.py
index 1eec7a466..e6616351c 100644
--- a/misc/simulators/ringsim.py
+++ b/misc/simulators/ringsim.py
@@ -130,8 +130,8 @@ class Ring(object):
         # used is actual per-server ciphertext
         usedpf = [1.0*u/numfiles for u in used]
         # usedpf is actual per-server-per-file ciphertext
-        #print "min/max usage: %s/%s" % (abbreviate_space(used[-1]),
-        #                                abbreviate_space(used[0]))
+        #print("min/max usage: %s/%s" % (abbreviate_space(used[-1]),
+        #                                abbreviate_space(used[0])))
         avg_usage_per_file = avg_space_per_file/len(self.servers)
         # avg_usage_per_file is expected per-server-per-file ciphertext
         spreadpf = usedpf[0] - usedpf[-1]
@@ -146,7 +146,7 @@ class Ring(object):
             abbreviate_space(avg_usage_per_file) ), end=' ')
         print("spread-pf: %s (%.2f%%)" % (
             abbreviate_space(spreadpf), 100.0*spreadpf/avg_usage_per_file), end=' ')
-        #print "average_usage:", abbreviate_space(average_usagepf)
+        #print("average_usage:", abbreviate_space(average_usagepf))
         print("stddev: %s (%.2f%%)" % (abbreviate_space(std_deviation),
                                        100.0*sd_of_total))
         if self.SHOW_MINMAX:
@@ -176,14 +176,14 @@ def do_run(ring, opts):
     for filenum in count(0):
         #used = list(reversed(sorted([s.used for s in ring.servers])))
         #used = [s.used for s in ring.servers]
-        #print used
+        #print(used)
         si = myhash(fileseed+str(filenum)).hexdigest()
         filesize = make_up_a_file_size(si)
         sharesize = filesize / opts["k"]
         if filenum%4000==0 and filenum > 1:
             ring.dump_usage(filenum, avg_space_per_file)
         servers = ring.servers_for_si(si)
-        #print ring.show_servers(servers[:opts["N"]])
+        #print(ring.show_servers(servers[:opts["N"]]))
         remaining_shares = opts["N"]
         index = 0
         server_was_full = False
diff --git a/misc/simulators/simulate_load.py b/misc/simulators/simulate_load.py
index f522a6d93..945d96990 100644
--- a/misc/simulators/simulate_load.py
+++ b/misc/simulators/simulate_load.py
@@ -59,7 +59,7 @@ def go(permutedpeerlist):
                     server.full_at_tick = tick
                     fullservers += 1
                     if fullservers == len(servers):
-                        # print "Couldn't place share -- all servers full.  Stopping."
+                        # print("Couldn't place share -- all servers full.  Stopping.")
                         return (servers, doubled_up_shares)
 
             i += 1
diff --git a/misc/simulators/sizes.py b/misc/simulators/sizes.py
index b55c664a5..1719700fa 100644
--- a/misc/simulators/sizes.py
+++ b/misc/simulators/sizes.py
@@ -96,9 +96,9 @@ class Sizes(object):
             # means storing (and eventually transmitting) more hashes. This
             # count includes all the low-level share hashes and the root.
             hash_nodes = (num_leaves*k - 1) / (k - 1)
-            #print "hash_depth", d
-            #print "num_leaves", num_leaves
-            #print "hash_nodes", hash_nodes
+            #print("hash_depth", d)
+            #print("num_leaves", num_leaves)
+            #print("hash_nodes", hash_nodes)
             # the storage overhead is this
             self.share_storage_overhead = 32 * (hash_nodes - 1)
             # the transmission overhead is smaller: if we actually transmit
diff --git a/src/allmydata/immutable/downloader/fetcher.py b/src/allmydata/immutable/downloader/fetcher.py
index a747fda6c..349c40e96 100644
--- a/src/allmydata/immutable/downloader/fetcher.py
+++ b/src/allmydata/immutable/downloader/fetcher.py
@@ -1,3 +1,4 @@
+from __future__ import print_function
 
 from twisted.python.failure import Failure
 from foolscap.api import eventually
@@ -100,7 +101,7 @@ class SegmentFetcher(object):
             self._node.fetch_failed(self, f)
             return
 
-        #print "LOOP", self._blocks.keys(), "active:", self._active_share_map, "overdue:", self._overdue_share_map, "unused:", self._shares
+        #print("LOOP", self._blocks.keys(), "active:", self._active_share_map, "overdue:", self._overdue_share_map, "unused:", self._shares)
         # Should we sent out more requests?
         while len(set(self._blocks.keys())
                   | set(self._active_share_map.keys())
diff --git a/src/allmydata/immutable/happiness_upload.py b/src/allmydata/immutable/happiness_upload.py
index 9716aaef2..3e3eedbc9 100644
--- a/src/allmydata/immutable/happiness_upload.py
+++ b/src/allmydata/immutable/happiness_upload.py
@@ -198,7 +198,7 @@ def _distribute_homeless_shares(mappings, homeless_shares, peers_to_shares):
     available peers. If possible a share will be placed on the server it was
     originally on, signifying the lease should be renewed instead.
     """
-    #print "mappings, homeless_shares, peers_to_shares %s %s %s" % (mappings, homeless_shares, peers_to_shares)
+    #print("mappings, homeless_shares, peers_to_shares %s %s %s" % (mappings, homeless_shares, peers_to_shares))
     servermap_peerids = set([key for key in peers_to_shares])
     servermap_shareids = set()
     for key in sorted(peers_to_shares.keys()):
@@ -272,8 +272,8 @@ def _servermap_flow_graph(peers, shares, servermap):
     indexedShares = []
     sink_num = len(peers) + len(shares) + 1
     graph.append([peer_to_index[peer] for peer in peers])
-    #print "share_to_index %s" % share_to_index
-    #print "servermap %s" % servermap
+    #print("share_to_index %s" % share_to_index)
+    #print("servermap %s" % servermap)
     for peer in peers:
         if peer in servermap:
             for s in servermap[peer]:
@@ -386,8 +386,8 @@ def share_placement(peers, readonly_peers, shares, peers_to_shares):
 
     new_shares = new_shares - existing_shares - used_shares
     new_mappings = _calculate_mappings(new_peers, new_shares)
-    #print "new_peers %s" % new_peers
-    #print "new_mappings %s" % new_mappings
+    #print("new_peers %s" % new_peers)
+    #print("new_mappings %s" % new_mappings)
     mappings = dict(list(readonly_mappings.items()) + list(existing_mappings.items()) + list(new_mappings.items()))
     homeless_shares = set()
     for share in mappings:
diff --git a/src/allmydata/scripts/debug.py b/src/allmydata/scripts/debug.py
index 789218f8b..e6d332444 100644
--- a/src/allmydata/scripts/debug.py
+++ b/src/allmydata/scripts/debug.py
@@ -346,7 +346,7 @@ def dump_MDMF_share(m, length, options):
     print(" MDMF contents:", file=out)
     print("  seqnum: %d" % seqnum, file=out)
     print("  root_hash: %s" % base32.b2a(root_hash), file=out)
-    #print >>out, "  IV: %s" % base32.b2a(IV)
+    #print("  IV: %s" % base32.b2a(IV), file=out)
     print("  required_shares: %d" % k, file=out)
     print("  total_shares: %d" % N, file=out)
     print("  segsize: %d" % segsize, file=out)
diff --git a/src/allmydata/scripts/tahoe_cp.py b/src/allmydata/scripts/tahoe_cp.py
index c90dca072..f7879f35c 100644
--- a/src/allmydata/scripts/tahoe_cp.py
+++ b/src/allmydata/scripts/tahoe_cp.py
@@ -858,7 +858,7 @@ class Copier(object):
 
 
     def progress(self, message):
-        #print message
+        #print(message)
         if self.progressfunc:
             self.progressfunc(message)
 
diff --git a/src/allmydata/scripts/tahoe_manifest.py b/src/allmydata/scripts/tahoe_manifest.py
index b1daa7717..032c65d51 100644
--- a/src/allmydata/scripts/tahoe_manifest.py
+++ b/src/allmydata/scripts/tahoe_manifest.py
@@ -44,7 +44,7 @@ class ManifestStreamer(LineOnlyReceiver, object):
         if resp.status not in (200, 302):
             print(format_http_error("ERROR", resp), file=stderr)
             return 1
-        #print "RESP", dir(resp)
+        #print("RESP", dir(resp))
         # use Twisted to split this into lines
         self.in_error = False
         while True:
diff --git a/src/allmydata/test/check_memory.py b/src/allmydata/test/check_memory.py
index e12ed3d54..ede112df1 100644
--- a/src/allmydata/test/check_memory.py
+++ b/src/allmydata/test/check_memory.py
@@ -115,21 +115,21 @@ class SystemFramework(pollmixin.PollMixin):
             self.failed.raiseException()
 
     def setUp(self):
-        #print "STARTING"
+        #print("STARTING")
         self.stats = {}
         self.statsfile = open(os.path.join(self.basedir, "stats.out"), "a")
         self.make_introducer()
         d = self.start_client()
         def _record_control_furl(control_furl):
             self.control_furl = control_furl
-            #print "OBTAINING '%s'" % (control_furl,)
+            #print("OBTAINING '%s'" % (control_furl,))
             return self.tub.getReference(self.control_furl)
         d.addCallback(_record_control_furl)
         def _record_control(control_rref):
             self.control_rref = control_rref
         d.addCallback(_record_control)
         def _ready(res):
-            #print "CLIENT READY"
+            #print("CLIENT READY")
             pass
         d.addCallback(_ready)
         return d
@@ -430,10 +430,10 @@ this file are ignored.
         return d
 
     def do_test(self):
-        #print "CLIENT STARTED"
-        #print "FURL", self.control_furl
-        #print "RREF", self.control_rref
-        #print
+        #print("CLIENT STARTED")
+        #print("FURL", self.control_furl)
+        #print("RREF", self.control_rref)
+        #print()
         kB = 1000; MB = 1000*1000
         files = {}
         uris = {}
diff --git a/src/allmydata/test/cli/test_check.py b/src/allmydata/test/cli/test_check.py
index 85649e262..8cf963da6 100644
--- a/src/allmydata/test/cli/test_check.py
+++ b/src/allmydata/test/cli/test_check.py
@@ -362,7 +362,7 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
         #              self.do_cli("deep-check", "--repair", self.rooturi))
         #def _deep_check_repair_failed((rc, out, err)):
         #    self.failIfEqual(rc, 0)
-        #    print err
+        #    print(err)
         #    self.failUnlessIn("ERROR: UnrecoverableFileError", err)
         #    self.failIf("done:" in out, out)
         #d.addCallback(_deep_check_repair_failed)
diff --git a/src/allmydata/test/cli/test_cp.py b/src/allmydata/test/cli/test_cp.py
index 59331029b..ba1894f1c 100644
--- a/src/allmydata/test/cli/test_cp.py
+++ b/src/allmydata/test/cli/test_cp.py
@@ -983,7 +983,7 @@ class CopyOut(GridTestMixin, CLITestMixin, unittest.TestCase):
     def do_one_test(self, case, orig_expected):
         expected = set(orig_expected)
         printable_expected = ",".join(sorted(expected))
-        #print "---", case, ":", printable_expected
+        #print("---", case, ":", printable_expected)
 
         for f in orig_expected:
             # f is "dir/file" or "dir/sub/file" or "dir/" or "dir/sub/"
@@ -1010,7 +1010,7 @@ class CopyOut(GridTestMixin, CLITestMixin, unittest.TestCase):
         # then we run various forms of "cp [-r] TAHOETHING to[/missing]"
         # and see what happens.
         d = defer.succeed(None)
-        #print
+        #print()
 
         for line in COPYOUT_TESTCASES.splitlines():
             if "#" in line:
diff --git a/src/allmydata/test/mutable/test_update.py b/src/allmydata/test/mutable/test_update.py
index f03f7d226..971273a80 100644
--- a/src/allmydata/test/mutable/test_update.py
+++ b/src/allmydata/test/mutable/test_update.py
@@ -108,7 +108,7 @@ class Update(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin):
                     for (start,end) in gotmods]
         expspans = ["%d:%d=%s" % (start,end,expected[start:end])
                     for (start,end) in expmods]
-        #print "expecting: %s" % expspans
+        #print("expecting: %s" % expspans)
 
         if got != expected:
             print("differences:")
diff --git a/src/allmydata/test/test_backupdb.py b/src/allmydata/test/test_backupdb.py
index 858640d62..04b264d39 100644
--- a/src/allmydata/test/test_backupdb.py
+++ b/src/allmydata/test/test_backupdb.py
@@ -1,3 +1,5 @@
+from __future__ import print_function
+
 import sys
 import os.path, time
 from six.moves import cStringIO as StringIO
@@ -229,7 +231,7 @@ class BackupDB(unittest.TestCase):
         files = [fn for fn in listdir_unicode(unicode(basedir)) if fn.endswith(".txt")]
         self.failUnlessEqual(len(files), 1)
         foo_fn = os.path.join(basedir, files[0])
-        #print foo_fn, type(foo_fn)
+        #print(foo_fn, type(foo_fn))
 
         r = bdb.check_file(foo_fn)
         self.failUnlessEqual(r.was_uploaded(), False)
@@ -240,7 +242,7 @@ class BackupDB(unittest.TestCase):
         self.failUnlessEqual(r.should_check(), False)
 
         bar_fn = self.writeto(u"b\u00e5r.txt", "bar.txt")
-        #print bar_fn, type(bar_fn)
+        #print(bar_fn, type(bar_fn))
 
         r = bdb.check_file(bar_fn)
         self.failUnlessEqual(r.was_uploaded(), False)
diff --git a/src/allmydata/test/test_checker.py b/src/allmydata/test/test_checker.py
index 2296194f0..882356aeb 100644
--- a/src/allmydata/test/test_checker.py
+++ b/src/allmydata/test/test_checker.py
@@ -691,7 +691,7 @@ class BalancingAct(GridTestMixin, unittest.TestCase):
         def add_three(_, i):
             # Add a new server with just share 3
             self.add_server_with_share(i, self.uri, 3)
-            #print self._pretty_shares_chart(self.uri)
+            #print(self._pretty_shares_chart(self.uri))
         for i in range(1,5):
             d.addCallback(add_three, i)
 
diff --git a/src/allmydata/test/test_deferredutil.py b/src/allmydata/test/test_deferredutil.py
index d8f386e5f..6ebc93556 100644
--- a/src/allmydata/test/test_deferredutil.py
+++ b/src/allmydata/test/test_deferredutil.py
@@ -67,7 +67,7 @@ class DeferredUtilTests(unittest.TestCase, deferredutil.WaitForDelayedCallsMixin
         Trial would report an unclean reactor error for this test.
         """
         def _trigger():
-            #print "trigger"
+            #print("trigger")
             pass
         reactor.callLater(0.1, _trigger)
 
diff --git a/src/allmydata/test/test_happiness.py b/src/allmydata/test/test_happiness.py
index bf73ca2d8..021b75d33 100644
--- a/src/allmydata/test/test_happiness.py
+++ b/src/allmydata/test/test_happiness.py
@@ -182,9 +182,9 @@ class Happiness(unittest.TestCase):
         # we can achieve more happiness by moving "2" or "3" to server "d"
 
         places = happiness_upload.share_placement(peers, readonly_peers, shares, peers_to_shares)
-        #print "places %s" % places
+        #print("places %s" % places)
         #places = happiness_upload.slow_share_placement(peers, readonly_peers, shares, peers_to_shares)
-        #print "places %s" % places
+        #print("places %s" % places)
 
         happiness = happiness_upload.calculate_happiness(places)
         self.assertEqual(4, happiness)
diff --git a/src/allmydata/test/test_hashtree.py b/src/allmydata/test/test_hashtree.py
index d1d4cb252..5abe2095e 100644
--- a/src/allmydata/test/test_hashtree.py
+++ b/src/allmydata/test/test_hashtree.py
@@ -57,7 +57,7 @@ class Complete(unittest.TestCase):
                     ]
         self.failUnlessEqual(list(ht.depth_first()), expected)
         d = "\n" + ht.dump()
-        #print d
+        #print(d)
         self.failUnless("\n  0:" in d)
         self.failUnless("\n    1:" in d)
         self.failUnless("\n      3:" in d)
diff --git a/src/allmydata/test/test_immutable.py b/src/allmydata/test/test_immutable.py
index 0c7a15199..867d3a725 100644
--- a/src/allmydata/test/test_immutable.py
+++ b/src/allmydata/test/test_immutable.py
@@ -224,7 +224,7 @@ class Test(GridTestMixin, unittest.TestCase, common.ShouldFailMixin):
         d.addCallback(self._download_and_check_plaintext)
         def _after_download(ign):
             num_reads = self._count_reads()
-            #print num_reads
+            #print(num_reads)
             self.failIf(num_reads > 41, num_reads)
         d.addCallback(_after_download)
         return d
@@ -237,7 +237,7 @@ class Test(GridTestMixin, unittest.TestCase, common.ShouldFailMixin):
                       self.delete_shares_numbered(self.uri, range(7)))
         d.addCallback(self._download_and_check_plaintext)
         def _after_download(num_reads):
-            #print num_reads
+            #print(num_reads)
             self.failIf(num_reads > 41, num_reads)
         d.addCallback(_after_download)
         return d
@@ -284,7 +284,7 @@ class Test(GridTestMixin, unittest.TestCase, common.ShouldFailMixin):
                                  download_to_data, self.filenode)
             def _check_numreads(ign):
                 num_reads = self._count_reads() - start_reads
-                #print num_reads
+                #print(num_reads)
 
                 # To pass this test, you are required to give up before
                 # reading all of the share data. Actually, we could give up
diff --git a/src/allmydata/test/test_pipeline.py b/src/allmydata/test/test_pipeline.py
index 1295be363..31d952836 100644
--- a/src/allmydata/test/test_pipeline.py
+++ b/src/allmydata/test/test_pipeline.py
@@ -29,8 +29,8 @@ class Pipeline(unittest.TestCase):
         return d
 
     def failUnlessCallsAre(self, expected):
-        #print self.calls
-        #print expected
+        #print(self.calls)
+        #print(expected)
         self.failUnlessEqual(len(self.calls), len(expected), self.calls)
         for i,c in enumerate(self.calls):
             self.failUnlessEqual(c[1:], expected[i], str(i))
diff --git a/src/allmydata/test/test_repairer.py b/src/allmydata/test/test_repairer.py
index ec521fe96..4fdffe70e 100644
--- a/src/allmydata/test/test_repairer.py
+++ b/src/allmydata/test/test_repairer.py
@@ -335,7 +335,7 @@ class Verifier(GridTestMixin, unittest.TestCase, RepairTestMixin):
             self.corrupt_shares_numbered(self.uri, [0], _corruptor)
         results = {}
         def _did_check(vr, i):
-            #print "corrupt %d: healthy=%s" % (i, vr.is_healthy())
+            #print("corrupt %d: healthy=%s" % (i, vr.is_healthy()))
             results[i] = vr.is_healthy()
         def _start(ign):
             d = defer.succeed(None)
diff --git a/src/allmydata/test/test_spans.py b/src/allmydata/test/test_spans.py
index 02d8292f3..281f916c4 100644
--- a/src/allmydata/test/test_spans.py
+++ b/src/allmydata/test/test_spans.py
@@ -246,14 +246,14 @@ class ByteSpans(unittest.TestCase):
                 ns1.add(start, length); ns2.add(start, length)
             return ns1, ns2
 
-        #print
+        #print()
         for i in range(1000):
             what = sha256(seed+bytes(i))
             op = what[0:1]
             subop = what[1:2]
             start = int(what[2:4], 16)
             length = max(1,int(what[5:6], 16))
-            #print what
+            #print(what)
             if op in b"0":
                 if subop in b"01234":
                     s1 = S1(); s2 = S2()
@@ -261,34 +261,34 @@ class ByteSpans(unittest.TestCase):
                     s1 = S1(start, length); s2 = S2(start, length)
                 else:
                     s1 = S1(s1); s2 = S2(s2)
-                #print "s2 = %s" % s2.dump()
+                #print("s2 = %s" % s2.dump())
             elif op in b"123":
-                #print "s2.add(%d,%d)" % (start, length)
+                #print("s2.add(%d,%d)" % (start, length))
                 s1.add(start, length); s2.add(start, length)
             elif op in b"456":
-                #print "s2.remove(%d,%d)" % (start, length)
+                #print("s2.remove(%d,%d)" % (start, length))
                 s1.remove(start, length); s2.remove(start, length)
             elif op in b"78":
                 ns1, ns2 = _create(what[7:11])
-                #print "s2 + %s" % ns2.dump()
+                #print("s2 + %s" % ns2.dump())
                 s1 = s1 + ns1; s2 = s2 + ns2
             elif op in b"9a":
                 ns1, ns2 = _create(what[7:11])
-                #print "%s - %s" % (s2.dump(), ns2.dump())
+                #print("%s - %s" % (s2.dump(), ns2.dump()))
                 s1 = s1 - ns1; s2 = s2 - ns2
             elif op in b"bc":
                 ns1, ns2 = _create(what[7:11])
-                #print "s2 += %s" % ns2.dump()
+                #print("s2 += %s" % ns2.dump())
                 s1 += ns1; s2 += ns2
             elif op in b"de":
                 ns1, ns2 = _create(what[7:11])
-                #print "%s -= %s" % (s2.dump(), ns2.dump())
+                #print("%s -= %s" % (s2.dump(), ns2.dump()))
                 s1 -= ns1; s2 -= ns2
             else:
                 ns1, ns2 = _create(what[7:11])
-                #print "%s &= %s" % (s2.dump(), ns2.dump())
+                #print("%s &= %s" % (s2.dump(), ns2.dump()))
                 s1 = s1 & ns1; s2 = s2 & ns2
-            #print "s2 now %s" % s2.dump()
+            #print("s2 now %s" % s2.dump())
             self.failUnlessEqual(list(s1.each()), list(s2.each()))
             self.failUnlessEqual(s1.len(), s2.len())
             self.failUnlessEqual(bool(s1), bool(s2))
@@ -324,7 +324,7 @@ class ByteSpans(unittest.TestCase):
     def _test_overlap(self, a, b, c, d):
         s1 = set(range(a,a+b))
         s2 = set(range(c,c+d))
-        #print "---"
+        #print("---")
         #self._show_overlap(s1, "1")
         #self._show_overlap(s2, "2")
         o = overlap(a,b,c,d)
@@ -580,33 +580,33 @@ class StringSpans(unittest.TestCase):
                 ns2.add(start, _randstr(length, what[7:9]))
             return ns1, ns2
 
-        #print
+        #print()
         for i in range(1000):
             what = sha256(seed+bytes(i))
             op = what[0:1]
             subop = what[1:2]
             start = int(what[2:4], 16)
             length = max(1,int(what[5:6], 16))
-            #print what
+            #print(what)
             if op in b"0":
                 if subop in b"0123456":
                     s1 = S1(); s2 = S2()
                 else:
                     s1, s2 = _create(what[7:11])
-                #print "s2 = %s" % list(s2._dump())
+                #print("s2 = %s" % list(s2._dump()))
             elif op in b"123456":
-                #print "s2.add(%d,%d)" % (start, length)
+                #print("s2.add(%d,%d)" % (start, length))
                 s1.add(start, _randstr(length, what[7:9]));
                 s2.add(start, _randstr(length, what[7:9]))
             elif op in b"789abc":
-                #print "s2.remove(%d,%d)" % (start, length)
+                #print("s2.remove(%d,%d)" % (start, length))
                 s1.remove(start, length); s2.remove(start, length)
             else:
-                #print "s2.pop(%d,%d)" % (start, length)
+                #print("s2.pop(%d,%d)" % (start, length))
                 d1 = s1.pop(start, length); d2 = s2.pop(start, length)
                 self.failUnlessEqual(d1, d2)
-            #print "s1 now %s" % list(s1._dump())
-            #print "s2 now %s" % list(s2._dump())
+            #print("s1 now %s" % list(s1._dump()))
+            #print("s2 now %s" % list(s2._dump()))
             self.failUnlessEqual(s1.len(), s2.len())
             self.failUnlessEqual(list(s1._dump()), list(s2._dump()))
             for j in range(100):
diff --git a/src/allmydata/test/test_system.py b/src/allmydata/test/test_system.py
index 96fdfaba7..cc1ee21bb 100644
--- a/src/allmydata/test/test_system.py
+++ b/src/allmydata/test/test_system.py
@@ -1308,7 +1308,7 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase):
             d = self.clients[1].tub.getReference(sp_furl)
             d.addCallback(lambda sp_rref: sp_rref.callRemote("get_stats"))
             def _got_stats(stats):
-                #print "STATS"
+                #print("STATS")
                 #from pprint import pprint
                 #pprint(stats)
                 s = stats["stats"]
@@ -1748,7 +1748,7 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase):
         return d
 
     def log(self, res, *args, **kwargs):
-        # print "MSG: %s  RES: %s" % (msg, args)
+        # print("MSG: %s  RES: %s" % (msg, args))
         log.msg(*args, **kwargs)
         return res
 
@@ -2647,8 +2647,8 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase):
 ##             return self._run_cli(argv)
 ##         d.addCallback(_ls_missing)
 ##         def _check_ls_missing((out,err)):
-##             print "OUT", out
-##             print "ERR", err
+##             print("OUT", out)
+##             print("ERR", err)
 ##             self.failUnlessEqual(err, "")
 ##         d.addCallback(_check_ls_missing)
 
diff --git a/src/allmydata/test/test_upload.py b/src/allmydata/test/test_upload.py
index 2ec426722..6f35d57d3 100644
--- a/src/allmydata/test/test_upload.py
+++ b/src/allmydata/test/test_upload.py
@@ -130,7 +130,7 @@ class FakeStorageServer(object):
 
     def allocate_buckets(self, storage_index, renew_secret, cancel_secret,
                          sharenums, share_size, canary):
-        # print "FakeStorageServer.allocate_buckets(num=%d, size=%d, mode=%s, queries=%d)" % (len(sharenums), share_size, self.mode, self._alloc_queries)
+        # print("FakeStorageServer.allocate_buckets(num=%d, size=%d, mode=%s, queries=%d)" % (len(sharenums), share_size, self.mode, self._alloc_queries))
         if self.mode == "timeout":
             return defer.Deferred()
         if self.mode == "first-fail":
diff --git a/src/allmydata/test/web/test_grid.py b/src/allmydata/test/web/test_grid.py
index 2a52d98e5..01eb93fa7 100644
--- a/src/allmydata/test/web/test_grid.py
+++ b/src/allmydata/test/web/test_grid.py
@@ -260,7 +260,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
         #
         #d.addCallback(self.CHECK, "dead", "t=check&repair=true")
         #def _got_html_dead(res):
-        #    print res
+        #    print(res)
         #    self.failUnlessIn("Healthy : healthy", res)
         #    self.failIfIn("Not Healthy", res)
         #    self.failUnlessIn("No repair necessary", res)
diff --git a/src/allmydata/test/web/test_web.py b/src/allmydata/test/web/test_web.py
index 0dc12a4c5..11c5a8c5c 100644
--- a/src/allmydata/test/web/test_web.py
+++ b/src/allmydata/test/web/test_web.py
@@ -4310,7 +4310,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
         )
 
     def log(self, res, msg):
-        #print "MSG: %s  RES: %s" % (msg, res)
+        #print("MSG: %s  RES: %s" % (msg, res))
         log.msg(msg)
         return res
 
diff --git a/src/allmydata/util/spans.py b/src/allmydata/util/spans.py
index b224f0950..81e14c0fb 100644
--- a/src/allmydata/util/spans.py
+++ b/src/allmydata/util/spans.py
@@ -55,10 +55,10 @@ class Spans(object):
     def add(self, start, length):
         assert start >= 0
         assert length > 0
-        #print " ADD [%d+%d -%d) to %s" % (start, length, start+length, self.dump())
+        #print(" ADD [%d+%d -%d) to %s" % (start, length, start+length, self.dump()))
         first_overlap = last_overlap = None
         for i,(s_start,s_length) in enumerate(self._spans):
-            #print "  (%d+%d)-> overlap=%s adjacent=%s" % (s_start,s_length, overlap(s_start, s_length, start, length), adjacent(s_start, s_length, start, length))
+            #print("  (%d+%d)-> overlap=%s adjacent=%s" % (s_start,s_length, overlap(s_start, s_length, start, length), adjacent(s_start, s_length, start, length)))
             if (overlap(s_start, s_length, start, length)
                 or adjacent(s_start, s_length, start, length)):
                 last_overlap = i
@@ -68,7 +68,7 @@ class Spans(object):
             # no overlap
             if first_overlap is not None:
                 break
-        #print "  first_overlap", first_overlap, last_overlap
+        #print("  first_overlap", first_overlap, last_overlap)
         if first_overlap is None:
             # no overlap, so just insert the span and sort by starting
             # position.
@@ -83,7 +83,7 @@ class Spans(object):
             newspan_length = newspan_end - newspan_start
             newspan = (newspan_start, newspan_length)
             self._spans[first_overlap:last_overlap+1] = [newspan]
-        #print "  ADD done: %s" % self.dump()
+        #print("  ADD done: %s" % self.dump())
         self._check()
 
         return self
@@ -91,7 +91,7 @@ class Spans(object):
     def remove(self, start, length):
         assert start >= 0
         assert length > 0
-        #print " REMOVE [%d+%d -%d) from %s" % (start, length, start+length, self.dump())
+        #print(" REMOVE [%d+%d -%d) from %s" % (start, length, start+length, self.dump()))
         first_complete_overlap = last_complete_overlap = None
         for i,(s_start,s_length) in enumerate(self._spans):
             s_end = s_start + s_length
@@ -144,7 +144,7 @@ class Spans(object):
                     break
         if first_complete_overlap is not None:
             del self._spans[first_complete_overlap:last_complete_overlap+1]
-        #print "  REMOVE done: %s" % self.dump()
+        #print("  REMOVE done: %s" % self.dump())
         self._check()
         return self
 
@@ -282,26 +282,26 @@ class DataSpans(object):
 
     def get(self, start, length):
         # returns a string of LENGTH, or None
-        #print "get", start, length, self.spans
+        #print("get", start, length, self.spans)
         end = start+length
         for (s_start,s_data) in self.spans:
             s_end = s_start+len(s_data)
-            #print " ",s_start,s_end
+            #print(" ",s_start,s_end)
             if s_start <= start < s_end:
                 # we want some data from this span. Because we maintain
                 # strictly merged and non-overlapping spans, everything we
                 # want must be in this span.
                 offset = start - s_start
                 if offset + length > len(s_data):
-                    #print " None, span falls short"
+                    #print(" None, span falls short")
                     return None # span falls short
-                #print " some", s_data[offset:offset+length]
+                #print(" some", s_data[offset:offset+length])
                 return s_data[offset:offset+length]
             if s_start >= end:
                 # we've gone too far: no further spans will overlap
-                #print " None, gone too far"
+                #print(" None, gone too far")
                 return None
-        #print " None, ran out of spans"
+        #print(" None, ran out of spans")
         return None
 
     def add(self, start, data):
@@ -310,13 +310,13 @@ class DataSpans(object):
         #  add new spans
         #  sort
         #  merge adjacent spans
-        #print "add", start, data, self.spans
+        #print("add", start, data, self.spans)
         end = start + len(data)
         i = 0
         while len(data):
-            #print " loop", start, data, i, len(self.spans), self.spans
+            #print(" loop", start, data, i, len(self.spans), self.spans)
             if i >= len(self.spans):
-                #print " append and done"
+                #print(" append and done")
                 # append a last span
                 self.spans.append( (start, data) )
                 break
@@ -333,7 +333,7 @@ class DataSpans(object):
             # A). We handle E by replacing the middle and terminating.
             if start < s_start:
                 # case A: insert a new span, then loop with the remainder
-                #print " insert new span"
+                #print(" insert new span")
                 s_len = s_start-start
                 self.spans.insert(i, (start, data[:s_len]))
                 i += 1
@@ -343,12 +343,12 @@ class DataSpans(object):
             s_len = len(s_data)
             s_end = s_start+s_len
             if s_start <= start < s_end:
-                #print " modify this span", s_start, start, s_end
+                #print(" modify this span", s_start, start, s_end)
                 # we want to modify some data in this span: a prefix, a
                 # suffix, or the whole thing
                 if s_start == start:
                     if s_end <= end:
-                        #print " replace whole segment"
+                        #print(" replace whole segment")
                         # case C: replace this segment
                         self.spans[i] = (s_start, data[:s_len])
                         i += 1
@@ -357,36 +357,36 @@ class DataSpans(object):
                         # C2 is where len(data)>0
                         continue
                     # case B: modify the prefix, retain the suffix
-                    #print " modify prefix"
+                    #print(" modify prefix")
                     self.spans[i] = (s_start, data + s_data[len(data):])
                     break
                 if start > s_start and end < s_end:
                     # case E: modify the middle
-                    #print " modify middle"
+                    #print(" modify middle")
                     prefix_len = start - s_start # we retain this much
                     suffix_len = s_end - end # and retain this much
                     newdata = s_data[:prefix_len] + data + s_data[-suffix_len:]
                     self.spans[i] = (s_start, newdata)
                     break
                 # case D: retain the prefix, modify the suffix
-                #print " modify suffix"
+                #print(" modify suffix")
                 prefix_len = start - s_start # we retain this much
                 suffix_len = s_len - prefix_len # we replace this much
-                #print "  ", s_data, prefix_len, suffix_len, s_len, data
+                #print("  ", s_data, prefix_len, suffix_len, s_len, data)
                 self.spans[i] = (s_start,
                                  s_data[:prefix_len] + data[:suffix_len])
                 i += 1
                 start += suffix_len
                 data = data[suffix_len:]
-                #print "  now", start, data
+                #print("  now", start, data)
                 # D2 is where len(data)>0
                 continue
             # else we're not there yet
-            #print " still looking"
+            #print(" still looking")
             i += 1
             continue
         # now merge adjacent spans
-        #print " merging", self.spans
+        #print(" merging", self.spans)
         newspans = []
         for (s_start,s_data) in self.spans:
             if newspans and adjacent(newspans[-1][0], len(newspans[-1][1]),
@@ -396,12 +396,12 @@ class DataSpans(object):
                 newspans.append( (s_start, s_data) )
         self.spans = newspans
         self.assert_invariants()
-        #print " done", self.spans
+        #print(" done", self.spans)
 
     def remove(self, start, length):
         i = 0
         end = start + length
-        #print "remove", start, length, self.spans
+        #print("remove", start, length, self.spans)
         while i < len(self.spans):
             (s_start,s_data) = self.spans[i]
             if s_start >= end:
@@ -441,7 +441,7 @@ class DataSpans(object):
             self.spans[i] = (s_start, left)
             self.spans.insert(i+1, (o_end, right))
             break
-        #print " done", self.spans
+        #print(" done", self.spans)
 
     def pop(self, start, length):
         data = self.get(start, length)

From a7034de8523e0001f51ba53abfa60a780773be28 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" <jaraco@jaraco.com>
Date: Fri, 11 Sep 2020 13:31:37 -0400
Subject: [PATCH 02/68] Add empty newsfragment.

---
 newsfragments/3408.minor | 0
 1 file changed, 0 insertions(+), 0 deletions(-)
 create mode 100644 newsfragments/3408.minor

diff --git a/newsfragments/3408.minor b/newsfragments/3408.minor
new file mode 100644
index 000000000..e69de29bb

From e86e0d761f8d84d453c3863d92e60b38b6b30016 Mon Sep 17 00:00:00 2001
From: Itamar Turner-Trauring <itamar@itamarst.org>
Date: Fri, 11 Sep 2020 14:49:30 -0400
Subject: [PATCH 03/68] Better test coverage for allmydata.storage.immutable.

---
 src/allmydata/test/test_storage.py | 37 +++++++++++++++++++++++++++++-
 1 file changed, 36 insertions(+), 1 deletion(-)

diff --git a/src/allmydata/test/test_storage.py b/src/allmydata/test/test_storage.py
index 4bbbcda30..bba4ae964 100644
--- a/src/allmydata/test/test_storage.py
+++ b/src/allmydata/test/test_storage.py
@@ -30,7 +30,7 @@ from allmydata import interfaces
 from allmydata.util import fileutil, hashutil, base32
 from allmydata.storage.server import StorageServer
 from allmydata.storage.mutable import MutableShareFile
-from allmydata.storage.immutable import BucketWriter, BucketReader
+from allmydata.storage.immutable import BucketWriter, BucketReader, ShareFile
 from allmydata.storage.common import DataTooLargeError, storage_index_to_dir, \
      UnknownMutableContainerVersionError, UnknownImmutableContainerVersionError
 from allmydata.storage.lease import LeaseInfo
@@ -2968,3 +2968,38 @@ class Stats(unittest.TestCase):
         self.failUnless(output["get"]["95_0_percentile"] is None, output)
         self.failUnless(output["get"]["99_0_percentile"] is None, output)
         self.failUnless(output["get"]["99_9_percentile"] is None, output)
+
+
+class ShareFileTests(unittest.TestCase):
+    """Tests for allmydata.storage.immutable.ShareFile."""
+
+    def get_sharefile(self):
+        sf = ShareFile(self.mktemp(), max_size=1000, create=True)
+        sf.write_share_data(0, b"abc")
+        sf.write_share_data(2, b"DEF")
+        # Should be b'abDEF' now.
+        return sf
+
+    def test_read_write(self):
+        """Basic writes can be read."""
+        sf = self.get_sharefile()
+        self.assertEqual(sf.read_share_data(0, 3), b"abD")
+        self.assertEqual(sf.read_share_data(1, 4), b"bDEF")
+
+    def test_reads_beyond_file_end(self):
+        """Reads beyond the file size are truncated."""
+        sf = self.get_sharefile()
+        self.assertEqual(sf.read_share_data(0, 10), b"abDEF")
+        self.assertEqual(sf.read_share_data(5, 10), b"")
+
+    def test_too_large_write(self):
+        """Can't do write larger than file size."""
+        sf = self.get_sharefile()
+        with self.assertRaises(DataTooLargeError):
+            sf.write_share_data(0, b"x" * 3000)
+
+    def test_no_leases_cancelled(self):
+        """If no leases were cancelled, IndexError is raised."""
+        sf = self.get_sharefile()
+        with self.assertRaises(IndexError):
+            sf.cancel_lease(b"garbage")

From 6f5c32c4616b2d0e8becdacf920caa8307acec93 Mon Sep 17 00:00:00 2001
From: Itamar Turner-Trauring <itamar@itamarst.org>
Date: Fri, 11 Sep 2020 14:51:20 -0400
Subject: [PATCH 04/68] News fragment.

---
 newsfragments/3409.minor | 0
 1 file changed, 0 insertions(+), 0 deletions(-)
 create mode 100644 newsfragments/3409.minor

diff --git a/newsfragments/3409.minor b/newsfragments/3409.minor
new file mode 100644
index 000000000..e69de29bb

From 7de84e32a3536024ceb244739629b09913f46c34 Mon Sep 17 00:00:00 2001
From: Itamar Turner-Trauring <itamar@itamarst.org>
Date: Fri, 11 Sep 2020 15:02:42 -0400
Subject: [PATCH 05/68] Port to Python 3.

---
 src/allmydata/storage/immutable.py | 13 ++++++++++++-
 src/allmydata/util/_python3.py     |  1 +
 2 files changed, 13 insertions(+), 1 deletion(-)

diff --git a/src/allmydata/storage/immutable.py b/src/allmydata/storage/immutable.py
index 8d83ec1b3..778c0ddf8 100644
--- a/src/allmydata/storage/immutable.py
+++ b/src/allmydata/storage/immutable.py
@@ -1,4 +1,15 @@
-from future.utils import bytes_to_native_str
+"""
+Ported to Python 3.
+"""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2, bytes_to_native_str
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
 
 import os, stat, struct, time
 
diff --git a/src/allmydata/util/_python3.py b/src/allmydata/util/_python3.py
index afdbea1f0..5a26d2cbc 100644
--- a/src/allmydata/util/_python3.py
+++ b/src/allmydata/util/_python3.py
@@ -37,6 +37,7 @@ PORTED_MODULES = [
     "allmydata.monitor",
     "allmydata.storage.crawler",
     "allmydata.storage.expirer",
+    "allmydata.storage.immutable",
     "allmydata.test.common_py3",
     "allmydata.uri",
     "allmydata.util._python3",

From 26a8a0bfc5d0d7447b081934c87e3075ffda1ef7 Mon Sep 17 00:00:00 2001
From: Itamar Turner-Trauring <itamar@itamarst.org>
Date: Mon, 14 Sep 2020 13:29:28 -0400
Subject: [PATCH 06/68] Make stats code deal with keys that are unicode.

---
 src/allmydata/stats.py | 17 ++++++++++++++++-
 1 file changed, 16 insertions(+), 1 deletion(-)

diff --git a/src/allmydata/stats.py b/src/allmydata/stats.py
index 34907b1fa..28a451ead 100644
--- a/src/allmydata/stats.py
+++ b/src/allmydata/stats.py
@@ -1,5 +1,7 @@
 from __future__ import print_function
 
+from past.builtins import unicode
+
 import json
 import os
 import pprint
@@ -155,6 +157,8 @@ class StatsProvider(Referenceable, service.MultiService):
         service.MultiService.startService(self)
 
     def count(self, name, delta=1):
+        if isinstance(name, unicode):
+            name = name.encode("utf-8")
         val = self.counters.setdefault(name, 0)
         self.counters[name] = val + delta
 
@@ -170,7 +174,18 @@ class StatsProvider(Referenceable, service.MultiService):
         return ret
 
     def remote_get_stats(self):
-        return self.get_stats()
+        # The remote API expects keys to be bytes:
+        def to_bytes(d):
+            result = {}
+            for (k, v) in d.items():
+                if isinstance(k, unicode):
+                    k = k.encode("utf-8")
+                result[k] = v
+            return result
+
+        stats = self.get_stats()
+        return {b"counters": to_bytes(stats["counters"]),
+                b"stats": to_bytes(stats["stats"])}
 
     def _connected(self, gatherer, nickname):
         gatherer.callRemoteOnly('provide', self, nickname or '')

From 947cb1c11bcb1c57ec3372eeefc0b59b88955f03 Mon Sep 17 00:00:00 2001
From: Itamar Turner-Trauring <itamar@itamarst.org>
Date: Mon, 14 Sep 2020 14:40:02 -0400
Subject: [PATCH 07/68] Tiny bit more test coverage for server.py.

---
 src/allmydata/test/test_storage.py | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/src/allmydata/test/test_storage.py b/src/allmydata/test/test_storage.py
index 4bbbcda30..080ddc0ef 100644
--- a/src/allmydata/test/test_storage.py
+++ b/src/allmydata/test/test_storage.py
@@ -702,6 +702,12 @@ class Server(unittest.TestCase):
         leases = list(ss.get_leases(b"si3"))
         self.failUnlessEqual(len(leases), 2)
 
+    def test_have_shares(self):
+        """By default the StorageServer has no shares."""
+        workdir = self.workdir("test_have_shares")
+        ss = StorageServer(workdir, b"\x00" * 20, readonly_storage=True)
+        self.assertFalse(ss.have_shares())
+
     def test_readonly(self):
         workdir = self.workdir("test_readonly")
         ss = StorageServer(workdir, b"\x00" * 20, readonly_storage=True)

From d84a7a61f30e181d83a00665ea32c9f498525b1a Mon Sep 17 00:00:00 2001
From: Itamar Turner-Trauring <itamar@itamarst.org>
Date: Mon, 14 Sep 2020 14:46:08 -0400
Subject: [PATCH 08/68] Port to Python 3.

---
 src/allmydata/storage/server.py | 17 ++++++++++++++---
 src/allmydata/util/_python3.py  |  1 +
 2 files changed, 15 insertions(+), 3 deletions(-)

diff --git a/src/allmydata/storage/server.py b/src/allmydata/storage/server.py
index c044a9fb0..2ba02aa9a 100644
--- a/src/allmydata/storage/server.py
+++ b/src/allmydata/storage/server.py
@@ -1,4 +1,15 @@
-from future.utils import bytes_to_native_str
+from __future__ import division
+from __future__ import absolute_import
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from past.utils import old_div
+from future.utils import bytes_to_native_str, PY2
+if PY2:
+    # Omit open() to get native behavior where open("w") always accepts native strings.
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
+
 import os, re, struct, time
 import weakref
 import six
@@ -146,7 +157,7 @@ class StorageServer(service.MultiService, Referenceable):
             stats["samplesize"] = count
             samples.sort()
             if count > 1:
-                stats["mean"] = sum(samples) / count
+                stats["mean"] = old_div(sum(samples), count)
             else:
                 stats["mean"] = None
 
@@ -671,7 +682,7 @@ class StorageServer(service.MultiService, Referenceable):
                 filename = os.path.join(bucketdir, sharenum_s)
                 msf = MutableShareFile(filename, self)
                 datavs[sharenum] = msf.readv(readv)
-        log.msg("returning shares %s" % (datavs.keys(),),
+        log.msg("returning shares %s" % (list(datavs.keys()),),
                 facility="tahoe.storage", level=log.NOISY, parent=lp)
         self.add_latency("readv", time.time() - start)
         return datavs
diff --git a/src/allmydata/util/_python3.py b/src/allmydata/util/_python3.py
index afdbea1f0..c707356a1 100644
--- a/src/allmydata/util/_python3.py
+++ b/src/allmydata/util/_python3.py
@@ -37,6 +37,7 @@ PORTED_MODULES = [
     "allmydata.monitor",
     "allmydata.storage.crawler",
     "allmydata.storage.expirer",
+    "allmydata.storage.server",
     "allmydata.test.common_py3",
     "allmydata.uri",
     "allmydata.util._python3",

From 31aa594290cc3794de11e033a8db970b5e5d225a Mon Sep 17 00:00:00 2001
From: Itamar Turner-Trauring <itamar@itamarst.org>
Date: Mon, 14 Sep 2020 14:47:26 -0400
Subject: [PATCH 09/68] Looks like float is fine for mean.

---
 src/allmydata/storage/server.py | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/src/allmydata/storage/server.py b/src/allmydata/storage/server.py
index 2ba02aa9a..e36e4a2f1 100644
--- a/src/allmydata/storage/server.py
+++ b/src/allmydata/storage/server.py
@@ -1,9 +1,11 @@
+"""
+Ported to Python 3.
+"""
 from __future__ import division
 from __future__ import absolute_import
 from __future__ import print_function
 from __future__ import unicode_literals
 
-from past.utils import old_div
 from future.utils import bytes_to_native_str, PY2
 if PY2:
     # Omit open() to get native behavior where open("w") always accepts native strings.
@@ -157,7 +159,7 @@ class StorageServer(service.MultiService, Referenceable):
             stats["samplesize"] = count
             samples.sort()
             if count > 1:
-                stats["mean"] = old_div(sum(samples), count)
+                stats["mean"] = sum(samples) / count
             else:
                 stats["mean"] = None
 

From 72f72491d0adf05d6f0208d254fb750116d5a357 Mon Sep 17 00:00:00 2001
From: Itamar Turner-Trauring <itamar@itamarst.org>
Date: Mon, 14 Sep 2020 14:48:39 -0400
Subject: [PATCH 10/68] News fragment.

---
 newsfragments/3415.minor | 0
 1 file changed, 0 insertions(+), 0 deletions(-)
 create mode 100644 newsfragments/3415.minor

diff --git a/newsfragments/3415.minor b/newsfragments/3415.minor
new file mode 100644
index 000000000..e69de29bb

From 596c4cec8a0d37588621342c0f600892715a8f8c Mon Sep 17 00:00:00 2001
From: Itamar Turner-Trauring <itamar@itamarst.org>
Date: Tue, 15 Sep 2020 14:14:41 -0400
Subject: [PATCH 11/68] News file.

---
 newsfragments/3416.minor | 0
 1 file changed, 0 insertions(+), 0 deletions(-)
 create mode 100644 newsfragments/3416.minor

diff --git a/newsfragments/3416.minor b/newsfragments/3416.minor
new file mode 100644
index 000000000..e69de29bb

From 7d8320b8439b04d7ff593dc35b2e050fca9689b4 Mon Sep 17 00:00:00 2001
From: Itamar Turner-Trauring <itamar@itamarst.org>
Date: Wed, 16 Sep 2020 11:13:23 -0400
Subject: [PATCH 12/68] Python 3 fixes.

---
 src/allmydata/client.py                      | 10 +++----
 src/allmydata/immutable/downloader/finder.py |  4 +--
 src/allmydata/immutable/downloader/node.py   |  3 +++
 src/allmydata/immutable/downloader/status.py |  2 +-
 src/allmydata/immutable/encode.py            |  6 ++---
 src/allmydata/immutable/upload.py            | 22 +++++++--------
 src/allmydata/node.py                        |  8 +++---
 src/allmydata/nodemaker.py                   |  8 +++---
 src/allmydata/test/no_network.py             | 28 ++++++++++++++------
 src/allmydata/test/test_no_network.py        |  4 +--
 10 files changed, 56 insertions(+), 39 deletions(-)

diff --git a/src/allmydata/client.py b/src/allmydata/client.py
index 845290ac0..dc2e9fcaa 100644
--- a/src/allmydata/client.py
+++ b/src/allmydata/client.py
@@ -147,7 +147,7 @@ def _make_secret():
     Returns a base32-encoded random secret of hashutil.CRYPTO_VAL_SIZE
     bytes.
     """
-    return base32.b2a(os.urandom(hashutil.CRYPTO_VAL_SIZE)) + "\n"
+    return base32.b2a(os.urandom(hashutil.CRYPTO_VAL_SIZE)) + b"\n"
 
 
 class SecretHolder(object):
@@ -739,12 +739,12 @@ class _Client(node.Node, pollmixin.PollMixin):
         # existing key
         def _make_key():
             private_key, _ = ed25519.create_signing_keypair()
-            return ed25519.string_from_signing_key(private_key) + "\n"
+            return ed25519.string_from_signing_key(private_key) + b"\n"
 
         private_key_str = self.config.get_or_create_private_config("node.privkey", _make_key)
         private_key, public_key = ed25519.signing_keypair_from_string(private_key_str)
         public_key_str = ed25519.string_from_verifying_key(public_key)
-        self.config.write_config_file("node.pubkey", public_key_str + "\n", "w")
+        self.config.write_config_file("node.pubkey", public_key_str + b"\n", "wb")
         self._node_private_key = private_key
         self._node_public_key = public_key
 
@@ -971,7 +971,7 @@ class _Client(node.Node, pollmixin.PollMixin):
         """
         self.config.write_private_config(
             'api_auth_token',
-            urlsafe_b64encode(os.urandom(32)) + '\n',
+            urlsafe_b64encode(os.urandom(32)) + b'\n',
         )
 
     def get_storage_broker(self):
@@ -1021,7 +1021,7 @@ class _Client(node.Node, pollmixin.PollMixin):
         c = ControlServer()
         c.setServiceParent(self)
         control_url = self.control_tub.registerReference(c)
-        self.config.write_private_config("control.furl", control_url + "\n")
+        self.config.write_private_config("control.furl", control_url + b"\n")
 
     def init_helper(self):
         self.helper = Helper(self.config.get_config_path("helper"),
diff --git a/src/allmydata/immutable/downloader/finder.py b/src/allmydata/immutable/downloader/finder.py
index 43d5b78a4..b7dfbe491 100644
--- a/src/allmydata/immutable/downloader/finder.py
+++ b/src/allmydata/immutable/downloader/finder.py
@@ -106,7 +106,7 @@ class ShareFinder(object):
         server = None
         try:
             if self._servers:
-                server = self._servers.next()
+                server = next(self._servers)
         except StopIteration:
             self._servers = None
 
@@ -175,7 +175,7 @@ class ShareFinder(object):
                  shnums=shnums_s, name=server.get_name(),
                  level=log.NOISY, parent=lp, umid="0fcEZw")
         shares = []
-        for shnum, bucket in buckets.iteritems():
+        for shnum, bucket in buckets.items():
             s = self._create_share(shnum, bucket, server, dyhb_rtt)
             shares.append(s)
         self._deliver_shares(shares)
diff --git a/src/allmydata/immutable/downloader/node.py b/src/allmydata/immutable/downloader/node.py
index f67278132..1af81921a 100644
--- a/src/allmydata/immutable/downloader/node.py
+++ b/src/allmydata/immutable/downloader/node.py
@@ -361,6 +361,9 @@ class DownloadNode(object):
+        block_size = segment_size // k
+        tail_block_size = tail_segment_padded // k
+
         return { "tail_segment_size": tail_segment_size,
                  "tail_segment_padded": tail_segment_padded,
                  "num_segments": num_segments,
                  "block_size": block_size,
                  "tail_block_size": tail_block_size,
                  }
 
 
diff --git a/src/allmydata/immutable/downloader/status.py b/src/allmydata/immutable/downloader/status.py
index fd30bc5fe..4cb1cc513 100644
--- a/src/allmydata/immutable/downloader/status.py
+++ b/src/allmydata/immutable/downloader/status.py
@@ -89,7 +89,7 @@ class DownloadStatus(object):
     def __init__(self, storage_index, size):
         self.storage_index = storage_index
         self.size = size
-        self.counter = self.statusid_counter.next()
+        self.counter = next(self.statusid_counter)
         self.helper = False
 
         self.first_timestamp = None
diff --git a/src/allmydata/immutable/encode.py b/src/allmydata/immutable/encode.py
index 62bb9784c..fb6733cf7 100644
--- a/src/allmydata/immutable/encode.py
+++ b/src/allmydata/immutable/encode.py
@@ -205,7 +205,7 @@ class Encoder(object):
             assert IStorageBucketWriter.providedBy(landlords[k])
         self.landlords = landlords.copy()
         assert isinstance(servermap, dict)
-        for v in servermap.itervalues():
+        for v in servermap.values():
             assert isinstance(v, set)
         self.servermap = servermap.copy()
 
@@ -410,7 +410,7 @@ class Encoder(object):
             assert isinstance(data, (list,tuple))
             if self._aborted:
                 raise UploadAborted()
-            data = "".join(data)
+            data = b"".join(data)
             precondition(len(data) <= read_size, len(data), read_size)
             if not allow_short:
                 precondition(len(data) == read_size, len(data), read_size)
@@ -418,7 +418,7 @@ class Encoder(object):
             self._crypttext_hasher.update(data)
             if allow_short and len(data) < read_size:
                 # padding
-                data += "\x00" * (read_size - len(data))
+                data += b"\x00" * (read_size - len(data))
             encrypted_pieces = [data[i:i+input_chunk_size]
                                 for i in range(0, len(data), input_chunk_size)]
             return encrypted_pieces
diff --git a/src/allmydata/immutable/upload.py b/src/allmydata/immutable/upload.py
index 58cbea2ef..dbe348a26 100644
--- a/src/allmydata/immutable/upload.py
+++ b/src/allmydata/immutable/upload.py
@@ -27,7 +27,7 @@ from allmydata.interfaces import IUploadable, IUploader, IUploadResults, \
      DEFAULT_MAX_SEGMENT_SIZE, IProgress, IPeerSelector
 from allmydata.immutable import layout
 
-from six.moves import cStringIO as StringIO
+from io import BytesIO
 from .happiness_upload import share_placement, calculate_happiness
 
 from ..util.eliotutil import (
@@ -226,7 +226,7 @@ EXTENSION_SIZE = 1000
 # this.
 
 def pretty_print_shnum_to_servers(s):
-    return ', '.join([ "sh%s: %s" % (k, '+'.join([idlib.shortnodeid_b2a(x) for x in v])) for k, v in s.iteritems() ])
+    return ', '.join([ "sh%s: %s" % (k, '+'.join([idlib.shortnodeid_b2a(x) for x in v])) for k, v in s.items() ])
 
 class ServerTracker(object):
     def __init__(self, server,
@@ -283,7 +283,7 @@ class ServerTracker(object):
         #log.msg("%s._got_reply(%s)" % (self, (alreadygot, buckets)))
         (alreadygot, buckets) = alreadygot_and_buckets
         b = {}
-        for sharenum, rref in buckets.iteritems():
+        for sharenum, rref in buckets.items():
             bp = self.wbp_class(rref, self._server, self.sharesize,
                                 self.blocksize,
                                 self.num_segments,
@@ -352,7 +352,7 @@ class PeerSelector(object):
 
     def get_sharemap_of_preexisting_shares(self):
         preexisting = dictutil.DictOfSets()
-        for server, shares in self.existing_shares.iteritems():
+        for server, shares in self.existing_shares.items():
             for share in shares:
                 preexisting.add(share, server)
         return preexisting
@@ -700,7 +700,7 @@ class Tahoe2ServerSelector(log.PrefixingLogMixin):
                % (self, self._get_progress_message(),
                   pretty_print_shnum_to_servers(merged),
                   [', '.join([str_shareloc(k,v)
-                              for k,v in st.buckets.iteritems()])
+                              for k,v in st.buckets.items()])
                    for st in self.use_trackers],
                   pretty_print_shnum_to_servers(self.preexisting_shares))
         self.log(msg, level=log.OPERATIONAL)
@@ -951,7 +951,7 @@ class EncryptAnUploadable(object):
             self._encryptor = aes.create_encryptor(key)
 
             storage_index = storage_index_hash(key)
-            assert isinstance(storage_index, str)
+            assert isinstance(storage_index, bytes)
             # There's no point to having the SI be longer than the key, so we
             # specify that it is truncated to the same 128 bits as the AES key.
             assert len(storage_index) == 16  # SHA-256 truncated to 128b
@@ -1120,7 +1120,7 @@ class UploadStatus(object):
         self.progress = [0.0, 0.0, 0.0]
         self.active = True
         self.results = None
-        self.counter = self.statusid_counter.next()
+        self.counter = next(self.statusid_counter)
         self.started = time.time()
 
     def get_started(self):
@@ -1281,7 +1281,7 @@ class CHKUploader(object):
         """
         msgtempl = "set_shareholders; upload_trackers is %s, already_serverids is %s"
         values = ([', '.join([str_shareloc(k,v)
-                              for k,v in st.buckets.iteritems()])
+                              for k,v in st.buckets.items()])
                    for st in upload_trackers], already_serverids)
         self.log(msgtempl % values, level=log.OPERATIONAL)
         # record already-present shares in self._results
@@ -1697,7 +1697,7 @@ class FileHandle(BaseUploadable):
         then the hash will be hashed together with the string in the
         "convergence" argument to form the encryption key.
         """
-        assert convergence is None or isinstance(convergence, str), (convergence, type(convergence))
+        assert convergence is None or isinstance(convergence, bytes), (convergence, type(convergence))
         self._filehandle = filehandle
         self._key = None
         self.convergence = convergence
@@ -1787,8 +1787,8 @@ class Data(FileHandle):
         then the hash will be hashed together with the string in the
         "convergence" argument to form the encryption key.
         """
-        assert convergence is None or isinstance(convergence, str), (convergence, type(convergence))
-        FileHandle.__init__(self, StringIO(data), convergence=convergence)
+        assert convergence is None or isinstance(convergence, bytes), (convergence, type(convergence))
+        FileHandle.__init__(self, BytesIO(data), convergence=convergence)
 
 @implementer(IUploader)
 class Uploader(service.MultiService, log.PrefixingLogMixin):
diff --git a/src/allmydata/node.py b/src/allmydata/node.py
index 12af43339..101482382 100644
--- a/src/allmydata/node.py
+++ b/src/allmydata/node.py
@@ -362,7 +362,7 @@ class _Config(object):
             if default is _None:
                 raise MissingConfigEntry("The required configuration file %s is missing."
                                          % (quote_output(privname),))
-            if isinstance(default, basestring):
+            if isinstance(default, (bytes, unicode)):
                 value = default
             else:
                 value = default()
@@ -375,7 +375,7 @@ class _Config(object):
         return it.
         """
         privname = os.path.join(self._basedir, "private", name)
-        with open(privname, "w") as f:
+        with open(privname, "wb") as f:
             f.write(value)
 
     def get_private_config(self, name, default=_None):
@@ -759,7 +759,9 @@ class Node(service.MultiService):
         """
         Initialize/create a directory for temporary files.
         """
-        tempdir_config = self.config.get_config("node", "tempdir", "tmp").decode('utf-8')
+        tempdir_config = self.config.get_config("node", "tempdir", "tmp")
+        if isinstance(tempdir_config, bytes):
+            tempdir_config = tempdir_config.decode('utf-8')
         tempdir = self.config.get_config_path(tempdir_config)
         if not os.path.exists(tempdir):
             fileutil.make_dirs(tempdir)
diff --git a/src/allmydata/nodemaker.py b/src/allmydata/nodemaker.py
index 72426ae8b..8e68d92fe 100644
--- a/src/allmydata/nodemaker.py
+++ b/src/allmydata/nodemaker.py
@@ -50,8 +50,8 @@ class NodeMaker(object):
 
     def create_from_cap(self, writecap, readcap=None, deep_immutable=False, name=u"<unknown name>"):
         # this returns synchronously. It starts with a "cap string".
-        assert isinstance(writecap, (str, type(None))), type(writecap)
-        assert isinstance(readcap,  (str, type(None))), type(readcap)
+        assert isinstance(writecap, (bytes, type(None))), type(writecap)
+        assert isinstance(readcap,  (bytes, type(None))), type(readcap)
 
         bigcap = writecap or readcap
         if not bigcap:
@@ -63,9 +63,9 @@ class NodeMaker(object):
         # The name doesn't matter for caching since it's only used in the error
         # attribute of an UnknownNode, and we don't cache those.
         if deep_immutable:
-            memokey = "I" + bigcap
+            memokey = b"I" + bigcap
         else:
-            memokey = "M" + bigcap
+            memokey = b"M" + bigcap
         if memokey in self._node_cache:
             node = self._node_cache[memokey]
         else:
diff --git a/src/allmydata/test/no_network.py b/src/allmydata/test/no_network.py
index 0ccead0b9..1fd2bead2 100644
--- a/src/allmydata/test/no_network.py
+++ b/src/allmydata/test/no_network.py
@@ -1,3 +1,7 @@
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
 
 # This contains a test harness that creates a full Tahoe grid in a single
 # process (actually in a single MultiService) which does not use the network.
@@ -13,6 +17,11 @@
 # Tubs, so it is not useful for tests that involve a Helper or the
 # control.furl .
 
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+from past.builtins import unicode
+
 import os
 from zope.interface import implementer
 from twisted.application import service
@@ -256,6 +265,8 @@ class _NoNetworkClient(_Client):
     def init_stub_client(self):
         pass
     #._servers will be set by the NoNetworkGrid which creates us
+    def init_web(self, *args, **kwargs):
+        pass
 
 class SimpleStats(object):
     def __init__(self):
@@ -308,6 +319,7 @@ class NoNetworkGrid(service.MultiService):
             d.addCallback(lambda c: self.clients.append(c))
 
             def _bad(f):
+                print(f)
                 self._setup_errors.append(f)
             d.addErrback(_bad)
 
@@ -323,7 +335,7 @@ class NoNetworkGrid(service.MultiService):
 
     @defer.inlineCallbacks
     def make_client(self, i, write_config=True):
-        clientid = hashutil.tagged_hash("clientid", str(i))[:20]
+        clientid = hashutil.tagged_hash(b"clientid", b"%d" % i)[:20]
         clientdir = os.path.join(self.basedir, "clients",
                                  idlib.shortnodeid_b2a(clientid))
         fileutil.make_dirs(clientdir)
@@ -358,7 +370,7 @@ class NoNetworkGrid(service.MultiService):
         defer.returnValue(c)
 
     def make_server(self, i, readonly=False):
-        serverid = hashutil.tagged_hash("serverid", str(i))[:20]
+        serverid = hashutil.tagged_hash(b"serverid", b"%d" % i)[:20]
         serverdir = os.path.join(self.basedir, "servers",
                                  idlib.shortnodeid_b2a(serverid), "storage")
         fileutil.make_dirs(serverdir)
@@ -381,18 +393,18 @@ class NoNetworkGrid(service.MultiService):
         self.rebuild_serverlist()
 
     def get_all_serverids(self):
-        return self.proxies_by_id.keys()
+        return list(self.proxies_by_id.keys())
 
     def rebuild_serverlist(self):
         self._check_clients()
-        self.all_servers = frozenset(self.proxies_by_id.values())
+        self.all_servers = frozenset(list(self.proxies_by_id.values()))
         for c in self.clients:
             c._servers = self.all_servers
 
     def remove_server(self, serverid):
         # it's enough to remove the server from c._servers (we don't actually
         # have to detach and stopService it)
-        for i,ss in self.servers_by_number.items():
+        for i,ss in list(self.servers_by_number.items()):
             if ss.my_nodeid == serverid:
                 del self.servers_by_number[i]
                 break
@@ -422,7 +434,7 @@ class NoNetworkGrid(service.MultiService):
 
     def nuke_from_orbit(self):
         """ Empty all share directories in this grid. It's the only way to be sure ;-) """
-        for server in self.servers_by_number.values():
+        for server in list(self.servers_by_number.values()):
             for prefixdir in os.listdir(server.sharedir):
                 if prefixdir != 'incoming':
                     fileutil.rm_dir(os.path.join(server.sharedir, prefixdir))
@@ -506,7 +518,7 @@ class GridTestMixin(object):
         si = tahoe_uri.from_string(uri).get_storage_index()
         prefixdir = storage_index_to_dir(si)
         shares = []
-        for i,ss in self.g.servers_by_number.items():
+        for i,ss in list(self.g.servers_by_number.items()):
             serverid = ss.my_nodeid
             basedir = os.path.join(ss.sharedir, prefixdir)
             if not os.path.exists(basedir):
@@ -527,7 +539,7 @@ class GridTestMixin(object):
         return shares
 
     def restore_all_shares(self, shares):
-        for sharefile, data in shares.items():
+        for sharefile, data in list(shares.items()):
             with open(sharefile, "wb") as f:
                 f.write(data)
 
diff --git a/src/allmydata/test/test_no_network.py b/src/allmydata/test/test_no_network.py
index 38c44de95..a61211081 100644
--- a/src/allmydata/test/test_no_network.py
+++ b/src/allmydata/test/test_no_network.py
@@ -41,8 +41,8 @@ class Harness(unittest.TestCase):
         g.setServiceParent(self.s)
 
         c0 = g.clients[0]
-        DATA = "Data to upload" * 100
-        data = Data(DATA, "")
+        DATA = b"Data to upload" * 100
+        data = Data(DATA, b"")
         d = c0.upload(data)
         def _uploaded(res):
             n = c0.create_node_from_uri(res.get_uri())

From c035ea06980697f37daf4118998bbfd3791847c8 Mon Sep 17 00:00:00 2001
From: Itamar Turner-Trauring <itamar@itamarst.org>
Date: Wed, 16 Sep 2020 11:20:08 -0400
Subject: [PATCH 13/68] It's better if extension keys are unicode (native
 strings for Python 3, same key as bytes on Python 2) so as not to modify lots
 and lots and lots of code.

---
 src/allmydata/test/test_uri.py | 16 ++++++++--------
 src/allmydata/uri.py           | 10 +++++-----
 2 files changed, 13 insertions(+), 13 deletions(-)

diff --git a/src/allmydata/test/test_uri.py b/src/allmydata/test/test_uri.py
index f89fae151..3e21c1674 100644
--- a/src/allmydata/test/test_uri.py
+++ b/src/allmydata/test/test_uri.py
@@ -210,17 +210,17 @@ class Extension(testutil.ReallyEqualMixin, unittest.TestCase):
                 }
         ext = uri.pack_extension(data)
         d = uri.unpack_extension(ext)
-        self.failUnlessReallyEqual(d[b"stuff"], b"value")
-        self.failUnlessReallyEqual(d[b"size"], 12)
-        self.failUnlessReallyEqual(d[b"big_hash"], hashutil.tagged_hash(b"foo", b"bar"))
+        self.failUnlessReallyEqual(d["stuff"], b"value")
+        self.failUnlessReallyEqual(d["size"], 12)
+        self.failUnlessReallyEqual(d["big_hash"], hashutil.tagged_hash(b"foo", b"bar"))
 
         readable = uri.unpack_extension_readable(ext)
-        self.failUnlessReallyEqual(readable[b"needed_shares"], 3)
-        self.failUnlessReallyEqual(readable[b"stuff"], b"value")
-        self.failUnlessReallyEqual(readable[b"size"], 12)
-        self.failUnlessReallyEqual(readable[b"big_hash"],
+        self.failUnlessReallyEqual(readable["needed_shares"], 3)
+        self.failUnlessReallyEqual(readable["stuff"], b"value")
+        self.failUnlessReallyEqual(readable["size"], 12)
+        self.failUnlessReallyEqual(readable["big_hash"],
                              base32.b2a(hashutil.tagged_hash(b"foo", b"bar")))
-        self.failUnlessReallyEqual(readable[b"UEB_hash"],
+        self.failUnlessReallyEqual(readable["UEB_hash"],
                              base32.b2a(hashutil.uri_extension_hash(ext)))
 
 class Unknown(testutil.ReallyEqualMixin, unittest.TestCase):
diff --git a/src/allmydata/uri.py b/src/allmydata/uri.py
index b601226da..253cb109c 100644
--- a/src/allmydata/uri.py
+++ b/src/allmydata/uri.py
@@ -928,11 +928,11 @@ def unpack_extension(data):
         assert data[length:length+1] == b','
         data = data[length+1:]
 
-        d[key] = value
+        d[str(key, "utf-8")] = value
 
     # convert certain things to numbers
-    for intkey in (b'size', b'segment_size', b'num_segments',
-                   b'needed_shares', b'total_shares'):
+    for intkey in ('size', 'segment_size', 'num_segments',
+                   'needed_shares', 'total_shares'):
         if intkey in d:
             d[intkey] = int(d[intkey])
     return d
@@ -940,9 +940,9 @@ def unpack_extension(data):
 
 def unpack_extension_readable(data):
     unpacked = unpack_extension(data)
-    unpacked[b"UEB_hash"] = hashutil.uri_extension_hash(data)
+    unpacked["UEB_hash"] = hashutil.uri_extension_hash(data)
     for k in sorted(unpacked.keys()):
-        if b'hash' in k:
+        if 'hash' in k:
             unpacked[k] = base32.b2a(unpacked[k])
     return unpacked
 

From 985e8a0244f895bd93db436fa9b18c43ea267ecf Mon Sep 17 00:00:00 2001
From: Itamar Turner-Trauring <itamar@itamarst.org>
Date: Wed, 16 Sep 2020 11:21:17 -0400
Subject: [PATCH 14/68] More Python 3 changes.

---
 src/allmydata/immutable/downloader/node.py | 13 +++++--------
 src/allmydata/util/consumer.py             |  2 +-
 2 files changed, 6 insertions(+), 9 deletions(-)

diff --git a/src/allmydata/immutable/downloader/node.py b/src/allmydata/immutable/downloader/node.py
index 1af81921a..620ce05cf 100644
--- a/src/allmydata/immutable/downloader/node.py
+++ b/src/allmydata/immutable/downloader/node.py
@@ -353,17 +353,14 @@ class DownloadNode(object):
 
         # each segment is turned into N blocks. All but the last are of size
         # block_size, and the last is of size tail_block_size
-        block_size = segment_size / k
-        tail_block_size = tail_segment_padded / k
+        block_size = segment_size // k
+        tail_block_size = tail_segment_padded // k
 
         return { "tail_segment_size": tail_segment_size,
                  "tail_segment_padded": tail_segment_padded,
                  "num_segments": num_segments,
                  "block_size": block_size,
-                 "tail_block_size": tail_block_size,
-        block_size = segment_size // k
-        tail_block_size = tail_segment_padded // k
-
+                 "tail_block_size": tail_block_size
                  }
 
 
@@ -458,7 +455,7 @@ class DownloadNode(object):
 
         shares = []
         shareids = []
-        for (shareid, share) in blocks.iteritems():
+        for (shareid, share) in blocks.items():
             assert len(share) == block_size
             shareids.append(shareid)
             shares.append(share)
@@ -468,7 +465,7 @@ class DownloadNode(object):
         del shares
         def _process(buffers):
             decodetime = now() - start
-            segment = "".join(buffers)
+            segment = b"".join(buffers)
             assert len(segment) == decoded_size
             del buffers
             if tail:
diff --git a/src/allmydata/util/consumer.py b/src/allmydata/util/consumer.py
index c41115e5e..393d7dec5 100644
--- a/src/allmydata/util/consumer.py
+++ b/src/allmydata/util/consumer.py
@@ -36,5 +36,5 @@ def download_to_data(n, offset=0, size=None, progress=None):
     :param progress: None or an IProgress implementer
     """
     d = n.read(MemoryConsumer(progress=progress), offset, size)
-    d.addCallback(lambda mc: "".join(mc.chunks))
+    d.addCallback(lambda mc: b"".join(mc.chunks))
     return d

From 7cb574f7c584a8d619732f661fa47acfb8e4396e Mon Sep 17 00:00:00 2001
From: Itamar Turner-Trauring <itamar@itamarst.org>
Date: Wed, 16 Sep 2020 11:23:11 -0400
Subject: [PATCH 15/68] Port to Python 3.

---
 src/allmydata/test/no_network.py      |  3 +++
 src/allmydata/test/test_no_network.py | 13 ++++++++++++-
 src/allmydata/util/_python3.py        |  2 ++
 3 files changed, 17 insertions(+), 1 deletion(-)

diff --git a/src/allmydata/test/no_network.py b/src/allmydata/test/no_network.py
index 1fd2bead2..4a75920b2 100644
--- a/src/allmydata/test/no_network.py
+++ b/src/allmydata/test/no_network.py
@@ -1,3 +1,6 @@
+"""
+Ported to Python 3.
+"""
 from __future__ import absolute_import
 from __future__ import division
 from __future__ import print_function
diff --git a/src/allmydata/test/test_no_network.py b/src/allmydata/test/test_no_network.py
index a61211081..b1aa1350a 100644
--- a/src/allmydata/test/test_no_network.py
+++ b/src/allmydata/test/test_no_network.py
@@ -1,5 +1,16 @@
+"""
+Test the NoNetworkGrid test harness.
 
-# Test the NoNetworkGrid test harness
+Ported to Python 3.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
 
 from twisted.trial import unittest
 from twisted.application import service
diff --git a/src/allmydata/util/_python3.py b/src/allmydata/util/_python3.py
index 2b10854a9..c17b8cff7 100644
--- a/src/allmydata/util/_python3.py
+++ b/src/allmydata/util/_python3.py
@@ -42,6 +42,7 @@ PORTED_MODULES = [
     "allmydata.storage.mutable",
     "allmydata.storage.shares",
     "allmydata.test.common_py3",
+    "allmydata.test.no_network",
     "allmydata.uri",
     "allmydata.util._python3",
     "allmydata.util.abbreviate",
@@ -91,6 +92,7 @@ PORTED_TEST_MODULES = [
     "allmydata.test.test_log",
     "allmydata.test.test_monitor",
     "allmydata.test.test_netstring",
+    "allmydata.test.test_no_network",
     "allmydata.test.test_observer",
     "allmydata.test.test_pipeline",
     "allmydata.test.test_python3",

From bea1d657f32a8a4191e902e05a072980f076392b Mon Sep 17 00:00:00 2001
From: Itamar Turner-Trauring <itamar@itamarst.org>
Date: Wed, 16 Sep 2020 14:37:11 -0400
Subject: [PATCH 16/68] Better debug output.

---
 src/allmydata/test/common_py3.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/allmydata/test/common_py3.py b/src/allmydata/test/common_py3.py
index 50fb02ff7..b49078f93 100644
--- a/src/allmydata/test/common_py3.py
+++ b/src/allmydata/test/common_py3.py
@@ -123,7 +123,7 @@ class ShouldFailMixin(object):
 class ReallyEqualMixin(object):
     def failUnlessReallyEqual(self, a, b, msg=None):
         self.assertEqual(a, b, msg)
-        self.assertEqual(type(a), type(b), "a :: %r, b :: %r, %r" % (a, b, msg))
+        self.assertEqual(type(a), type(b), "a :: %r (%s), b :: %r (%s), %r" % (a, b, type(a), type(b), msg))
 
 
 def skip_if_cannot_represent_filename(u):

From e8743a607fb978d59c2e2a9cb45fc1beb62dec86 Mon Sep 17 00:00:00 2001
From: Itamar Turner-Trauring <itamar@itamarst.org>
Date: Wed, 16 Sep 2020 14:37:16 -0400
Subject: [PATCH 17/68] Fix failing tests.

---
 src/allmydata/storage/server.py      | 27 +++++++++++++++------------
 src/allmydata/test/test_deepcheck.py |  3 ++-
 2 files changed, 17 insertions(+), 13 deletions(-)

diff --git a/src/allmydata/storage/server.py b/src/allmydata/storage/server.py
index e36e4a2f1..845d3ac1a 100644
--- a/src/allmydata/storage/server.py
+++ b/src/allmydata/storage/server.py
@@ -6,10 +6,11 @@ from __future__ import absolute_import
 from __future__ import print_function
 from __future__ import unicode_literals
 
-from future.utils import bytes_to_native_str, PY2
+from future.utils import bytes_to_native_str, PY2, native_str_to_bytes
 if PY2:
-    # Omit open() to get native behavior where open("w") always accepts native strings.
-    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+    # Omit open() to get native behavior where open("w") always accepts native
+    # strings. Omit bytes so we don't leak future's custom bytes.
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, pow, round, super, dict, list, object, range, str, max, min  # noqa: F401
 
 
 import os, re, struct, time
@@ -241,16 +242,18 @@ class StorageServer(service.MultiService, Referenceable):
             # We're on a platform that has no API to get disk stats.
             remaining_space = 2**64
 
-        version = { "http://allmydata.org/tahoe/protocols/storage/v1" :
-                    { "maximum-immutable-share-size": remaining_space,
-                      "maximum-mutable-share-size": MAX_MUTABLE_SHARE_SIZE,
-                      "available-space": remaining_space,
-                      "tolerates-immutable-read-overrun": True,
-                      "delete-mutable-shares-with-zero-length-writev": True,
-                      "fills-holes-with-zero-bytes": True,
-                      "prevents-read-past-end-of-share-data": True,
+        # Unicode strings might be nicer, but for now sticking to bytes since
+        # this is what the wire protocol has always been.
+        version = { b"http://allmydata.org/tahoe/protocols/storage/v1" :
+                    { b"maximum-immutable-share-size": remaining_space,
+                      b"maximum-mutable-share-size": MAX_MUTABLE_SHARE_SIZE,
+                      b"available-space": remaining_space,
+                      b"tolerates-immutable-read-overrun": True,
+                      b"delete-mutable-shares-with-zero-length-writev": True,
+                      b"fills-holes-with-zero-bytes": True,
+                      b"prevents-read-past-end-of-share-data": True,
                       },
-                    "application-version": str(allmydata.__full_version__),
+                    b"application-version": allmydata.__full_version__.encode("utf-8"),
                     }
         return version
 
diff --git a/src/allmydata/test/test_deepcheck.py b/src/allmydata/test/test_deepcheck.py
index 90a27b424..ea3ba6338 100644
--- a/src/allmydata/test/test_deepcheck.py
+++ b/src/allmydata/test/test_deepcheck.py
@@ -1,3 +1,4 @@
+from future.utils import native_str
 
 import os, json, urllib
 from twisted.trial import unittest
@@ -945,7 +946,7 @@ class DeepCheckWebBad(DeepCheckBase, unittest.TestCase):
     def _corrupt_some_shares(self, node):
         for (shnum, serverid, sharefile) in self.find_uri_shares(node.get_uri()):
             if shnum in (0,1):
-                yield run_cli("debug", "corrupt-share", sharefile)
+                yield run_cli("debug", "corrupt-share", native_str(sharefile))
 
     def _delete_most_shares(self, node):
         self.delete_shares_numbered(node.get_uri(), range(1,10))

From 2588e757e51753f4958a8ee2e24381b87cac2069 Mon Sep 17 00:00:00 2001
From: Itamar Turner-Trauring <itamar@itamarst.org>
Date: Wed, 16 Sep 2020 14:42:39 -0400
Subject: [PATCH 18/68] Only semi-break the web service.

---
 src/allmydata/test/no_network.py | 9 ++++++---
 1 file changed, 6 insertions(+), 3 deletions(-)

diff --git a/src/allmydata/test/no_network.py b/src/allmydata/test/no_network.py
index 4a75920b2..eb2bfd1a1 100644
--- a/src/allmydata/test/no_network.py
+++ b/src/allmydata/test/no_network.py
@@ -20,7 +20,7 @@ from __future__ import unicode_literals
 # Tubs, so it is not useful for tests that involve a Helper or the
 # control.furl .
 
-from future.utils import PY2
+from future.utils import PY2, PY3
 if PY2:
     from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
 from past.builtins import unicode
@@ -268,8 +268,11 @@ class _NoNetworkClient(_Client):
     def init_stub_client(self):
         pass
     #._servers will be set by the NoNetworkGrid which creates us
-    def init_web(self, *args, **kwargs):
-        pass
+
+    if PY3:
+        def init_web(self, *args, **kwargs):
+            print("Web service is temporarily disabled until nevow is gone.")
+
 
 class SimpleStats(object):
     def __init__(self):

From 498e69c72e01f198f38bf24f46cb08079cb21aed Mon Sep 17 00:00:00 2001
From: Itamar Turner-Trauring <itamar@itamarst.org>
Date: Wed, 16 Sep 2020 14:57:55 -0400
Subject: [PATCH 19/68] Some progress.

---
 src/allmydata/immutable/checker.py | 20 +++++------
 src/allmydata/test/test_encode.py  | 56 ++++++++++++++++++------------
 2 files changed, 43 insertions(+), 33 deletions(-)

diff --git a/src/allmydata/immutable/checker.py b/src/allmydata/immutable/checker.py
index 36bf735dc..73d6a0263 100644
--- a/src/allmydata/immutable/checker.py
+++ b/src/allmydata/immutable/checker.py
@@ -117,7 +117,7 @@ class ValidatedExtendedURIProxy(object):
 
 
         # Next: things that are optional and not redundant: crypttext_hash
-        if d.has_key('crypttext_hash'):
+        if 'crypttext_hash' in d:
             self.crypttext_hash = d['crypttext_hash']
             if len(self.crypttext_hash) != CRYPTO_VAL_SIZE:
                 raise BadURIExtension('crypttext_hash is required to be hashutil.CRYPTO_VAL_SIZE bytes, not %s bytes' % (len(self.crypttext_hash),))
@@ -126,11 +126,11 @@ class ValidatedExtendedURIProxy(object):
         # Next: things that are optional, redundant, and required to be
         # consistent: codec_name, codec_params, tail_codec_params,
         # num_segments, size, needed_shares, total_shares
-        if d.has_key('codec_name'):
+        if 'codec_name' in d:
             if d['codec_name'] != "crs":
                 raise UnsupportedErasureCodec(d['codec_name'])
 
-        if d.has_key('codec_params'):
+        if 'codec_params' in d:
             ucpss, ucpns, ucpts = codec.parse_params(d['codec_params'])
             if ucpss != self.segment_size:
                 raise BadURIExtension("inconsistent erasure code params: "
@@ -145,7 +145,7 @@ class ValidatedExtendedURIProxy(object):
                                       "self._verifycap.total_shares: %s" %
                                       (ucpts, self._verifycap.total_shares))
 
-        if d.has_key('tail_codec_params'):
+        if 'tail_codec_params' in d:
             utcpss, utcpns, utcpts = codec.parse_params(d['tail_codec_params'])
             if utcpss != self.tail_segment_size:
                 raise BadURIExtension("inconsistent erasure code params: utcpss: %s != "
@@ -162,7 +162,7 @@ class ValidatedExtendedURIProxy(object):
                                       "self._verifycap.total_shares: %s" % (utcpts,
                                                                             self._verifycap.total_shares))
 
-        if d.has_key('num_segments'):
+        if 'num_segments' in d:
             if d['num_segments'] != self.num_segments:
                 raise BadURIExtension("inconsistent num_segments: size: %s, "
                                       "segment_size: %s, computed_num_segments: %s, "
@@ -170,18 +170,18 @@ class ValidatedExtendedURIProxy(object):
                                                                 self.segment_size,
                                                                 self.num_segments, d['num_segments']))
 
-        if d.has_key('size'):
+        if 'size' in d:
             if d['size'] != self._verifycap.size:
                 raise BadURIExtension("inconsistent size: URI size: %s, UEB size: %s" %
                                       (self._verifycap.size, d['size']))
 
-        if d.has_key('needed_shares'):
+        if 'needed_shares' in d:
             if d['needed_shares'] != self._verifycap.needed_shares:
                 raise BadURIExtension("inconsistent needed shares: URI needed shares: %s, UEB "
                                       "needed shares: %s" % (self._verifycap.total_shares,
                                                              d['needed_shares']))
 
-        if d.has_key('total_shares'):
+        if 'total_shares' in d:
             if d['total_shares'] != self._verifycap.total_shares:
                 raise BadURIExtension("inconsistent total shares: URI total shares: %s, UEB "
                                       "total shares: %s" % (self._verifycap.total_shares,
@@ -428,7 +428,7 @@ class ValidatedReadBucketProxy(log.PrefixingLogMixin):
                 lines.append("%3d: %s" % (i, base32.b2a_or_none(h)))
             self.log(" sharehashes:\n" + "\n".join(lines) + "\n")
             lines = []
-            for i,h in blockhashes.items():
+            for i,h in list(blockhashes.items()):
                 lines.append("%3d: %s" % (i, base32.b2a_or_none(h)))
             log.msg(" blockhashes:\n" + "\n".join(lines) + "\n")
             raise BadOrMissingHash(le)
@@ -695,7 +695,7 @@ class Checker(log.PrefixingLogMixin):
             bucketdict, success = result
 
             shareverds = []
-            for (sharenum, bucket) in bucketdict.items():
+            for (sharenum, bucket) in list(bucketdict.items()):
                 d = self._download_and_verify(s, sharenum, bucket)
                 shareverds.append(d)
 
diff --git a/src/allmydata/test/test_encode.py b/src/allmydata/test/test_encode.py
index 68d62b90a..7cec5a7d3 100644
--- a/src/allmydata/test/test_encode.py
+++ b/src/allmydata/test/test_encode.py
@@ -1,3 +1,13 @@
+from __future__ import division
+from __future__ import absolute_import
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+from past.builtins import chr as byteschr, long
+
 from zope.interface import implementer
 from twisted.trial import unittest
 from twisted.internet import defer
@@ -15,7 +25,7 @@ class LostPeerError(Exception):
     pass
 
 def flip_bit(good): # flips the last bit
-    return good[:-1] + chr(ord(good[-1]) ^ 0x01)
+    return good[:-1] + byteschr(ord(good[-1]) ^ 0x01)
 
 @implementer(IStorageBucketWriter, IStorageBucketReader)
 class FakeBucketReaderWriterProxy(object):
@@ -158,7 +168,7 @@ class FakeBucketReaderWriterProxy(object):
 
 
 def make_data(length):
-    data = "happy happy joy joy" * 100
+    data = b"happy happy joy joy" * 100
     assert length <= len(data)
     return data[:length]
 
@@ -173,32 +183,32 @@ class ValidatedExtendedURIProxy(unittest.TestCase):
     if _TMP % K != 0:
         _TMP += (K - (_TMP % K))
     TAIL_SEGSIZE = _TMP
-    _TMP = SIZE / SEGSIZE
+    _TMP = SIZE // SEGSIZE
     if SIZE % SEGSIZE != 0:
         _TMP += 1
     NUM_SEGMENTS = _TMP
     mindict = { 'segment_size': SEGSIZE,
-                'crypttext_root_hash': '0'*hashutil.CRYPTO_VAL_SIZE,
-                'share_root_hash': '1'*hashutil.CRYPTO_VAL_SIZE }
-    optional_consistent = { 'crypttext_hash': '2'*hashutil.CRYPTO_VAL_SIZE,
-                            'codec_name': "crs",
-                            'codec_params': "%d-%d-%d" % (SEGSIZE, K, M),
-                            'tail_codec_params': "%d-%d-%d" % (TAIL_SEGSIZE, K, M),
+                'crypttext_root_hash': b'0'*hashutil.CRYPTO_VAL_SIZE,
+                'share_root_hash': b'1'*hashutil.CRYPTO_VAL_SIZE }
+    optional_consistent = { 'crypttext_hash': b'2'*hashutil.CRYPTO_VAL_SIZE,
+                            'codec_name': b"crs",
+                            'codec_params': b"%d-%d-%d" % (SEGSIZE, K, M),
+                            'tail_codec_params': b"%d-%d-%d" % (TAIL_SEGSIZE, K, M),
                             'num_segments': NUM_SEGMENTS,
                             'size': SIZE,
                             'needed_shares': K,
                             'total_shares': M,
-                            'plaintext_hash': "anything",
-                            'plaintext_root_hash': "anything", }
+                            'plaintext_hash': b"anything",
+                            'plaintext_root_hash': b"anything", }
     # optional_inconsistent = { 'crypttext_hash': ('2'*(hashutil.CRYPTO_VAL_SIZE-1), "", 77),
     optional_inconsistent = { 'crypttext_hash': (77,),
-                              'codec_name': ("digital fountain", ""),
-                              'codec_params': ("%d-%d-%d" % (SEGSIZE, K-1, M),
-                                               "%d-%d-%d" % (SEGSIZE-1, K, M),
-                                               "%d-%d-%d" % (SEGSIZE, K, M-1)),
-                              'tail_codec_params': ("%d-%d-%d" % (TAIL_SEGSIZE, K-1, M),
-                                               "%d-%d-%d" % (TAIL_SEGSIZE-1, K, M),
-                                               "%d-%d-%d" % (TAIL_SEGSIZE, K, M-1)),
+                              'codec_name': (b"digital fountain", b""),
+                              'codec_params': (b"%d-%d-%d" % (SEGSIZE, K-1, M),
+                                               b"%d-%d-%d" % (SEGSIZE-1, K, M),
+                                               b"%d-%d-%d" % (SEGSIZE, K, M-1)),
+                              'tail_codec_params': (b"%d-%d-%d" % (TAIL_SEGSIZE, K-1, M),
+                                               b"%d-%d-%d" % (TAIL_SEGSIZE-1, K, M),
+                                               b"%d-%d-%d" % (TAIL_SEGSIZE, K, M-1)),
                               'num_segments': (NUM_SEGMENTS-1,),
                               'size': (SIZE-1,),
                               'needed_shares': (K-1,),
@@ -209,7 +219,7 @@ class ValidatedExtendedURIProxy(unittest.TestCase):
         uebhash = hashutil.uri_extension_hash(uebstring)
         fb = FakeBucketReaderWriterProxy()
         fb.put_uri_extension(uebstring)
-        verifycap = uri.CHKFileVerifierURI(storage_index='x'*16, uri_extension_hash=uebhash, needed_shares=self.K, total_shares=self.M, size=self.SIZE)
+        verifycap = uri.CHKFileVerifierURI(storage_index=b'x'*16, uri_extension_hash=uebhash, needed_shares=self.K, total_shares=self.M, size=self.SIZE)
         vup = checker.ValidatedExtendedURIProxy(fb, verifycap)
         return vup.start()
 
@@ -232,7 +242,7 @@ class ValidatedExtendedURIProxy(unittest.TestCase):
 
     def test_reject_insufficient(self):
         dl = []
-        for k in self.mindict.iterkeys():
+        for k in self.mindict.keys():
             insuffdict = self.mindict.copy()
             del insuffdict[k]
             d = self._test_reject(insuffdict)
@@ -241,7 +251,7 @@ class ValidatedExtendedURIProxy(unittest.TestCase):
 
     def test_accept_optional(self):
         dl = []
-        for k in self.optional_consistent.iterkeys():
+        for k in self.optional_consistent.keys():
             mydict = self.mindict.copy()
             mydict[k] = self.optional_consistent[k]
             d = self._test_accept(mydict)
@@ -250,7 +260,7 @@ class ValidatedExtendedURIProxy(unittest.TestCase):
 
     def test_reject_optional(self):
         dl = []
-        for k in self.optional_inconsistent.iterkeys():
+        for k in self.optional_inconsistent.keys():
             for v in self.optional_inconsistent[k]:
                 mydict = self.mindict.copy()
                 mydict[k] = v
@@ -398,7 +408,7 @@ class Roundtrip(GridTestMixin, unittest.TestCase):
         self.basedir = self.mktemp()
         self.set_up_grid()
         self.c0 = self.g.clients[0]
-        DATA = "p"*size
+        DATA = b"p"*size
         d = self.upload(DATA)
         d.addCallback(lambda n: download_to_data(n))
         def _downloaded(newdata):

From cecbc260fa11eedbdb62959f09bf85c3a533db22 Mon Sep 17 00:00:00 2001
From: Itamar Turner-Trauring <itamar@itamarst.org>
Date: Thu, 17 Sep 2020 11:43:35 -0400
Subject: [PATCH 20/68] Fix order.

---
 src/allmydata/test/common_py3.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/allmydata/test/common_py3.py b/src/allmydata/test/common_py3.py
index b49078f93..5b791fd0a 100644
--- a/src/allmydata/test/common_py3.py
+++ b/src/allmydata/test/common_py3.py
@@ -123,7 +123,7 @@ class ShouldFailMixin(object):
 class ReallyEqualMixin(object):
     def failUnlessReallyEqual(self, a, b, msg=None):
         self.assertEqual(a, b, msg)
-        self.assertEqual(type(a), type(b), "a :: %r (%s), b :: %r (%s), %r" % (a, b, type(a), type(b), msg))
+        self.assertEqual(type(a), type(b), "a :: %r (%s), b :: %r (%s), %r" % (a, type(a), b, type(b), msg))
 
 
 def skip_if_cannot_represent_filename(u):

From 03fd566e2c1f9d25332643d8d71de8f85b2533d1 Mon Sep 17 00:00:00 2001
From: Itamar Turner-Trauring <itamar@itamarst.org>
Date: Thu, 17 Sep 2020 12:37:10 -0400
Subject: [PATCH 21/68] Fix flake error.

---
 src/allmydata/storage/server.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/allmydata/storage/server.py b/src/allmydata/storage/server.py
index 845d3ac1a..14eb08f84 100644
--- a/src/allmydata/storage/server.py
+++ b/src/allmydata/storage/server.py
@@ -6,7 +6,7 @@ from __future__ import absolute_import
 from __future__ import print_function
 from __future__ import unicode_literals
 
-from future.utils import bytes_to_native_str, PY2, native_str_to_bytes
+from future.utils import bytes_to_native_str, PY2
 if PY2:
     # Omit open() to get native behavior where open("w") always accepts native
     # strings. Omit bytes so we don't leak future's custom bytes.

From 98185128024d7d480d41327f97139591a6228431 Mon Sep 17 00:00:00 2001
From: Itamar Turner-Trauring <itamar@itamarst.org>
Date: Thu, 17 Sep 2020 13:10:52 -0400
Subject: [PATCH 22/68] Fix newbytes leak.

---
 src/allmydata/util/hashutil.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/src/allmydata/util/hashutil.py b/src/allmydata/util/hashutil.py
index 96d52c862..46f361287 100644
--- a/src/allmydata/util/hashutil.py
+++ b/src/allmydata/util/hashutil.py
@@ -10,7 +10,8 @@ from __future__ import unicode_literals
 
 from future.utils import PY2
 if PY2:
-    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+    # Don't import bytes to prevent leaking future's bytes.
+    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, dict, list, object, range, str, max, min  # noqa: F401
 
 from past.builtins import chr as byteschr
 

From 6c85f392dd1472ff48e10a4df3c9179f2ea1d9fb Mon Sep 17 00:00:00 2001
From: Itamar Turner-Trauring <itamar@itamarst.org>
Date: Thu, 17 Sep 2020 13:39:47 -0400
Subject: [PATCH 23/68] Fix another future newbytes leak that was breaking
 Foolscap.

---
 src/allmydata/uri.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/src/allmydata/uri.py b/src/allmydata/uri.py
index 253cb109c..bad0dd9df 100644
--- a/src/allmydata/uri.py
+++ b/src/allmydata/uri.py
@@ -13,8 +13,9 @@ from __future__ import unicode_literals
 
 from future.utils import PY2
 if PY2:
-    # Don't import bytes, to prevent leaks.
-    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, dict, list, object, range, str, max, min  # noqa: F401
+    # Don't import bytes or str, to prevent leaks.
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, dict, list, object, range, max, min  # noqa: F401
+    str = unicode
 
 from past.builtins import unicode, long
 

From 7a3320f4bb4af5a6d1844fdefb7f2d3178917ccf Mon Sep 17 00:00:00 2001
From: Ross Patterson <me@rpatterson.net>
Date: Thu, 17 Sep 2020 11:54:49 -0700
Subject: [PATCH 24/68] build(make): Defensive make settings

https://tech.davis-hansson.com/p/make/

Prevents things like silent shell pipeline errors in recipes, unexpected behavior from
undefined variables, etc.
---
 Makefile | 10 ++++++++++
 1 file changed, 10 insertions(+)

diff --git a/Makefile b/Makefile
index 6dd87409e..e85f29b48 100644
--- a/Makefile
+++ b/Makefile
@@ -1,6 +1,16 @@
 
 # NOTE: this Makefile requires GNU make
 
+### Defensive settings for make:
+#     https://tech.davis-hansson.com/p/make/
+SHELL := bash
+.ONESHELL:
+.SHELLFLAGS := -xeu -o pipefail -c
+.SILENT:
+.DELETE_ON_ERROR:
+MAKEFLAGS += --warn-undefined-variables
+MAKEFLAGS += --no-builtin-rules
+
 default:
 	@echo "no default target"
 

From da24739c59255b6b2217fcdf16cfd3c7d78801cf Mon Sep 17 00:00:00 2001
From: Ross Patterson <me@rpatterson.net>
Date: Thu, 17 Sep 2020 12:19:35 -0700
Subject: [PATCH 25/68] build(make): Cleanup unused target

AFAIK, prefixing a target with a `.` (aside from the special targets make itself treats
differently) is just to exclude it from being the default target, but the default target
is already defined above this one.  Also, it just runs another target which doesn't
exist AFAICT.
---
 Makefile | 3 ---
 1 file changed, 3 deletions(-)

diff --git a/Makefile b/Makefile
index e85f29b48..5b0defe32 100644
--- a/Makefile
+++ b/Makefile
@@ -28,9 +28,6 @@ APPNAME=tahoe-lafs
 make-version:
 	$(PYTHON) ./setup.py update_version
 
-.built:
-	$(MAKE) build
-
 src/allmydata/_version.py:
 	$(MAKE) make-version
 

From 735ff1e709acd31d05e3cb610194ebf79feedfce Mon Sep 17 00:00:00 2001
From: Ross Patterson <me@rpatterson.net>
Date: Thu, 17 Sep 2020 12:25:34 -0700
Subject: [PATCH 26/68] build(make): Improve and clarify Makefile org

This is opinionated, so I understand if community members disagree and I'm happy to back
this out.

It's conventional (and I prefer) to group variables toward the top of the `./Makefile`
so I've done that.

I also prefer separating task-oriented "phony" targets above any "real" targets (whose
recipes create/update actual build artifacts on the filesystem).  The task-oriented
targets tend to be a better starting point for any developers approaching the
`./Makefile` to understand where to get started.
---
 Makefile | 24 +++++++++++++++---------
 1 file changed, 15 insertions(+), 9 deletions(-)

diff --git a/Makefile b/Makefile
index 5b0defe32..07762f925 100644
--- a/Makefile
+++ b/Makefile
@@ -1,4 +1,5 @@
-
+# Tahoe-LAFS Development and maintenance tasks
+#
 # NOTE: this Makefile requires GNU make
 
 ### Defensive settings for make:
@@ -11,26 +12,26 @@ SHELL := bash
 MAKEFLAGS += --warn-undefined-variables
 MAKEFLAGS += --no-builtin-rules
 
-default:
-	@echo "no default target"
-
+# Local target variables
 PYTHON=python
 export PYTHON
 PYFLAKES=flake8
 export PYFLAKES
-
 SOURCES=src/allmydata static misc setup.py
 APPNAME=tahoe-lafs
 
+
+# Top-level targets
+
+default:
+	@echo "no default target"
+
 # This is necessary only if you want to automatically produce a new
 # _version.py file from the current git history (without doing a build).
 .PHONY: make-version
 make-version:
 	$(PYTHON) ./setup.py update_version
 
-src/allmydata/_version.py:
-	$(MAKE) make-version
-
 # Build OS X pkg packages.
 .PHONY: build-osx-pkg test-osx-pkg upload-osx-pkg
 build-osx-pkg:
@@ -66,7 +67,6 @@ upload-osx-pkg:
 ## # --include appeared in coverage-3.4
 ## COVERAGE_OMIT=--include '$(CURDIR)/src/allmydata/*' --omit '$(CURDIR)/src/allmydata/test/*'
 
-
 .PHONY: code-checks
 #code-checks: build version-and-path check-interfaces check-miscaptures -find-trailing-spaces -check-umids pyflakes
 code-checks: check-interfaces check-debugging check-miscaptures -find-trailing-spaces -check-umids pyflakes
@@ -227,3 +227,9 @@ tarballs: # delegated to tox, so setup.py can update setuptools if needed
 .PHONY: upload-tarballs
 upload-tarballs:
 	@if [ "X${BB_BRANCH}" = "Xmaster" ] || [ "X${BB_BRANCH}" = "X" ]; then for f in dist/*; do flappclient --furlfile ~/.tahoe-tarball-upload.furl upload-file $$f; done ; else echo not uploading tarballs because this is not trunk but is branch \"${BB_BRANCH}\" ; fi
+
+
+# Real targets
+
+src/allmydata/_version.py:
+	$(MAKE) make-version

From 775a3dba33bc5345c34083d20f576bbf94ef1033 Mon Sep 17 00:00:00 2001
From: Ross Patterson <me@rpatterson.net>
Date: Thu, 17 Sep 2020 12:33:13 -0700
Subject: [PATCH 27/68] build(make): Fix missing target name

From context, I'm assuming this is just an omission.
---
 Makefile | 1 +
 1 file changed, 1 insertion(+)

diff --git a/Makefile b/Makefile
index 07762f925..6ab6db754 100644
--- a/Makefile
+++ b/Makefile
@@ -72,6 +72,7 @@ upload-osx-pkg:
 code-checks: check-interfaces check-debugging check-miscaptures -find-trailing-spaces -check-umids pyflakes
 
 .PHONY: check-interfaces
+check-interfaces:
 	$(PYTHON) misc/coding_tools/check-interfaces.py 2>&1 |tee violations.txt
 	@echo
 

From d8df630729c6632cb02986fe36203531e9219d5b Mon Sep 17 00:00:00 2001
From: Ross Patterson <me@rpatterson.net>
Date: Thu, 17 Sep 2020 12:36:55 -0700
Subject: [PATCH 28/68] build(make): Add missing phony target declarations

I also prefer having a `.PHONY: ...` declaration for each phony target, as is done in
the rest of the `./Makefile`, because it makes it easier when removing or refactoring
targets not to forget to also remove or adjust the `.PHONY: ...` declaration.
---
 Makefile | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/Makefile b/Makefile
index 6ab6db754..780d27c26 100644
--- a/Makefile
+++ b/Makefile
@@ -21,8 +21,9 @@ SOURCES=src/allmydata static misc setup.py
 APPNAME=tahoe-lafs
 
 
-# Top-level targets
+# Top-level, phony targets
 
+.PHONY: default
 default:
 	@echo "no default target"
 
@@ -33,13 +34,15 @@ make-version:
 	$(PYTHON) ./setup.py update_version
 
 # Build OS X pkg packages.
-.PHONY: build-osx-pkg test-osx-pkg upload-osx-pkg
+.PHONY: build-osx-pkg
 build-osx-pkg:
 	misc/build_helpers/build-osx-pkg.sh $(APPNAME)
 
+.PHONY: test-osx-pkg
 test-osx-pkg:
 	$(PYTHON) misc/build_helpers/test-osx-pkg.py
 
+.PHONY: upload-osx-pkg
 upload-osx-pkg:
 	# [Failure instance: Traceback: <class 'OpenSSL.SSL.Error'>: [('SSL routines', 'ssl3_read_bytes', 'tlsv1 alert unknown ca'), ('SSL routines', 'ssl3_write_bytes', 'ssl handshake failure')]
 	#

From 3d79793ee855b8e385797fcc72ed5e38f0789d03 Mon Sep 17 00:00:00 2001
From: Itamar Turner-Trauring <itamar@itamarst.org>
Date: Thu, 17 Sep 2020 15:38:08 -0400
Subject: [PATCH 29/68] Try to fix hashutil.

---
 src/allmydata/util/hashutil.py | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/src/allmydata/util/hashutil.py b/src/allmydata/util/hashutil.py
index 46f361287..ebb2f12af 100644
--- a/src/allmydata/util/hashutil.py
+++ b/src/allmydata/util/hashutil.py
@@ -11,7 +11,9 @@ from __future__ import unicode_literals
 from future.utils import PY2
 if PY2:
     # Don't import bytes to prevent leaking future's bytes.
-    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, dict, list, object, range, str, max, min  # noqa: F401
+    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, dict, list, object, range, str, max, min, bytes as future_bytes  # noqa: F401
+else:
+    future_bytes = bytes
 
 from past.builtins import chr as byteschr
 
@@ -214,7 +216,7 @@ def bucket_cancel_secret_hash(file_cancel_secret, peerid):
 
 
 def _xor(a, b):
-    return b"".join([byteschr(c ^ b) for c in a])
+    return b"".join([byteschr(c ^ b) for c in future_bytes(a)])
 
 
 def hmac(tag, data):

From 02cb451a6b97a992f11b8edaed1103eb2749646d Mon Sep 17 00:00:00 2001
From: Itamar Turner-Trauring <itamar@itamarst.org>
Date: Thu, 17 Sep 2020 16:06:26 -0400
Subject: [PATCH 30/68] Fix failing tests.

---
 src/allmydata/test/test_storage.py | 18 +++++++++---------
 src/allmydata/util/hashutil.py     |  6 ++++--
 2 files changed, 13 insertions(+), 11 deletions(-)

diff --git a/src/allmydata/test/test_storage.py b/src/allmydata/test/test_storage.py
index 1f5f91844..b9e3d00c6 100644
--- a/src/allmydata/test/test_storage.py
+++ b/src/allmydata/test/test_storage.py
@@ -366,21 +366,21 @@ class Server(unittest.TestCase):
     def test_declares_fixed_1528(self):
         ss = self.create("test_declares_fixed_1528")
         ver = ss.remote_get_version()
-        sv1 = ver['http://allmydata.org/tahoe/protocols/storage/v1']
-        self.failUnless(sv1.get('prevents-read-past-end-of-share-data'), sv1)
+        sv1 = ver[b'http://allmydata.org/tahoe/protocols/storage/v1']
+        self.failUnless(sv1.get(b'prevents-read-past-end-of-share-data'), sv1)
 
     def test_declares_maximum_share_sizes(self):
         ss = self.create("test_declares_maximum_share_sizes")
         ver = ss.remote_get_version()
-        sv1 = ver['http://allmydata.org/tahoe/protocols/storage/v1']
-        self.failUnlessIn('maximum-immutable-share-size', sv1)
-        self.failUnlessIn('maximum-mutable-share-size', sv1)
+        sv1 = ver[b'http://allmydata.org/tahoe/protocols/storage/v1']
+        self.failUnlessIn(b'maximum-immutable-share-size', sv1)
+        self.failUnlessIn(b'maximum-mutable-share-size', sv1)
 
     def test_declares_available_space(self):
         ss = self.create("test_declares_available_space")
         ver = ss.remote_get_version()
-        sv1 = ver['http://allmydata.org/tahoe/protocols/storage/v1']
-        self.failUnlessIn('available-space', sv1)
+        sv1 = ver[b'http://allmydata.org/tahoe/protocols/storage/v1']
+        self.failUnlessIn(b'available-space', sv1)
 
     def allocate(self, ss, storage_index, sharenums, size, canary=None):
         renew_secret = hashutil.tagged_hash(b"blah", b"%d" % next(self._lease_secret))
@@ -980,8 +980,8 @@ class MutableServer(unittest.TestCase):
         # Also see if the server explicitly declares that it supports this
         # feature.
         ver = ss.remote_get_version()
-        storage_v1_ver = ver["http://allmydata.org/tahoe/protocols/storage/v1"]
-        self.failUnless(storage_v1_ver.get("fills-holes-with-zero-bytes"))
+        storage_v1_ver = ver[b"http://allmydata.org/tahoe/protocols/storage/v1"]
+        self.failUnless(storage_v1_ver.get(b"fills-holes-with-zero-bytes"))
 
         # If the size is dropped to zero the share is deleted.
         answer = rstaraw(b"si1", secrets,
diff --git a/src/allmydata/util/hashutil.py b/src/allmydata/util/hashutil.py
index 46f361287..ebb2f12af 100644
--- a/src/allmydata/util/hashutil.py
+++ b/src/allmydata/util/hashutil.py
@@ -11,7 +11,9 @@ from __future__ import unicode_literals
 from future.utils import PY2
 if PY2:
     # Don't import bytes to prevent leaking future's bytes.
-    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, dict, list, object, range, str, max, min  # noqa: F401
+    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, dict, list, object, range, str, max, min, bytes as future_bytes  # noqa: F401
+else:
+    future_bytes = bytes
 
 from past.builtins import chr as byteschr
 
@@ -214,7 +216,7 @@ def bucket_cancel_secret_hash(file_cancel_secret, peerid):
 
 
 def _xor(a, b):
-    return b"".join([byteschr(c ^ b) for c in a])
+    return b"".join([byteschr(c ^ b) for c in future_bytes(a)])
 
 
 def hmac(tag, data):

From 51338bd87429bd283a5adc7ecb274dd80b345f59 Mon Sep 17 00:00:00 2001
From: Ross Patterson <me@rpatterson.net>
Date: Thu, 17 Sep 2020 14:09:08 -0700
Subject: [PATCH 31/68] build(make): Add targets for running tests

Fully parallelize the build of the environments since they tend to be network I/O
bound.  Parallelize the run of tests to use all CPU cores.
---
 .gitignore | 1 +
 Makefile   | 9 +++++++++
 2 files changed, 10 insertions(+)

diff --git a/.gitignore b/.gitignore
index 93802b22b..0bc7dc0b9 100644
--- a/.gitignore
+++ b/.gitignore
@@ -37,6 +37,7 @@ zope.interface-*.egg
 /tahoe-deps/
 /tahoe-deps.tar.gz
 /.coverage
+/.coverage.*
 /.coverage.el
 /coverage-html/
 /miscaptures.txt
diff --git a/Makefile b/Makefile
index 780d27c26..2b176c4be 100644
--- a/Makefile
+++ b/Makefile
@@ -27,6 +27,11 @@ APPNAME=tahoe-lafs
 default:
 	@echo "no default target"
 
+.PHONY: test
+## Run all tests and code reports
+test: .tox
+	tox -p auto
+
 # This is necessary only if you want to automatically produce a new
 # _version.py file from the current git history (without doing a build).
 .PHONY: make-version
@@ -199,6 +204,7 @@ distclean: clean
 	rm -rf src/*.egg-info
 	rm -f src/allmydata/_version.py
 	rm -f src/allmydata/_appname.py
+	rm -rf ./.tox/
 
 
 .PHONY: find-trailing-spaces
@@ -237,3 +243,6 @@ upload-tarballs:
 
 src/allmydata/_version.py:
 	$(MAKE) make-version
+
+.tox: tox.ini setup.py
+	tox --notest -p all

From b4b996c3e7cd3f0d53aacba6cbb0f71ede88c079 Mon Sep 17 00:00:00 2001
From: Ross Patterson <me@rpatterson.net>
Date: Thu, 17 Sep 2020 16:09:39 -0700
Subject: [PATCH 32/68] build(make): Cleanup remnant coverage bits

I don't see anything in here that I can find references to elsewhere and we're certainly
running test coverage reports in tox and on CI now.
---
 Makefile | 17 -----------------
 1 file changed, 17 deletions(-)

diff --git a/Makefile b/Makefile
index 2b176c4be..c1b8e7579 100644
--- a/Makefile
+++ b/Makefile
@@ -58,23 +58,6 @@ upload-osx-pkg:
 	#   echo not uploading tahoe-lafs-osx-pkg because this is not trunk but is branch \"${BB_BRANCH}\" ; \
 	# fi
 
-# code coverage-based testing is disabled temporarily, as we switch to tox.
-# This will eventually be added to a tox environment. The following comments
-# and variable settings are retained as notes for that future effort.
-
-## # code coverage: install the "coverage" package from PyPI, do "make
-## # test-coverage" to do a unit test run with coverage-gathering enabled, then
-## # use "make coverage-output" to generate an HTML report. Also see "make
-## # .coverage.el" and misc/coding_tools/coverage.el for Emacs integration.
-##
-## # This might need to be python-coverage on Debian-based distros.
-## COVERAGE=coverage
-##
-## COVERAGEARGS=--branch --source=src/allmydata
-##
-## # --include appeared in coverage-3.4
-## COVERAGE_OMIT=--include '$(CURDIR)/src/allmydata/*' --omit '$(CURDIR)/src/allmydata/test/*'
-
 .PHONY: code-checks
 #code-checks: build version-and-path check-interfaces check-miscaptures -find-trailing-spaces -check-umids pyflakes
 code-checks: check-interfaces check-debugging check-miscaptures -find-trailing-spaces -check-umids pyflakes

From 7b302871e43764c8af5d79c008559884746d4450 Mon Sep 17 00:00:00 2001
From: Itamar Turner-Trauring <itamar@itamarst.org>
Date: Fri, 18 Sep 2020 11:41:28 -0400
Subject: [PATCH 33/68] Python 2 tests pass again.

---
 src/allmydata/immutable/upload.py | 8 ++++----
 src/allmydata/test/test_encode.py | 4 ++--
 2 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/src/allmydata/immutable/upload.py b/src/allmydata/immutable/upload.py
index dbe348a26..b12b90ecc 100644
--- a/src/allmydata/immutable/upload.py
+++ b/src/allmydata/immutable/upload.py
@@ -1,4 +1,4 @@
-from past.builtins import long
+from past.builtins import long, unicode
 
 import os, time, weakref, itertools
 from zope.interface import implementer
@@ -1500,7 +1500,7 @@ class AssistedUploader(object):
 
         Returns a Deferred that will fire with the UploadResults instance.
         """
-        precondition(isinstance(storage_index, str), storage_index)
+        precondition(isinstance(storage_index, bytes), storage_index)
         self._started = time.time()
         eu = IEncryptedUploadable(encrypted_uploadable)
         eu.set_upload_status(self._upload_status)
@@ -1653,7 +1653,7 @@ class BaseUploadable(object):
     def set_default_encoding_parameters(self, default_params):
         assert isinstance(default_params, dict)
         for k,v in default_params.items():
-            precondition(isinstance(k, str), k, v)
+            precondition(isinstance(k, (bytes, unicode)), k, v)
             precondition(isinstance(v, int), k, v)
         if "k" in default_params:
             self.default_encoding_param_k = default_params["k"]
@@ -1773,7 +1773,7 @@ class FileName(FileHandle):
         then the hash will be hashed together with the string in the
         "convergence" argument to form the encryption key.
         """
-        assert convergence is None or isinstance(convergence, str), (convergence, type(convergence))
+        assert convergence is None or isinstance(convergence, bytes), (convergence, type(convergence))
         FileHandle.__init__(self, open(filename, "rb"), convergence=convergence)
     def close(self):
         FileHandle.close(self)
diff --git a/src/allmydata/test/test_encode.py b/src/allmydata/test/test_encode.py
index 7cec5a7d3..8eab6ac45 100644
--- a/src/allmydata/test/test_encode.py
+++ b/src/allmydata/test/test_encode.py
@@ -274,7 +274,7 @@ class Encode(unittest.TestCase):
         data = make_data(datalen)
         # force use of multiple segments
         e = encode.Encoder()
-        u = upload.Data(data, convergence="some convergence string")
+        u = upload.Data(data, convergence=b"some convergence string")
         u.set_default_encoding_parameters({'max_segment_size': max_segment_size,
                                            'k': 25, 'happy': 75, 'n': 100})
         eu = upload.EncryptAnUploadable(u)
@@ -304,7 +304,7 @@ class Encode(unittest.TestCase):
 
         def _check(res):
             verifycap = res
-            self.failUnless(isinstance(verifycap.uri_extension_hash, str))
+            self.failUnless(isinstance(verifycap.uri_extension_hash, bytes))
             self.failUnlessEqual(len(verifycap.uri_extension_hash), 32)
             for i,peer in enumerate(all_shareholders):
                 self.failUnless(peer.closed)

From a0ff941fff7022f87e3aebe2d2e6b4ad1799335a Mon Sep 17 00:00:00 2001
From: Itamar Turner-Trauring <itamar@itamarst.org>
Date: Fri, 18 Sep 2020 13:36:54 -0400
Subject: [PATCH 34/68] Try to fix Python 3 again.

---
 src/allmydata/immutable/downloader/share.py | 4 ++--
 src/allmydata/immutable/upload.py           | 4 ++--
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/src/allmydata/immutable/downloader/share.py b/src/allmydata/immutable/downloader/share.py
index 0da563baa..7ec2935fd 100644
--- a/src/allmydata/immutable/downloader/share.py
+++ b/src/allmydata/immutable/downloader/share.py
@@ -85,8 +85,8 @@ class Share(object):
 
         self._requested_blocks = [] # (segnum, set(observer2..))
         v = server.get_version()
-        ver = v["http://allmydata.org/tahoe/protocols/storage/v1"]
-        self._overrun_ok = ver["tolerates-immutable-read-overrun"]
+        ver = v[b"http://allmydata.org/tahoe/protocols/storage/v1"]
+        self._overrun_ok = ver[b"tolerates-immutable-read-overrun"]
         # If _overrun_ok and we guess the offsets correctly, we can get
         # everything in one RTT. If _overrun_ok and we guess wrong, we might
         # need two RTT (but we could get lucky and do it in one). If overrun
diff --git a/src/allmydata/immutable/upload.py b/src/allmydata/immutable/upload.py
index dbe348a26..2ea970660 100644
--- a/src/allmydata/immutable/upload.py
+++ b/src/allmydata/immutable/upload.py
@@ -419,8 +419,8 @@ class Tahoe2ServerSelector(log.PrefixingLogMixin):
         # 12GiB). See #439 for details.
         def _get_maxsize(server):
             v0 = server.get_version()
-            v1 = v0["http://allmydata.org/tahoe/protocols/storage/v1"]
-            return v1["maximum-immutable-share-size"]
+            v1 = v0[b"http://allmydata.org/tahoe/protocols/storage/v1"]
+            return v1[b"maximum-immutable-share-size"]
 
         for server in candidate_servers:
             self.peer_selector.add_peer(server.get_serverid())

From 059bb2250b5fe63f84f17558c0ff9425247e3303 Mon Sep 17 00:00:00 2001
From: Jean-Paul Calderone <exarkun@twistedmatrix.com>
Date: Fri, 18 Sep 2020 14:10:09 -0400
Subject: [PATCH 35/68] Add a BadRequest resource to help with BAD REQUEST

---
 src/allmydata/web/common.py     | 3 ++-
 src/allmydata/web/common_py3.py | 9 +++++++++
 2 files changed, 11 insertions(+), 1 deletion(-)

diff --git a/src/allmydata/web/common.py b/src/allmydata/web/common.py
index 4a2bc2c3c..03db8463b 100644
--- a/src/allmydata/web/common.py
+++ b/src/allmydata/web/common.py
@@ -21,7 +21,8 @@ from allmydata.util.encodingutil import to_bytes, quote_output
 
 # Originally part of this module, so still part of its API:
 from .common_py3 import (  # noqa: F401
-    get_arg, abbreviate_time, MultiFormatResource, WebError
+    get_arg, abbreviate_time, MultiFormatResource, WebError,
+    BadRequest,
 )
 
 
diff --git a/src/allmydata/web/common_py3.py b/src/allmydata/web/common_py3.py
index 06751a8e8..f532e9000 100644
--- a/src/allmydata/web/common_py3.py
+++ b/src/allmydata/web/common_py3.py
@@ -23,6 +23,15 @@ class WebError(Exception):
         self.code = code
 
 
+class BadRequest(resource.ErrorPage):
+    """
+    ``BadRequest`` is a specialization of ``ErrorPage`` which returns the HTTP
+    response code **BAD REQUEST**.
+    """
+    def __init__(self, message=""):
+        resource.ErrorPage.__init__(self, http.BAD_REQUEST, "Bad Request", message)
+
+
 def get_arg(ctx_or_req, argname, default=None, multiple=False):
     """Extract an argument from either the query args (req.args) or the form
     body fields (req.fields). If multiple=False, this returns a single value

From 050388ee121c7f70901970f924010c1c53a942ed Mon Sep 17 00:00:00 2001
From: Itamar Turner-Trauring <itamar@itamarst.org>
Date: Fri, 18 Sep 2020 14:30:51 -0400
Subject: [PATCH 36/68] Work better on Python 3, until Nevow is gone.

---
 src/allmydata/test/no_network.py | 10 ++++++----
 1 file changed, 6 insertions(+), 4 deletions(-)

diff --git a/src/allmydata/test/no_network.py b/src/allmydata/test/no_network.py
index eb2bfd1a1..58289a60a 100644
--- a/src/allmydata/test/no_network.py
+++ b/src/allmydata/test/no_network.py
@@ -480,10 +480,12 @@ class GridTestMixin(object):
 
     def _record_webports_and_baseurls(self):
         self.g._check_clients()
-        self.client_webports = [c.getServiceNamed("webish").getPortnum()
-                                for c in self.g.clients]
-        self.client_baseurls = [c.getServiceNamed("webish").getURL()
-                                for c in self.g.clients]
+        if PY2:
+            # Temporarily disabled on Python 3 until Nevow is gone:
+            self.client_webports = [c.getServiceNamed("webish").getPortnum()
+                                    for c in self.g.clients]
+            self.client_baseurls = [c.getServiceNamed("webish").getURL()
+                                    for c in self.g.clients]
 
     def get_client_config(self, i=0):
         self.g._check_clients()

From c3bb367a93079744456c76262500b94b4b9e1bb6 Mon Sep 17 00:00:00 2001
From: Itamar Turner-Trauring <itamar@itamarst.org>
Date: Fri, 18 Sep 2020 14:31:23 -0400
Subject: [PATCH 37/68] Tests pass on Python 3.

---
 src/allmydata/immutable/checker.py | 2 +-
 src/allmydata/immutable/literal.py | 5 +++--
 src/allmydata/immutable/upload.py  | 2 +-
 3 files changed, 5 insertions(+), 4 deletions(-)

diff --git a/src/allmydata/immutable/checker.py b/src/allmydata/immutable/checker.py
index 73d6a0263..ce533b969 100644
--- a/src/allmydata/immutable/checker.py
+++ b/src/allmydata/immutable/checker.py
@@ -127,7 +127,7 @@ class ValidatedExtendedURIProxy(object):
         # consistent: codec_name, codec_params, tail_codec_params,
         # num_segments, size, needed_shares, total_shares
         if 'codec_name' in d:
-            if d['codec_name'] != "crs":
+            if d['codec_name'] != b"crs":
                 raise UnsupportedErasureCodec(d['codec_name'])
 
         if 'codec_params' in d:
diff --git a/src/allmydata/immutable/literal.py b/src/allmydata/immutable/literal.py
index fef4803b1..4832da7c1 100644
--- a/src/allmydata/immutable/literal.py
+++ b/src/allmydata/immutable/literal.py
@@ -1,4 +1,5 @@
-from six.moves import cStringIO as StringIO
+from io import BytesIO
+
 from zope.interface import implementer
 from twisted.internet import defer
 from twisted.internet.interfaces import IPushProducer
@@ -104,7 +105,7 @@ class LiteralFileNode(_ImmutableFileNodeBase):
         # vfs.adapters.ftp._FileToConsumerAdapter), neither of which is
         # likely to be used as the target for a Tahoe download.
 
-        d = basic.FileSender().beginFileTransfer(StringIO(data), consumer)
+        d = basic.FileSender().beginFileTransfer(BytesIO(data), consumer)
         d.addCallback(lambda lastSent: consumer)
         return d
 
diff --git a/src/allmydata/immutable/upload.py b/src/allmydata/immutable/upload.py
index b3994310c..884b2cf5e 100644
--- a/src/allmydata/immutable/upload.py
+++ b/src/allmydata/immutable/upload.py
@@ -1377,7 +1377,7 @@ class LiteralUploader(object):
                 self._progress.set_progress_total(size)
             return read_this_many_bytes(uploadable, size)
         d.addCallback(_got_size)
-        d.addCallback(lambda data: uri.LiteralFileURI("".join(data)))
+        d.addCallback(lambda data: uri.LiteralFileURI(b"".join(data)))
         d.addCallback(lambda u: u.to_string())
         d.addCallback(self._build_results)
         return d

From 8ef2252bd4535b3640e0befd4d473186dc91d731 Mon Sep 17 00:00:00 2001
From: Itamar Turner-Trauring <itamar@itamarst.org>
Date: Fri, 18 Sep 2020 14:32:19 -0400
Subject: [PATCH 38/68] Finish porting to Python 3.

---
 src/allmydata/test/test_encode.py | 3 +++
 src/allmydata/util/_python3.py    | 1 +
 2 files changed, 4 insertions(+)

diff --git a/src/allmydata/test/test_encode.py b/src/allmydata/test/test_encode.py
index 8eab6ac45..028a988cb 100644
--- a/src/allmydata/test/test_encode.py
+++ b/src/allmydata/test/test_encode.py
@@ -1,3 +1,6 @@
+"""
+Ported to Python 3.
+"""
 from __future__ import division
 from __future__ import absolute_import
 from __future__ import print_function
diff --git a/src/allmydata/util/_python3.py b/src/allmydata/util/_python3.py
index db02f0b17..d16f05a9a 100644
--- a/src/allmydata/util/_python3.py
+++ b/src/allmydata/util/_python3.py
@@ -85,6 +85,7 @@ PORTED_TEST_MODULES = [
     "allmydata.test.test_crypto",
     "allmydata.test.test_deferredutil",
     "allmydata.test.test_dictutil",
+    "allmydata.test.test_encode",
     "allmydata.test.test_encodingutil",
     "allmydata.test.test_happiness",
     "allmydata.test.test_hashtree",

From e648965fb6d8c46d8145df3c188a0947390b8435 Mon Sep 17 00:00:00 2001
From: Jean-Paul Calderone <exarkun@twistedmatrix.com>
Date: Fri, 18 Sep 2020 14:49:19 -0400
Subject: [PATCH 39/68] Add helpers to implement the desired exception behavior
 and use them

---
 src/allmydata/web/common.py    | 100 ++++++++++++++++++++++++---------
 src/allmydata/web/directory.py |  13 ++++-
 2 files changed, 85 insertions(+), 28 deletions(-)

diff --git a/src/allmydata/web/common.py b/src/allmydata/web/common.py
index 03db8463b..ac748c237 100644
--- a/src/allmydata/web/common.py
+++ b/src/allmydata/web/common.py
@@ -204,33 +204,52 @@ def should_create_intermediate_directories(req):
                 t not in ("delete", "rename", "rename-form", "check"))
 
 def humanize_failure(f):
-    # return text, responsecode
-    if f.check(EmptyPathnameComponentError):
+    """
+    Create an human-oriented description of a failure along with some HTTP
+    metadata.
+
+    :param Failure f: The failure to describe.
+
+    :return (bytes, int): A tuple of some prose and an HTTP code describing
+        the failure.
+    """
+    return humanize_exception(f.value)
+
+
+def humanize_exception(exc):
+    """
+    Like ``humanize_failure`` but for an exception.
+
+    :param Exception exc: The exception to describe.
+
+    :return: See ``humanize_failure``.
+    """
+    if isinstance(exc, EmptyPathnameComponentError):
         return ("The webapi does not allow empty pathname components, "
                 "i.e. a double slash", http.BAD_REQUEST)
-    if f.check(ExistingChildError):
+    if isinstance(exc, ExistingChildError):
         return ("There was already a child by that name, and you asked me "
                 "to not replace it.", http.CONFLICT)
-    if f.check(NoSuchChildError):
-        quoted_name = quote_output(f.value.args[0], encoding="utf-8", quotemarks=False)
+    if isinstance(exc, NoSuchChildError):
+        quoted_name = quote_output(exc.args[0], encoding="utf-8", quotemarks=False)
         return ("No such child: %s" % quoted_name, http.NOT_FOUND)
-    if f.check(NotEnoughSharesError):
+    if isinstance(exc, NotEnoughSharesError):
         t = ("NotEnoughSharesError: This indicates that some "
              "servers were unavailable, or that shares have been "
              "lost to server departure, hard drive failure, or disk "
              "corruption. You should perform a filecheck on "
              "this object to learn more.\n\nThe full error message is:\n"
-             "%s") % str(f.value)
+             "%s") % str(exc)
         return (t, http.GONE)
-    if f.check(NoSharesError):
+    if isinstance(exc, NoSharesError):
         t = ("NoSharesError: no shares could be found. "
              "Zero shares usually indicates a corrupt URI, or that "
              "no servers were connected, but it might also indicate "
              "severe corruption. You should perform a filecheck on "
              "this object to learn more.\n\nThe full error message is:\n"
-             "%s") % str(f.value)
+             "%s") % str(exc)
         return (t, http.GONE)
-    if f.check(UnrecoverableFileError):
+    if isinstance(exc, UnrecoverableFileError):
         t = ("UnrecoverableFileError: the directory (or mutable file) could "
              "not be retrieved, because there were insufficient good shares. "
              "This might indicate that no servers were connected, "
@@ -239,9 +258,9 @@ def humanize_failure(f):
              "failure, or disk corruption. You should perform a filecheck on "
              "this object to learn more.")
         return (t, http.GONE)
-    if f.check(MustNotBeUnknownRWError):
-        quoted_name = quote_output(f.value.args[1], encoding="utf-8")
-        immutable = f.value.args[2]
+    if isinstance(exc, MustNotBeUnknownRWError):
+        quoted_name = quote_output(exc.args[1], encoding="utf-8")
+        immutable = exc.args[2]
         if immutable:
             t = ("MustNotBeUnknownRWError: an operation to add a child named "
                  "%s to a directory was given an unknown cap in a write slot.\n"
@@ -261,29 +280,30 @@ def humanize_failure(f):
                  "writecap in the write slot if desired, would also work in this "
                  "case.") % quoted_name
         return (t, http.BAD_REQUEST)
-    if f.check(MustBeDeepImmutableError):
-        quoted_name = quote_output(f.value.args[1], encoding="utf-8")
+    if isinstance(exc, MustBeDeepImmutableError):
+        quoted_name = quote_output(exc.args[1], encoding="utf-8")
         t = ("MustBeDeepImmutableError: a cap passed to this operation for "
              "the child named %s, needed to be immutable but was not. Either "
              "the cap is being added to an immutable directory, or it was "
              "originally retrieved from an immutable directory as an unknown "
              "cap.") % quoted_name
         return (t, http.BAD_REQUEST)
-    if f.check(MustBeReadonlyError):
-        quoted_name = quote_output(f.value.args[1], encoding="utf-8")
+    if isinstance(exc, MustBeReadonlyError):
+        quoted_name = quote_output(exc.args[1], encoding="utf-8")
         t = ("MustBeReadonlyError: a cap passed to this operation for "
              "the child named '%s', needed to be read-only but was not. "
              "The cap is being passed in a read slot (ro_uri), or was retrieved "
              "from a read slot as an unknown cap.") % quoted_name
         return (t, http.BAD_REQUEST)
-    if f.check(blacklist.FileProhibited):
-        t = "Access Prohibited: %s" % quote_output(f.value.reason, encoding="utf-8", quotemarks=False)
+    if isinstance(exc, blacklist.FileProhibited):
+        t = "Access Prohibited: %s" % quote_output(exc.reason, encoding="utf-8", quotemarks=False)
         return (t, http.FORBIDDEN)
-    if f.check(WebError):
-        return (f.value.text, f.value.code)
-    if f.check(FileTooLargeError):
-        return (f.getTraceback(), http.REQUEST_ENTITY_TOO_LARGE)
-    return (str(f), None)
+    if isinstance(exc, WebError):
+        return (exc.text, exc.code)
+    if isinstance(exc, FileTooLargeError):
+        return ("FileTooLargeError: %s" % (exc,), http.REQUEST_ENTITY_TOO_LARGE)
+    return (str(exc), None)
+
 
 class MyExceptionHandler(appserver.DefaultExceptionHandler, object):
     def simple(self, ctx, text, code=http.BAD_REQUEST):
@@ -481,8 +501,38 @@ class TokenOnlyWebApi(resource.Resource, object):
                 req.setResponseCode(e.code)
                 return json.dumps({"error": e.text})
             except Exception as e:
-                message, code = humanize_failure(Failure())
+                message, code = humanize_exception(e)
                 req.setResponseCode(500 if code is None else code)
                 return json.dumps({"error": message})
         else:
             raise WebError("'%s' invalid type for 't' arg" % (t,), http.BAD_REQUEST)
+
+
+def exception_to_child(f):
+    """
+    Decorate ``getChild`` method with exception handling behavior to render an
+    error page reflecting the exception.
+    """
+    @wraps(f)
+    def g(self, name, req):
+        try:
+            return m(self, name, req)
+        except Exception as e:
+            description, status = humanize_exception(e)
+            return ErrorPage(status, "Error", description)
+    return g
+
+
+def render_exception(m):
+    """
+    Decorate a ``render_*`` method with exception handling behavior to render
+    an error page reflecting the exception.
+    """
+    @wraps(m)
+    def g(self, request):
+        try:
+            return m(self, request)
+        except Exception as e:
+            description, status = humanize_exception(e)
+            return ErrorPage(status, "Error", description).render(request)
+    return g
diff --git a/src/allmydata/web/directory.py b/src/allmydata/web/directory.py
index cabcd023e..3ad935bab 100644
--- a/src/allmydata/web/directory.py
+++ b/src/allmydata/web/directory.py
@@ -2,6 +2,7 @@
 import json
 import urllib
 from datetime import timedelta
+from functools import wraps
 
 from zope.interface import implementer
 from twisted.internet import defer
@@ -48,6 +49,7 @@ from allmydata.web.common import (
     parse_replace_arg,
     should_create_intermediate_directories,
     humanize_failure,
+    humanize_exception,
     convert_children_json,
     get_format,
     get_mutable_type,
@@ -55,6 +57,8 @@ from allmydata.web.common import (
     render_time,
     MultiFormatResource,
     SlotsSequenceElement,
+    exception_to_child,
+    render_exception,
 )
 from allmydata.web.filenode import ReplaceMeMixin, \
      FileNodeHandler, PlaceHolderNodeHandler
@@ -94,6 +98,7 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object):
         self.name = name
         self._operations = client.get_web_service().get_operations()
 
+    @exception_to_child
     def getChild(self, name, req):
         """
         Dynamically create a child for the given request and name
@@ -113,9 +118,7 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object):
         # we will follow suit.
         for segment in req.prepath:
             if not segment:
-                raise EmptyPathnameComponentError(
-                    u"The webapi does not allow empty pathname components",
-                )
+                raise EmptyPathnameComponentError()
 
         d = self.node.get(name)
         d.addBoth(self._got_child, req, name)
@@ -210,6 +213,7 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object):
         d.addCallback(lambda res: self.node.get_uri())
         return d
 
+    @render_exception
     def render_GET(self, req):
         # This is where all of the directory-related ?t=* code goes.
         t = get_arg(req, "t", "").strip()
@@ -248,6 +252,7 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object):
 
         raise WebError("GET directory: bad t=%s" % t)
 
+    @render_exception
     def render_PUT(self, req):
         t = get_arg(req, "t", "").strip()
         replace = parse_replace_arg(get_arg(req, "replace", "true"))
@@ -267,6 +272,7 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object):
 
         raise WebError("PUT to a directory")
 
+    @render_exception
     def render_POST(self, req):
         t = get_arg(req, "t", "").strip()
 
@@ -1458,6 +1464,7 @@ class UnknownNodeHandler(Resource, object):
         self.parentnode = parentnode
         self.name = name
 
+    @render_exception
     def render_GET(self, req):
         t = get_arg(req, "t", "").strip()
         if t == "info":

From 8f3a32a22c08e213b376d911c87efced46ba63d0 Mon Sep 17 00:00:00 2001
From: Jean-Paul Calderone <exarkun@twistedmatrix.com>
Date: Fri, 18 Sep 2020 14:49:39 -0400
Subject: [PATCH 40/68] news fragment

---
 newsfragments/3422.minor | 0
 1 file changed, 0 insertions(+), 0 deletions(-)
 create mode 100644 newsfragments/3422.minor

diff --git a/newsfragments/3422.minor b/newsfragments/3422.minor
new file mode 100644
index 000000000..e69de29bb

From 97872118a59eeb5e488570ff599c95ea0eb8aef3 Mon Sep 17 00:00:00 2001
From: Jean-Paul Calderone <exarkun@twistedmatrix.com>
Date: Fri, 18 Sep 2020 14:50:45 -0400
Subject: [PATCH 41/68] Move humanize_failure below humanize_exception, the function it derives from

---
 src/allmydata/web/common.py | 26 +++++++++++++-------------
 1 file changed, 13 insertions(+), 13 deletions(-)

diff --git a/src/allmydata/web/common.py b/src/allmydata/web/common.py
index ac748c237..e6e818e7a 100644
--- a/src/allmydata/web/common.py
+++ b/src/allmydata/web/common.py
@@ -203,19 +203,6 @@ def should_create_intermediate_directories(req):
     return bool(req.method in ("PUT", "POST") and
                 t not in ("delete", "rename", "rename-form", "check"))
 
-def humanize_failure(f):
-    """
-    Create an human-oriented description of a failure along with some HTTP
-    metadata.
-
-    :param Failure f: The failure to describe.
-
-    :return (bytes, int): A tuple of some prose and an HTTP code describing
-        the failure.
-    """
-    return humanize_exception(f.value)
-
-
 def humanize_exception(exc):
     """
     Like ``humanize_failure`` but for an exception.
@@ -305,6 +292,19 @@ def humanize_exception(exc):
     return (str(exc), None)
 
 
+def humanize_failure(f):
+    """
+    Create an human-oriented description of a failure along with some HTTP
+    metadata.
+
+    :param Failure f: The failure to describe.
+
+    :return (bytes, int): A tuple of some prose and an HTTP code describing
+        the failure.
+    """
+    return humanize_exception(f.value)
+
+
 class MyExceptionHandler(appserver.DefaultExceptionHandler, object):
     def simple(self, ctx, text, code=http.BAD_REQUEST):
         req = IRequest(ctx)

From d0d11a5444d52b4ead4165ad2b418943da196b91 Mon Sep 17 00:00:00 2001
From: Ross Patterson <me@rpatterson.net>
Date: Fri, 18 Sep 2020 11:48:49 -0700
Subject: [PATCH 42/68] fix(style): Wrong Python version for codechecks

On systems where the default Python is Python 3 (such as on recent Debian/Ubuntu
versions), then `$ tox -e codechecks` has a ton of failures related to Python 3
compatibility.  This explicitly forces it to use Python 2.7 until we have Python 3
compatibility.
---
 tox.ini | 1 +
 1 file changed, 1 insertion(+)

diff --git a/tox.ini b/tox.ini
index 6bc24273c..c9ec2bb1d 100644
--- a/tox.ini
+++ b/tox.ini
@@ -80,6 +80,7 @@ commands =
 
 
 [testenv:codechecks]
+basepython = python2.7
 # On macOS, git inside of towncrier needs $HOME.
 passenv = HOME
 whitelist_externals =

From 52015df7e4598a0a2372712a26c2fe3fbd793139 Mon Sep 17 00:00:00 2001
From: Ross Patterson <me@rpatterson.net>
Date: Fri, 18 Sep 2020 11:41:53 -0700
Subject: [PATCH 43/68] build(make): Add changelog entry for PR

---
 newsfragments/3421.other | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 newsfragments/3421.other

diff --git a/newsfragments/3421.other b/newsfragments/3421.other
new file mode 100644
index 000000000..d6f70f6d9
--- /dev/null
+++ b/newsfragments/3421.other
@@ -0,0 +1 @@
+Various, minor development `./Makefile` cleanup and improvement.

From b200d2043093b6746cc5a4951b5a5eb271faa498 Mon Sep 17 00:00:00 2001
From: Jean-Paul Calderone <exarkun@twistedmatrix.com>
Date: Fri, 18 Sep 2020 15:01:53 -0400
Subject: [PATCH 44/68] minor cleanups/rearranging

---
 src/allmydata/web/common.py     | 15 +++++++--------
 src/allmydata/web/common_py3.py |  9 ---------
 src/allmydata/web/directory.py  |  3 +--
 3 files changed, 8 insertions(+), 19 deletions(-)

diff --git a/src/allmydata/web/common.py b/src/allmydata/web/common.py
index e6e818e7a..6035e2884 100644
--- a/src/allmydata/web/common.py
+++ b/src/allmydata/web/common.py
@@ -1,10 +1,10 @@
 
 import time
 import json
+from functools import wraps
 
 from twisted.web import http, server, resource, template
 from twisted.python import log
-from twisted.python.failure import Failure
 from nevow import loaders, appserver
 from nevow.rend import Page
 from nevow.inevow import IRequest
@@ -22,7 +22,6 @@ from allmydata.util.encodingutil import to_bytes, quote_output
 # Originally part of this module, so still part of its API:
 from .common_py3 import (  # noqa: F401
     get_arg, abbreviate_time, MultiFormatResource, WebError,
-    BadRequest,
 )
 
 
@@ -516,23 +515,23 @@ def exception_to_child(f):
     @wraps(f)
     def g(self, name, req):
         try:
-            return m(self, name, req)
+            return f(self, name, req)
         except Exception as e:
             description, status = humanize_exception(e)
-            return ErrorPage(status, "Error", description)
+            return resource.ErrorPage(status, "Error", description)
     return g
 
 
-def render_exception(m):
+def render_exception(f):
     """
     Decorate a ``render_*`` method with exception handling behavior to render
     an error page reflecting the exception.
     """
-    @wraps(m)
+    @wraps(f)
     def g(self, request):
         try:
-            return m(self, request)
+            return f(self, request)
         except Exception as e:
             description, status = humanize_exception(e)
-            return ErrorPage(status, "Error", description).render(request)
+            return resource.ErrorPage(status, "Error", description).render(request)
     return g
diff --git a/src/allmydata/web/common_py3.py b/src/allmydata/web/common_py3.py
index f532e9000..06751a8e8 100644
--- a/src/allmydata/web/common_py3.py
+++ b/src/allmydata/web/common_py3.py
@@ -23,15 +23,6 @@ class WebError(Exception):
         self.code = code
 
 
-class BadRequest(resource.ErrorPage):
-    """
-    ``BadRequest`` is a specialiation of ``ErrorPage`` which returns the HTTP
-    response code **BAD REQUEST**.
-    """
-    def __init__(self, message=""):
-        resource.ErrorPage.__init__(self, http.BAD_REQUEST, "Bad Request", message)
-
-
 def get_arg(ctx_or_req, argname, default=None, multiple=False):
     """Extract an argument from either the query args (req.args) or the form
     body fields (req.fields). If multiple=False, this returns a single value
diff --git a/src/allmydata/web/directory.py b/src/allmydata/web/directory.py
index 3ad935bab..4d7aa1bd1 100644
--- a/src/allmydata/web/directory.py
+++ b/src/allmydata/web/directory.py
@@ -2,7 +2,6 @@
 import json
 import urllib
 from datetime import timedelta
-from functools import wraps
 
 from zope.interface import implementer
 from twisted.internet import defer
@@ -680,7 +679,7 @@ class DirectoryAsHTML(Element):
         try:
             children = yield self.node.list()
         except Exception as e:
-            text, code = humanize_failure(Failure(e))
+            text, code = humanize_exception(e)
             children = None
             self.dirnode_children_error = text
 

From 02b4ec8101745ff962a4bad966b9a5bd8d0d013e Mon Sep 17 00:00:00 2001
From: Itamar Turner-Trauring <itamar@itamarst.org>
Date: Mon, 21 Sep 2020 10:33:16 -0400
Subject: [PATCH 45/68] Get rid of stray print().

---
 src/allmydata/test/no_network.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/src/allmydata/test/no_network.py b/src/allmydata/test/no_network.py
index eb2bfd1a1..461de0aa8 100644
--- a/src/allmydata/test/no_network.py
+++ b/src/allmydata/test/no_network.py
@@ -325,7 +325,6 @@ class NoNetworkGrid(service.MultiService):
             d.addCallback(lambda c: self.clients.append(c))
 
             def _bad(f):
-                print(f)
                 self._setup_errors.append(f)
             d.addErrback(_bad)
 

From a4da6c3dbeb73bdf6c1d5d1154c6a2a6bcb2b7f6 Mon Sep 17 00:00:00 2001
From: Itamar Turner-Trauring <itamar@itamarst.org>
Date: Mon, 21 Sep 2020 10:35:56 -0400
Subject: [PATCH 46/68] Clarify comment.

---
 src/allmydata/uri.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/src/allmydata/uri.py b/src/allmydata/uri.py
index bad0dd9df..2c367cafe 100644
--- a/src/allmydata/uri.py
+++ b/src/allmydata/uri.py
@@ -13,7 +13,8 @@ from __future__ import unicode_literals
 
 from future.utils import PY2
 if PY2:
-    # Don't import bytes or str, to prevent leaks.
+    # Don't import bytes or str, to prevent future's newbytes leaking and
+    # breaking code that only expects normal bytes.
     from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, dict, list, object, range, max, min  # noqa: F401
     str = unicode
 

From c674198074cfbd124da5a413abbd9c5e933d52b2 Mon Sep 17 00:00:00 2001
From: Sajith Sasidharan <sajith@hcoop.net>
Date: Mon, 21 Sep 2020 13:03:51 -0400
Subject: [PATCH 47/68] Rearrange imports

---
 src/allmydata/web/filenode.py | 29 ++++++++++++++++++++++-------
 1 file changed, 22 insertions(+), 7 deletions(-)

diff --git a/src/allmydata/web/filenode.py b/src/allmydata/web/filenode.py
index 1f0142df9..9233926db 100644
--- a/src/allmydata/web/filenode.py
+++ b/src/allmydata/web/filenode.py
@@ -17,14 +17,29 @@ from allmydata.mutable.publish import MutableFileHandle
 from allmydata.mutable.common import MODE_READ
 from allmydata.util import log, base32
 from allmydata.util.encodingutil import quote_output
-from allmydata.blacklist import FileProhibited, ProhibitedNode
+from allmydata.blacklist import (
+    FileProhibited,
+    ProhibitedNode,
+)
 
-from allmydata.web.common import text_plain, WebError, \
-     boolean_of_arg, get_arg, should_create_intermediate_directories, \
-     MyExceptionHandler, parse_replace_arg, parse_offset_arg, \
-     get_format, get_mutable_type, get_filenode_metadata
-from allmydata.web.check_results import CheckResultsRenderer, \
-     CheckAndRepairResultsRenderer, LiteralCheckResultsRenderer
+from allmydata.web.common import (
+    boolean_of_arg,
+    get_arg,
+    get_filenode_metadata,
+    get_format,
+    get_mutable_type,
+    parse_offset_arg,
+    parse_replace_arg,
+    should_create_intermediate_directories,
+    text_plain,
+    MyExceptionHandler,
+    WebError,
+)
+from allmydata.web.check_results import (
+    CheckResultsRenderer,
+    CheckAndRepairResultsRenderer,
+    LiteralCheckResultsRenderer,
+)
 from allmydata.web.info import MoreInfo
 
 class ReplaceMeMixin(object):

From 9f0034347823e01152a4f4a1b555f99361f32776 Mon Sep 17 00:00:00 2001
From: Itamar Turner-Trauring <itamar@itamarst.org>
Date: Mon, 21 Sep 2020 10:45:05 -0400
Subject: [PATCH 48/68] Some progress towards test_upload running on Python 3.

---
 src/allmydata/storage_client.py   |  5 ++--
 src/allmydata/test/test_upload.py | 47 ++++++++++++++++---------------
 2 files changed, 27 insertions(+), 25 deletions(-)

diff --git a/src/allmydata/storage_client.py b/src/allmydata/storage_client.py
index cfc3bc83f..cdce28467 100644
--- a/src/allmydata/storage_client.py
+++ b/src/allmydata/storage_client.py
@@ -28,6 +28,7 @@ the foolscap-based server implemented in src/allmydata/storage/*.py .
 #
 # 6: implement other sorts of IStorageClient classes: S3, etc
 
+from past.builtins import unicode
 
 import re, time, hashlib
 try:
@@ -489,7 +490,7 @@ class _FoolscapStorage(object):
 
         *nickname* is optional.
         """
-        m = re.match(r'pb://(\w+)@', furl)
+        m = re.match(br'pb://(\w+)@', furl)
         assert m, furl
         tubid_s = m.group(1).lower()
         tubid = base32.a2b(tubid_s)
@@ -633,7 +634,7 @@ class NativeStorageServer(service.MultiService):
 
     def __init__(self, server_id, ann, tub_maker, handler_overrides, node_config, config=StorageClientConfig()):
         service.MultiService.__init__(self)
-        assert isinstance(server_id, str)
+        assert isinstance(server_id, bytes)
         self._server_id = server_id
         self.announcement = ann
         self._tub_maker = tub_maker
diff --git a/src/allmydata/test/test_upload.py b/src/allmydata/test/test_upload.py
index 6f35d57d3..5d68447ff 100644
--- a/src/allmydata/test/test_upload.py
+++ b/src/allmydata/test/test_upload.py
@@ -1,7 +1,8 @@
 # -*- coding: utf-8 -*-
 
 import os, shutil
-from six.moves import cStringIO as StringIO
+from io import BytesIO
+
 from twisted.trial import unittest
 from twisted.python.failure import Failure
 from twisted.internet import defer, task
@@ -33,25 +34,25 @@ class Uploadable(unittest.TestCase):
     def shouldEqual(self, data, expected):
         self.failUnless(isinstance(data, list))
         for e in data:
-            self.failUnless(isinstance(e, str))
-        s = "".join(data)
+            self.failUnless(isinstance(e, bytes))
+        s = b"".join(data)
         self.failUnlessEqual(s, expected)
 
     def test_filehandle_random_key(self):
         return self._test_filehandle(convergence=None)
 
     def test_filehandle_convergent_encryption(self):
-        return self._test_filehandle(convergence="some convergence string")
+        return self._test_filehandle(convergence=b"some convergence string")
 
     def _test_filehandle(self, convergence):
-        s = StringIO("a"*41)
+        s = BytesIO(b"a"*41)
         u = upload.FileHandle(s, convergence=convergence)
         d = u.get_size()
         d.addCallback(self.failUnlessEqual, 41)
         d.addCallback(lambda res: u.read(1))
-        d.addCallback(self.shouldEqual, "a")
+        d.addCallback(self.shouldEqual, b"a")
         d.addCallback(lambda res: u.read(80))
-        d.addCallback(self.shouldEqual, "a"*40)
+        d.addCallback(self.shouldEqual, b"a"*40)
         d.addCallback(lambda res: u.close()) # this doesn't close the filehandle
         d.addCallback(lambda res: s.close()) # that privilege is reserved for us
         return d
@@ -60,28 +61,28 @@ class Uploadable(unittest.TestCase):
         basedir = "upload/Uploadable/test_filename"
         os.makedirs(basedir)
         fn = os.path.join(basedir, "file")
-        f = open(fn, "w")
-        f.write("a"*41)
+        f = open(fn, "wb")
+        f.write(b"a"*41)
         f.close()
         u = upload.FileName(fn, convergence=None)
         d = u.get_size()
         d.addCallback(self.failUnlessEqual, 41)
         d.addCallback(lambda res: u.read(1))
-        d.addCallback(self.shouldEqual, "a")
+        d.addCallback(self.shouldEqual, b"a")
         d.addCallback(lambda res: u.read(80))
-        d.addCallback(self.shouldEqual, "a"*40)
+        d.addCallback(self.shouldEqual, b"a"*40)
         d.addCallback(lambda res: u.close())
         return d
 
     def test_data(self):
-        s = "a"*41
+        s = b"a"*41
         u = upload.Data(s, convergence=None)
         d = u.get_size()
         d.addCallback(self.failUnlessEqual, 41)
         d.addCallback(lambda res: u.read(1))
-        d.addCallback(self.shouldEqual, "a")
+        d.addCallback(self.shouldEqual, b"a")
         d.addCallback(lambda res: u.read(80))
-        d.addCallback(self.shouldEqual, "a"*40)
+        d.addCallback(self.shouldEqual, b"a"*40)
         d.addCallback(lambda res: u.close())
         return d
 
@@ -167,7 +168,7 @@ class FakeStorageServer(object):
 class FakeBucketWriter(object):
     # a diagnostic version of storageserver.BucketWriter
     def __init__(self, size):
-        self.data = StringIO()
+        self.data = BytesIO()
         self.closed = False
         self._size = size
 
@@ -216,7 +217,7 @@ class FakeClient(object):
         if type(mode) is str:
             mode = dict([i,mode] for i in range(num_servers))
         servers = [
-            ("%20d" % fakeid, FakeStorageServer(mode[fakeid], reactor=reactor))
+            (b"%20d" % fakeid, FakeStorageServer(mode[fakeid], reactor=reactor))
             for fakeid in range(self.num_servers)
         ]
         self.storage_broker = StorageFarmBroker(
@@ -225,7 +226,7 @@ class FakeClient(object):
             node_config=EMPTY_CLIENT_CONFIG,
         )
         for (serverid, rref) in servers:
-            ann = {"anonymous-storage-FURL": "pb://%s@nowhere/fake" % base32.b2a(serverid),
+            ann = {"anonymous-storage-FURL": b"pb://%s@nowhere/fake" % base32.b2a(serverid),
                    "permutation-seed-base32": base32.b2a(serverid) }
             self.storage_broker.test_add_rref(serverid, rref, ann)
         self.last_servers = [s[1] for s in servers]
@@ -236,7 +237,7 @@ class FakeClient(object):
         return self.encoding_params
     def get_storage_broker(self):
         return self.storage_broker
-    _secret_holder = client.SecretHolder("lease secret", "convergence secret")
+    _secret_holder = client.SecretHolder(b"lease secret", b"convergence secret")
 
 class GotTooFarError(Exception):
     pass
@@ -247,7 +248,7 @@ class GiganticUploadable(upload.FileHandle):
         self._fp = 0
 
     def get_encryption_key(self):
-        return defer.succeed("\x00" * 16)
+        return defer.succeed(b"\x00" * 16)
     def get_size(self):
         return defer.succeed(self._size)
     def read(self, length):
@@ -257,7 +258,7 @@ class GiganticUploadable(upload.FileHandle):
         if self._fp > 1000000:
             # terminate the test early.
             raise GotTooFarError("we shouldn't be allowed to get this far")
-        return defer.succeed(["\x00" * length])
+        return defer.succeed([b"\x00" * length])
     def close(self):
         pass
 
@@ -367,21 +368,21 @@ class GoodServer(unittest.TestCase, ShouldFailMixin, SetDEPMixin):
 
     def test_filehandle_zero(self):
         data = self.get_data(SIZE_ZERO)
-        d = upload_filehandle(self.u, StringIO(data))
+        d = upload_filehandle(self.u, BytesIO(data))
         d.addCallback(extract_uri)
         d.addCallback(self._check_small, SIZE_ZERO)
         return d
 
     def test_filehandle_small(self):
         data = self.get_data(SIZE_SMALL)
-        d = upload_filehandle(self.u, StringIO(data))
+        d = upload_filehandle(self.u, BytesIO(data))
         d.addCallback(extract_uri)
         d.addCallback(self._check_small, SIZE_SMALL)
         return d
 
     def test_filehandle_large(self):
         data = self.get_data(SIZE_LARGE)
-        d = upload_filehandle(self.u, StringIO(data))
+        d = upload_filehandle(self.u, BytesIO(data))
         d.addCallback(extract_uri)
         d.addCallback(self._check_large, SIZE_LARGE)
         return d

From 125a6855d6cb0c059a98249025b8def7efdfd5c6 Mon Sep 17 00:00:00 2001
From: Itamar Turner-Trauring <itamar@itamarst.org>
Date: Mon, 21 Sep 2020 10:52:58 -0400
Subject: [PATCH 49/68] More progress towards test_upload on Python 3.

---
 src/allmydata/immutable/upload.py |  2 +-
 src/allmydata/test/test_upload.py | 92 +++++++++++++++----------------
 2 files changed, 47 insertions(+), 47 deletions(-)

diff --git a/src/allmydata/immutable/upload.py b/src/allmydata/immutable/upload.py
index 884b2cf5e..a6fc02f30 100644
--- a/src/allmydata/immutable/upload.py
+++ b/src/allmydata/immutable/upload.py
@@ -299,7 +299,7 @@ class ServerTracker(object):
         I abort the remote bucket writers for all shares. This is a good idea
         to conserve space on the storage server.
         """
-        self.abort_some_buckets(self.buckets.keys())
+        self.abort_some_buckets(list(self.buckets.keys()))
 
     def abort_some_buckets(self, sharenums):
         """
diff --git a/src/allmydata/test/test_upload.py b/src/allmydata/test/test_upload.py
index 5d68447ff..0dbb39a4f 100644
--- a/src/allmydata/test/test_upload.py
+++ b/src/allmydata/test/test_upload.py
@@ -262,7 +262,7 @@ class GiganticUploadable(upload.FileHandle):
     def close(self):
         pass
 
-DATA = """
+DATA = b"""
 Once upon a time, there was a beautiful princess named Buttercup. She lived
 in a magical land where every file was stored securely among millions of
 machines, and nobody ever worried about their data being lost ever again.
@@ -765,40 +765,40 @@ class ServerSelection(unittest.TestCase):
 
 class StorageIndex(unittest.TestCase):
     def test_params_must_matter(self):
-        DATA = "I am some data"
+        DATA = b"I am some data"
         PARAMS = _Client.DEFAULT_ENCODING_PARAMETERS
 
-        u = upload.Data(DATA, convergence="")
+        u = upload.Data(DATA, convergence=b"")
         u.set_default_encoding_parameters(PARAMS)
         eu = upload.EncryptAnUploadable(u)
         d1 = eu.get_storage_index()
 
         # CHK means the same data should encrypt the same way
-        u = upload.Data(DATA, convergence="")
+        u = upload.Data(DATA, convergence=b"")
         u.set_default_encoding_parameters(PARAMS)
         eu = upload.EncryptAnUploadable(u)
         d1a = eu.get_storage_index()
 
         # but if we use a different convergence string it should be different
-        u = upload.Data(DATA, convergence="wheee!")
+        u = upload.Data(DATA, convergence=b"wheee!")
         u.set_default_encoding_parameters(PARAMS)
         eu = upload.EncryptAnUploadable(u)
         d1salt1 = eu.get_storage_index()
 
         # and if we add yet a different convergence it should be different again
-        u = upload.Data(DATA, convergence="NOT wheee!")
+        u = upload.Data(DATA, convergence=b"NOT wheee!")
         u.set_default_encoding_parameters(PARAMS)
         eu = upload.EncryptAnUploadable(u)
         d1salt2 = eu.get_storage_index()
 
         # and if we use the first string again it should be the same as last time
-        u = upload.Data(DATA, convergence="wheee!")
+        u = upload.Data(DATA, convergence=b"wheee!")
         u.set_default_encoding_parameters(PARAMS)
         eu = upload.EncryptAnUploadable(u)
         d1salt1a = eu.get_storage_index()
 
         # and if we change the encoding parameters, it should be different (from the same convergence string with different encoding parameters)
-        u = upload.Data(DATA, convergence="")
+        u = upload.Data(DATA, convergence=b"")
         u.set_default_encoding_parameters(PARAMS)
         u.encoding_param_k = u.default_encoding_param_k + 1
         eu = upload.EncryptAnUploadable(u)
@@ -910,7 +910,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
         assert self.g, "I tried to find a grid at self.g, but failed"
         broker = self.g.clients[0].storage_broker
         sh     = self.g.clients[0]._secret_holder
-        data = upload.Data("data" * 10000, convergence="")
+        data = upload.Data(b"data" * 10000, convergence=b"")
         data.set_default_encoding_parameters({'k': 3, 'happy': 4, 'n': 10})
         uploadable = upload.EncryptAnUploadable(data)
         encoder = encode.Encoder()
@@ -1003,7 +1003,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
         if "n" in kwargs and "k" in kwargs:
             client.encoding_params['k'] = kwargs['k']
             client.encoding_params['n'] = kwargs['n']
-        data = upload.Data("data" * 10000, convergence="")
+        data = upload.Data(b"data" * 10000, convergence=b"")
         self.data = data
         d = client.upload(data)
         def _store_uri(ur):
@@ -1022,8 +1022,8 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
         self.set_up_grid(client_config_hooks=hooks)
         c0 = self.g.clients[0]
 
-        DATA = "data" * 100
-        u = upload.Data(DATA, convergence="")
+        DATA = b"data" * 100
+        u = upload.Data(DATA, convergence=b"")
         d = c0.upload(u)
         d.addCallback(lambda ur: c0.create_node_from_uri(ur.get_uri()))
         m = monitor.Monitor()
@@ -1046,7 +1046,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
 
     def test_happy_semantics(self):
         self._setUp(2)
-        DATA = upload.Data("kittens" * 10000, convergence="")
+        DATA = upload.Data(b"kittens" * 10000, convergence=b"")
         # These parameters are unsatisfiable with only 2 servers.
         self.set_encoding_parameters(k=3, happy=5, n=10)
         d = self.shouldFail(UploadUnhappinessError, "test_happy_semantics",
@@ -1078,7 +1078,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
         self.basedir = "upload/EncodingParameters/aborted_shares"
         self.set_up_grid(num_servers=4)
         c = self.g.clients[0]
-        DATA = upload.Data(100 * "kittens", convergence="")
+        DATA = upload.Data(100 * b"kittens", convergence=b"")
         # These parameters are unsatisfiable with only 4 servers, but should
         # work with 5, as long as the original 4 are not stuck in the open
         # BucketWriter state (open() but not
@@ -1156,8 +1156,8 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
                             "We were asked to place shares on at "
                             "least 4 servers such that any 3 of them have "
                             "enough shares to recover the file",
-                            client.upload, upload.Data("data" * 10000,
-                                                       convergence="")))
+                            client.upload, upload.Data(b"data" * 10000,
+                                                       convergence=b"")))
 
         # Do comment:52, but like this:
         # server 2: empty
@@ -1189,8 +1189,8 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
                             "that any 3 of them have enough shares to recover "
                             "the file, but we were asked to place shares on "
                             "at least 4 such servers.",
-                            client.upload, upload.Data("data" * 10000,
-                                                       convergence="")))
+                            client.upload, upload.Data(b"data" * 10000,
+                                                       convergence=b"")))
         return d
 
 
@@ -1231,7 +1231,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
             return client
         d.addCallback(_reset_encoding_parameters)
         d.addCallback(lambda client:
-            client.upload(upload.Data("data" * 10000, convergence="")))
+            client.upload(upload.Data(b"data" * 10000, convergence=b"")))
         d.addCallback(lambda ign:
             self.failUnless(self._has_happy_share_distribution()))
 
@@ -1271,7 +1271,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
         # Now try uploading.
         d.addCallback(_reset_encoding_parameters)
         d.addCallback(lambda client:
-            client.upload(upload.Data("data" * 10000, convergence="")))
+            client.upload(upload.Data(b"data" * 10000, convergence=b"")))
         d.addCallback(lambda ign:
             self.failUnless(self._has_happy_share_distribution()))
 
@@ -1300,7 +1300,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
             self.g.remove_server(self.g.servers_by_number[0].my_nodeid))
         d.addCallback(_reset_encoding_parameters)
         d.addCallback(lambda client:
-            client.upload(upload.Data("data" * 10000, convergence="")))
+            client.upload(upload.Data(b"data" * 10000, convergence=b"")))
         # Make sure that only as many shares as necessary to satisfy
         # servers of happiness were pushed.
         d.addCallback(lambda results:
@@ -1331,7 +1331,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
 
         d.addCallback(_setup)
         d.addCallback(lambda client:
-            client.upload(upload.Data("data" * 10000, convergence="")))
+            client.upload(upload.Data(b"data" * 10000, convergence=b"")))
         d.addCallback(lambda ign:
             self.failUnless(self._has_happy_share_distribution()))
         return d
@@ -1369,7 +1369,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
             return client
         d.addCallback(_reset_encoding_parameters)
         d.addCallback(lambda client:
-            client.upload(upload.Data("data" * 10000, convergence="")))
+            client.upload(upload.Data(b"data" * 10000, convergence=b"")))
         d.addCallback(lambda ign:
             self.failUnless(self._has_happy_share_distribution()))
         return d
@@ -1408,7 +1408,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
             return client
         d.addCallback(_reset_encoding_parameters)
         d.addCallback(lambda client:
-            client.upload(upload.Data("data" * 10000, convergence="")))
+            client.upload(upload.Data(b"data" * 10000, convergence=b"")))
         d.addCallback(lambda ign:
             self.failUnless(self._has_happy_share_distribution()))
         return d
@@ -1524,7 +1524,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
             return client
         d.addCallback(_prepare_client)
         d.addCallback(lambda client:
-            client.upload(upload.Data("data" * 10000, convergence="")))
+            client.upload(upload.Data(b"data" * 10000, convergence=b"")))
         d.addCallback(lambda ign:
             self.failUnless(self._has_happy_share_distribution()))
         return d
@@ -1551,8 +1551,8 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
         d.addCallback(lambda c:
             self.shouldFail(UploadUnhappinessError, "test_query_counting",
                             "0 queries placed some shares",
-                            c.upload, upload.Data("data" * 10000,
-                                                  convergence="")))
+                            c.upload, upload.Data(b"data" * 10000,
+                                                  convergence=b"")))
         # Now try with some readonly servers. We want to make sure that
         # the readonly server share discovery phase is counted correctly.
         def _reset(ign):
@@ -1575,8 +1575,8 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
             self.shouldFail(UploadUnhappinessError, "test_query_counting",
                             "4 placed none (of which 4 placed none due to "
                             "the server being full",
-                            c.upload, upload.Data("data" * 10000,
-                                                  convergence="")))
+                            c.upload, upload.Data(b"data" * 10000,
+                                                  convergence=b"")))
         # Now try the case where the upload process finds a bunch of the
         # shares that it wants to place on the first server, including
         # the one that it wanted to allocate there. Though no shares will
@@ -1604,8 +1604,8 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
         d.addCallback(lambda c:
             self.shouldFail(UploadUnhappinessError, "test_query_counting",
                             "0 queries placed some shares",
-                            c.upload, upload.Data("data" * 10000,
-                                                  convergence="")))
+                            c.upload, upload.Data(b"data" * 10000,
+                                                  convergence=b"")))
         return d
 
 
@@ -1627,7 +1627,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
                             "test_upper_limit_on_readonly_queries",
                             "sent 8 queries to 8 servers",
                             client.upload,
-                            upload.Data('data' * 10000, convergence="")))
+                            upload.Data('data' * 10000, convergence=b"")))
         return d
 
 
@@ -1669,7 +1669,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
                             "(of which 5 placed none due to the server being "
                             "full and 0 placed none due to an error)",
                             client.upload,
-                            upload.Data("data" * 10000, convergence="")))
+                            upload.Data(b"data" * 10000, convergence=b"")))
 
 
         # server 1: read-only, no shares
@@ -1710,7 +1710,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
                             "(of which 4 placed none due to the server being "
                             "full and 1 placed none due to an error)",
                             client.upload,
-                            upload.Data("data" * 10000, convergence="")))
+                            upload.Data(b"data" * 10000, convergence=b"")))
         # server 0, server 1 = empty, accepting shares
         # This should place all of the shares, but still fail with happy=4.
         # We want to make sure that the exception message is worded correctly.
@@ -1726,8 +1726,8 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
                             "server(s). We were asked to place shares on at "
                             "least 4 server(s) such that any 3 of them have "
                             "enough shares to recover the file.",
-                            client.upload, upload.Data("data" * 10000,
-                                                       convergence="")))
+                            client.upload, upload.Data(b"data" * 10000,
+                                                       convergence=b"")))
         # servers 0 - 4 = empty, accepting shares
         # This too should place all the shares, and this too should fail,
         # but since the effective happiness is more than the k encoding
@@ -1751,8 +1751,8 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
                             "that any 3 of them have enough shares to recover "
                             "the file, but we were asked to place shares on "
                             "at least 7 such servers.",
-                            client.upload, upload.Data("data" * 10000,
-                                                       convergence="")))
+                            client.upload, upload.Data(b"data" * 10000,
+                                                       convergence=b"")))
         # server 0: shares 0 - 9
         # server 1: share 0, read-only
         # server 2: share 0, read-only
@@ -1783,8 +1783,8 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
                             "to place shares on at least 7 servers such that "
                             "any 3 of them have enough shares to recover the "
                             "file",
-                            client.upload, upload.Data("data" * 10000,
-                                                       convergence="")))
+                            client.upload, upload.Data(b"data" * 10000,
+                                                       convergence=b"")))
         return d
 
 
@@ -1816,7 +1816,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
 
         d.addCallback(_setup)
         d.addCallback(lambda client:
-            client.upload(upload.Data("data" * 10000, convergence="")))
+            client.upload(upload.Data(b"data" * 10000, convergence=b"")))
         d.addCallback(lambda ign:
             self.failUnless(self._has_happy_share_distribution()))
         return d
@@ -1874,7 +1874,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
 
         d.addCallback(_setup)
         d.addCallback(lambda client:
-                          client.upload(upload.Data("data" * 10000, convergence="")))
+                          client.upload(upload.Data(b"data" * 10000, convergence=b"")))
         d.addCallback(lambda ign:
             self.failUnless(self._has_happy_share_distribution()))
         return d
@@ -1912,7 +1912,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
             return c
         d.addCallback(_server_setup)
         d.addCallback(lambda client:
-            client.upload(upload.Data("data" * 10000, convergence="")))
+            client.upload(upload.Data(b"data" * 10000, convergence=b"")))
         d.addCallback(lambda ign:
             self.failUnless(self._has_happy_share_distribution()))
         return d
@@ -1941,7 +1941,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
             return self.g.clients[0]
         d.addCallback(_server_setup)
         d.addCallback(lambda client:
-            client.upload(upload.Data("data" * 10000, convergence="")))
+            client.upload(upload.Data(b"data" * 10000, convergence=b"")))
         d.addCallback(lambda ign:
             self.failUnless(self._has_happy_share_distribution()))
         return d
@@ -1964,8 +1964,8 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
             self.shouldFail(UploadUnhappinessError,
                             "test_server_selection_bucket_abort",
                             "",
-                            client.upload, upload.Data("data" * 10000,
-                                                       convergence="")))
+                            client.upload, upload.Data(b"data" * 10000,
+                                                       convergence=b"")))
         # wait for the abort messages to get there.
         def _turn_barrier(res):
             return fireEventually(res)

From 0cee40c00948db0c548ca557d0e84df21c07a8e9 Mon Sep 17 00:00:00 2001
From: Itamar Turner-Trauring <itamar@itamarst.org>
Date: Mon, 21 Sep 2020 11:01:51 -0400
Subject: [PATCH 50/68] Even more progress towards test_upload on Python 3.

---
 src/allmydata/storage_client.py   | 11 ++++++---
 src/allmydata/test/test_upload.py | 37 ++++++++++++++++---------------
 2 files changed, 27 insertions(+), 21 deletions(-)

diff --git a/src/allmydata/storage_client.py b/src/allmydata/storage_client.py
index cdce28467..4e06e28fb 100644
--- a/src/allmydata/storage_client.py
+++ b/src/allmydata/storage_client.py
@@ -495,7 +495,10 @@ class _FoolscapStorage(object):
         tubid_s = m.group(1).lower()
         tubid = base32.a2b(tubid_s)
         if "permutation-seed-base32" in ann:
-            ps = base32.a2b(str(ann["permutation-seed-base32"]))
+            seed = ann["permutation-seed-base32"]
+            if isinstance(seed, unicode):
+                seed = seed.encode("utf-8")
+            ps = base32.a2b(seed)
         elif re.search(r'^v0-[0-9a-zA-Z]{52}$', server_id):
             ps = base32.a2b(server_id[3:])
         else:
@@ -510,7 +513,7 @@ class _FoolscapStorage(object):
 
         assert server_id
         long_description = server_id
-        if server_id.startswith("v0-"):
+        if server_id.startswith(b"v0-"):
             # remove v0- prefix from abbreviated name
             short_description = server_id[3:3+8]
         else:
@@ -695,12 +698,14 @@ class NativeStorageServer(service.MultiService):
             # Nope
             pass
         else:
+            if isinstance(furl, unicode):
+                furl = furl.encode("utf-8")
             # See comment above for the _storage_from_foolscap_plugin case
             # about passing in get_rref.
             storage_server = _StorageServer(get_rref=self.get_rref)
             return _FoolscapStorage.from_announcement(
                 self._server_id,
-                furl.encode("utf-8"),
+                furl,
                 ann,
                 storage_server,
             )
diff --git a/src/allmydata/test/test_upload.py b/src/allmydata/test/test_upload.py
index 0dbb39a4f..740c6815d 100644
--- a/src/allmydata/test/test_upload.py
+++ b/src/allmydata/test/test_upload.py
@@ -23,6 +23,7 @@ from allmydata.client import _Client
 from .common import (
     EMPTY_CLIENT_CONFIG,
 )
+from functools import reduce
 
 
 MiB = 1024*1024
@@ -839,10 +840,10 @@ def combinations(iterable, r):
     n = len(pool)
     if r > n:
         return
-    indices = range(r)
+    indices = list(range(r))
     yield tuple(pool[i] for i in indices)
     while True:
-        for i in reversed(range(r)):
+        for i in reversed(list(range(r))):
             if indices[i] != i + n - r:
                 break
         else:
@@ -856,7 +857,7 @@ def is_happy_enough(servertoshnums, h, k):
     """ I calculate whether servertoshnums achieves happiness level h. I do this with a naïve "brute force search" approach. (See src/allmydata/util/happinessutil.py for a better algorithm.) """
     if len(servertoshnums) < h:
         return False
-    for happysetcombo in combinations(servertoshnums.iterkeys(), h):
+    for happysetcombo in combinations(iter(servertoshnums.keys()), h):
         for subsetcombo in combinations(happysetcombo, k):
             shnums = reduce(set.union, [ servertoshnums[s] for s in subsetcombo ])
             if len(shnums) < k:
@@ -887,7 +888,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
         assert self.g, "I tried to find a grid at self.g, but failed"
         servertoshnums = {} # k: server, v: set(shnum)
 
-        for i, c in self.g.servers_by_number.iteritems():
+        for i, c in self.g.servers_by_number.items():
             for (dirp, dirns, fns) in os.walk(c.sharedir):
                 for fn in fns:
                     try:
@@ -927,9 +928,9 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
         def _have_shareholders(upload_trackers_and_already_servers):
             (upload_trackers, already_servers) = upload_trackers_and_already_servers
             assert servers_to_break <= len(upload_trackers)
-            for index in xrange(servers_to_break):
+            for index in range(servers_to_break):
                 tracker = list(upload_trackers)[index]
-                for share in tracker.buckets.keys():
+                for share in list(tracker.buckets.keys()):
                     tracker.buckets[share].abort()
             buckets = {}
             servermap = already_servers.copy()
@@ -1260,7 +1261,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
             self._add_server_with_share(server_number=1, share_number=2))
         # Copy all of the other shares to server number 2
         def _copy_shares(ign):
-            for i in xrange(0, 10):
+            for i in range(0, 10):
                 self._copy_share_to_server(i, 2)
         d.addCallback(_copy_shares)
         # Remove the first server, and add a placeholder with share 0
@@ -1354,7 +1355,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
                                         readonly=True))
         # Copy all of the other shares to server number 2
         def _copy_shares(ign):
-            for i in xrange(1, 10):
+            for i in range(1, 10):
                 self._copy_share_to_server(i, 2)
         d.addCallback(_copy_shares)
         # Remove server 0, and add another in its place
@@ -1397,7 +1398,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
             self._add_server_with_share(server_number=2, share_number=0,
                                         readonly=True))
         def _copy_shares(ign):
-            for i in xrange(1, 10):
+            for i in range(1, 10):
                 self._copy_share_to_server(i, 2)
         d.addCallback(_copy_shares)
         d.addCallback(lambda ign:
@@ -1513,7 +1514,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
         d.addCallback(lambda ign:
             self._add_server(4))
         def _copy_shares(ign):
-            for i in xrange(1, 10):
+            for i in range(1, 10):
                 self._copy_share_to_server(i, 1)
         d.addCallback(_copy_shares)
         d.addCallback(lambda ign:
@@ -1537,7 +1538,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
         self.basedir = self.mktemp()
         d = self._setup_and_upload()
         def _setup(ign):
-            for i in xrange(1, 11):
+            for i in range(1, 11):
                 self._add_server(server_number=i)
             self.g.remove_server(self.g.servers_by_number[0].my_nodeid)
             c = self.g.clients[0]
@@ -1562,7 +1563,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
         d.addCallback(lambda ign:
             self._setup_and_upload())
         def _then(ign):
-            for i in xrange(1, 11):
+            for i in range(1, 11):
                 self._add_server(server_number=i)
             self._add_server(server_number=11, readonly=True)
             self._add_server(server_number=12, readonly=True)
@@ -1588,11 +1589,11 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
             self._setup_and_upload())
 
         def _next(ign):
-            for i in xrange(1, 11):
+            for i in range(1, 11):
                 self._add_server(server_number=i)
             # Copy all of the shares to server 9, since that will be
             # the first one that the selector sees.
-            for i in xrange(10):
+            for i in range(10):
                 self._copy_share_to_server(i, 9)
             # Remove server 0, and its contents
             self.g.remove_server(self.g.servers_by_number[0].my_nodeid)
@@ -1613,7 +1614,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
         self.basedir = self.mktemp()
         d = self._setup_and_upload()
         def _then(ign):
-            for i in xrange(1, 11):
+            for i in range(1, 11):
                 self._add_server(server_number=i, readonly=True)
             self.g.remove_server(self.g.servers_by_number[0].my_nodeid)
             c = self.g.clients[0]
@@ -1936,7 +1937,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
             self._add_server_with_share(server_number=8, share_number=4)
             self._add_server_with_share(server_number=5, share_number=5)
             self._add_server_with_share(server_number=10, share_number=7)
-            for i in xrange(4):
+            for i in range(4):
                 self._copy_share_to_server(i, 2)
             return self.g.clients[0]
         d.addCallback(_server_setup)
@@ -1971,7 +1972,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
             return fireEventually(res)
         d.addCallback(_turn_barrier)
         def _then(ignored):
-            for server in self.g.servers_by_number.values():
+            for server in list(self.g.servers_by_number.values()):
                 self.failUnlessEqual(server.allocated_size(), 0)
         d.addCallback(_then)
         return d
@@ -1997,7 +1998,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
             return fireEventually(res)
         d.addCallback(_turn_barrier)
         def _then(ignored):
-            for server in self.g.servers_by_number.values():
+            for server in list(self.g.servers_by_number.values()):
                 self.failUnlessEqual(server.allocated_size(), 0)
         d.addCallback(_then)
         return d

From 6fe68c792c3df395e4cd330e51bf578f14d51312 Mon Sep 17 00:00:00 2001
From: Itamar Turner-Trauring <itamar@itamarst.org>
Date: Mon, 21 Sep 2020 13:03:29 -0400
Subject: [PATCH 51/68] Tests pass on Python 2 and 3.

---
 src/allmydata/immutable/upload.py |  2 +-
 src/allmydata/storage_client.py   | 16 ++++++++--------
 src/allmydata/test/test_upload.py | 26 +++++++++++++-------------
 3 files changed, 22 insertions(+), 22 deletions(-)

diff --git a/src/allmydata/immutable/upload.py b/src/allmydata/immutable/upload.py
index a6fc02f30..1ab312ab6 100644
--- a/src/allmydata/immutable/upload.py
+++ b/src/allmydata/immutable/upload.py
@@ -1818,7 +1818,7 @@ class Uploader(service.MultiService, log.PrefixingLogMixin):
         self.log("got helper connection, getting versions")
         default = { "http://allmydata.org/tahoe/protocols/helper/v1" :
                     { },
-                    "application-version": "unknown: no get_version()",
+                    "application-version": b"unknown: no get_version()",
                     }
         d = add_version_to_remote_reference(helper, default)
         d.addCallback(self._got_versioned_helper)
diff --git a/src/allmydata/storage_client.py b/src/allmydata/storage_client.py
index 4e06e28fb..df1e4573e 100644
--- a/src/allmydata/storage_client.py
+++ b/src/allmydata/storage_client.py
@@ -625,14 +625,14 @@ class NativeStorageServer(service.MultiService):
     """
 
     VERSION_DEFAULTS = {
-        "http://allmydata.org/tahoe/protocols/storage/v1" :
-        { "maximum-immutable-share-size": 2**32 - 1,
-          "maximum-mutable-share-size": 2*1000*1000*1000, # maximum prior to v1.9.2
-          "tolerates-immutable-read-overrun": False,
-          "delete-mutable-shares-with-zero-length-writev": False,
-          "available-space": None,
+        b"http://allmydata.org/tahoe/protocols/storage/v1" :
+        { b"maximum-immutable-share-size": 2**32 - 1,
+          b"maximum-mutable-share-size": 2*1000*1000*1000, # maximum prior to v1.9.2
+          b"tolerates-immutable-read-overrun": False,
+          b"delete-mutable-shares-with-zero-length-writev": False,
+          b"available-space": None,
           },
-        "application-version": "unknown: no get_version()",
+        b"application-version": "unknown: no get_version()",
         }
 
     def __init__(self, server_id, ann, tub_maker, handler_overrides, node_config, config=StorageClientConfig()):
@@ -773,7 +773,7 @@ class NativeStorageServer(service.MultiService):
         version = self.get_version()
         if version is None:
             return None
-        protocol_v1_version = version.get('http://allmydata.org/tahoe/protocols/storage/v1', {})
+        protocol_v1_version = version.get(b'http://allmydata.org/tahoe/protocols/storage/v1', {})
         available_space = protocol_v1_version.get('available-space')
         if available_space is None:
             available_space = protocol_v1_version.get('maximum-immutable-share-size', None)
diff --git a/src/allmydata/test/test_upload.py b/src/allmydata/test/test_upload.py
index 740c6815d..ca4825990 100644
--- a/src/allmydata/test/test_upload.py
+++ b/src/allmydata/test/test_upload.py
@@ -106,19 +106,19 @@ class FakeStorageServer(object):
         self._alloc_queries = 0
         self._get_queries = 0
         self.version = {
-            "http://allmydata.org/tahoe/protocols/storage/v1" :
+            b"http://allmydata.org/tahoe/protocols/storage/v1" :
             {
-                "maximum-immutable-share-size": 2**32 - 1,
+                b"maximum-immutable-share-size": 2**32 - 1,
             },
-            "application-version": str(allmydata.__full_version__),
+            b"application-version": str(allmydata.__full_version__),
         }
         if mode == "small":
             self.version = {
-                "http://allmydata.org/tahoe/protocols/storage/v1" :
+                b"http://allmydata.org/tahoe/protocols/storage/v1" :
                 {
-                    "maximum-immutable-share-size": 10,
+                    b"maximum-immutable-share-size": 10,
                 },
-                "application-version": str(allmydata.__full_version__),
+                b"application-version": str(allmydata.__full_version__),
             }
 
 
@@ -306,9 +306,9 @@ class GoodServer(unittest.TestCase, ShouldFailMixin, SetDEPMixin):
     def _check_large(self, newuri, size):
         u = uri.from_string(newuri)
         self.failUnless(isinstance(u, uri.CHKFileURI))
-        self.failUnless(isinstance(u.get_storage_index(), str))
+        self.failUnless(isinstance(u.get_storage_index(), bytes))
         self.failUnlessEqual(len(u.get_storage_index()), 16)
-        self.failUnless(isinstance(u.key, str))
+        self.failUnless(isinstance(u.key, bytes))
         self.failUnlessEqual(len(u.key), 16)
         self.failUnlessEqual(u.size, size)
 
@@ -431,9 +431,9 @@ class ServerErrors(unittest.TestCase, ShouldFailMixin, SetDEPMixin):
     def _check_large(self, newuri, size):
         u = uri.from_string(newuri)
         self.failUnless(isinstance(u, uri.CHKFileURI))
-        self.failUnless(isinstance(u.get_storage_index(), str))
+        self.failUnless(isinstance(u.get_storage_index(), bytes))
         self.failUnlessEqual(len(u.get_storage_index()), 16)
-        self.failUnless(isinstance(u.key, str))
+        self.failUnless(isinstance(u.key, bytes))
         self.failUnlessEqual(len(u.key), 16)
         self.failUnlessEqual(u.size, size)
 
@@ -601,9 +601,9 @@ class ServerSelection(unittest.TestCase):
     def _check_large(self, newuri, size):
         u = uri.from_string(newuri)
         self.failUnless(isinstance(u, uri.CHKFileURI))
-        self.failUnless(isinstance(u.get_storage_index(), str))
+        self.failUnless(isinstance(u.get_storage_index(), bytes))
         self.failUnlessEqual(len(u.get_storage_index()), 16)
-        self.failUnless(isinstance(u.key, str))
+        self.failUnless(isinstance(u.key, bytes))
         self.failUnlessEqual(len(u.key), 16)
         self.failUnlessEqual(u.size, size)
 
@@ -1628,7 +1628,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
                             "test_upper_limit_on_readonly_queries",
                             "sent 8 queries to 8 servers",
                             client.upload,
-                            upload.Data('data' * 10000, convergence=b"")))
+                            upload.Data(b'data' * 10000, convergence=b"")))
         return d
 
 

From 40b42441463151465b9c8dee64d7fd8cf5c720b7 Mon Sep 17 00:00:00 2001
From: Itamar Turner-Trauring <itamar@itamarst.org>
Date: Mon, 21 Sep 2020 13:13:26 -0400
Subject: [PATCH 52/68] Port to Python 3.

---
 src/allmydata/test/test_upload.py | 18 +++++++++++++++---
 src/allmydata/util/_python3.py    |  1 +
 2 files changed, 16 insertions(+), 3 deletions(-)

diff --git a/src/allmydata/test/test_upload.py b/src/allmydata/test/test_upload.py
index ca4825990..5ce2a29fb 100644
--- a/src/allmydata/test/test_upload.py
+++ b/src/allmydata/test/test_upload.py
@@ -1,5 +1,17 @@
 # -*- coding: utf-8 -*-
 
+"""
+Ported to Python 3.
+"""
+from __future__ import unicode_literals
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
 import os, shutil
 from io import BytesIO
 
@@ -110,7 +122,7 @@ class FakeStorageServer(object):
             {
                 b"maximum-immutable-share-size": 2**32 - 1,
             },
-            b"application-version": str(allmydata.__full_version__),
+            b"application-version": bytes(allmydata.__full_version__, "ascii"),
         }
         if mode == "small":
             self.version = {
@@ -118,7 +130,7 @@ class FakeStorageServer(object):
                 {
                     b"maximum-immutable-share-size": 10,
                 },
-                b"application-version": str(allmydata.__full_version__),
+                b"application-version": bytes(allmydata.__full_version__, "ascii"),
             }
 
 
@@ -215,7 +227,7 @@ class FakeClient(object):
     def __init__(self, mode="good", num_servers=50, reactor=None):
         self.num_servers = num_servers
         self.encoding_params = self.DEFAULT_ENCODING_PARAMETERS.copy()
-        if type(mode) is str:
+        if isinstance(mode, str):
             mode = dict([i,mode] for i in range(num_servers))
         servers = [
             (b"%20d" % fakeid, FakeStorageServer(mode[fakeid], reactor=reactor))
diff --git a/src/allmydata/util/_python3.py b/src/allmydata/util/_python3.py
index d16f05a9a..fc2530f15 100644
--- a/src/allmydata/util/_python3.py
+++ b/src/allmydata/util/_python3.py
@@ -104,6 +104,7 @@ PORTED_TEST_MODULES = [
     "allmydata.test.test_storage",
     "allmydata.test.test_storage_web",
     "allmydata.test.test_time_format",
+    "allmydata.test.test_upload",
     "allmydata.test.test_uri",
     "allmydata.test.test_util",
     "allmydata.test.test_version",

From 199fbfcc1c27020fcc725f7e2e56f7d1fcd71193 Mon Sep 17 00:00:00 2001
From: Sajith Sasidharan <sajith@hcoop.net>
Date: Mon, 21 Sep 2020 13:22:00 -0400
Subject: [PATCH 53/68] Return error pages when an exception is raised in
 filenode

---
 newsfragments/3423.minor      |  0
 src/allmydata/web/filenode.py | 13 +++++++++++++
 2 files changed, 13 insertions(+)
 create mode 100644 newsfragments/3423.minor

diff --git a/newsfragments/3423.minor b/newsfragments/3423.minor
new file mode 100644
index 000000000..e69de29bb
diff --git a/src/allmydata/web/filenode.py b/src/allmydata/web/filenode.py
index 9233926db..0ecc8cc52 100644
--- a/src/allmydata/web/filenode.py
+++ b/src/allmydata/web/filenode.py
@@ -24,12 +24,14 @@ from allmydata.blacklist import (
 
 from allmydata.web.common import (
     boolean_of_arg,
+    exception_to_child,
     get_arg,
     get_filenode_metadata,
     get_format,
     get_mutable_type,
     parse_offset_arg,
     parse_replace_arg,
+    render_exception,
     should_create_intermediate_directories,
     text_plain,
     MyExceptionHandler,
@@ -115,6 +117,7 @@ class PlaceHolderNodeHandler(Resource, ReplaceMeMixin):
         self.name = name
         self.node = None
 
+    @render_exception
     def render_PUT(self, req):
         t = get_arg(req, "t", "").strip()
         replace = parse_replace_arg(get_arg(req, "replace", "true"))
@@ -130,6 +133,7 @@ class PlaceHolderNodeHandler(Resource, ReplaceMeMixin):
 
         raise WebError("PUT to a file: bad t=%s" % t)
 
+    @render_exception
     def render_POST(self, req):
         t = get_arg(req, "t", "").strip()
         replace = boolean_of_arg(get_arg(req, "replace", "true"))
@@ -161,6 +165,7 @@ class FileNodeHandler(Resource, ReplaceMeMixin, object):
         self.parentnode = parentnode
         self.name = name
 
+    @exception_to_child
     def getChild(self, name, req):
         if isinstance(self.node, ProhibitedNode):
             raise FileProhibited(self.node.reason)
@@ -177,6 +182,7 @@ class FileNodeHandler(Resource, ReplaceMeMixin, object):
             "no details",
         )
 
+    @render_exception
     def render_GET(self, req):
         t = get_arg(req, "t", "").strip()
 
@@ -234,6 +240,7 @@ class FileNodeHandler(Resource, ReplaceMeMixin, object):
             return _file_read_only_uri(req, self.node)
         raise WebError("GET file: bad t=%s" % t)
 
+    @render_exception
     def render_HEAD(self, req):
         t = get_arg(req, "t", "").strip()
         if t:
@@ -243,6 +250,7 @@ class FileNodeHandler(Resource, ReplaceMeMixin, object):
         d.addCallback(lambda dn: FileDownloader(dn, filename))
         return d
 
+    @render_exception
     def render_PUT(self, req):
         t = get_arg(req, "t", "").strip()
         replace = parse_replace_arg(get_arg(req, "replace", "true"))
@@ -285,6 +293,7 @@ class FileNodeHandler(Resource, ReplaceMeMixin, object):
 
         raise WebError("PUT to a file: bad t=%s" % t)
 
+    @render_exception
     def render_POST(self, req):
         t = get_arg(req, "t", "").strip()
         replace = boolean_of_arg(get_arg(req, "replace", "true"))
@@ -328,6 +337,7 @@ class FileNodeHandler(Resource, ReplaceMeMixin, object):
             d.addCallback(self._maybe_literal, CheckResultsRenderer)
         return d
 
+    @render_exception
     def render_DELETE(self, req):
         assert self.parentnode and self.name
         d = self.parentnode.delete(self.name)
@@ -419,6 +429,7 @@ class FileDownloader(Resource, object):
         except ValueError:
             return None
 
+    @render_exception
     def render(self, req):
         gte = static.getTypeAndEncoding
         ctype, encoding = gte(self.filename,
@@ -541,5 +552,7 @@ def _file_read_only_uri(req, filenode):
 
 
 class FileNodeDownloadHandler(FileNodeHandler):
+
+    @exception_to_child
     def getChild(self, name, req):
         return FileNodeDownloadHandler(self.client, self.node, name=name)

From 0e139114f760bc0aa0788e2f492ca8b313ff1440 Mon Sep 17 00:00:00 2001
From: Jean-Paul Calderone <exarkun@twistedmatrix.com>
Date: Mon, 21 Sep 2020 14:07:11 -0400
Subject: [PATCH 54/68] add a limited amount of missing test coverage for
 humanize_exception

---
 src/allmydata/test/web/test_web.py | 40 +++++++++++++++++++++++++++++-
 1 file changed, 39 insertions(+), 1 deletion(-)

diff --git a/src/allmydata/test/web/test_web.py b/src/allmydata/test/web/test_web.py
index 11c5a8c5c..8189542c7 100644
--- a/src/allmydata/test/web/test_web.py
+++ b/src/allmydata/test/web/test_web.py
@@ -59,7 +59,11 @@ from .common import (
     unknown_immcap,
 )
 
-from allmydata.interfaces import IMutableFileNode, SDMF_VERSION, MDMF_VERSION
+from allmydata.interfaces import (
+    IMutableFileNode, SDMF_VERSION, MDMF_VERSION,
+    FileTooLargeError,
+    MustBeReadonlyError,
+)
 from allmydata.mutable import servermap, publish, retrieve
 from .. import common_util as testutil
 from ..common_py3 import TimezoneMixin
@@ -67,6 +71,10 @@ from ..common_web import (
     do_http,
     Error,
 )
+from ...web.common import (
+    humanize_exception,
+)
+
 from allmydata.client import _Client, SecretHolder
 
 # create a fake uploader/downloader, and a couple of fake dirnodes, then
@@ -4790,3 +4798,33 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
         # doesn't reveal anything. This addresses #1720.
         d.addCallback(lambda e: self.assertEquals(str(e), "404 Not Found"))
         return d
+
+
+class HumanizeExceptionTests(TrialTestCase):
+    """
+    Tests for ``humanize_exception``.
+    """
+    def test_mustbereadonly(self):
+        """
+        ``humanize_exception`` describes ``MustBeReadonlyError``.
+        """
+        text, code = humanize_exception(
+            MustBeReadonlyError(
+                "URI:DIR2 directory writecap used in a read-only context",
+                "<unknown name>",
+            ),
+        )
+        self.assertIn("MustBeReadonlyError", text)
+        self.assertEqual(code, http.BAD_REQUEST)
+
+    def test_filetoolarge(self):
+        """
+        ``humanize_exception`` describes ``FileTooLargeError``.
+        """
+        text, code = humanize_exception(
+            FileTooLargeError(
+                "This file is too large to be uploaded (data_size).",
+            ),
+        )
+        self.assertIn("FileTooLargeError", text)
+        self.assertEqual(code, http.REQUEST_ENTITY_TOO_LARGE)

From e8f7643a8f70fc0eb24972a586f71901f9209de8 Mon Sep 17 00:00:00 2001
From: Sajith Sasidharan <sajith@hcoop.net>
Date: Mon, 21 Sep 2020 16:26:51 -0400
Subject: [PATCH 55/68] Return ErrorPage when web.operations raises an
 exception

---
 newsfragments/3424.minor        | 0
 src/allmydata/web/operations.py | 2 ++
 2 files changed, 2 insertions(+)
 create mode 100644 newsfragments/3424.minor

diff --git a/newsfragments/3424.minor b/newsfragments/3424.minor
new file mode 100644
index 000000000..e69de29bb
diff --git a/src/allmydata/web/operations.py b/src/allmydata/web/operations.py
index 4dcad0028..21c2ec7ef 100644
--- a/src/allmydata/web/operations.py
+++ b/src/allmydata/web/operations.py
@@ -17,6 +17,7 @@ from allmydata.web.common import (
     get_root,
     get_arg,
     boolean_of_arg,
+    exception_to_child,
 )
 
 MINUTE = 60
@@ -95,6 +96,7 @@ class OphandleTable(resource.Resource, service.Service):
         # https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3314
         return url.URL.fromString(target)
 
+    @exception_to_child
     def getChild(self, name, req):
         ophandle = name
         if ophandle not in self.handles:

From c16cf00c6bd6a306373ad58e2808fe3d6ef2487f Mon Sep 17 00:00:00 2001
From: Sajith Sasidharan <sajith@hcoop.net>
Date: Mon, 21 Sep 2020 16:37:36 -0400
Subject: [PATCH 56/68] Return an error page when an exception is raised from
 web.root

---
 newsfragments/3425.minor  |  0
 src/allmydata/web/root.py | 12 ++++++++++++
 2 files changed, 12 insertions(+)
 create mode 100644 newsfragments/3425.minor

diff --git a/newsfragments/3425.minor b/newsfragments/3425.minor
new file mode 100644
index 000000000..e69de29bb
diff --git a/src/allmydata/web/root.py b/src/allmydata/web/root.py
index d6fe6863d..78daadef4 100644
--- a/src/allmydata/web/root.py
+++ b/src/allmydata/web/root.py
@@ -34,11 +34,13 @@ from allmydata.web import storage
 from allmydata.web.common import (
     abbreviate_size,
     WebError,
+    exception_to_child,
     get_arg,
     MultiFormatResource,
     SlotsSequenceElement,
     get_format,
     get_mutable_type,
+    render_exception,
     render_time_delta,
     render_time,
     render_time_attr,
@@ -58,6 +60,7 @@ class URIHandler(resource.Resource, object):
         super(URIHandler, self).__init__()
         self.client = client
 
+    @render_exception
     def render_GET(self, req):
         """
         Historically, accessing this via "GET /uri?uri=<capabilitiy>"
@@ -88,6 +91,7 @@ class URIHandler(resource.Resource, object):
                     redir_uri = redir_uri.add(k.decode('utf8'), v.decode('utf8'))
         return redirectTo(redir_uri.to_text().encode('utf8'), req)
 
+    @render_exception
     def render_PUT(self, req):
         """
         either "PUT /uri" to create an unlinked file, or
@@ -109,6 +113,7 @@ class URIHandler(resource.Resource, object):
         )
         raise WebError(errmsg, http.BAD_REQUEST)
 
+    @render_exception
     def render_POST(self, req):
         """
         "POST /uri?t=upload&file=newfile" to upload an
@@ -135,6 +140,7 @@ class URIHandler(resource.Resource, object):
                   "and POST?t=mkdir")
         raise WebError(errmsg, http.BAD_REQUEST)
 
+    @exception_to_child
     def getChild(self, name, req):
         """
         Most requests look like /uri/<cap> so this fetches the capability
@@ -167,6 +173,7 @@ class FileHandler(resource.Resource, object):
         super(FileHandler, self).__init__()
         self.client = client
 
+    @exception_to_child
     def getChild(self, name, req):
         if req.method not in ("GET", "HEAD"):
             raise WebError("/file can only be used with GET or HEAD")
@@ -181,6 +188,7 @@ class FileHandler(resource.Resource, object):
             raise WebError("'%s' is not a file-cap" % name)
         return filenode.FileNodeDownloadHandler(self.client, node)
 
+    @render_exception
     def render_GET(self, ctx):
         raise WebError("/file must be followed by a file-cap and a name",
                        http.NOT_FOUND)
@@ -188,6 +196,7 @@ class FileHandler(resource.Resource, object):
 class IncidentReporter(MultiFormatResource):
     """Handler for /report_incident POST request"""
 
+    @render_exception
     def render(self, req):
         if req.method != "POST":
             raise WebError("/report_incident can only be used with POST")
@@ -236,6 +245,7 @@ class Root(MultiFormatResource):
 
         self.putChild("report_incident", IncidentReporter())
 
+    @exception_to_child
     def getChild(self, path, request):
         if not path:
             # Render "/" path.
@@ -254,9 +264,11 @@ class Root(MultiFormatResource):
                 storage_server = None
             return storage.StorageStatus(storage_server, self._client.nickname)
 
+    @render_exception
     def render_HTML(self, req):
         return renderElement(req, RootElement(self._client, self._now_fn))
 
+    @render_exception
     def render_JSON(self, req):
         req.setHeader("content-type", "application/json; charset=utf-8")
         intro_summaries = [s.summary for s in self._client.introducer_connection_statuses()]

From 20101f8146ecc10c788cdafa134088f255d0aba7 Mon Sep 17 00:00:00 2001
From: Sajith Sasidharan <sajith@hcoop.net>
Date: Mon, 21 Sep 2020 16:51:16 -0400
Subject: [PATCH 57/68] Return ErrorPage when an exception is raised in
 web.status

---
 newsfragments/3426.minor    |  0
 src/allmydata/web/status.py | 15 +++++++++++++++
 2 files changed, 15 insertions(+)
 create mode 100644 newsfragments/3426.minor

diff --git a/newsfragments/3426.minor b/newsfragments/3426.minor
new file mode 100644
index 000000000..e69de29bb
diff --git a/src/allmydata/web/status.py b/src/allmydata/web/status.py
index 3a89e8f5e..7f6020a99 100644
--- a/src/allmydata/web/status.py
+++ b/src/allmydata/web/status.py
@@ -18,8 +18,10 @@ from allmydata.web.common import (
     abbreviate_time,
     abbreviate_rate,
     abbreviate_size,
+    exception_to_child,
     plural,
     compute_rate,
+    render_exception,
     render_time,
     MultiFormatResource,
     SlotsSequenceElement,
@@ -192,6 +194,7 @@ class UploadStatusPage(Resource, object):
         super(UploadStatusPage, self).__init__()
         self._upload_status = upload_status
 
+    @render_exception
     def render_GET(self, req):
         elem = UploadStatusElement(self._upload_status)
         return renderElement(req, elem)
@@ -367,6 +370,7 @@ class _EventJson(Resource, object):
     def __init__(self, download_status):
         self._download_status = download_status
 
+    @render_exception
     def render(self, request):
         request.setHeader("content-type", "text/plain")
         data = { } # this will be returned to the GET
@@ -424,6 +428,7 @@ class DownloadStatusPage(Resource, object):
         self._download_status = download_status
         self.putChild("event_json", _EventJson(self._download_status))
 
+    @render_exception
     def render_GET(self, req):
         elem = DownloadStatusElement(self._download_status)
         return renderElement(req, elem)
@@ -806,6 +811,7 @@ class RetrieveStatusPage(MultiFormatResource):
         super(RetrieveStatusPage, self).__init__()
         self._retrieve_status = retrieve_status
 
+    @render_exception
     def render_HTML(self, req):
         elem = RetrieveStatusElement(self._retrieve_status)
         return renderElement(req, elem)
@@ -929,6 +935,7 @@ class PublishStatusPage(MultiFormatResource):
         super(PublishStatusPage, self).__init__()
         self._publish_status = publish_status
 
+    @render_exception
     def render_HTML(self, req):
         elem = PublishStatusElement(self._publish_status);
         return renderElement(req, elem)
@@ -1087,6 +1094,7 @@ class MapupdateStatusPage(MultiFormatResource):
         super(MapupdateStatusPage, self).__init__()
         self._update_status = update_status
 
+    @render_exception
     def render_HTML(self, req):
         elem = MapupdateStatusElement(self._update_status);
         return renderElement(req, elem)
@@ -1248,11 +1256,13 @@ class Status(MultiFormatResource):
         super(Status, self).__init__()
         self.history = history
 
+    @render_exception
     def render_HTML(self, req):
         elem = StatusElement(self._get_active_operations(),
                              self._get_recent_operations())
         return renderElement(req, elem)
 
+    @render_exception
     def render_JSON(self, req):
         # modern browsers now render this instead of forcing downloads
         req.setHeader("content-type", "application/json")
@@ -1268,6 +1278,7 @@ class Status(MultiFormatResource):
 
         return json.dumps(data, indent=1) + "\n"
 
+    @exception_to_child
     def getChild(self, path, request):
         # The "if (path is empty) return self" line should handle
         # trailing slash in request path.
@@ -1420,9 +1431,11 @@ class HelperStatus(MultiFormatResource):
         super(HelperStatus, self).__init__()
         self._helper = helper
 
+    @render_exception
     def render_HTML(self, req):
         return renderElement(req, HelperStatusElement(self._helper))
 
+    @render_exception
     def render_JSON(self, req):
         req.setHeader("content-type", "text/plain")
         if self._helper:
@@ -1512,9 +1525,11 @@ class Statistics(MultiFormatResource):
         super(Statistics, self).__init__()
         self._provider = provider
 
+    @render_exception
     def render_HTML(self, req):
         return renderElement(req, StatisticsElement(self._provider))
 
+    @render_exception
     def render_JSON(self, req):
         stats = self._provider.get_stats()
         req.setHeader("content-type", "text/plain")

From 18e56e41fcd7f626e69198e864041121184f94a8 Mon Sep 17 00:00:00 2001
From: Sajith Sasidharan <sajith@hcoop.net>
Date: Mon, 21 Sep 2020 18:18:29 -0400
Subject: [PATCH 58/68] Return error page when an exception is raised from
 web.check_results

---
 newsfragments/3429.minor           |  0
 src/allmydata/web/check_results.py | 14 ++++++++++++++
 2 files changed, 14 insertions(+)
 create mode 100644 newsfragments/3429.minor

diff --git a/newsfragments/3429.minor b/newsfragments/3429.minor
new file mode 100644
index 000000000..e69de29bb
diff --git a/src/allmydata/web/check_results.py b/src/allmydata/web/check_results.py
index 500ac15a7..7c4723333 100644
--- a/src/allmydata/web/check_results.py
+++ b/src/allmydata/web/check_results.py
@@ -15,8 +15,10 @@ from twisted.web.template import (
     tags,
 )
 from allmydata.web.common import (
+    exception_to_child,
     get_arg,
     get_root,
+    render_exception,
     WebError,
     MultiFormatResource,
     SlotsSequenceElement,
@@ -224,9 +226,11 @@ class LiteralCheckResultsRenderer(MultiFormatResource, ResultsBase):
         super(LiteralCheckResultsRenderer, self).__init__()
         self._client = client
 
+    @render_exception
     def render_HTML(self, req):
         return renderElement(req, LiteralCheckResultsRendererElement())
 
+    @render_exception
     def render_JSON(self, req):
         req.setHeader("content-type", "text/plain")
         data = json_check_results(None)
@@ -275,9 +279,11 @@ class CheckResultsRenderer(MultiFormatResource):
         self._client = client
         self._results = ICheckResults(results)
 
+    @render_exception
     def render_HTML(self, req):
         return renderElement(req, CheckResultsRendererElement(self._client, self._results))
 
+    @render_exception
     def render_JSON(self, req):
         req.setHeader("content-type", "text/plain")
         data = json_check_results(self._results)
@@ -343,10 +349,12 @@ class CheckAndRepairResultsRenderer(MultiFormatResource):
         if results:
             self._results = ICheckAndRepairResults(results)
 
+    @render_exception
     def render_HTML(self, req):
         elem = CheckAndRepairResultsRendererElement(self._client, self._results)
         return renderElement(req, elem)
 
+    @render_exception
     def render_JSON(self, req):
         req.setHeader("content-type", "text/plain")
         data = json_check_and_repair_results(self._results)
@@ -411,6 +419,7 @@ class DeepCheckResultsRenderer(MultiFormatResource):
         self._client = client
         self.monitor = monitor
 
+    @exception_to_child
     def getChild(self, name, req):
         if not name:
             return self
@@ -425,10 +434,12 @@ class DeepCheckResultsRenderer(MultiFormatResource):
             raise WebError("No detailed results for SI %s" % html.escape(name),
                            http.NOT_FOUND)
 
+    @render_exception
     def render_HTML(self, req):
         elem = DeepCheckResultsRendererElement(self.monitor)
         return renderElement(req, elem)
 
+    @render_exception
     def render_JSON(self, req):
         req.setHeader("content-type", "text/plain")
         data = {}
@@ -615,6 +626,7 @@ class DeepCheckAndRepairResultsRenderer(MultiFormatResource):
         self._client = client
         self.monitor = monitor
 
+    @exception_to_child
     def getChild(self, name, req):
         if not name:
             return self
@@ -629,10 +641,12 @@ class DeepCheckAndRepairResultsRenderer(MultiFormatResource):
             raise WebError("No detailed results for SI %s" % html.escape(name),
                            http.NOT_FOUND)
 
+    @render_exception
     def render_HTML(self, req):
         elem = DeepCheckAndRepairResultsRendererElement(self.monitor)
         return renderElement(req, elem)
 
+    @render_exception
     def render_JSON(self, req):
         req.setHeader("content-type", "text/plain")
         res = self.monitor.get_status()

From fd5436b867366da813945ca547717a3ba3f5dc53 Mon Sep 17 00:00:00 2001
From: Sajith Sasidharan <sajith@hcoop.net>
Date: Mon, 21 Sep 2020 18:34:53 -0400
Subject: [PATCH 59/68] Expect ErrorPage in RenderSlashUri tests

---
 src/allmydata/test/web/test_root.py | 26 ++++++++++++++++++++++----
 1 file changed, 22 insertions(+), 4 deletions(-)

diff --git a/src/allmydata/test/web/test_root.py b/src/allmydata/test/web/test_root.py
index 5a888587c..426d408b8 100644
--- a/src/allmydata/test/web/test_root.py
+++ b/src/allmydata/test/web/test_root.py
@@ -57,8 +57,17 @@ class RenderSlashUri(unittest.TestCase):
         A (trivially) invalid capbility is an error
         """
         self.request.args[b"uri"] = [b"not a capability"]
-        with self.assertRaises(WebError):
-            self.res.render_GET(self.request)
+        response_body = self.res.render_GET(self.request)
+
+        self.assertIn(
+            "<title>400 - Error</title>", response_body,
+        )
+        self.assertIn(
+            "<h1>Error</h1>", response_body,
+        )
+        self.assertIn(
+            "<p>Invalid capability</p>", response_body,
+        )
 
     @given(
         text()
@@ -68,8 +77,17 @@ class RenderSlashUri(unittest.TestCase):
         Let hypothesis try a bunch of invalid capabilities
         """
         self.request.args[b"uri"] = [cap.encode('utf8')]
-        with self.assertRaises(WebError):
-            self.res.render_GET(self.request)
+        response_body = self.res.render_GET(self.request)
+
+        self.assertIn(
+            "<title>400 - Error</title>", response_body,
+        )
+        self.assertIn(
+            "<h1>Error</h1>", response_body,
+        )
+        self.assertIn(
+            "<p>Invalid capability</p>", response_body,
+        )
 
 
 class RenderServiceRow(unittest.TestCase):

From 136a70217c34bd4e0b9a4c2c553e83248ae2f0a9 Mon Sep 17 00:00:00 2001
From: Sajith Sasidharan <sajith@hcoop.net>
Date: Mon, 21 Sep 2020 20:44:49 -0400
Subject: [PATCH 60/68] Remove unused import

---
 src/allmydata/test/web/test_root.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/src/allmydata/test/web/test_root.py b/src/allmydata/test/web/test_root.py
index 426d408b8..df6b0cebc 100644
--- a/src/allmydata/test/web/test_root.py
+++ b/src/allmydata/test/web/test_root.py
@@ -14,7 +14,6 @@ from ...storage_client import (
 from ...web.root import RootElement
 from ...util.connection_status import ConnectionStatus
 from allmydata.web.root import URIHandler
-from allmydata.web.common import WebError
 from allmydata.client import _Client
 
 from hypothesis import given

From a413eb862610ffd5faa01c24005053a4700f70e2 Mon Sep 17 00:00:00 2001
From: Sajith Sasidharan <sajith@hcoop.net>
Date: Wed, 23 Sep 2020 08:32:19 -0400
Subject: [PATCH 61/68] Use BeautifulSoup in RenderSlashUri tests

---
 src/allmydata/test/web/test_root.py | 33 ++++++++++++++++++-----------
 1 file changed, 21 insertions(+), 12 deletions(-)

diff --git a/src/allmydata/test/web/test_root.py b/src/allmydata/test/web/test_root.py
index df6b0cebc..1a29b7a15 100644
--- a/src/allmydata/test/web/test_root.py
+++ b/src/allmydata/test/web/test_root.py
@@ -2,6 +2,8 @@ from mock import Mock
 
 import time
 
+from bs4 import BeautifulSoup
+
 from twisted.trial import unittest
 from twisted.web.template import Tag
 from twisted.web.test.requesthelper import DummyRequest
@@ -19,6 +21,9 @@ from allmydata.client import _Client
 from hypothesis import given
 from hypothesis.strategies import text
 
+from .common import (
+    assert_soup_has_tag_with_content,
+)
 
 from ..common import (
     EMPTY_CLIENT_CONFIG,
@@ -58,14 +63,16 @@ class RenderSlashUri(unittest.TestCase):
         self.request.args[b"uri"] = [b"not a capability"]
         response_body = self.res.render_GET(self.request)
 
-        self.assertIn(
-            "<title>400 - Error</title>", response_body,
+        soup = BeautifulSoup(response_body, 'html5lib')
+
+        assert_soup_has_tag_with_content(
+            self, soup, "title", "400 - Error",
         )
-        self.assertIn(
-            "<h1>Error</h1>", response_body,
+        assert_soup_has_tag_with_content(
+            self, soup, "h1", "Error",
         )
-        self.assertIn(
-            "<p>Invalid capability</p>", response_body,
+        assert_soup_has_tag_with_content(
+            self, soup, "p", "Invalid capability",
         )
 
     @given(
@@ -78,14 +85,16 @@ class RenderSlashUri(unittest.TestCase):
         self.request.args[b"uri"] = [cap.encode('utf8')]
         response_body = self.res.render_GET(self.request)
 
-        self.assertIn(
-            "<title>400 - Error</title>", response_body,
+        soup = BeautifulSoup(response_body, 'html5lib')
+
+        assert_soup_has_tag_with_content(
+            self, soup, "title", "400 - Error",
         )
-        self.assertIn(
-            "<h1>Error</h1>", response_body,
+        assert_soup_has_tag_with_content(
+            self, soup, "h1", "Error",
         )
-        self.assertIn(
-            "<p>Invalid capability</p>", response_body,
+        assert_soup_has_tag_with_content(
+            self, soup, "p", "Invalid capability",
         )
 
 

From 2645675649d3ba6b7e7b5381b4e1493fa11b4fee Mon Sep 17 00:00:00 2001
From: Ross Patterson <me@rpatterson.net>
Date: Wed, 23 Sep 2020 10:31:42 -0700
Subject: [PATCH 62/68] build(make): Exclude changelog entry from NEWS

This includes only developer-oriented changes.
---
 newsfragments/{3421.other => 3421.minor} | 0
 1 file changed, 0 insertions(+), 0 deletions(-)
 rename newsfragments/{3421.other => 3421.minor} (100%)

diff --git a/newsfragments/3421.other b/newsfragments/3421.minor
similarity index 100%
rename from newsfragments/3421.other
rename to newsfragments/3421.minor

From 5fbbb5be0f8fa0c95d004e950e4d78e256d8acdd Mon Sep 17 00:00:00 2001
From: Itamar Turner-Trauring <itamar@itamarst.org>
Date: Wed, 23 Sep 2020 15:21:47 -0400
Subject: [PATCH 63/68] News file.

---
 newsfragments/3430.minor | 0
 1 file changed, 0 insertions(+), 0 deletions(-)
 create mode 100644 newsfragments/3430.minor

diff --git a/newsfragments/3430.minor b/newsfragments/3430.minor
new file mode 100644
index 000000000..e69de29bb

From ed049da34f663de4c1b4d9cc0654f958923eaf13 Mon Sep 17 00:00:00 2001
From: Jean-Paul Calderone <exarkun@twistedmatrix.com>
Date: Wed, 23 Sep 2020 16:24:40 -0400
Subject: [PATCH 64/68] Add a helper that can render a Page or a Resource

---
 src/allmydata/test/common_web.py | 60 ++++++++++++++++++++++++++++++--
 1 file changed, 57 insertions(+), 3 deletions(-)

diff --git a/src/allmydata/test/common_web.py b/src/allmydata/test/common_web.py
index 1c9312689..d11e39383 100644
--- a/src/allmydata/test/common_web.py
+++ b/src/allmydata/test/common_web.py
@@ -1,9 +1,33 @@
 
 import treq
-from twisted.internet import defer
+from twisted.internet.defer import (
+    maybeDeferred,
+    inlineCallbacks,
+    returnValue,
+    succeed,
+)
 from twisted.web.error import Error
+from twisted.web.resource import (
+    IResource as ITwistedResource,
+)
+from twisted.web.test.requesthelper import (
+    DummyRequest,
+)
 
-@defer.inlineCallbacks
+from nevow.context import WebContext
+from nevow.testutil import FakeRequest
+from nevow.appserver import (
+    processingFailed,
+    DefaultExceptionHandler,
+)
+from nevow.inevow import (
+    ICanHandleException,
+    IRequest,
+    IResource as INevowResource,
+    IData,
+)
+
+@inlineCallbacks
 def do_http(method, url, **kwargs):
     response = yield treq.request(method, url, persistent=False, **kwargs)
     body = yield treq.content(response)
@@ -11,4 +35,34 @@ def do_http(method, url, **kwargs):
     # https://github.com/twisted/treq/pull/159 has landed
     if 400 <= response.code < 600:
         raise Error(response.code, response=body)
-    defer.returnValue(body)
+    returnValue(body)
+
+
+def render(resource, **query_args):
+    """
+    Render (in the manner of the Nevow appserver) a Nevow ``Page`` or a
+    Twisted ``Resource`` against a request with the given query arguments .
+
+    :param resource: The page or resource to render.
+
+    :param query_args: The query arguments to put into the request being
+        rendered.  A mapping from ``bytes`` to ``list`` of ``bytes``.
+
+    :return Deferred: A Deferred that fires with the rendered response body as
+        ``bytes``.
+    """
+    ctx = WebContext(tag=resource)
+    req = FakeRequest(args=query_args)
+    ctx.remember(DefaultExceptionHandler(), ICanHandleException)
+    ctx.remember(req, IRequest)
+    ctx.remember(None, IData)
+
+    def maybe_concat(res):
+        if isinstance(res, bytes):
+            return req.v + res
+        return req.v
+
+    resource = INevowResource(resource)
+    d = maybeDeferred(resource.renderHTTP, ctx)
+    d.addCallback(maybe_concat)
+    return d

From 34bade409117eb27efb3741afd6870d87c592409 Mon Sep 17 00:00:00 2001
From: Jean-Paul Calderone <exarkun@twistedmatrix.com>
Date: Wed, 23 Sep 2020 16:25:38 -0400
Subject: [PATCH 65/68] Use the newly introduced helper instead of our own
 internal thing

---
 src/allmydata/test/common_web.py   |  2 +-
 src/allmydata/test/web/test_web.py | 42 ++++--------------------------
 2 files changed, 6 insertions(+), 38 deletions(-)

diff --git a/src/allmydata/test/common_web.py b/src/allmydata/test/common_web.py
index d11e39383..414fbabdb 100644
--- a/src/allmydata/test/common_web.py
+++ b/src/allmydata/test/common_web.py
@@ -38,7 +38,7 @@ def do_http(method, url, **kwargs):
     returnValue(body)
 
 
-def render(resource, **query_args):
+def render(resource, query_args):
     """
     Render (in the manner of the Nevow appserver) a Nevow ``Page`` or a
     Twisted ``Resource`` against a request with the given query arguments .
diff --git a/src/allmydata/test/web/test_web.py b/src/allmydata/test/web/test_web.py
index 8189542c7..b032f29a1 100644
--- a/src/allmydata/test/web/test_web.py
+++ b/src/allmydata/test/web/test_web.py
@@ -8,24 +8,13 @@ from bs4 import BeautifulSoup
 
 from twisted.application import service
 from twisted.internet import defer
-from twisted.internet.defer import inlineCallbacks, returnValue, maybeDeferred
+from twisted.internet.defer import inlineCallbacks, returnValue
 from twisted.internet.task import Clock
 from twisted.web import client, error, http
 from twisted.python import failure, log
 
-from nevow.context import WebContext
-from nevow.inevow import (
-    ICanHandleException,
-    IRequest,
-    IData,
-)
 from nevow.util import escapeToXML
 from nevow.loaders import stan
-from nevow.testutil import FakeRequest
-from nevow.appserver import (
-    processingFailed,
-    DefaultExceptionHandler,
-)
 
 from allmydata import interfaces, uri, webish
 from allmydata.storage_client import StorageFarmBroker, StubServer
@@ -70,6 +59,7 @@ from ..common_py3 import TimezoneMixin
 from ..common_web import (
     do_http,
     Error,
+    render,
 )
 from ...web.common import (
     humanize_exception,
@@ -670,6 +660,9 @@ class MultiFormatPageTests(TrialTestCase):
     """
     Tests for ``MultiFormatPage``.
     """
+    def render(self, resource, **queryargs):
+        return self.successResultOf(render(resource, queryargs))
+
     def resource(self):
         """
         Create and return an instance of a ``MultiFormatPage`` subclass with two
@@ -686,31 +679,6 @@ class MultiFormatPageTests(TrialTestCase):
         return Content()
 
 
-    def render(self, resource, **query_args):
-        """
-        Render a Nevow ``Page`` against a request with the given query arguments.
-
-        :param resource: The Nevow resource to render.
-
-        :param query_args: The query arguments to put into the request being
-            rendered.  A mapping from ``bytes`` to ``list`` of ``bytes``.
-
-        :return: The rendered response body as ``bytes``.
-        """
-        ctx = WebContext(tag=resource)
-        req = FakeRequest(args=query_args)
-        ctx.remember(DefaultExceptionHandler(), ICanHandleException)
-        ctx.remember(req, IRequest)
-        ctx.remember(None, IData)
-
-        d = maybeDeferred(resource.renderHTTP, ctx)
-        d.addErrback(processingFailed, req, ctx)
-        res = self.successResultOf(d)
-        if isinstance(res, bytes):
-            return req.v + res
-        return req.v
-
-
     def test_select_format(self):
         """
         The ``formatArgument`` attribute of a ``MultiFormatPage`` subclass

From e7101b1711154526740d267df1fe353e9b21ae08 Mon Sep 17 00:00:00 2001
From: Jean-Paul Calderone <exarkun@twistedmatrix.com>
Date: Wed, 23 Sep 2020 18:38:26 -0400
Subject: [PATCH 66/68] Turn exceptions into error pages like Nevow does

---
 src/allmydata/test/common_web.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/src/allmydata/test/common_web.py b/src/allmydata/test/common_web.py
index 414fbabdb..de4d3e820 100644
--- a/src/allmydata/test/common_web.py
+++ b/src/allmydata/test/common_web.py
@@ -64,5 +64,6 @@ def render(resource, query_args):
 
     resource = INevowResource(resource)
     d = maybeDeferred(resource.renderHTTP, ctx)
+    d.addErrback(processingFailed, req, ctx)
     d.addCallback(maybe_concat)
     return d

From 67adb9303bf66e55dafff309326bfd2098a6b36d Mon Sep 17 00:00:00 2001
From: Jean-Paul Calderone <exarkun@twistedmatrix.com>
Date: Thu, 24 Sep 2020 10:23:34 -0400
Subject: [PATCH 67/68] news fragment

---
 newsfragments/3440.minor | 0
 1 file changed, 0 insertions(+), 0 deletions(-)
 create mode 100644 newsfragments/3440.minor

diff --git a/newsfragments/3440.minor b/newsfragments/3440.minor
new file mode 100644
index 000000000..e69de29bb

From f1bcf4eb077190623c1490d07baa92bff4d2ab29 Mon Sep 17 00:00:00 2001
From: Jean-Paul Calderone <exarkun@twistedmatrix.com>
Date: Thu, 24 Sep 2020 10:42:57 -0400
Subject: [PATCH 68/68] remove unused imports

---
 src/allmydata/test/common_web.py | 7 -------
 1 file changed, 7 deletions(-)

diff --git a/src/allmydata/test/common_web.py b/src/allmydata/test/common_web.py
index de4d3e820..e2ea57539 100644
--- a/src/allmydata/test/common_web.py
+++ b/src/allmydata/test/common_web.py
@@ -4,15 +4,8 @@ from twisted.internet.defer import (
     maybeDeferred,
     inlineCallbacks,
     returnValue,
-    succeed,
 )
 from twisted.web.error import Error
-from twisted.web.resource import (
-    IResource as ITwistedResource,
-)
-from twisted.web.test.requesthelper import (
-    DummyRequest,
-)
 
 from nevow.context import WebContext
 from nevow.testutil import FakeRequest