diff --git a/misc/coding_tools/check-interfaces.py b/misc/coding_tools/check-interfaces.py
index 5d6482d0a..66bdf808f 100644
--- a/misc/coding_tools/check-interfaces.py
+++ b/misc/coding_tools/check-interfaces.py
@@ -46,7 +46,7 @@ def strictly_implements(*interfaces):
                 for interface in interfaces:
                     try:
                         verifyClass(interface, cls)
-                    except Exception, e:
+                    except Exception as e:
                         print("%s.%s does not correctly implement %s.%s:\n%s"
                                        % (cls.__module__, cls.__name__,
                                           interface.__module__, interface.__name__, e), file=_err)
@@ -89,7 +89,7 @@ def check():
                 module = relpath.replace(os.sep, '/').replace('/', '.')
                 try:
                     __import__(module)
-                except ImportError, e:
+                except ImportError as e:
                     if not is_windows and (' _win' in str(e) or 'win32' in str(e)):
                         print("Warning: %r imports a Windows-specific module, so we cannot check it (%s).\n"
                                        % (module, str(e)), file=_err)
diff --git a/misc/coding_tools/check-miscaptures.py b/misc/coding_tools/check-miscaptures.py
index a6424015b..c55f9fa9c 100644
--- a/misc/coding_tools/check-miscaptures.py
+++ b/misc/coding_tools/check-miscaptures.py
@@ -15,7 +15,7 @@ def check_file(path):
 def check_thing(parser, thing):
     try:
         ast = parser(thing)
-    except SyntaxError, e:
+    except SyntaxError as e:
         return e
     else:
         results = []
diff --git a/newsfragments/3013.other b/newsfragments/3013.other
new file mode 100644
index 000000000..fd5a34e30
--- /dev/null
+++ b/newsfragments/3013.other
@@ -0,0 +1 @@
+Updated all Python files to use PEP 3110 exception syntax for Python 3 compatibility.
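
For context, PEP 3110 replaces the Python 2 comma-based exception binding with the `as` keyword, which both Python 2.6+ and Python 3 accept. A minimal illustrative sketch (not part of the patch) of the two spellings:

    # Python 2-only spelling, rejected by Python 3's parser:
    #     try:
    #         risky()
    #     except ValueError, e:
    #         handle(e)
    #
    # PEP 3110 spelling, the form used throughout this patch:
    try:
        int("not a number")
    except ValueError as e:
        print("could not parse: %s" % e)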
diff --git a/py3_valid_syntax.txt b/py3_valid_syntax.txt
index 0673802b4..5a1191f57 100644
--- a/py3_valid_syntax.txt
+++ b/py3_valid_syntax.txt
@@ -5,19 +5,14 @@ misc/build_helpers/gen-package-table.py
 misc/build_helpers/run-deprecations.py
 misc/build_helpers/show-tool-versions.py
 misc/build_helpers/test-osx-pkg.py
-misc/coding_tools/check-debugging.py
-misc/coding_tools/check-umids.py
-misc/coding_tools/coverage2el.py
-misc/coding_tools/find-trailing-spaces.py
-misc/coding_tools/fixshebangs.py
-misc/coding_tools/graph-deps.py
-misc/coding_tools/make-canary-files.py
+misc/coding_tools/
 misc/incident-gatherer/
 misc/operations_helpers/
 misc/simulators/
 release-tools/
 setup.py
 src/allmydata/_auto_deps.py
+src/allmydata/blacklist.py
 src/allmydata/check_results.py
 src/allmydata/client.py
 src/allmydata/codec.py
@@ -27,10 +22,12 @@ src/allmydata/frontends/auth.py
 src/allmydata/frontends/__init__.py
 src/allmydata/hashtree.py
 src/allmydata/history.py
+src/allmydata/immutable/checker.py
 src/allmydata/immutable/downloader/common.py
 src/allmydata/immutable/downloader/fetcher.py
 src/allmydata/immutable/downloader/finder.py
 src/allmydata/immutable/downloader/__init__.py
+src/allmydata/immutable/downloader/share.py
 src/allmydata/immutable/downloader/status.py
 src/allmydata/immutable/filenode.py
 src/allmydata/immutable/__init__.py
@@ -40,6 +37,7 @@ src/allmydata/immutable/offloaded.py
 src/allmydata/immutable/repairer.py
 src/allmydata/interfaces.py
 src/allmydata/introducer/
+src/allmydata/magicfolderdb.py
 src/allmydata/__main__.py
 src/allmydata/monitor.py
 src/allmydata/mutable/checker.py
@@ -48,24 +46,40 @@ src/allmydata/mutable/__init__.py
 src/allmydata/mutable/layout.py
 src/allmydata/mutable/publish.py
 src/allmydata/mutable/repairer.py
+src/allmydata/mutable/retrieve.py
 src/allmydata/mutable/servermap.py
 src/allmydata/nodemaker.py
 src/allmydata/node.py
 src/allmydata/scripts/admin.py
+src/allmydata/scripts/backupdb.py
 src/allmydata/scripts/cli.py
+src/allmydata/scripts/common_http.py
 src/allmydata/scripts/common.py
 src/allmydata/scripts/create_node.py
 src/allmydata/scripts/default_nodedir.py
 src/allmydata/scripts/__init__.py
 src/allmydata/scripts/magic_folder_cli.py
+src/allmydata/scripts/slow_operation.py
 src/allmydata/scripts/stats_gatherer.py
 src/allmydata/scripts/tahoe_add_alias.py
 src/allmydata/scripts/tahoe_backup.py
+src/allmydata/scripts/tahoe_check.py
+src/allmydata/scripts/tahoe_cp.py
+src/allmydata/scripts/tahoe_daemonize.py
+src/allmydata/scripts/tahoe_get.py
 src/allmydata/scripts/tahoe_invite.py
+src/allmydata/scripts/tahoe_ls.py
+src/allmydata/scripts/tahoe_manifest.py
+src/allmydata/scripts/tahoe_mkdir.py
+src/allmydata/scripts/tahoe_mv.py
+src/allmydata/scripts/tahoe_put.py
 src/allmydata/scripts/tahoe_restart.py
 src/allmydata/scripts/tahoe_run.py
 src/allmydata/scripts/tahoe_start.py
 src/allmydata/scripts/tahoe_status.py
+src/allmydata/scripts/tahoe_stop.py
+src/allmydata/scripts/tahoe_unlink.py
+src/allmydata/scripts/tahoe_webopen.py
 src/allmydata/stats.py
 src/allmydata/storage/
 src/allmydata/test/bench_dirnode.py
@@ -81,6 +95,7 @@ src/allmydata/test/cli/test_daemonize.py
 src/allmydata/test/cli/test_invite.py
 src/allmydata/test/cli/test_start.py
 src/allmydata/test/cli/test_status.py
+src/allmydata/test/common.py
 src/allmydata/test/common_util.py
 src/allmydata/test/common_web.py
 src/allmydata/test/eliotutil.py
@@ -109,11 +124,14 @@ src/allmydata/test/test_checker.py
 src/allmydata/test/test_client.py
 src/allmydata/test/test_configutil.py
 src/allmydata/test/test_connections.py
+src/allmydata/test/test_crawler.py
 src/allmydata/test/test_eliotutil.py
 src/allmydata/test/test_encode.py
+src/allmydata/test/test_encodingutil.py
 src/allmydata/test/test_filenode.py
 src/allmydata/test/test_ftp.py
 src/allmydata/test/test_happiness.py
+src/allmydata/test/test_hashtree.py
 src/allmydata/test/test_helper.py
 src/allmydata/test/test_hung_server.py
 src/allmydata/test/test_i2p_provider.py
@@ -130,6 +148,7 @@ src/allmydata/test/test_netstring.py
 src/allmydata/test/test_node.py
 src/allmydata/test/test_no_network.py
 src/allmydata/test/test_observer.py
+src/allmydata/test/test_repairer.py
 src/allmydata/test/test_runner.py
 src/allmydata/test/test_stats.py
 src/allmydata/test/test_storage_client.py
@@ -147,6 +166,7 @@ src/allmydata/test/web/test_root.py
 src/allmydata/test/web/test_token.py
 src/allmydata/test/web/test_util.py
 src/allmydata/unknown.py
+src/allmydata/uri.py
 src/allmydata/util/abbreviate.py
 src/allmydata/util/base32.py
 src/allmydata/util/base62.py
@@ -154,12 +174,15 @@ src/allmydata/util/cachedir.py
 src/allmydata/util/configutil.py
 src/allmydata/util/connection_status.py
 src/allmydata/util/consumer.py
+src/allmydata/util/dbutil.py
+src/allmydata/util/deferredutil.py
 src/allmydata/util/dictutil.py
 src/allmydata/util/eliotutil.py
 src/allmydata/util/hashutil.py
 src/allmydata/util/i2p_provider.py
 src/allmydata/util/idlib.py
 src/allmydata/util/__init__.py
+src/allmydata/util/iputil.py
 src/allmydata/util/keyutil.py
 src/allmydata/util/limiter.py
 src/allmydata/util/log.py
@@ -184,5 +207,6 @@ src/allmydata/watchdog/
 src/allmydata/web/
 src/allmydata/windows/__init__.py
 src/allmydata/windows/tahoesvc.py
+static/
 static/tahoe.py
 ws_client.py
diff --git a/src/allmydata/__init__.py b/src/allmydata/__init__.py
index d2ea56f34..485abfa5f 100644
--- a/src/allmydata/__init__.py
+++ b/src/allmydata/__init__.py
@@ -399,7 +399,7 @@ def cross_check(pkg_resources_vers_and_locs, imported_vers_and_locs_list):
                 pr_normver = normalized_version(pr_ver)
             except verlib.IrrationalVersionError:
                 continue
-            except Exception, e:
+            except Exception as e:
                 errors.append("Warning: version number %r found for dependency %r by pkg_resources could not be parsed. "
                               "The version found by import was %r from %r. "
                               "pkg_resources thought it should be found at %r. "
@@ -416,7 +416,7 @@ def cross_check(pkg_resources_vers_and_locs, imported_vers_and_locs_list):
                         imp_normver = normalized_version(imp_ver)
                     except verlib.IrrationalVersionError:
                         continue
-                    except Exception, e:
+                    except Exception as e:
                         errors.append("Warning: version number %r found for dependency %r (imported from %r) could not be parsed. "
                                       "pkg_resources thought it should be version %r at %r. "
                                       "The exception was %s: %s"
@@ -470,7 +470,7 @@ def check_all_requirements():
     for requirement in install_requires:
         try:
             check_requirement(requirement, vers_and_locs)
-        except (ImportError, PackagingError), e:
+        except (ImportError, PackagingError) as e:
             fatal_errors.append("%s: %s" % (e.__class__.__name__, e))
 
     if fatal_errors:
diff --git a/src/allmydata/blacklist.py b/src/allmydata/blacklist.py
index aebede6af..23690266d 100644
--- a/src/allmydata/blacklist.py
+++ b/src/allmydata/blacklist.py
@@ -42,7 +42,7 @@ class Blacklist:
                     si = base32.a2b(si_s) # must be valid base32
                     self.entries[si] = reason
                 self.last_mtime = current_mtime
-        except Exception, e:
+        except Exception as e:
             twisted_log.err(e, "unparseable blacklist file")
             raise
 
diff --git a/src/allmydata/dirnode.py b/src/allmydata/dirnode.py
index b1b6b6abb..d094d85cc 100644
--- a/src/allmydata/dirnode.py
+++ b/src/allmydata/dirnode.py
@@ -402,7 +402,7 @@ class DirectoryNode(object):
                     log.msg(format="mutable cap for child %(name)s unpacked from an immutable directory",
                             name=quote_output(name, encoding='utf-8'),
                             facility="tahoe.webish", level=log.UNUSUAL)
-            except CapConstraintError, e:
+            except CapConstraintError as e:
                 log.msg(format="unmet constraint on cap for child %(name)s unpacked from a directory:\n"
                                "%(message)s", message=e.args[0], name=quote_output(name, encoding='utf-8'),
                                facility="tahoe.webish", level=log.UNUSUAL)
diff --git a/src/allmydata/frontends/sftpd.py b/src/allmydata/frontends/sftpd.py
index cfaf3a0d2..50457a4d8 100644
--- a/src/allmydata/frontends/sftpd.py
+++ b/src/allmydata/frontends/sftpd.py
@@ -539,7 +539,7 @@ class OverwriteableFileConsumer(PrefixingLogMixin):
             self.is_closed = True
             try:
                 self.f.close()
-            except Exception, e:
+            except Exception as e:
                 self.log("suppressed %r from close of temporary file %r" % (e, self.f), level=WEIRD)
         self.download_done("closed")
         return self.done_status
diff --git a/src/allmydata/immutable/checker.py b/src/allmydata/immutable/checker.py
index 4e1c5d012..596bc8225 100644
--- a/src/allmydata/immutable/checker.py
+++ b/src/allmydata/immutable/checker.py
@@ -256,9 +256,9 @@ class ValidatedReadBucketProxy(log.PrefixingLogMixin):
             sharehashes = dict(sh)
             try:
                 self.share_hash_tree.set_hashes(sharehashes)
-            except IndexError, le:
+            except IndexError as le:
                 raise BadOrMissingHash(le)
-            except (hashtree.BadHashError, hashtree.NotEnoughHashesError), le:
+            except (hashtree.BadHashError, hashtree.NotEnoughHashesError) as le:
                 raise BadOrMissingHash(le)
         d.addCallback(_got_share_hashes)
         return d
@@ -289,9 +289,9 @@ class ValidatedReadBucketProxy(log.PrefixingLogMixin):
 
             try:
                 self.block_hash_tree.set_hashes(bh)
-            except IndexError, le:
+            except IndexError as le:
                 raise BadOrMissingHash(le)
-            except (hashtree.BadHashError, hashtree.NotEnoughHashesError), le:
+            except (hashtree.BadHashError, hashtree.NotEnoughHashesError) as le:
                 raise BadOrMissingHash(le)
         d.addCallback(_got_block_hashes)
         return d
@@ -316,9 +316,9 @@ class ValidatedReadBucketProxy(log.PrefixingLogMixin):
             ct_hashes = dict(enumerate(hashes))
             try:
                 crypttext_hash_tree.set_hashes(ct_hashes)
-            except IndexError, le:
+            except IndexError as le:
                 raise BadOrMissingHash(le)
-            except (hashtree.BadHashError, hashtree.NotEnoughHashesError), le:
+            except (hashtree.BadHashError, hashtree.NotEnoughHashesError) as le:
                 raise BadOrMissingHash(le)
         d.addCallback(_got_crypttext_hashes)
         return d
@@ -359,7 +359,7 @@ class ValidatedReadBucketProxy(log.PrefixingLogMixin):
         sharehashes, blockhashes, blockdata = results
         try:
             sharehashes = dict(sharehashes)
-        except ValueError, le:
+        except ValueError as le:
             le.args = tuple(le.args + (sharehashes,))
             raise
         blockhashes = dict(enumerate(blockhashes))
@@ -373,7 +373,7 @@ class ValidatedReadBucketProxy(log.PrefixingLogMixin):
                 # match the root node of self.share_hash_tree.
                 try:
                     self.share_hash_tree.set_hashes(sharehashes)
-                except IndexError, le:
+                except IndexError as le:
                     # Weird -- sharehashes contained index numbers outside of
                     # the range that fit into this hash tree.
                     raise BadOrMissingHash(le)
@@ -400,7 +400,7 @@ class ValidatedReadBucketProxy(log.PrefixingLogMixin):
             #        (self.sharenum, blocknum, len(blockdata),
             #         blockdata[:50], blockdata[-50:], base32.b2a(blockhash)))
 
-        except (hashtree.BadHashError, hashtree.NotEnoughHashesError), le:
+        except (hashtree.BadHashError, hashtree.NotEnoughHashesError) as le:
             # log.WEIRD: indicates undetected disk/network error, or more
             # likely a programming error
             self.log("hash failure in block=%d, shnum=%d on %s" %
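
The same `as` keyword applies when several exception types are caught at once, as in the checker.py hunks above. A hedged sketch of that pattern, using IndexError/ValueError as stand-ins for hashtree.BadHashError and hashtree.NotEnoughHashesError (set_hashes_or_wrap is a hypothetical helper, not Tahoe code):

    class BadOrMissingHash(Exception):
        """Stand-in for the wrapper exception raised in the hunks above."""

    def set_hashes_or_wrap(tree, hashes):
        # Catch several failure types together and re-raise them as one
        # wrapper, mirroring ValidatedReadBucketProxy._got_share_hashes.
        try:
            tree.set_hashes(hashes)
        except (IndexError, ValueError) as le:
            raise BadOrMissingHash(le)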
diff --git a/src/allmydata/immutable/downloader/share.py b/src/allmydata/immutable/downloader/share.py
index 30cddb798..e4ed429b5 100644
--- a/src/allmydata/immutable/downloader/share.py
+++ b/src/allmydata/immutable/downloader/share.py
@@ -208,7 +208,7 @@ class Share:
                     level=log.NOISY, parent=self._lp, umid="BaL1zw")
             self._do_loop()
             # all exception cases call self._fail(), which clears self._alive
-        except (BadHashError, NotEnoughHashesError, LayoutInvalid), e:
+        except (BadHashError, NotEnoughHashesError, LayoutInvalid) as e:
             # Abandon this share. We do this if we see corruption in the
             # offset table, the UEB, or a hash tree. We don't abandon the
             # whole share if we see corruption in a data block (we abandon
@@ -225,7 +225,7 @@ class Share:
                     share=repr(self),
                     level=log.UNUSUAL, parent=self._lp, umid="gWspVw")
             self._fail(Failure(e), log.UNUSUAL)
-        except DataUnavailable, e:
+        except DataUnavailable as e:
             # Abandon this share.
             log.msg(format="need data that will never be available"
                     " from %s: pending=%s, received=%s, unavailable=%s" %
@@ -416,7 +416,7 @@ class Share:
         try:
             self._node.validate_and_store_UEB(UEB_s)
             return True
-        except (LayoutInvalid, BadHashError), e:
+        except (LayoutInvalid, BadHashError) as e:
             # TODO: if this UEB was bad, we'll keep trying to validate it
             # over and over again. Only log.err on the first one, or better
             # yet skip all but the first
@@ -452,7 +452,7 @@ class Share:
         try:
             self._node.process_share_hashes(share_hashes)
             # adds to self._node.share_hash_tree
-        except (BadHashError, NotEnoughHashesError), e:
+        except (BadHashError, NotEnoughHashesError) as e:
             f = Failure(e)
             self._signal_corruption(f, o["share_hashes"], hashlen)
             self.had_corruption = True
@@ -481,7 +481,7 @@ class Share:
         # cannot validate)
         try:
             self._commonshare.process_block_hashes(block_hashes)
-        except (BadHashError, NotEnoughHashesError), e:
+        except (BadHashError, NotEnoughHashesError) as e:
             f = Failure(e)
             hashnums = ",".join([str(n) for n in sorted(block_hashes.keys())])
             log.msg(format="hash failure in block_hashes=(%(hashnums)s),"
@@ -509,7 +509,7 @@ class Share:
         # gotten them all
         try:
             self._node.process_ciphertext_hashes(hashes)
-        except (BadHashError, NotEnoughHashesError), e:
+        except (BadHashError, NotEnoughHashesError) as e:
             f = Failure(e)
             hashnums = ",".join([str(n) for n in sorted(hashes.keys())])
             log.msg(format="hash failure in ciphertext_hashes=(%(hashnums)s),"
@@ -553,7 +553,7 @@ class Share:
             # now clear our received data, to dodge the #1170 spans.py
             # complexity bug
             self._received = DataSpans()
-        except (BadHashError, NotEnoughHashesError), e:
+        except (BadHashError, NotEnoughHashesError) as e:
             # rats, we have a corrupt block. Notify our clients that they
             # need to look elsewhere, and advise the server. Unlike
             # corruption in other parts of the share, this doesn't cause us
diff --git a/src/allmydata/magicfolderdb.py b/src/allmydata/magicfolderdb.py
index e0d9893da..9b09f6c95 100644
--- a/src/allmydata/magicfolderdb.py
+++ b/src/allmydata/magicfolderdb.py
@@ -85,7 +85,7 @@ def get_magicfolderdb(dbfile, stderr=sys.stderr,
         else:
             print("invalid magicfolderdb schema version specified", file=stderr)
             return None
-    except DBError, e:
+    except DBError as e:
         print(e, file=stderr)
         return None
 
diff --git a/src/allmydata/mutable/retrieve.py b/src/allmydata/mutable/retrieve.py
index 03dc6f888..3771a235c 100644
--- a/src/allmydata/mutable/retrieve.py
+++ b/src/allmydata/mutable/retrieve.py
@@ -756,7 +756,7 @@ class Retrieve(object):
             try:
                 bht.set_hashes(blockhashes)
             except (hashtree.BadHashError, hashtree.NotEnoughHashesError, \
-                    IndexError), e:
+                    IndexError) as e:
                 raise CorruptShareError(server,
                                         reader.shnum,
                                         "block hash tree failure: %s" % e)
@@ -770,7 +770,7 @@ class Retrieve(object):
         try:
            bht.set_hashes(leaves={segnum: blockhash})
         except (hashtree.BadHashError, hashtree.NotEnoughHashesError, \
-                IndexError), e:
+                IndexError) as e:
             raise CorruptShareError(server,
                                     reader.shnum,
                                     "block hash tree failure: %s" % e)
@@ -788,7 +788,7 @@ class Retrieve(object):
             self.share_hash_tree.set_hashes(hashes=sharehashes,
                                         leaves={reader.shnum: bht[0]})
         except (hashtree.BadHashError, hashtree.NotEnoughHashesError, \
-                IndexError), e:
+                IndexError) as e:
             raise CorruptShareError(server,
                                     reader.shnum,
                                     "corrupt hashes: %s" % e)
diff --git a/src/allmydata/scripts/backupdb.py b/src/allmydata/scripts/backupdb.py
index 0e2a54dc9..b86a06edc 100644
--- a/src/allmydata/scripts/backupdb.py
+++ b/src/allmydata/scripts/backupdb.py
@@ -73,7 +73,7 @@ def get_backupdb(dbfile, stderr=sys.stderr,
         (sqlite3, db) = get_db(dbfile, stderr, create_version, updaters=UPDATERS,
                                just_create=just_create, dbname="backupdb")
         return BackupDB_v2(sqlite3, db)
-    except DBError, e:
+    except DBError as e:
         print(e, file=stderr)
         return None
 
diff --git a/src/allmydata/scripts/common_http.py b/src/allmydata/scripts/common_http.py
index c35e1a8b3..5cfb9f7bd 100644
--- a/src/allmydata/scripts/common_http.py
+++ b/src/allmydata/scripts/common_http.py
@@ -70,7 +70,7 @@ def do_http(method, url, body=""):
 
     try:
         c.endheaders()
-    except socket_error, err:
+    except socket_error as err:
         return BadResponse(url, err)
 
     while True:
diff --git a/src/allmydata/scripts/debug.py b/src/allmydata/scripts/debug.py
index 267d2d0d8..fab7529d3 100644
--- a/src/allmydata/scripts/debug.py
+++ b/src/allmydata/scripts/debug.py
@@ -234,7 +234,7 @@ def dump_SDMF_share(m, length, options):
 
     try:
         pieces = unpack_share(data)
-    except NeedMoreDataError, e:
+    except NeedMoreDataError as e:
         # retry once with the larger size
         size = e.needed_bytes
         f = open(options['filename'], "rb")
@@ -726,7 +726,7 @@ def describe_share(abs_sharefile, si_s, shnum_s, now, out):
 
             try:
                 pieces = unpack_share(data)
-            except NeedMoreDataError, e:
+            except NeedMoreDataError as e:
                 # retry once with the larger size
                 size = e.needed_bytes
                 f.seek(m.DATA_OFFSET)
diff --git a/src/allmydata/scripts/slow_operation.py b/src/allmydata/scripts/slow_operation.py
index 4b7ebbf19..8aa5adf12 100644
--- a/src/allmydata/scripts/slow_operation.py
+++ b/src/allmydata/scripts/slow_operation.py
@@ -22,7 +22,7 @@ class SlowOperationRunner:
         where = options.where
         try:
             rootcap, path = get_alias(options.aliases, where, DEFAULT_ALIAS)
-        except UnknownAliasError, e:
+        except UnknownAliasError as e:
             e.display(stderr)
             return 1
         if path == '/':
diff --git a/src/allmydata/scripts/tahoe_check.py b/src/allmydata/scripts/tahoe_check.py
index 53cc99dce..19967d505 100644
--- a/src/allmydata/scripts/tahoe_check.py
+++ b/src/allmydata/scripts/tahoe_check.py
@@ -24,7 +24,7 @@ def check_location(options, where):
         nodeurl += "/"
     try:
         rootcap, path = get_alias(options.aliases, where, DEFAULT_ALIAS)
-    except UnknownAliasError, e:
+    except UnknownAliasError as e:
         e.display(stderr)
         return 1
     if path == '/':
@@ -285,7 +285,7 @@ class DeepCheckStreamer(LineOnlyReceiver):
 
         try:
             rootcap, path = get_alias(options.aliases, where, DEFAULT_ALIAS)
-        except UnknownAliasError, e:
+        except UnknownAliasError as e:
             e.display(stderr)
             return 1
         if path == '/':
diff --git a/src/allmydata/scripts/tahoe_cp.py b/src/allmydata/scripts/tahoe_cp.py
index c309fcd21..e2c32fdfd 100644
--- a/src/allmydata/scripts/tahoe_cp.py
+++ b/src/allmydata/scripts/tahoe_cp.py
@@ -488,7 +488,7 @@ class Copier:
         try:
             status = self.try_copy()
             return status
-        except TahoeError, te:
+        except TahoeError as te:
             if verbosity >= 2:
                 Failure().printTraceback(self.stderr)
                 print(file=self.stderr)
diff --git a/src/allmydata/scripts/tahoe_daemonize.py b/src/allmydata/scripts/tahoe_daemonize.py
index 3c5b1ac01..d7cfc89cf 100644
--- a/src/allmydata/scripts/tahoe_daemonize.py
+++ b/src/allmydata/scripts/tahoe_daemonize.py
@@ -206,7 +206,7 @@ def daemonize(config):
     twistd_config = MyTwistdConfig()
     try:
         twistd_config.parseOptions(twistd_args)
-    except usage.error, ue:
+    except usage.error as ue:
         # these arguments were unsuitable for 'twistd'
         print(config, file=err)
         print("tahoe %s: usage error from twistd: %s\n" % (config.subcommand_name, ue), file=err)
diff --git a/src/allmydata/scripts/tahoe_get.py b/src/allmydata/scripts/tahoe_get.py
index 0470b383f..d90baf2c9 100644
--- a/src/allmydata/scripts/tahoe_get.py
+++ b/src/allmydata/scripts/tahoe_get.py
@@ -17,7 +17,7 @@ def get(options):
         nodeurl += "/"
     try:
         rootcap, path = get_alias(aliases, from_file, DEFAULT_ALIAS)
-    except UnknownAliasError, e:
+    except UnknownAliasError as e:
         e.display(stderr)
         return 1
     url = nodeurl + "uri/%s" % urllib.quote(rootcap)
diff --git a/src/allmydata/scripts/tahoe_ls.py b/src/allmydata/scripts/tahoe_ls.py
index cbe48aedc..1332da32a 100644
--- a/src/allmydata/scripts/tahoe_ls.py
+++ b/src/allmydata/scripts/tahoe_ls.py
@@ -20,7 +20,7 @@ def list(options):
         where = where[:-1]
     try:
         rootcap, path = get_alias(aliases, where, DEFAULT_ALIAS)
-    except UnknownAliasError, e:
+    except UnknownAliasError as e:
         e.display(stderr)
         return 1
     url = nodeurl + "uri/%s" % urllib.quote(rootcap)
@@ -54,7 +54,7 @@ def list(options):
 
     try:
         parsed = json.loads(data)
-    except Exception, e:
+    except Exception as e:
         print("error: %s" % quote_output(e.args[0], quotemarks=False), file=stderr)
         print("Could not parse JSON response:", file=stderr)
         print(quote_output(data, quotemarks=False), file=stderr)
diff --git a/src/allmydata/scripts/tahoe_manifest.py b/src/allmydata/scripts/tahoe_manifest.py
index 25618dc48..f1a5e7a99 100644
--- a/src/allmydata/scripts/tahoe_manifest.py
+++ b/src/allmydata/scripts/tahoe_manifest.py
@@ -30,7 +30,7 @@ class ManifestStreamer(LineOnlyReceiver):
         where = options.where
         try:
             rootcap, path = get_alias(options.aliases, where, DEFAULT_ALIAS)
-        except UnknownAliasError, e:
+        except UnknownAliasError as e:
             e.display(stderr)
             return 1
         if path == '/':
@@ -71,7 +71,7 @@ class ManifestStreamer(LineOnlyReceiver):
 
         try:
             d = json.loads(line.decode('utf-8'))
-        except Exception, e:
+        except Exception as e:
             print("ERROR could not decode/parse %s\nERROR  %r" % (quote_output(line), e), file=stderr)
         else:
             if d["type"] in ("file", "directory"):
diff --git a/src/allmydata/scripts/tahoe_mkdir.py b/src/allmydata/scripts/tahoe_mkdir.py
index f7899e6ec..a76adc8fc 100644
--- a/src/allmydata/scripts/tahoe_mkdir.py
+++ b/src/allmydata/scripts/tahoe_mkdir.py
@@ -16,7 +16,7 @@ def mkdir(options):
     if where:
         try:
             rootcap, path = get_alias(aliases, where, DEFAULT_ALIAS)
-        except UnknownAliasError, e:
+        except UnknownAliasError as e:
             e.display(stderr)
             return 1
 
diff --git a/src/allmydata/scripts/tahoe_mv.py b/src/allmydata/scripts/tahoe_mv.py
index a2cbab997..4caaca928 100644
--- a/src/allmydata/scripts/tahoe_mv.py
+++ b/src/allmydata/scripts/tahoe_mv.py
@@ -22,7 +22,7 @@ def mv(options, mode="move"):
         nodeurl += "/"
     try:
         rootcap, from_path = get_alias(aliases, from_file, DEFAULT_ALIAS)
-    except UnknownAliasError, e:
+    except UnknownAliasError as e:
         e.display(stderr)
         return 1
     from_url = nodeurl + "uri/%s" % urllib.quote(rootcap)
@@ -40,7 +40,7 @@ def mv(options, mode="move"):
     # now get the target
     try:
         rootcap, path = get_alias(aliases, to_file, DEFAULT_ALIAS)
-    except UnknownAliasError, e:
+    except UnknownAliasError as e:
         e.display(stderr)
         return 1
     to_url = nodeurl + "uri/%s" % urllib.quote(rootcap)
diff --git a/src/allmydata/scripts/tahoe_put.py b/src/allmydata/scripts/tahoe_put.py
index a769fb5e2..8d87408dc 100644
--- a/src/allmydata/scripts/tahoe_put.py
+++ b/src/allmydata/scripts/tahoe_put.py
@@ -50,7 +50,7 @@ def put(options):
         else:
             try:
                 rootcap, path = get_alias(aliases, to_file, DEFAULT_ALIAS)
-            except UnknownAliasError, e:
+            except UnknownAliasError as e:
                 e.display(stderr)
                 return 1
             if path.startswith("/"):
diff --git a/src/allmydata/scripts/tahoe_stop.py b/src/allmydata/scripts/tahoe_stop.py
index 562a0aa8b..9918f7e0d 100644
--- a/src/allmydata/scripts/tahoe_stop.py
+++ b/src/allmydata/scripts/tahoe_stop.py
@@ -44,7 +44,7 @@ def stop(config):
     # the user but keep waiting until they give up.
     try:
         os.kill(pid, signal.SIGKILL)
-    except OSError, oserr:
+    except OSError as oserr:
         if oserr.errno == 3:
             print(oserr.strerror)
             # the process didn't exist, so wipe the pid file
diff --git a/src/allmydata/scripts/tahoe_unlink.py b/src/allmydata/scripts/tahoe_unlink.py
index c3d1faec6..bc1d43c9e 100644
--- a/src/allmydata/scripts/tahoe_unlink.py
+++ b/src/allmydata/scripts/tahoe_unlink.py
@@ -19,7 +19,7 @@ def unlink(options, command="unlink"):
         nodeurl += "/"
     try:
         rootcap, path = get_alias(aliases, where, DEFAULT_ALIAS)
-    except UnknownAliasError, e:
+    except UnknownAliasError as e:
         e.display(stderr)
         return 1
     if not path:
diff --git a/src/allmydata/scripts/tahoe_webopen.py b/src/allmydata/scripts/tahoe_webopen.py
index 03500dc76..a7b7ca7e1 100644
--- a/src/allmydata/scripts/tahoe_webopen.py
+++ b/src/allmydata/scripts/tahoe_webopen.py
@@ -12,7 +12,7 @@ def webopen(options, opener=None):
     if where:
         try:
             rootcap, path = get_alias(options.aliases, where, DEFAULT_ALIAS)
-        except UnknownAliasError, e:
+        except UnknownAliasError as e:
             e.display(stderr)
             return 1
         if path == '/':
diff --git a/src/allmydata/test/common.py b/src/allmydata/test/common.py
index cd48fcda4..e5f16a007 100644
--- a/src/allmydata/test/common.py
+++ b/src/allmydata/test/common.py
@@ -274,7 +274,7 @@ class FakeCHKFileNode:
             return self.my_uri.get_size()
         try:
             data = self.all_contents[self.my_uri.to_string()]
-        except KeyError, le:
+        except KeyError as le:
             raise NotEnoughSharesError(le, 0, 3)
         return len(data)
     def get_current_size(self):
diff --git a/src/allmydata/test/test_crawler.py b/src/allmydata/test/test_crawler.py
index 05677d1ac..48d1ba26e 100644
--- a/src/allmydata/test/test_crawler.py
+++ b/src/allmydata/test/test_crawler.py
@@ -294,7 +294,7 @@ class Basic(unittest.TestCase, StallMixin, pollmixin.PollMixin):
                 left = p["remaining-sleep-time"]
                 self.failUnless(isinstance(left, float), left)
                 self.failUnless(left > 0.0, left)
-            except Exception, e:
+            except Exception as e:
                 did_check_progress[0] = e
             else:
                 did_check_progress[0] = True
diff --git a/src/allmydata/test/test_deepcheck.py b/src/allmydata/test/test_deepcheck.py
index d6d81d8c3..c1e13df33 100644
--- a/src/allmydata/test/test_deepcheck.py
+++ b/src/allmydata/test/test_deepcheck.py
@@ -145,7 +145,7 @@ class DeepCheckBase(GridTestMixin, ErrorMixin, StallMixin, ShouldFailMixin,
                 continue
             try:
                 yield json.loads(unit)
-            except ValueError, le:
+            except ValueError as le:
                 le.args = tuple(le.args + (unit,))
                 raise
 
@@ -953,7 +953,7 @@ class DeepCheckWebBad(DeepCheckBase, unittest.TestCase):
             self.failUnlessEqual(cr.get_version_counter_recoverable(), 1, where)
             self.failUnlessEqual(cr.get_version_counter_unrecoverable(), 0, where)
             return cr
-        except Exception, le:
+        except Exception as le:
             le.args = tuple(le.args + (where,))
             raise
 
diff --git a/src/allmydata/test/test_encodingutil.py b/src/allmydata/test/test_encodingutil.py
index 743e9ef9f..771e9d879 100644
--- a/src/allmydata/test/test_encodingutil.py
+++ b/src/allmydata/test/test_encodingutil.py
@@ -271,7 +271,7 @@ class StdlibUnicode(unittest.TestCase):
 
         try:
             os.mkdir(lumiere_nfc)
-        except EnvironmentError, e:
+        except EnvironmentError as e:
             raise unittest.SkipTest("%r\nIt is possible that the filesystem on which this test is being run "
                                     "does not support Unicode, even though the platform does." % (e,))
 
diff --git a/src/allmydata/test/test_hashtree.py b/src/allmydata/test/test_hashtree.py
index abcf4eb1b..d96e8ebd7 100644
--- a/src/allmydata/test/test_hashtree.py
+++ b/src/allmydata/test/test_hashtree.py
@@ -186,7 +186,7 @@ class Incomplete(unittest.TestCase):
         # this should succeed
         try:
             iht.set_hashes(chain, leaves={0: tagged_hash("tag", "0")})
-        except hashtree.BadHashError, e:
+        except hashtree.BadHashError as e:
             self.fail("bad hash: %s" % e)
 
         self.failUnlessEqual(ht.get_leaf(0), tagged_hash("tag", "0"))
@@ -215,5 +215,5 @@ class Incomplete(unittest.TestCase):
         # this should succeed
         try:
             iht.set_hashes(chain, leaves={4: tagged_hash("tag", "4")})
-        except hashtree.BadHashError, e:
+        except hashtree.BadHashError as e:
             self.fail("bad hash: %s" % e)
diff --git a/src/allmydata/test/test_repairer.py b/src/allmydata/test/test_repairer.py
index 747415207..9475741b4 100644
--- a/src/allmydata/test/test_repairer.py
+++ b/src/allmydata/test/test_repairer.py
@@ -117,7 +117,7 @@ class Verifier(GridTestMixin, unittest.TestCase, RepairTestMixin):
             self.failIfBigger(delta_reads, MAX_DELTA_READS)
             try:
                 judgement(vr)
-            except unittest.FailTest, e:
+            except unittest.FailTest as e:
                 # FailTest just uses e.args[0] == str
                 new_arg = str(e.args[0]) + "\nvr.data is: " + str(vr.as_dict())
                 e.args = (new_arg,)
diff --git a/src/allmydata/test/test_util.py b/src/allmydata/test/test_util.py
index f78d95019..4556ecf0c 100644
--- a/src/allmydata/test/test_util.py
+++ b/src/allmydata/test/test_util.py
@@ -66,19 +66,19 @@ class HumanReadable(unittest.TestCase):
         self.failUnlessEqual(hr({1:2}), "{1:2}")
         try:
             raise ValueError
-        except Exception, e:
+        except Exception as e:
             self.failUnless(
                 hr(e) == "<ValueError: ()>" # python-2.4
                 or hr(e) == "ValueError()") # python-2.5
         try:
             raise ValueError("oops")
-        except Exception, e:
+        except Exception as e:
             self.failUnless(
                 hr(e) == "<ValueError: 'oops'>" # python-2.4
                 or hr(e) == "ValueError('oops',)") # python-2.5
         try:
             raise NoArgumentException
-        except Exception, e:
+        except Exception as e:
             self.failUnless(
                 hr(e) == "<NoArgumentException>" # python-2.4
                 or hr(e) == "NoArgumentException()") # python-2.5
@@ -361,18 +361,18 @@ class Asserts(unittest.TestCase):
     def should_assert(self, func, *args, **kwargs):
         try:
             func(*args, **kwargs)
-        except AssertionError, e:
+        except AssertionError as e:
             return str(e)
-        except Exception, e:
+        except Exception as e:
             self.fail("assert failed with non-AssertionError: %s" % e)
         self.fail("assert was not caught")
 
     def should_not_assert(self, func, *args, **kwargs):
         try:
             func(*args, **kwargs)
-        except AssertionError, e:
+        except AssertionError as e:
             self.fail("assertion fired when it should not have: %s" % e)
-        except Exception, e:
+        except Exception as e:
             self.fail("assertion (which shouldn't have failed) failed with non-AssertionError: %s" % e)
         return # we're happy
 
diff --git a/src/allmydata/uri.py b/src/allmydata/uri.py
index b2159c026..8d462d537 100644
--- a/src/allmydata/uri.py
+++ b/src/allmydata/uri.py
@@ -816,7 +816,7 @@ def from_string(u, deep_immutable=False, name=u"<unknown name>"):
         else:
             error = MustBeReadonlyError(kind + " used in a read-only context", name)
 
-    except BadURIError, e:
+    except BadURIError as e:
         error = e
 
     return UnknownURI(u, error=error)
diff --git a/src/allmydata/util/dbutil.py b/src/allmydata/util/dbutil.py
index f098f1c17..543dd2797 100644
--- a/src/allmydata/util/dbutil.py
+++ b/src/allmydata/util/dbutil.py
@@ -22,7 +22,7 @@ def get_db(dbfile, stderr=sys.stderr,
     must_create = not os.path.exists(dbfile)
     try:
         db = sqlite3.connect(dbfile)
-    except (EnvironmentError, sqlite3.OperationalError), e:
+    except (EnvironmentError, sqlite3.OperationalError) as e:
         raise DBError("Unable to create/open %s file %s: %s" % (dbname, dbfile, e))
 
     schema, target_version = create_version
@@ -46,7 +46,7 @@ def get_db(dbfile, stderr=sys.stderr,
     try:
         c.execute("SELECT version FROM version")
         version = c.fetchone()[0]
-    except sqlite3.DatabaseError, e:
+    except sqlite3.DatabaseError as e:
         # this indicates that the file is not a compatible database format.
         # Perhaps it was created with an old version, or it might be junk.
         raise DBError("%s file is unusable: %s" % (dbname, e))
diff --git a/src/allmydata/util/deferredutil.py b/src/allmydata/util/deferredutil.py
index 974709ad2..d48faa0c7 100644
--- a/src/allmydata/util/deferredutil.py
+++ b/src/allmydata/util/deferredutil.py
@@ -28,7 +28,7 @@ def timeout_call(reactor, d, timeout):
         try:
             timer.cancel()
             timer_d.callback(x)
-        except error.AlreadyCalled, defer.AlreadyCalledError:
+        except (error.AlreadyCalled, defer.AlreadyCalledError):
             pass
         return None
 
diff --git a/src/allmydata/util/fileutil.py b/src/allmydata/util/fileutil.py
index 394333af4..1a3412050 100644
--- a/src/allmydata/util/fileutil.py
+++ b/src/allmydata/util/fileutil.py
@@ -37,7 +37,7 @@ def rename(src, dst, tries=4, basedelay=0.1):
     for i in range(tries-1):
         try:
             return os.rename(src, dst)
-        except EnvironmentError, le:
+        except EnvironmentError as le:
             # XXX Tighten this to check if this is a permission denied error (possibly due to another Windows process having the file open and execute the superkludge only in this case.
             log.msg("XXX KLUDGE Attempting to move file %s => %s; got %s; sleeping %s seconds" % (src, dst, le, basedelay,))
             time.sleep(basedelay)
@@ -65,7 +65,7 @@ def remove(f, tries=4, basedelay=0.1):
     for i in range(tries-1):
         try:
             return os.remove(f)
-        except EnvironmentError, le:
+        except EnvironmentError as le:
             # XXX Tighten this to check if this is a permission denied error (possibly due to another Windows process having the file open and execute the superkludge only in this case.
             if not os.path.exists(f):
                 return
@@ -183,7 +183,7 @@ def make_dirs(dirname, mode=0o777):
     tx = None
     try:
         os.makedirs(dirname, mode)
-    except OSError, x:
+    except OSError as x:
         tx = x
 
     if not os.path.isdir(dirname):
@@ -210,7 +210,7 @@ def rm_dir(dirname):
             else:
                 remove(fullname)
         os.rmdir(dirname)
-    except Exception, le:
+    except Exception as le:
         # Ignore "No such file or directory"
         if (not isinstance(le, OSError)) or le.args[0] != 2:
             excs.append(le)
diff --git a/src/allmydata/util/iputil.py b/src/allmydata/util/iputil.py
index 086f740f8..b834e285d 100644
--- a/src/allmydata/util/iputil.py
+++ b/src/allmydata/util/iputil.py
@@ -214,7 +214,7 @@ def _query(path, args, regex):
             p = subprocess.Popen([path] + list(args), stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env)
             (output, err) = p.communicate()
             break
-        except OSError, e:
+        except OSError as e:
             if e.errno == errno.EINTR and trial < TRIES-1:
                 continue
             raise
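
The iputil.py hunk keeps the existing retry-on-EINTR loop around the child process, now with `as e`. A hedged sketch of the same pattern in isolation (run_with_eintr_retry is a hypothetical helper, not Tahoe code; Python 3.5+ retries EINTR automatically, so this mainly illustrates the older idiom):

    import errno
    import subprocess

    def run_with_eintr_retry(argv, tries=3):
        # Re-run the child process if the call is interrupted by a signal,
        # giving up after `tries` attempts, as _query does above.
        for attempt in range(tries):
            try:
                return subprocess.check_output(argv)
            except OSError as e:
                if e.errno == errno.EINTR and attempt < tries - 1:
                    continue
                raise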
diff --git a/src/allmydata/windows/fixups.py b/src/allmydata/windows/fixups.py
index 6e01fc122..29fb43da0 100644
--- a/src/allmydata/windows/fixups.py
+++ b/src/allmydata/windows/fixups.py
@@ -143,7 +143,7 @@ def initialize():
                     if self._hConsole is None:
                         try:
                             self._stream.flush()
-                        except Exception, e:
+                        except Exception as e:
                             _complain("%s.flush: %r from %r" % (self.name, e, self._stream))
                             raise
 
@@ -169,7 +169,7 @@ def initialize():
                                 remaining -= n.value
                                 if remaining == 0: break
                                 text = text[n.value:]
-                    except Exception, e:
+                    except Exception as e:
                         _complain("%s.write: %r" % (self.name, e))
                         raise
 
@@ -177,7 +177,7 @@ def initialize():
                     try:
                         for line in lines:
                             self.write(line)
-                    except Exception, e:
+                    except Exception as e:
                         _complain("%s.writelines: %r" % (self.name, e))
                         raise
 
@@ -190,7 +190,7 @@ def initialize():
                 sys.stderr = UnicodeOutput(hStderr, None, STDERR_FILENO, '<Unicode console stderr>')
             else:
                 sys.stderr = UnicodeOutput(None, sys.stderr, old_stderr_fileno, '<Unicode redirected stderr>')
-    except Exception, e:
+    except Exception as e:
         _complain("exception %r while fixing up sys.stdout and sys.stderr" % (e,))
 
     # This works around <http://bugs.python.org/issue2128>.
@@ -221,7 +221,7 @@ def initialize():
 
     try:
         argv = [unmangle(argv_unicode[i]).encode('utf-8') for i in xrange(0, argc.value)]
-    except Exception, e:
+    except Exception as e:
         _complain("%s:  could not unmangle Unicode arguments.\n%r"
                   % (sys.argv[0], [argv_unicode[i] for i in xrange(0, argc.value)]))
         raise
diff --git a/src/allmydata/windows/inotify.py b/src/allmydata/windows/inotify.py
index c4d9f8fbb..17bea3597 100644
--- a/src/allmydata/windows/inotify.py
+++ b/src/allmydata/windows/inotify.py
@@ -345,7 +345,7 @@ class INotify(PollMixin):
                                     try:
                                         with CALLBACK(inotify_events=event_mask):
                                             cb(None, path1, event_mask)
-                                    except Exception, e2:
+                                    except Exception as e2:
                                         log.err(e2)
                         self._pending = set()
 
@@ -360,7 +360,7 @@ class INotify(PollMixin):
                     reactor.callFromThread(_maybe_notify, path)
                     if self._check_stop():
                         return
-        except Exception, e:
+        except Exception as e:
             log.err(e)
             self._state = STOPPED
             raise