Mirror of https://github.com/tahoe-lafs/tahoe-lafs.git (synced 2024-12-22 14:22:25 +00:00)
Merge pull request #587 from tahoe-lafs/3013-try-except-syntax

Updated all Python files to use PEP 3110 exception syntax for Python 3 compatibility.
Commit: 7431a23686
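The change is mechanical but necessary: Python 3 removed the old `except ExceptionClass, name:` spelling because the comma is ambiguous with the tuple form used to catch several exception types at once. The PEP 3110 `as` spelling is accepted by Python 2.6+ and Python 3 alike, so this patch can land without breaking Python 2 users. A minimal before/after sketch (the names `risky` and `handle` are placeholders, not from this codebase):

    # Python 2 only -- a SyntaxError on Python 3
    try:
        risky()
    except ValueError, e:
        handle(e)

    # PEP 3110 spelling -- valid on Python 2.6+ and Python 3
    try:
        risky()
    except ValueError as e:
        handle(e)

    # Catching several types still uses a tuple; only the binding changes
    try:
        risky()
    except (KeyError, IndexError) as e:
        handle(e)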
@@ -46,7 +46,7 @@ def strictly_implements(*interfaces):
         for interface in interfaces:
             try:
                 verifyClass(interface, cls)
-            except Exception, e:
+            except Exception as e:
                 print("%s.%s does not correctly implement %s.%s:\n%s"
                       % (cls.__module__, cls.__name__,
                          interface.__module__, interface.__name__, e), file=_err)

@@ -89,7 +89,7 @@ def check():
         module = relpath.replace(os.sep, '/').replace('/', '.')
         try:
             __import__(module)
-        except ImportError, e:
+        except ImportError as e:
             if not is_windows and (' _win' in str(e) or 'win32' in str(e)):
                 print("Warning: %r imports a Windows-specific module, so we cannot check it (%s).\n"
                       % (module, str(e)), file=_err)

@@ -15,7 +15,7 @@ def check_file(path):
 def check_thing(parser, thing):
     try:
         ast = parser(thing)
-    except SyntaxError, e:
+    except SyntaxError as e:
         return e
     else:
         results = []
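Because the old spelling is a hard SyntaxError on Python 3, a checker in the spirit of `check_thing()` above can confirm that no file was missed: simply parsing each source file under a Python 3 interpreter flags any leftover `except X, e:` clause. A rough sketch of that idea (a hypothetical helper, not part of this PR):

    import ast
    import sys

    def py3_syntax_error(path):
        # Under Python 3, any Python-2-only construct -- including the
        # old "except X, e:" form -- makes ast.parse() raise SyntaxError.
        try:
            ast.parse(open(path).read(), filename=path)
        except SyntaxError as e:
            return e
        return None

    if __name__ == "__main__":
        for path in sys.argv[1:]:
            err = py3_syntax_error(path)
            if err is not None:
                print("%s: %s" % (path, err))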
newsfragments/3013.other (new file, 1 line)
@@ -0,0 +1 @@
+Updated all Python files to use PEP-3110 exception syntax for Python3 compatibility.
@@ -5,19 +5,14 @@ misc/build_helpers/gen-package-table.py
 misc/build_helpers/run-deprecations.py
 misc/build_helpers/show-tool-versions.py
 misc/build_helpers/test-osx-pkg.py
-misc/coding_tools/check-debugging.py
-misc/coding_tools/check-umids.py
-misc/coding_tools/coverage2el.py
-misc/coding_tools/find-trailing-spaces.py
-misc/coding_tools/fixshebangs.py
-misc/coding_tools/graph-deps.py
-misc/coding_tools/make-canary-files.py
+misc/coding_tools/
 misc/incident-gatherer/
 misc/operations_helpers/
 misc/simulators/
 release-tools/
 setup.py
 src/allmydata/_auto_deps.py
+src/allmydata/blacklist.py
 src/allmydata/check_results.py
 src/allmydata/client.py
 src/allmydata/codec.py
@@ -27,10 +22,12 @@ src/allmydata/frontends/auth.py
 src/allmydata/frontends/__init__.py
 src/allmydata/hashtree.py
 src/allmydata/history.py
+src/allmydata/immutable/checker.py
 src/allmydata/immutable/downloader/common.py
 src/allmydata/immutable/downloader/fetcher.py
 src/allmydata/immutable/downloader/finder.py
 src/allmydata/immutable/downloader/__init__.py
+src/allmydata/immutable/downloader/share.py
 src/allmydata/immutable/downloader/status.py
 src/allmydata/immutable/filenode.py
 src/allmydata/immutable/__init__.py
@@ -40,6 +37,7 @@ src/allmydata/immutable/offloaded.py
 src/allmydata/immutable/repairer.py
 src/allmydata/interfaces.py
 src/allmydata/introducer/
+src/allmydata/magicfolderdb.py
 src/allmydata/__main__.py
 src/allmydata/monitor.py
 src/allmydata/mutable/checker.py
@@ -48,24 +46,40 @@ src/allmydata/mutable/__init__.py
 src/allmydata/mutable/layout.py
 src/allmydata/mutable/publish.py
 src/allmydata/mutable/repairer.py
+src/allmydata/mutable/retrieve.py
 src/allmydata/mutable/servermap.py
 src/allmydata/nodemaker.py
 src/allmydata/node.py
 src/allmydata/scripts/admin.py
+src/allmydata/scripts/backupdb.py
 src/allmydata/scripts/cli.py
+src/allmydata/scripts/common_http.py
 src/allmydata/scripts/common.py
 src/allmydata/scripts/create_node.py
 src/allmydata/scripts/default_nodedir.py
 src/allmydata/scripts/__init__.py
 src/allmydata/scripts/magic_folder_cli.py
+src/allmydata/scripts/slow_operation.py
 src/allmydata/scripts/stats_gatherer.py
 src/allmydata/scripts/tahoe_add_alias.py
 src/allmydata/scripts/tahoe_backup.py
+src/allmydata/scripts/tahoe_check.py
+src/allmydata/scripts/tahoe_cp.py
+src/allmydata/scripts/tahoe_daemonize.py
+src/allmydata/scripts/tahoe_get.py
 src/allmydata/scripts/tahoe_invite.py
+src/allmydata/scripts/tahoe_ls.py
+src/allmydata/scripts/tahoe_manifest.py
+src/allmydata/scripts/tahoe_mkdir.py
+src/allmydata/scripts/tahoe_mv.py
+src/allmydata/scripts/tahoe_put.py
 src/allmydata/scripts/tahoe_restart.py
 src/allmydata/scripts/tahoe_run.py
 src/allmydata/scripts/tahoe_start.py
 src/allmydata/scripts/tahoe_status.py
+src/allmydata/scripts/tahoe_stop.py
+src/allmydata/scripts/tahoe_unlink.py
+src/allmydata/scripts/tahoe_webopen.py
 src/allmydata/stats.py
 src/allmydata/storage/
 src/allmydata/test/bench_dirnode.py
@@ -81,6 +95,7 @@ src/allmydata/test/cli/test_daemonize.py
 src/allmydata/test/cli/test_invite.py
 src/allmydata/test/cli/test_start.py
 src/allmydata/test/cli/test_status.py
+src/allmydata/test/common.py
 src/allmydata/test/common_util.py
 src/allmydata/test/common_web.py
 src/allmydata/test/eliotutil.py
@@ -109,11 +124,14 @@ src/allmydata/test/test_checker.py
 src/allmydata/test/test_client.py
 src/allmydata/test/test_configutil.py
 src/allmydata/test/test_connections.py
+src/allmydata/test/test_crawler.py
 src/allmydata/test/test_eliotutil.py
 src/allmydata/test/test_encode.py
+src/allmydata/test/test_encodingutil.py
 src/allmydata/test/test_filenode.py
 src/allmydata/test/test_ftp.py
 src/allmydata/test/test_happiness.py
+src/allmydata/test/test_hashtree.py
 src/allmydata/test/test_helper.py
 src/allmydata/test/test_hung_server.py
 src/allmydata/test/test_i2p_provider.py
@@ -130,6 +148,7 @@ src/allmydata/test/test_netstring.py
 src/allmydata/test/test_node.py
 src/allmydata/test/test_no_network.py
 src/allmydata/test/test_observer.py
+src/allmydata/test/test_repairer.py
 src/allmydata/test/test_runner.py
 src/allmydata/test/test_stats.py
 src/allmydata/test/test_storage_client.py
@@ -147,6 +166,7 @@ src/allmydata/test/web/test_root.py
 src/allmydata/test/web/test_token.py
 src/allmydata/test/web/test_util.py
 src/allmydata/unknown.py
+src/allmydata/uri.py
 src/allmydata/util/abbreviate.py
 src/allmydata/util/base32.py
 src/allmydata/util/base62.py
@@ -154,12 +174,15 @@ src/allmydata/util/cachedir.py
 src/allmydata/util/configutil.py
 src/allmydata/util/connection_status.py
 src/allmydata/util/consumer.py
+src/allmydata/util/dbutil.py
+src/allmydata/util/deferredutil.py
 src/allmydata/util/dictutil.py
 src/allmydata/util/eliotutil.py
 src/allmydata/util/hashutil.py
 src/allmydata/util/i2p_provider.py
 src/allmydata/util/idlib.py
 src/allmydata/util/__init__.py
+src/allmydata/util/iputil.py
 src/allmydata/util/keyutil.py
 src/allmydata/util/limiter.py
 src/allmydata/util/log.py
@@ -184,5 +207,6 @@ src/allmydata/watchdog/
 src/allmydata/web/
 src/allmydata/windows/__init__.py
 src/allmydata/windows/tahoesvc.py
+static/
 static/tahoe.py
 ws_client.py
@@ -399,7 +399,7 @@ def cross_check(pkg_resources_vers_and_locs, imported_vers_and_locs_list):
             pr_normver = normalized_version(pr_ver)
         except verlib.IrrationalVersionError:
             continue
-        except Exception, e:
+        except Exception as e:
             errors.append("Warning: version number %r found for dependency %r by pkg_resources could not be parsed. "
                           "The version found by import was %r from %r. "
                           "pkg_resources thought it should be found at %r. "

@@ -416,7 +416,7 @@ def cross_check(pkg_resources_vers_and_locs, imported_vers_and_locs_list):
             imp_normver = normalized_version(imp_ver)
         except verlib.IrrationalVersionError:
             continue
-        except Exception, e:
+        except Exception as e:
             errors.append("Warning: version number %r found for dependency %r (imported from %r) could not be parsed. "
                           "pkg_resources thought it should be version %r at %r. "
                           "The exception was %s: %s"

@@ -470,7 +470,7 @@ def check_all_requirements():
     for requirement in install_requires:
         try:
             check_requirement(requirement, vers_and_locs)
-        except (ImportError, PackagingError), e:
+        except (ImportError, PackagingError) as e:
             fatal_errors.append("%s: %s" % (e.__class__.__name__, e))
 
     if fatal_errors:

@@ -42,7 +42,7 @@ class Blacklist:
                     si = base32.a2b(si_s) # must be valid base32
                     self.entries[si] = reason
                 self.last_mtime = current_mtime
-            except Exception, e:
+            except Exception as e:
                 twisted_log.err(e, "unparseable blacklist file")
                 raise
 

@@ -402,7 +402,7 @@ class DirectoryNode(object):
                 log.msg(format="mutable cap for child %(name)s unpacked from an immutable directory",
                         name=quote_output(name, encoding='utf-8'),
                         facility="tahoe.webish", level=log.UNUSUAL)
-            except CapConstraintError, e:
+            except CapConstraintError as e:
                 log.msg(format="unmet constraint on cap for child %(name)s unpacked from a directory:\n"
                         "%(message)s", message=e.args[0], name=quote_output(name, encoding='utf-8'),
                         facility="tahoe.webish", level=log.UNUSUAL)
@@ -539,7 +539,7 @@ class OverwriteableFileConsumer(PrefixingLogMixin):
         self.is_closed = True
         try:
             self.f.close()
-        except Exception, e:
+        except Exception as e:
             self.log("suppressed %r from close of temporary file %r" % (e, self.f), level=WEIRD)
         self.download_done("closed")
         return self.done_status

@@ -256,9 +256,9 @@ class ValidatedReadBucketProxy(log.PrefixingLogMixin):
             sharehashes = dict(sh)
             try:
                 self.share_hash_tree.set_hashes(sharehashes)
-            except IndexError, le:
+            except IndexError as le:
                 raise BadOrMissingHash(le)
-            except (hashtree.BadHashError, hashtree.NotEnoughHashesError), le:
+            except (hashtree.BadHashError, hashtree.NotEnoughHashesError) as le:
                 raise BadOrMissingHash(le)
         d.addCallback(_got_share_hashes)
         return d

@@ -289,9 +289,9 @@ class ValidatedReadBucketProxy(log.PrefixingLogMixin):
 
             try:
                 self.block_hash_tree.set_hashes(bh)
-            except IndexError, le:
+            except IndexError as le:
                 raise BadOrMissingHash(le)
-            except (hashtree.BadHashError, hashtree.NotEnoughHashesError), le:
+            except (hashtree.BadHashError, hashtree.NotEnoughHashesError) as le:
                 raise BadOrMissingHash(le)
         d.addCallback(_got_block_hashes)
         return d

@@ -316,9 +316,9 @@ class ValidatedReadBucketProxy(log.PrefixingLogMixin):
             ct_hashes = dict(enumerate(hashes))
             try:
                 crypttext_hash_tree.set_hashes(ct_hashes)
-            except IndexError, le:
+            except IndexError as le:
                 raise BadOrMissingHash(le)
-            except (hashtree.BadHashError, hashtree.NotEnoughHashesError), le:
+            except (hashtree.BadHashError, hashtree.NotEnoughHashesError) as le:
                 raise BadOrMissingHash(le)
         d.addCallback(_got_crypttext_hashes)
         return d

@@ -359,7 +359,7 @@ class ValidatedReadBucketProxy(log.PrefixingLogMixin):
         sharehashes, blockhashes, blockdata = results
         try:
             sharehashes = dict(sharehashes)
-        except ValueError, le:
+        except ValueError as le:
             le.args = tuple(le.args + (sharehashes,))
             raise
         blockhashes = dict(enumerate(blockhashes))

@@ -373,7 +373,7 @@ class ValidatedReadBucketProxy(log.PrefixingLogMixin):
         # match the root node of self.share_hash_tree.
         try:
             self.share_hash_tree.set_hashes(sharehashes)
-        except IndexError, le:
+        except IndexError as le:
             # Weird -- sharehashes contained index numbers outside of
             # the range that fit into this hash tree.
             raise BadOrMissingHash(le)

@@ -400,7 +400,7 @@ class ValidatedReadBucketProxy(log.PrefixingLogMixin):
         #                (self.sharenum, blocknum, len(blockdata),
         #                 blockdata[:50], blockdata[-50:], base32.b2a(blockhash)))
 
-        except (hashtree.BadHashError, hashtree.NotEnoughHashesError), le:
+        except (hashtree.BadHashError, hashtree.NotEnoughHashesError) as le:
             # log.WEIRD: indicates undetected disk/network error, or more
             # likely a programming error
             self.log("hash failure in block=%d, shnum=%d on %s" %
@@ -208,7 +208,7 @@ class Share:
                     level=log.NOISY, parent=self._lp, umid="BaL1zw")
             self._do_loop()
             # all exception cases call self._fail(), which clears self._alive
-        except (BadHashError, NotEnoughHashesError, LayoutInvalid), e:
+        except (BadHashError, NotEnoughHashesError, LayoutInvalid) as e:
             # Abandon this share. We do this if we see corruption in the
             # offset table, the UEB, or a hash tree. We don't abandon the
             # whole share if we see corruption in a data block (we abandon

@@ -225,7 +225,7 @@ class Share:
                     share=repr(self),
                     level=log.UNUSUAL, parent=self._lp, umid="gWspVw")
             self._fail(Failure(e), log.UNUSUAL)
-        except DataUnavailable, e:
+        except DataUnavailable as e:
             # Abandon this share.
             log.msg(format="need data that will never be available"
                     " from %s: pending=%s, received=%s, unavailable=%s" %

@@ -416,7 +416,7 @@ class Share:
         try:
             self._node.validate_and_store_UEB(UEB_s)
             return True
-        except (LayoutInvalid, BadHashError), e:
+        except (LayoutInvalid, BadHashError) as e:
             # TODO: if this UEB was bad, we'll keep trying to validate it
             # over and over again. Only log.err on the first one, or better
             # yet skip all but the first

@@ -452,7 +452,7 @@ class Share:
         try:
             self._node.process_share_hashes(share_hashes)
             # adds to self._node.share_hash_tree
-        except (BadHashError, NotEnoughHashesError), e:
+        except (BadHashError, NotEnoughHashesError) as e:
             f = Failure(e)
             self._signal_corruption(f, o["share_hashes"], hashlen)
             self.had_corruption = True

@@ -481,7 +481,7 @@ class Share:
         # cannot validate)
         try:
             self._commonshare.process_block_hashes(block_hashes)
-        except (BadHashError, NotEnoughHashesError), e:
+        except (BadHashError, NotEnoughHashesError) as e:
             f = Failure(e)
             hashnums = ",".join([str(n) for n in sorted(block_hashes.keys())])
             log.msg(format="hash failure in block_hashes=(%(hashnums)s),"

@@ -509,7 +509,7 @@ class Share:
         # gotten them all
         try:
             self._node.process_ciphertext_hashes(hashes)
-        except (BadHashError, NotEnoughHashesError), e:
+        except (BadHashError, NotEnoughHashesError) as e:
             f = Failure(e)
             hashnums = ",".join([str(n) for n in sorted(hashes.keys())])
             log.msg(format="hash failure in ciphertext_hashes=(%(hashnums)s),"

@@ -553,7 +553,7 @@ class Share:
             # now clear our received data, to dodge the #1170 spans.py
             # complexity bug
             self._received = DataSpans()
-        except (BadHashError, NotEnoughHashesError), e:
+        except (BadHashError, NotEnoughHashesError) as e:
             # rats, we have a corrupt block. Notify our clients that they
             # need to look elsewhere, and advise the server. Unlike
             # corruption in other parts of the share, this doesn't cause us
@@ -85,7 +85,7 @@ def get_magicfolderdb(dbfile, stderr=sys.stderr,
         else:
             print("invalid magicfolderdb schema version specified", file=stderr)
             return None
-    except DBError, e:
+    except DBError as e:
         print(e, file=stderr)
         return None
 

@@ -756,7 +756,7 @@ class Retrieve(object):
             try:
                 bht.set_hashes(blockhashes)
             except (hashtree.BadHashError, hashtree.NotEnoughHashesError, \
-                    IndexError), e:
+                    IndexError) as e:
                 raise CorruptShareError(server,
                                         reader.shnum,
                                         "block hash tree failure: %s" % e)

@@ -770,7 +770,7 @@ class Retrieve(object):
             try:
                 bht.set_hashes(leaves={segnum: blockhash})
             except (hashtree.BadHashError, hashtree.NotEnoughHashesError, \
-                    IndexError), e:
+                    IndexError) as e:
                 raise CorruptShareError(server,
                                         reader.shnum,
                                         "block hash tree failure: %s" % e)

@@ -788,7 +788,7 @@ class Retrieve(object):
             self.share_hash_tree.set_hashes(hashes=sharehashes,
                                             leaves={reader.shnum: bht[0]})
         except (hashtree.BadHashError, hashtree.NotEnoughHashesError, \
-                IndexError), e:
+                IndexError) as e:
             raise CorruptShareError(server,
                                     reader.shnum,
                                     "corrupt hashes: %s" % e)
@@ -73,7 +73,7 @@ def get_backupdb(dbfile, stderr=sys.stderr,
         (sqlite3, db) = get_db(dbfile, stderr, create_version, updaters=UPDATERS,
                                just_create=just_create, dbname="backupdb")
         return BackupDB_v2(sqlite3, db)
-    except DBError, e:
+    except DBError as e:
         print(e, file=stderr)
         return None
 

@@ -70,7 +70,7 @@ def do_http(method, url, body=""):
 
     try:
         c.endheaders()
-    except socket_error, err:
+    except socket_error as err:
         return BadResponse(url, err)
 
     while True:

@@ -234,7 +234,7 @@ def dump_SDMF_share(m, length, options):
 
     try:
         pieces = unpack_share(data)
-    except NeedMoreDataError, e:
+    except NeedMoreDataError as e:
        # retry once with the larger size
        size = e.needed_bytes
        f = open(options['filename'], "rb")

@@ -726,7 +726,7 @@ def describe_share(abs_sharefile, si_s, shnum_s, now, out):
 
         try:
             pieces = unpack_share(data)
-        except NeedMoreDataError, e:
+        except NeedMoreDataError as e:
             # retry once with the larger size
             size = e.needed_bytes
             f.seek(m.DATA_OFFSET)

@@ -22,7 +22,7 @@ class SlowOperationRunner:
         where = options.where
         try:
             rootcap, path = get_alias(options.aliases, where, DEFAULT_ALIAS)
-        except UnknownAliasError, e:
+        except UnknownAliasError as e:
             e.display(stderr)
             return 1
         if path == '/':

@@ -24,7 +24,7 @@ def check_location(options, where):
         nodeurl += "/"
     try:
         rootcap, path = get_alias(options.aliases, where, DEFAULT_ALIAS)
-    except UnknownAliasError, e:
+    except UnknownAliasError as e:
         e.display(stderr)
         return 1
     if path == '/':

@@ -285,7 +285,7 @@ class DeepCheckStreamer(LineOnlyReceiver):
 
         try:
             rootcap, path = get_alias(options.aliases, where, DEFAULT_ALIAS)
-        except UnknownAliasError, e:
+        except UnknownAliasError as e:
             e.display(stderr)
             return 1
         if path == '/':

@@ -488,7 +488,7 @@ class Copier:
         try:
             status = self.try_copy()
             return status
-        except TahoeError, te:
+        except TahoeError as te:
             if verbosity >= 2:
                 Failure().printTraceback(self.stderr)
                 print(file=self.stderr)

@@ -206,7 +206,7 @@ def daemonize(config):
     twistd_config = MyTwistdConfig()
     try:
         twistd_config.parseOptions(twistd_args)
-    except usage.error, ue:
+    except usage.error as ue:
         # these arguments were unsuitable for 'twistd'
         print(config, file=err)
         print("tahoe %s: usage error from twistd: %s\n" % (config.subcommand_name, ue), file=err)

@@ -17,7 +17,7 @@ def get(options):
         nodeurl += "/"
     try:
         rootcap, path = get_alias(aliases, from_file, DEFAULT_ALIAS)
-    except UnknownAliasError, e:
+    except UnknownAliasError as e:
         e.display(stderr)
         return 1
     url = nodeurl + "uri/%s" % urllib.quote(rootcap)

@@ -20,7 +20,7 @@ def list(options):
         where = where[:-1]
     try:
         rootcap, path = get_alias(aliases, where, DEFAULT_ALIAS)
-    except UnknownAliasError, e:
+    except UnknownAliasError as e:
         e.display(stderr)
         return 1
     url = nodeurl + "uri/%s" % urllib.quote(rootcap)

@@ -54,7 +54,7 @@ def list(options):
 
     try:
         parsed = json.loads(data)
-    except Exception, e:
+    except Exception as e:
         print("error: %s" % quote_output(e.args[0], quotemarks=False), file=stderr)
         print("Could not parse JSON response:", file=stderr)
         print(quote_output(data, quotemarks=False), file=stderr)

@@ -30,7 +30,7 @@ class ManifestStreamer(LineOnlyReceiver):
         where = options.where
         try:
             rootcap, path = get_alias(options.aliases, where, DEFAULT_ALIAS)
-        except UnknownAliasError, e:
+        except UnknownAliasError as e:
             e.display(stderr)
             return 1
         if path == '/':

@@ -71,7 +71,7 @@ class ManifestStreamer(LineOnlyReceiver):
 
             try:
                 d = json.loads(line.decode('utf-8'))
-            except Exception, e:
+            except Exception as e:
                 print("ERROR could not decode/parse %s\nERROR %r" % (quote_output(line), e), file=stderr)
             else:
                 if d["type"] in ("file", "directory"):

@@ -16,7 +16,7 @@ def mkdir(options):
     if where:
         try:
             rootcap, path = get_alias(aliases, where, DEFAULT_ALIAS)
-        except UnknownAliasError, e:
+        except UnknownAliasError as e:
             e.display(stderr)
             return 1
 

@@ -22,7 +22,7 @@ def mv(options, mode="move"):
         nodeurl += "/"
     try:
         rootcap, from_path = get_alias(aliases, from_file, DEFAULT_ALIAS)
-    except UnknownAliasError, e:
+    except UnknownAliasError as e:
         e.display(stderr)
         return 1
     from_url = nodeurl + "uri/%s" % urllib.quote(rootcap)

@@ -40,7 +40,7 @@ def mv(options, mode="move"):
     # now get the target
     try:
         rootcap, path = get_alias(aliases, to_file, DEFAULT_ALIAS)
-    except UnknownAliasError, e:
+    except UnknownAliasError as e:
         e.display(stderr)
         return 1
     to_url = nodeurl + "uri/%s" % urllib.quote(rootcap)

@@ -50,7 +50,7 @@ def put(options):
     else:
         try:
             rootcap, path = get_alias(aliases, to_file, DEFAULT_ALIAS)
-        except UnknownAliasError, e:
+        except UnknownAliasError as e:
             e.display(stderr)
             return 1
         if path.startswith("/"):

@@ -44,7 +44,7 @@ def stop(config):
     # the user but keep waiting until they give up.
     try:
         os.kill(pid, signal.SIGKILL)
-    except OSError, oserr:
+    except OSError as oserr:
         if oserr.errno == 3:
             print(oserr.strerror)
             # the process didn't exist, so wipe the pid file

@@ -19,7 +19,7 @@ def unlink(options, command="unlink"):
         nodeurl += "/"
     try:
         rootcap, path = get_alias(aliases, where, DEFAULT_ALIAS)
-    except UnknownAliasError, e:
+    except UnknownAliasError as e:
         e.display(stderr)
         return 1
     if not path:

@@ -12,7 +12,7 @@ def webopen(options, opener=None):
     if where:
         try:
             rootcap, path = get_alias(options.aliases, where, DEFAULT_ALIAS)
-        except UnknownAliasError, e:
+        except UnknownAliasError as e:
             e.display(stderr)
             return 1
         if path == '/':
@@ -274,7 +274,7 @@ class FakeCHKFileNode:
             return self.my_uri.get_size()
         try:
             data = self.all_contents[self.my_uri.to_string()]
-        except KeyError, le:
+        except KeyError as le:
             raise NotEnoughSharesError(le, 0, 3)
         return len(data)
     def get_current_size(self):

@@ -294,7 +294,7 @@ class Basic(unittest.TestCase, StallMixin, pollmixin.PollMixin):
                 left = p["remaining-sleep-time"]
                 self.failUnless(isinstance(left, float), left)
                 self.failUnless(left > 0.0, left)
-            except Exception, e:
+            except Exception as e:
                 did_check_progress[0] = e
             else:
                 did_check_progress[0] = True

@@ -145,7 +145,7 @@ class DeepCheckBase(GridTestMixin, ErrorMixin, StallMixin, ShouldFailMixin,
                 continue
             try:
                 yield json.loads(unit)
-            except ValueError, le:
+            except ValueError as le:
                 le.args = tuple(le.args + (unit,))
                 raise
 

@@ -953,7 +953,7 @@ class DeepCheckWebBad(DeepCheckBase, unittest.TestCase):
             self.failUnlessEqual(cr.get_version_counter_recoverable(), 1, where)
             self.failUnlessEqual(cr.get_version_counter_unrecoverable(), 0, where)
             return cr
-        except Exception, le:
+        except Exception as le:
             le.args = tuple(le.args + (where,))
             raise
 

@@ -271,7 +271,7 @@ class StdlibUnicode(unittest.TestCase):
 
         try:
             os.mkdir(lumiere_nfc)
-        except EnvironmentError, e:
+        except EnvironmentError as e:
             raise unittest.SkipTest("%r\nIt is possible that the filesystem on which this test is being run "
                                     "does not support Unicode, even though the platform does." % (e,))
 

@@ -186,7 +186,7 @@ class Incomplete(unittest.TestCase):
         # this should succeed
         try:
             iht.set_hashes(chain, leaves={0: tagged_hash("tag", "0")})
-        except hashtree.BadHashError, e:
+        except hashtree.BadHashError as e:
             self.fail("bad hash: %s" % e)
 
         self.failUnlessEqual(ht.get_leaf(0), tagged_hash("tag", "0"))

@@ -215,5 +215,5 @@ class Incomplete(unittest.TestCase):
         # this should succeed
         try:
             iht.set_hashes(chain, leaves={4: tagged_hash("tag", "4")})
-        except hashtree.BadHashError, e:
+        except hashtree.BadHashError as e:
             self.fail("bad hash: %s" % e)

@@ -117,7 +117,7 @@ class Verifier(GridTestMixin, unittest.TestCase, RepairTestMixin):
             self.failIfBigger(delta_reads, MAX_DELTA_READS)
             try:
                 judgement(vr)
-            except unittest.FailTest, e:
+            except unittest.FailTest as e:
                 # FailTest just uses e.args[0] == str
                 new_arg = str(e.args[0]) + "\nvr.data is: " + str(vr.as_dict())
                 e.args = (new_arg,)

@@ -66,19 +66,19 @@ class HumanReadable(unittest.TestCase):
         self.failUnlessEqual(hr({1:2}), "{1:2}")
         try:
             raise ValueError
-        except Exception, e:
+        except Exception as e:
             self.failUnless(
                 hr(e) == "<ValueError: ()>" # python-2.4
                 or hr(e) == "ValueError()") # python-2.5
         try:
             raise ValueError("oops")
-        except Exception, e:
+        except Exception as e:
             self.failUnless(
                 hr(e) == "<ValueError: 'oops'>" # python-2.4
                 or hr(e) == "ValueError('oops',)") # python-2.5
         try:
             raise NoArgumentException
-        except Exception, e:
+        except Exception as e:
             self.failUnless(
                 hr(e) == "<NoArgumentException>" # python-2.4
                 or hr(e) == "NoArgumentException()") # python-2.5

@@ -361,18 +361,18 @@ class Asserts(unittest.TestCase):
     def should_assert(self, func, *args, **kwargs):
         try:
             func(*args, **kwargs)
-        except AssertionError, e:
+        except AssertionError as e:
             return str(e)
-        except Exception, e:
+        except Exception as e:
             self.fail("assert failed with non-AssertionError: %s" % e)
         self.fail("assert was not caught")
 
     def should_not_assert(self, func, *args, **kwargs):
         try:
             func(*args, **kwargs)
-        except AssertionError, e:
+        except AssertionError as e:
             self.fail("assertion fired when it should not have: %s" % e)
-        except Exception, e:
+        except Exception as e:
             self.fail("assertion (which shouldn't have failed) failed with non-AssertionError: %s" % e)
         return # we're happy
 

@@ -816,7 +816,7 @@ def from_string(u, deep_immutable=False, name=u"<unknown name>"):
                 else:
                     error = MustBeReadonlyError(kind + " used in a read-only context", name)
 
-    except BadURIError, e:
+    except BadURIError as e:
         error = e
 
     return UnknownURI(u, error=error)

@@ -22,7 +22,7 @@ def get_db(dbfile, stderr=sys.stderr,
     must_create = not os.path.exists(dbfile)
     try:
         db = sqlite3.connect(dbfile)
-    except (EnvironmentError, sqlite3.OperationalError), e:
+    except (EnvironmentError, sqlite3.OperationalError) as e:
         raise DBError("Unable to create/open %s file %s: %s" % (dbname, dbfile, e))
 
     schema, target_version = create_version

@@ -46,7 +46,7 @@ def get_db(dbfile, stderr=sys.stderr,
     try:
         c.execute("SELECT version FROM version")
         version = c.fetchone()[0]
-    except sqlite3.DatabaseError, e:
+    except sqlite3.DatabaseError as e:
         # this indicates that the file is not a compatible database format.
         # Perhaps it was created with an old version, or it might be junk.
         raise DBError("%s file is unusable: %s" % (dbname, e))
@@ -28,7 +28,7 @@ def timeout_call(reactor, d, timeout):
         try:
             timer.cancel()
             timer_d.callback(x)
-        except error.AlreadyCalled, defer.AlreadyCalledError:
+        except (error.AlreadyCalled, defer.AlreadyCalledError):
             pass
         return None
 
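The timeout_call hunk just above deserves a note: it is the one place where the conversion fixes behavior rather than spelling. In Python 2, `except error.AlreadyCalled, defer.AlreadyCalledError:` did not catch two exception types; it caught only `error.AlreadyCalled` and assigned the caught instance to the target `defer.AlreadyCalledError`, silently clobbering that attribute of the `defer` module. The parenthesized replacement catches both types and binds nothing. A self-contained illustration with stand-in classes (the real ones live in `twisted.internet.error` and `twisted.internet.defer`):

    class AlreadyCalled(Exception):
        pass

    class AlreadyCalledError(Exception):
        pass

    # Python 2 parsed "except AlreadyCalled, AlreadyCalledError:" as
    # "catch AlreadyCalled and bind it to the name AlreadyCalledError",
    # rebinding the second name instead of catching it.
    try:
        raise AlreadyCalled("timer fired twice")
    except (AlreadyCalled, AlreadyCalledError):
        # PEP 3110 tuple form: both types are caught, nothing is rebound.
        pass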
@@ -37,7 +37,7 @@ def rename(src, dst, tries=4, basedelay=0.1):
     for i in range(tries-1):
         try:
             return os.rename(src, dst)
-        except EnvironmentError, le:
+        except EnvironmentError as le:
             # XXX Tighten this to check if this is a permission denied error (possibly due to another Windows process having the file open and execute the superkludge only in this case.
             log.msg("XXX KLUDGE Attempting to move file %s => %s; got %s; sleeping %s seconds" % (src, dst, le, basedelay,))
             time.sleep(basedelay)

@@ -65,7 +65,7 @@ def remove(f, tries=4, basedelay=0.1):
     for i in range(tries-1):
         try:
             return os.remove(f)
-        except EnvironmentError, le:
+        except EnvironmentError as le:
             # XXX Tighten this to check if this is a permission denied error (possibly due to another Windows process having the file open and execute the superkludge only in this case.
             if not os.path.exists(f):
                 return

@@ -183,7 +183,7 @@ def make_dirs(dirname, mode=0o777):
     tx = None
     try:
         os.makedirs(dirname, mode)
-    except OSError, x:
+    except OSError as x:
         tx = x
 
     if not os.path.isdir(dirname):

@@ -210,7 +210,7 @@ def rm_dir(dirname):
             else:
                 remove(fullname)
         os.rmdir(dirname)
-    except Exception, le:
+    except Exception as le:
         # Ignore "No such file or directory"
         if (not isinstance(le, OSError)) or le.args[0] != 2:
             excs.append(le)

@@ -214,7 +214,7 @@ def _query(path, args, regex):
             p = subprocess.Popen([path] + list(args), stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env)
             (output, err) = p.communicate()
             break
-        except OSError, e:
+        except OSError as e:
             if e.errno == errno.EINTR and trial < TRIES-1:
                 continue
             raise

@@ -143,7 +143,7 @@ def initialize():
                 if self._hConsole is None:
                     try:
                         self._stream.flush()
-                    except Exception, e:
+                    except Exception as e:
                         _complain("%s.flush: %r from %r" % (self.name, e, self._stream))
                         raise
 

@@ -169,7 +169,7 @@ def initialize():
                             remaining -= n.value
                             if remaining == 0: break
                             text = text[n.value:]
-                except Exception, e:
+                except Exception as e:
                     _complain("%s.write: %r" % (self.name, e))
                     raise
 

@@ -177,7 +177,7 @@ def initialize():
                 try:
                     for line in lines:
                         self.write(line)
-                except Exception, e:
+                except Exception as e:
                     _complain("%s.writelines: %r" % (self.name, e))
                     raise
 

@@ -190,7 +190,7 @@ def initialize():
             sys.stderr = UnicodeOutput(hStderr, None, STDERR_FILENO, '<Unicode console stderr>')
         else:
             sys.stderr = UnicodeOutput(None, sys.stderr, old_stderr_fileno, '<Unicode redirected stderr>')
-    except Exception, e:
+    except Exception as e:
         _complain("exception %r while fixing up sys.stdout and sys.stderr" % (e,))
 
     # This works around <http://bugs.python.org/issue2128>.

@@ -221,7 +221,7 @@ def initialize():
 
         try:
             argv = [unmangle(argv_unicode[i]).encode('utf-8') for i in xrange(0, argc.value)]
-        except Exception, e:
+        except Exception as e:
             _complain("%s: could not unmangle Unicode arguments.\n%r"
                       % (sys.argv[0], [argv_unicode[i] for i in xrange(0, argc.value)]))
             raise

@@ -345,7 +345,7 @@ class INotify(PollMixin):
             try:
                 with CALLBACK(inotify_events=event_mask):
                     cb(None, path1, event_mask)
-            except Exception, e2:
+            except Exception as e2:
                 log.err(e2)
         self._pending = set()
 

@@ -360,7 +360,7 @@ class INotify(PollMixin):
                     reactor.callFromThread(_maybe_notify, path)
                 if self._check_stop():
                     return
-        except Exception, e:
+        except Exception as e:
             log.err(e)
             self._state = STOPPED
             raise