From ee9d9d79848cfcc5e4e49e094db07c25082f9b64 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Fri, 11 Sep 2020 15:38:04 -0400 Subject: [PATCH 001/213] Add mypy checks as separate tox environment. --- mypy.ini | 2 ++ tox.ini | 7 +++++++ 2 files changed, 9 insertions(+) create mode 100644 mypy.ini diff --git a/mypy.ini b/mypy.ini new file mode 100644 index 000000000..976ba0294 --- /dev/null +++ b/mypy.ini @@ -0,0 +1,2 @@ +[mypy] +ignore_missing_imports = True diff --git a/tox.ini b/tox.ini index 597270e3a..f5c8f3f42 100644 --- a/tox.ini +++ b/tox.ini @@ -108,6 +108,13 @@ commands = # file. See pyproject.toml for legal values. python -m towncrier.check --pyproject towncrier.pyproject.toml + +[testenv:typechecks] +skip_install = True +deps = mypy +commands = mypy src + + [testenv:draftnews] passenv = TAHOE_LAFS_* PIP_* SUBUNITREPORTER_* USERPROFILE HOMEDRIVE HOMEPATH # see comment in [testenv] about "certifi" From ab54585558f32c92aa9e786df19c42d577e15a44 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Fri, 18 Sep 2020 15:49:21 -0400 Subject: [PATCH 002/213] Incorporate mypy-zope to support zope interfaces. --- mypy.ini | 1 + tox.ini | 4 +++- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/mypy.ini b/mypy.ini index 976ba0294..01cbb57a8 100644 --- a/mypy.ini +++ b/mypy.ini @@ -1,2 +1,3 @@ [mypy] ignore_missing_imports = True +plugins=mypy_zope:plugin diff --git a/tox.ini b/tox.ini index f5c8f3f42..1c232b4d7 100644 --- a/tox.ini +++ b/tox.ini @@ -111,7 +111,9 @@ commands = [testenv:typechecks] skip_install = True -deps = mypy +deps = + mypy + mypy-zope commands = mypy src From 6d2d82d7b7342be42d0392ccac0af3ed8882f41c Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Tue, 13 Oct 2020 21:06:07 -0400 Subject: [PATCH 003/213] Use pre-release versions of foolscap and mypy-zope with intended support for RemoteInterface subclasses. 
--- tox.ini | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index 1c232b4d7..a13e840a4 100644 --- a/tox.ini +++ b/tox.ini @@ -113,7 +113,8 @@ commands = skip_install = True deps = mypy - mypy-zope + git+https://github.com/jaraco/mypy-zope@bugfix/21-InterfaceClass-subclass + git+https://github.com/jaraco/foolscap@bugfix/75-use-metaclass commands = mypy src From 4b559ffc3332222c99a86b0f75ea05a01a845321 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 31 Oct 2020 16:06:52 -0400 Subject: [PATCH 004/213] Add typechecks to tox run --- tox.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index a13e840a4..0cc0c4ac2 100644 --- a/tox.ini +++ b/tox.ini @@ -7,7 +7,7 @@ twisted = 1 [tox] -envlist = codechecks,py27,py36,pypy27 +envlist = typechecks,codechecks,py27,py36,pypy27 minversion = 2.4 [testenv] From f2ffa78198e756a2337a66463241127c38318509 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Fri, 20 Nov 2020 12:32:11 -0500 Subject: [PATCH 005/213] Define type of PollMixin._poll_should_ignore_these_errors --- src/allmydata/util/pollmixin.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/src/allmydata/util/pollmixin.py b/src/allmydata/util/pollmixin.py index 5d1716853..582bafe86 100644 --- a/src/allmydata/util/pollmixin.py +++ b/src/allmydata/util/pollmixin.py @@ -14,6 +14,12 @@ if PY2: from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 import time + +try: + from typing import List +except ImportError: + pass + from twisted.internet import task class TimeoutError(Exception): @@ -23,7 +29,7 @@ class PollComplete(Exception): pass class PollMixin(object): - _poll_should_ignore_these_errors = [] + _poll_should_ignore_these_errors = [] # type: List[Exception] def poll(self, check_f, pollinterval=0.01, timeout=1000): # Return a Deferred, then call check_f 
periodically until it returns From ce3b775944fdd9070f57de1230c4b2f1c1b8a5b7 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Fri, 20 Nov 2020 12:33:41 -0500 Subject: [PATCH 006/213] Suppress typing error in test_python3 --- src/allmydata/test/test_python3.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/allmydata/test/test_python3.py b/src/allmydata/test/test_python3.py index 80242f8a2..c1f0e83d6 100644 --- a/src/allmydata/test/test_python3.py +++ b/src/allmydata/test/test_python3.py @@ -44,7 +44,7 @@ class Python3PortingEffortTests(SynchronousTestCase): ), ), ) - test_finished_porting.todo = native_str( + test_finished_porting.todo = native_str( # type: ignore "https://tahoe-lafs.org/trac/tahoe-lafs/milestone/Support%20Python%203 should be completed", ) From d1ea36781a651ece31807c32f366ce6c596b8e70 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Fri, 20 Nov 2020 13:12:52 -0500 Subject: [PATCH 007/213] Add type declarations to check_load. --- src/allmydata/test/check_load.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/src/allmydata/test/check_load.py b/src/allmydata/test/check_load.py index 4058ddf77..21576ea3a 100644 --- a/src/allmydata/test/check_load.py +++ b/src/allmydata/test/check_load.py @@ -37,6 +37,11 @@ a mean of 10kB and a max of 100MB, so filesize=min(int(1.0/random(.0002)),1e8) import os, sys, httplib, binascii import urllib, json, random, time, urlparse +try: + from typing import Dict +except ImportError: + pass + # Python 2 compatibility from future.utils import PY2 if PY2: @@ -49,13 +54,13 @@ if sys.argv[1] == "--stats": DELAY = 10 MAXSAMPLES = 6 totals = [] - last_stats = {} + last_stats = {} # type: Dict[str, float] while True: - stats = {} + stats = {} # type: Dict[str, float] for sf in statsfiles: for line in open(sf, "r").readlines(): - name, value = line.split(":") - value = int(value.strip()) + name, str_value = line.split(":") + value = int(str_value.strip()) if name 
not in stats: stats[name] = 0 stats[name] += float(value) From 8da82e9ed55d363fb963b95e9b96fac79e6c1bb8 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Fri, 20 Nov 2020 13:47:23 -0500 Subject: [PATCH 008/213] Add workaround for Shoobx/mypy-zope#26. --- src/allmydata/web/private.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/src/allmydata/web/private.py b/src/allmydata/web/private.py index fea058405..a86c869c4 100644 --- a/src/allmydata/web/private.py +++ b/src/allmydata/web/private.py @@ -61,7 +61,11 @@ class IToken(ICredentials): pass -@implementer(IToken) +# Shoobx/mypy-zope#26 +_itoken_impl = implementer(IToken) + + +@_itoken_impl @attr.s class Token(object): proposed_token = attr.ib(type=bytes) From 25cce8b77ebbf6d195f938d39e0c00aa9e6262b9 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Mon, 23 Nov 2020 13:57:37 -0500 Subject: [PATCH 009/213] Suppress typing errors in fileutil, crawler, fixups. --- src/allmydata/storage/crawler.py | 2 +- src/allmydata/util/fileutil.py | 2 +- src/allmydata/windows/fixups.py | 7 ++++++- 3 files changed, 8 insertions(+), 3 deletions(-) diff --git a/src/allmydata/storage/crawler.py b/src/allmydata/storage/crawler.py index 24042c38b..f13f7cb99 100644 --- a/src/allmydata/storage/crawler.py +++ b/src/allmydata/storage/crawler.py @@ -19,7 +19,7 @@ import os, time, struct try: import cPickle as pickle except ImportError: - import pickle + import pickle # type: ignore from twisted.internet import reactor from twisted.application import service from allmydata.storage.common import si_b2a diff --git a/src/allmydata/util/fileutil.py b/src/allmydata/util/fileutil.py index ea16c0d6a..e40e06180 100644 --- a/src/allmydata/util/fileutil.py +++ b/src/allmydata/util/fileutil.py @@ -311,7 +311,7 @@ def precondition_abspath(path): _getfullpathname = None try: - from nt import _getfullpathname + from nt import _getfullpathname # type: ignore except ImportError: pass diff --git a/src/allmydata/windows/fixups.py 
b/src/allmydata/windows/fixups.py index e7f045b95..c5ba3bb57 100644 --- a/src/allmydata/windows/fixups.py +++ b/src/allmydata/windows/fixups.py @@ -217,7 +217,12 @@ def initialize(): # Instead it "mangles" or escapes them using \x7F as an escape character, which we # unescape here. def unmangle(s): - return re.sub(u'\\x7F[0-9a-fA-F]*\\;', lambda m: unichr(int(m.group(0)[1:-1], 16)), s) + return re.sub( + u'\\x7F[0-9a-fA-F]*\\;', + # type ignored for 'unichr' + lambda m: unichr(int(m.group(0)[1:-1], 16)), # type: ignore + s, + ) try: argv = [unmangle(argv_unicode[i]).encode('utf-8') for i in xrange(0, argc.value)] From df31d7db5b23bbc1e12d83f44bf23c9cce316b25 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Mon, 23 Nov 2020 14:05:33 -0500 Subject: [PATCH 010/213] Suppress type error in Node.GENERATED_FILES, apparently unused. --- src/allmydata/node.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/allmydata/node.py b/src/allmydata/node.py index 0dcd900aa..7622d5bc3 100644 --- a/src/allmydata/node.py +++ b/src/allmydata/node.py @@ -713,7 +713,7 @@ class Node(service.MultiService): """ NODETYPE = "unknown NODETYPE" CERTFILE = "node.pem" - GENERATED_FILES = [] + GENERATED_FILES = [] # type: ignore def __init__(self, config, main_tub, control_tub, i2p_provider, tor_provider): """ From dec6f6d64705b0f103dba3436598f42e96df365c Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Mon, 23 Nov 2020 14:08:23 -0500 Subject: [PATCH 011/213] Remove Interface subclass, as IURI is an interface. Fixes mypy error. 
--- src/allmydata/interfaces.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/allmydata/interfaces.py b/src/allmydata/interfaces.py index 49dcf7646..537f6d655 100644 --- a/src/allmydata/interfaces.py +++ b/src/allmydata/interfaces.py @@ -682,7 +682,7 @@ class IURI(Interface): passing into init_from_string.""" -class IVerifierURI(Interface, IURI): +class IVerifierURI(IURI): def init_from_string(uri): """Accept a string (as created by my to_string() method) and populate this instance with its data. I am not normally called directly, From 5f40c562ebb0754c3eabfb0b1dddaeb6d9e4e3a6 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Mon, 23 Nov 2020 14:09:46 -0500 Subject: [PATCH 012/213] Remove self arguments to IProgress, which mypy caught as improper. --- src/allmydata/interfaces.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/allmydata/interfaces.py b/src/allmydata/interfaces.py index 537f6d655..95b1fdf63 100644 --- a/src/allmydata/interfaces.py +++ b/src/allmydata/interfaces.py @@ -749,7 +749,7 @@ class IProgress(Interface): "Current amount of progress (in percentage)" ) - def set_progress(self, value): + def set_progress(value): """ Sets the current amount of progress. @@ -757,7 +757,7 @@ class IProgress(Interface): set_progress_total. """ - def set_progress_total(self, value): + def set_progress_total(value): """ Sets the total amount of expected progress From adf06889181c41c20db56aece8537a46945ae3ea Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Mon, 23 Nov 2020 14:15:39 -0500 Subject: [PATCH 013/213] Add a non-implementation of encode_proposal to satisfy interface. 
--- src/allmydata/codec.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/allmydata/codec.py b/src/allmydata/codec.py index a4baab4b6..19345959e 100644 --- a/src/allmydata/codec.py +++ b/src/allmydata/codec.py @@ -57,6 +57,10 @@ class CRSEncoder(object): return defer.succeed((shares, desired_share_ids)) + def encode_proposal(self, data, desired_share_ids=None): + raise NotImplementedError() + + @implementer(ICodecDecoder) class CRSDecoder(object): From 4998c4693fdfd900d0659000888e0e0a59346b8c Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 29 Nov 2020 13:46:52 -0500 Subject: [PATCH 014/213] Ignore type checks on Referenceable objects. Ref warner/foolscap#78. --- src/allmydata/immutable/offloaded.py | 4 ++-- src/allmydata/immutable/upload.py | 2 +- src/allmydata/storage/immutable.py | 4 ++-- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/src/allmydata/immutable/offloaded.py b/src/allmydata/immutable/offloaded.py index d574b980d..4e18ad216 100644 --- a/src/allmydata/immutable/offloaded.py +++ b/src/allmydata/immutable/offloaded.py @@ -141,7 +141,7 @@ class CHKCheckerAndUEBFetcher(object): @implementer(interfaces.RICHKUploadHelper) -class CHKUploadHelper(Referenceable, upload.CHKUploader): +class CHKUploadHelper(Referenceable, upload.CHKUploader): # type: ignore # warner/foolscap#78 """I am the helper-server -side counterpart to AssistedUploader. I handle peer selection, encoding, and share pushing. I read ciphertext from the remote AssistedUploader. 
@@ -502,7 +502,7 @@ class LocalCiphertextReader(AskUntilSuccessMixin): @implementer(interfaces.RIHelper, interfaces.IStatsProducer) -class Helper(Referenceable): +class Helper(Referenceable): # type: ignore # warner/foolscap#78 """ :ivar dict[bytes, CHKUploadHelper] _active_uploads: For any uploads which have been started but not finished, a mapping from storage index to the diff --git a/src/allmydata/immutable/upload.py b/src/allmydata/immutable/upload.py index e77cbb30b..18f818504 100644 --- a/src/allmydata/immutable/upload.py +++ b/src/allmydata/immutable/upload.py @@ -1423,7 +1423,7 @@ class LiteralUploader(object): return self._status @implementer(RIEncryptedUploadable) -class RemoteEncryptedUploadable(Referenceable): +class RemoteEncryptedUploadable(Referenceable): # type: ignore # warner/foolscap#78 def __init__(self, encrypted_uploadable, upload_status): self._eu = IEncryptedUploadable(encrypted_uploadable) diff --git a/src/allmydata/storage/immutable.py b/src/allmydata/storage/immutable.py index 778c0ddf8..4b60d79f1 100644 --- a/src/allmydata/storage/immutable.py +++ b/src/allmydata/storage/immutable.py @@ -202,7 +202,7 @@ class ShareFile(object): @implementer(RIBucketWriter) -class BucketWriter(Referenceable): +class BucketWriter(Referenceable): # type: ignore # warner/foolscap#78 def __init__(self, ss, incominghome, finalhome, max_size, lease_info, canary): self.ss = ss @@ -301,7 +301,7 @@ class BucketWriter(Referenceable): @implementer(RIBucketReader) -class BucketReader(Referenceable): +class BucketReader(Referenceable): # type: ignore # warner/foolscap#78 def __init__(self, ss, sharefname, storage_index=None, shnum=None): self.ss = ss From 50f81aa25d7ad86c5c238bc2a1d70afce25de03f Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 29 Nov 2020 13:54:39 -0500 Subject: [PATCH 015/213] Update two methods of introducer.client.IntroducerClient to match the interface definition. 
--- src/allmydata/introducer/client.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/allmydata/introducer/client.py b/src/allmydata/introducer/client.py index 0a6352317..62642d0af 100644 --- a/src/allmydata/introducer/client.py +++ b/src/allmydata/introducer/client.py @@ -157,15 +157,15 @@ class IntroducerClient(service.Service, Referenceable): kwargs["facility"] = "tahoe.introducer.client" return log.msg(*args, **kwargs) - def subscribe_to(self, service_name, cb, *args, **kwargs): - self._local_subscribers.append( (service_name,cb,args,kwargs) ) + def subscribe_to(self, service_name, callback, *args, **kwargs): + self._local_subscribers.append( (service_name,callback,args,kwargs) ) self._subscribed_service_names.add(service_name) self._maybe_subscribe() for index,(ann,key_s,when) in self._inbound_announcements.items(): precondition(isinstance(key_s, str), key_s) servicename = index[0] if servicename == service_name: - eventually(cb, key_s, ann, *args, **kwargs) + eventually(callback, key_s, ann, *args, **kwargs) def _maybe_subscribe(self): if not self._publisher: @@ -198,7 +198,7 @@ class IntroducerClient(service.Service, Referenceable): ann_d.update(ann) return ann_d - def publish(self, service_name, ann, signing_key): + def publish(self, service_name, ann, signing_key=None): # we increment the seqnum every time we publish something new current_seqnum, current_nonce = self._sequencer() From bc3508ce6098d65ae0407047a2e02d3a1bacfee9 Mon Sep 17 00:00:00 2001 From: "Jason R. 
Coombs" Date: Sun, 29 Nov 2020 14:01:05 -0500 Subject: [PATCH 016/213] Ignore type checks on cmp usage (awaiting Python 3 porting) --- src/allmydata/web/status.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/allmydata/web/status.py b/src/allmydata/web/status.py index 7f6020a99..de15230fd 100644 --- a/src/allmydata/web/status.py +++ b/src/allmydata/web/status.py @@ -1335,7 +1335,7 @@ class Status(MultiFormatResource): active = [s for s in self._get_all_statuses() if s.get_active()] - active.sort(lambda a, b: cmp(a.get_started(), b.get_started())) + active.sort(lambda a, b: cmp(a.get_started(), b.get_started())) # type: ignore # py2 active.reverse() return active @@ -1343,7 +1343,7 @@ class Status(MultiFormatResource): recent = [s for s in self._get_all_statuses() if not s.get_active()] - recent.sort(lambda a, b: cmp(a.get_started(), b.get_started())) + recent.sort(lambda a, b: cmp(a.get_started(), b.get_started())) # type: ignore # py2 recent.reverse() return recent From 6ba7533168eff65454376987258748f99fb1057d Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 29 Nov 2020 14:04:59 -0500 Subject: [PATCH 017/213] Ignore failure on StorageServer.slot_testv_and_readv_and_writev, the implementation of which deviates from the interface spec substantially. --- src/allmydata/storage/server.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/allmydata/storage/server.py b/src/allmydata/storage/server.py index 8a8138f26..b7df702d5 100644 --- a/src/allmydata/storage/server.py +++ b/src/allmydata/storage/server.py @@ -581,7 +581,7 @@ class StorageServer(service.MultiService, Referenceable): for share in six.viewvalues(shares): share.add_or_renew_lease(lease_info) - def slot_testv_and_readv_and_writev( + def slot_testv_and_readv_and_writev( # type: ignore # fixme self, storage_index, secrets, From dca0840c350e5b188672e7410a4995b50a1dca60 Mon Sep 17 00:00:00 2001 From: "Jason R. 
Coombs" Date: Sun, 29 Nov 2020 14:08:33 -0500 Subject: [PATCH 018/213] Add stubs for methods demanded by the interface --- src/allmydata/uri.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/src/allmydata/uri.py b/src/allmydata/uri.py index 2c367cafe..e95c86d96 100644 --- a/src/allmydata/uri.py +++ b/src/allmydata/uri.py @@ -535,6 +535,12 @@ class _DirectoryBaseURI(_BaseURI): def get_storage_index(self): return self._filenode_uri.get_storage_index() + def get_readonly(self): + raise NotImplementedError() + + def is_readonly(self): + raise NotImplementedError() + @implementer(IDirectoryURI) class DirectoryURI(_DirectoryBaseURI): From cc91b7c9edf0d5fc64525b410b50da0748f1180b Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 29 Nov 2020 14:15:24 -0500 Subject: [PATCH 019/213] Declare DirectoryURIVerifier type to allow subclass to override. --- src/allmydata/uri.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/src/allmydata/uri.py b/src/allmydata/uri.py index e95c86d96..d7f4782cd 100644 --- a/src/allmydata/uri.py +++ b/src/allmydata/uri.py @@ -22,6 +22,11 @@ from past.builtins import unicode, long import re +try: + from typing import Type +except ImportError: + pass + from zope.interface import implementer from twisted.python.components import registerAdapter @@ -707,7 +712,7 @@ class DirectoryURIVerifier(_DirectoryBaseURI): BASE_STRING=b'URI:DIR2-Verifier:' BASE_STRING_RE=re.compile(b'^'+BASE_STRING) - INNER_URI_CLASS=SSKVerifierURI + INNER_URI_CLASS=SSKVerifierURI # type: Type[IVerifierURI] def __init__(self, filenode_uri=None): if filenode_uri: From 7e757d2ec4e201d4e9a1e03be66bd5007eb5c094 Mon Sep 17 00:00:00 2001 From: "Jason R. 
Coombs" Date: Sun, 29 Nov 2020 14:23:59 -0500 Subject: [PATCH 020/213] As _ImmutableFileNodeBase doesn't implement the interface, move the implementer declaration to LiteralFileNode --- src/allmydata/immutable/literal.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/allmydata/immutable/literal.py b/src/allmydata/immutable/literal.py index 68db478f3..6ed5571b9 100644 --- a/src/allmydata/immutable/literal.py +++ b/src/allmydata/immutable/literal.py @@ -19,7 +19,7 @@ from twisted.protocols import basic from allmydata.interfaces import IImmutableFileNode, ICheckable from allmydata.uri import LiteralFileURI -@implementer(IImmutableFileNode, ICheckable) + class _ImmutableFileNodeBase(object): def get_write_uri(self): @@ -56,6 +56,7 @@ class _ImmutableFileNodeBase(object): return not self == other +@implementer(IImmutableFileNode, ICheckable) class LiteralFileNode(_ImmutableFileNodeBase): def __init__(self, filecap): From e9ddcf5911dd75cceea8611dd7a134d7a7eb2fb1 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 29 Nov 2020 14:26:20 -0500 Subject: [PATCH 021/213] Implement set_size as required by the interface --- src/allmydata/immutable/encode.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/allmydata/immutable/encode.py b/src/allmydata/immutable/encode.py index 9351df501..e743ce766 100644 --- a/src/allmydata/immutable/encode.py +++ b/src/allmydata/immutable/encode.py @@ -711,3 +711,6 @@ class Encoder(object): return self.uri_extension_data def get_uri_extension_hash(self): return self.uri_extension_hash + + def set_size(self, size): + raise NotImplementedError() From 1248d65778242c81e914fa6096990c3e3e68fcba Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 29 Nov 2020 14:32:34 -0500 Subject: [PATCH 022/213] Declare types for BasedirOptions. Fixes several errors. 
--- src/allmydata/scripts/common.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/src/allmydata/scripts/common.py b/src/allmydata/scripts/common.py index 34266ee72..a633da655 100644 --- a/src/allmydata/scripts/common.py +++ b/src/allmydata/scripts/common.py @@ -4,6 +4,11 @@ import os, sys, urllib, textwrap import codecs from os.path import join +try: + from typing import Optional, Sequence, List +except ImportError: + pass + # Python 2 compatibility from future.utils import PY2 if PY2: @@ -64,7 +69,7 @@ class BasedirOptions(BaseOptions): optParameters = [ ["basedir", "C", None, "Specify which Tahoe base directory should be used. [default: %s]" % quote_local_unicode_path(_default_nodedir)], - ] + ] # type: List[Sequence[Optional[str]]] def parseArgs(self, basedir=None): # This finds the node-directory option correctly even if we are in a subcommand. From e0eb63929a56e46d622fa48b1be159b09cefc031 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 29 Nov 2020 14:35:17 -0500 Subject: [PATCH 023/213] Declare type for BaseOptions.description. Fixes many type errors. --- src/allmydata/scripts/common.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/allmydata/scripts/common.py b/src/allmydata/scripts/common.py index a633da655..6d281e07c 100644 --- a/src/allmydata/scripts/common.py +++ b/src/allmydata/scripts/common.py @@ -45,7 +45,7 @@ class BaseOptions(usage.Options): def opt_version(self): raise usage.UsageError("--version not allowed on subcommands") - description = None + description = None # type: Optional[str] description_unwrapped = None def __str__(self): From 1b92da75fa01d94a8486e1a4108c3dcce10458e8 Mon Sep 17 00:00:00 2001 From: "Jason R. 
Coombs" Date: Sun, 29 Nov 2020 14:44:17 -0500 Subject: [PATCH 024/213] Some subclasses use ints, so just go for Any --- src/allmydata/scripts/common.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/allmydata/scripts/common.py b/src/allmydata/scripts/common.py index 6d281e07c..438b4a7e2 100644 --- a/src/allmydata/scripts/common.py +++ b/src/allmydata/scripts/common.py @@ -5,7 +5,7 @@ import codecs from os.path import join try: - from typing import Optional, Sequence, List + from typing import Optional, Sequence, List, Any except ImportError: pass @@ -69,7 +69,7 @@ class BasedirOptions(BaseOptions): optParameters = [ ["basedir", "C", None, "Specify which Tahoe base directory should be used. [default: %s]" % quote_local_unicode_path(_default_nodedir)], - ] # type: List[Sequence[Optional[str]]] + ] # type: List[Sequence[Any]] def parseArgs(self, basedir=None): # This finds the node-directory option correctly even if we are in a subcommand. From c3a22966e82fd698eab1d62b5416be53b863013b Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 29 Nov 2020 14:52:45 -0500 Subject: [PATCH 025/213] Add stubs for methods demanded by IPeerSelector --- src/allmydata/immutable/upload.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/src/allmydata/immutable/upload.py b/src/allmydata/immutable/upload.py index 18f818504..171b71eff 100644 --- a/src/allmydata/immutable/upload.py +++ b/src/allmydata/immutable/upload.py @@ -385,6 +385,12 @@ class PeerSelector(object): ) return self.happiness_mappings + def add_peers(self, peerids=None): + raise NotImplementedError + + def confirm_share_allocation(self, peerid, shnum): + raise NotImplementedError + class _QueryStatistics(object): From af172f6bff9b5de980473a02155840e50cdbae91 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 29 Nov 2020 14:55:13 -0500 Subject: [PATCH 026/213] Repeat type declaration from parent to avoid over-constraining this type for subclasses. 
--- src/allmydata/scripts/common.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/allmydata/scripts/common.py b/src/allmydata/scripts/common.py index 438b4a7e2..81511681d 100644 --- a/src/allmydata/scripts/common.py +++ b/src/allmydata/scripts/common.py @@ -106,7 +106,7 @@ class NoDefaultBasedirOptions(BasedirOptions): optParameters = [ ["basedir", "C", None, "Specify which Tahoe base directory should be used."], - ] + ] # type: List[Sequence[Any]] # This is overridden in order to ensure we get a "Wrong number of arguments." # error when more than one argument is given. From 103bec6a1579373df3238c8b5ee09eb72f79718a Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 29 Nov 2020 14:58:28 -0500 Subject: [PATCH 027/213] On MutableFileNode, accept optional 'progress' parameter as declared by the interface. --- src/allmydata/mutable/filenode.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/allmydata/mutable/filenode.py b/src/allmydata/mutable/filenode.py index 5afc84dec..54e9844d6 100644 --- a/src/allmydata/mutable/filenode.py +++ b/src/allmydata/mutable/filenode.py @@ -564,7 +564,7 @@ class MutableFileNode(object): return d - def upload(self, new_contents, servermap): + def upload(self, new_contents, servermap, progress=None): """ I overwrite the contents of the best recoverable version of this mutable file with new_contents, using servermap instead of From a75454a04f61c3e96b080553fe1f7e412b1d976b Mon Sep 17 00:00:00 2001 From: "Jason R. 
Coombs" Date: Sun, 29 Nov 2020 15:02:13 -0500 Subject: [PATCH 028/213] Add stub for MutableFileVersion.get_servermap --- src/allmydata/mutable/filenode.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/allmydata/mutable/filenode.py b/src/allmydata/mutable/filenode.py index 54e9844d6..e9cf23fa1 100644 --- a/src/allmydata/mutable/filenode.py +++ b/src/allmydata/mutable/filenode.py @@ -1205,3 +1205,6 @@ class MutableFileVersion(object): self._servermap, mode=mode) return u.update() + + def get_servermap(self): + raise NotImplementedError From 32b77c42394c337fd2bc38f6ff2be29e5acdd001 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 29 Nov 2020 15:06:12 -0500 Subject: [PATCH 029/213] Ignore interface violation in MutableFileVersion.download_to_data --- src/allmydata/mutable/filenode.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/allmydata/mutable/filenode.py b/src/allmydata/mutable/filenode.py index e9cf23fa1..4613a918b 100644 --- a/src/allmydata/mutable/filenode.py +++ b/src/allmydata/mutable/filenode.py @@ -951,7 +951,7 @@ class MutableFileVersion(object): return self._servermap.size_of_version(self._version) - def download_to_data(self, fetch_privkey=False, progress=None): + def download_to_data(self, fetch_privkey=False, progress=None): # type: ignore # fixme """ I return a Deferred that fires with the contents of this readable object as a byte string. From 646297ddc3f95b9ab88b35d32a83782c8e2f178f Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 29 Nov 2020 15:08:46 -0500 Subject: [PATCH 030/213] Add stub for LocalCiphertextReader.set_upload_status --- src/allmydata/immutable/offloaded.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/allmydata/immutable/offloaded.py b/src/allmydata/immutable/offloaded.py index 4e18ad216..53a1f911a 100644 --- a/src/allmydata/immutable/offloaded.py +++ b/src/allmydata/immutable/offloaded.py @@ -499,6 +499,8 @@ class LocalCiphertextReader(AskUntilSuccessMixin): # ??. 
I'm not sure if it makes sense to forward the close message. return self.call("close") + def set_upload_status(self, upload_status): + raise NotImplementedError @implementer(interfaces.RIHelper, interfaces.IStatsProducer) From 67f0be8431157f965df50bd82e3098b9be3bbe12 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 29 Nov 2020 15:13:19 -0500 Subject: [PATCH 031/213] Prefer type(None) for better compatibility. --- src/allmydata/frontends/ftpd.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/src/allmydata/frontends/ftpd.py b/src/allmydata/frontends/ftpd.py index 0b18df85b..af5444969 100644 --- a/src/allmydata/frontends/ftpd.py +++ b/src/allmydata/frontends/ftpd.py @@ -1,7 +1,5 @@ from six import ensure_str -from types import NoneType - from zope.interface import implementer from twisted.application import service, strports from twisted.internet import defer @@ -317,7 +315,7 @@ class Dispatcher(object): class FTPServer(service.MultiService): def __init__(self, client, accountfile, accounturl, ftp_portstr): - precondition(isinstance(accountfile, (unicode, NoneType)), accountfile) + precondition(isinstance(accountfile, (unicode, type(None))), accountfile) service.MultiService.__init__(self) r = Dispatcher(client) From 8b991d3516166b62b7dfe4827ba7b1911de5f680 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 29 Nov 2020 15:16:45 -0500 Subject: [PATCH 032/213] Update DirectoryNode.set_uri to match interface spec. 
--- src/allmydata/dirnode.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/allmydata/dirnode.py b/src/allmydata/dirnode.py index 59ebd73ba..6f052c3c7 100644 --- a/src/allmydata/dirnode.py +++ b/src/allmydata/dirnode.py @@ -554,7 +554,7 @@ class DirectoryNode(object): d = self.get_child_and_metadata(childnamex) return d - def set_uri(self, namex, writecap, readcap, metadata=None, overwrite=True): + def set_uri(self, namex, writecap, readcap=None, metadata=None, overwrite=True): precondition(isinstance(writecap, (str,type(None))), writecap) precondition(isinstance(readcap, (str,type(None))), readcap) From 6ea9003436e79163a865a0a30b6051f7811c1bd3 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 29 Nov 2020 15:19:11 -0500 Subject: [PATCH 033/213] Declare MultiFormatResource.formatDefault as optional string for subclass overrides. --- src/allmydata/web/common_py3.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/src/allmydata/web/common_py3.py b/src/allmydata/web/common_py3.py index 22f235790..080c24c19 100644 --- a/src/allmydata/web/common_py3.py +++ b/src/allmydata/web/common_py3.py @@ -4,6 +4,11 @@ Common utilities that are available from Python 3. Can eventually be merged back into allmydata.web.common. """ +try: + from typing import Optional +except ImportError: + pass + from twisted.web import resource, http from allmydata.util import abbreviate @@ -47,7 +52,7 @@ class MultiFormatResource(resource.Resource, object): format if nothing else is given as the ``formatDefault``. """ formatArgument = "t" - formatDefault = None + formatDefault = None # type: Optional[str] def render(self, req): """ From cb351607d81e21919c3c380c0ac55cd0dba3a5fc Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 29 Nov 2020 15:21:25 -0500 Subject: [PATCH 034/213] Repeat type declaration from parent to avoid over-constraining this type for subclasses. 
--- src/allmydata/scripts/cli.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/src/allmydata/scripts/cli.py b/src/allmydata/scripts/cli.py index 379e1d212..96dde65f8 100644 --- a/src/allmydata/scripts/cli.py +++ b/src/allmydata/scripts/cli.py @@ -1,6 +1,12 @@ from __future__ import print_function import os.path, re, fnmatch + +try: + from typing import List, Sequence, Any +except ImportError: + pass + from twisted.python import usage from allmydata.scripts.common import get_aliases, get_default_nodedir, \ DEFAULT_ALIAS, BaseOptions @@ -19,7 +25,7 @@ class FileStoreOptions(BaseOptions): "This overrides the URL found in the --node-directory ."], ["dir-cap", None, None, "Specify which dirnode URI should be used as the 'tahoe' alias."] - ] + ] # type: List[Sequence[Any]] def postOptions(self): self["quiet"] = self.parent["quiet"] From 6b772e7fdc14f473492a61e3e5abf507116077d0 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 29 Nov 2020 15:22:58 -0500 Subject: [PATCH 035/213] Declare type for BaseOptions.description_unwrapped. --- src/allmydata/scripts/common.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/allmydata/scripts/common.py b/src/allmydata/scripts/common.py index 81511681d..b501c7e6a 100644 --- a/src/allmydata/scripts/common.py +++ b/src/allmydata/scripts/common.py @@ -46,7 +46,7 @@ class BaseOptions(usage.Options): raise usage.UsageError("--version not allowed on subcommands") description = None # type: Optional[str] - description_unwrapped = None + description_unwrapped = None # type: Optional[str] def __str__(self): width = int(os.environ.get('COLUMNS', '80')) From 41c341a3cc0e2a58847907cc9bee471de4d68519 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 29 Nov 2020 15:24:20 -0500 Subject: [PATCH 036/213] Prefer type(None) for better compatibility. 
--- src/allmydata/frontends/sftpd.py | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/src/allmydata/frontends/sftpd.py b/src/allmydata/frontends/sftpd.py index db914fa45..6ee42d505 100644 --- a/src/allmydata/frontends/sftpd.py +++ b/src/allmydata/frontends/sftpd.py @@ -1,6 +1,5 @@ import six import heapq, traceback, array, stat, struct -from types import NoneType from stat import S_IFREG, S_IFDIR from time import time, strftime, localtime @@ -267,7 +266,7 @@ def _attrs_to_metadata(attrs): def _direntry_for(filenode_or_parent, childname, filenode=None): - precondition(isinstance(childname, (unicode, NoneType)), childname=childname) + precondition(isinstance(childname, (unicode, type(None))), childname=childname) if childname is None: filenode_or_parent = filenode @@ -672,7 +671,7 @@ class GeneralSFTPFile(PrefixingLogMixin): self.log(".open(parent=%r, childname=%r, filenode=%r, metadata=%r)" % (parent, childname, filenode, metadata), level=OPERATIONAL) - precondition(isinstance(childname, (unicode, NoneType)), childname=childname) + precondition(isinstance(childname, (unicode, type(None))), childname=childname) precondition(filenode is None or IFileNode.providedBy(filenode), filenode=filenode) precondition(not self.closed, sftpfile=self) @@ -1194,7 +1193,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): request = "._sync_heisenfiles(%r, %r, ignore=%r)" % (userpath, direntry, ignore) self.log(request, level=OPERATIONAL) - _assert(isinstance(userpath, str) and isinstance(direntry, (str, NoneType)), + _assert(isinstance(userpath, str) and isinstance(direntry, (str, type(None))), userpath=userpath, direntry=direntry) files = [] @@ -1219,7 +1218,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): def _remove_heisenfile(self, userpath, parent, childname, file_to_remove): if noisy: self.log("._remove_heisenfile(%r, %r, %r, %r)" % (userpath, parent, childname, file_to_remove), level=NOISY) - _assert(isinstance(userpath, str) and 
isinstance(childname, (unicode, NoneType)), + _assert(isinstance(userpath, str) and isinstance(childname, (unicode, type(None))), userpath=userpath, childname=childname) direntry = _direntry_for(parent, childname) @@ -1246,7 +1245,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): (existing_file, userpath, flags, _repr_flags(flags), parent, childname, filenode, metadata), level=NOISY) - _assert((isinstance(userpath, str) and isinstance(childname, (unicode, NoneType)) and + _assert((isinstance(userpath, str) and isinstance(childname, (unicode, type(None))) and (metadata is None or 'no-write' in metadata)), userpath=userpath, childname=childname, metadata=metadata) @@ -1977,7 +1976,7 @@ class Dispatcher(object): class SFTPServer(service.MultiService): def __init__(self, client, accountfile, accounturl, sftp_portstr, pubkey_file, privkey_file): - precondition(isinstance(accountfile, (unicode, NoneType)), accountfile) + precondition(isinstance(accountfile, (unicode, type(None))), accountfile) precondition(isinstance(pubkey_file, unicode), pubkey_file) precondition(isinstance(privkey_file, unicode), privkey_file) service.MultiService.__init__(self) From acbb6b3e93f0a1082e63db42fae20131908afe5e Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 29 Nov 2020 15:48:26 -0500 Subject: [PATCH 037/213] Convert subcommands to tuples instead of lists, as that's what mypy demands for heterogeneous sequences. 
--- src/allmydata/scripts/admin.py | 9 ++++-- src/allmydata/scripts/cli.py | 39 +++++++++++++------------ src/allmydata/scripts/create_node.py | 13 ++++++--- src/allmydata/scripts/debug.py | 9 ++++-- src/allmydata/scripts/runner.py | 17 +++++++---- src/allmydata/scripts/stats_gatherer.py | 9 ++++-- src/allmydata/scripts/tahoe_invite.py | 7 ++++- src/allmydata/scripts/types_.py | 10 +++++++ 8 files changed, 77 insertions(+), 36 deletions(-) create mode 100644 src/allmydata/scripts/types_.py diff --git a/src/allmydata/scripts/admin.py b/src/allmydata/scripts/admin.py index e472ffd8c..50dde9e43 100644 --- a/src/allmydata/scripts/admin.py +++ b/src/allmydata/scripts/admin.py @@ -1,5 +1,10 @@ from __future__ import print_function +try: + from allmydata.scripts.types_ import SubCommands +except ImportError: + pass + from twisted.python import usage from allmydata.scripts.common import BaseOptions @@ -79,8 +84,8 @@ def do_admin(options): subCommands = [ - ["admin", None, AdminCommand, "admin subcommands: use 'tahoe admin' for a list"], - ] + ("admin", None, AdminCommand, "admin subcommands: use 'tahoe admin' for a list"), + ] # type: SubCommands dispatch = { "admin": do_admin, diff --git a/src/allmydata/scripts/cli.py b/src/allmydata/scripts/cli.py index 96dde65f8..2bb91472f 100644 --- a/src/allmydata/scripts/cli.py +++ b/src/allmydata/scripts/cli.py @@ -4,6 +4,7 @@ import os.path, re, fnmatch try: from typing import List, Sequence, Any + from allmydata.scripts.types_ import SubCommands except ImportError: pass @@ -461,25 +462,25 @@ class DeepCheckOptions(FileStoreOptions): Optionally repair any problems found.""" subCommands = [ - ["mkdir", None, MakeDirectoryOptions, "Create a new directory."], - ["add-alias", None, AddAliasOptions, "Add a new alias cap."], - ["create-alias", None, CreateAliasOptions, "Create a new alias cap."], - ["list-aliases", None, ListAliasesOptions, "List all alias caps."], - ["ls", None, ListOptions, "List a directory."], - ["get", None, 
GetOptions, "Retrieve a file from the grid."], - ["put", None, PutOptions, "Upload a file into the grid."], - ["cp", None, CpOptions, "Copy one or more files or directories."], - ["unlink", None, UnlinkOptions, "Unlink a file or directory on the grid."], - ["mv", None, MvOptions, "Move a file within the grid."], - ["ln", None, LnOptions, "Make an additional link to an existing file or directory."], - ["backup", None, BackupOptions, "Make target dir look like local dir."], - ["webopen", None, WebopenOptions, "Open a web browser to a grid file or directory."], - ["manifest", None, ManifestOptions, "List all files/directories in a subtree."], - ["stats", None, StatsOptions, "Print statistics about all files/directories in a subtree."], - ["check", None, CheckOptions, "Check a single file or directory."], - ["deep-check", None, DeepCheckOptions, "Check all files/directories reachable from a starting point."], - ["status", None, TahoeStatusCommand, "Various status information."], - ] + ("mkdir", None, MakeDirectoryOptions, "Create a new directory."), + ("add-alias", None, AddAliasOptions, "Add a new alias cap."), + ("create-alias", None, CreateAliasOptions, "Create a new alias cap."), + ("list-aliases", None, ListAliasesOptions, "List all alias caps."), + ("ls", None, ListOptions, "List a directory."), + ("get", None, GetOptions, "Retrieve a file from the grid."), + ("put", None, PutOptions, "Upload a file into the grid."), + ("cp", None, CpOptions, "Copy one or more files or directories."), + ("unlink", None, UnlinkOptions, "Unlink a file or directory on the grid."), + ("mv", None, MvOptions, "Move a file within the grid."), + ("ln", None, LnOptions, "Make an additional link to an existing file or directory."), + ("backup", None, BackupOptions, "Make target dir look like local dir."), + ("webopen", None, WebopenOptions, "Open a web browser to a grid file or directory."), + ("manifest", None, ManifestOptions, "List all files/directories in a subtree."), + ("stats", 
None, StatsOptions, "Print statistics about all files/directories in a subtree."), + ("check", None, CheckOptions, "Check a single file or directory."), + ("deep-check", None, DeepCheckOptions, "Check all files/directories reachable from a starting point."), + ("status", None, TahoeStatusCommand, "Various status information."), + ] # type: SubCommands def mkdir(options): from allmydata.scripts import tahoe_mkdir diff --git a/src/allmydata/scripts/create_node.py b/src/allmydata/scripts/create_node.py index 2634e0915..9af9bf3ad 100644 --- a/src/allmydata/scripts/create_node.py +++ b/src/allmydata/scripts/create_node.py @@ -3,6 +3,11 @@ from __future__ import print_function import os import json +try: + from allmydata.scripts.types_ import SubCommands +except ImportError: + pass + from twisted.internet import reactor, defer from twisted.python.usage import UsageError from allmydata.scripts.common import BasedirOptions, NoDefaultBasedirOptions @@ -478,10 +483,10 @@ def create_introducer(config): subCommands = [ - ["create-node", None, CreateNodeOptions, "Create a node that acts as a client, server or both."], - ["create-client", None, CreateClientOptions, "Create a client node (with storage initially disabled)."], - ["create-introducer", None, CreateIntroducerOptions, "Create an introducer node."], -] + ("create-node", None, CreateNodeOptions, "Create a node that acts as a client, server or both."), + ("create-client", None, CreateClientOptions, "Create a client node (with storage initially disabled)."), + ("create-introducer", None, CreateIntroducerOptions, "Create an introducer node."), +] # type: SubCommands dispatch = { "create-node": create_node, diff --git a/src/allmydata/scripts/debug.py b/src/allmydata/scripts/debug.py index fd3f2b87c..be6b7e2bc 100644 --- a/src/allmydata/scripts/debug.py +++ b/src/allmydata/scripts/debug.py @@ -1,5 +1,10 @@ from __future__ import print_function +try: + from allmydata.scripts.types_ import SubCommands +except ImportError: + 
pass + # do not import any allmydata modules at this level. Do that from inside # individual functions instead. import struct, time, os, sys @@ -1051,8 +1056,8 @@ def do_debug(options): subCommands = [ - ["debug", None, DebugCommand, "debug subcommands: use 'tahoe debug' for a list."], - ] + ("debug", None, DebugCommand, "debug subcommands: use 'tahoe debug' for a list."), + ] # type: SubCommands dispatch = { "debug": do_debug, diff --git a/src/allmydata/scripts/runner.py b/src/allmydata/scripts/runner.py index 3436a1b84..999d7d353 100644 --- a/src/allmydata/scripts/runner.py +++ b/src/allmydata/scripts/runner.py @@ -4,6 +4,11 @@ import os, sys from six.moves import StringIO import six +try: + from allmydata.scripts.types_ import SubCommands +except ImportError: + pass + from twisted.python import usage from twisted.internet import defer, task, threads @@ -45,12 +50,12 @@ _control_node_dispatch = { } process_control_commands = [ - ["run", None, tahoe_run.RunOptions, "run a node without daemonizing"], - ["daemonize", None, tahoe_daemonize.DaemonizeOptions, "(deprecated) run a node in the background"], - ["start", None, tahoe_start.StartOptions, "(deprecated) start a node in the background and confirm it started"], - ["stop", None, tahoe_stop.StopOptions, "(deprecated) stop a node"], - ["restart", None, tahoe_restart.RestartOptions, "(deprecated) restart a node"], -] + ("run", None, tahoe_run.RunOptions, "run a node without daemonizing"), + ("daemonize", None, tahoe_daemonize.DaemonizeOptions, "(deprecated) run a node in the background"), + ("start", None, tahoe_start.StartOptions, "(deprecated) start a node in the background and confirm it started"), + ("stop", None, tahoe_stop.StopOptions, "(deprecated) stop a node"), + ("restart", None, tahoe_restart.RestartOptions, "(deprecated) restart a node"), +] # type: SubCommands class Options(usage.Options): diff --git a/src/allmydata/scripts/stats_gatherer.py b/src/allmydata/scripts/stats_gatherer.py index 
26848a23c..b16ce689e 100644 --- a/src/allmydata/scripts/stats_gatherer.py +++ b/src/allmydata/scripts/stats_gatherer.py @@ -2,6 +2,11 @@ from __future__ import print_function import os +try: + from allmydata.scripts.types_ import SubCommands +except ImportError: + pass + # Python 2 compatibility from future.utils import PY2 if PY2: @@ -93,8 +98,8 @@ def create_stats_gatherer(config): return 0 subCommands = [ - ["create-stats-gatherer", None, CreateStatsGathererOptions, "Create a stats-gatherer service."], -] + ("create-stats-gatherer", None, CreateStatsGathererOptions, "Create a stats-gatherer service."), +] # type: SubCommands dispatch = { "create-stats-gatherer": create_stats_gatherer, diff --git a/src/allmydata/scripts/tahoe_invite.py b/src/allmydata/scripts/tahoe_invite.py index cca4216e3..f2d978f55 100644 --- a/src/allmydata/scripts/tahoe_invite.py +++ b/src/allmydata/scripts/tahoe_invite.py @@ -3,6 +3,11 @@ from __future__ import print_function import json from os.path import join +try: + from allmydata.scripts.types_ import SubCommands +except ImportError: + pass + from twisted.python import usage from twisted.internet import defer, reactor @@ -104,7 +109,7 @@ def invite(options): subCommands = [ ("invite", None, InviteOptions, "Invite a new node to this grid"), -] +] # type: SubCommands dispatch = { "invite": invite, diff --git a/src/allmydata/scripts/types_.py b/src/allmydata/scripts/types_.py new file mode 100644 index 000000000..58f88722b --- /dev/null +++ b/src/allmydata/scripts/types_.py @@ -0,0 +1,10 @@ +from typing import List, Tuple, Type +from allmydata.scripts.common import BaseOptions + + +# Historically, subcommands were implemented as lists, but due to a +# [designed constraint in mypy](https://stackoverflow.com/a/52559625/70170), +# a Tuple is required. +SubCommand = Tuple[str, None, Type[BaseOptions], str] + +SubCommands = List[SubCommand] From 572d7b2e02b2ba54c8c4db1592bac1898b48aba1 Mon Sep 17 00:00:00 2001 From: "Jason R. 
Coombs" Date: Sun, 29 Nov 2020 15:52:04 -0500 Subject: [PATCH 038/213] Ignore error when untyped Module has no dispatch. --- src/allmydata/scripts/runner.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/allmydata/scripts/runner.py b/src/allmydata/scripts/runner.py index 999d7d353..e56130d87 100644 --- a/src/allmydata/scripts/runner.py +++ b/src/allmydata/scripts/runner.py @@ -113,7 +113,7 @@ class Options(usage.Options): create_dispatch = {} for module in (create_node, stats_gatherer): - create_dispatch.update(module.dispatch) + create_dispatch.update(module.dispatch) # type: ignore def parse_options(argv, config=None): if not config: From b1b3a2341517d2b993e21f6764797aa3f6c01d4f Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 29 Nov 2020 15:56:39 -0500 Subject: [PATCH 039/213] Fix type errors with CPUUsageMonitor subclasses with float POLL_INTERVAL. --- src/allmydata/stats.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/allmydata/stats.py b/src/allmydata/stats.py index f669b0861..6de323b73 100644 --- a/src/allmydata/stats.py +++ b/src/allmydata/stats.py @@ -78,7 +78,7 @@ class LoadMonitor(service.MultiService): @implementer(IStatsProducer) class CPUUsageMonitor(service.MultiService): HISTORY_LENGTH = 15 - POLL_INTERVAL = 60 + POLL_INTERVAL = 60 # type: float def __init__(self): service.MultiService.__init__(self) From 1768377aecc4d7e563e0b42d0c79d8d0f2d7d8a5 Mon Sep 17 00:00:00 2001 From: "Jason R. 
Coombs" Date: Sun, 29 Nov 2020 16:04:56 -0500 Subject: [PATCH 040/213] Ignore error in DummyStorage --- src/allmydata/test/storage_plugin.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/allmydata/test/storage_plugin.py b/src/allmydata/test/storage_plugin.py index 4a1f84531..24081ae09 100644 --- a/src/allmydata/test/storage_plugin.py +++ b/src/allmydata/test/storage_plugin.py @@ -48,7 +48,7 @@ class RIDummy(RemoteInterface): -@implementer(IFoolscapStoragePlugin) +@implementer(IFoolscapStoragePlugin) # type: ignore # todo: make stubs for twisted @attr.s class DummyStorage(object): name = attr.ib() From 3653d7ed16c6b5d365664aca504af0c388e99752 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 29 Nov 2020 16:05:09 -0500 Subject: [PATCH 041/213] Ignore type checks on Referenceable objects. Ref warner/foolscap#78. --- src/allmydata/test/storage_plugin.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/allmydata/test/storage_plugin.py b/src/allmydata/test/storage_plugin.py index 24081ae09..b3464a88f 100644 --- a/src/allmydata/test/storage_plugin.py +++ b/src/allmydata/test/storage_plugin.py @@ -107,7 +107,7 @@ class GetCounter(Resource, object): @implementer(RIDummy) @attr.s(frozen=True) -class DummyStorageServer(object): +class DummyStorageServer(object): # type: ignore # warner/foolscap#78 get_anonymous_storage_server = attr.ib() def remote_just_some_method(self): From 2514196b275ad65b9e546b4c54898a43be949edd Mon Sep 17 00:00:00 2001 From: "Jason R. 
Coombs" Date: Sun, 29 Nov 2020 16:06:46 -0500 Subject: [PATCH 042/213] Suppress typing error in DummyStorageClient --- src/allmydata/test/storage_plugin.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/allmydata/test/storage_plugin.py b/src/allmydata/test/storage_plugin.py index b3464a88f..ba11776e6 100644 --- a/src/allmydata/test/storage_plugin.py +++ b/src/allmydata/test/storage_plugin.py @@ -116,7 +116,7 @@ class DummyStorageServer(object): # type: ignore # warner/foolscap#78 @implementer(IStorageServer) @attr.s -class DummyStorageClient(object): +class DummyStorageClient(object): # type: ignore # incomplete implementation get_rref = attr.ib() configuration = attr.ib() announcement = attr.ib() From ffa19d1c07e6a876fcebe8e8edbe968b10c842a9 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 29 Nov 2020 16:08:22 -0500 Subject: [PATCH 043/213] Suppress typing errors in common Nodes --- src/allmydata/test/common.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/allmydata/test/common.py b/src/allmydata/test/common.py index a420dd3ba..a68a56ba5 100644 --- a/src/allmydata/test/common.py +++ b/src/allmydata/test/common.py @@ -391,7 +391,7 @@ class DummyProducer(object): pass @implementer(IImmutableFileNode) -class FakeCHKFileNode(object): +class FakeCHKFileNode(object): # type: ignore # incomplete implementation """I provide IImmutableFileNode, but all of my data is stored in a class-level dictionary.""" @@ -529,7 +529,7 @@ def create_chk_filenode(contents, all_contents): @implementer(IMutableFileNode, ICheckable) -class FakeMutableFileNode(object): +class FakeMutableFileNode(object): # type: ignore # incomplete implementation """I provide IMutableFileNode, but all of my data is stored in a class-level dictionary.""" From 53ff740f0ec2a13b50673fb989190cc73bee60f6 Mon Sep 17 00:00:00 2001 From: "Jason R. 
Coombs" Date: Sun, 29 Nov 2020 16:14:10 -0500 Subject: [PATCH 044/213] Suppress type check error on NativeStorageServerWithVersion --- src/allmydata/test/test_storage_client.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/src/allmydata/test/test_storage_client.py b/src/allmydata/test/test_storage_client.py index fa3a34b15..f2be9ad1e 100644 --- a/src/allmydata/test/test_storage_client.py +++ b/src/allmydata/test/test_storage_client.py @@ -90,7 +90,12 @@ from allmydata.interfaces import ( SOME_FURL = b"pb://abcde@nowhere/fake" -class NativeStorageServerWithVersion(NativeStorageServer): + +# type checks fail with: +# Cannot determine consistent method resolution order (MRO) for "NativeStorageServerWithVersion" +# even though class hierarchy is single-inheritance. Probably `implementer` +# wrappers are affecting the MRO. +class NativeStorageServerWithVersion(NativeStorageServer): # type: ignore def __init__(self, version): # note: these instances won't work for anything other than # get_available_space() because we don't upcall From 7507e84a18902f5874a9aaaabf45dee92ec05510 Mon Sep 17 00:00:00 2001 From: "Jason R. 
Coombs" Date: Sun, 29 Nov 2020 16:19:24 -0500 Subject: [PATCH 045/213] Suppress errors in no_network --- src/allmydata/test/no_network.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/src/allmydata/test/no_network.py b/src/allmydata/test/no_network.py index 59ab807bb..1e8c519e1 100644 --- a/src/allmydata/test/no_network.py +++ b/src/allmydata/test/no_network.py @@ -67,7 +67,7 @@ class Marker(object): fireNow = partial(defer.succeed, None) -@implementer(IRemoteReference) +@implementer(IRemoteReference) # type: ignore # todo: write stubs for foolscap class LocalWrapper(object): """ A ``LocalWrapper`` presents the remote reference interface to a local @@ -212,9 +212,12 @@ class NoNetworkServer(object): return _StorageServer(lambda: self.rref) def get_version(self): return self.rref.version + def start_connecting(self, trigger_cb): + raise NotImplementedError + @implementer(IStorageBroker) -class NoNetworkStorageBroker(object): +class NoNetworkStorageBroker(object): # type: ignore # missing many methods def get_servers_for_psi(self, peer_selection_index): def _permuted(server): seed = server.get_permutation_seed() @@ -258,7 +261,7 @@ def create_no_network_client(basedir): return defer.succeed(client) -class _NoNetworkClient(_Client): +class _NoNetworkClient(_Client): # type: ignore # Cannot determine consistent MRO order """ Overrides all _Client networking functionality to do nothing. """ From 13cd780231bf3185104d930d748c75b9d324eabc Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 29 Nov 2020 16:22:12 -0500 Subject: [PATCH 046/213] Prefer sys.maxsize to sys.maxint. 
--- src/allmydata/test/check_memory.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/allmydata/test/check_memory.py b/src/allmydata/test/check_memory.py index 41cf6e1d7..4f50b383b 100644 --- a/src/allmydata/test/check_memory.py +++ b/src/allmydata/test/check_memory.py @@ -499,13 +499,13 @@ if __name__ == '__main__': mode = "upload" if len(sys.argv) > 1: mode = sys.argv[1] - if sys.maxint == 2147483647: + if sys.maxsize == 2147483647: bits = "32" - elif sys.maxint == 9223372036854775807: + elif sys.maxsize == 9223372036854775807: bits = "64" else: bits = "?" - print("%s-bit system (sys.maxint=%d)" % (bits, sys.maxint)) + print("%s-bit system (sys.maxsize=%d)" % (bits, sys.maxsize)) # put the logfile and stats.out in _test_memory/ . These stick around. # put the nodes and other files in _test_memory/test/ . These are # removed each time we run. From b0803a2ac054234f341ec1650695ce988d926f25 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 29 Nov 2020 16:24:20 -0500 Subject: [PATCH 047/213] Suppress errors in test_web due to ambiguous MRO --- src/allmydata/test/web/test_web.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/allmydata/test/web/test_web.py b/src/allmydata/test/web/test_web.py index 326569a26..a5fbe5a51 100644 --- a/src/allmydata/test/web/test_web.py +++ b/src/allmydata/test/web/test_web.py @@ -189,7 +189,7 @@ class FakeHistory(object): def list_all_helper_statuses(self): return [] -class FakeDisplayableServer(StubServer): +class FakeDisplayableServer(StubServer): # type: ignore # Cannot determine MRO def __init__(self, serverid, nickname, connected, last_connect_time, last_loss_time, last_rx_time): StubServer.__init__(self, serverid) @@ -255,7 +255,7 @@ class FakeStorageServer(service.MultiService): def on_status_changed(self, cb): cb(self) -class FakeClient(_Client): +class FakeClient(_Client): # type: ignore # Cannot determine MRO def __init__(self): # don't upcall to Client.__init__, 
since we only want to initialize a # minimal subset From fc19d1baf4a503dabc1a69b93adfd3200b5e413b Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 29 Nov 2020 16:26:36 -0500 Subject: [PATCH 048/213] Suppress errors in test_sftp --- src/allmydata/test/test_sftp.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/src/allmydata/test/test_sftp.py b/src/allmydata/test/test_sftp.py index b6f1fbc8a..1ff0363e8 100644 --- a/src/allmydata/test/test_sftp.py +++ b/src/allmydata/test/test_sftp.py @@ -9,18 +9,15 @@ from twisted.python.failure import Failure from twisted.internet.error import ProcessDone, ProcessTerminated from allmydata.util import deferredutil -conch_interfaces = None -sftp = None -sftpd = None - try: from twisted.conch import interfaces as conch_interfaces from twisted.conch.ssh import filetransfer as sftp from allmydata.frontends import sftpd except ImportError as e: + conch_interfaces = sftp = sftpd = None # type: ignore conch_unavailable_reason = e else: - conch_unavailable_reason = None + conch_unavailable_reason = None # type: ignore from allmydata.interfaces import IDirectoryNode, ExistingChildError, NoSuchChildError from allmydata.mutable.common import NotWriteableError From 54e45498367485d44ed6f886c50feb269a26c9e5 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 29 Nov 2020 16:30:13 -0500 Subject: [PATCH 049/213] Satisfy type check in test_helper.FakeClient. 
--- src/allmydata/test/test_helper.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/src/allmydata/test/test_helper.py b/src/allmydata/test/test_helper.py index 65c07135a..3faffbe0d 100644 --- a/src/allmydata/test/test_helper.py +++ b/src/allmydata/test/test_helper.py @@ -19,6 +19,12 @@ from functools import ( ) import attr +try: + from typing import List + from allmydata.introducer.client import IntroducerClient +except ImportError: + pass + from twisted.internet import defer from twisted.trial import unittest from twisted.application import service @@ -125,7 +131,7 @@ class FakeCHKCheckerAndUEBFetcher(object): )) class FakeClient(service.MultiService): - introducer_clients = [] + introducer_clients = [] # type: List[IntroducerClient] DEFAULT_ENCODING_PARAMETERS = {"k":25, "happy": 75, "n": 100, From 86f88a4aa5568876989f1bc222703cc9dd3d35fb Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 29 Nov 2020 16:32:19 -0500 Subject: [PATCH 050/213] Satisfy type checks in test_dirnode --- src/allmydata/test/test_dirnode.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/allmydata/test/test_dirnode.py b/src/allmydata/test/test_dirnode.py index 48ffff45a..8af1567c9 100644 --- a/src/allmydata/test/test_dirnode.py +++ b/src/allmydata/test/test_dirnode.py @@ -1526,7 +1526,7 @@ class Packing(testutil.ReallyEqualMixin, unittest.TestCase): kids, fn.get_writekey(), deep_immutable=True) @implementer(IMutableFileNode) -class FakeMutableFile(object): +class FakeMutableFile(object): # type: ignore # incomplete interface counter = 0 def __init__(self, initial_contents=""): data = self._get_initial_contents(initial_contents) @@ -1587,7 +1587,7 @@ class FakeNodeMaker(NodeMaker): def create_mutable_file(self, contents="", keysize=None, version=None): return defer.succeed(FakeMutableFile(contents)) -class FakeClient2(_Client): +class FakeClient2(_Client): # type: ignore # ambiguous MRO def __init__(self): self.nodemaker = 
FakeNodeMaker(None, None, None, None, None, From d2d3f1f4a9a6aecb482859c96e5732c41847051f Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 29 Nov 2020 16:33:25 -0500 Subject: [PATCH 051/213] Suppress type errors in test_checker --- src/allmydata/test/test_checker.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/allmydata/test/test_checker.py b/src/allmydata/test/test_checker.py index 85b894b1f..23ce44762 100644 --- a/src/allmydata/test/test_checker.py +++ b/src/allmydata/test/test_checker.py @@ -62,7 +62,7 @@ class FakeClient(object): @implementer(IServer) -class FakeServer(object): +class FakeServer(object): # type: ignore # incomplete interface def get_name(self): return "fake name" @@ -75,7 +75,7 @@ class FakeServer(object): @implementer(ICheckResults) -class FakeCheckResults(object): +class FakeCheckResults(object): # type: ignore # incomplete interface def __init__(self, si=None, healthy=False, recoverable=False, @@ -106,7 +106,7 @@ class FakeCheckResults(object): @implementer(ICheckAndRepairResults) -class FakeCheckAndRepairResults(object): +class FakeCheckAndRepairResults(object): # type: ignore # incomplete interface def __init__(self, si=None, repair_attempted=False, From d2e2a22f6230f880c133f05cad73bd9b8490a909 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Fri, 4 Dec 2020 10:34:21 -0500 Subject: [PATCH 052/213] Run typechecks in CircleCI --- .circleci/config.yml | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/.circleci/config.yml b/.circleci/config.yml index afa3fafa1..ff14d6dd3 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -91,6 +91,9 @@ workflows: - "build-porting-depgraph": <<: *DOCKERHUB_CONTEXT + - "typechecks": + <<: *DOCKERHUB_CONTEXT + images: # Build the Docker images used by the ci jobs. This makes the ci jobs # faster and takes various spurious failures out of the critical path. @@ -475,6 +478,17 @@ jobs: . 
/tmp/venv/bin/activate ./misc/python3/depgraph.sh + typechecks: + docker: + - <<: *DOCKERHUB_AUTH + image: "jaraco/multipy-tox" + + steps: + - "checkout" + - run: + name: "Validate Types" + command: tox -e typechecks + build-image: &BUILD_IMAGE # This is a template for a job to build a Docker image that has as much of # the setup as we can manage already done and baked in. This cuts down on From 3eb975748a9a6c32c7972ce4c498ec5afabfda9d Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Fri, 4 Dec 2020 19:56:51 -0500 Subject: [PATCH 053/213] Ignore type checks in allmydata. --- src/allmydata/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/allmydata/__init__.py b/src/allmydata/__init__.py index 15d5fb240..a43781158 100644 --- a/src/allmydata/__init__.py +++ b/src/allmydata/__init__.py @@ -14,7 +14,7 @@ __all__ = [ __version__ = "unknown" try: - from allmydata._version import __version__ + from allmydata._version import __version__ # type: ignore except ImportError: # We're running in a tree that hasn't run update_version, and didn't # come with a _version.py, so we don't know what our version is. 
@@ -24,7 +24,7 @@ except ImportError: full_version = "unknown" branch = "unknown" try: - from allmydata._version import full_version, branch + from allmydata._version import full_version, branch # type: ignore except ImportError: # We're running in a tree that hasn't run update_version, and didn't # come with a _version.py, so we don't know what our full version or From 8d439fd58d02366dff9b38204eeb67c204d39861 Mon Sep 17 00:00:00 2001 From: viktoriiasavchuk <75451912+viktoriiasavchuk@users.noreply.github.com> Date: Sun, 13 Dec 2020 20:59:58 +0200 Subject: [PATCH 054/213] Update CONTRIBUTING.rst Added link to the Contributor Code of Conduct, https://github.com/tahoe-lafs/tahoe-lafs/blob/master/docs/CODE_OF_CONDUCT.md --- .github/CONTRIBUTING.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/CONTRIBUTING.rst b/.github/CONTRIBUTING.rst index c8f5093f1..ad1f1b5f3 100644 --- a/.github/CONTRIBUTING.rst +++ b/.github/CONTRIBUTING.rst @@ -17,4 +17,4 @@ Examples of contributions include: * `Patch reviews `_ Before authoring or reviewing a patch, -please familiarize yourself with the `coding standard `_. +please familiarize yourself with the `Coding Standard `_ and the `Contributor Code of Conduct `_. From 3a0b72f34c262fa9a6493690be13bbe8b4648b7d Mon Sep 17 00:00:00 2001 From: Viktoriia <75451912+viktoriiasavchuk@users.noreply.github.com> Date: Tue, 15 Dec 2020 14:11:26 +0200 Subject: [PATCH 055/213] Update CONTRIBUTING.rst Changed 'Coding Standard' to 'Coding Standards' --- .github/CONTRIBUTING.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/CONTRIBUTING.rst b/.github/CONTRIBUTING.rst index ad1f1b5f3..0c0da9503 100644 --- a/.github/CONTRIBUTING.rst +++ b/.github/CONTRIBUTING.rst @@ -17,4 +17,4 @@ Examples of contributions include: * `Patch reviews `_ Before authoring or reviewing a patch, -please familiarize yourself with the `Coding Standard `_ and the `Contributor Code of Conduct `_. 
+please familiarize yourself with the `Coding Standards `_ and the `Contributor Code of Conduct `_. From 950ca189326703daf8bb84188507ca7324efc16e Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Fri, 18 Dec 2020 09:23:27 -0500 Subject: [PATCH 056/213] Ignores no longer needed. --- src/allmydata/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/allmydata/__init__.py b/src/allmydata/__init__.py index a43781158..15d5fb240 100644 --- a/src/allmydata/__init__.py +++ b/src/allmydata/__init__.py @@ -14,7 +14,7 @@ __all__ = [ __version__ = "unknown" try: - from allmydata._version import __version__ # type: ignore + from allmydata._version import __version__ except ImportError: # We're running in a tree that hasn't run update_version, and didn't # come with a _version.py, so we don't know what our version is. @@ -24,7 +24,7 @@ except ImportError: full_version = "unknown" branch = "unknown" try: - from allmydata._version import full_version, branch # type: ignore + from allmydata._version import full_version, branch except ImportError: # We're running in a tree that hasn't run update_version, and didn't # come with a _version.py, so we don't know what our full version or From 99da74fffb1b013602ae847dfd4756ccb5e61c3d Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Fri, 18 Dec 2020 09:58:56 -0500 Subject: [PATCH 057/213] Change comment to clarify that it's the implementation that's incomplete. 
--- src/allmydata/test/test_checker.py | 6 +++--- src/allmydata/test/test_dirnode.py | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/allmydata/test/test_checker.py b/src/allmydata/test/test_checker.py index 23ce44762..d24ad4ef4 100644 --- a/src/allmydata/test/test_checker.py +++ b/src/allmydata/test/test_checker.py @@ -62,7 +62,7 @@ class FakeClient(object): @implementer(IServer) -class FakeServer(object): # type: ignore # incomplete interface +class FakeServer(object): # type: ignore # incomplete implementation def get_name(self): return "fake name" @@ -75,7 +75,7 @@ class FakeServer(object): # type: ignore # incomplete interface @implementer(ICheckResults) -class FakeCheckResults(object): # type: ignore # incomplete interface +class FakeCheckResults(object): # type: ignore # incomplete implementation def __init__(self, si=None, healthy=False, recoverable=False, @@ -106,7 +106,7 @@ class FakeCheckResults(object): # type: ignore # incomplete interface @implementer(ICheckAndRepairResults) -class FakeCheckAndRepairResults(object): # type: ignore # incomplete interface +class FakeCheckAndRepairResults(object): # type: ignore # incomplete implementation def __init__(self, si=None, repair_attempted=False, diff --git a/src/allmydata/test/test_dirnode.py b/src/allmydata/test/test_dirnode.py index 8af1567c9..68fcdd54b 100644 --- a/src/allmydata/test/test_dirnode.py +++ b/src/allmydata/test/test_dirnode.py @@ -1526,7 +1526,7 @@ class Packing(testutil.ReallyEqualMixin, unittest.TestCase): kids, fn.get_writekey(), deep_immutable=True) @implementer(IMutableFileNode) -class FakeMutableFile(object): # type: ignore # incomplete interface +class FakeMutableFile(object): # type: ignore # incomplete implementation counter = 0 def __init__(self, initial_contents=""): data = self._get_initial_contents(initial_contents) From 51b0b201b49733280702ed901baa06e23d75af85 Mon Sep 17 00:00:00 2001 From: "Jason R. 
Coombs" Date: Fri, 18 Dec 2020 10:33:30 -0500 Subject: [PATCH 058/213] Expand comment to provide more context. --- src/allmydata/web/private.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/src/allmydata/web/private.py b/src/allmydata/web/private.py index a86c869c4..405ca75e7 100644 --- a/src/allmydata/web/private.py +++ b/src/allmydata/web/private.py @@ -61,7 +61,12 @@ class IToken(ICredentials): pass -# Shoobx/mypy-zope#26 +# Workaround for Shoobx/mypy-zope#26, where without suitable +# stubs for twisted classes (ICredentials), IToken does not +# appear to be an Interface. The proper fix appears to be to +# create stubs for twisted +# (https://twistedmatrix.com/trac/ticket/9717). For now, +# bypassing the inline decorator syntax works around the issue. _itoken_impl = implementer(IToken) From efd0aef2847814c1c43a14db3ccc1cde01a95119 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Fri, 18 Dec 2020 10:36:43 -0500 Subject: [PATCH 059/213] Indicate that unichr is Python 2 only. --- src/allmydata/windows/fixups.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/allmydata/windows/fixups.py b/src/allmydata/windows/fixups.py index c5ba3bb57..e98aa8a67 100644 --- a/src/allmydata/windows/fixups.py +++ b/src/allmydata/windows/fixups.py @@ -219,7 +219,7 @@ def initialize(): def unmangle(s): return re.sub( u'\\x7F[0-9a-fA-F]*\\;', - # type ignored for 'unichr' + # type ignored for 'unichr' (Python 2 only) lambda m: unichr(int(m.group(0)[1:-1], 16)), # type: ignore s, ) From ea0c10ef83d98f019472c744ad81f3c5a8f5fab1 Mon Sep 17 00:00:00 2001 From: "Jason R. 
Coombs" Date: Fri, 18 Dec 2020 10:51:56 -0500 Subject: [PATCH 060/213] Remove set_size, unused --- src/allmydata/immutable/encode.py | 3 --- src/allmydata/interfaces.py | 5 ----- 2 files changed, 8 deletions(-) diff --git a/src/allmydata/immutable/encode.py b/src/allmydata/immutable/encode.py index e743ce766..9351df501 100644 --- a/src/allmydata/immutable/encode.py +++ b/src/allmydata/immutable/encode.py @@ -711,6 +711,3 @@ class Encoder(object): return self.uri_extension_data def get_uri_extension_hash(self): return self.uri_extension_hash - - def set_size(self, size): - raise NotImplementedError() diff --git a/src/allmydata/interfaces.py b/src/allmydata/interfaces.py index 95b1fdf63..edd16ea08 100644 --- a/src/allmydata/interfaces.py +++ b/src/allmydata/interfaces.py @@ -1825,11 +1825,6 @@ class IEncoder(Interface): willing to receive data. """ - def set_size(size): - """Specify the number of bytes that will be encoded. This must be - peformed before get_serialized_params() can be called. - """ - def set_encrypted_uploadable(u): """Provide a source of encrypted upload data. 'u' must implement IEncryptedUploadable. From d051791e9530e5e64d49942cdfd6afc0a9a98c17 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Fri, 18 Dec 2020 10:53:24 -0500 Subject: [PATCH 061/213] Add reference to ticket. --- src/allmydata/immutable/offloaded.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/allmydata/immutable/offloaded.py b/src/allmydata/immutable/offloaded.py index 53a1f911a..2d2c5c1f5 100644 --- a/src/allmydata/immutable/offloaded.py +++ b/src/allmydata/immutable/offloaded.py @@ -499,6 +499,7 @@ class LocalCiphertextReader(AskUntilSuccessMixin): # ??. I'm not sure if it makes sense to forward the close message. return self.call("close") + # https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3561 def set_upload_status(self, upload_status): raise NotImplementedError From 090031cbfc33510e7eebc627f313c94c548ea91e Mon Sep 17 00:00:00 2001 From: "Jason R. 
Coombs" Date: Fri, 18 Dec 2020 10:55:20 -0500 Subject: [PATCH 062/213] Remove confirm_share_allocation from interface (unused). --- src/allmydata/immutable/upload.py | 3 --- src/allmydata/interfaces.py | 6 ------ 2 files changed, 9 deletions(-) diff --git a/src/allmydata/immutable/upload.py b/src/allmydata/immutable/upload.py index 171b71eff..e6da4812a 100644 --- a/src/allmydata/immutable/upload.py +++ b/src/allmydata/immutable/upload.py @@ -388,9 +388,6 @@ class PeerSelector(object): def add_peers(self, peerids=None): raise NotImplementedError - def confirm_share_allocation(self, peerid, shnum): - raise NotImplementedError - class _QueryStatistics(object): diff --git a/src/allmydata/interfaces.py b/src/allmydata/interfaces.py index edd16ea08..e460854f8 100644 --- a/src/allmydata/interfaces.py +++ b/src/allmydata/interfaces.py @@ -860,12 +860,6 @@ class IPeerSelector(Interface): peer selection begins. """ - def confirm_share_allocation(peerid, shnum): - """ - Confirm that an allocated peer=>share pairing has been - successfully established. - """ - def add_peers(peerids=set): """ Update my internal state to include the peers in peerids as From 0e248cb4ef03f40da7e29c1bcd8a1c390ea00105 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Fri, 18 Dec 2020 10:59:53 -0500 Subject: [PATCH 063/213] Declare signing key as required in introducer client publish. 
--- src/allmydata/introducer/client.py | 2 +- src/allmydata/introducer/interfaces.py | 5 ++--- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/src/allmydata/introducer/client.py b/src/allmydata/introducer/client.py index 62642d0af..0e0a56442 100644 --- a/src/allmydata/introducer/client.py +++ b/src/allmydata/introducer/client.py @@ -198,7 +198,7 @@ class IntroducerClient(service.Service, Referenceable): ann_d.update(ann) return ann_d - def publish(self, service_name, ann, signing_key=None): + def publish(self, service_name, ann, signing_key): # we increment the seqnum every time we publish something new current_seqnum, current_nonce = self._sequencer() diff --git a/src/allmydata/introducer/interfaces.py b/src/allmydata/introducer/interfaces.py index 9f08f1943..24fd3945f 100644 --- a/src/allmydata/introducer/interfaces.py +++ b/src/allmydata/introducer/interfaces.py @@ -73,7 +73,7 @@ class IIntroducerClient(Interface): publish their services to the rest of the world, and I help them learn about services available on other nodes.""" - def publish(service_name, ann, signing_key=None): + def publish(service_name, ann, signing_key): """Publish the given announcement dictionary (which must be JSON-serializable), plus some additional keys, to the world. @@ -83,8 +83,7 @@ class IIntroducerClient(Interface): the signing_key, if present, otherwise it is derived from the 'anonymous-storage-FURL' key. - If signing_key= is set to an instance of SigningKey, it will be - used to sign the announcement.""" + signing_key (a SigningKey) will be used to sign the announcement.""" def subscribe_to(service_name, callback, *args, **kwargs): """Call this if you will eventually want to use services with the From c2d2aba83f025824e23f2051700ad2fd54063799 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Fri, 18 Dec 2020 11:05:55 -0500 Subject: [PATCH 064/213] Add reference to ticket. 
--- src/allmydata/mutable/filenode.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/allmydata/mutable/filenode.py b/src/allmydata/mutable/filenode.py index 4613a918b..39e8b76be 100644 --- a/src/allmydata/mutable/filenode.py +++ b/src/allmydata/mutable/filenode.py @@ -1206,5 +1206,6 @@ class MutableFileVersion(object): mode=mode) return u.update() + # https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3562 def get_servermap(self): raise NotImplementedError From 189608e11388988b512aa00f225911ff61afc80c Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Fri, 18 Dec 2020 11:07:30 -0500 Subject: [PATCH 065/213] Remove GENERATED_FILES, unused --- src/allmydata/node.py | 1 - 1 file changed, 1 deletion(-) diff --git a/src/allmydata/node.py b/src/allmydata/node.py index 7622d5bc3..6d0b2da04 100644 --- a/src/allmydata/node.py +++ b/src/allmydata/node.py @@ -713,7 +713,6 @@ class Node(service.MultiService): """ NODETYPE = "unknown NODETYPE" CERTFILE = "node.pem" - GENERATED_FILES = [] # type: ignore def __init__(self, config, main_tub, control_tub, i2p_provider, tor_provider): """ From 602a06e5cba818cb927c45566a610eb3e0d6eadc Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Fri, 18 Dec 2020 11:14:07 -0500 Subject: [PATCH 066/213] Extract Parameters type in scripts.types_. 
--- src/allmydata/scripts/cli.py | 5 ++--- src/allmydata/scripts/common.py | 7 ++++--- src/allmydata/scripts/types_.py | 4 +++- 3 files changed, 9 insertions(+), 7 deletions(-) diff --git a/src/allmydata/scripts/cli.py b/src/allmydata/scripts/cli.py index 2bb91472f..e4cd8aa22 100644 --- a/src/allmydata/scripts/cli.py +++ b/src/allmydata/scripts/cli.py @@ -3,8 +3,7 @@ from __future__ import print_function import os.path, re, fnmatch try: - from typing import List, Sequence, Any - from allmydata.scripts.types_ import SubCommands + from allmydata.scripts.types_ import SubCommands, Parameters except ImportError: pass @@ -26,7 +25,7 @@ class FileStoreOptions(BaseOptions): "This overrides the URL found in the --node-directory ."], ["dir-cap", None, None, "Specify which dirnode URI should be used as the 'tahoe' alias."] - ] # type: List[Sequence[Any]] + ] # type: Parameters def postOptions(self): self["quiet"] = self.parent["quiet"] diff --git a/src/allmydata/scripts/common.py b/src/allmydata/scripts/common.py index b501c7e6a..42c26bb90 100644 --- a/src/allmydata/scripts/common.py +++ b/src/allmydata/scripts/common.py @@ -5,7 +5,8 @@ import codecs from os.path import join try: - from typing import Optional, Sequence, List, Any + from typing import Optional + from .types_ import Parameters except ImportError: pass @@ -69,7 +70,7 @@ class BasedirOptions(BaseOptions): optParameters = [ ["basedir", "C", None, "Specify which Tahoe base directory should be used. [default: %s]" % quote_local_unicode_path(_default_nodedir)], - ] # type: List[Sequence[Any]] + ] # type: Parameters def parseArgs(self, basedir=None): # This finds the node-directory option correctly even if we are in a subcommand. 
@@ -106,7 +107,7 @@ class NoDefaultBasedirOptions(BasedirOptions): optParameters = [ ["basedir", "C", None, "Specify which Tahoe base directory should be used."], - ] # type: List[Sequence[Any]] + ] # type: Parameters # This is overridden in order to ensure we get a "Wrong number of arguments." # error when more than one argument is given. diff --git a/src/allmydata/scripts/types_.py b/src/allmydata/scripts/types_.py index 58f88722b..3937cb803 100644 --- a/src/allmydata/scripts/types_.py +++ b/src/allmydata/scripts/types_.py @@ -1,4 +1,4 @@ -from typing import List, Tuple, Type +from typing import List, Tuple, Type, Sequence, Any from allmydata.scripts.common import BaseOptions @@ -8,3 +8,5 @@ from allmydata.scripts.common import BaseOptions SubCommand = Tuple[str, None, Type[BaseOptions], str] SubCommands = List[SubCommand] + +Parameters = List[Sequence[Any]] From ff182e69c1bc699503f38477c75197671f0e67ae Mon Sep 17 00:00:00 2001 From: meejah Date: Sun, 20 Dec 2020 19:10:00 -0700 Subject: [PATCH 067/213] signatures are detached --- docs/release-checklist.rst | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/release-checklist.rst b/docs/release-checklist.rst index be32aea6c..18c908a99 100644 --- a/docs/release-checklist.rst +++ b/docs/release-checklist.rst @@ -97,10 +97,10 @@ they will need to evaluate which contributors' signatures they trust. 
- install each in a fresh virtualenv - run `tahoe` command - when satisfied, sign the tarballs: - - gpg --pinentry=loopback --armor --sign dist/tahoe_lafs-1.15.0rc0-py2-none-any.whl - - gpg --pinentry=loopback --armor --sign dist/tahoe_lafs-1.15.0rc0.tar.bz2 - - gpg --pinentry=loopback --armor --sign dist/tahoe_lafs-1.15.0rc0.tar.gz - - gpg --pinentry=loopback --armor --sign dist/tahoe_lafs-1.15.0rc0.zip + - gpg --pinentry=loopback --armor --detach-sign dist/tahoe_lafs-1.15.0rc0-py2-none-any.whl + - gpg --pinentry=loopback --armor --detach-sign dist/tahoe_lafs-1.15.0rc0.tar.bz2 + - gpg --pinentry=loopback --armor --detach-sign dist/tahoe_lafs-1.15.0rc0.tar.gz + - gpg --pinentry=loopback --armor --detach-sign dist/tahoe_lafs-1.15.0rc0.zip Privileged Contributor From 6b6b8f8378f1e3da826a50d9dcbe95e63ec8faa1 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 26 Dec 2020 11:17:35 -0500 Subject: [PATCH 068/213] Push IURI implementers down to the classes that actually implement it. --- src/allmydata/uri.py | 20 +++++++------------- 1 file changed, 7 insertions(+), 13 deletions(-) diff --git a/src/allmydata/uri.py b/src/allmydata/uri.py index d7f4782cd..ce7794dda 100644 --- a/src/allmydata/uri.py +++ b/src/allmydata/uri.py @@ -494,7 +494,6 @@ class MDMFVerifierURI(_BaseURI): return self -@implementer(IURI, IDirnodeURI) class _DirectoryBaseURI(_BaseURI): def __init__(self, filenode_uri=None): self._filenode_uri = filenode_uri @@ -540,14 +539,8 @@ class _DirectoryBaseURI(_BaseURI): def get_storage_index(self): return self._filenode_uri.get_storage_index() - def get_readonly(self): - raise NotImplementedError() - def is_readonly(self): - raise NotImplementedError() - - -@implementer(IDirectoryURI) +@implementer(IURI, IDirectoryURI) class DirectoryURI(_DirectoryBaseURI): BASE_STRING=b'URI:DIR2:' @@ -566,7 +559,7 @@ class DirectoryURI(_DirectoryBaseURI): return ReadonlyDirectoryURI(self._filenode_uri.get_readonly()) -@implementer(IReadonlyDirectoryURI) 
+@implementer(IURI, IReadonlyDirectoryURI) class ReadonlyDirectoryURI(_DirectoryBaseURI): BASE_STRING=b'URI:DIR2-RO:' @@ -585,6 +578,7 @@ class ReadonlyDirectoryURI(_DirectoryBaseURI): return self +@implementer(IURI, IDirnodeURI) class _ImmutableDirectoryBaseURI(_DirectoryBaseURI): def __init__(self, filenode_uri=None): if filenode_uri: @@ -622,7 +616,7 @@ class LiteralDirectoryURI(_ImmutableDirectoryBaseURI): return None -@implementer(IDirectoryURI) +@implementer(IURI, IDirectoryURI) class MDMFDirectoryURI(_DirectoryBaseURI): BASE_STRING=b'URI:DIR2-MDMF:' @@ -644,7 +638,7 @@ class MDMFDirectoryURI(_DirectoryBaseURI): return MDMFDirectoryURIVerifier(self._filenode_uri.get_verify_cap()) -@implementer(IReadonlyDirectoryURI) +@implementer(IURI, IReadonlyDirectoryURI) class ReadonlyMDMFDirectoryURI(_DirectoryBaseURI): BASE_STRING=b'URI:DIR2-MDMF-RO:' @@ -682,7 +676,7 @@ def wrap_dirnode_cap(filecap): raise AssertionError("cannot interpret as a directory cap: %s" % filecap.__class__) -@implementer(IVerifierURI) +@implementer(IURI, IVerifierURI) class MDMFDirectoryURIVerifier(_DirectoryBaseURI): BASE_STRING=b'URI:DIR2-MDMF-Verifier:' @@ -707,7 +701,7 @@ class MDMFDirectoryURIVerifier(_DirectoryBaseURI): return self -@implementer(IVerifierURI) +@implementer(IURI, IVerifierURI) class DirectoryURIVerifier(_DirectoryBaseURI): BASE_STRING=b'URI:DIR2-Verifier:' From b65ef3cee6c6922c70edac842e28985ac7c20990 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 26 Dec 2020 11:22:39 -0500 Subject: [PATCH 069/213] Revert "Ignores no longer needed." This reverts commit 950ca189326703daf8bb84188507ca7324efc16e. 
--- src/allmydata/__init__.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/src/allmydata/__init__.py b/src/allmydata/__init__.py index 15d5fb240..3157c8c80 100644 --- a/src/allmydata/__init__.py +++ b/src/allmydata/__init__.py @@ -14,7 +14,9 @@ __all__ = [ __version__ = "unknown" try: - from allmydata._version import __version__ + # type ignored as it fails in CI + # (https://app.circleci.com/pipelines/github/tahoe-lafs/tahoe-lafs/1647/workflows/60ae95d4-abe8-492c-8a03-1ad3b9e42ed3/jobs/40972) + from allmydata._version import __version__ # type: ignore except ImportError: # We're running in a tree that hasn't run update_version, and didn't # come with a _version.py, so we don't know what our version is. @@ -24,7 +26,9 @@ except ImportError: full_version = "unknown" branch = "unknown" try: - from allmydata._version import full_version, branch + # type ignored as it fails in CI + # (https://app.circleci.com/pipelines/github/tahoe-lafs/tahoe-lafs/1647/workflows/60ae95d4-abe8-492c-8a03-1ad3b9e42ed3/jobs/40972) + from allmydata._version import full_version, branch # type: ignore except ImportError: # We're running in a tree that hasn't run update_version, and didn't # come with a _version.py, so we don't know what our full version or From ab2c544efcaad629f2d49173bfdc82e8de94d944 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 26 Dec 2020 11:50:06 -0500 Subject: [PATCH 070/213] Restore IDirnodeURI --- src/allmydata/uri.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/allmydata/uri.py b/src/allmydata/uri.py index ce7794dda..51671b0ac 100644 --- a/src/allmydata/uri.py +++ b/src/allmydata/uri.py @@ -494,6 +494,7 @@ class MDMFVerifierURI(_BaseURI): return self +@implementer(IDirnodeURI) class _DirectoryBaseURI(_BaseURI): def __init__(self, filenode_uri=None): self._filenode_uri = filenode_uri From dacdf7f12da89e214eec9b0543b15ef450b27ae3 Mon Sep 17 00:00:00 2001 From: "Jason R. 
Coombs" Date: Sat, 26 Dec 2020 11:56:13 -0500 Subject: [PATCH 071/213] Add more detail and link to upstream issue for Twisted stubs. --- src/allmydata/test/storage_plugin.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/allmydata/test/storage_plugin.py b/src/allmydata/test/storage_plugin.py index ba11776e6..17ec89078 100644 --- a/src/allmydata/test/storage_plugin.py +++ b/src/allmydata/test/storage_plugin.py @@ -47,8 +47,9 @@ class RIDummy(RemoteInterface): """ - -@implementer(IFoolscapStoragePlugin) # type: ignore # todo: make stubs for twisted +# type ignored due to missing stubs for Twisted +# https://twistedmatrix.com/trac/ticket/9717 +@implementer(IFoolscapStoragePlugin) # type: ignore @attr.s class DummyStorage(object): name = attr.ib() From 5396f9f97eaf5a541c2334eaf8ebdb5a2d3484f5 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 26 Dec 2020 12:02:47 -0500 Subject: [PATCH 072/213] Replace fixme with reference to foolscap issue. --- src/allmydata/storage/server.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/allmydata/storage/server.py b/src/allmydata/storage/server.py index b7df702d5..5f2ef3ac2 100644 --- a/src/allmydata/storage/server.py +++ b/src/allmydata/storage/server.py @@ -581,7 +581,7 @@ class StorageServer(service.MultiService, Referenceable): for share in six.viewvalues(shares): share.add_or_renew_lease(lease_info) - def slot_testv_and_readv_and_writev( # type: ignore # fixme + def slot_testv_and_readv_and_writev( # type: ignore # warner/foolscap#78 self, storage_index, secrets, From 1bf71fd69010a0a075f66076156f0e951042b8dd Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 26 Dec 2020 12:12:39 -0500 Subject: [PATCH 073/213] Replace todo with a ticket. 
--- src/allmydata/test/no_network.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/allmydata/test/no_network.py b/src/allmydata/test/no_network.py index 1e8c519e1..ba497f81a 100644 --- a/src/allmydata/test/no_network.py +++ b/src/allmydata/test/no_network.py @@ -67,7 +67,7 @@ class Marker(object): fireNow = partial(defer.succeed, None) -@implementer(IRemoteReference) # type: ignore # todo: write stubs for foolscap +@implementer(IRemoteReference) # type: ignore # warner/foolscap#79 class LocalWrapper(object): """ A ``LocalWrapper`` presents the remote reference interface to a local From 01147f4627a10ecb4db5ddcdcb4a17f5d839a627 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 26 Dec 2020 12:33:22 -0500 Subject: [PATCH 074/213] Add reference to ticket for ambiguous MRO --- src/allmydata/test/no_network.py | 2 +- src/allmydata/test/test_dirnode.py | 2 +- src/allmydata/test/test_storage_client.py | 6 +----- src/allmydata/test/web/test_web.py | 4 ++-- 4 files changed, 5 insertions(+), 9 deletions(-) diff --git a/src/allmydata/test/no_network.py b/src/allmydata/test/no_network.py index ba497f81a..b9c9ecaeb 100644 --- a/src/allmydata/test/no_network.py +++ b/src/allmydata/test/no_network.py @@ -261,7 +261,7 @@ def create_no_network_client(basedir): return defer.succeed(client) -class _NoNetworkClient(_Client): # type: ignore # Cannot determine consistent MRO order +class _NoNetworkClient(_Client): # type: ignore # tahoe-lafs/ticket/3573 """ Overrides all _Client networking functionality to do nothing. 
""" diff --git a/src/allmydata/test/test_dirnode.py b/src/allmydata/test/test_dirnode.py index 68fcdd54b..6866bc88e 100644 --- a/src/allmydata/test/test_dirnode.py +++ b/src/allmydata/test/test_dirnode.py @@ -1587,7 +1587,7 @@ class FakeNodeMaker(NodeMaker): def create_mutable_file(self, contents="", keysize=None, version=None): return defer.succeed(FakeMutableFile(contents)) -class FakeClient2(_Client): # type: ignore # ambiguous MRO +class FakeClient2(_Client): # type: ignore # tahoe-lafs/ticket/3573 def __init__(self): self.nodemaker = FakeNodeMaker(None, None, None, None, None, diff --git a/src/allmydata/test/test_storage_client.py b/src/allmydata/test/test_storage_client.py index f2be9ad1e..bcbfffa1e 100644 --- a/src/allmydata/test/test_storage_client.py +++ b/src/allmydata/test/test_storage_client.py @@ -91,11 +91,7 @@ from allmydata.interfaces import ( SOME_FURL = b"pb://abcde@nowhere/fake" -# type checks fail with: -# Cannot determine consistent method resolution order (MRO) for "NativeStorageServerWithVersion" -# even though class hierarchy is single-inheritance. Probably `implementer` -# wrappers are affecting the MRO. 
-class NativeStorageServerWithVersion(NativeStorageServer): # type: ignore +class NativeStorageServerWithVersion(NativeStorageServer): # type: ignore # tahoe-lafs/ticket/3573 def __init__(self, version): # note: these instances won't work for anything other than # get_available_space() because we don't upcall diff --git a/src/allmydata/test/web/test_web.py b/src/allmydata/test/web/test_web.py index a5fbe5a51..ef0446077 100644 --- a/src/allmydata/test/web/test_web.py +++ b/src/allmydata/test/web/test_web.py @@ -189,7 +189,7 @@ class FakeHistory(object): def list_all_helper_statuses(self): return [] -class FakeDisplayableServer(StubServer): # type: ignore # Cannot determine MRO +class FakeDisplayableServer(StubServer): # type: ignore # tahoe-lafs/ticket/3573 def __init__(self, serverid, nickname, connected, last_connect_time, last_loss_time, last_rx_time): StubServer.__init__(self, serverid) @@ -255,7 +255,7 @@ class FakeStorageServer(service.MultiService): def on_status_changed(self, cb): cb(self) -class FakeClient(_Client): # type: ignore # Cannot determine MRO +class FakeClient(_Client): # type: ignore # tahoe-lafs/ticket/3573 def __init__(self): # don't upcall to Client.__init__, since we only want to initialize a # minimal subset From 04ab4dec3be3fdd07f7a85bfe50c3133cef30910 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 26 Dec 2020 13:01:27 -0500 Subject: [PATCH 075/213] Extract function and annotate it to satisfy typechecks for _Config._basedir --- src/allmydata/node.py | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/src/allmydata/node.py b/src/allmydata/node.py index 89d5bcb90..fabb255f6 100644 --- a/src/allmydata/node.py +++ b/src/allmydata/node.py @@ -23,6 +23,11 @@ from base64 import b32decode, b32encode from errno import ENOENT, EPERM from warnings import warn +try: + from typing import Union +except ImportError: + pass + import attr # On Python 2 this will be the backported package. 
@@ -273,6 +278,11 @@ def _error_about_old_config_files(basedir, generated_files): raise e +def ensure_text_and_abspath_expanduser_unicode(basedir): + # type: (Union[bytes, str]) -> str + return abspath_expanduser_unicode(ensure_text(basedir)) + + @attr.s class _Config(object): """ @@ -300,8 +310,8 @@ class _Config(object): config = attr.ib(validator=attr.validators.instance_of(configparser.ConfigParser)) portnum_fname = attr.ib() _basedir = attr.ib( - converter=lambda basedir: abspath_expanduser_unicode(ensure_text(basedir)), - ) + converter=ensure_text_and_abspath_expanduser_unicode, + ) # type: str config_path = attr.ib( validator=attr.validators.optional( attr.validators.instance_of(FilePath), From 3fd46f94009101fdcfff8617ccb335bab255c57f Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 26 Dec 2020 13:02:16 -0500 Subject: [PATCH 076/213] Ignore additional attribute on the function. --- src/allmydata/test/test_node.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/allmydata/test/test_node.py b/src/allmydata/test/test_node.py index 1e0f3020c..e44fd5743 100644 --- a/src/allmydata/test/test_node.py +++ b/src/allmydata/test/test_node.py @@ -564,7 +564,7 @@ class TestMissingPorts(unittest.TestCase): config = config_from_string(self.basedir, "portnum", config_data) with self.assertRaises(PortAssignmentRequired): _tub_portlocation(config, None, None) - test_listen_on_zero_with_host.todo = native_str( + test_listen_on_zero_with_host.todo = native_str( # type: ignore "https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3563" ) From cc5a1046d9d936712577717f1ba0409378d279e6 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 26 Dec 2020 13:07:12 -0500 Subject: [PATCH 077/213] Define type for IntroducerService.VERSION, accepting bytes or str as keys for now. 
--- src/allmydata/introducer/server.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/src/allmydata/introducer/server.py b/src/allmydata/introducer/server.py index 237c30315..339c5a0ac 100644 --- a/src/allmydata/introducer/server.py +++ b/src/allmydata/introducer/server.py @@ -15,6 +15,12 @@ from past.builtins import long from six import ensure_text import time, os.path, textwrap + +try: + from typing import Any, Dict, Union +except ImportError: + pass + from zope.interface import implementer from twisted.application import service from twisted.internet import defer @@ -147,10 +153,12 @@ class IntroducerService(service.MultiService, Referenceable): name = "introducer" # v1 is the original protocol, added in 1.0 (but only advertised starting # in 1.3), removed in 1.12. v2 is the new signed protocol, added in 1.10 - VERSION = { #"http://allmydata.org/tahoe/protocols/introducer/v1": { }, + # TODO: reconcile bytes/str for keys + VERSION = { + #"http://allmydata.org/tahoe/protocols/introducer/v1": { }, b"http://allmydata.org/tahoe/protocols/introducer/v2": { }, b"application-version": allmydata.__full_version__.encode("utf-8"), - } + } # type: Dict[Union[bytes, str], Any] def __init__(self): service.MultiService.__init__(self) From 854c22e1ca44baa943e36c964c9d06cfa71f8809 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 26 Dec 2020 13:09:57 -0500 Subject: [PATCH 078/213] Use compatible import for urllib.parse.quote. 
--- src/allmydata/test/web/test_root.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/src/allmydata/test/web/test_root.py b/src/allmydata/test/web/test_root.py index 0715c8102..2ea418047 100644 --- a/src/allmydata/test/web/test_root.py +++ b/src/allmydata/test/web/test_root.py @@ -1,8 +1,7 @@ import time -from urllib import ( - quote, -) + +from six.moves.urllib.parse import quote from bs4 import ( BeautifulSoup, From 652222116665f6876e476e5a03cf1129fbc1c789 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 26 Dec 2020 13:12:11 -0500 Subject: [PATCH 079/213] Suppress error on SpyHandler interface. --- src/allmydata/test/test_storage_client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/allmydata/test/test_storage_client.py b/src/allmydata/test/test_storage_client.py index 45a99979f..8500d6bff 100644 --- a/src/allmydata/test/test_storage_client.py +++ b/src/allmydata/test/test_storage_client.py @@ -570,7 +570,7 @@ class SpyEndpoint(object): return d -@implementer(IConnectionHintHandler) +@implementer(IConnectionHintHandler) # type: ignore # warner/foolscap#78 @attr.s class SpyHandler(object): """ From 9780f8bfdcb29120149e4a49254a20fd7576e4ce Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 26 Dec 2020 13:23:28 -0500 Subject: [PATCH 080/213] Add newsfragment --- newsfragments/3399.feature | 1 + 1 file changed, 1 insertion(+) create mode 100644 newsfragments/3399.feature diff --git a/newsfragments/3399.feature b/newsfragments/3399.feature new file mode 100644 index 000000000..d30a91679 --- /dev/null +++ b/newsfragments/3399.feature @@ -0,0 +1 @@ +Added 'typechecks' environment for tox running mypy and performing static typechecks. From 4a9d3bde5b53bb7ce9ae38ca03f416f9a4f5aad4 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 26 Dec 2020 13:30:32 -0500 Subject: [PATCH 081/213] Exclude allmydata.scripts.types_ module from PythonTwoRegressions. 
--- src/allmydata/test/test_python2_regressions.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/allmydata/test/test_python2_regressions.py b/src/allmydata/test/test_python2_regressions.py index 84484f1cf..5c6a654c1 100644 --- a/src/allmydata/test/test_python2_regressions.py +++ b/src/allmydata/test/test_python2_regressions.py @@ -16,6 +16,7 @@ from testtools.matchers import ( BLACKLIST = { "allmydata.test.check_load", "allmydata.windows.registry", + "allmydata.scripts.types_", } From 3951257cd7090188d33f3efabeca263c954912d0 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 27 Dec 2020 16:17:59 -0500 Subject: [PATCH 082/213] Switch to mypy-zope main branch. --- tox.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index 26e056dc1..706899ebe 100644 --- a/tox.ini +++ b/tox.ini @@ -117,7 +117,7 @@ commands = skip_install = True deps = mypy - git+https://github.com/jaraco/mypy-zope@bugfix/21-InterfaceClass-subclass + git+https://github.com/Shoobx/mypy-zope git+https://github.com/jaraco/foolscap@bugfix/75-use-metaclass commands = mypy src From b4b4a1aabfb471912c40bab980d9a2e78a36ab8c Mon Sep 17 00:00:00 2001 From: Chad Whitacre Date: Tue, 29 Dec 2020 12:39:00 -0500 Subject: [PATCH 083/213] Port webish/test_webish to Python 3 --- newsfragments/3577.minor | 0 src/allmydata/test/web/test_webish.py | 16 +++++++++--- src/allmydata/util/_python3.py | 2 ++ src/allmydata/webish.py | 36 ++++++++++++++++++++------- 4 files changed, 42 insertions(+), 12 deletions(-) create mode 100644 newsfragments/3577.minor diff --git a/newsfragments/3577.minor b/newsfragments/3577.minor new file mode 100644 index 000000000..e69de29bb diff --git a/src/allmydata/test/web/test_webish.py b/src/allmydata/test/web/test_webish.py index e680acd04..12a04a6eb 100644 --- a/src/allmydata/test/web/test_webish.py +++ b/src/allmydata/test/web/test_webish.py @@ -1,6 +1,16 @@ """ Tests for ``allmydata.webish``. + +Ported to Python 3. 
""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 from uuid import ( uuid4, @@ -96,7 +106,7 @@ class TahoeLAFSRequestTests(SyncTestCase): ]) self._fields_test( b"POST", - {b"content-type": b"multipart/form-data; boundary={}".format(boundary)}, + {b"content-type": b"multipart/form-data; boundary=" + bytes(boundary, 'ascii')}, form_data.encode("ascii"), AfterPreprocessing( lambda fs: { @@ -105,8 +115,8 @@ class TahoeLAFSRequestTests(SyncTestCase): in fs.keys() }, Equals({ - b"foo": b"bar", - b"baz": b"some file contents", + "foo": "bar", + "baz": b"some file contents", }), ), ) diff --git a/src/allmydata/util/_python3.py b/src/allmydata/util/_python3.py index a7b77001a..b2bc47153 100644 --- a/src/allmydata/util/_python3.py +++ b/src/allmydata/util/_python3.py @@ -110,6 +110,7 @@ PORTED_MODULES = [ "allmydata.util.spans", "allmydata.util.statistics", "allmydata.util.time_format", + "allmydata.webish", ] PORTED_TEST_MODULES = [ @@ -179,6 +180,7 @@ PORTED_TEST_MODULES = [ "allmydata.test.test_upload", "allmydata.test.test_uri", "allmydata.test.test_util", + "allmydata.test.test_webish", "allmydata.test.web.test_common", "allmydata.test.web.test_grid", "allmydata.test.web.test_util", diff --git a/src/allmydata/webish.py b/src/allmydata/webish.py index f32f56714..b3b819b6a 100644 --- a/src/allmydata/webish.py +++ b/src/allmydata/webish.py @@ -1,3 +1,15 @@ +""" +Ported to Python 3. 
+""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 + from six import ensure_str import re, time, tempfile @@ -65,18 +77,24 @@ class TahoeLAFSRequest(Request, object): self.path, argstring = x self.args = parse_qs(argstring, 1) - if self.method == 'POST': + if self.method == b'POST': # We use FieldStorage here because it performs better than # cgi.parse_multipart(self.content, pdict) which is what # twisted.web.http.Request uses. - self.fields = FieldStorage( - self.content, - { - name.lower(): value[-1] - for (name, value) - in self.requestHeaders.getAllRawHeaders() - }, - environ={'REQUEST_METHOD': 'POST'}) + + headers = { + ensure_str(name.lower()): ensure_str(value[-1]) + for (name, value) + in self.requestHeaders.getAllRawHeaders() + } + + if 'content-length' not in headers: + # Python 3's cgi module would really, really like us to set + # Content-Length. This seems likely to shoot performance in + # the foot. 
+ headers['content-length'] = len(self.content.getvalue()) + + self.fields = FieldStorage(self.content, headers, environ={'REQUEST_METHOD': 'POST'}) self.content.seek(0) self._tahoeLAFSSecurityPolicy() From 2a8fa4da7a475eeb34c3f19d8e9eb30dc30d1b33 Mon Sep 17 00:00:00 2001 From: Chad Whitacre Date: Tue, 29 Dec 2020 12:54:19 -0500 Subject: [PATCH 084/213] Fix test module reference in util._python3 --- src/allmydata/util/_python3.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/allmydata/util/_python3.py b/src/allmydata/util/_python3.py index b2bc47153..b54fed188 100644 --- a/src/allmydata/util/_python3.py +++ b/src/allmydata/util/_python3.py @@ -180,9 +180,9 @@ PORTED_TEST_MODULES = [ "allmydata.test.test_upload", "allmydata.test.test_uri", "allmydata.test.test_util", - "allmydata.test.test_webish", "allmydata.test.web.test_common", "allmydata.test.web.test_grid", - "allmydata.test.web.test_util", "allmydata.test.web.test_status", + "allmydata.test.web.test_util", + "allmydata.test.web.test_webish", ] From 744e2057631f694ebb64b5ce6a18dde6b27926ac Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Mon, 4 Jan 2021 11:03:38 -0500 Subject: [PATCH 085/213] Tests pass on Python 3. 
--- src/allmydata/frontends/ftpd.py | 6 ++---- src/allmydata/test/test_ftp.py | 10 +++++----- 2 files changed, 7 insertions(+), 9 deletions(-) diff --git a/src/allmydata/frontends/ftpd.py b/src/allmydata/frontends/ftpd.py index 0b18df85b..83e98f8ed 100644 --- a/src/allmydata/frontends/ftpd.py +++ b/src/allmydata/frontends/ftpd.py @@ -1,7 +1,5 @@ from six import ensure_str -from types import NoneType - from zope.interface import implementer from twisted.application import service, strports from twisted.internet import defer @@ -264,7 +262,7 @@ class Handler(object): d.addCallback(_list) def _render(children): results = [] - for (name, childnode) in children.iteritems(): + for (name, childnode) in children.items(): # the interface claims that the result should have a unicode # object as the name, but it fails unless you give it a # bytestring @@ -317,7 +315,7 @@ class Dispatcher(object): class FTPServer(service.MultiService): def __init__(self, client, accountfile, accounturl, ftp_portstr): - precondition(isinstance(accountfile, (unicode, NoneType)), accountfile) + precondition(isinstance(accountfile, (unicode, type(None))), accountfile) service.MultiService.__init__(self) r = Dispatcher(client) diff --git a/src/allmydata/test/test_ftp.py b/src/allmydata/test/test_ftp.py index 4eddef440..e77a9e6ab 100644 --- a/src/allmydata/test/test_ftp.py +++ b/src/allmydata/test/test_ftp.py @@ -46,7 +46,7 @@ class Handler(GridTestMixin, ReallyEqualMixin, unittest.TestCase): def _set_up_tree(self): # add immutable file at root - immutable = upload.Data("immutable file contents", None) + immutable = upload.Data(b"immutable file contents", None) d = self.root.add_file(u"immutable", immutable) # `mtime' and `linkmotime' both set @@ -62,7 +62,7 @@ class Handler(GridTestMixin, ReallyEqualMixin, unittest.TestCase): d.addCallback(lambda _: self._set_metadata(u"loop", md_just_mtime)) # add mutable file at root - mutable = publish.MutableData("mutable file contents") + mutable = 
publish.MutableData(b"mutable file contents") d.addCallback(lambda _: self.client.create_mutable_file(mutable)) d.addCallback(lambda node: self.root.set_node(u"mutable", node)) @@ -93,11 +93,11 @@ class Handler(GridTestMixin, ReallyEqualMixin, unittest.TestCase): d.addCallback(lambda _: self.handler.list("", keys=keys)) expected_root = [ - ('loop', + (b'loop', [0, True, ftpd.IntishPermissions(0o600), 1, self.FALL_OF_BERLIN_WALL, 'alice', 'alice', '??']), - ('immutable', + (b'immutable', [23, False, ftpd.IntishPermissions(0o600), 1, self.TURN_OF_MILLENIUM, 'alice', 'alice', '??']), - ('mutable', + (b'mutable', # timestamp should be 0 if no timestamp metadata is present [0, False, ftpd.IntishPermissions(0o600), 1, 0, 'alice', 'alice', '??'])] From a54aeae338ae79388a09d807ad63eaaf031e0175 Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Mon, 4 Jan 2021 11:05:59 -0500 Subject: [PATCH 086/213] Port to Python 3. --- src/allmydata/frontends/ftpd.py | 22 +++++++++++++++++----- src/allmydata/test/test_ftp.py | 11 +++++++++++ src/allmydata/util/_python3.py | 2 ++ 3 files changed, 30 insertions(+), 5 deletions(-) diff --git a/src/allmydata/frontends/ftpd.py b/src/allmydata/frontends/ftpd.py index 83e98f8ed..316d6095d 100644 --- a/src/allmydata/frontends/ftpd.py +++ b/src/allmydata/frontends/ftpd.py @@ -1,3 +1,15 @@ +""" +Ported to Python 3. 
+""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 + from six import ensure_str from zope.interface import implementer @@ -104,7 +116,7 @@ class Handler(object): def _get_parent(self, path): # fire with (parentnode, childname) - path = [unicode(p) for p in path] + path = [str(p) for p in path] if not path: raise NoParentError childname = path[-1] @@ -183,7 +195,7 @@ class Handler(object): def _get_root(self, path): # return (root, remaining_path) - path = [unicode(p) for p in path] + path = [str(p) for p in path] if path and path[0] == "uri": d = defer.maybeDeferred(self.client.create_node_from_uri, str(path[1])) @@ -262,7 +274,7 @@ class Handler(object): d.addCallback(_list) def _render(children): results = [] - for (name, childnode) in children.items(): + for (name, childnode) in list(children.items()): # the interface claims that the result should have a unicode # object as the name, but it fails unless you give it a # bytestring @@ -280,7 +292,7 @@ class Handler(object): return d def openForWriting(self, path): - path = [unicode(p) for p in path] + path = [str(p) for p in path] if not path: raise ftp.PermissionDeniedError("cannot STOR to root directory") childname = path[-1] @@ -315,7 +327,7 @@ class Dispatcher(object): class FTPServer(service.MultiService): def __init__(self, client, accountfile, accounturl, ftp_portstr): - precondition(isinstance(accountfile, (unicode, type(None))), accountfile) + precondition(isinstance(accountfile, (str, type(None))), accountfile) service.MultiService.__init__(self) r = Dispatcher(client) diff --git a/src/allmydata/test/test_ftp.py b/src/allmydata/test/test_ftp.py index e77a9e6ab..1bfca4bce 100644 --- 
a/src/allmydata/test/test_ftp.py +++ b/src/allmydata/test/test_ftp.py @@ -1,3 +1,14 @@ +""" +Ported to Python 3. +""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 from twisted.trial import unittest diff --git a/src/allmydata/util/_python3.py b/src/allmydata/util/_python3.py index af771cd5a..74135c2c2 100644 --- a/src/allmydata/util/_python3.py +++ b/src/allmydata/util/_python3.py @@ -35,6 +35,7 @@ PORTED_MODULES = [ "allmydata.crypto.rsa", "allmydata.crypto.util", "allmydata.dirnode", + "allmydata.frontends.ftpd", "allmydata.hashtree", "allmydata.immutable.checker", "allmydata.immutable.downloader", @@ -146,6 +147,7 @@ PORTED_TEST_MODULES = [ "allmydata.test.test_encode", "allmydata.test.test_encodingutil", "allmydata.test.test_filenode", + "allmydata.test.test_ftp", "allmydata.test.test_happiness", "allmydata.test.test_hashtree", "allmydata.test.test_hashutil", From 62e6c0d9ac1b393e4d2fe7c5cc3d9890412f8be6 Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Mon, 4 Jan 2021 11:06:14 -0500 Subject: [PATCH 087/213] News file. --- newsfragments/3579.minor | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 newsfragments/3579.minor diff --git a/newsfragments/3579.minor b/newsfragments/3579.minor new file mode 100644 index 000000000..e69de29bb From 3d42030e01fa8fa2f61abd55c8da60f0cb22a18c Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Mon, 4 Jan 2021 11:21:26 -0500 Subject: [PATCH 088/213] More tests pass on Python 3. 
--- src/allmydata/frontends/sftpd.py | 16 +++--- src/allmydata/test/test_sftp.py | 86 ++++++++++++++++---------------- 2 files changed, 51 insertions(+), 51 deletions(-) diff --git a/src/allmydata/frontends/sftpd.py b/src/allmydata/frontends/sftpd.py index b25ac0270..ac7fe8230 100644 --- a/src/allmydata/frontends/sftpd.py +++ b/src/allmydata/frontends/sftpd.py @@ -1,6 +1,6 @@ +from past.builtins import unicode import six import heapq, traceback, array, stat, struct -from types import NoneType from stat import S_IFREG, S_IFDIR from time import time, strftime, localtime @@ -267,7 +267,7 @@ def _attrs_to_metadata(attrs): def _direntry_for(filenode_or_parent, childname, filenode=None): - precondition(isinstance(childname, (unicode, NoneType)), childname=childname) + precondition(isinstance(childname, (unicode, type(None))), childname=childname) if childname is None: filenode_or_parent = filenode @@ -1838,25 +1838,25 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): def _path_from_string(self, pathstring): if noisy: self.log("CONVERT %r" % (pathstring,), level=NOISY) - _assert(isinstance(pathstring, str), pathstring=pathstring) + _assert(isinstance(pathstring, bytes), pathstring=pathstring) # The home directory is the root directory. - pathstring = pathstring.strip("/") - if pathstring == "" or pathstring == ".": + pathstring = pathstring.strip(b"/") + if pathstring == b"" or pathstring == b".": path_utf8 = [] else: - path_utf8 = pathstring.split("/") + path_utf8 = pathstring.split(b"/") # # "Servers SHOULD interpret a path name component ".." as referring to # the parent directory, and "." as referring to the current directory." path = [] for p_utf8 in path_utf8: - if p_utf8 == "..": + if p_utf8 == b"..": # ignore excess .. 
components at the root if len(path) > 0: path = path[:-1] - elif p_utf8 != ".": + elif p_utf8 != b".": try: p = p_utf8.decode('utf-8', 'strict') except UnicodeError: diff --git a/src/allmydata/test/test_sftp.py b/src/allmydata/test/test_sftp.py index b6f1fbc8a..5d0b10d34 100644 --- a/src/allmydata/test/test_sftp.py +++ b/src/allmydata/test/test_sftp.py @@ -76,7 +76,7 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas return d def _set_up_tree(self): - u = publish.MutableData("mutable file contents") + u = publish.MutableData(b"mutable file contents") d = self.client.create_mutable_file(u) d.addCallback(lambda node: self.root.set_node(u"mutable", node)) def _created_mutable(n): @@ -154,55 +154,55 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas version = self.handler.gotVersion(3, {}) self.failUnless(isinstance(version, dict)) - self.failUnlessReallyEqual(self.handler._path_from_string(""), []) - self.failUnlessReallyEqual(self.handler._path_from_string("/"), []) - self.failUnlessReallyEqual(self.handler._path_from_string("."), []) - self.failUnlessReallyEqual(self.handler._path_from_string("//"), []) - self.failUnlessReallyEqual(self.handler._path_from_string("/."), []) - self.failUnlessReallyEqual(self.handler._path_from_string("/./"), []) - self.failUnlessReallyEqual(self.handler._path_from_string("foo"), [u"foo"]) - self.failUnlessReallyEqual(self.handler._path_from_string("/foo"), [u"foo"]) - self.failUnlessReallyEqual(self.handler._path_from_string("foo/"), [u"foo"]) - self.failUnlessReallyEqual(self.handler._path_from_string("/foo/"), [u"foo"]) - self.failUnlessReallyEqual(self.handler._path_from_string("foo/bar"), [u"foo", u"bar"]) - self.failUnlessReallyEqual(self.handler._path_from_string("/foo/bar"), [u"foo", u"bar"]) - self.failUnlessReallyEqual(self.handler._path_from_string("foo/bar//"), [u"foo", u"bar"]) - self.failUnlessReallyEqual(self.handler._path_from_string("/foo/bar//"), 
[u"foo", u"bar"]) - self.failUnlessReallyEqual(self.handler._path_from_string("foo/./bar"), [u"foo", u"bar"]) - self.failUnlessReallyEqual(self.handler._path_from_string("./foo/./bar"), [u"foo", u"bar"]) - self.failUnlessReallyEqual(self.handler._path_from_string("foo/../bar"), [u"bar"]) - self.failUnlessReallyEqual(self.handler._path_from_string("/foo/../bar"), [u"bar"]) - self.failUnlessReallyEqual(self.handler._path_from_string("../bar"), [u"bar"]) - self.failUnlessReallyEqual(self.handler._path_from_string("/../bar"), [u"bar"]) + self.failUnlessReallyEqual(self.handler._path_from_string(b""), []) + self.failUnlessReallyEqual(self.handler._path_from_string(b"/"), []) + self.failUnlessReallyEqual(self.handler._path_from_string(b"."), []) + self.failUnlessReallyEqual(self.handler._path_from_string(b"//"), []) + self.failUnlessReallyEqual(self.handler._path_from_string(b"/."), []) + self.failUnlessReallyEqual(self.handler._path_from_string(b"/./"), []) + self.failUnlessReallyEqual(self.handler._path_from_string(b"foo"), [u"foo"]) + self.failUnlessReallyEqual(self.handler._path_from_string(b"/foo"), [u"foo"]) + self.failUnlessReallyEqual(self.handler._path_from_string(b"foo/"), [u"foo"]) + self.failUnlessReallyEqual(self.handler._path_from_string(b"/foo/"), [u"foo"]) + self.failUnlessReallyEqual(self.handler._path_from_string(b"foo/bar"), [u"foo", u"bar"]) + self.failUnlessReallyEqual(self.handler._path_from_string(b"/foo/bar"), [u"foo", u"bar"]) + self.failUnlessReallyEqual(self.handler._path_from_string(b"foo/bar//"), [u"foo", u"bar"]) + self.failUnlessReallyEqual(self.handler._path_from_string(b"/foo/bar//"), [u"foo", u"bar"]) + self.failUnlessReallyEqual(self.handler._path_from_string(b"foo/./bar"), [u"foo", u"bar"]) + self.failUnlessReallyEqual(self.handler._path_from_string(b"./foo/./bar"), [u"foo", u"bar"]) + self.failUnlessReallyEqual(self.handler._path_from_string(b"foo/../bar"), [u"bar"]) + 
self.failUnlessReallyEqual(self.handler._path_from_string(b"/foo/../bar"), [u"bar"]) + self.failUnlessReallyEqual(self.handler._path_from_string(b"../bar"), [u"bar"]) + self.failUnlessReallyEqual(self.handler._path_from_string(b"/../bar"), [u"bar"]) - self.failUnlessReallyEqual(self.handler.realPath(""), "/") - self.failUnlessReallyEqual(self.handler.realPath("/"), "/") - self.failUnlessReallyEqual(self.handler.realPath("."), "/") - self.failUnlessReallyEqual(self.handler.realPath("//"), "/") - self.failUnlessReallyEqual(self.handler.realPath("/."), "/") - self.failUnlessReallyEqual(self.handler.realPath("/./"), "/") - self.failUnlessReallyEqual(self.handler.realPath("foo"), "/foo") - self.failUnlessReallyEqual(self.handler.realPath("/foo"), "/foo") - self.failUnlessReallyEqual(self.handler.realPath("foo/"), "/foo") - self.failUnlessReallyEqual(self.handler.realPath("/foo/"), "/foo") - self.failUnlessReallyEqual(self.handler.realPath("foo/bar"), "/foo/bar") - self.failUnlessReallyEqual(self.handler.realPath("/foo/bar"), "/foo/bar") - self.failUnlessReallyEqual(self.handler.realPath("foo/bar//"), "/foo/bar") - self.failUnlessReallyEqual(self.handler.realPath("/foo/bar//"), "/foo/bar") - self.failUnlessReallyEqual(self.handler.realPath("foo/./bar"), "/foo/bar") - self.failUnlessReallyEqual(self.handler.realPath("./foo/./bar"), "/foo/bar") - self.failUnlessReallyEqual(self.handler.realPath("foo/../bar"), "/bar") - self.failUnlessReallyEqual(self.handler.realPath("/foo/../bar"), "/bar") - self.failUnlessReallyEqual(self.handler.realPath("../bar"), "/bar") - self.failUnlessReallyEqual(self.handler.realPath("/../bar"), "/bar") + self.failUnlessReallyEqual(self.handler.realPath(b""), b"/") + self.failUnlessReallyEqual(self.handler.realPath(b"/"), b"/") + self.failUnlessReallyEqual(self.handler.realPath(b"."), b"/") + self.failUnlessReallyEqual(self.handler.realPath(b"//"), b"/") + self.failUnlessReallyEqual(self.handler.realPath(b"/."), b"/") + 
self.failUnlessReallyEqual(self.handler.realPath(b"/./"), b"/") + self.failUnlessReallyEqual(self.handler.realPath(b"foo"), b"/foo") + self.failUnlessReallyEqual(self.handler.realPath(b"/foo"), b"/foo") + self.failUnlessReallyEqual(self.handler.realPath(b"foo/"), b"/foo") + self.failUnlessReallyEqual(self.handler.realPath(b"/foo/"), b"/foo") + self.failUnlessReallyEqual(self.handler.realPath(b"foo/bar"), b"/foo/bar") + self.failUnlessReallyEqual(self.handler.realPath(b"/foo/bar"), b"/foo/bar") + self.failUnlessReallyEqual(self.handler.realPath(b"foo/bar//"), b"/foo/bar") + self.failUnlessReallyEqual(self.handler.realPath(b"/foo/bar//"), b"/foo/bar") + self.failUnlessReallyEqual(self.handler.realPath(b"foo/./bar"), b"/foo/bar") + self.failUnlessReallyEqual(self.handler.realPath(b"./foo/./bar"), b"/foo/bar") + self.failUnlessReallyEqual(self.handler.realPath(b"foo/../bar"), b"/bar") + self.failUnlessReallyEqual(self.handler.realPath(b"/foo/../bar"), b"/bar") + self.failUnlessReallyEqual(self.handler.realPath(b"../bar"), b"/bar") + self.failUnlessReallyEqual(self.handler.realPath(b"/../bar"), b"/bar") d.addCallback(_check) d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "_path_from_string invalid UTF-8", - self.handler._path_from_string, "\xFF")) + self.handler._path_from_string, b"\xFF")) d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "realPath invalid UTF-8", - self.handler.realPath, "\xFF")) + self.handler.realPath, b"\xFF")) return d From 2bf278e55d52eca0572ae3f6fb2a2d627f47cd7d Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Mon, 4 Jan 2021 11:39:25 -0500 Subject: [PATCH 089/213] Even more tests passing on Python 3. 
--- src/allmydata/frontends/sftpd.py | 4 +-- src/allmydata/test/test_sftp.py | 42 ++++++++++++++++---------------- 2 files changed, 23 insertions(+), 23 deletions(-) diff --git a/src/allmydata/frontends/sftpd.py b/src/allmydata/frontends/sftpd.py index ac7fe8230..7c7543e21 100644 --- a/src/allmydata/frontends/sftpd.py +++ b/src/allmydata/frontends/sftpd.py @@ -1785,7 +1785,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): # We implement the three main OpenSSH SFTP extensions; see # - if extensionName == 'posix-rename@openssh.com': + if extensionName == b'posix-rename@openssh.com': def _bad(): raise SFTPError(FX_BAD_MESSAGE, "could not parse posix-rename@openssh.com request") if 4 > len(extensionData): return defer.execute(_bad) @@ -1807,7 +1807,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): d.addCallback(_succeeded) return d - if extensionName == 'statvfs@openssh.com' or extensionName == 'fstatvfs@openssh.com': + if extensionName == b'statvfs@openssh.com' or extensionName == b'fstatvfs@openssh.com': # f_bsize and f_frsize should be the same to avoid a bug in 'df' return defer.succeed(struct.pack('>11Q', 1024, # uint64 f_bsize /* file system block size */ diff --git a/src/allmydata/test/test_sftp.py b/src/allmydata/test/test_sftp.py index 5d0b10d34..9234c20c6 100644 --- a/src/allmydata/test/test_sftp.py +++ b/src/allmydata/test/test_sftp.py @@ -42,7 +42,7 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas def shouldFailWithSFTPError(self, expected_code, which, callable, *args, **kwargs): assert isinstance(expected_code, int), repr(expected_code) - assert isinstance(which, str), repr(which) + assert isinstance(which, bytes), repr(which) s = traceback.format_stack() d = defer.maybeDeferred(callable, *args, **kwargs) def _done(res): @@ -198,10 +198,10 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas d.addCallback(_check) d.addCallback(lambda ign: - 
self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "_path_from_string invalid UTF-8", + self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, b"_path_from_string invalid UTF-8", self.handler._path_from_string, b"\xFF")) d.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "realPath invalid UTF-8", + self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, b"realPath invalid UTF-8", self.handler.realPath, b"\xFF")) return d @@ -211,29 +211,29 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas d = defer.succeed(None) d.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_FAILURE, "_convert_error SFTPError", + self.shouldFailWithSFTPError(sftp.FX_FAILURE, b"_convert_error SFTPError", sftpd._convert_error, Failure(sftp.SFTPError(sftp.FX_FAILURE, "foo")), "request")) d.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "_convert_error NoSuchChildError", + self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, b"_convert_error NoSuchChildError", sftpd._convert_error, Failure(NoSuchChildError("foo")), "request")) d.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_FAILURE, "_convert_error ExistingChildError", + self.shouldFailWithSFTPError(sftp.FX_FAILURE, b"_convert_error ExistingChildError", sftpd._convert_error, Failure(ExistingChildError("foo")), "request")) d.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "_convert_error NotWriteableError", + self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, b"_convert_error NotWriteableError", sftpd._convert_error, Failure(NotWriteableError("foo")), "request")) d.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_OP_UNSUPPORTED, "_convert_error NotImplementedError", + self.shouldFailWithSFTPError(sftp.FX_OP_UNSUPPORTED, b"_convert_error NotImplementedError", sftpd._convert_error, Failure(NotImplementedError("foo")), "request")) d.addCallback(lambda ign: - 
self.shouldFailWithSFTPError(sftp.FX_EOF, "_convert_error EOFError", + self.shouldFailWithSFTPError(sftp.FX_EOF, b"_convert_error EOFError", sftpd._convert_error, Failure(EOFError("foo")), "request")) d.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_EOF, "_convert_error defer.FirstError", + self.shouldFailWithSFTPError(sftp.FX_EOF, b"_convert_error defer.FirstError", sftpd._convert_error, Failure(defer.FirstError( Failure(sftp.SFTPError(sftp.FX_EOF, "foo")), 0)), "request")) d.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_FAILURE, "_convert_error AssertionError", + self.shouldFailWithSFTPError(sftp.FX_FAILURE, b"_convert_error AssertionError", sftpd._convert_error, Failure(AssertionError("foo")), "request")) return d @@ -1464,24 +1464,24 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas def test_extendedRequest(self): d = self._set_up("extendedRequest") - d.addCallback(lambda ign: self.handler.extendedRequest("statvfs@openssh.com", "/")) + d.addCallback(lambda ign: self.handler.extendedRequest(b"statvfs@openssh.com", b"/")) def _check(res): - self.failUnless(isinstance(res, str)) + self.failUnless(isinstance(res, bytes)) self.failUnlessEqual(len(res), 8*11) d.addCallback(_check) d.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_OP_UNSUPPORTED, "extendedRequest foo bar", - self.handler.extendedRequest, "foo", "bar")) + self.shouldFailWithSFTPError(sftp.FX_OP_UNSUPPORTED, b"extendedRequest foo bar", + self.handler.extendedRequest, b"foo", b"bar")) d.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_BAD_MESSAGE, "extendedRequest posix-rename@openssh.com invalid 1", - self.handler.extendedRequest, 'posix-rename@openssh.com', '')) + self.shouldFailWithSFTPError(sftp.FX_BAD_MESSAGE, b"extendedRequest posix-rename@openssh.com invalid 1", + self.handler.extendedRequest, b'posix-rename@openssh.com', b'')) d.addCallback(lambda ign: - 
self.shouldFailWithSFTPError(sftp.FX_BAD_MESSAGE, "extendedRequest posix-rename@openssh.com invalid 2", - self.handler.extendedRequest, 'posix-rename@openssh.com', '\x00\x00\x00\x01')) + self.shouldFailWithSFTPError(sftp.FX_BAD_MESSAGE, b"extendedRequest posix-rename@openssh.com invalid 2", + self.handler.extendedRequest, b'posix-rename@openssh.com', b'\x00\x00\x00\x01')) d.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_BAD_MESSAGE, "extendedRequest posix-rename@openssh.com invalid 3", - self.handler.extendedRequest, 'posix-rename@openssh.com', '\x00\x00\x00\x01_\x00\x00\x00\x01')) + self.shouldFailWithSFTPError(sftp.FX_BAD_MESSAGE, b"extendedRequest posix-rename@openssh.com invalid 3", + self.handler.extendedRequest, b'posix-rename@openssh.com', b'\x00\x00\x00\x01_\x00\x00\x00\x01')) return d From 3a6b92f39f2dae4450e2ef49e2b4cfa85a3c07cb Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Mon, 4 Jan 2021 11:48:38 -0500 Subject: [PATCH 090/213] More progress to passing tests on Python 3. --- src/allmydata/frontends/sftpd.py | 2 +- src/allmydata/test/test_sftp.py | 84 ++++++++++++++++---------------- 2 files changed, 43 insertions(+), 43 deletions(-) diff --git a/src/allmydata/frontends/sftpd.py b/src/allmydata/frontends/sftpd.py index 7c7543e21..34ded8b93 100644 --- a/src/allmydata/frontends/sftpd.py +++ b/src/allmydata/frontends/sftpd.py @@ -1647,7 +1647,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): def _render(children): parent_readonly = dirnode.is_readonly() results = [] - for filename, (child, metadata) in children.iteritems(): + for filename, (child, metadata) in children.items(): # The file size may be cached or absent. 
metadata['no-write'] = _no_write(parent_readonly, child, metadata) attrs = _populate_attrs(child, metadata) diff --git a/src/allmydata/test/test_sftp.py b/src/allmydata/test/test_sftp.py index 9234c20c6..fc4027401 100644 --- a/src/allmydata/test/test_sftp.py +++ b/src/allmydata/test/test_sftp.py @@ -92,33 +92,33 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas self.readonly_uri = n.get_uri() d.addCallback(_created_readonly) - gross = upload.Data("0123456789" * 101, None) + gross = upload.Data(b"0123456789" * 101, None) d.addCallback(lambda ign: self.root.add_file(u"gro\u00DF", gross)) def _created_gross(n): self.gross = n self.gross_uri = n.get_uri() d.addCallback(_created_gross) - small = upload.Data("0123456789", None) + small = upload.Data(b"0123456789", None) d.addCallback(lambda ign: self.root.add_file(u"small", small)) def _created_small(n): self.small = n self.small_uri = n.get_uri() d.addCallback(_created_small) - small2 = upload.Data("Small enough for a LIT too", None) + small2 = upload.Data(b"Small enough for a LIT too", None) d.addCallback(lambda ign: self.root.add_file(u"small2", small2)) def _created_small2(n): self.small2 = n self.small2_uri = n.get_uri() d.addCallback(_created_small2) - empty_litdir_uri = "URI:DIR2-LIT:" + empty_litdir_uri = b"URI:DIR2-LIT:" # contains one child which is itself also LIT: - tiny_litdir_uri = "URI:DIR2-LIT:gqytunj2onug64tufqzdcosvkjetutcjkq5gw4tvm5vwszdgnz5hgyzufqydulbshj5x2lbm" + tiny_litdir_uri = b"URI:DIR2-LIT:gqytunj2onug64tufqzdcosvkjetutcjkq5gw4tvm5vwszdgnz5hgyzufqydulbshj5x2lbm" - unknown_uri = "x-tahoe-crazy://I_am_from_the_future." + unknown_uri = b"x-tahoe-crazy://I_am_from_the_future." 
d.addCallback(lambda ign: self.root._create_and_validate_node(None, empty_litdir_uri, name=u"empty_lit_dir")) def _created_empty_lit_dir(n): @@ -242,11 +242,11 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas d = self._set_up("not_implemented") d.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_OP_UNSUPPORTED, "readLink link", - self.handler.readLink, "link")) + self.shouldFailWithSFTPError(sftp.FX_OP_UNSUPPORTED, b"readLink link", + self.handler.readLink, b"link")) d.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_OP_UNSUPPORTED, "makeLink link file", - self.handler.makeLink, "link", "file")) + self.shouldFailWithSFTPError(sftp.FX_OP_UNSUPPORTED, b"makeLink link file", + self.handler.makeLink, b"link", b"file")) return d @@ -276,43 +276,43 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas d.addCallback(lambda ign: self._set_up_tree()) d.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "openDirectory small", - self.handler.openDirectory, "small")) + self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, b"openDirectory small", + self.handler.openDirectory, b"small")) d.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "openDirectory unknown", - self.handler.openDirectory, "unknown")) + self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, b"openDirectory unknown", + self.handler.openDirectory, b"unknown")) d.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "openDirectory nodir", - self.handler.openDirectory, "nodir")) + self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, b"openDirectory nodir", + self.handler.openDirectory, b"nodir")) d.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "openDirectory nodir/nodir", - self.handler.openDirectory, "nodir/nodir")) + self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, b"openDirectory nodir/nodir", + 
self.handler.openDirectory, b"nodir/nodir")) gross = u"gro\u00DF".encode("utf-8") expected_root = [ - ('empty_lit_dir', r'dr-xr-xr-x .* 0 .* empty_lit_dir$', {'permissions': S_IFDIR | 0o555}), - (gross, r'-rw-rw-rw- .* 1010 .* '+gross+'$', {'permissions': S_IFREG | 0o666, 'size': 1010}), + ('empty_lit_dir', br'dr-xr-xr-x .* 0 .* empty_lit_dir$', {'permissions': S_IFDIR | 0o555}), + (gross, br'-rw-rw-rw- .* 1010 .* '+gross+b'$', {'permissions': S_IFREG | 0o666, 'size': 1010}), # The fall of the Berlin wall may have been on 9th or 10th November 1989 depending on the gateway's timezone. #('loop', r'drwxrwxrwx .* 0 Nov (09|10) 1989 loop$', {'permissions': S_IFDIR | 0777}), - ('loop', r'drwxrwxrwx .* 0 .* loop$', {'permissions': S_IFDIR | 0o777}), - ('mutable', r'-rw-rw-rw- .* 0 .* mutable$', {'permissions': S_IFREG | 0o666}), - ('readonly', r'-r--r--r-- .* 0 .* readonly$', {'permissions': S_IFREG | 0o444}), - ('small', r'-rw-rw-rw- .* 10 .* small$', {'permissions': S_IFREG | 0o666, 'size': 10}), - ('small2', r'-rw-rw-rw- .* 26 .* small2$', {'permissions': S_IFREG | 0o666, 'size': 26}), - ('tiny_lit_dir', r'dr-xr-xr-x .* 0 .* tiny_lit_dir$', {'permissions': S_IFDIR | 0o555}), - ('unknown', r'\?--------- .* 0 .* unknown$', {'permissions': 0}), + ('loop', br'drwxrwxrwx .* 0 .* loop$', {'permissions': S_IFDIR | 0o777}), + ('mutable', br'-rw-rw-rw- .* 0 .* mutable$', {'permissions': S_IFREG | 0o666}), + ('readonly', br'-r--r--r-- .* 0 .* readonly$', {'permissions': S_IFREG | 0o444}), + ('small', br'-rw-rw-rw- .* 10 .* small$', {'permissions': S_IFREG | 0o666, 'size': 10}), + ('small2', br'-rw-rw-rw- .* 26 .* small2$', {'permissions': S_IFREG | 0o666, 'size': 26}), + ('tiny_lit_dir', br'dr-xr-xr-x .* 0 .* tiny_lit_dir$', {'permissions': S_IFDIR | 0o555}), + ('unknown', br'\?--------- .* 0 .* unknown$', {'permissions': 0}), ] - d.addCallback(lambda ign: self.handler.openDirectory("")) + d.addCallback(lambda ign: self.handler.openDirectory(b"")) d.addCallback(lambda res: 
self._compareDirLists(res, expected_root)) - d.addCallback(lambda ign: self.handler.openDirectory("loop")) + d.addCallback(lambda ign: self.handler.openDirectory(b"loop")) d.addCallback(lambda res: self._compareDirLists(res, expected_root)) - d.addCallback(lambda ign: self.handler.openDirectory("loop/loop")) + d.addCallback(lambda ign: self.handler.openDirectory(b"loop/loop")) d.addCallback(lambda res: self._compareDirLists(res, expected_root)) - d.addCallback(lambda ign: self.handler.openDirectory("empty_lit_dir")) + d.addCallback(lambda ign: self.handler.openDirectory(b"empty_lit_dir")) d.addCallback(lambda res: self._compareDirLists(res, [])) # The UTC epoch may either be in Jan 1 1970 or Dec 31 1969 depending on the gateway's timezone. @@ -320,10 +320,10 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas ('short', r'-r--r--r-- .* 8 (Jan 01 1970|Dec 31 1969) short$', {'permissions': S_IFREG | 0o444, 'size': 8}), ] - d.addCallback(lambda ign: self.handler.openDirectory("tiny_lit_dir")) + d.addCallback(lambda ign: self.handler.openDirectory(b"tiny_lit_dir")) d.addCallback(lambda res: self._compareDirLists(res, expected_tiny_lit)) - d.addCallback(lambda ign: self.handler.getAttrs("small", True)) + d.addCallback(lambda ign: self.handler.getAttrs(b"small", True)) d.addCallback(lambda attrs: self._compareAttributes(attrs, {'permissions': S_IFREG | 0o666, 'size': 10})) d.addCallback(lambda ign: self.handler.setAttrs("small", {})) @@ -542,7 +542,7 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas # The check at the end of openFile_read tested this for large files, # but it trashed the grid in the process, so this needs to be a # separate test. 
- small = upload.Data("0123456789"*10, None) + small = upload.Data(b"0123456789"*10, None) d = self._set_up("openFile_read_error") d.addCallback(lambda ign: self.root.add_file(u"small", small)) d.addCallback(lambda n: self.handler.openFile("/uri/"+n.get_uri(), sftp.FXF_READ, {})) @@ -1342,7 +1342,7 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas d.addCallback(lambda ign: self._set_up_tree()) # making a directory at a correct path should succeed - d.addCallback(lambda ign: self.handler.makeDirectory("newdir", {'ext_foo': 'bar', 'ctime': 42})) + d.addCallback(lambda ign: self.handler.makeDirectory(b"newdir", {'ext_foo': 'bar', 'ctime': 42})) d.addCallback(lambda ign: self.root.get_child_and_metadata(u"newdir")) def _got(child_and_metadata): @@ -1358,7 +1358,7 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas d.addCallback(_got) # making intermediate directories should also succeed - d.addCallback(lambda ign: self.handler.makeDirectory("newparent/newchild", {})) + d.addCallback(lambda ign: self.handler.makeDirectory(b"newparent/newchild", {})) d.addCallback(lambda ign: self.root.get(u"newparent")) def _got_newparent(newparent): @@ -1373,18 +1373,18 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas d.addCallback(_got_newchild) d.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "makeDirectory invalid UTF-8", - self.handler.makeDirectory, "\xFF", {})) + self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, b"makeDirectory invalid UTF-8", + self.handler.makeDirectory, b"\xFF", {})) # should fail because there is an existing file "small" d.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_FAILURE, "makeDirectory small", - self.handler.makeDirectory, "small", {})) + self.shouldFailWithSFTPError(sftp.FX_FAILURE, b"makeDirectory small", + self.handler.makeDirectory, b"small", {})) # directories cannot be created read-only via SFTP 
d.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "makeDirectory newdir2 permissions:0444 denied", - self.handler.makeDirectory, "newdir2", + self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, b"makeDirectory newdir2 permissions:0444 denied", + self.handler.makeDirectory, b"newdir2", {'permissions': 0o444})) d.addCallback(lambda ign: self.failUnlessEqual(sftpd.all_heisenfiles, {})) From ce50916ec586d381c9016f441f49bef66e9d020f Mon Sep 17 00:00:00 2001 From: Sajith Sasidharan Date: Mon, 4 Jan 2021 20:21:43 -0500 Subject: [PATCH 091/213] Add newsfragment --- newsfragments/3536.minor | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 newsfragments/3536.minor diff --git a/newsfragments/3536.minor b/newsfragments/3536.minor new file mode 100644 index 000000000..e69de29bb From bd402ce1f411cc97b7dd81a210ff897049e2ce67 Mon Sep 17 00:00:00 2001 From: Chad Whitacre Date: Tue, 5 Jan 2021 06:27:46 -0500 Subject: [PATCH 092/213] Compute Content-Length more betterly --- src/allmydata/webish.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/allmydata/webish.py b/src/allmydata/webish.py index b3b819b6a..e90fa573a 100644 --- a/src/allmydata/webish.py +++ b/src/allmydata/webish.py @@ -89,10 +89,10 @@ class TahoeLAFSRequest(Request, object): } if 'content-length' not in headers: - # Python 3's cgi module would really, really like us to set - # Content-Length. This seems likely to shoot performance in - # the foot. - headers['content-length'] = len(self.content.getvalue()) + # Python 3's cgi module would really, really like us to set Content-Length. 
+ self.content.seek(0, 2) + headers['content-length'] = str(self.content.tell()) + self.content.seek(0) self.fields = FieldStorage(self.content, headers, environ={'REQUEST_METHOD': 'POST'}) self.content.seek(0) From 88946900b3127dc85cb83320a442c94a4b2f7d52 Mon Sep 17 00:00:00 2001 From: Chad Whitacre Date: Tue, 29 Dec 2020 10:38:07 -0500 Subject: [PATCH 093/213] Port unknown to Python 3 This is covered by test_dirnode/test_grid --- newsfragments/3576.minor | 0 src/allmydata/unknown.py | 10 ++++++++++ src/allmydata/util/_python3.py | 1 + 3 files changed, 11 insertions(+) create mode 100644 newsfragments/3576.minor diff --git a/newsfragments/3576.minor b/newsfragments/3576.minor new file mode 100644 index 000000000..e69de29bb diff --git a/src/allmydata/unknown.py b/src/allmydata/unknown.py index f79c88415..060696293 100644 --- a/src/allmydata/unknown.py +++ b/src/allmydata/unknown.py @@ -1,3 +1,13 @@ +"""Ported to Python 3. +""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 from zope.interface import implementer from twisted.internet import defer diff --git a/src/allmydata/util/_python3.py b/src/allmydata/util/_python3.py index db2ceed03..650b52c7a 100644 --- a/src/allmydata/util/_python3.py +++ b/src/allmydata/util/_python3.py @@ -82,6 +82,7 @@ PORTED_MODULES = [ "allmydata.storage.shares", "allmydata.test.no_network", "allmydata.test.mutable.util", + "allmydata.unknown", "allmydata.uri", "allmydata.util._python3", "allmydata.util.abbreviate", From 0241244e49d0cb8006a497e4dea9607117f39f60 Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Tue, 5 Jan 2021 16:30:17 -0500 Subject: [PATCH 094/213] Another test passing on Python 3. 
--- src/allmydata/frontends/sftpd.py | 35 ++++++++++++++++---------------- src/allmydata/test/test_sftp.py | 26 ++++++++++++------------ 2 files changed, 31 insertions(+), 30 deletions(-) diff --git a/src/allmydata/frontends/sftpd.py b/src/allmydata/frontends/sftpd.py index 34ded8b93..fa99b909a 100644 --- a/src/allmydata/frontends/sftpd.py +++ b/src/allmydata/frontends/sftpd.py @@ -55,7 +55,7 @@ def eventually_errback(d): def _utf8(x): if isinstance(x, unicode): return x.encode('utf-8') - if isinstance(x, str): + if isinstance(x, bytes): return x return repr(x) @@ -146,7 +146,7 @@ def _lsLine(name, attrs): # Since we now depend on Twisted v10.1, consider calling Twisted's version. mode = st_mode - perms = array.array('c', '-'*10) + perms = ["-"] * 10 ft = stat.S_IFMT(mode) if stat.S_ISDIR(ft): perms[0] = 'd' elif stat.S_ISREG(ft): perms[0] = '-' @@ -165,7 +165,7 @@ def _lsLine(name, attrs): if mode&stat.S_IXOTH: perms[9] = 'x' # suid/sgid never set - l = perms.tostring() + l = "".join(perms) l += str(st_nlink).rjust(5) + ' ' un = str(st_uid) l += un.ljust(9) @@ -182,6 +182,7 @@ def _lsLine(name, attrs): l += strftime("%b %d %Y ", localtime(st_mtime)) else: l += strftime("%b %d %H:%M ", localtime(st_mtime)) + l = l.encode("utf-8") l += name return l @@ -275,7 +276,7 @@ def _direntry_for(filenode_or_parent, childname, filenode=None): if filenode_or_parent: rw_uri = filenode_or_parent.get_write_uri() if rw_uri and childname: - return rw_uri + "/" + childname.encode('utf-8') + return rw_uri + b"/" + childname.encode('utf-8') else: return rw_uri @@ -509,7 +510,7 @@ class OverwriteableFileConsumer(PrefixingLogMixin): return d def download_done(self, res): - _assert(isinstance(res, (str, Failure)), res=res) + _assert(isinstance(res, (bytes, Failure)), res=res) # Only the first call to download_done counts, but we log subsequent calls # (multiple calls are normal). 
if self.done_status is not None: @@ -565,7 +566,7 @@ class ShortReadOnlySFTPFile(PrefixingLogMixin): PrefixingLogMixin.__init__(self, facility="tahoe.sftp", prefix=userpath) if noisy: self.log(".__init__(%r, %r, %r)" % (userpath, filenode, metadata), level=NOISY) - precondition(isinstance(userpath, str) and IFileNode.providedBy(filenode), + precondition(isinstance(userpath, bytes) and IFileNode.providedBy(filenode), userpath=userpath, filenode=filenode) self.filenode = filenode self.metadata = metadata @@ -649,7 +650,7 @@ class GeneralSFTPFile(PrefixingLogMixin): if noisy: self.log(".__init__(%r, %r = %r, %r, )" % (userpath, flags, _repr_flags(flags), close_notify), level=NOISY) - precondition(isinstance(userpath, str), userpath=userpath) + precondition(isinstance(userpath, bytes), userpath=userpath) self.userpath = userpath self.flags = flags self.close_notify = close_notify @@ -672,7 +673,7 @@ class GeneralSFTPFile(PrefixingLogMixin): self.log(".open(parent=%r, childname=%r, filenode=%r, metadata=%r)" % (parent, childname, filenode, metadata), level=OPERATIONAL) - precondition(isinstance(childname, (unicode, NoneType)), childname=childname) + precondition(isinstance(childname, (unicode, type(None))), childname=childname) precondition(filenode is None or IFileNode.providedBy(filenode), filenode=filenode) precondition(not self.closed, sftpfile=self) @@ -723,7 +724,7 @@ class GeneralSFTPFile(PrefixingLogMixin): def rename(self, new_userpath, new_parent, new_childname): self.log(".rename(%r, %r, %r)" % (new_userpath, new_parent, new_childname), level=OPERATIONAL) - precondition(isinstance(new_userpath, str) and isinstance(new_childname, unicode), + precondition(isinstance(new_userpath, bytes) and isinstance(new_childname, unicode), new_userpath=new_userpath, new_childname=new_childname) self.userpath = new_userpath self.parent = new_parent @@ -1039,7 +1040,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): request = "._abandon_any_heisenfiles(%r, %r)" % 
(userpath, direntry) self.log(request, level=OPERATIONAL) - precondition(isinstance(userpath, str), userpath=userpath) + precondition(isinstance(userpath, bytes), userpath=userpath) # First we synchronously mark all heisenfiles matching the userpath or direntry # as abandoned, and remove them from the two heisenfile dicts. Then we .sync() @@ -1088,8 +1089,8 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): (from_userpath, from_parent, from_childname, to_userpath, to_parent, to_childname, overwrite)) self.log(request, level=OPERATIONAL) - precondition((isinstance(from_userpath, str) and isinstance(from_childname, unicode) and - isinstance(to_userpath, str) and isinstance(to_childname, unicode)), + precondition((isinstance(from_userpath, bytes) and isinstance(from_childname, unicode) and + isinstance(to_userpath, bytes) and isinstance(to_childname, unicode)), from_userpath=from_userpath, from_childname=from_childname, to_userpath=to_userpath, to_childname=to_childname) if noisy: self.log("all_heisenfiles = %r\nself._heisenfiles = %r" % (all_heisenfiles, self._heisenfiles), level=NOISY) @@ -1161,7 +1162,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): request = "._update_attrs_for_heisenfiles(%r, %r, %r)" % (userpath, direntry, attrs) self.log(request, level=OPERATIONAL) - _assert(isinstance(userpath, str) and isinstance(direntry, str), + _assert(isinstance(userpath, bytes) and isinstance(direntry, bytes), userpath=userpath, direntry=direntry) files = [] @@ -1194,7 +1195,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): request = "._sync_heisenfiles(%r, %r, ignore=%r)" % (userpath, direntry, ignore) self.log(request, level=OPERATIONAL) - _assert(isinstance(userpath, str) and isinstance(direntry, (str, NoneType)), + _assert(isinstance(userpath, bytes) and isinstance(direntry, (bytes, type(None))), userpath=userpath, direntry=direntry) files = [] @@ -1219,7 +1220,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): def 
_remove_heisenfile(self, userpath, parent, childname, file_to_remove): if noisy: self.log("._remove_heisenfile(%r, %r, %r, %r)" % (userpath, parent, childname, file_to_remove), level=NOISY) - _assert(isinstance(userpath, str) and isinstance(childname, (unicode, NoneType)), + _assert(isinstance(userpath, bytes) and isinstance(childname, (bytes, type(None))), userpath=userpath, childname=childname) direntry = _direntry_for(parent, childname) @@ -1246,7 +1247,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): (existing_file, userpath, flags, _repr_flags(flags), parent, childname, filenode, metadata), level=NOISY) - _assert((isinstance(userpath, str) and isinstance(childname, (unicode, NoneType)) and + _assert((isinstance(userpath, bytes) and isinstance(childname, (unicode, type(None))) and (metadata is None or 'no-write' in metadata)), userpath=userpath, childname=childname, metadata=metadata) @@ -1979,7 +1980,7 @@ class SFTPServer(service.MultiService): def __init__(self, client, accountfile, accounturl, sftp_portstr, pubkey_file, privkey_file): - precondition(isinstance(accountfile, (unicode, NoneType)), accountfile) + precondition(isinstance(accountfile, (unicode, type(None))), accountfile) precondition(isinstance(pubkey_file, unicode), pubkey_file) precondition(isinstance(privkey_file, unicode), privkey_file) service.MultiService.__init__(self) diff --git a/src/allmydata/test/test_sftp.py b/src/allmydata/test/test_sftp.py index fc4027401..110fde7ac 100644 --- a/src/allmydata/test/test_sftp.py +++ b/src/allmydata/test/test_sftp.py @@ -290,17 +290,17 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas gross = u"gro\u00DF".encode("utf-8") expected_root = [ - ('empty_lit_dir', br'dr-xr-xr-x .* 0 .* empty_lit_dir$', {'permissions': S_IFDIR | 0o555}), + (b'empty_lit_dir', br'dr-xr-xr-x .* 0 .* empty_lit_dir$', {'permissions': S_IFDIR | 0o555}), (gross, br'-rw-rw-rw- .* 1010 .* '+gross+b'$', {'permissions': S_IFREG | 0o666, 'size': 
1010}), # The fall of the Berlin wall may have been on 9th or 10th November 1989 depending on the gateway's timezone. #('loop', r'drwxrwxrwx .* 0 Nov (09|10) 1989 loop$', {'permissions': S_IFDIR | 0777}), - ('loop', br'drwxrwxrwx .* 0 .* loop$', {'permissions': S_IFDIR | 0o777}), - ('mutable', br'-rw-rw-rw- .* 0 .* mutable$', {'permissions': S_IFREG | 0o666}), - ('readonly', br'-r--r--r-- .* 0 .* readonly$', {'permissions': S_IFREG | 0o444}), - ('small', br'-rw-rw-rw- .* 10 .* small$', {'permissions': S_IFREG | 0o666, 'size': 10}), - ('small2', br'-rw-rw-rw- .* 26 .* small2$', {'permissions': S_IFREG | 0o666, 'size': 26}), - ('tiny_lit_dir', br'dr-xr-xr-x .* 0 .* tiny_lit_dir$', {'permissions': S_IFDIR | 0o555}), - ('unknown', br'\?--------- .* 0 .* unknown$', {'permissions': 0}), + (b'loop', br'drwxrwxrwx .* 0 .* loop$', {'permissions': S_IFDIR | 0o777}), + (b'mutable', br'-rw-rw-rw- .* 0 .* mutable$', {'permissions': S_IFREG | 0o666}), + (b'readonly', br'-r--r--r-- .* 0 .* readonly$', {'permissions': S_IFREG | 0o444}), + (b'small', br'-rw-rw-rw- .* 10 .* small$', {'permissions': S_IFREG | 0o666, 'size': 10}), + (b'small2', br'-rw-rw-rw- .* 26 .* small2$', {'permissions': S_IFREG | 0o666, 'size': 26}), + (b'tiny_lit_dir', br'dr-xr-xr-x .* 0 .* tiny_lit_dir$', {'permissions': S_IFDIR | 0o555}), + (b'unknown', br'\?--------- .* 0 .* unknown$', {'permissions': 0}), ] d.addCallback(lambda ign: self.handler.openDirectory(b"")) @@ -317,7 +317,7 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas # The UTC epoch may either be in Jan 1 1970 or Dec 31 1969 depending on the gateway's timezone. 
expected_tiny_lit = [ - ('short', r'-r--r--r-- .* 8 (Jan 01 1970|Dec 31 1969) short$', {'permissions': S_IFREG | 0o444, 'size': 8}), + (b'short', br'-r--r--r-- .* 8 (Jan 01 1970|Dec 31 1969) short$', {'permissions': S_IFREG | 0o444, 'size': 8}), ] d.addCallback(lambda ign: self.handler.openDirectory(b"tiny_lit_dir")) @@ -326,15 +326,15 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas d.addCallback(lambda ign: self.handler.getAttrs(b"small", True)) d.addCallback(lambda attrs: self._compareAttributes(attrs, {'permissions': S_IFREG | 0o666, 'size': 10})) - d.addCallback(lambda ign: self.handler.setAttrs("small", {})) + d.addCallback(lambda ign: self.handler.setAttrs(b"small", {})) d.addCallback(lambda res: self.failUnlessReallyEqual(res, None)) - d.addCallback(lambda ign: self.handler.getAttrs("small", True)) + d.addCallback(lambda ign: self.handler.getAttrs(b"small", True)) d.addCallback(lambda attrs: self._compareAttributes(attrs, {'permissions': S_IFREG | 0o666, 'size': 10})) d.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_OP_UNSUPPORTED, "setAttrs size", - self.handler.setAttrs, "small", {'size': 0})) + self.shouldFailWithSFTPError(sftp.FX_OP_UNSUPPORTED, b"setAttrs size", + self.handler.setAttrs, b"small", {'size': 0})) d.addCallback(lambda ign: self.failUnlessEqual(sftpd.all_heisenfiles, {})) d.addCallback(lambda ign: self.failUnlessEqual(self.handler._heisenfiles, {})) From 06fb9496ab6ca17b699c47dfd11f178c8072cc74 Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Tue, 5 Jan 2021 16:35:57 -0500 Subject: [PATCH 095/213] Another test passing on Python 3. 
--- src/allmydata/frontends/sftpd.py | 10 +-- src/allmydata/test/test_sftp.py | 112 +++++++++++++++---------------- 2 files changed, 61 insertions(+), 61 deletions(-) diff --git a/src/allmydata/frontends/sftpd.py b/src/allmydata/frontends/sftpd.py index fa99b909a..c7ce3a99f 100644 --- a/src/allmydata/frontends/sftpd.py +++ b/src/allmydata/frontends/sftpd.py @@ -336,7 +336,7 @@ class OverwriteableFileConsumer(PrefixingLogMixin): self.download_size = size if self.downloaded >= self.download_size: - self.download_done("size changed") + self.download_done(b"size changed") def registerProducer(self, p, streaming): if noisy: self.log(".registerProducer(%r, streaming=%r)" % (p, streaming), level=NOISY) @@ -419,7 +419,7 @@ class OverwriteableFileConsumer(PrefixingLogMixin): eventually_callback(d)("reached") if milestone >= self.download_size: - self.download_done("reached download size") + self.download_done(b"reached download size") def overwrite(self, offset, data): if noisy: self.log(".overwrite(%r, )" % (offset, len(data)), level=NOISY) @@ -542,7 +542,7 @@ class OverwriteableFileConsumer(PrefixingLogMixin): self.f.close() except Exception as e: self.log("suppressed %r from close of temporary file %r" % (e, self.f), level=WEIRD) - self.download_done("closed") + self.download_done(b"closed") return self.done_status def unregisterProducer(self): @@ -690,7 +690,7 @@ class GeneralSFTPFile(PrefixingLogMixin): if (self.flags & FXF_TRUNC) or not filenode: # We're either truncating or creating the file, so we don't need the old contents. 
self.consumer = OverwriteableFileConsumer(0, tempfile_maker) - self.consumer.download_done("download not needed") + self.consumer.download_done(b"download not needed") else: self.async_.addCallback(lambda ignored: filenode.get_best_readable_version()) @@ -704,7 +704,7 @@ class GeneralSFTPFile(PrefixingLogMixin): d = version.read(self.consumer, 0, None) def _finished(res): if not isinstance(res, Failure): - res = "download finished" + res = b"download finished" self.consumer.download_done(res) d.addBoth(_finished) # It is correct to drop d here. diff --git a/src/allmydata/test/test_sftp.py b/src/allmydata/test/test_sftp.py index 110fde7ac..2270ce025 100644 --- a/src/allmydata/test/test_sftp.py +++ b/src/allmydata/test/test_sftp.py @@ -345,85 +345,85 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas d.addCallback(lambda ign: self._set_up_tree()) d.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_BAD_MESSAGE, "openFile small 0 bad", - self.handler.openFile, "small", 0, {})) + self.shouldFailWithSFTPError(sftp.FX_BAD_MESSAGE, b"openFile small 0 bad", + self.handler.openFile, b"small", 0, {})) # attempting to open a non-existent file should fail d.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "openFile nofile READ nosuch", - self.handler.openFile, "nofile", sftp.FXF_READ, {})) + self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, b"openFile nofile READ nosuch", + self.handler.openFile, b"nofile", sftp.FXF_READ, {})) d.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "openFile nodir/file READ nosuch", - self.handler.openFile, "nodir/file", sftp.FXF_READ, {})) + self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, b"openFile nodir/file READ nosuch", + self.handler.openFile, b"nodir/file", sftp.FXF_READ, {})) d.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "openFile unknown READ denied", - self.handler.openFile, "unknown", 
sftp.FXF_READ, {})) + self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, b"openFile unknown READ denied", + self.handler.openFile, b"unknown", sftp.FXF_READ, {})) d.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "openFile unknown/file READ denied", - self.handler.openFile, "unknown/file", sftp.FXF_READ, {})) + self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, b"openFile unknown/file READ denied", + self.handler.openFile, b"unknown/file", sftp.FXF_READ, {})) d.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "openFile tiny_lit_dir READ denied", - self.handler.openFile, "tiny_lit_dir", sftp.FXF_READ, {})) + self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, b"openFile tiny_lit_dir READ denied", + self.handler.openFile, b"tiny_lit_dir", sftp.FXF_READ, {})) d.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "openFile unknown uri READ denied", - self.handler.openFile, "uri/"+self.unknown_uri, sftp.FXF_READ, {})) + self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, b"openFile unknown uri READ denied", + self.handler.openFile, b"uri/"+self.unknown_uri, sftp.FXF_READ, {})) d.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "openFile tiny_lit_dir uri READ denied", - self.handler.openFile, "uri/"+self.tiny_lit_dir_uri, sftp.FXF_READ, {})) + self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, b"openFile tiny_lit_dir uri READ denied", + self.handler.openFile, b"uri/"+self.tiny_lit_dir_uri, sftp.FXF_READ, {})) # FIXME: should be FX_NO_SUCH_FILE? 
d.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "openFile noexist uri READ denied", - self.handler.openFile, "uri/URI:noexist", sftp.FXF_READ, {})) + self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, b"openFile noexist uri READ denied", + self.handler.openFile, b"uri/URI:noexist", sftp.FXF_READ, {})) d.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "openFile invalid UTF-8 uri READ denied", - self.handler.openFile, "uri/URI:\xFF", sftp.FXF_READ, {})) + self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, b"openFile invalid UTF-8 uri READ denied", + self.handler.openFile, b"uri/URI:\xFF", sftp.FXF_READ, {})) # reading an existing file should succeed - d.addCallback(lambda ign: self.handler.openFile("small", sftp.FXF_READ, {})) + d.addCallback(lambda ign: self.handler.openFile(b"small", sftp.FXF_READ, {})) def _read_small(rf): d2 = rf.readChunk(0, 10) - d2.addCallback(lambda data: self.failUnlessReallyEqual(data, "0123456789")) + d2.addCallback(lambda data: self.failUnlessReallyEqual(data, b"0123456789")) d2.addCallback(lambda ign: rf.readChunk(2, 6)) - d2.addCallback(lambda data: self.failUnlessReallyEqual(data, "234567")) + d2.addCallback(lambda data: self.failUnlessReallyEqual(data, b"234567")) d2.addCallback(lambda ign: rf.readChunk(1, 0)) - d2.addCallback(lambda data: self.failUnlessReallyEqual(data, "")) + d2.addCallback(lambda data: self.failUnlessReallyEqual(data, b"")) d2.addCallback(lambda ign: rf.readChunk(8, 4)) # read that starts before EOF is OK - d2.addCallback(lambda data: self.failUnlessReallyEqual(data, "89")) + d2.addCallback(lambda data: self.failUnlessReallyEqual(data, b"89")) d2.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_EOF, "readChunk starting at EOF (0-byte)", + self.shouldFailWithSFTPError(sftp.FX_EOF, b"readChunk starting at EOF (0-byte)", rf.readChunk, 10, 0)) d2.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_EOF, "readChunk starting at 
EOF", + self.shouldFailWithSFTPError(sftp.FX_EOF, b"readChunk starting at EOF", rf.readChunk, 10, 1)) d2.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_EOF, "readChunk starting after EOF", + self.shouldFailWithSFTPError(sftp.FX_EOF, b"readChunk starting after EOF", rf.readChunk, 11, 1)) d2.addCallback(lambda ign: rf.getAttrs()) d2.addCallback(lambda attrs: self._compareAttributes(attrs, {'permissions': S_IFREG | 0o666, 'size': 10})) - d2.addCallback(lambda ign: self.handler.getAttrs("small", followLinks=0)) + d2.addCallback(lambda ign: self.handler.getAttrs(b"small", followLinks=0)) d2.addCallback(lambda attrs: self._compareAttributes(attrs, {'permissions': S_IFREG | 0o666, 'size': 10})) d2.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "writeChunk on read-only handle denied", - rf.writeChunk, 0, "a")) + self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, b"writeChunk on read-only handle denied", + rf.writeChunk, 0, b"a")) d2.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "setAttrs on read-only handle denied", + self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, b"setAttrs on read-only handle denied", rf.setAttrs, {})) d2.addCallback(lambda ign: rf.close()) d2.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_BAD_MESSAGE, "readChunk on closed file bad", + self.shouldFailWithSFTPError(sftp.FX_BAD_MESSAGE, b"readChunk on closed file bad", rf.readChunk, 0, 1)) d2.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_BAD_MESSAGE, "getAttrs on closed file bad", + self.shouldFailWithSFTPError(sftp.FX_BAD_MESSAGE, b"getAttrs on closed file bad", rf.getAttrs)) d2.addCallback(lambda ign: rf.close()) # should be no-op @@ -435,25 +435,25 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas d.addCallback(lambda ign: self.handler.openFile(gross, sftp.FXF_READ, {})) def _read_gross(rf): d2 = rf.readChunk(0, 10) - 
d2.addCallback(lambda data: self.failUnlessReallyEqual(data, "0123456789")) + d2.addCallback(lambda data: self.failUnlessReallyEqual(data, b"0123456789")) d2.addCallback(lambda ign: rf.readChunk(2, 6)) - d2.addCallback(lambda data: self.failUnlessReallyEqual(data, "234567")) + d2.addCallback(lambda data: self.failUnlessReallyEqual(data, b"234567")) d2.addCallback(lambda ign: rf.readChunk(1, 0)) - d2.addCallback(lambda data: self.failUnlessReallyEqual(data, "")) + d2.addCallback(lambda data: self.failUnlessReallyEqual(data, b"")) d2.addCallback(lambda ign: rf.readChunk(1008, 4)) # read that starts before EOF is OK - d2.addCallback(lambda data: self.failUnlessReallyEqual(data, "89")) + d2.addCallback(lambda data: self.failUnlessReallyEqual(data, b"89")) d2.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_EOF, "readChunk starting at EOF (0-byte)", + self.shouldFailWithSFTPError(sftp.FX_EOF, b"readChunk starting at EOF (0-byte)", rf.readChunk, 1010, 0)) d2.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_EOF, "readChunk starting at EOF", + self.shouldFailWithSFTPError(sftp.FX_EOF, b"readChunk starting at EOF", rf.readChunk, 1010, 1)) d2.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_EOF, "readChunk starting after EOF", + self.shouldFailWithSFTPError(sftp.FX_EOF, b"readChunk starting after EOF", rf.readChunk, 1011, 1)) d2.addCallback(lambda ign: rf.getAttrs()) @@ -463,19 +463,19 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas d2.addCallback(lambda attrs: self._compareAttributes(attrs, {'permissions': S_IFREG | 0o666, 'size': 1010})) d2.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "writeChunk on read-only handle denied", - rf.writeChunk, 0, "a")) + self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, b"writeChunk on read-only handle denied", + rf.writeChunk, 0, b"a")) d2.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, 
"setAttrs on read-only handle denied", + self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, b"setAttrs on read-only handle denied", rf.setAttrs, {})) d2.addCallback(lambda ign: rf.close()) d2.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_BAD_MESSAGE, "readChunk on closed file", + self.shouldFailWithSFTPError(sftp.FX_BAD_MESSAGE, b"readChunk on closed file", rf.readChunk, 0, 1)) d2.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_BAD_MESSAGE, "getAttrs on closed file", + self.shouldFailWithSFTPError(sftp.FX_BAD_MESSAGE, b"getAttrs on closed file", rf.getAttrs)) d2.addCallback(lambda ign: rf.close()) # should be no-op @@ -483,37 +483,37 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas d.addCallback(_read_gross) # reading an existing small file via uri/ should succeed - d.addCallback(lambda ign: self.handler.openFile("uri/"+self.small_uri, sftp.FXF_READ, {})) + d.addCallback(lambda ign: self.handler.openFile(b"uri/"+self.small_uri, sftp.FXF_READ, {})) def _read_small_uri(rf): d2 = rf.readChunk(0, 10) - d2.addCallback(lambda data: self.failUnlessReallyEqual(data, "0123456789")) + d2.addCallback(lambda data: self.failUnlessReallyEqual(data, b"0123456789")) d2.addCallback(lambda ign: rf.close()) return d2 d.addCallback(_read_small_uri) # repeat for a large file - d.addCallback(lambda ign: self.handler.openFile("uri/"+self.gross_uri, sftp.FXF_READ, {})) + d.addCallback(lambda ign: self.handler.openFile(b"uri/"+self.gross_uri, sftp.FXF_READ, {})) def _read_gross_uri(rf): d2 = rf.readChunk(0, 10) - d2.addCallback(lambda data: self.failUnlessReallyEqual(data, "0123456789")) + d2.addCallback(lambda data: self.failUnlessReallyEqual(data, b"0123456789")) d2.addCallback(lambda ign: rf.close()) return d2 d.addCallback(_read_gross_uri) # repeat for a mutable file - d.addCallback(lambda ign: self.handler.openFile("uri/"+self.mutable_uri, sftp.FXF_READ, {})) + d.addCallback(lambda ign: 
self.handler.openFile(b"uri/"+self.mutable_uri, sftp.FXF_READ, {})) def _read_mutable_uri(rf): d2 = rf.readChunk(0, 100) - d2.addCallback(lambda data: self.failUnlessReallyEqual(data, "mutable file contents")) + d2.addCallback(lambda data: self.failUnlessReallyEqual(data, b"mutable file contents")) d2.addCallback(lambda ign: rf.close()) return d2 d.addCallback(_read_mutable_uri) # repeat for a file within a directory referenced by URI - d.addCallback(lambda ign: self.handler.openFile("uri/"+self.tiny_lit_dir_uri+"/short", sftp.FXF_READ, {})) + d.addCallback(lambda ign: self.handler.openFile(b"uri/"+self.tiny_lit_dir_uri+b"/short", sftp.FXF_READ, {})) def _read_short(rf): d2 = rf.readChunk(0, 100) - d2.addCallback(lambda data: self.failUnlessReallyEqual(data, "The end.")) + d2.addCallback(lambda data: self.failUnlessReallyEqual(data, b"The end.")) d2.addCallback(lambda ign: rf.close()) return d2 d.addCallback(_read_short) @@ -521,12 +521,12 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas # check that failed downloads cause failed reads. Note that this # trashes the grid (by deleting all shares), so this must be at the # end of the test function. - d.addCallback(lambda ign: self.handler.openFile("uri/"+self.gross_uri, sftp.FXF_READ, {})) + d.addCallback(lambda ign: self.handler.openFile(b"uri/"+self.gross_uri, sftp.FXF_READ, {})) def _read_broken(rf): d2 = defer.succeed(None) d2.addCallback(lambda ign: self.g.nuke_from_orbit()) d2.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_FAILURE, "read broken", + self.shouldFailWithSFTPError(sftp.FX_FAILURE, b"read broken", rf.readChunk, 0, 100)) # close shouldn't fail d2.addCallback(lambda ign: rf.close()) From 2998057d913522d6102ec13c8c353d8e08138084 Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Tue, 5 Jan 2021 16:47:34 -0500 Subject: [PATCH 096/213] Don't need byte strings for debug! 
--- src/allmydata/test/test_sftp.py | 130 ++++++++++++++++---------------- 1 file changed, 65 insertions(+), 65 deletions(-) diff --git a/src/allmydata/test/test_sftp.py b/src/allmydata/test/test_sftp.py index 2270ce025..68be1612d 100644 --- a/src/allmydata/test/test_sftp.py +++ b/src/allmydata/test/test_sftp.py @@ -42,7 +42,7 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas def shouldFailWithSFTPError(self, expected_code, which, callable, *args, **kwargs): assert isinstance(expected_code, int), repr(expected_code) - assert isinstance(which, bytes), repr(which) + assert isinstance(which, str), repr(which) s = traceback.format_stack() d = defer.maybeDeferred(callable, *args, **kwargs) def _done(res): @@ -198,10 +198,10 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas d.addCallback(_check) d.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, b"_path_from_string invalid UTF-8", + self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "_path_from_string invalid UTF-8", self.handler._path_from_string, b"\xFF")) d.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, b"realPath invalid UTF-8", + self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "realPath invalid UTF-8", self.handler.realPath, b"\xFF")) return d @@ -211,29 +211,29 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas d = defer.succeed(None) d.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_FAILURE, b"_convert_error SFTPError", + self.shouldFailWithSFTPError(sftp.FX_FAILURE, "_convert_error SFTPError", sftpd._convert_error, Failure(sftp.SFTPError(sftp.FX_FAILURE, "foo")), "request")) d.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, b"_convert_error NoSuchChildError", + self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "_convert_error NoSuchChildError", sftpd._convert_error, Failure(NoSuchChildError("foo")), 
"request")) d.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_FAILURE, b"_convert_error ExistingChildError", + self.shouldFailWithSFTPError(sftp.FX_FAILURE, "_convert_error ExistingChildError", sftpd._convert_error, Failure(ExistingChildError("foo")), "request")) d.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, b"_convert_error NotWriteableError", + self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "_convert_error NotWriteableError", sftpd._convert_error, Failure(NotWriteableError("foo")), "request")) d.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_OP_UNSUPPORTED, b"_convert_error NotImplementedError", + self.shouldFailWithSFTPError(sftp.FX_OP_UNSUPPORTED, "_convert_error NotImplementedError", sftpd._convert_error, Failure(NotImplementedError("foo")), "request")) d.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_EOF, b"_convert_error EOFError", + self.shouldFailWithSFTPError(sftp.FX_EOF, "_convert_error EOFError", sftpd._convert_error, Failure(EOFError("foo")), "request")) d.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_EOF, b"_convert_error defer.FirstError", + self.shouldFailWithSFTPError(sftp.FX_EOF, "_convert_error defer.FirstError", sftpd._convert_error, Failure(defer.FirstError( Failure(sftp.SFTPError(sftp.FX_EOF, "foo")), 0)), "request")) d.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_FAILURE, b"_convert_error AssertionError", + self.shouldFailWithSFTPError(sftp.FX_FAILURE, "_convert_error AssertionError", sftpd._convert_error, Failure(AssertionError("foo")), "request")) return d @@ -242,10 +242,10 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas d = self._set_up("not_implemented") d.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_OP_UNSUPPORTED, b"readLink link", + self.shouldFailWithSFTPError(sftp.FX_OP_UNSUPPORTED, "readLink link", self.handler.readLink, b"link")) d.addCallback(lambda 
ign: - self.shouldFailWithSFTPError(sftp.FX_OP_UNSUPPORTED, b"makeLink link file", + self.shouldFailWithSFTPError(sftp.FX_OP_UNSUPPORTED, "makeLink link file", self.handler.makeLink, b"link", b"file")) return d @@ -276,16 +276,16 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas d.addCallback(lambda ign: self._set_up_tree()) d.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, b"openDirectory small", + self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "openDirectory small", self.handler.openDirectory, b"small")) d.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, b"openDirectory unknown", + self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "openDirectory unknown", self.handler.openDirectory, b"unknown")) d.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, b"openDirectory nodir", + self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "openDirectory nodir", self.handler.openDirectory, b"nodir")) d.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, b"openDirectory nodir/nodir", + self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "openDirectory nodir/nodir", self.handler.openDirectory, b"nodir/nodir")) gross = u"gro\u00DF".encode("utf-8") @@ -333,7 +333,7 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas d.addCallback(lambda attrs: self._compareAttributes(attrs, {'permissions': S_IFREG | 0o666, 'size': 10})) d.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_OP_UNSUPPORTED, b"setAttrs size", + self.shouldFailWithSFTPError(sftp.FX_OP_UNSUPPORTED, "setAttrs size", self.handler.setAttrs, b"small", {'size': 0})) d.addCallback(lambda ign: self.failUnlessEqual(sftpd.all_heisenfiles, {})) @@ -345,38 +345,38 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas d.addCallback(lambda ign: self._set_up_tree()) d.addCallback(lambda ign: - 
self.shouldFailWithSFTPError(sftp.FX_BAD_MESSAGE, b"openFile small 0 bad", + self.shouldFailWithSFTPError(sftp.FX_BAD_MESSAGE, "openFile small 0 bad", self.handler.openFile, b"small", 0, {})) # attempting to open a non-existent file should fail d.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, b"openFile nofile READ nosuch", + self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "openFile nofile READ nosuch", self.handler.openFile, b"nofile", sftp.FXF_READ, {})) d.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, b"openFile nodir/file READ nosuch", + self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "openFile nodir/file READ nosuch", self.handler.openFile, b"nodir/file", sftp.FXF_READ, {})) d.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, b"openFile unknown READ denied", + self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "openFile unknown READ denied", self.handler.openFile, b"unknown", sftp.FXF_READ, {})) d.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, b"openFile unknown/file READ denied", + self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "openFile unknown/file READ denied", self.handler.openFile, b"unknown/file", sftp.FXF_READ, {})) d.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, b"openFile tiny_lit_dir READ denied", + self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "openFile tiny_lit_dir READ denied", self.handler.openFile, b"tiny_lit_dir", sftp.FXF_READ, {})) d.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, b"openFile unknown uri READ denied", + self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "openFile unknown uri READ denied", self.handler.openFile, b"uri/"+self.unknown_uri, sftp.FXF_READ, {})) d.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, b"openFile tiny_lit_dir uri READ denied", + 
self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "openFile tiny_lit_dir uri READ denied", self.handler.openFile, b"uri/"+self.tiny_lit_dir_uri, sftp.FXF_READ, {})) # FIXME: should be FX_NO_SUCH_FILE? d.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, b"openFile noexist uri READ denied", + self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "openFile noexist uri READ denied", self.handler.openFile, b"uri/URI:noexist", sftp.FXF_READ, {})) d.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, b"openFile invalid UTF-8 uri READ denied", + self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "openFile invalid UTF-8 uri READ denied", self.handler.openFile, b"uri/URI:\xFF", sftp.FXF_READ, {})) # reading an existing file should succeed @@ -395,13 +395,13 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas d2.addCallback(lambda data: self.failUnlessReallyEqual(data, b"89")) d2.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_EOF, b"readChunk starting at EOF (0-byte)", + self.shouldFailWithSFTPError(sftp.FX_EOF, "readChunk starting at EOF (0-byte)", rf.readChunk, 10, 0)) d2.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_EOF, b"readChunk starting at EOF", + self.shouldFailWithSFTPError(sftp.FX_EOF, "readChunk starting at EOF", rf.readChunk, 10, 1)) d2.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_EOF, b"readChunk starting after EOF", + self.shouldFailWithSFTPError(sftp.FX_EOF, "readChunk starting after EOF", rf.readChunk, 11, 1)) d2.addCallback(lambda ign: rf.getAttrs()) @@ -411,19 +411,19 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas d2.addCallback(lambda attrs: self._compareAttributes(attrs, {'permissions': S_IFREG | 0o666, 'size': 10})) d2.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, b"writeChunk on read-only handle denied", + 
self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "writeChunk on read-only handle denied", rf.writeChunk, 0, b"a")) d2.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, b"setAttrs on read-only handle denied", + self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "setAttrs on read-only handle denied", rf.setAttrs, {})) d2.addCallback(lambda ign: rf.close()) d2.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_BAD_MESSAGE, b"readChunk on closed file bad", + self.shouldFailWithSFTPError(sftp.FX_BAD_MESSAGE, "readChunk on closed file bad", rf.readChunk, 0, 1)) d2.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_BAD_MESSAGE, b"getAttrs on closed file bad", + self.shouldFailWithSFTPError(sftp.FX_BAD_MESSAGE, "getAttrs on closed file bad", rf.getAttrs)) d2.addCallback(lambda ign: rf.close()) # should be no-op @@ -447,13 +447,13 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas d2.addCallback(lambda data: self.failUnlessReallyEqual(data, b"89")) d2.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_EOF, b"readChunk starting at EOF (0-byte)", + self.shouldFailWithSFTPError(sftp.FX_EOF, "readChunk starting at EOF (0-byte)", rf.readChunk, 1010, 0)) d2.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_EOF, b"readChunk starting at EOF", + self.shouldFailWithSFTPError(sftp.FX_EOF, "readChunk starting at EOF", rf.readChunk, 1010, 1)) d2.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_EOF, b"readChunk starting after EOF", + self.shouldFailWithSFTPError(sftp.FX_EOF, "readChunk starting after EOF", rf.readChunk, 1011, 1)) d2.addCallback(lambda ign: rf.getAttrs()) @@ -463,19 +463,19 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas d2.addCallback(lambda attrs: self._compareAttributes(attrs, {'permissions': S_IFREG | 0o666, 'size': 1010})) d2.addCallback(lambda ign: - 
self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, b"writeChunk on read-only handle denied", + self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "writeChunk on read-only handle denied", rf.writeChunk, 0, b"a")) d2.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, b"setAttrs on read-only handle denied", + self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "setAttrs on read-only handle denied", rf.setAttrs, {})) d2.addCallback(lambda ign: rf.close()) d2.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_BAD_MESSAGE, b"readChunk on closed file", + self.shouldFailWithSFTPError(sftp.FX_BAD_MESSAGE, "readChunk on closed file", rf.readChunk, 0, 1)) d2.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_BAD_MESSAGE, b"getAttrs on closed file", + self.shouldFailWithSFTPError(sftp.FX_BAD_MESSAGE, "getAttrs on closed file", rf.getAttrs)) d2.addCallback(lambda ign: rf.close()) # should be no-op @@ -526,7 +526,7 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas d2 = defer.succeed(None) d2.addCallback(lambda ign: self.g.nuke_from_orbit()) d2.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_FAILURE, b"read broken", + self.shouldFailWithSFTPError(sftp.FX_FAILURE, "read broken", rf.readChunk, 0, 100)) # close shouldn't fail d2.addCallback(lambda ign: rf.close()) @@ -545,7 +545,7 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas small = upload.Data(b"0123456789"*10, None) d = self._set_up("openFile_read_error") d.addCallback(lambda ign: self.root.add_file(u"small", small)) - d.addCallback(lambda n: self.handler.openFile("/uri/"+n.get_uri(), sftp.FXF_READ, {})) + d.addCallback(lambda n: self.handler.openFile(b"/uri/"+n.get_uri(), sftp.FXF_READ, {})) def _read_broken(rf): d2 = defer.succeed(None) d2.addCallback(lambda ign: self.g.nuke_from_orbit()) @@ -569,69 +569,69 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, 
unittest.TestCas # '' is an invalid filename d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "openFile '' WRITE|CREAT|TRUNC nosuch", - self.handler.openFile, "", sftp.FXF_WRITE | sftp.FXF_CREAT | sftp.FXF_TRUNC, {})) + self.handler.openFile, b"", sftp.FXF_WRITE | sftp.FXF_CREAT | sftp.FXF_TRUNC, {})) # TRUNC is not valid without CREAT if the file does not already exist d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "openFile newfile WRITE|TRUNC nosuch", - self.handler.openFile, "newfile", sftp.FXF_WRITE | sftp.FXF_TRUNC, {})) + self.handler.openFile, b"newfile", sftp.FXF_WRITE | sftp.FXF_TRUNC, {})) # EXCL is not valid without CREAT d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_BAD_MESSAGE, "openFile small WRITE|EXCL bad", - self.handler.openFile, "small", sftp.FXF_WRITE | sftp.FXF_EXCL, {})) + self.handler.openFile, b"small", sftp.FXF_WRITE | sftp.FXF_EXCL, {})) # cannot write to an existing directory d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "openFile tiny_lit_dir WRITE denied", - self.handler.openFile, "tiny_lit_dir", sftp.FXF_WRITE, {})) + self.handler.openFile, b"tiny_lit_dir", sftp.FXF_WRITE, {})) # cannot write to an existing unknown d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "openFile unknown WRITE denied", - self.handler.openFile, "unknown", sftp.FXF_WRITE, {})) + self.handler.openFile, b"unknown", sftp.FXF_WRITE, {})) # cannot create a child of an unknown d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "openFile unknown/newfile WRITE|CREAT denied", - self.handler.openFile, "unknown/newfile", + self.handler.openFile, b"unknown/newfile", sftp.FXF_WRITE | sftp.FXF_CREAT, {})) # cannot write to a new file in an immutable directory d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "openFile tiny_lit_dir/newfile WRITE|CREAT|TRUNC denied", - 
self.handler.openFile, "tiny_lit_dir/newfile", + self.handler.openFile, b"tiny_lit_dir/newfile", sftp.FXF_WRITE | sftp.FXF_CREAT | sftp.FXF_TRUNC, {})) # cannot write to an existing immutable file in an immutable directory (with or without CREAT and EXCL) d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "openFile tiny_lit_dir/short WRITE denied", - self.handler.openFile, "tiny_lit_dir/short", sftp.FXF_WRITE, {})) + self.handler.openFile, b"tiny_lit_dir/short", sftp.FXF_WRITE, {})) d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "openFile tiny_lit_dir/short WRITE|CREAT denied", - self.handler.openFile, "tiny_lit_dir/short", + self.handler.openFile, b"tiny_lit_dir/short", sftp.FXF_WRITE | sftp.FXF_CREAT, {})) # cannot write to a mutable file via a readonly cap (by path or uri) d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "openFile readonly WRITE denied", - self.handler.openFile, "readonly", sftp.FXF_WRITE, {})) + self.handler.openFile, b"readonly", sftp.FXF_WRITE, {})) d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "openFile readonly uri WRITE denied", - self.handler.openFile, "uri/"+self.readonly_uri, sftp.FXF_WRITE, {})) + self.handler.openFile, b"uri/"+self.readonly_uri, sftp.FXF_WRITE, {})) # cannot create a file with the EXCL flag if it already exists d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_FAILURE, "openFile small WRITE|CREAT|EXCL failure", - self.handler.openFile, "small", + self.handler.openFile, b"small", sftp.FXF_WRITE | sftp.FXF_CREAT | sftp.FXF_EXCL, {})) d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_FAILURE, "openFile mutable WRITE|CREAT|EXCL failure", - self.handler.openFile, "mutable", + self.handler.openFile, b"mutable", sftp.FXF_WRITE | sftp.FXF_CREAT | sftp.FXF_EXCL, {})) d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_FAILURE, "openFile mutable uri 
WRITE|CREAT|EXCL failure", - self.handler.openFile, "uri/"+self.mutable_uri, + self.handler.openFile, b"uri/"+self.mutable_uri, sftp.FXF_WRITE | sftp.FXF_CREAT | sftp.FXF_EXCL, {})) d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_FAILURE, "openFile tiny_lit_dir/short WRITE|CREAT|EXCL failure", @@ -1373,17 +1373,17 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas d.addCallback(_got_newchild) d.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, b"makeDirectory invalid UTF-8", + self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "makeDirectory invalid UTF-8", self.handler.makeDirectory, b"\xFF", {})) # should fail because there is an existing file "small" d.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_FAILURE, b"makeDirectory small", + self.shouldFailWithSFTPError(sftp.FX_FAILURE, "makeDirectory small", self.handler.makeDirectory, b"small", {})) # directories cannot be created read-only via SFTP d.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, b"makeDirectory newdir2 permissions:0444 denied", + self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "makeDirectory newdir2 permissions:0444 denied", self.handler.makeDirectory, b"newdir2", {'permissions': 0o444})) @@ -1471,17 +1471,17 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas d.addCallback(_check) d.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_OP_UNSUPPORTED, b"extendedRequest foo bar", + self.shouldFailWithSFTPError(sftp.FX_OP_UNSUPPORTED, "extendedRequest foo bar", self.handler.extendedRequest, b"foo", b"bar")) d.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_BAD_MESSAGE, b"extendedRequest posix-rename@openssh.com invalid 1", + self.shouldFailWithSFTPError(sftp.FX_BAD_MESSAGE, "extendedRequest posix-rename@openssh.com invalid 1", self.handler.extendedRequest, b'posix-rename@openssh.com', b'')) d.addCallback(lambda 
ign: - self.shouldFailWithSFTPError(sftp.FX_BAD_MESSAGE, b"extendedRequest posix-rename@openssh.com invalid 2", + self.shouldFailWithSFTPError(sftp.FX_BAD_MESSAGE, "extendedRequest posix-rename@openssh.com invalid 2", self.handler.extendedRequest, b'posix-rename@openssh.com', b'\x00\x00\x00\x01')) d.addCallback(lambda ign: - self.shouldFailWithSFTPError(sftp.FX_BAD_MESSAGE, b"extendedRequest posix-rename@openssh.com invalid 3", + self.shouldFailWithSFTPError(sftp.FX_BAD_MESSAGE, "extendedRequest posix-rename@openssh.com invalid 3", self.handler.extendedRequest, b'posix-rename@openssh.com', b'\x00\x00\x00\x01_\x00\x00\x00\x01')) return d From d82bcc5280eee1765739f24a29d9c8bad6373d06 Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Tue, 5 Jan 2021 16:59:54 -0500 Subject: [PATCH 097/213] Another passing test on Python 3. --- src/allmydata/frontends/sftpd.py | 8 +- src/allmydata/test/test_sftp.py | 164 +++++++++++++++---------------- 2 files changed, 86 insertions(+), 86 deletions(-) diff --git a/src/allmydata/frontends/sftpd.py b/src/allmydata/frontends/sftpd.py index c7ce3a99f..36db21f06 100644 --- a/src/allmydata/frontends/sftpd.py +++ b/src/allmydata/frontends/sftpd.py @@ -328,7 +328,7 @@ class OverwriteableFileConsumer(PrefixingLogMixin): if size < self.current_size or size < self.downloaded: self.f.truncate(size) if size > self.current_size: - self.overwrite(self.current_size, "\x00" * (size - self.current_size)) + self.overwrite(self.current_size, b"\x00" * (size - self.current_size)) self.current_size = size # make the invariant self.download_size <= self.current_size be true again @@ -436,7 +436,7 @@ class OverwriteableFileConsumer(PrefixingLogMixin): # the gap between the current EOF and the offset. 
self.f.seek(self.current_size) - self.f.write("\x00" * (offset - self.current_size)) + self.f.write(b"\x00" * (offset - self.current_size)) start = self.current_size else: self.f.seek(offset) @@ -1220,7 +1220,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): def _remove_heisenfile(self, userpath, parent, childname, file_to_remove): if noisy: self.log("._remove_heisenfile(%r, %r, %r, %r)" % (userpath, parent, childname, file_to_remove), level=NOISY) - _assert(isinstance(userpath, bytes) and isinstance(childname, (bytes, type(None))), + _assert(isinstance(userpath, bytes) and isinstance(childname, (unicode, type(None))), userpath=userpath, childname=childname) direntry = _direntry_for(parent, childname) @@ -1411,7 +1411,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): # 'overwrite=False' ensures failure if the link already exists. # FIXME: should use a single call to set_uri and return (child, metadata) (#1035) - zero_length_lit = "URI:LIT:" + zero_length_lit = b"URI:LIT:" if noisy: self.log("%r.set_uri(%r, None, readcap=%r, overwrite=False)" % (parent, zero_length_lit, childname), level=NOISY) d3.addCallback(lambda ign: parent.set_uri(childname, None, readcap=zero_length_lit, diff --git a/src/allmydata/test/test_sftp.py b/src/allmydata/test/test_sftp.py index 68be1612d..a147f21d0 100644 --- a/src/allmydata/test/test_sftp.py +++ b/src/allmydata/test/test_sftp.py @@ -635,40 +635,40 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas sftp.FXF_WRITE | sftp.FXF_CREAT | sftp.FXF_EXCL, {})) d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_FAILURE, "openFile tiny_lit_dir/short WRITE|CREAT|EXCL failure", - self.handler.openFile, "tiny_lit_dir/short", + self.handler.openFile, b"tiny_lit_dir/short", sftp.FXF_WRITE | sftp.FXF_CREAT | sftp.FXF_EXCL, {})) # cannot write to an immutable file if we don't have its parent (with or without CREAT, TRUNC, or EXCL) d.addCallback(lambda ign: 
self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "openFile small uri WRITE denied", - self.handler.openFile, "uri/"+self.small_uri, sftp.FXF_WRITE, {})) + self.handler.openFile, b"uri/"+self.small_uri, sftp.FXF_WRITE, {})) d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "openFile small uri WRITE|CREAT denied", - self.handler.openFile, "uri/"+self.small_uri, + self.handler.openFile, b"uri/"+self.small_uri, sftp.FXF_WRITE | sftp.FXF_CREAT, {})) d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "openFile small uri WRITE|CREAT|TRUNC denied", - self.handler.openFile, "uri/"+self.small_uri, + self.handler.openFile, b"uri/"+self.small_uri, sftp.FXF_WRITE | sftp.FXF_CREAT | sftp.FXF_TRUNC, {})) d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "openFile small uri WRITE|CREAT|EXCL denied", - self.handler.openFile, "uri/"+self.small_uri, + self.handler.openFile, b"uri/"+self.small_uri, sftp.FXF_WRITE | sftp.FXF_CREAT | sftp.FXF_EXCL, {})) # test creating a new file with truncation and extension d.addCallback(lambda ign: - self.handler.openFile("newfile", sftp.FXF_WRITE | sftp.FXF_CREAT | sftp.FXF_TRUNC, {})) + self.handler.openFile(b"newfile", sftp.FXF_WRITE | sftp.FXF_CREAT | sftp.FXF_TRUNC, {})) def _write(wf): - d2 = wf.writeChunk(0, "0123456789") + d2 = wf.writeChunk(0, b"0123456789") d2.addCallback(lambda res: self.failUnlessReallyEqual(res, None)) - d2.addCallback(lambda ign: wf.writeChunk(8, "0123")) - d2.addCallback(lambda ign: wf.writeChunk(13, "abc")) + d2.addCallback(lambda ign: wf.writeChunk(8, b"0123")) + d2.addCallback(lambda ign: wf.writeChunk(13, b"abc")) d2.addCallback(lambda ign: wf.getAttrs()) d2.addCallback(lambda attrs: self._compareAttributes(attrs, {'permissions': S_IFREG | 0o666, 'size': 16})) - d2.addCallback(lambda ign: self.handler.getAttrs("newfile", followLinks=0)) + d2.addCallback(lambda ign: self.handler.getAttrs(b"newfile", followLinks=0)) 
d2.addCallback(lambda attrs: self._compareAttributes(attrs, {'permissions': S_IFREG | 0o666, 'size': 16})) d2.addCallback(lambda ign: wf.setAttrs({})) @@ -688,7 +688,7 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas d2.addCallback(lambda ign: wf.setAttrs({'size': 17})) d2.addCallback(lambda ign: wf.getAttrs()) d2.addCallback(lambda attrs: self.failUnlessReallyEqual(attrs['size'], 17)) - d2.addCallback(lambda ign: self.handler.getAttrs("newfile", followLinks=0)) + d2.addCallback(lambda ign: self.handler.getAttrs(b"newfile", followLinks=0)) d2.addCallback(lambda attrs: self.failUnlessReallyEqual(attrs['size'], 17)) d2.addCallback(lambda ign: @@ -699,7 +699,7 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas d2.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_BAD_MESSAGE, "writeChunk on closed file bad", - wf.writeChunk, 0, "a")) + wf.writeChunk, 0, b"a")) d2.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_BAD_MESSAGE, "setAttrs on closed file bad", wf.setAttrs, {'size': 0})) @@ -709,77 +709,77 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas d.addCallback(_write) d.addCallback(lambda ign: self.root.get(u"newfile")) d.addCallback(lambda node: download_to_data(node)) - d.addCallback(lambda data: self.failUnlessReallyEqual(data, "012345670123\x00a\x00\x00\x00")) + d.addCallback(lambda data: self.failUnlessReallyEqual(data, b"012345670123\x00a\x00\x00\x00")) # test APPEND flag, and also replacing an existing file ("newfile" created by the previous test) d.addCallback(lambda ign: - self.handler.openFile("newfile", sftp.FXF_WRITE | sftp.FXF_CREAT | + self.handler.openFile(b"newfile", sftp.FXF_WRITE | sftp.FXF_CREAT | sftp.FXF_TRUNC | sftp.FXF_APPEND, {})) def _write_append(wf): - d2 = wf.writeChunk(0, "0123456789") - d2.addCallback(lambda ign: wf.writeChunk(8, "0123")) + d2 = wf.writeChunk(0, b"0123456789") + d2.addCallback(lambda ign: 
wf.writeChunk(8, b"0123")) d2.addCallback(lambda ign: wf.setAttrs({'size': 17})) d2.addCallback(lambda ign: wf.getAttrs()) d2.addCallback(lambda attrs: self.failUnlessReallyEqual(attrs['size'], 17)) - d2.addCallback(lambda ign: wf.writeChunk(0, "z")) + d2.addCallback(lambda ign: wf.writeChunk(0, b"z")) d2.addCallback(lambda ign: wf.close()) return d2 d.addCallback(_write_append) d.addCallback(lambda ign: self.root.get(u"newfile")) d.addCallback(lambda node: download_to_data(node)) - d.addCallback(lambda data: self.failUnlessReallyEqual(data, "01234567890123\x00\x00\x00z")) + d.addCallback(lambda data: self.failUnlessReallyEqual(data, b"01234567890123\x00\x00\x00z")) # test WRITE | TRUNC without CREAT, when the file already exists # This is invalid according to section 6.3 of the SFTP spec, but required for interoperability, # since POSIX does allow O_WRONLY | O_TRUNC. d.addCallback(lambda ign: - self.handler.openFile("newfile", sftp.FXF_WRITE | sftp.FXF_TRUNC, {})) + self.handler.openFile(b"newfile", sftp.FXF_WRITE | sftp.FXF_TRUNC, {})) def _write_trunc(wf): - d2 = wf.writeChunk(0, "01234") + d2 = wf.writeChunk(0, b"01234") d2.addCallback(lambda ign: wf.close()) return d2 d.addCallback(_write_trunc) d.addCallback(lambda ign: self.root.get(u"newfile")) d.addCallback(lambda node: download_to_data(node)) - d.addCallback(lambda data: self.failUnlessReallyEqual(data, "01234")) + d.addCallback(lambda data: self.failUnlessReallyEqual(data, b"01234")) # test WRITE | TRUNC with permissions: 0 d.addCallback(lambda ign: - self.handler.openFile("newfile", sftp.FXF_WRITE | sftp.FXF_TRUNC, {'permissions': 0})) + self.handler.openFile(b"newfile", sftp.FXF_WRITE | sftp.FXF_TRUNC, {'permissions': 0})) d.addCallback(_write_trunc) d.addCallback(lambda ign: self.root.get(u"newfile")) d.addCallback(lambda node: download_to_data(node)) - d.addCallback(lambda data: self.failUnlessReallyEqual(data, "01234")) + d.addCallback(lambda data: self.failUnlessReallyEqual(data, b"01234")) 
d.addCallback(lambda ign: self.root.get_metadata_for(u"newfile")) d.addCallback(lambda metadata: self.failIf(metadata.get('no-write', False), metadata)) # test EXCL flag d.addCallback(lambda ign: - self.handler.openFile("excl", sftp.FXF_WRITE | sftp.FXF_CREAT | + self.handler.openFile(b"excl", sftp.FXF_WRITE | sftp.FXF_CREAT | sftp.FXF_TRUNC | sftp.FXF_EXCL, {})) def _write_excl(wf): d2 = self.root.get(u"excl") d2.addCallback(lambda node: download_to_data(node)) - d2.addCallback(lambda data: self.failUnlessReallyEqual(data, "")) + d2.addCallback(lambda data: self.failUnlessReallyEqual(data, b"")) - d2.addCallback(lambda ign: wf.writeChunk(0, "0123456789")) + d2.addCallback(lambda ign: wf.writeChunk(0, b"0123456789")) d2.addCallback(lambda ign: wf.close()) return d2 d.addCallback(_write_excl) d.addCallback(lambda ign: self.root.get(u"excl")) d.addCallback(lambda node: download_to_data(node)) - d.addCallback(lambda data: self.failUnlessReallyEqual(data, "0123456789")) + d.addCallback(lambda data: self.failUnlessReallyEqual(data, b"0123456789")) # test that writing a zero-length file with EXCL only updates the directory once d.addCallback(lambda ign: - self.handler.openFile("zerolength", sftp.FXF_WRITE | sftp.FXF_CREAT | + self.handler.openFile(b"zerolength", sftp.FXF_WRITE | sftp.FXF_CREAT | sftp.FXF_EXCL, {})) def _write_excl_zerolength(wf): d2 = self.root.get(u"zerolength") d2.addCallback(lambda node: download_to_data(node)) - d2.addCallback(lambda data: self.failUnlessReallyEqual(data, "")) + d2.addCallback(lambda data: self.failUnlessReallyEqual(data, b"")) # FIXME: no API to get the best version number exists (fix as part of #993) """ @@ -796,84 +796,84 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas d.addCallback(_write_excl_zerolength) d.addCallback(lambda ign: self.root.get(u"zerolength")) d.addCallback(lambda node: download_to_data(node)) - d.addCallback(lambda data: self.failUnlessReallyEqual(data, "")) + 
d.addCallback(lambda data: self.failUnlessReallyEqual(data, b"")) # test WRITE | CREAT | EXCL | APPEND d.addCallback(lambda ign: - self.handler.openFile("exclappend", sftp.FXF_WRITE | sftp.FXF_CREAT | + self.handler.openFile(b"exclappend", sftp.FXF_WRITE | sftp.FXF_CREAT | sftp.FXF_EXCL | sftp.FXF_APPEND, {})) def _write_excl_append(wf): d2 = self.root.get(u"exclappend") d2.addCallback(lambda node: download_to_data(node)) - d2.addCallback(lambda data: self.failUnlessReallyEqual(data, "")) + d2.addCallback(lambda data: self.failUnlessReallyEqual(data, b"")) - d2.addCallback(lambda ign: wf.writeChunk(10, "0123456789")) - d2.addCallback(lambda ign: wf.writeChunk(5, "01234")) + d2.addCallback(lambda ign: wf.writeChunk(10, b"0123456789")) + d2.addCallback(lambda ign: wf.writeChunk(5, b"01234")) d2.addCallback(lambda ign: wf.close()) return d2 d.addCallback(_write_excl_append) d.addCallback(lambda ign: self.root.get(u"exclappend")) d.addCallback(lambda node: download_to_data(node)) - d.addCallback(lambda data: self.failUnlessReallyEqual(data, "012345678901234")) + d.addCallback(lambda data: self.failUnlessReallyEqual(data, b"012345678901234")) # test WRITE | CREAT | APPEND when the file does not already exist d.addCallback(lambda ign: - self.handler.openFile("creatappend", sftp.FXF_WRITE | sftp.FXF_CREAT | + self.handler.openFile(b"creatappend", sftp.FXF_WRITE | sftp.FXF_CREAT | sftp.FXF_APPEND, {})) def _write_creat_append_new(wf): - d2 = wf.writeChunk(10, "0123456789") - d2.addCallback(lambda ign: wf.writeChunk(5, "01234")) + d2 = wf.writeChunk(10, b"0123456789") + d2.addCallback(lambda ign: wf.writeChunk(5, b"01234")) d2.addCallback(lambda ign: wf.close()) return d2 d.addCallback(_write_creat_append_new) d.addCallback(lambda ign: self.root.get(u"creatappend")) d.addCallback(lambda node: download_to_data(node)) - d.addCallback(lambda data: self.failUnlessReallyEqual(data, "012345678901234")) + d.addCallback(lambda data: self.failUnlessReallyEqual(data, 
b"012345678901234")) # ... and when it does exist d.addCallback(lambda ign: - self.handler.openFile("creatappend", sftp.FXF_WRITE | sftp.FXF_CREAT | + self.handler.openFile(b"creatappend", sftp.FXF_WRITE | sftp.FXF_CREAT | sftp.FXF_APPEND, {})) def _write_creat_append_existing(wf): - d2 = wf.writeChunk(5, "01234") + d2 = wf.writeChunk(5, b"01234") d2.addCallback(lambda ign: wf.close()) return d2 d.addCallback(_write_creat_append_existing) d.addCallback(lambda ign: self.root.get(u"creatappend")) d.addCallback(lambda node: download_to_data(node)) - d.addCallback(lambda data: self.failUnlessReallyEqual(data, "01234567890123401234")) + d.addCallback(lambda data: self.failUnlessReallyEqual(data, b"01234567890123401234")) # test WRITE | CREAT without TRUNC, when the file does not already exist d.addCallback(lambda ign: - self.handler.openFile("newfile2", sftp.FXF_WRITE | sftp.FXF_CREAT, {})) + self.handler.openFile(b"newfile2", sftp.FXF_WRITE | sftp.FXF_CREAT, {})) def _write_creat_new(wf): - d2 = wf.writeChunk(0, "0123456789") + d2 = wf.writeChunk(0, b"0123456789") d2.addCallback(lambda ign: wf.close()) return d2 d.addCallback(_write_creat_new) d.addCallback(lambda ign: self.root.get(u"newfile2")) d.addCallback(lambda node: download_to_data(node)) - d.addCallback(lambda data: self.failUnlessReallyEqual(data, "0123456789")) + d.addCallback(lambda data: self.failUnlessReallyEqual(data, b"0123456789")) # ... 
and when it does exist d.addCallback(lambda ign: - self.handler.openFile("newfile2", sftp.FXF_WRITE | sftp.FXF_CREAT, {})) + self.handler.openFile(b"newfile2", sftp.FXF_WRITE | sftp.FXF_CREAT, {})) def _write_creat_existing(wf): - d2 = wf.writeChunk(0, "abcde") + d2 = wf.writeChunk(0, b"abcde") d2.addCallback(lambda ign: wf.close()) return d2 d.addCallback(_write_creat_existing) d.addCallback(lambda ign: self.root.get(u"newfile2")) d.addCallback(lambda node: download_to_data(node)) - d.addCallback(lambda data: self.failUnlessReallyEqual(data, "abcde56789")) + d.addCallback(lambda data: self.failUnlessReallyEqual(data, b"abcde56789")) d.addCallback(lambda ign: self.root.set_node(u"mutable2", self.mutable)) # test writing to a mutable file d.addCallback(lambda ign: - self.handler.openFile("mutable", sftp.FXF_WRITE, {})) + self.handler.openFile(b"mutable", sftp.FXF_WRITE, {})) def _write_mutable(wf): - d2 = wf.writeChunk(8, "new!") + d2 = wf.writeChunk(8, b"new!") d2.addCallback(lambda ign: wf.close()) return d2 d.addCallback(_write_mutable) @@ -884,30 +884,30 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas self.failUnlessReallyEqual(node.get_uri(), self.mutable_uri) return node.download_best_version() d.addCallback(_check_same_file) - d.addCallback(lambda data: self.failUnlessReallyEqual(data, "mutable new! contents")) + d.addCallback(lambda data: self.failUnlessReallyEqual(data, b"mutable new! contents")) # ... and with permissions, which should be ignored d.addCallback(lambda ign: - self.handler.openFile("mutable", sftp.FXF_WRITE, {'permissions': 0})) + self.handler.openFile(b"mutable", sftp.FXF_WRITE, {'permissions': 0})) d.addCallback(_write_mutable) d.addCallback(lambda ign: self.root.get(u"mutable")) d.addCallback(_check_same_file) - d.addCallback(lambda data: self.failUnlessReallyEqual(data, "mutable new! contents")) + d.addCallback(lambda data: self.failUnlessReallyEqual(data, b"mutable new! contents")) # ... 
and with a setAttrs call that diminishes the parent link to read-only, first by path d.addCallback(lambda ign: - self.handler.openFile("mutable", sftp.FXF_WRITE, {})) + self.handler.openFile(b"mutable", sftp.FXF_WRITE, {})) def _write_mutable_setattr(wf): - d2 = wf.writeChunk(8, "read-only link from parent") + d2 = wf.writeChunk(8, b"read-only link from parent") - d2.addCallback(lambda ign: self.handler.setAttrs("mutable", {'permissions': 0o444})) + d2.addCallback(lambda ign: self.handler.setAttrs(b"mutable", {'permissions': 0o444})) d2.addCallback(lambda ign: self.root.get(u"mutable")) d2.addCallback(lambda node: self.failUnless(node.is_readonly())) d2.addCallback(lambda ign: wf.getAttrs()) d2.addCallback(lambda attrs: self.failUnlessReallyEqual(attrs['permissions'], S_IFREG | 0o666)) - d2.addCallback(lambda ign: self.handler.getAttrs("mutable", followLinks=0)) + d2.addCallback(lambda ign: self.handler.getAttrs(b"mutable", followLinks=0)) d2.addCallback(lambda attrs: self.failUnlessReallyEqual(attrs['permissions'], S_IFREG | 0o444)) d2.addCallback(lambda ign: wf.close()) @@ -921,13 +921,13 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas self.failUnlessReallyEqual(node.get_storage_index(), self.mutable.get_storage_index()) return node.download_best_version() d.addCallback(_check_readonly_file) - d.addCallback(lambda data: self.failUnlessReallyEqual(data, "mutable read-only link from parent")) + d.addCallback(lambda data: self.failUnlessReallyEqual(data, b"mutable read-only link from parent")) # ... 
and then by handle d.addCallback(lambda ign: - self.handler.openFile("mutable2", sftp.FXF_WRITE, {})) + self.handler.openFile(b"mutable2", sftp.FXF_WRITE, {})) def _write_mutable2_setattr(wf): - d2 = wf.writeChunk(7, "2") + d2 = wf.writeChunk(7, b"2") d2.addCallback(lambda ign: wf.setAttrs({'permissions': 0o444, 'size': 8})) @@ -937,7 +937,7 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas d2.addCallback(lambda ign: wf.getAttrs()) d2.addCallback(lambda attrs: self.failUnlessReallyEqual(attrs['permissions'], S_IFREG | 0o444)) - d2.addCallback(lambda ign: self.handler.getAttrs("mutable2", followLinks=0)) + d2.addCallback(lambda ign: self.handler.getAttrs(b"mutable2", followLinks=0)) d2.addCallback(lambda attrs: self.failUnlessReallyEqual(attrs['permissions'], S_IFREG | 0o666)) d2.addCallback(lambda ign: wf.close()) @@ -945,55 +945,55 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas d.addCallback(_write_mutable2_setattr) d.addCallback(lambda ign: self.root.get(u"mutable2")) d.addCallback(_check_readonly_file) # from above - d.addCallback(lambda data: self.failUnlessReallyEqual(data, "mutable2")) + d.addCallback(lambda data: self.failUnlessReallyEqual(data, b"mutable2")) # test READ | WRITE without CREAT or TRUNC d.addCallback(lambda ign: - self.handler.openFile("small", sftp.FXF_READ | sftp.FXF_WRITE, {})) + self.handler.openFile(b"small", sftp.FXF_READ | sftp.FXF_WRITE, {})) def _read_write(rwf): - d2 = rwf.writeChunk(8, "0123") + d2 = rwf.writeChunk(8, b"0123") # test immediate read starting after the old end-of-file d2.addCallback(lambda ign: rwf.readChunk(11, 1)) - d2.addCallback(lambda data: self.failUnlessReallyEqual(data, "3")) + d2.addCallback(lambda data: self.failUnlessReallyEqual(data, b"3")) d2.addCallback(lambda ign: rwf.readChunk(0, 100)) - d2.addCallback(lambda data: self.failUnlessReallyEqual(data, "012345670123")) + d2.addCallback(lambda data: self.failUnlessReallyEqual(data, 
b"012345670123")) d2.addCallback(lambda ign: rwf.close()) return d2 d.addCallback(_read_write) d.addCallback(lambda ign: self.root.get(u"small")) d.addCallback(lambda node: download_to_data(node)) - d.addCallback(lambda data: self.failUnlessReallyEqual(data, "012345670123")) + d.addCallback(lambda data: self.failUnlessReallyEqual(data, b"012345670123")) # test WRITE and rename while still open d.addCallback(lambda ign: - self.handler.openFile("small", sftp.FXF_WRITE, {})) + self.handler.openFile(b"small", sftp.FXF_WRITE, {})) def _write_rename(wf): - d2 = wf.writeChunk(0, "abcd") - d2.addCallback(lambda ign: self.handler.renameFile("small", "renamed")) - d2.addCallback(lambda ign: wf.writeChunk(4, "efgh")) + d2 = wf.writeChunk(0, b"abcd") + d2.addCallback(lambda ign: self.handler.renameFile(b"small", b"renamed")) + d2.addCallback(lambda ign: wf.writeChunk(4, b"efgh")) d2.addCallback(lambda ign: wf.close()) return d2 d.addCallback(_write_rename) d.addCallback(lambda ign: self.root.get(u"renamed")) d.addCallback(lambda node: download_to_data(node)) - d.addCallback(lambda data: self.failUnlessReallyEqual(data, "abcdefgh0123")) + d.addCallback(lambda data: self.failUnlessReallyEqual(data, b"abcdefgh0123")) d.addCallback(lambda ign: self.shouldFail(NoSuchChildError, "rename small while open", "small", self.root.get, u"small")) # test WRITE | CREAT | EXCL and rename while still open d.addCallback(lambda ign: - self.handler.openFile("newexcl", sftp.FXF_WRITE | sftp.FXF_CREAT | sftp.FXF_EXCL, {})) + self.handler.openFile(b"newexcl", sftp.FXF_WRITE | sftp.FXF_CREAT | sftp.FXF_EXCL, {})) def _write_creat_excl_rename(wf): - d2 = wf.writeChunk(0, "abcd") - d2.addCallback(lambda ign: self.handler.renameFile("newexcl", "renamedexcl")) - d2.addCallback(lambda ign: wf.writeChunk(4, "efgh")) + d2 = wf.writeChunk(0, b"abcd") + d2.addCallback(lambda ign: self.handler.renameFile(b"newexcl", b"renamedexcl")) + d2.addCallback(lambda ign: wf.writeChunk(4, b"efgh")) d2.addCallback(lambda 
ign: wf.close()) return d2 d.addCallback(_write_creat_excl_rename) d.addCallback(lambda ign: self.root.get(u"renamedexcl")) d.addCallback(lambda node: download_to_data(node)) - d.addCallback(lambda data: self.failUnlessReallyEqual(data, "abcdefgh")) + d.addCallback(lambda data: self.failUnlessReallyEqual(data, b"abcdefgh")) d.addCallback(lambda ign: self.shouldFail(NoSuchChildError, "rename newexcl while open", "newexcl", self.root.get, u"newexcl")) @@ -1002,21 +1002,21 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas def _open_and_rename_race(ign): slow_open = defer.Deferred() reactor.callLater(1, slow_open.callback, None) - d2 = self.handler.openFile("new", sftp.FXF_WRITE | sftp.FXF_CREAT, {}, delay=slow_open) + d2 = self.handler.openFile(b"new", sftp.FXF_WRITE | sftp.FXF_CREAT, {}, delay=slow_open) # deliberate race between openFile and renameFile - d3 = self.handler.renameFile("new", "new2") + d3 = self.handler.renameFile(b"new", b"new2") d3.addErrback(lambda err: self.fail("renameFile failed: %r" % (err,))) return d2 d.addCallback(_open_and_rename_race) def _write_rename_race(wf): - d2 = wf.writeChunk(0, "abcd") + d2 = wf.writeChunk(0, b"abcd") d2.addCallback(lambda ign: wf.close()) return d2 d.addCallback(_write_rename_race) d.addCallback(lambda ign: self.root.get(u"new2")) d.addCallback(lambda node: download_to_data(node)) - d.addCallback(lambda data: self.failUnlessReallyEqual(data, "abcd")) + d.addCallback(lambda data: self.failUnlessReallyEqual(data, b"abcd")) d.addCallback(lambda ign: self.shouldFail(NoSuchChildError, "rename new while open", "new", self.root.get, u"new")) @@ -1027,7 +1027,7 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas gross = u"gro\u00DF".encode("utf-8") d.addCallback(lambda ign: self.handler.openFile(gross, sftp.FXF_READ | sftp.FXF_WRITE, {})) def _read_write_broken(rwf): - d2 = rwf.writeChunk(0, "abcdefghij") + d2 = rwf.writeChunk(0, b"abcdefghij") 
d2.addCallback(lambda ign: self.g.nuke_from_orbit()) # reading should fail (reliably if we read past the written chunk) From f3d795d9a8ee83ba5c89d4144660a525d68d9ee8 Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Wed, 6 Jan 2021 09:31:15 -0500 Subject: [PATCH 098/213] More passing tests on Python 3. --- src/allmydata/test/test_sftp.py | 48 ++++++++++++++++----------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/src/allmydata/test/test_sftp.py b/src/allmydata/test/test_sftp.py index a147f21d0..3bbc3accf 100644 --- a/src/allmydata/test/test_sftp.py +++ b/src/allmydata/test/test_sftp.py @@ -1051,57 +1051,57 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "removeFile nofile", - self.handler.removeFile, "nofile")) + self.handler.removeFile, b"nofile")) d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "removeFile nofile", - self.handler.removeFile, "nofile")) + self.handler.removeFile, b"nofile")) d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "removeFile nodir/file", - self.handler.removeFile, "nodir/file")) + self.handler.removeFile, b"nodir/file")) d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "removefile ''", - self.handler.removeFile, "")) + self.handler.removeFile, b"")) # removing a directory should fail d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "removeFile tiny_lit_dir", - self.handler.removeFile, "tiny_lit_dir")) + self.handler.removeFile, b"tiny_lit_dir")) # removing a file should succeed d.addCallback(lambda ign: self.root.get(u"gro\u00DF")) d.addCallback(lambda ign: self.handler.removeFile(u"gro\u00DF".encode('utf-8'))) d.addCallback(lambda ign: - self.shouldFail(NoSuchChildError, "removeFile gross", "gro\\xdf", + self.shouldFail(NoSuchChildError, "removeFile gross", "gro", 
self.root.get, u"gro\u00DF")) # removing an unknown should succeed d.addCallback(lambda ign: self.root.get(u"unknown")) - d.addCallback(lambda ign: self.handler.removeFile("unknown")) + d.addCallback(lambda ign: self.handler.removeFile(b"unknown")) d.addCallback(lambda ign: self.shouldFail(NoSuchChildError, "removeFile unknown", "unknown", self.root.get, u"unknown")) # removing a link to an open file should not prevent it from being read - d.addCallback(lambda ign: self.handler.openFile("small", sftp.FXF_READ, {})) + d.addCallback(lambda ign: self.handler.openFile(b"small", sftp.FXF_READ, {})) def _remove_and_read_small(rf): - d2 = self.handler.removeFile("small") + d2 = self.handler.removeFile(b"small") d2.addCallback(lambda ign: self.shouldFail(NoSuchChildError, "removeFile small", "small", self.root.get, u"small")) d2.addCallback(lambda ign: rf.readChunk(0, 10)) - d2.addCallback(lambda data: self.failUnlessReallyEqual(data, "0123456789")) + d2.addCallback(lambda data: self.failUnlessReallyEqual(data, b"0123456789")) d2.addCallback(lambda ign: rf.close()) return d2 d.addCallback(_remove_and_read_small) # removing a link to a created file should prevent it from being created - d.addCallback(lambda ign: self.handler.openFile("tempfile", sftp.FXF_READ | sftp.FXF_WRITE | + d.addCallback(lambda ign: self.handler.openFile(b"tempfile", sftp.FXF_READ | sftp.FXF_WRITE | sftp.FXF_CREAT, {})) def _write_remove(rwf): - d2 = rwf.writeChunk(0, "0123456789") - d2.addCallback(lambda ign: self.handler.removeFile("tempfile")) + d2 = rwf.writeChunk(0, b"0123456789") + d2.addCallback(lambda ign: self.handler.removeFile(b"tempfile")) d2.addCallback(lambda ign: rwf.readChunk(0, 10)) - d2.addCallback(lambda data: self.failUnlessReallyEqual(data, "0123456789")) + d2.addCallback(lambda data: self.failUnlessReallyEqual(data, b"0123456789")) d2.addCallback(lambda ign: rwf.close()) return d2 d.addCallback(_write_remove) @@ -1110,14 +1110,14 @@ class Handler(GridTestMixin, ShouldFailMixin, 
ReallyEqualMixin, unittest.TestCas self.root.get, u"tempfile")) # ... even if the link is renamed while open - d.addCallback(lambda ign: self.handler.openFile("tempfile2", sftp.FXF_READ | sftp.FXF_WRITE | + d.addCallback(lambda ign: self.handler.openFile(b"tempfile2", sftp.FXF_READ | sftp.FXF_WRITE | sftp.FXF_CREAT, {})) def _write_rename_remove(rwf): - d2 = rwf.writeChunk(0, "0123456789") - d2.addCallback(lambda ign: self.handler.renameFile("tempfile2", "tempfile3")) - d2.addCallback(lambda ign: self.handler.removeFile("tempfile3")) + d2 = rwf.writeChunk(0, b"0123456789") + d2.addCallback(lambda ign: self.handler.renameFile(b"tempfile2", b"tempfile3")) + d2.addCallback(lambda ign: self.handler.removeFile(b"tempfile3")) d2.addCallback(lambda ign: rwf.readChunk(0, 10)) - d2.addCallback(lambda data: self.failUnlessReallyEqual(data, "0123456789")) + d2.addCallback(lambda data: self.failUnlessReallyEqual(data, b"0123456789")) d2.addCallback(lambda ign: rwf.close()) return d2 d.addCallback(_write_rename_remove) @@ -1138,13 +1138,13 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "removeDirectory nodir", - self.handler.removeDirectory, "nodir")) + self.handler.removeDirectory, b"nodir")) d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "removeDirectory nodir/nodir", - self.handler.removeDirectory, "nodir/nodir")) + self.handler.removeDirectory, b"nodir/nodir")) d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "removeDirectory ''", - self.handler.removeDirectory, "")) + self.handler.removeDirectory, b"")) # removing a file should fail d.addCallback(lambda ign: @@ -1153,14 +1153,14 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas # removing a directory should succeed d.addCallback(lambda ign: self.root.get(u"tiny_lit_dir")) - d.addCallback(lambda ign: 
self.handler.removeDirectory("tiny_lit_dir")) + d.addCallback(lambda ign: self.handler.removeDirectory(b"tiny_lit_dir")) d.addCallback(lambda ign: self.shouldFail(NoSuchChildError, "removeDirectory tiny_lit_dir", "tiny_lit_dir", self.root.get, u"tiny_lit_dir")) # removing an unknown should succeed d.addCallback(lambda ign: self.root.get(u"unknown")) - d.addCallback(lambda ign: self.handler.removeDirectory("unknown")) + d.addCallback(lambda ign: self.handler.removeDirectory(b"unknown")) d.addCallback(lambda err: self.shouldFail(NoSuchChildError, "removeDirectory unknown", "unknown", self.root.get, u"unknown")) From ebaf075966e75800fb4bf238e5082a0d06956c39 Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Wed, 6 Jan 2021 09:38:43 -0500 Subject: [PATCH 099/213] Another passing test on Python 3. --- src/allmydata/frontends/sftpd.py | 6 +++--- src/allmydata/test/test_sftp.py | 28 ++++++++++++++-------------- 2 files changed, 17 insertions(+), 17 deletions(-) diff --git a/src/allmydata/frontends/sftpd.py b/src/allmydata/frontends/sftpd.py index 36db21f06..a7444c3e9 100644 --- a/src/allmydata/frontends/sftpd.py +++ b/src/allmydata/frontends/sftpd.py @@ -1128,7 +1128,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): (from_direntry, to_direntry, len(all_heisenfiles), len(self._heisenfiles), request), level=NOISY) if not overwrite and (to_userpath in self._heisenfiles or to_direntry in all_heisenfiles): - def _existing(): raise SFTPError(FX_PERMISSION_DENIED, "cannot rename to existing path " + to_userpath) + def _existing(): raise SFTPError(FX_PERMISSION_DENIED, "cannot rename to existing path " + unicode(to_userpath, "utf-8")) if noisy: self.log("existing", level=NOISY) return defer.execute(_existing) @@ -1513,7 +1513,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): d2.addCallback(lambda ign: to_parent.get(to_childname)) def _expect_fail(res): if not isinstance(res, Failure): - raise SFTPError(FX_PERMISSION_DENIED, "cannot rename to existing path 
" + to_userpath) + raise SFTPError(FX_PERMISSION_DENIED, "cannot rename to existing path " + unicode(to_userpath, "utf-8")) # It is OK if we fail for errors other than NoSuchChildError, since that probably # indicates some problem accessing the destination directory. @@ -1538,7 +1538,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): if not isinstance(err, Failure) or (renamed and err.check(NoSuchChildError)): return None if not overwrite and err.check(ExistingChildError): - raise SFTPError(FX_PERMISSION_DENIED, "cannot rename to existing path " + to_userpath) + raise SFTPError(FX_PERMISSION_DENIED, "cannot rename to existing path " + unicode(to_userpath, "utf-8")) return err d3.addBoth(_check) diff --git a/src/allmydata/test/test_sftp.py b/src/allmydata/test/test_sftp.py index 3bbc3accf..9a5538c6f 100644 --- a/src/allmydata/test/test_sftp.py +++ b/src/allmydata/test/test_sftp.py @@ -1176,58 +1176,58 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas # renaming a non-existent file should fail d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "renameFile nofile newfile", - self.handler.renameFile, "nofile", "newfile")) + self.handler.renameFile, b"nofile", b"newfile")) d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "renameFile '' newfile", - self.handler.renameFile, "", "newfile")) + self.handler.renameFile, b"", b"newfile")) # renaming a file to a non-existent path should fail d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "renameFile small nodir/small", - self.handler.renameFile, "small", "nodir/small")) + self.handler.renameFile, b"small", b"nodir/small")) # renaming a file to an invalid UTF-8 name should fail d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "renameFile small invalid", - self.handler.renameFile, "small", "\xFF")) + self.handler.renameFile, b"small", b"\xFF")) # renaming a file to or from an URI 
should fail d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "renameFile small from uri", - self.handler.renameFile, "uri/"+self.small_uri, "new")) + self.handler.renameFile, b"uri/"+self.small_uri, b"new")) d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "renameFile small to uri", - self.handler.renameFile, "small", "uri/fake_uri")) + self.handler.renameFile, b"small", b"uri/fake_uri")) # renaming a file onto an existing file, directory or unknown should fail # The SFTP spec isn't clear about what error should be returned, but sshfs depends on # it being FX_PERMISSION_DENIED. d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "renameFile small small2", - self.handler.renameFile, "small", "small2")) + self.handler.renameFile, b"small", b"small2")) d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "renameFile small tiny_lit_dir", - self.handler.renameFile, "small", "tiny_lit_dir")) + self.handler.renameFile, b"small", b"tiny_lit_dir")) d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "renameFile small unknown", - self.handler.renameFile, "small", "unknown")) + self.handler.renameFile, b"small", b"unknown")) # renaming a file onto a heisenfile should fail, even if the open hasn't completed def _rename_onto_heisenfile_race(wf): slow_open = defer.Deferred() reactor.callLater(1, slow_open.callback, None) - d2 = self.handler.openFile("heisenfile", sftp.FXF_WRITE | sftp.FXF_CREAT, {}, delay=slow_open) + d2 = self.handler.openFile(b"heisenfile", sftp.FXF_WRITE | sftp.FXF_CREAT, {}, delay=slow_open) # deliberate race between openFile and renameFile d3 = self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "renameFile small heisenfile", - self.handler.renameFile, "small", "heisenfile") + self.handler.renameFile, b"small", b"heisenfile") d2.addCallback(lambda wf: wf.close()) return deferredutil.gatherResults([d2, d3]) 
d.addCallback(_rename_onto_heisenfile_race) # renaming a file to a correct path should succeed - d.addCallback(lambda ign: self.handler.renameFile("small", "new_small")) + d.addCallback(lambda ign: self.handler.renameFile(b"small", b"new_small")) d.addCallback(lambda ign: self.root.get(u"new_small")) d.addCallback(lambda node: self.failUnlessReallyEqual(node.get_uri(), self.small_uri)) @@ -1238,12 +1238,12 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas d.addCallback(lambda node: self.failUnlessReallyEqual(node.get_uri(), self.gross_uri)) # renaming a directory to a correct path should succeed - d.addCallback(lambda ign: self.handler.renameFile("tiny_lit_dir", "new_tiny_lit_dir")) + d.addCallback(lambda ign: self.handler.renameFile(b"tiny_lit_dir", b"new_tiny_lit_dir")) d.addCallback(lambda ign: self.root.get(u"new_tiny_lit_dir")) d.addCallback(lambda node: self.failUnlessReallyEqual(node.get_uri(), self.tiny_lit_dir_uri)) # renaming an unknown to a correct path should succeed - d.addCallback(lambda ign: self.handler.renameFile("unknown", "new_unknown")) + d.addCallback(lambda ign: self.handler.renameFile(b"unknown", b"new_unknown")) d.addCallback(lambda ign: self.root.get(u"new_unknown")) d.addCallback(lambda node: self.failUnlessReallyEqual(node.get_uri(), self.unknown_uri)) From a5e22d93cca16c6a76a60e75e4c29c2a7e385d43 Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Wed, 6 Jan 2021 09:54:48 -0500 Subject: [PATCH 100/213] All tests pass on Python 3. 
--- src/allmydata/test/test_sftp.py | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/src/allmydata/test/test_sftp.py b/src/allmydata/test/test_sftp.py index 9a5538c6f..ada749547 100644 --- a/src/allmydata/test/test_sftp.py +++ b/src/allmydata/test/test_sftp.py @@ -1256,7 +1256,7 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas extData = (struct.pack('>L', len(fromPathstring)) + fromPathstring + struct.pack('>L', len(toPathstring)) + toPathstring) - d2 = self.handler.extendedRequest('posix-rename@openssh.com', extData) + d2 = self.handler.extendedRequest(b'posix-rename@openssh.com', extData) def _check(res): res.trap(sftp.SFTPError) if res.value.code == sftp.FX_OK: @@ -1276,44 +1276,44 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas # POSIX-renaming a non-existent file should fail d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "renameFile_posix nofile newfile", - _renameFile, "nofile", "newfile")) + _renameFile, b"nofile", b"newfile")) d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "renameFile_posix '' newfile", - _renameFile, "", "newfile")) + _renameFile, b"", b"newfile")) # POSIX-renaming a file to a non-existent path should fail d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "renameFile_posix small nodir/small", - _renameFile, "small", "nodir/small")) + _renameFile, b"small", b"nodir/small")) # POSIX-renaming a file to an invalid UTF-8 name should fail d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "renameFile_posix small invalid", - _renameFile, "small", "\xFF")) + _renameFile, b"small", b"\xFF")) # POSIX-renaming a file to or from an URI should fail d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "renameFile_posix small from uri", - _renameFile, "uri/"+self.small_uri, "new")) + _renameFile, 
b"uri/"+self.small_uri, b"new")) d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "renameFile_posix small to uri", - _renameFile, "small", "uri/fake_uri")) + _renameFile, b"small", b"uri/fake_uri")) # POSIX-renaming a file onto an existing file, directory or unknown should succeed - d.addCallback(lambda ign: _renameFile("small", "small2")) + d.addCallback(lambda ign: _renameFile(b"small", b"small2")) d.addCallback(lambda ign: self.root.get(u"small2")) d.addCallback(lambda node: self.failUnlessReallyEqual(node.get_uri(), self.small_uri)) - d.addCallback(lambda ign: _renameFile("small2", "loop2")) + d.addCallback(lambda ign: _renameFile(b"small2", b"loop2")) d.addCallback(lambda ign: self.root.get(u"loop2")) d.addCallback(lambda node: self.failUnlessReallyEqual(node.get_uri(), self.small_uri)) - d.addCallback(lambda ign: _renameFile("loop2", "unknown2")) + d.addCallback(lambda ign: _renameFile(b"loop2", b"unknown2")) d.addCallback(lambda ign: self.root.get(u"unknown2")) d.addCallback(lambda node: self.failUnlessReallyEqual(node.get_uri(), self.small_uri)) # POSIX-renaming a file to a correct new path should succeed - d.addCallback(lambda ign: _renameFile("unknown2", "new_small")) + d.addCallback(lambda ign: _renameFile(b"unknown2", b"new_small")) d.addCallback(lambda ign: self.root.get(u"new_small")) d.addCallback(lambda node: self.failUnlessReallyEqual(node.get_uri(), self.small_uri)) @@ -1324,12 +1324,12 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas d.addCallback(lambda node: self.failUnlessReallyEqual(node.get_uri(), self.gross_uri)) # POSIX-renaming a directory to a correct path should succeed - d.addCallback(lambda ign: _renameFile("tiny_lit_dir", "new_tiny_lit_dir")) + d.addCallback(lambda ign: _renameFile(b"tiny_lit_dir", b"new_tiny_lit_dir")) d.addCallback(lambda ign: self.root.get(u"new_tiny_lit_dir")) d.addCallback(lambda node: self.failUnlessReallyEqual(node.get_uri(), 
self.tiny_lit_dir_uri)) # POSIX-renaming an unknown to a correct path should succeed - d.addCallback(lambda ign: _renameFile("unknown", "new_unknown")) + d.addCallback(lambda ign: _renameFile(b"unknown", b"new_unknown")) d.addCallback(lambda ign: self.root.get(u"new_unknown")) d.addCallback(lambda node: self.failUnlessReallyEqual(node.get_uri(), self.unknown_uri)) From 192063acfaab70bffb8125fadaf2abb3466e24fe Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Wed, 6 Jan 2021 09:59:33 -0500 Subject: [PATCH 101/213] Port to Python 3. --- src/allmydata/test/test_sftp.py | 10 ++++++++++ src/allmydata/util/_python3.py | 1 + 2 files changed, 11 insertions(+) diff --git a/src/allmydata/test/test_sftp.py b/src/allmydata/test/test_sftp.py index ada749547..ee02ae2d8 100644 --- a/src/allmydata/test/test_sftp.py +++ b/src/allmydata/test/test_sftp.py @@ -1,4 +1,14 @@ +""" +Ported to Python 3. +""" from __future__ import print_function +from __future__ import absolute_import +from __future__ import division +from __future__ import unicode_literals + +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 import re, struct, traceback, time, calendar from stat import S_IFREG, S_IFDIR diff --git a/src/allmydata/util/_python3.py b/src/allmydata/util/_python3.py index f405a8187..ff9409ea8 100644 --- a/src/allmydata/util/_python3.py +++ b/src/allmydata/util/_python3.py @@ -168,6 +168,7 @@ PORTED_TEST_MODULES = [ "allmydata.test.test_pipeline", "allmydata.test.test_python3", "allmydata.test.test_repairer", + "allmydata.test.test_sftp", "allmydata.test.test_spans", "allmydata.test.test_statistics", "allmydata.test.test_stats", From 7b091bde9baf75ca9a49418cf1136b5157f024c8 Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Wed, 6 Jan 2021 10:51:05 -0500 Subject: [PATCH 102/213] Port to Python 3. 
--- src/allmydata/frontends/sftpd.py | 52 ++++++++++++++++++-------------- src/allmydata/util/_python3.py | 1 + 2 files changed, 31 insertions(+), 22 deletions(-) diff --git a/src/allmydata/frontends/sftpd.py b/src/allmydata/frontends/sftpd.py index a7444c3e9..621dac921 100644 --- a/src/allmydata/frontends/sftpd.py +++ b/src/allmydata/frontends/sftpd.py @@ -1,4 +1,12 @@ -from past.builtins import unicode +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 + import six import heapq, traceback, array, stat, struct from stat import S_IFREG, S_IFDIR @@ -53,7 +61,7 @@ def eventually_errback(d): def _utf8(x): - if isinstance(x, unicode): + if isinstance(x, str): return x.encode('utf-8') if isinstance(x, bytes): return x @@ -64,7 +72,7 @@ def _to_sftp_time(t): """SFTP times are unsigned 32-bit integers representing UTC seconds (ignoring leap seconds) since the Unix epoch, January 1 1970 00:00 UTC. 
A Tahoe time is the corresponding float.""" - return long(t) & long(0xFFFFFFFF) + return int(t) & int(0xFFFFFFFF) def _convert_error(res, request): @@ -73,7 +81,7 @@ def _convert_error(res, request): if not isinstance(res, Failure): logged_res = res - if isinstance(res, str): logged_res = "" % (len(res),) + if isinstance(res, (bytes, str)): logged_res = "" % (len(res),) logmsg("SUCCESS %r %r" % (request, logged_res,), level=OPERATIONAL) return res @@ -224,7 +232,7 @@ def _populate_attrs(childnode, metadata, size=None): if childnode and size is None: size = childnode.get_size() if size is not None: - _assert(isinstance(size, (int, long)) and not isinstance(size, bool), size=size) + _assert(isinstance(size, int) and not isinstance(size, bool), size=size) attrs['size'] = size perms = S_IFREG | 0o666 @@ -256,7 +264,7 @@ def _attrs_to_metadata(attrs): for key in attrs: if key == "mtime" or key == "ctime" or key == "createtime": - metadata[key] = long(attrs[key]) + metadata[key] = int(attrs[key]) elif key.startswith("ext_"): metadata[key] = str(attrs[key]) @@ -268,7 +276,7 @@ def _attrs_to_metadata(attrs): def _direntry_for(filenode_or_parent, childname, filenode=None): - precondition(isinstance(childname, (unicode, type(None))), childname=childname) + precondition(isinstance(childname, (str, type(None))), childname=childname) if childname is None: filenode_or_parent = filenode @@ -673,7 +681,7 @@ class GeneralSFTPFile(PrefixingLogMixin): self.log(".open(parent=%r, childname=%r, filenode=%r, metadata=%r)" % (parent, childname, filenode, metadata), level=OPERATIONAL) - precondition(isinstance(childname, (unicode, type(None))), childname=childname) + precondition(isinstance(childname, (str, type(None))), childname=childname) precondition(filenode is None or IFileNode.providedBy(filenode), filenode=filenode) precondition(not self.closed, sftpfile=self) @@ -724,7 +732,7 @@ class GeneralSFTPFile(PrefixingLogMixin): def rename(self, new_userpath, new_parent, new_childname): 
self.log(".rename(%r, %r, %r)" % (new_userpath, new_parent, new_childname), level=OPERATIONAL) - precondition(isinstance(new_userpath, bytes) and isinstance(new_childname, unicode), + precondition(isinstance(new_userpath, bytes) and isinstance(new_childname, str), new_userpath=new_userpath, new_childname=new_childname) self.userpath = new_userpath self.parent = new_parent @@ -926,7 +934,7 @@ class GeneralSFTPFile(PrefixingLogMixin): return defer.execute(_closed) size = attrs.get("size", None) - if size is not None and (not isinstance(size, (int, long)) or size < 0): + if size is not None and (not isinstance(size, int) or size < 0): def _bad(): raise SFTPError(FX_BAD_MESSAGE, "new size is not a valid nonnegative integer") return defer.execute(_bad) @@ -1013,7 +1021,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): def logout(self): self.log(".logout()", level=OPERATIONAL) - for files in self._heisenfiles.itervalues(): + for files in self._heisenfiles.values(): for f in files: f.abandon() @@ -1089,8 +1097,8 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): (from_userpath, from_parent, from_childname, to_userpath, to_parent, to_childname, overwrite)) self.log(request, level=OPERATIONAL) - precondition((isinstance(from_userpath, bytes) and isinstance(from_childname, unicode) and - isinstance(to_userpath, bytes) and isinstance(to_childname, unicode)), + precondition((isinstance(from_userpath, bytes) and isinstance(from_childname, str) and + isinstance(to_userpath, bytes) and isinstance(to_childname, str)), from_userpath=from_userpath, from_childname=from_childname, to_userpath=to_userpath, to_childname=to_childname) if noisy: self.log("all_heisenfiles = %r\nself._heisenfiles = %r" % (all_heisenfiles, self._heisenfiles), level=NOISY) @@ -1128,7 +1136,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): (from_direntry, to_direntry, len(all_heisenfiles), len(self._heisenfiles), request), level=NOISY) if not overwrite and (to_userpath in self._heisenfiles 
or to_direntry in all_heisenfiles): - def _existing(): raise SFTPError(FX_PERMISSION_DENIED, "cannot rename to existing path " + unicode(to_userpath, "utf-8")) + def _existing(): raise SFTPError(FX_PERMISSION_DENIED, "cannot rename to existing path " + str(to_userpath, "utf-8")) if noisy: self.log("existing", level=NOISY) return defer.execute(_existing) @@ -1220,7 +1228,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): def _remove_heisenfile(self, userpath, parent, childname, file_to_remove): if noisy: self.log("._remove_heisenfile(%r, %r, %r, %r)" % (userpath, parent, childname, file_to_remove), level=NOISY) - _assert(isinstance(userpath, bytes) and isinstance(childname, (unicode, type(None))), + _assert(isinstance(userpath, bytes) and isinstance(childname, (str, type(None))), userpath=userpath, childname=childname) direntry = _direntry_for(parent, childname) @@ -1247,7 +1255,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): (existing_file, userpath, flags, _repr_flags(flags), parent, childname, filenode, metadata), level=NOISY) - _assert((isinstance(userpath, bytes) and isinstance(childname, (unicode, type(None))) and + _assert((isinstance(userpath, bytes) and isinstance(childname, (str, type(None))) and (metadata is None or 'no-write' in metadata)), userpath=userpath, childname=childname, metadata=metadata) @@ -1513,7 +1521,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): d2.addCallback(lambda ign: to_parent.get(to_childname)) def _expect_fail(res): if not isinstance(res, Failure): - raise SFTPError(FX_PERMISSION_DENIED, "cannot rename to existing path " + unicode(to_userpath, "utf-8")) + raise SFTPError(FX_PERMISSION_DENIED, "cannot rename to existing path " + str(to_userpath, "utf-8")) # It is OK if we fail for errors other than NoSuchChildError, since that probably # indicates some problem accessing the destination directory. 
@@ -1538,7 +1546,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): if not isinstance(err, Failure) or (renamed and err.check(NoSuchChildError)): return None if not overwrite and err.check(ExistingChildError): - raise SFTPError(FX_PERMISSION_DENIED, "cannot rename to existing path " + unicode(to_userpath, "utf-8")) + raise SFTPError(FX_PERMISSION_DENIED, "cannot rename to existing path " + str(to_userpath, "utf-8")) return err d3.addBoth(_check) @@ -1648,7 +1656,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): def _render(children): parent_readonly = dirnode.is_readonly() results = [] - for filename, (child, metadata) in children.items(): + for filename, (child, metadata) in list(children.items()): # The file size may be cached or absent. metadata['no-write'] = _no_write(parent_readonly, child, metadata) attrs = _populate_attrs(child, metadata) @@ -1980,9 +1988,9 @@ class SFTPServer(service.MultiService): def __init__(self, client, accountfile, accounturl, sftp_portstr, pubkey_file, privkey_file): - precondition(isinstance(accountfile, (unicode, type(None))), accountfile) - precondition(isinstance(pubkey_file, unicode), pubkey_file) - precondition(isinstance(privkey_file, unicode), privkey_file) + precondition(isinstance(accountfile, (str, type(None))), accountfile) + precondition(isinstance(pubkey_file, str), pubkey_file) + precondition(isinstance(privkey_file, str), privkey_file) service.MultiService.__init__(self) r = Dispatcher(client) diff --git a/src/allmydata/util/_python3.py b/src/allmydata/util/_python3.py index ff9409ea8..f2b8cf004 100644 --- a/src/allmydata/util/_python3.py +++ b/src/allmydata/util/_python3.py @@ -37,6 +37,7 @@ PORTED_MODULES = [ "allmydata.deep_stats", "allmydata.dirnode", "allmydata.frontends.ftpd", + "allmydata.frontends.sftpd", "allmydata.hashtree", "allmydata.immutable.checker", "allmydata.immutable.downloader", From 7b1bfadd215685d7da73470aa01f1ace0ee93594 Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: 
Wed, 6 Jan 2021 13:37:52 -0500 Subject: [PATCH 103/213] Rip out FTP. --- docs/about.rst | 4 +- docs/configuration.rst | 9 +- docs/frontends/FTP-and-SFTP.rst | 81 ++----- docs/frontends/webapi.rst | 2 +- docs/helper.rst | 2 +- docs/known_issues.rst | 6 +- docs/running.rst | 7 +- newsfragments/3583.removed | 1 + src/allmydata/client.py | 19 -- src/allmydata/frontends/ftpd.py | 340 ------------------------------ src/allmydata/test/test_client.py | 84 +------- src/allmydata/test/test_ftp.py | 106 ---------- 12 files changed, 32 insertions(+), 629 deletions(-) create mode 100644 newsfragments/3583.removed delete mode 100644 src/allmydata/frontends/ftpd.py delete mode 100644 src/allmydata/test/test_ftp.py diff --git a/docs/about.rst b/docs/about.rst index 626792d6b..120abb079 100644 --- a/docs/about.rst +++ b/docs/about.rst @@ -67,12 +67,12 @@ Here's how it works: A "storage grid" is made up of a number of storage servers. A storage server has direct attached storage (typically one or more hard disks). A "gateway" communicates with storage nodes, and uses them to provide access to the -grid over protocols such as HTTP(S), SFTP or FTP. +grid over protocols such as HTTP(S) and SFTP. Note that you can find "client" used to refer to gateway nodes (which act as a client to storage servers), and also to processes or programs connecting to a gateway node and performing operations on the grid -- for example, a CLI -command, Web browser, SFTP client, or FTP client. +command, Web browser, or SFTP client. Users do not rely on storage servers to provide *confidentiality* nor *integrity* for their data -- instead all of the data is encrypted and diff --git a/docs/configuration.rst b/docs/configuration.rst index 2c0746ba2..93c9aa0f1 100644 --- a/docs/configuration.rst +++ b/docs/configuration.rst @@ -81,7 +81,6 @@ Client/server nodes provide one or more of the following services: * web-API service * SFTP service -* FTP service * helper service * storage service. 
@@ -708,12 +707,12 @@ CLI file store, uploading/downloading files, and creating/running Tahoe nodes. See :doc:`frontends/CLI` for details. -SFTP, FTP +SFTP - Tahoe can also run both SFTP and FTP servers, and map a username/password + Tahoe can also run SFTP servers, and map a username/password pair to a top-level Tahoe directory. See :doc:`frontends/FTP-and-SFTP` - for instructions on configuring these services, and the ``[sftpd]`` and - ``[ftpd]`` sections of ``tahoe.cfg``. + for instructions on configuring this service, and the ``[sftpd]`` + section of ``tahoe.cfg``. Storage Server Configuration diff --git a/docs/frontends/FTP-and-SFTP.rst b/docs/frontends/FTP-and-SFTP.rst index dc348af34..ee6371812 100644 --- a/docs/frontends/FTP-and-SFTP.rst +++ b/docs/frontends/FTP-and-SFTP.rst @@ -1,22 +1,21 @@ .. -*- coding: utf-8-with-signature -*- -================================= -Tahoe-LAFS SFTP and FTP Frontends -================================= +======================== +Tahoe-LAFS SFTP Frontend +======================== -1. `SFTP/FTP Background`_ +1. `SFTP Background`_ 2. `Tahoe-LAFS Support`_ 3. `Creating an Account File`_ 4. `Running An Account Server (accounts.url)`_ 5. `Configuring SFTP Access`_ -6. `Configuring FTP Access`_ -7. `Dependencies`_ -8. `Immutable and Mutable Files`_ -9. `Known Issues`_ +6. `Dependencies`_ +7. `Immutable and Mutable Files`_ +8. `Known Issues`_ -SFTP/FTP Background -=================== +SFTP Background +=============== FTP is the venerable internet file-transfer protocol, first developed in 1971. The FTP server usually listens on port 21. A separate connection is @@ -33,20 +32,18 @@ Both FTP and SFTP were developed assuming a UNIX-like server, with accounts and passwords, octal file modes (user/group/other, read/write/execute), and ctime/mtime timestamps. -We recommend SFTP over FTP, because the protocol is better, and the server -implementation in Tahoe-LAFS is more complete. See `Known Issues`_, below, -for details. 
+Previous versions of Tahoe-LAFS supported FTP, but now only the superior SFTP +frontend is supported. See `Known Issues`_, below, for details on the +limitations of SFTP. Tahoe-LAFS Support ================== All Tahoe-LAFS client nodes can run a frontend SFTP server, allowing regular SFTP clients (like ``/usr/bin/sftp``, the ``sshfs`` FUSE plugin, and many -others) to access the file store. They can also run an FTP server, so FTP -clients (like ``/usr/bin/ftp``, ``ncftp``, and others) can too. These -frontends sit at the same level as the web-API interface. +others) to access the file store. -Since Tahoe-LAFS does not use user accounts or passwords, the SFTP/FTP +Since Tahoe-LAFS does not use user accounts or passwords, the SFTP servers must be configured with a way to first authenticate a user (confirm that a prospective client has a legitimate claim to whatever authorities we might grant a particular user), and second to decide what directory cap @@ -173,39 +170,6 @@ clients and with the sshfs filesystem, see wiki:SftpFrontend_ .. _wiki:SftpFrontend: https://tahoe-lafs.org/trac/tahoe-lafs/wiki/SftpFrontend -Configuring FTP Access -====================== - -To enable the FTP server with an accounts file, add the following lines to -the BASEDIR/tahoe.cfg file:: - - [ftpd] - enabled = true - port = tcp:8021:interface=127.0.0.1 - accounts.file = private/accounts - -The FTP server will listen on the given port number and on the loopback -interface only. The "accounts.file" pathname will be interpreted relative to -the node's BASEDIR. - -To enable the FTP server with an account server instead, provide the URL of -that server in an "accounts.url" directive:: - - [ftpd] - enabled = true - port = tcp:8021:interface=127.0.0.1 - accounts.url = https://example.com/login - -You can provide both accounts.file and accounts.url, although it probably -isn't very useful except for testing. 
- -FTP provides no security, and so your password or caps could be eavesdropped -if you connect to the FTP server remotely. The examples above include -":interface=127.0.0.1" in the "port" option, which causes the server to only -accept connections from localhost. - -Public key authentication is not supported for FTP. - Dependencies ============ @@ -216,7 +180,7 @@ separately: debian puts it in the "python-twisted-conch" package. Immutable and Mutable Files =========================== -All files created via SFTP (and FTP) are immutable files. However, files can +All files created via SFTP are immutable files. However, files can only be created in writeable directories, which allows the directory entry to be relinked to a different file. Normally, when the path of an immutable file is opened for writing by SFTP, the directory entry is relinked to another @@ -256,18 +220,3 @@ See also wiki:SftpFrontend_. .. _ticket #1059: https://tahoe-lafs.org/trac/tahoe-lafs/ticket/1059 .. _ticket #1089: https://tahoe-lafs.org/trac/tahoe-lafs/ticket/1089 - -Known Issues in the FTP Frontend --------------------------------- - -Mutable files are not supported by the FTP frontend (`ticket #680`_). - -Non-ASCII filenames are not supported by FTP (`ticket #682`_). - -The FTP frontend sometimes fails to report errors, for example if an upload -fails because it does meet the "servers of happiness" threshold (`ticket -#1081`_). - -.. _ticket #680: https://tahoe-lafs.org/trac/tahoe-lafs/ticket/680 -.. _ticket #682: https://tahoe-lafs.org/trac/tahoe-lafs/ticket/682 -.. 
_ticket #1081: https://tahoe-lafs.org/trac/tahoe-lafs/ticket/1081 diff --git a/docs/frontends/webapi.rst b/docs/frontends/webapi.rst index 99fa44979..417109213 100644 --- a/docs/frontends/webapi.rst +++ b/docs/frontends/webapi.rst @@ -2157,7 +2157,7 @@ When modifying the file, be careful to update it atomically, otherwise a request may arrive while the file is only halfway written, and the partial file may be incorrectly parsed. -The blacklist is applied to all access paths (including SFTP, FTP, and CLI +The blacklist is applied to all access paths (including SFTP and CLI operations), not just the web-API. The blacklist also applies to directories. If a directory is blacklisted, the gateway will refuse access to both that directory and any child files/directories underneath it, when accessed via diff --git a/docs/helper.rst b/docs/helper.rst index 0fcdf4601..55d302cac 100644 --- a/docs/helper.rst +++ b/docs/helper.rst @@ -122,7 +122,7 @@ Who should consider using a Helper? * clients who experience problems with TCP connection fairness: if other programs or machines in the same home are getting less than their fair share of upload bandwidth. If the connection is being shared fairly, then - a Tahoe upload that is happening at the same time as a single FTP upload + a Tahoe upload that is happening at the same time as a single SFTP upload should get half the bandwidth. 
* clients who have been given the helper.furl by someone who is running a Helper and is willing to let them use it diff --git a/docs/known_issues.rst b/docs/known_issues.rst index e040ffaf6..98bd1b35d 100644 --- a/docs/known_issues.rst +++ b/docs/known_issues.rst @@ -23,7 +23,7 @@ Known Issues in Tahoe-LAFS v1.10.3, released 30-Mar-2016 * `Disclosure of file through embedded hyperlinks or JavaScript in that file`_ * `Command-line arguments are leaked to other local users`_ * `Capabilities may be leaked to web browser phishing filter / "safe browsing" servers`_ - * `Known issues in the FTP and SFTP frontends`_ + * `Known issues in the SFTP frontend`_ * `Traffic analysis based on sizes of files/directories, storage indices, and timing`_ * `Privacy leak via Google Chart API link in map-update timing web page`_ @@ -213,8 +213,8 @@ To disable the filter in Chrome: ---- -Known issues in the FTP and SFTP frontends ------------------------------------------- +Known issues in the SFTP frontend +--------------------------------- These are documented in :doc:`frontends/FTP-and-SFTP` and on `the SftpFrontend page`_ on the wiki. diff --git a/docs/running.rst b/docs/running.rst index 6d82a97f2..82b0443f9 100644 --- a/docs/running.rst +++ b/docs/running.rst @@ -207,10 +207,10 @@ create a new directory and lose the capability to it, then you cannot access that directory ever again. -The SFTP and FTP frontends --------------------------- +The SFTP frontend +----------------- -You can access your Tahoe-LAFS grid via any SFTP_ or FTP_ client. See +You can access your Tahoe-LAFS grid via any SFTP_ client. See :doc:`frontends/FTP-and-SFTP` for how to set this up. On most Unix platforms, you can also use SFTP to plug Tahoe-LAFS into your computer's local filesystem via ``sshfs``, but see the `FAQ about performance @@ -220,7 +220,6 @@ The SftpFrontend_ page on the wiki has more information about using SFTP with Tahoe-LAFS. .. 
_SFTP: https://en.wikipedia.org/wiki/SSH_file_transfer_protocol -.. _FTP: https://en.wikipedia.org/wiki/File_Transfer_Protocol .. _FAQ about performance problems: https://tahoe-lafs.org/trac/tahoe-lafs/wiki/FAQ#Q23_FUSE .. _SftpFrontend: https://tahoe-lafs.org/trac/tahoe-lafs/wiki/SftpFrontend diff --git a/newsfragments/3583.removed b/newsfragments/3583.removed new file mode 100644 index 000000000..a3fce48be --- /dev/null +++ b/newsfragments/3583.removed @@ -0,0 +1 @@ +FTP is no longer supported by Tahoe-LAFS. Please use the SFTP support instead. \ No newline at end of file diff --git a/src/allmydata/client.py b/src/allmydata/client.py index bd744fe6a..f5e603490 100644 --- a/src/allmydata/client.py +++ b/src/allmydata/client.py @@ -86,12 +86,6 @@ _client_config = configutil.ValidConfiguration( "shares.total", "storage.plugins", ), - "ftpd": ( - "accounts.file", - "accounts.url", - "enabled", - "port", - ), "storage": ( "debug_discard", "enabled", @@ -656,7 +650,6 @@ class _Client(node.Node, pollmixin.PollMixin): raise ValueError("config error: helper is enabled, but tub " "is not listening ('tub.port=' is empty)") self.init_helper() - self.init_ftp_server() self.init_sftp_server() # If the node sees an exit_trigger file, it will poll every second to see @@ -1032,18 +1025,6 @@ class _Client(node.Node, pollmixin.PollMixin): ) ws.setServiceParent(self) - def init_ftp_server(self): - if self.config.get_config("ftpd", "enabled", False, boolean=True): - accountfile = self.config.get_config("ftpd", "accounts.file", None) - if accountfile: - accountfile = self.config.get_config_path(accountfile) - accounturl = self.config.get_config("ftpd", "accounts.url", None) - ftp_portstr = self.config.get_config("ftpd", "port", "8021") - - from allmydata.frontends import ftpd - s = ftpd.FTPServer(self, accountfile, accounturl, ftp_portstr) - s.setServiceParent(self) - def init_sftp_server(self): if self.config.get_config("sftpd", "enabled", False, boolean=True): accountfile = 
self.config.get_config("sftpd", "accounts.file", None) diff --git a/src/allmydata/frontends/ftpd.py b/src/allmydata/frontends/ftpd.py deleted file mode 100644 index 0b18df85b..000000000 --- a/src/allmydata/frontends/ftpd.py +++ /dev/null @@ -1,340 +0,0 @@ -from six import ensure_str - -from types import NoneType - -from zope.interface import implementer -from twisted.application import service, strports -from twisted.internet import defer -from twisted.internet.interfaces import IConsumer -from twisted.cred import portal -from twisted.python import filepath -from twisted.protocols import ftp - -from allmydata.interfaces import IDirectoryNode, ExistingChildError, \ - NoSuchChildError -from allmydata.immutable.upload import FileHandle -from allmydata.util.fileutil import EncryptedTemporaryFile -from allmydata.util.assertutil import precondition - -@implementer(ftp.IReadFile) -class ReadFile(object): - def __init__(self, node): - self.node = node - def send(self, consumer): - d = self.node.read(consumer) - return d # when consumed - -@implementer(IConsumer) -class FileWriter(object): - - def registerProducer(self, producer, streaming): - if not streaming: - raise NotImplementedError("Non-streaming producer not supported.") - # we write the data to a temporary file, since Tahoe can't do - # streaming upload yet. 
- self.f = EncryptedTemporaryFile() - return None - - def unregisterProducer(self): - # the upload actually happens in WriteFile.close() - pass - - def write(self, data): - self.f.write(data) - -@implementer(ftp.IWriteFile) -class WriteFile(object): - - def __init__(self, parent, childname, convergence): - self.parent = parent - self.childname = childname - self.convergence = convergence - - def receive(self): - self.c = FileWriter() - return defer.succeed(self.c) - - def close(self): - u = FileHandle(self.c.f, self.convergence) - d = self.parent.add_file(self.childname, u) - return d - - -class NoParentError(Exception): - pass - -# filepath.Permissions was added in Twisted-11.1.0, which we require. Twisted -# <15.0.0 expected an int, and only does '&' on it. Twisted >=15.0.0 expects -# a filepath.Permissions. This satisfies both. - -class IntishPermissions(filepath.Permissions): - def __init__(self, statModeInt): - self._tahoe_statModeInt = statModeInt - filepath.Permissions.__init__(self, statModeInt) - def __and__(self, other): - return self._tahoe_statModeInt & other - -@implementer(ftp.IFTPShell) -class Handler(object): - def __init__(self, client, rootnode, username, convergence): - self.client = client - self.root = rootnode - self.username = username - self.convergence = convergence - - def makeDirectory(self, path): - d = self._get_root(path) - d.addCallback(lambda root_and_path: - self._get_or_create_directories(root_and_path[0], root_and_path[1])) - return d - - def _get_or_create_directories(self, node, path): - if not IDirectoryNode.providedBy(node): - # unfortunately it is too late to provide the name of the - # blocking directory in the error message. 
- raise ftp.FileExistsError("cannot create directory because there " - "is a file in the way") - if not path: - return defer.succeed(node) - d = node.get(path[0]) - def _maybe_create(f): - f.trap(NoSuchChildError) - return node.create_subdirectory(path[0]) - d.addErrback(_maybe_create) - d.addCallback(self._get_or_create_directories, path[1:]) - return d - - def _get_parent(self, path): - # fire with (parentnode, childname) - path = [unicode(p) for p in path] - if not path: - raise NoParentError - childname = path[-1] - d = self._get_root(path) - def _got_root(root_and_path): - (root, path) = root_and_path - if not path: - raise NoParentError - return root.get_child_at_path(path[:-1]) - d.addCallback(_got_root) - def _got_parent(parent): - return (parent, childname) - d.addCallback(_got_parent) - return d - - def _remove_thing(self, path, must_be_directory=False, must_be_file=False): - d = defer.maybeDeferred(self._get_parent, path) - def _convert_error(f): - f.trap(NoParentError) - raise ftp.PermissionDeniedError("cannot delete root directory") - d.addErrback(_convert_error) - def _got_parent(parent_and_childname): - (parent, childname) = parent_and_childname - d = parent.get(childname) - def _got_child(child): - if must_be_directory and not IDirectoryNode.providedBy(child): - raise ftp.IsNotADirectoryError("rmdir called on a file") - if must_be_file and IDirectoryNode.providedBy(child): - raise ftp.IsADirectoryError("rmfile called on a directory") - return parent.delete(childname) - d.addCallback(_got_child) - d.addErrback(self._convert_error) - return d - d.addCallback(_got_parent) - return d - - def removeDirectory(self, path): - return self._remove_thing(path, must_be_directory=True) - - def removeFile(self, path): - return self._remove_thing(path, must_be_file=True) - - def rename(self, fromPath, toPath): - # the target directory must already exist - d = self._get_parent(fromPath) - def _got_from_parent(fromparent_and_childname): - (fromparent, childname) = 
fromparent_and_childname - d = self._get_parent(toPath) - d.addCallback(lambda toparent_and_tochildname: - fromparent.move_child_to(childname, - toparent_and_tochildname[0], toparent_and_tochildname[1], - overwrite=False)) - return d - d.addCallback(_got_from_parent) - d.addErrback(self._convert_error) - return d - - def access(self, path): - # we allow access to everything that exists. We are required to raise - # an error for paths that don't exist: FTP clients (at least ncftp) - # uses this to decide whether to mkdir or not. - d = self._get_node_and_metadata_for_path(path) - d.addErrback(self._convert_error) - d.addCallback(lambda res: None) - return d - - def _convert_error(self, f): - if f.check(NoSuchChildError): - childname = f.value.args[0].encode("utf-8") - msg = "'%s' doesn't exist" % childname - raise ftp.FileNotFoundError(msg) - if f.check(ExistingChildError): - msg = f.value.args[0].encode("utf-8") - raise ftp.FileExistsError(msg) - return f - - def _get_root(self, path): - # return (root, remaining_path) - path = [unicode(p) for p in path] - if path and path[0] == "uri": - d = defer.maybeDeferred(self.client.create_node_from_uri, - str(path[1])) - d.addCallback(lambda root: (root, path[2:])) - else: - d = defer.succeed((self.root,path)) - return d - - def _get_node_and_metadata_for_path(self, path): - d = self._get_root(path) - def _got_root(root_and_path): - (root,path) = root_and_path - if path: - return root.get_child_and_metadata_at_path(path) - else: - return (root,{}) - d.addCallback(_got_root) - return d - - def _populate_row(self, keys, childnode_and_metadata): - (childnode, metadata) = childnode_and_metadata - values = [] - isdir = bool(IDirectoryNode.providedBy(childnode)) - for key in keys: - if key == "size": - if isdir: - value = 0 - else: - value = childnode.get_size() or 0 - elif key == "directory": - value = isdir - elif key == "permissions": - # Twisted-14.0.2 (and earlier) expected an int, and used it - # in a rendering function that 
did (mode & NUMBER). - # Twisted-15.0.0 expects a - # twisted.python.filepath.Permissions , and calls its - # .shorthand() method. This provides both. - value = IntishPermissions(0o600) - elif key == "hardlinks": - value = 1 - elif key == "modified": - # follow sftpd convention (i.e. linkmotime in preference to mtime) - if "linkmotime" in metadata.get("tahoe", {}): - value = metadata["tahoe"]["linkmotime"] - else: - value = metadata.get("mtime", 0) - elif key == "owner": - value = self.username - elif key == "group": - value = self.username - else: - value = "??" - values.append(value) - return values - - def stat(self, path, keys=()): - # for files only, I think - d = self._get_node_and_metadata_for_path(path) - def _render(node_and_metadata): - (node, metadata) = node_and_metadata - assert not IDirectoryNode.providedBy(node) - return self._populate_row(keys, (node,metadata)) - d.addCallback(_render) - d.addErrback(self._convert_error) - return d - - def list(self, path, keys=()): - # the interface claims that path is a list of unicodes, but in - # practice it is not - d = self._get_node_and_metadata_for_path(path) - def _list(node_and_metadata): - (node, metadata) = node_and_metadata - if IDirectoryNode.providedBy(node): - return node.list() - return { path[-1]: (node, metadata) } # need last-edge metadata - d.addCallback(_list) - def _render(children): - results = [] - for (name, childnode) in children.iteritems(): - # the interface claims that the result should have a unicode - # object as the name, but it fails unless you give it a - # bytestring - results.append( (name.encode("utf-8"), - self._populate_row(keys, childnode) ) ) - return results - d.addCallback(_render) - d.addErrback(self._convert_error) - return d - - def openForReading(self, path): - d = self._get_node_and_metadata_for_path(path) - d.addCallback(lambda node_and_metadata: ReadFile(node_and_metadata[0])) - d.addErrback(self._convert_error) - return d - - def openForWriting(self, path): - path 
= [unicode(p) for p in path] - if not path: - raise ftp.PermissionDeniedError("cannot STOR to root directory") - childname = path[-1] - d = self._get_root(path) - def _got_root(root_and_path): - (root, path) = root_and_path - if not path: - raise ftp.PermissionDeniedError("cannot STOR to root directory") - return root.get_child_at_path(path[:-1]) - d.addCallback(_got_root) - def _got_parent(parent): - return WriteFile(parent, childname, self.convergence) - d.addCallback(_got_parent) - return d - -from allmydata.frontends.auth import AccountURLChecker, AccountFileChecker, NeedRootcapLookupScheme - - -@implementer(portal.IRealm) -class Dispatcher(object): - def __init__(self, client): - self.client = client - - def requestAvatar(self, avatarID, mind, interface): - assert interface == ftp.IFTPShell - rootnode = self.client.create_node_from_uri(avatarID.rootcap) - convergence = self.client.convergence - s = Handler(self.client, rootnode, avatarID.username, convergence) - def logout(): pass - return (interface, s, None) - - -class FTPServer(service.MultiService): - def __init__(self, client, accountfile, accounturl, ftp_portstr): - precondition(isinstance(accountfile, (unicode, NoneType)), accountfile) - service.MultiService.__init__(self) - - r = Dispatcher(client) - p = portal.Portal(r) - - if accountfile: - c = AccountFileChecker(self, accountfile) - p.registerChecker(c) - if accounturl: - c = AccountURLChecker(self, accounturl) - p.registerChecker(c) - if not accountfile and not accounturl: - # we could leave this anonymous, with just the /uri/CAP form - raise NeedRootcapLookupScheme("must provide some translation") - - f = ftp.FTPFactory(p) - # strports requires a native string. 
- ftp_portstr = ensure_str(ftp_portstr) - s = strports.service(ftp_portstr, f) - s.setServiceParent(self) diff --git a/src/allmydata/test/test_client.py b/src/allmydata/test/test_client.py index 342fe4af1..1ebb75b84 100644 --- a/src/allmydata/test/test_client.py +++ b/src/allmydata/test/test_client.py @@ -424,88 +424,8 @@ class Basic(testutil.ReallyEqualMixin, unittest.TestCase): expected = fileutil.abspath_expanduser_unicode(u"relative", abs_basedir) self.failUnlessReallyEqual(w.staticdir, expected) - # TODO: also test config options for SFTP. - - @defer.inlineCallbacks - def test_ftp_create(self): - """ - configuration for sftpd results in it being started - """ - root = FilePath(self.mktemp()) - root.makedirs() - accounts = root.child(b"sftp-accounts") - accounts.touch() - - data = FilePath(__file__).sibling(b"data") - privkey = data.child(b"openssh-rsa-2048.txt") - pubkey = data.child(b"openssh-rsa-2048.pub.txt") - - basedir = u"client.Basic.test_ftp_create" - create_node_dir(basedir, "testing") - with open(os.path.join(basedir, "tahoe.cfg"), "w") as f: - f.write(( - '[sftpd]\n' - 'enabled = true\n' - 'accounts.file = {}\n' - 'host_pubkey_file = {}\n' - 'host_privkey_file = {}\n' - ).format(accounts.path, pubkey.path, privkey.path)) - - client_node = yield client.create_client( - basedir, - ) - sftp = client_node.getServiceNamed("frontend:sftp") - self.assertIs(sftp.parent, client_node) - - - @defer.inlineCallbacks - def test_ftp_auth_keyfile(self): - """ - ftpd accounts.file is parsed properly - """ - basedir = u"client.Basic.test_ftp_auth_keyfile" - os.mkdir(basedir) - fileutil.write(os.path.join(basedir, "tahoe.cfg"), - (BASECONFIG + - "[ftpd]\n" - "enabled = true\n" - "port = tcp:0:interface=127.0.0.1\n" - "accounts.file = private/accounts\n")) - os.mkdir(os.path.join(basedir, "private")) - fileutil.write(os.path.join(basedir, "private", "accounts"), "\n") - c = yield client.create_client(basedir) # just make sure it can be instantiated - del c - - 
@defer.inlineCallbacks - def test_ftp_auth_url(self): - """ - ftpd accounts.url is parsed properly - """ - basedir = u"client.Basic.test_ftp_auth_url" - os.mkdir(basedir) - fileutil.write(os.path.join(basedir, "tahoe.cfg"), - (BASECONFIG + - "[ftpd]\n" - "enabled = true\n" - "port = tcp:0:interface=127.0.0.1\n" - "accounts.url = http://0.0.0.0/\n")) - c = yield client.create_client(basedir) # just make sure it can be instantiated - del c - - @defer.inlineCallbacks - def test_ftp_auth_no_accountfile_or_url(self): - """ - ftpd requires some way to look up accounts - """ - basedir = u"client.Basic.test_ftp_auth_no_accountfile_or_url" - os.mkdir(basedir) - fileutil.write(os.path.join(basedir, "tahoe.cfg"), - (BASECONFIG + - "[ftpd]\n" - "enabled = true\n" - "port = tcp:0:interface=127.0.0.1\n")) - with self.assertRaises(NeedRootcapLookupScheme): - yield client.create_client(basedir) + # TODO: also test config options for SFTP. See Git history for deleted FTP + # tests that could be used as basis for these tests. @defer.inlineCallbacks def _storage_dir_test(self, basedir, storage_path, expected_path): diff --git a/src/allmydata/test/test_ftp.py b/src/allmydata/test/test_ftp.py deleted file mode 100644 index 4eddef440..000000000 --- a/src/allmydata/test/test_ftp.py +++ /dev/null @@ -1,106 +0,0 @@ - -from twisted.trial import unittest - -from allmydata.frontends import ftpd -from allmydata.immutable import upload -from allmydata.mutable import publish -from allmydata.test.no_network import GridTestMixin -from allmydata.test.common_util import ReallyEqualMixin - -class Handler(GridTestMixin, ReallyEqualMixin, unittest.TestCase): - """ - This is a no-network unit test of ftpd.Handler and the abstractions - it uses. 
- """ - - FALL_OF_BERLIN_WALL = 626644800 - TURN_OF_MILLENIUM = 946684800 - - def _set_up(self, basedir, num_clients=1, num_servers=10): - self.basedir = "ftp/" + basedir - self.set_up_grid(num_clients=num_clients, num_servers=num_servers, - oneshare=True) - - self.client = self.g.clients[0] - self.username = "alice" - self.convergence = "" - - d = self.client.create_dirnode() - def _created_root(node): - self.root = node - self.root_uri = node.get_uri() - self.handler = ftpd.Handler(self.client, self.root, self.username, - self.convergence) - d.addCallback(_created_root) - return d - - def _set_metadata(self, name, metadata): - """Set metadata for `name', avoiding MetadataSetter's timestamp reset - behavior.""" - def _modifier(old_contents, servermap, first_time): - children = self.root._unpack_contents(old_contents) - children[name] = (children[name][0], metadata) - return self.root._pack_contents(children) - - return self.root._node.modify(_modifier) - - def _set_up_tree(self): - # add immutable file at root - immutable = upload.Data("immutable file contents", None) - d = self.root.add_file(u"immutable", immutable) - - # `mtime' and `linkmotime' both set - md_both = {'mtime': self.FALL_OF_BERLIN_WALL, - 'tahoe': {'linkmotime': self.TURN_OF_MILLENIUM}} - d.addCallback(lambda _: self._set_metadata(u"immutable", md_both)) - - # add link to root from root - d.addCallback(lambda _: self.root.set_node(u"loop", self.root)) - - # `mtime' set, but no `linkmotime' - md_just_mtime = {'mtime': self.FALL_OF_BERLIN_WALL, 'tahoe': {}} - d.addCallback(lambda _: self._set_metadata(u"loop", md_just_mtime)) - - # add mutable file at root - mutable = publish.MutableData("mutable file contents") - d.addCallback(lambda _: self.client.create_mutable_file(mutable)) - d.addCallback(lambda node: self.root.set_node(u"mutable", node)) - - # neither `mtime' nor `linkmotime' set - d.addCallback(lambda _: self._set_metadata(u"mutable", {})) - - return d - - def _compareDirLists(self, actual, 
expected): - actual_list = sorted(actual) - expected_list = sorted(expected) - - self.failUnlessReallyEqual(len(actual_list), len(expected_list), - "%r is wrong length, expecting %r" % ( - actual_list, expected_list)) - for (a, b) in zip(actual_list, expected_list): - (name, meta) = a - (expected_name, expected_meta) = b - self.failUnlessReallyEqual(name, expected_name) - self.failUnlessReallyEqual(meta, expected_meta) - - def test_list(self): - keys = ("size", "directory", "permissions", "hardlinks", "modified", - "owner", "group", "unexpected") - d = self._set_up("list") - - d.addCallback(lambda _: self._set_up_tree()) - d.addCallback(lambda _: self.handler.list("", keys=keys)) - - expected_root = [ - ('loop', - [0, True, ftpd.IntishPermissions(0o600), 1, self.FALL_OF_BERLIN_WALL, 'alice', 'alice', '??']), - ('immutable', - [23, False, ftpd.IntishPermissions(0o600), 1, self.TURN_OF_MILLENIUM, 'alice', 'alice', '??']), - ('mutable', - # timestamp should be 0 if no timestamp metadata is present - [0, False, ftpd.IntishPermissions(0o600), 1, 0, 'alice', 'alice', '??'])] - - d.addCallback(lambda root: self._compareDirLists(root, expected_root)) - - return d From 308dbba92403ef96c2de99c8f33d77c9c17806c4 Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Thu, 7 Jan 2021 09:20:39 -0500 Subject: [PATCH 104/213] Fix flake. 
--- src/allmydata/test/test_client.py | 1 - 1 file changed, 1 deletion(-) diff --git a/src/allmydata/test/test_client.py b/src/allmydata/test/test_client.py index 1ebb75b84..63a5ceaaa 100644 --- a/src/allmydata/test/test_client.py +++ b/src/allmydata/test/test_client.py @@ -51,7 +51,6 @@ from allmydata.nodemaker import ( NodeMaker, ) from allmydata.node import OldConfigError, UnescapedHashError, create_node_dir -from allmydata.frontends.auth import NeedRootcapLookupScheme from allmydata import client from allmydata.storage_client import ( StorageClientConfig, From 054af4b76ee918eb4eb27e775d53bb814f48ff0f Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Thu, 7 Jan 2021 11:25:26 -0500 Subject: [PATCH 105/213] Sketch of where SFTP setup needs to happen. --- integration/conftest.py | 12 ++++++++++++ integration/util.py | 21 +++++++++++---------- 2 files changed, 23 insertions(+), 10 deletions(-) diff --git a/integration/conftest.py b/integration/conftest.py index f37ec9353..cc4ffaa08 100644 --- a/integration/conftest.py +++ b/integration/conftest.py @@ -7,6 +7,7 @@ from os import mkdir, listdir, environ from os.path import join, exists from tempfile import mkdtemp, mktemp from functools import partial +from json import loads from foolscap.furl import ( decode_furl, @@ -37,6 +38,8 @@ from util import ( _tahoe_runner_optional_coverage, await_client_ready, TahoeProcess, + cli, + _run_node, ) @@ -350,6 +353,15 @@ def alice(reactor, temp_dir, introducer_furl, flog_gatherer, storage_nodes, requ ) ) await_client_ready(process) + cli(process, "create-alias", "test") + rwcap = loads(cli(process, "list-aliases", "--json"))["test"]["readwrite"] + # TODO at this point we need to: + # 1. configure sftpd + # 2. add an sftp access file with username, password, and rwcap + # 3. 
eventually, add sftp access with public key
+    process.kill()
+    pytest_twisted.blockon(_run_node(reactor, process.node_dir, request, None))
+    await_client_ready(process)
     return process
 
 
diff --git a/integration/util.py b/integration/util.py
index eed073225..60d96a214 100644
--- a/integration/util.py
+++ b/integration/util.py
@@ -5,6 +5,7 @@ from os import mkdir, environ
 from os.path import exists, join
 from six.moves import StringIO
 from functools import partial
+from subprocess import check_output
 
 from twisted.python.filepath import (
     FilePath,
@@ -175,6 +176,10 @@ class TahoeProcess(object):
             u"portnum",
         )
 
+    def kill(self):
+        """Kill the process, block until it's done."""
+        _cleanup_tahoe_process(self.transport, self.transport.exited)
+
     def __str__(self):
         return "<TahoeProcess in {}>".format(self._node_dir)
 
@@ -249,7 +254,7 @@ def _create_node(reactor, request, temp_dir, introducer_furl, flog_gatherer, nam
             '--helper',
         ]
     if not storage:
-        args.append('--no-storage')
+        args.append('--no-storage')
     args.append(node_dir)
 
     _tahoe_runner_optional_coverage(done_proto, reactor, request, args)
@@ -390,17 +395,13 @@ def await_file_vanishes(path, timeout=10):
     raise FileShouldVanishException(path, timeout)
 
 
-def cli(request, reactor, node_dir, *argv):
+def cli(node, *argv):
     """
-    Run a tahoe CLI subcommand for a given node, optionally running
-    under coverage if '--coverage' was supplied.
+    Run a tahoe CLI subcommand for a given node in a blocking manner, returning
+    the output.
     """
-    proto = _CollectOutputProtocol()
-    _tahoe_runner_optional_coverage(
-        proto, reactor, request,
-        ['--node-directory', node_dir] + list(argv),
-    )
-    return proto.done
+    arguments = ["tahoe", '--node-directory', node.node_dir]
+    return check_output(arguments + list(argv))
 
 
 def node_url(node_dir, uri_fragment):

From 3b29a5f70725299b0783dd0e5fb54227e868bd31 Mon Sep 17 00:00:00 2001
From: Itamar Turner-Trauring <itamar@itamarst.org>
Date: Thu, 7 Jan 2021 11:59:23 -0500
Subject: [PATCH 106/213] Work with new Unicode configs.
--- src/allmydata/frontends/sftpd.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/allmydata/frontends/sftpd.py b/src/allmydata/frontends/sftpd.py index b25ac0270..65a01b322 100644 --- a/src/allmydata/frontends/sftpd.py +++ b/src/allmydata/frontends/sftpd.py @@ -2012,5 +2012,5 @@ class SFTPServer(service.MultiService): f = SSHFactory() f.portal = p - s = strports.service(sftp_portstr, f) + s = strports.service(six.ensure_str(sftp_portstr), f) s.setServiceParent(self) From a536a1a970a6455dc17f89e248acacfd50cfd9a8 Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Thu, 7 Jan 2021 12:50:31 -0500 Subject: [PATCH 107/213] First passing end-to-end test of SFTP --- integration/conftest.py | 37 +++++++++++++++++++++++++++++++++---- integration/test_sftp.py | 37 +++++++++++++++++++++++++++++++++++++ setup.py | 1 + 3 files changed, 71 insertions(+), 4 deletions(-) create mode 100644 integration/test_sftp.py diff --git a/integration/conftest.py b/integration/conftest.py index cc4ffaa08..28e364cb6 100644 --- a/integration/conftest.py +++ b/integration/conftest.py @@ -8,6 +8,7 @@ from os.path import join, exists from tempfile import mkdtemp, mktemp from functools import partial from json import loads +from subprocess import check_call from foolscap.furl import ( decode_furl, @@ -342,6 +343,12 @@ def storage_nodes(reactor, temp_dir, introducer, introducer_furl, flog_gatherer, return nodes +def generate_ssh_key(path): + """Create a new SSH private/public key pair.""" + check_call(["ckeygen", "--type", "rsa", "--no-passphrase", "--bits", "512", + "--file", path]) + + @pytest.fixture(scope='session') @log_call(action_type=u"integration:alice", include_args=[], include_result=False) def alice(reactor, temp_dir, introducer_furl, flog_gatherer, storage_nodes, request): @@ -353,14 +360,36 @@ def alice(reactor, temp_dir, introducer_furl, flog_gatherer, storage_nodes, requ ) ) await_client_ready(process) + + # 1. 
Create a new RW directory cap: cli(process, "create-alias", "test") rwcap = loads(cli(process, "list-aliases", "--json"))["test"]["readwrite"] - # TODO at this point we need to: - # 1. configure sftpd - # 2. add an sftp access file with username, password, and rwcap - # 3. eventually, add sftp access with public key + + # 2. Enable SFTP on the node: + ssh_key_path = join(process.node_dir, "private", "ssh_host_rsa_key") + accounts_path = join(process.node_dir, "private", "accounts") + with open(join(process.node_dir, "tahoe.cfg"), "a") as f: + f.write("""\ +[sftpd] +enabled = true +port = tcp:8022:interface=127.0.0.1 +host_pubkey_file = {ssh_key_path}.pub +host_privkey_file = {ssh_key_path} +accounts.file = {accounts_path} +""".format(ssh_key_path=ssh_key_path, accounts_path=accounts_path)) + generate_ssh_key(ssh_key_path) + + # 3. Add a SFTP access file with username, password, and rwcap. + with open(accounts_path, "w") as f: + f.write("""\ +alice password {} +""".format(rwcap)) + # TODO add sftp access with public key + + # 4. Restart the node with new SFTP config. process.kill() pytest_twisted.blockon(_run_node(reactor, process.node_dir, request, None)) + await_client_ready(process) return process diff --git a/integration/test_sftp.py b/integration/test_sftp.py new file mode 100644 index 000000000..f308ddf5a --- /dev/null +++ b/integration/test_sftp.py @@ -0,0 +1,37 @@ +""" +It's possible to create/rename/delete files and directories in Tahoe-LAFS using +SFTP. 
+""" + +from __future__ import unicode_literals +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 + +from paramiko import SSHClient +from paramiko.client import AutoAddPolicy +from paramiko.sftp_client import SFTPClient + + +def test_read_write_files(alice): + """It's possible to upload and download files.""" + client = SSHClient() + client.set_missing_host_key_policy(AutoAddPolicy) + client.connect( + "localhost", username="alice", password="password", port=8022, + look_for_keys=False + ) + sftp = SFTPClient.from_transport(client.get_transport()) + f = sftp.file("myfile", "wb") + f.write(b"abc") + f.write(b"def") + f.close() + f = sftp.file("myfile", "rb") + assert f.read(4) == b"abcd" + assert f.read(2) == b"ef" + assert f.read(1) == b"" + f.close() diff --git a/setup.py b/setup.py index 0e5a43dba..32581e293 100644 --- a/setup.py +++ b/setup.py @@ -399,6 +399,7 @@ setup(name="tahoe-lafs", # also set in __init__.py "html5lib", "junitxml", "tenacity", + "paramiko", ] + tor_requires + i2p_requires, "tor": tor_requires, "i2p": i2p_requires, From b8879916b2618c810724693e4232d3aa996c7547 Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Thu, 7 Jan 2021 13:27:34 -0500 Subject: [PATCH 108/213] More SFTP integration tests. 
--- integration/test_sftp.py | 80 ++++++++++++++++++++++++++++++++++++---- 1 file changed, 73 insertions(+), 7 deletions(-) diff --git a/integration/test_sftp.py b/integration/test_sftp.py index f308ddf5a..eeeb8b0b7 100644 --- a/integration/test_sftp.py +++ b/integration/test_sftp.py @@ -12,20 +12,53 @@ from future.utils import PY2 if PY2: from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 +from posixpath import join +from stat import S_ISDIR + from paramiko import SSHClient from paramiko.client import AutoAddPolicy from paramiko.sftp_client import SFTPClient +from paramiko.ssh_exception import AuthenticationException + +import pytest + + +def connect_sftp(alice, username="alice", password="password"): + """Create an SFTP client.""" + client = SSHClient() + client.set_missing_host_key_policy(AutoAddPolicy) + client.connect( + "localhost", username=username, password=password, port=8022, + look_for_keys=False + ) + sftp = SFTPClient.from_transport(client.get_transport()) + + def rmdir(path, delete_root=True): + for f in sftp.listdir_attr(path=path): + childpath = join(path, f.filename) + if S_ISDIR(f.st_mode): + rmdir(childpath) + else: + sftp.remove(childpath) + if delete_root: + sftp.rmdir(path) + + # Delete any files left over from previous tests :( + rmdir("/", delete_root=False) + + return sftp + + +def test_bad_account_password(alice): + """Can't login with unknown username or wrong password.""" + for u, p in [("alice", "wrong"), ("someuser", "password")]: + with pytest.raises(AuthenticationException): + connect_sftp(alice, u, p) def test_read_write_files(alice): """It's possible to upload and download files.""" - client = SSHClient() - client.set_missing_host_key_policy(AutoAddPolicy) - client.connect( - "localhost", username="alice", password="password", port=8022, - look_for_keys=False - ) - sftp = 
SFTPClient.from_transport(client.get_transport()) + sftp = connect_sftp(alice) f = sftp.file("myfile", "wb") f.write(b"abc") f.write(b"def") @@ -35,3 +68,36 @@ def test_read_write_files(alice): assert f.read(2) == b"ef" assert f.read(1) == b"" f.close() + + +def test_directories(alice): + """ + It's possible to create, list directories, and create and remove files in + them. + """ + sftp = connect_sftp(alice) + assert sftp.listdir() == [] + + sftp.mkdir("childdir") + assert sftp.listdir() == ["childdir"] + + with sftp.file("myfile", "wb") as f: + f.write(b"abc") + assert sorted(sftp.listdir()) == ["childdir", "myfile"] + + sftp.chdir("childdir") + assert sftp.listdir() == [] + + with sftp.file("myfile2", "wb") as f: + f.write(b"def") + assert sftp.listdir() == ["myfile2"] + + sftp.chdir(None) # root + with sftp.file("childdir/myfile2", "rb") as f: + assert f.read() == b"def" + + sftp.remove("myfile") + assert sftp.listdir() == ["childdir"] + + sftp.rmdir("childdir") + assert sftp.listdir() == [] From 3764e3b6b1d75fbdba105b9cb2699c842b1c38b2 Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Thu, 7 Jan 2021 13:59:57 -0500 Subject: [PATCH 109/213] A (so far failing) test for SSH public key authentication. 
--- integration/conftest.py | 30 +++++++++++++------------ integration/test_sftp.py | 47 +++++++++++++++++++++++++++++++--------- integration/util.py | 8 ++++++- 3 files changed, 60 insertions(+), 25 deletions(-) diff --git a/integration/conftest.py b/integration/conftest.py index 28e364cb6..4ae22deee 100644 --- a/integration/conftest.py +++ b/integration/conftest.py @@ -8,7 +8,6 @@ from os.path import join, exists from tempfile import mkdtemp, mktemp from functools import partial from json import loads -from subprocess import check_call from foolscap.furl import ( decode_furl, @@ -41,6 +40,7 @@ from util import ( TahoeProcess, cli, _run_node, + generate_ssh_key ) @@ -343,12 +343,6 @@ def storage_nodes(reactor, temp_dir, introducer, introducer_furl, flog_gatherer, return nodes -def generate_ssh_key(path): - """Create a new SSH private/public key pair.""" - check_call(["ckeygen", "--type", "rsa", "--no-passphrase", "--bits", "512", - "--file", path]) - - @pytest.fixture(scope='session') @log_call(action_type=u"integration:alice", include_args=[], include_result=False) def alice(reactor, temp_dir, introducer_furl, flog_gatherer, storage_nodes, request): @@ -366,7 +360,7 @@ def alice(reactor, temp_dir, introducer_furl, flog_gatherer, storage_nodes, requ rwcap = loads(cli(process, "list-aliases", "--json"))["test"]["readwrite"] # 2. 
Enable SFTP on the node:
-    ssh_key_path = join(process.node_dir, "private", "ssh_host_rsa_key")
+    host_ssh_key_path = join(process.node_dir, "private", "ssh_host_rsa_key")
     accounts_path = join(process.node_dir, "private", "accounts")
     with open(join(process.node_dir, "tahoe.cfg"), "a") as f:
         f.write("""\
@@ -376,15 +370,23 @@ port = tcp:8022:interface=127.0.0.1
 host_pubkey_file = {ssh_key_path}.pub
 host_privkey_file = {ssh_key_path}
 accounts.file = {accounts_path}
-""".format(ssh_key_path=ssh_key_path, accounts_path=accounts_path))
-    generate_ssh_key(ssh_key_path)
+""".format(ssh_key_path=host_ssh_key_path, accounts_path=accounts_path))
+    generate_ssh_key(host_ssh_key_path)
 
-    # 3. Add a SFTP access file with username, password, and rwcap.
+    # 3. Add a SFTP access file with username/password and SSH key auth.
+
+    # The client SSH key path is typically going to be somewhere else (~/.ssh,
+    # typically), but for convenience sake for testing we'll put it inside node.
+    client_ssh_key_path = join(process.node_dir, "private", "ssh_client_rsa_key")
+    generate_ssh_key(client_ssh_key_path)
+    # Pub key format is "ssh-rsa <key> <comment>". We want the key.
+    ssh_public_key = open(client_ssh_key_path + ".pub").read().strip().split()[1]
     with open(accounts_path, "w") as f:
         f.write("""\
-alice password {}
-""".format(rwcap))
-    # TODO add sftp access with public key
+alice password {rwcap}
+
+alice2 ssh-rsa {ssh_public_key} {rwcap}
+""".format(rwcap=rwcap, ssh_public_key=ssh_public_key))
 
     # 4. Restart the node with new SFTP config.
process.kill() diff --git a/integration/test_sftp.py b/integration/test_sftp.py index eeeb8b0b7..f9a7830ac 100644 --- a/integration/test_sftp.py +++ b/integration/test_sftp.py @@ -23,14 +23,11 @@ from paramiko.ssh_exception import AuthenticationException import pytest -def connect_sftp(alice, username="alice", password="password"): +def connect_sftp(connect_args={"username": "alice", "password": "password"}): """Create an SFTP client.""" client = SSHClient() client.set_missing_host_key_policy(AutoAddPolicy) - client.connect( - "localhost", username=username, password=password, port=8022, - look_for_keys=False - ) + client.connect("localhost", port=8022, look_for_keys=False, **connect_args) sftp = SFTPClient.from_transport(client.get_transport()) def rmdir(path, delete_root=True): @@ -49,16 +46,30 @@ def connect_sftp(alice, username="alice", password="password"): return sftp -def test_bad_account_password(alice): - """Can't login with unknown username or wrong password.""" +def test_bad_account_password_ssh_key(alice): + """ + Can't login with unknown username, wrong password, or wrong SSH pub key. + """ for u, p in [("alice", "wrong"), ("someuser", "password")]: with pytest.raises(AuthenticationException): - connect_sftp(alice, u, p) + connect_sftp(connect_args={ + "username": u, "password": p, + }) + # TODO bad pubkey + + +def test_ssh_key_auth(alice): + """It's possible to login authenticating with SSH public key.""" + key_filename = join(alice.node_dir, "private", "ssh_client_rsa_key") + sftp = connect_sftp(connect_args={ + "username": "alice2", "key_filename": key_filename + }) + assert sftp.listdir() == [] def test_read_write_files(alice): """It's possible to upload and download files.""" - sftp = connect_sftp(alice) + sftp = connect_sftp() f = sftp.file("myfile", "wb") f.write(b"abc") f.write(b"def") @@ -75,7 +86,7 @@ def test_directories(alice): It's possible to create, list directories, and create and remove files in them. 
""" - sftp = connect_sftp(alice) + sftp = connect_sftp() assert sftp.listdir() == [] sftp.mkdir("childdir") @@ -101,3 +112,19 @@ def test_directories(alice): sftp.rmdir("childdir") assert sftp.listdir() == [] + + +def test_rename(alice): + """Directories and files can be renamed.""" + sftp = connect_sftp() + sftp.mkdir("dir") + + filepath = join("dir", "file") + with sftp.file(filepath, "wb") as f: + f.write(b"abc") + + sftp.rename(filepath, join("dir", "file2")) + sftp.rename("dir", "dir2") + + with sftp.file(join("dir2", "file2"), "rb") as f: + assert f.read() == b"abc" diff --git a/integration/util.py b/integration/util.py index 60d96a214..0e8fea2be 100644 --- a/integration/util.py +++ b/integration/util.py @@ -5,7 +5,7 @@ from os import mkdir, environ from os.path import exists, join from six.moves import StringIO from functools import partial -from subprocess import check_output +from subprocess import check_output, check_call from twisted.python.filepath import ( FilePath, @@ -506,3 +506,9 @@ def await_client_ready(tahoe, timeout=10, liveness=60*2): tahoe, ) ) + + +def generate_ssh_key(path): + """Create a new SSH private/public key pair.""" + check_call(["ckeygen", "--type", "rsa", "--no-passphrase", "--bits", "512", + "--file", path]) From 7a15f7e11dd1e68e5e9d0b7e2e350b20a83752fa Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Fri, 8 Jan 2021 13:32:11 -0500 Subject: [PATCH 110/213] Switch to modern (circa 2014!) Conch API. 
--- src/allmydata/frontends/auth.py | 37 +++++++-------------------------- 1 file changed, 8 insertions(+), 29 deletions(-) diff --git a/src/allmydata/frontends/auth.py b/src/allmydata/frontends/auth.py index 1bd481321..943db27c8 100644 --- a/src/allmydata/frontends/auth.py +++ b/src/allmydata/frontends/auth.py @@ -6,6 +6,7 @@ from twisted.internet import defer from twisted.cred import error, checkers, credentials from twisted.conch import error as conch_error from twisted.conch.ssh import keys +from twisted.conch.checkers import SSHPublicKeyChecker, InMemorySSHKeyDB from allmydata.util import base32 from allmydata.util.fileutil import abspath_expanduser_unicode @@ -29,7 +30,7 @@ class AccountFileChecker(object): def __init__(self, client, accountfile): self.client = client self.passwords = {} - self.pubkeys = {} + pubkeys = {} self.rootcaps = {} with open(abspath_expanduser_unicode(accountfile), "r") as f: for line in f: @@ -40,12 +41,14 @@ class AccountFileChecker(object): if passwd.startswith("ssh-"): bits = rest.split() keystring = " ".join([passwd] + bits[:-1]) + key = keys.Key.fromString(keystring) rootcap = bits[-1] - self.pubkeys[name] = keystring + pubkeys[name] = [key] else: self.passwords[name] = passwd rootcap = rest self.rootcaps[name] = rootcap + self._pubkeychecker = SSHPublicKeyChecker(InMemorySSHKeyDB(pubkeys)) def _avatarId(self, username): return FTPAvatarID(username, self.rootcaps[username]) @@ -57,11 +60,9 @@ class AccountFileChecker(object): def requestAvatarId(self, creds): if credentials.ISSHPrivateKey.providedBy(creds): - # Re-using twisted.conch.checkers.SSHPublicKeyChecker here, rather - # than re-implementing all of the ISSHPrivateKey checking logic, - # would be better. That would require Twisted 14.1.0 or newer, - # though. 
- return self._checkKey(creds) + d = defer.maybeDeferred(self._pubkeychecker.requestAvatarId, creds) + d.addCallback(self._avatarId) + return d elif credentials.IUsernameHashedPassword.providedBy(creds): return self._checkPassword(creds) elif credentials.IUsernamePassword.providedBy(creds): @@ -86,28 +87,6 @@ class AccountFileChecker(object): d.addCallback(self._cbPasswordMatch, str(creds.username)) return d - def _checkKey(self, creds): - """ - Determine whether some key-based credentials correctly authenticates a - user. - - Returns a Deferred that fires with the username if so or with an - UnauthorizedLogin failure otherwise. - """ - - # Is the public key indicated by the given credentials allowed to - # authenticate the username in those credentials? - if creds.blob == self.pubkeys.get(creds.username): - if creds.signature is None: - return defer.fail(conch_error.ValidPublicKey()) - - # Is the signature in the given credentials the correct - # signature for the data in those credentials? - key = keys.Key.fromString(creds.blob) - if key.verify(creds.signature, creds.sigData): - return defer.succeed(self._avatarId(creds.username)) - - return defer.fail(error.UnauthorizedLogin()) @implementer(checkers.ICredentialsChecker) class AccountURLChecker(object): From 2589737e1e39b93f516cae84624ac1cc1cbf4d5a Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Fri, 8 Jan 2021 13:33:22 -0500 Subject: [PATCH 111/213] Public key auth test passes. 
--- integration/test_sftp.py | 8 +++++--- integration/util.py | 4 ++-- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/integration/test_sftp.py b/integration/test_sftp.py index f9a7830ac..51d3f15c3 100644 --- a/integration/test_sftp.py +++ b/integration/test_sftp.py @@ -19,6 +19,7 @@ from paramiko import SSHClient from paramiko.client import AutoAddPolicy from paramiko.sftp_client import SFTPClient from paramiko.ssh_exception import AuthenticationException +from paramiko.rsakey import RSAKey import pytest @@ -27,7 +28,8 @@ def connect_sftp(connect_args={"username": "alice", "password": "password"}): """Create an SFTP client.""" client = SSHClient() client.set_missing_host_key_policy(AutoAddPolicy) - client.connect("localhost", port=8022, look_for_keys=False, **connect_args) + client.connect("localhost", port=8022, look_for_keys=False, + allow_agent=False, **connect_args) sftp = SFTPClient.from_transport(client.get_transport()) def rmdir(path, delete_root=True): @@ -60,9 +62,9 @@ def test_bad_account_password_ssh_key(alice): def test_ssh_key_auth(alice): """It's possible to login authenticating with SSH public key.""" - key_filename = join(alice.node_dir, "private", "ssh_client_rsa_key") + key = RSAKey(filename=join(alice.node_dir, "private", "ssh_client_rsa_key")) sftp = connect_sftp(connect_args={ - "username": "alice2", "key_filename": key_filename + "username": "alice2", "pkey": key }) assert sftp.listdir() == [] diff --git a/integration/util.py b/integration/util.py index 0e8fea2be..d4c09d073 100644 --- a/integration/util.py +++ b/integration/util.py @@ -510,5 +510,5 @@ def await_client_ready(tahoe, timeout=10, liveness=60*2): def generate_ssh_key(path): """Create a new SSH private/public key pair.""" - check_call(["ckeygen", "--type", "rsa", "--no-passphrase", "--bits", "512", - "--file", path]) + check_call(["ckeygen", "--type", "rsa", "--no-passphrase", "--bits", "2048", + "--file", path, "--private-key-subtype", "v1"]) From 
57282d243111043aaa392f8a2edb5daa1c0749ea Mon Sep 17 00:00:00 2001 From: Sajith Sasidharan Date: Mon, 4 Jan 2021 20:38:44 -0500 Subject: [PATCH 112/213] Include contribution guidelines for real This warning should go away with this commit: WARNING: toctree contains reference to nonexisting document u'.github/CONTRIBUTING' --- docs/contributing.rst | 1 + docs/index.rst | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 docs/contributing.rst diff --git a/docs/contributing.rst b/docs/contributing.rst new file mode 100644 index 000000000..15e1b6432 --- /dev/null +++ b/docs/contributing.rst @@ -0,0 +1 @@ +.. include:: ../.github/CONTRIBUTING.rst diff --git a/docs/index.rst b/docs/index.rst index 3d0a41302..e5db73db0 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -23,7 +23,7 @@ Contents: frontends/download-status known_issues - ../.github/CONTRIBUTING + contributing CODE_OF_CONDUCT servers From 97454242354072619988ead4067df537952b8b08 Mon Sep 17 00:00:00 2001 From: Sajith Sasidharan Date: Mon, 4 Jan 2021 20:44:13 -0500 Subject: [PATCH 113/213] Include release checklist --- docs/index.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/index.rst b/docs/index.rst index e5db73db0..60a3aa5d4 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -25,6 +25,7 @@ Contents: known_issues contributing CODE_OF_CONDUCT + release-checklist servers helper From d0859b11017a7e049df94b509dda123db06828d0 Mon Sep 17 00:00:00 2001 From: Sajith Sasidharan Date: Mon, 4 Jan 2021 21:04:13 -0500 Subject: [PATCH 114/213] Fix indentation in webapi docs Warning was: tahoe-lafs/docs/frontends/webapi.rst:2035: WARNING: Unexpected indentation. 
--- docs/frontends/webapi.rst | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/docs/frontends/webapi.rst b/docs/frontends/webapi.rst index 99fa44979..e00445fbf 100644 --- a/docs/frontends/webapi.rst +++ b/docs/frontends/webapi.rst @@ -2032,10 +2032,11 @@ potential for surprises when the file store structure is changed. Tahoe-LAFS provides a mutable file store, but the ways that the store can change are limited. The only things that can change are: - * the mapping from child names to child objects inside mutable directories - (by adding a new child, removing an existing child, or changing an - existing child to point to a different object) - * the contents of mutable files + +* the mapping from child names to child objects inside mutable directories + (by adding a new child, removing an existing child, or changing an + existing child to point to a different object) +* the contents of mutable files Obviously if you query for information about the file store and then act to change it (such as by getting a listing of the contents of a mutable From 4e56cc6d379c35b8a2d479b665b35756279be41b Mon Sep 17 00:00:00 2001 From: Sajith Sasidharan Date: Mon, 4 Jan 2021 21:19:40 -0500 Subject: [PATCH 115/213] Fix nested bullet lists in release checklist doc --- docs/release-checklist.rst | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/docs/release-checklist.rst b/docs/release-checklist.rst index be32aea6c..da692ef61 100644 --- a/docs/release-checklist.rst +++ b/docs/release-checklist.rst @@ -40,23 +40,31 @@ Create Branch and Apply Updates - Create a branch for release-candidates (e.g. 
`XXXX.release-1.15.0.rc0`) - run `tox -e news` to produce a new NEWS.txt file (this does a commit) - create the news for the release + - newsfragments/.minor - commit it + - manually fix NEWS.txt + - proper title for latest release ("Release 1.15.0" instead of "Release ...post1432") - double-check date (maybe release will be in the future) - spot-check the release notes (these come from the newsfragments files though so don't do heavy editing) - commit these changes + - update "relnotes.txt" + - update all mentions of 1.14.0 -> 1.15.0 - update "previous release" statement and date - summarize major changes - commit it + - update "CREDITS" + - are there any new contributors in this release? - one way: git log release-1.14.0.. | grep Author | sort | uniq - commit it + - update "docs/known_issues.rst" if appropriate - update "docs/INSTALL.rst" references to the new release - Push the branch to github @@ -82,21 +90,32 @@ they will need to evaluate which contributors' signatures they trust. - (all steps above are completed) - sign the release + - git tag -s -u 0xE34E62D06D0E69CFCA4179FFBDE0D31D68666A7A -m "release Tahoe-LAFS-1.15.0rc0" tahoe-lafs-1.15.0rc0 - (replace the key-id above with your own) + - build all code locally - these should all pass: + - tox -e py27,codechecks,docs,integration + - these can fail (ideally they should not of course): + - tox -e deprecations,upcoming-deprecations + - build tarballs + - tox -e tarballs - confirm it at least exists: - ls dist/ | grep 1.15.0rc0 + - inspect and test the tarballs + - install each in a fresh virtualenv - run `tahoe` command + - when satisfied, sign the tarballs: + - gpg --pinentry=loopback --armor --sign dist/tahoe_lafs-1.15.0rc0-py2-none-any.whl - gpg --pinentry=loopback --armor --sign dist/tahoe_lafs-1.15.0rc0.tar.bz2 - gpg --pinentry=loopback --armor --sign dist/tahoe_lafs-1.15.0rc0.tar.gz @@ -129,6 +148,7 @@ need to be uploaded to https://tahoe-lafs.org in `~source/downloads` https://tahoe-lafs.org/downloads/ 
on the Web. - scp dist/*1.15.0* username@tahoe-lafs.org:/home/source/downloads - the following developers have access to do this: + - exarkun - meejah - warner @@ -139,6 +159,7 @@ uploaded to PyPI as well. - how to do this? - (original guide says only "twine upload dist/*") - the following developers have access to do this: + - warner - exarkun (partial?) - meejah (partial?) From 1063ee1c1f222276341b4596161455f4f9a7668b Mon Sep 17 00:00:00 2001 From: Sajith Sasidharan Date: Mon, 4 Jan 2021 21:21:43 -0500 Subject: [PATCH 116/213] Fix warning in release checklist doc Fix "WARNING: Inline emphasis start-string without end-string." --- docs/release-checklist.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/release-checklist.rst b/docs/release-checklist.rst index da692ef61..fabb69fc7 100644 --- a/docs/release-checklist.rst +++ b/docs/release-checklist.rst @@ -157,7 +157,7 @@ For the actual release, the tarball and signature files need to be uploaded to PyPI as well. - how to do this? -- (original guide says only "twine upload dist/*") +- (original guide says only `twine upload dist/*`) - the following developers have access to do this: - warner From e986e864314589c4c8dbca6395cfb86b3a089a19 Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Fri, 8 Jan 2021 13:41:04 -0500 Subject: [PATCH 117/213] Test failure to auth. 
--- integration/test_sftp.py | 23 +++++++++++++++++++++-- 1 file changed, 21 insertions(+), 2 deletions(-) diff --git a/integration/test_sftp.py b/integration/test_sftp.py index 51d3f15c3..f1cf92eab 100644 --- a/integration/test_sftp.py +++ b/integration/test_sftp.py @@ -23,6 +23,8 @@ from paramiko.rsakey import RSAKey import pytest +from .util import generate_ssh_key + def connect_sftp(connect_args={"username": "alice", "password": "password"}): """Create an SFTP client.""" @@ -48,16 +50,33 @@ def connect_sftp(connect_args={"username": "alice", "password": "password"}): return sftp -def test_bad_account_password_ssh_key(alice): +def test_bad_account_password_ssh_key(alice, tmpdir): """ Can't login with unknown username, wrong password, or wrong SSH pub key. """ + # Wrong password, wrong username: for u, p in [("alice", "wrong"), ("someuser", "password")]: with pytest.raises(AuthenticationException): connect_sftp(connect_args={ "username": u, "password": p, }) - # TODO bad pubkey + + another_key = join(str(tmpdir), "ssh_key") + generate_ssh_key(another_key) + good_key = RSAKey(filename=join(alice.node_dir, "private", "ssh_client_rsa_key")) + bad_key = RSAKey(filename=another_key) + + # Wrong key: + with pytest.raises(AuthenticationException): + connect_sftp(connect_args={ + "username": "alice2", "pkey": bad_key, + }) + + # Wrong username: + with pytest.raises(AuthenticationException): + connect_sftp(connect_args={ + "username": "someoneelse", "pkey": good_key, + }) def test_ssh_key_auth(alice): From f71dcfe9fcb13d1de226a79f9aaf3fe79b87c639 Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Fri, 8 Jan 2021 13:42:10 -0500 Subject: [PATCH 118/213] Lint. 
--- src/allmydata/frontends/auth.py | 1 - 1 file changed, 1 deletion(-) diff --git a/src/allmydata/frontends/auth.py b/src/allmydata/frontends/auth.py index 943db27c8..de406d604 100644 --- a/src/allmydata/frontends/auth.py +++ b/src/allmydata/frontends/auth.py @@ -4,7 +4,6 @@ from zope.interface import implementer from twisted.web.client import getPage from twisted.internet import defer from twisted.cred import error, checkers, credentials -from twisted.conch import error as conch_error from twisted.conch.ssh import keys from twisted.conch.checkers import SSHPublicKeyChecker, InMemorySSHKeyDB From 6f3b3d07fdb76f53e1a64555481deaf38d40c76f Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Fri, 8 Jan 2021 13:43:23 -0500 Subject: [PATCH 119/213] News file. --- newsfragments/3584.bugfix | 1 + 1 file changed, 1 insertion(+) create mode 100644 newsfragments/3584.bugfix diff --git a/newsfragments/3584.bugfix b/newsfragments/3584.bugfix new file mode 100644 index 000000000..73650f40b --- /dev/null +++ b/newsfragments/3584.bugfix @@ -0,0 +1 @@ +SFTP public key auth likely works better, and SFTP in general was broken in the prerelease. \ No newline at end of file From f1cf9483566ae719d605df7754630e68fdb6f77f Mon Sep 17 00:00:00 2001 From: Sajith Sasidharan Date: Fri, 8 Jan 2021 13:45:16 -0500 Subject: [PATCH 120/213] Enable markdown support in Sphinx configuration The upstream Contributor Covenant is a markdown document. Since we prefer to keep things close to the original, enabling markdown support to render that document seems like a good idea. --- docs/conf.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 34ddd1bd4..612c324a3 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -28,7 +28,7 @@ import os # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. 
-extensions = [] +extensions = ['recommonmark'] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] @@ -36,7 +36,7 @@ templates_path = ['_templates'] # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # source_suffix = ['.rst', '.md'] -source_suffix = '.rst' +source_suffix = ['.rst', '.md'] # The encoding of source files. #source_encoding = 'utf-8-sig' From c1bc69e1f71db11aad0ef7cb6f54fe1433fce0e8 Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Fri, 8 Jan 2021 13:47:32 -0500 Subject: [PATCH 121/213] Remove two more references to FTP. --- setup.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/setup.py b/setup.py index 0e5a43dba..205c00ae6 100644 --- a/setup.py +++ b/setup.py @@ -63,12 +63,8 @@ install_requires = [ # version of cryptography will *really* be installed. "cryptography >= 2.6", - # * We need Twisted 10.1.0 for the FTP frontend in order for - # Twisted's FTP server to support asynchronous close. # * The SFTP frontend depends on Twisted 11.0.0 to fix the SSH server # rekeying bug - # * The FTP frontend depends on Twisted >= 11.1.0 for - # filepath.Permissions # * The SFTP frontend and manhole depend on the conch extra. However, we # can't explicitly declare that without an undesirable dependency on gmpy, # as explained in ticket #2740. From 65926d6e708b967759b1ad50999bb84ff05c13d2 Mon Sep 17 00:00:00 2001 From: Sajith Sasidharan Date: Fri, 8 Jan 2021 13:48:04 -0500 Subject: [PATCH 122/213] Install recommonmark in tox "docs" environment We're going to need markdown support to render contributor covenant. 
--- tox.ini | 1 + 1 file changed, 1 insertion(+) diff --git a/tox.ini b/tox.ini index c61331885..d32f90b15 100644 --- a/tox.ini +++ b/tox.ini @@ -211,6 +211,7 @@ commands = deps = sphinx docutils==0.12 + recommonmark # normal install is not needed for docs, and slows things down skip_install = True commands = From f682d946d0ffffaf1f05d1b9157a83faea84ee5d Mon Sep 17 00:00:00 2001 From: Sajith Sasidharan Date: Fri, 8 Jan 2021 13:48:23 -0500 Subject: [PATCH 123/213] Rename docs/README.md After enabling markdown extension in Sphinx configuration, there's a warning about docs/README.md not being included in the toc tree. Since docs/README.md is not to be included in the final rendered document, we'll keep it as a .txt document. --- docs/{README.md => README.txt} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename docs/{README.md => README.txt} (100%) diff --git a/docs/README.md b/docs/README.txt similarity index 100% rename from docs/README.md rename to docs/README.txt From 11abf5339ff833efd9a51637746064ccc498b546 Mon Sep 17 00:00:00 2001 From: Lukas Pirl Date: Sat, 9 Jan 2021 14:41:03 +0100 Subject: [PATCH 124/213] credits to Lukas Pirl (buildslaves) --- CREDITS | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/CREDITS b/CREDITS index 1394d87d8..07ac1e476 100644 --- a/CREDITS +++ b/CREDITS @@ -206,4 +206,9 @@ D: various bug-fixes and features N: Viktoriia Savchuk W: https://twitter.com/viktoriiasvchk -D: Developer community focused improvements on the README file. \ No newline at end of file +D: Developer community focused improvements on the README file. 
+ +N: Lukas Pirl +E: tahoe@lukas-pirl.de +W: http://lukas-pirl.de +D: Buildslaves (Debian, Fedora, CentOS; 2016-2021) From b00bf94643c25309ce3b52d99a4bab54be1aac5c Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Sat, 9 Jan 2021 09:06:19 -0500 Subject: [PATCH 125/213] news fragment --- newsfragments/3587.minor | 1 + 1 file changed, 1 insertion(+) create mode 100644 newsfragments/3587.minor diff --git a/newsfragments/3587.minor b/newsfragments/3587.minor new file mode 100644 index 000000000..8b1378917 --- /dev/null +++ b/newsfragments/3587.minor @@ -0,0 +1 @@ + From 6b2a999f8d28d467320b6fd0384ff23751ae332d Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Mon, 11 Jan 2021 14:02:45 -0500 Subject: [PATCH 126/213] Replace ckeygen with Paramiko library calls, since ckeygen doesn't work on Windows. --- integration/util.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/integration/util.py b/integration/util.py index d4c09d073..39ec36a38 100644 --- a/integration/util.py +++ b/integration/util.py @@ -16,6 +16,8 @@ from twisted.internet.error import ProcessExitedAlready, ProcessDone import requests +from paramiko.rsakey import RSAKey + from allmydata.util.configutil import ( get_config, set_config, @@ -510,5 +512,7 @@ def await_client_ready(tahoe, timeout=10, liveness=60*2): def generate_ssh_key(path): """Create a new SSH private/public key pair.""" - check_call(["ckeygen", "--type", "rsa", "--no-passphrase", "--bits", "2048", - "--file", path, "--private-key-subtype", "v1"]) + key = RSAKey.generate(2048) + key.write_private_key_file(path) + with open(path + ".pub", "wb") as f: + f.write(b"%s %s" % (key.get_name(), key.get_base64())) From 8f1864842fad2524450dddcf48c8b0427b3061f9 Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Mon, 11 Jan 2021 15:23:17 -0500 Subject: [PATCH 127/213] Fix integration tests. 
--- src/allmydata/frontends/sftpd.py | 121 +++++++++++++++++-------------- 1 file changed, 66 insertions(+), 55 deletions(-) diff --git a/src/allmydata/frontends/sftpd.py b/src/allmydata/frontends/sftpd.py index 135ba4fc9..895c93f85 100644 --- a/src/allmydata/frontends/sftpd.py +++ b/src/allmydata/frontends/sftpd.py @@ -53,6 +53,17 @@ from allmydata.util.log import NOISY, OPERATIONAL, WEIRD, \ if six.PY3: long = int + +def createSFTPError(errorCode, errorMessage): + """ + SFTPError that can accept both Unicode and bytes. + + Twisted expects _native_ strings for the SFTPError message, but we often do + Unicode by default even on Python 2. + """ + return SFTPError(errorCode, six.ensure_str(errorMessage)) + + def eventually_callback(d): return lambda res: eventually(d.callback, res) @@ -100,10 +111,10 @@ def _convert_error(res, request): raise err if err.check(NoSuchChildError): childname = _utf8(err.value.args[0]) - raise SFTPError(FX_NO_SUCH_FILE, childname) + raise createSFTPError(FX_NO_SUCH_FILE, childname) if err.check(NotWriteableError) or err.check(ChildOfWrongTypeError): msg = _utf8(err.value.args[0]) - raise SFTPError(FX_PERMISSION_DENIED, msg) + raise createSFTPError(FX_PERMISSION_DENIED, msg) if err.check(ExistingChildError): # Versions of SFTP after v3 (which is what twisted.conch implements) # define a specific error code for this case: FX_FILE_ALREADY_EXISTS. @@ -112,16 +123,16 @@ def _convert_error(res, request): # to translate the error to the equivalent of POSIX EEXIST, which is # necessary for some picky programs (such as gedit). 
msg = _utf8(err.value.args[0]) - raise SFTPError(FX_FAILURE, msg) + raise createSFTPError(FX_FAILURE, msg) if err.check(NotImplementedError): - raise SFTPError(FX_OP_UNSUPPORTED, _utf8(err.value)) + raise createSFTPError(FX_OP_UNSUPPORTED, _utf8(err.value)) if err.check(EOFError): - raise SFTPError(FX_EOF, "end of file reached") + raise createSFTPError(FX_EOF, "end of file reached") if err.check(defer.FirstError): _convert_error(err.value.subFailure, request) # We assume that the error message is not anonymity-sensitive. - raise SFTPError(FX_FAILURE, _utf8(err.value)) + raise createSFTPError(FX_FAILURE, _utf8(err.value)) def _repr_flags(flags): @@ -424,7 +435,7 @@ class OverwriteableFileConsumer(PrefixingLogMixin): return if noisy: self.log("MILESTONE %r %r" % (next, d), level=NOISY) heapq.heappop(self.milestones) - eventually_callback(d)("reached") + eventually_callback(d)(b"reached") if milestone >= self.download_size: self.download_done(b"reached download size") @@ -433,7 +444,7 @@ class OverwriteableFileConsumer(PrefixingLogMixin): if noisy: self.log(".overwrite(%r, )" % (offset, len(data)), level=NOISY) if self.is_closed: self.log("overwrite called on a closed OverwriteableFileConsumer", level=WEIRD) - raise SFTPError(FX_BAD_MESSAGE, "cannot write to a closed file handle") + raise createSFTPError(FX_BAD_MESSAGE, "cannot write to a closed file handle") if offset > self.current_size: # Normally writing at an offset beyond the current end-of-file @@ -464,7 +475,7 @@ class OverwriteableFileConsumer(PrefixingLogMixin): if noisy: self.log(".read(%r, %r), current_size = %r" % (offset, length, self.current_size), level=NOISY) if self.is_closed: self.log("read called on a closed OverwriteableFileConsumer", level=WEIRD) - raise SFTPError(FX_BAD_MESSAGE, "cannot read from a closed file handle") + raise createSFTPError(FX_BAD_MESSAGE, "cannot read from a closed file handle") # Note that the overwrite method is synchronous. When a write request is processed # (e.g. 
a writeChunk request on the async queue of GeneralSFTPFile), overwrite will @@ -586,7 +597,7 @@ class ShortReadOnlySFTPFile(PrefixingLogMixin): self.log(request, level=OPERATIONAL) if self.closed: - def _closed(): raise SFTPError(FX_BAD_MESSAGE, "cannot read from a closed file handle") + def _closed(): raise createSFTPError(FX_BAD_MESSAGE, "cannot read from a closed file handle") return defer.execute(_closed) d = defer.Deferred() @@ -603,7 +614,7 @@ class ShortReadOnlySFTPFile(PrefixingLogMixin): # i.e. we respond with an EOF error iff offset is already at EOF. if offset >= len(data): - eventually_errback(d)(Failure(SFTPError(FX_EOF, "read at or past end of file"))) + eventually_errback(d)(Failure(createSFTPError(FX_EOF, "read at or past end of file"))) else: eventually_callback(d)(data[offset:offset+length]) # truncated if offset+length > len(data) return data @@ -614,7 +625,7 @@ class ShortReadOnlySFTPFile(PrefixingLogMixin): def writeChunk(self, offset, data): self.log(".writeChunk(%r, ) denied" % (offset, len(data)), level=OPERATIONAL) - def _denied(): raise SFTPError(FX_PERMISSION_DENIED, "file handle was not opened for writing") + def _denied(): raise createSFTPError(FX_PERMISSION_DENIED, "file handle was not opened for writing") return defer.execute(_denied) def close(self): @@ -628,7 +639,7 @@ class ShortReadOnlySFTPFile(PrefixingLogMixin): self.log(request, level=OPERATIONAL) if self.closed: - def _closed(): raise SFTPError(FX_BAD_MESSAGE, "cannot get attributes for a closed file handle") + def _closed(): raise createSFTPError(FX_BAD_MESSAGE, "cannot get attributes for a closed file handle") return defer.execute(_closed) d = defer.execute(_populate_attrs, self.filenode, self.metadata) @@ -637,7 +648,7 @@ class ShortReadOnlySFTPFile(PrefixingLogMixin): def setAttrs(self, attrs): self.log(".setAttrs(%r) denied" % (attrs,), level=OPERATIONAL) - def _denied(): raise SFTPError(FX_PERMISSION_DENIED, "file handle was not opened for writing") + def _denied(): 
raise createSFTPError(FX_PERMISSION_DENIED, "file handle was not opened for writing") return defer.execute(_denied) @@ -760,11 +771,11 @@ class GeneralSFTPFile(PrefixingLogMixin): self.log(request, level=OPERATIONAL) if not (self.flags & FXF_READ): - def _denied(): raise SFTPError(FX_PERMISSION_DENIED, "file handle was not opened for reading") + def _denied(): raise createSFTPError(FX_PERMISSION_DENIED, "file handle was not opened for reading") return defer.execute(_denied) if self.closed: - def _closed(): raise SFTPError(FX_BAD_MESSAGE, "cannot read from a closed file handle") + def _closed(): raise createSFTPError(FX_BAD_MESSAGE, "cannot read from a closed file handle") return defer.execute(_closed) d = defer.Deferred() @@ -782,11 +793,11 @@ class GeneralSFTPFile(PrefixingLogMixin): self.log(".writeChunk(%r, )" % (offset, len(data)), level=OPERATIONAL) if not (self.flags & FXF_WRITE): - def _denied(): raise SFTPError(FX_PERMISSION_DENIED, "file handle was not opened for writing") + def _denied(): raise createSFTPError(FX_PERMISSION_DENIED, "file handle was not opened for writing") return defer.execute(_denied) if self.closed: - def _closed(): raise SFTPError(FX_BAD_MESSAGE, "cannot write to a closed file handle") + def _closed(): raise createSFTPError(FX_BAD_MESSAGE, "cannot write to a closed file handle") return defer.execute(_closed) self.has_changed = True @@ -902,7 +913,7 @@ class GeneralSFTPFile(PrefixingLogMixin): self.log(request, level=OPERATIONAL) if self.closed: - def _closed(): raise SFTPError(FX_BAD_MESSAGE, "cannot get attributes for a closed file handle") + def _closed(): raise createSFTPError(FX_BAD_MESSAGE, "cannot get attributes for a closed file handle") return defer.execute(_closed) # Optimization for read-only handles, when we already know the metadata. 
@@ -926,16 +937,16 @@ class GeneralSFTPFile(PrefixingLogMixin): self.log(request, level=OPERATIONAL) if not (self.flags & FXF_WRITE): - def _denied(): raise SFTPError(FX_PERMISSION_DENIED, "file handle was not opened for writing") + def _denied(): raise createSFTPError(FX_PERMISSION_DENIED, "file handle was not opened for writing") return defer.execute(_denied) if self.closed: - def _closed(): raise SFTPError(FX_BAD_MESSAGE, "cannot set attributes for a closed file handle") + def _closed(): raise createSFTPError(FX_BAD_MESSAGE, "cannot set attributes for a closed file handle") return defer.execute(_closed) size = attrs.get("size", None) if size is not None and (not isinstance(size, int) or size < 0): - def _bad(): raise SFTPError(FX_BAD_MESSAGE, "new size is not a valid nonnegative integer") + def _bad(): raise createSFTPError(FX_BAD_MESSAGE, "new size is not a valid nonnegative integer") return defer.execute(_bad) d = defer.Deferred() @@ -1127,7 +1138,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): # does not mean that they were not committed; it is used to determine whether # a NoSuchChildError from the rename attempt should be suppressed). If overwrite # is False and there were already heisenfiles at the destination userpath or - # direntry, we return a Deferred that fails with SFTPError(FX_PERMISSION_DENIED). + # direntry, we return a Deferred that fails with createSFTPError(FX_PERMISSION_DENIED). 
from_direntry = _direntry_for(from_parent, from_childname) to_direntry = _direntry_for(to_parent, to_childname) @@ -1136,7 +1147,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): (from_direntry, to_direntry, len(all_heisenfiles), len(self._heisenfiles), request), level=NOISY) if not overwrite and (to_userpath in self._heisenfiles or to_direntry in all_heisenfiles): - def _existing(): raise SFTPError(FX_PERMISSION_DENIED, "cannot rename to existing path " + str(to_userpath, "utf-8")) + def _existing(): raise createSFTPError(FX_PERMISSION_DENIED, "cannot rename to existing path " + str(to_userpath, "utf-8")) if noisy: self.log("existing", level=NOISY) return defer.execute(_existing) @@ -1289,17 +1300,17 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): if not (flags & (FXF_READ | FXF_WRITE)): def _bad_readwrite(): - raise SFTPError(FX_BAD_MESSAGE, "invalid file open flags: at least one of FXF_READ and FXF_WRITE must be set") + raise createSFTPError(FX_BAD_MESSAGE, "invalid file open flags: at least one of FXF_READ and FXF_WRITE must be set") return defer.execute(_bad_readwrite) if (flags & FXF_EXCL) and not (flags & FXF_CREAT): def _bad_exclcreat(): - raise SFTPError(FX_BAD_MESSAGE, "invalid file open flags: FXF_EXCL cannot be set without FXF_CREAT") + raise createSFTPError(FX_BAD_MESSAGE, "invalid file open flags: FXF_EXCL cannot be set without FXF_CREAT") return defer.execute(_bad_exclcreat) path = self._path_from_string(pathstring) if not path: - def _emptypath(): raise SFTPError(FX_NO_SUCH_FILE, "path cannot be empty") + def _emptypath(): raise createSFTPError(FX_NO_SUCH_FILE, "path cannot be empty") return defer.execute(_emptypath) # The combination of flags is potentially valid. 
@@ -1358,20 +1369,20 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): def _got_root(root_and_path): (root, path) = root_and_path if root.is_unknown(): - raise SFTPError(FX_PERMISSION_DENIED, + raise createSFTPError(FX_PERMISSION_DENIED, "cannot open an unknown cap (or child of an unknown object). " "Upgrading the gateway to a later Tahoe-LAFS version may help") if not path: # case 1 if noisy: self.log("case 1: root = %r, path[:-1] = %r" % (root, path[:-1]), level=NOISY) if not IFileNode.providedBy(root): - raise SFTPError(FX_PERMISSION_DENIED, + raise createSFTPError(FX_PERMISSION_DENIED, "cannot open a directory cap") if (flags & FXF_WRITE) and root.is_readonly(): - raise SFTPError(FX_PERMISSION_DENIED, + raise createSFTPError(FX_PERMISSION_DENIED, "cannot write to a non-writeable filecap without a parent directory") if flags & FXF_EXCL: - raise SFTPError(FX_FAILURE, + raise createSFTPError(FX_FAILURE, "cannot create a file exclusively when it already exists") # The file does not need to be added to all_heisenfiles, because it is not @@ -1398,7 +1409,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): def _got_parent(parent): if noisy: self.log("_got_parent(%r)" % (parent,), level=NOISY) if parent.is_unknown(): - raise SFTPError(FX_PERMISSION_DENIED, + raise createSFTPError(FX_PERMISSION_DENIED, "cannot open a child of an unknown object. " "Upgrading the gateway to a later Tahoe-LAFS version may help") @@ -1413,7 +1424,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): # which is consistent with what might happen on a POSIX filesystem. if parent_readonly: - raise SFTPError(FX_FAILURE, + raise createSFTPError(FX_FAILURE, "cannot create a file exclusively when the parent directory is read-only") # 'overwrite=False' ensures failure if the link already exists. 
@@ -1445,14 +1456,14 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): metadata['no-write'] = _no_write(parent_readonly, filenode, current_metadata) if filenode.is_unknown(): - raise SFTPError(FX_PERMISSION_DENIED, + raise createSFTPError(FX_PERMISSION_DENIED, "cannot open an unknown cap. Upgrading the gateway " "to a later Tahoe-LAFS version may help") if not IFileNode.providedBy(filenode): - raise SFTPError(FX_PERMISSION_DENIED, + raise createSFTPError(FX_PERMISSION_DENIED, "cannot open a directory as if it were a file") if (flags & FXF_WRITE) and metadata['no-write']: - raise SFTPError(FX_PERMISSION_DENIED, + raise createSFTPError(FX_PERMISSION_DENIED, "cannot open a non-writeable file for writing") return self._make_file(file, userpath, flags, parent=parent, childname=childname, @@ -1462,10 +1473,10 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): f.trap(NoSuchChildError) if not (flags & FXF_CREAT): - raise SFTPError(FX_NO_SUCH_FILE, + raise createSFTPError(FX_NO_SUCH_FILE, "the file does not exist, and was not opened with the creation (CREAT) flag") if parent_readonly: - raise SFTPError(FX_PERMISSION_DENIED, + raise createSFTPError(FX_PERMISSION_DENIED, "cannot create a file when the parent directory is read-only") return self._make_file(file, userpath, flags, parent=parent, childname=childname) @@ -1504,9 +1515,9 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): (to_parent, to_childname) = to_pair if from_childname is None: - raise SFTPError(FX_NO_SUCH_FILE, "cannot rename a source object specified by URI") + raise createSFTPError(FX_NO_SUCH_FILE, "cannot rename a source object specified by URI") if to_childname is None: - raise SFTPError(FX_NO_SUCH_FILE, "cannot rename to a destination specified by URI") + raise createSFTPError(FX_NO_SUCH_FILE, "cannot rename to a destination specified by URI") # # "It is an error if there already exists a file with the name specified @@ -1521,7 +1532,7 @@ class SFTPUserHandler(ConchUser, 
PrefixingLogMixin): d2.addCallback(lambda ign: to_parent.get(to_childname)) def _expect_fail(res): if not isinstance(res, Failure): - raise SFTPError(FX_PERMISSION_DENIED, "cannot rename to existing path " + str(to_userpath, "utf-8")) + raise createSFTPError(FX_PERMISSION_DENIED, "cannot rename to existing path " + str(to_userpath, "utf-8")) # It is OK if we fail for errors other than NoSuchChildError, since that probably # indicates some problem accessing the destination directory. @@ -1546,7 +1557,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): if not isinstance(err, Failure) or (renamed and err.check(NoSuchChildError)): return None if not overwrite and err.check(ExistingChildError): - raise SFTPError(FX_PERMISSION_DENIED, "cannot rename to existing path " + str(to_userpath, "utf-8")) + raise createSFTPError(FX_PERMISSION_DENIED, "cannot rename to existing path " + str(to_userpath, "utf-8")) return err d3.addBoth(_check) @@ -1564,7 +1575,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): path = self._path_from_string(pathstring) metadata = _attrs_to_metadata(attrs) if 'no-write' in metadata: - def _denied(): raise SFTPError(FX_PERMISSION_DENIED, "cannot create a directory that is initially read-only") + def _denied(): raise createSFTPError(FX_PERMISSION_DENIED, "cannot create a directory that is initially read-only") return defer.execute(_denied) d = self._get_root(path) @@ -1576,7 +1587,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): def _get_or_create_directories(self, node, path, metadata): if not IDirectoryNode.providedBy(node): # TODO: provide the name of the blocking file in the error message. 
- def _blocked(): raise SFTPError(FX_FAILURE, "cannot create directory because there " + def _blocked(): raise createSFTPError(FX_FAILURE, "cannot create directory because there " "is a file in the way") # close enough return defer.execute(_blocked) @@ -1614,7 +1625,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): def _got_parent(parent_and_childname): (parent, childname) = parent_and_childname if childname is None: - raise SFTPError(FX_NO_SUCH_FILE, "cannot remove an object specified by URI") + raise createSFTPError(FX_NO_SUCH_FILE, "cannot remove an object specified by URI") direntry = _direntry_for(parent, childname) d2 = defer.succeed(False) @@ -1645,11 +1656,11 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): d.addCallback(_got_parent_or_node) def _list(dirnode): if dirnode.is_unknown(): - raise SFTPError(FX_PERMISSION_DENIED, + raise createSFTPError(FX_PERMISSION_DENIED, "cannot list an unknown cap as a directory. Upgrading the gateway " "to a later Tahoe-LAFS version may help") if not IDirectoryNode.providedBy(dirnode): - raise SFTPError(FX_PERMISSION_DENIED, + raise createSFTPError(FX_PERMISSION_DENIED, "cannot list a file as if it were a directory") d2 = dirnode.list() @@ -1736,7 +1747,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): if "size" in attrs: # this would require us to download and re-upload the truncated/extended # file contents - def _unsupported(): raise SFTPError(FX_OP_UNSUPPORTED, "setAttrs wth size attribute unsupported") + def _unsupported(): raise createSFTPError(FX_OP_UNSUPPORTED, "setAttrs wth size attribute unsupported") return defer.execute(_unsupported) path = self._path_from_string(pathstring) @@ -1753,7 +1764,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): if childname is None: if updated_heisenfiles: return None - raise SFTPError(FX_NO_SUCH_FILE, userpath) + raise createSFTPError(FX_NO_SUCH_FILE, userpath) else: desired_metadata = _attrs_to_metadata(attrs) if noisy: self.log("desired_metadata = 
%r" % (desired_metadata,), level=NOISY) @@ -1776,7 +1787,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): def readLink(self, pathstring): self.log(".readLink(%r)" % (pathstring,), level=OPERATIONAL) - def _unsupported(): raise SFTPError(FX_OP_UNSUPPORTED, "readLink") + def _unsupported(): raise createSFTPError(FX_OP_UNSUPPORTED, "readLink") return defer.execute(_unsupported) def makeLink(self, linkPathstring, targetPathstring): @@ -1785,7 +1796,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): # If this is implemented, note the reversal of arguments described in point 7 of # . - def _unsupported(): raise SFTPError(FX_OP_UNSUPPORTED, "makeLink") + def _unsupported(): raise createSFTPError(FX_OP_UNSUPPORTED, "makeLink") return defer.execute(_unsupported) def extendedRequest(self, extensionName, extensionData): @@ -1795,7 +1806,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): # if extensionName == b'posix-rename@openssh.com': - def _bad(): raise SFTPError(FX_BAD_MESSAGE, "could not parse posix-rename@openssh.com request") + def _bad(): raise createSFTPError(FX_BAD_MESSAGE, "could not parse posix-rename@openssh.com request") if 4 > len(extensionData): return defer.execute(_bad) (fromPathLen,) = struct.unpack('>L', extensionData[0:4]) @@ -1812,7 +1823,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): # an error, or an FXP_EXTENDED_REPLY. But it happens to do the right thing # (respond with an FXP_STATUS message) if we return a Failure with code FX_OK. 
def _succeeded(ign): - raise SFTPError(FX_OK, "request succeeded") + raise createSFTPError(FX_OK, "request succeeded") d.addCallback(_succeeded) return d @@ -1832,7 +1843,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): 65535, # uint64 f_namemax /* maximum filename length */ )) - def _unsupported(): raise SFTPError(FX_OP_UNSUPPORTED, "unsupported %r request " % + def _unsupported(): raise createSFTPError(FX_OP_UNSUPPORTED, "unsupported %r request " % (extensionName, len(extensionData))) return defer.execute(_unsupported) @@ -1869,7 +1880,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): try: p = p_utf8.decode('utf-8', 'strict') except UnicodeError: - raise SFTPError(FX_NO_SUCH_FILE, "path could not be decoded as UTF-8") + raise createSFTPError(FX_NO_SUCH_FILE, "path could not be decoded as UTF-8") path.append(p) if noisy: self.log(" PATH %r" % (path,), level=NOISY) From 6107e52f96249ff44726da6f19c0eb430f82f030 Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Mon, 11 Jan 2021 15:26:38 -0500 Subject: [PATCH 128/213] Fix flake. --- integration/util.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/integration/util.py b/integration/util.py index 39ec36a38..64f02a446 100644 --- a/integration/util.py +++ b/integration/util.py @@ -5,7 +5,7 @@ from os import mkdir, environ from os.path import exists, join from six.moves import StringIO from functools import partial -from subprocess import check_output, check_call +from subprocess import check_output from twisted.python.filepath import ( FilePath, From 3489e381be45b094c6dfe7b35102d7cf65bebe55 Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Tue, 12 Jan 2021 11:16:45 -0500 Subject: [PATCH 129/213] Get rid of finalizer which, I suspect, is keeping tests from shutting down on Windows. 
--- integration/conftest.py | 3 +++ integration/util.py | 10 ++++++---- 2 files changed, 9 insertions(+), 4 deletions(-) diff --git a/integration/conftest.py b/integration/conftest.py index 4ae22deee..27404d9e8 100644 --- a/integration/conftest.py +++ b/integration/conftest.py @@ -351,6 +351,9 @@ def alice(reactor, temp_dir, introducer_furl, flog_gatherer, storage_nodes, requ reactor, request, temp_dir, introducer_furl, flog_gatherer, "alice", web_port="tcp:9980:interface=localhost", storage=False, + # We're going to kill this ourselves, so no need for finalizer to + # do it: + finalize=False, ) ) await_client_ready(process) diff --git a/integration/util.py b/integration/util.py index 64f02a446..f5f7029d8 100644 --- a/integration/util.py +++ b/integration/util.py @@ -186,7 +186,7 @@ class TahoeProcess(object): return "".format(self._node_dir) -def _run_node(reactor, node_dir, request, magic_text): +def _run_node(reactor, node_dir, request, magic_text, finalize=True): """ Run a tahoe process from its node_dir. 
@@ -210,7 +210,8 @@ def _run_node(reactor, node_dir, request, magic_text): ) transport.exited = protocol.exited - request.addfinalizer(partial(_cleanup_tahoe_process, transport, protocol.exited)) + if finalize: + request.addfinalizer(partial(_cleanup_tahoe_process, transport, protocol.exited)) # XXX abusing the Deferred; should use .when_magic_seen() pattern @@ -229,7 +230,8 @@ def _create_node(reactor, request, temp_dir, introducer_furl, flog_gatherer, nam magic_text=None, needed=2, happy=3, - total=4): + total=4, + finalize=True): """ Helper to create a single node, run it and return the instance spawnProcess returned (ITransport) @@ -277,7 +279,7 @@ def _create_node(reactor, request, temp_dir, introducer_furl, flog_gatherer, nam d = Deferred() d.callback(None) d.addCallback(lambda _: created_d) - d.addCallback(lambda _: _run_node(reactor, node_dir, request, magic_text)) + d.addCallback(lambda _: _run_node(reactor, node_dir, request, magic_text, finalize=finalize)) return d From 116c59142d46a033cf4d9ac8615a43fee78cf022 Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Tue, 12 Jan 2021 11:26:43 -0500 Subject: [PATCH 130/213] Port to Python 3. 
--- src/allmydata/test/web/test_logs.py | 4 ++++ src/allmydata/util/_python3.py | 1 + 2 files changed, 5 insertions(+) diff --git a/src/allmydata/test/web/test_logs.py b/src/allmydata/test/web/test_logs.py index 4895ed6f0..ca8e5b918 100644 --- a/src/allmydata/test/web/test_logs.py +++ b/src/allmydata/test/web/test_logs.py @@ -9,6 +9,10 @@ from __future__ import ( division, ) +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 + from testtools.matchers import ( Equals, ) diff --git a/src/allmydata/util/_python3.py b/src/allmydata/util/_python3.py index 14db70735..eca06058b 100644 --- a/src/allmydata/util/_python3.py +++ b/src/allmydata/util/_python3.py @@ -186,6 +186,7 @@ PORTED_TEST_MODULES = [ "allmydata.test.test_util", "allmydata.test.web.test_common", "allmydata.test.web.test_grid", + "allmydata.test.web.test_logs", "allmydata.test.web.test_status", "allmydata.test.web.test_util", "allmydata.test.web.test_webish", From d99c94753c1e222e0e5cb9eca79369f5f955f123 Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Tue, 12 Jan 2021 11:38:37 -0500 Subject: [PATCH 131/213] On Python 3 we need to make sure bytes get written to the websocket. --- src/allmydata/web/logs.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/src/allmydata/web/logs.py b/src/allmydata/web/logs.py index 0ba8b17e9..896dce418 100644 --- a/src/allmydata/web/logs.py +++ b/src/allmydata/web/logs.py @@ -5,6 +5,8 @@ from __future__ import ( division, ) +from future.builtins import str + import json from autobahn.twisted.resource import WebSocketResource @@ -49,7 +51,11 @@ class TokenAuthenticatedWebSocketServerProtocol(WebSocketServerProtocol): """ # probably want a try/except around here? what do we do if # transmission fails or anything else bad happens? 
- self.sendMessage(json.dumps(message)) + encoded = json.dumps(message) + if isinstance(encoded, str): + # On Python 3 dumps() returns Unicode... + encoded = encoded.encode("utf-8") + self.sendMessage(encoded) def onOpen(self): """ From c2d69c53096fe1bd5894fd0cd198a475a2d592d5 Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Tue, 12 Jan 2021 11:41:19 -0500 Subject: [PATCH 132/213] Merge all log tests into one test module. --- src/allmydata/test/test_websocket_logs.py | 54 ---------------------- src/allmydata/test/web/test_logs.py | 56 +++++++++++++++++++++++ 2 files changed, 56 insertions(+), 54 deletions(-) delete mode 100644 src/allmydata/test/test_websocket_logs.py diff --git a/src/allmydata/test/test_websocket_logs.py b/src/allmydata/test/test_websocket_logs.py deleted file mode 100644 index e666a4902..000000000 --- a/src/allmydata/test/test_websocket_logs.py +++ /dev/null @@ -1,54 +0,0 @@ -import json - -from twisted.trial import unittest -from twisted.internet.defer import inlineCallbacks - -from eliot import log_call - -from autobahn.twisted.testing import create_memory_agent, MemoryReactorClockResolver, create_pumper - -from allmydata.web.logs import TokenAuthenticatedWebSocketServerProtocol - - -class TestStreamingLogs(unittest.TestCase): - """ - Test websocket streaming of logs - """ - - def setUp(self): - self.reactor = MemoryReactorClockResolver() - self.pumper = create_pumper() - self.agent = create_memory_agent(self.reactor, self.pumper, TokenAuthenticatedWebSocketServerProtocol) - return self.pumper.start() - - def tearDown(self): - return self.pumper.stop() - - @inlineCallbacks - def test_one_log(self): - """ - write a single Eliot log and see it streamed via websocket - """ - - proto = yield self.agent.open( - transport_config=u"ws://localhost:1234/ws", - options={}, - ) - - messages = [] - def got_message(msg, is_binary=False): - messages.append(json.loads(msg)) - proto.on("message", got_message) - - 
@log_call(action_type=u"test:cli:some-exciting-action") - def do_a_thing(): - pass - - do_a_thing() - - proto.transport.loseConnection() - yield proto.is_closed - - self.assertEqual(len(messages), 2) - self.assertEqual("started", messages[0]["action_status"]) - self.assertEqual("succeeded", messages[1]["action_status"]) diff --git a/src/allmydata/test/web/test_logs.py b/src/allmydata/test/web/test_logs.py index ca8e5b918..5d697f910 100644 --- a/src/allmydata/test/web/test_logs.py +++ b/src/allmydata/test/web/test_logs.py @@ -1,5 +1,7 @@ """ Tests for ``allmydata.web.logs``. + +Ported to Python 3. """ from __future__ import ( @@ -13,6 +15,15 @@ from future.utils import PY2 if PY2: from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 +import json + +from twisted.trial import unittest +from twisted.internet.defer import inlineCallbacks + +from eliot import log_call + +from autobahn.twisted.testing import create_memory_agent, MemoryReactorClockResolver, create_pumper + from testtools.matchers import ( Equals, ) @@ -41,6 +52,7 @@ from ..common import ( from ...web.logs import ( create_log_resources, + TokenAuthenticatedWebSocketServerProtocol, ) class StreamingEliotLogsTests(SyncTestCase): @@ -61,3 +73,47 @@ class StreamingEliotLogsTests(SyncTestCase): self.client.get(b"http:///v1"), succeeded(has_response_code(Equals(OK))), ) + + +class TestStreamingLogs(unittest.TestCase): + """ + Test websocket streaming of logs + """ + + def setUp(self): + self.reactor = MemoryReactorClockResolver() + self.pumper = create_pumper() + self.agent = create_memory_agent(self.reactor, self.pumper, TokenAuthenticatedWebSocketServerProtocol) + return self.pumper.start() + + def tearDown(self): + return self.pumper.stop() + + @inlineCallbacks + def test_one_log(self): + """ + write a single Eliot log and see it streamed via websocket + """ + + proto = yield self.agent.open( + 
transport_config=u"ws://localhost:1234/ws", + options={}, + ) + + messages = [] + def got_message(msg, is_binary=False): + messages.append(json.loads(msg)) + proto.on("message", got_message) + + @log_call(action_type=u"test:cli:some-exciting-action") + def do_a_thing(): + pass + + do_a_thing() + + proto.transport.loseConnection() + yield proto.is_closed + + self.assertEqual(len(messages), 2) + self.assertEqual("started", messages[0]["action_status"]) + self.assertEqual("succeeded", messages[1]["action_status"]) From 7e5e3291381ddf03625375e718658566da009e65 Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Tue, 12 Jan 2021 11:44:27 -0500 Subject: [PATCH 133/213] Port to Python 3. --- src/allmydata/util/_python3.py | 1 + src/allmydata/web/logs.py | 5 +++-- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/src/allmydata/util/_python3.py b/src/allmydata/util/_python3.py index eca06058b..9917be84c 100644 --- a/src/allmydata/util/_python3.py +++ b/src/allmydata/util/_python3.py @@ -113,6 +113,7 @@ PORTED_MODULES = [ "allmydata.util.spans", "allmydata.util.statistics", "allmydata.util.time_format", + "allmydata.web.logs", "allmydata.webish", ] diff --git a/src/allmydata/web/logs.py b/src/allmydata/web/logs.py index 896dce418..6f15a3ca9 100644 --- a/src/allmydata/web/logs.py +++ b/src/allmydata/web/logs.py @@ -1,3 +1,6 @@ +""" +Ported to Python 3. +""" from __future__ import ( print_function, unicode_literals, @@ -5,8 +8,6 @@ from __future__ import ( division, ) -from future.builtins import str - import json from autobahn.twisted.resource import WebSocketResource From b74ec6919dc695e661e2523ef8c528535e13332e Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Tue, 12 Jan 2021 13:24:42 -0500 Subject: [PATCH 134/213] Don't blow up just because irrelevant cleanup complains. 
--- integration/conftest.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/integration/conftest.py b/integration/conftest.py index 27404d9e8..1679bd9f9 100644 --- a/integration/conftest.py +++ b/integration/conftest.py @@ -536,7 +536,13 @@ def tor_network(reactor, temp_dir, chutney, request): path=join(chutney_dir), env=env, ) - pytest_twisted.blockon(proto.done) + try: + pytest_twisted.blockon(proto.done) + except ProcessTerminated: + # If this doesn't exit cleanly, that's fine, that shouldn't fail + # the test suite. + pass + request.addfinalizer(cleanup) return chut From dfcd75f20de629723b091c9d5b8ca2aa1b194718 Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Tue, 12 Jan 2021 13:58:28 -0500 Subject: [PATCH 135/213] Infinite blocking is bad. --- integration/conftest.py | 9 +++++---- integration/util.py | 9 ++++++++- 2 files changed, 13 insertions(+), 5 deletions(-) diff --git a/integration/conftest.py b/integration/conftest.py index 1679bd9f9..533cbdb67 100644 --- a/integration/conftest.py +++ b/integration/conftest.py @@ -40,7 +40,8 @@ from util import ( TahoeProcess, cli, _run_node, - generate_ssh_key + generate_ssh_key, + block_with_timeout, ) @@ -156,7 +157,7 @@ def flog_gatherer(reactor, temp_dir, flog_binary, request): ) print("Waiting for flogtool to complete") try: - pytest_twisted.blockon(flog_protocol.done) + block_with_timeout(flog_protocol.done, reactor) except ProcessTerminated as e: print("flogtool exited unexpectedly: {}".format(str(e))) print("Flogtool completed") @@ -297,7 +298,7 @@ log_gatherer.furl = {log_furl} def cleanup(): try: transport.signalProcess('TERM') - pytest_twisted.blockon(protocol.exited) + block_with_timeout(protocol.exited, reactor) except ProcessExitedAlready: pass request.addfinalizer(cleanup) @@ -537,7 +538,7 @@ def tor_network(reactor, temp_dir, chutney, request): env=env, ) try: - pytest_twisted.blockon(proto.done) + block_with_timeout(proto.done, reactor) except ProcessTerminated: # If 
this doesn't exit cleanly, that's fine, that shouldn't fail # the test suite. diff --git a/integration/util.py b/integration/util.py index f5f7029d8..3d1708bae 100644 --- a/integration/util.py +++ b/integration/util.py @@ -28,6 +28,12 @@ from allmydata import client import pytest_twisted +def block_with_timeout(deferred, reactor, timeout=10): + """Block until Deferred has result, but timeout instead of waiting forever.""" + deferred.addTimeout(timeout, reactor) + return pytest_twisted.blockon(deferred) + + class _ProcessExitedProtocol(ProcessProtocol): """ Internal helper that .callback()s on self.done when the process @@ -126,11 +132,12 @@ def _cleanup_tahoe_process(tahoe_transport, exited): :return: After the process has exited. """ + from twisted.internet import reactor try: print("signaling {} with TERM".format(tahoe_transport.pid)) tahoe_transport.signalProcess('TERM') print("signaled, blocking on exit") - pytest_twisted.blockon(exited) + block_with_timeout(exited, reactor) print("exited, goodbye") except ProcessExitedAlready: pass From bd364feec5c9d178929c37179fdc9066f65d97f1 Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Tue, 12 Jan 2021 13:22:43 -0500 Subject: [PATCH 136/213] Tests pass on Python 3. 
--- src/allmydata/test/web/test_introducer.py | 2 +- src/allmydata/web/introweb.py | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/allmydata/test/web/test_introducer.py b/src/allmydata/test/web/test_introducer.py index 929fba507..43f4d5934 100644 --- a/src/allmydata/test/web/test_introducer.py +++ b/src/allmydata/test/web/test_introducer.py @@ -213,7 +213,7 @@ class IntroducerRootTests(unittest.TestCase): resource = IntroducerRoot(introducer_node) response = json.loads( self.successResultOf( - render(resource, {"t": [b"json"]}), + render(resource, {b"t": [b"json"]}), ), ) self.assertEqual( diff --git a/src/allmydata/web/introweb.py b/src/allmydata/web/introweb.py index 6ec558e82..280d6cc26 100644 --- a/src/allmydata/web/introweb.py +++ b/src/allmydata/web/introweb.py @@ -26,10 +26,10 @@ class IntroducerRoot(MultiFormatResource): self.introducer_node = introducer_node self.introducer_service = introducer_node.getServiceNamed("introducer") # necessary as a root Resource - self.putChild("", self) + self.putChild(b"", self) static_dir = resource_filename("allmydata.web", "static") for filen in os.listdir(static_dir): - self.putChild(filen, static.File(os.path.join(static_dir, filen))) + self.putChild(filen.encode("utf-8"), static.File(os.path.join(static_dir, filen))) def _create_element(self): """ @@ -66,7 +66,7 @@ class IntroducerRoot(MultiFormatResource): announcement_summary[service_name] += 1 res[u"announcement_summary"] = announcement_summary - return json.dumps(res, indent=1) + b"\n" + return (json.dumps(res, indent=1) + "\n").encode("utf-8") class IntroducerRootElement(Element): From 8c41f60fdb2b6017f83497ed7a877f955fd7f673 Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Tue, 12 Jan 2021 14:17:38 -0500 Subject: [PATCH 137/213] Port to Python 3. 
--- src/allmydata/test/web/test_introducer.py | 12 ++++++++++++ src/allmydata/util/_python3.py | 1 + 2 files changed, 13 insertions(+) diff --git a/src/allmydata/test/web/test_introducer.py b/src/allmydata/test/web/test_introducer.py index 43f4d5934..08d95bda9 100644 --- a/src/allmydata/test/web/test_introducer.py +++ b/src/allmydata/test/web/test_introducer.py @@ -1,3 +1,15 @@ +""" +Ported to Python 3. +""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 + import json from os.path import join diff --git a/src/allmydata/util/_python3.py b/src/allmydata/util/_python3.py index 9917be84c..63aa5bb0a 100644 --- a/src/allmydata/util/_python3.py +++ b/src/allmydata/util/_python3.py @@ -187,6 +187,7 @@ PORTED_TEST_MODULES = [ "allmydata.test.test_util", "allmydata.test.web.test_common", "allmydata.test.web.test_grid", + "allmydata.test.web.test_introducer", "allmydata.test.web.test_logs", "allmydata.test.web.test_status", "allmydata.test.web.test_util", From c076e1ee2646758a65c42100265e4ddb80e269ef Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Tue, 12 Jan 2021 14:24:11 -0500 Subject: [PATCH 138/213] Just fix all the putChild. 
--- src/allmydata/web/root.py | 21 +++++++++++---------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/src/allmydata/web/root.py b/src/allmydata/web/root.py index fdc72ab71..f6316bff5 100644 --- a/src/allmydata/web/root.py +++ b/src/allmydata/web/root.py @@ -1,3 +1,5 @@ +from past.builtins import unicode + import os import time import urllib @@ -227,26 +229,25 @@ class Root(MultiFormatResource): self._client = client self._now_fn = now_fn - # Children need to be bytes; for now just doing these to make specific - # tests pass on Python 3, but eventually will do all them when this - # module is ported to Python 3 (if not earlier). self.putChild(b"uri", URIHandler(client)) - self.putChild("cap", URIHandler(client)) + self.putChild(b"cap", URIHandler(client)) # Handler for everything beneath "/private", an area of the resource # hierarchy which is only accessible with the private per-node API # auth token. - self.putChild("private", create_private_tree(client.get_auth_token)) + self.putChild(b"private", create_private_tree(client.get_auth_token)) - self.putChild("file", FileHandler(client)) - self.putChild("named", FileHandler(client)) - self.putChild("status", status.Status(client.get_history())) - self.putChild("statistics", status.Statistics(client.stats_provider)) + self.putChild(b"file", FileHandler(client)) + self.putChild(b"named", FileHandler(client)) + self.putChild(b"status", status.Status(client.get_history())) + self.putChild(b"statistics", status.Statistics(client.stats_provider)) static_dir = resource_filename("allmydata.web", "static") for filen in os.listdir(static_dir): + if isinstance(filen, unicode): + filen = filen.encode("utf-8") self.putChild(filen, static.File(os.path.join(static_dir, filen))) - self.putChild("report_incident", IncidentReporter()) + self.putChild(b"report_incident", IncidentReporter()) @exception_to_child def getChild(self, path, request): From 4940da47da1cd98ae9e0de3b5b80d11a8c2f573a Mon Sep 17 00:00:00 2001 
From: Itamar Turner-Trauring Date: Tue, 12 Jan 2021 14:24:17 -0500 Subject: [PATCH 139/213] Tests pass on Python 3. --- src/allmydata/test/web/test_private.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/src/allmydata/test/web/test_private.py b/src/allmydata/test/web/test_private.py index 27ddbcf78..583f629f5 100644 --- a/src/allmydata/test/web/test_private.py +++ b/src/allmydata/test/web/test_private.py @@ -9,6 +9,8 @@ from __future__ import ( division, ) +from future.builtins import str + from testtools.matchers import ( Equals, ) @@ -56,6 +58,7 @@ class PrivacyTests(SyncTestCase): return super(PrivacyTests, self).setUp() def _authorization(self, scheme, value): + value = str(value, "utf-8") return Headers({ u"authorization": [u"{} {}".format(scheme, value)], }) @@ -90,7 +93,7 @@ class PrivacyTests(SyncTestCase): self.assertThat( self.client.head( b"http:///foo/bar", - headers=self._authorization(SCHEME, u"foo bar"), + headers=self._authorization(str(SCHEME, "utf-8"), b"foo bar"), ), succeeded(has_response_code(Equals(UNAUTHORIZED))), ) @@ -103,7 +106,7 @@ class PrivacyTests(SyncTestCase): self.assertThat( self.client.head( b"http:///foo/bar", - headers=self._authorization(SCHEME, self.token), + headers=self._authorization(str(SCHEME, "utf-8"), self.token), ), # It's a made up URL so we don't get a 200, either, but a 404. succeeded(has_response_code(Equals(NOT_FOUND))), From 03fb936716689447edb676c85044cabd79833f3d Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Tue, 12 Jan 2021 14:25:16 -0500 Subject: [PATCH 140/213] Port to Python 3. 
--- src/allmydata/test/web/test_private.py | 6 ++++++ src/allmydata/util/_python3.py | 1 + 2 files changed, 7 insertions(+) diff --git a/src/allmydata/test/web/test_private.py b/src/allmydata/test/web/test_private.py index 583f629f5..293796b1c 100644 --- a/src/allmydata/test/web/test_private.py +++ b/src/allmydata/test/web/test_private.py @@ -1,5 +1,7 @@ """ Tests for ``allmydata.web.private``. + +Ported to Python 3. """ from __future__ import ( @@ -9,6 +11,10 @@ from __future__ import ( division, ) +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 + from future.builtins import str from testtools.matchers import ( diff --git a/src/allmydata/util/_python3.py b/src/allmydata/util/_python3.py index 63aa5bb0a..7cd80335c 100644 --- a/src/allmydata/util/_python3.py +++ b/src/allmydata/util/_python3.py @@ -189,6 +189,7 @@ PORTED_TEST_MODULES = [ "allmydata.test.web.test_grid", "allmydata.test.web.test_introducer", "allmydata.test.web.test_logs", + "allmydata.test.web.test_private", "allmydata.test.web.test_status", "allmydata.test.web.test_util", "allmydata.test.web.test_webish", From 7a3e9ab43e520a531ab63178a06a1fe0160d6aed Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Tue, 12 Jan 2021 14:39:20 -0500 Subject: [PATCH 141/213] Tests pass on Python 3. 
--- src/allmydata/test/web/test_root.py | 8 ++++---- src/allmydata/web/root.py | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/src/allmydata/test/web/test_root.py b/src/allmydata/test/web/test_root.py index 0715c8102..497440d1c 100644 --- a/src/allmydata/test/web/test_root.py +++ b/src/allmydata/test/web/test_root.py @@ -1,6 +1,6 @@ import time -from urllib import ( +from urllib.parse import ( quote, ) @@ -77,7 +77,7 @@ class RenderSlashUri(unittest.TestCase): ) self.assertEqual( response_body, - "Invalid capability", + b"Invalid capability", ) @@ -92,7 +92,7 @@ class RenderServiceRow(unittest.TestCase): ann = {"anonymous-storage-FURL": "pb://w2hqnbaa25yw4qgcvghl5psa3srpfgw3@tcp:127.0.0.1:51309/vucto2z4fxment3vfxbqecblbf6zyp6x", "permutation-seed-base32": "w2hqnbaa25yw4qgcvghl5psa3srpfgw3", } - srv = NativeStorageServer("server_id", ann, None, {}, EMPTY_CLIENT_CONFIG) + srv = NativeStorageServer(b"server_id", ann, None, {}, EMPTY_CLIENT_CONFIG) srv.get_connection_status = lambda: ConnectionStatus(False, "summary", {}, 0, 0) class FakeClient(_Client): @@ -103,7 +103,7 @@ class RenderServiceRow(unittest.TestCase): tub_maker=None, node_config=EMPTY_CLIENT_CONFIG, ) - self.storage_broker.test_add_server("test-srv", srv) + self.storage_broker.test_add_server(b"test-srv", srv) root = RootElement(FakeClient(), time.time) req = DummyRequest(b"") diff --git a/src/allmydata/web/root.py b/src/allmydata/web/root.py index f6316bff5..5829da51e 100644 --- a/src/allmydata/web/root.py +++ b/src/allmydata/web/root.py @@ -2,7 +2,7 @@ from past.builtins import unicode import os import time -import urllib +from urllib.parse import quote as urlquote from hyperlink import DecodedURL, URL from pkg_resources import resource_filename @@ -83,7 +83,7 @@ class URIHandler(resource.Resource, object): # it seems Nevow was creating absolute URLs including # host/port whereas req.uri is absolute (but lacks host/port) redir_uri = URL.from_text(req.prePathURL().decode('utf8')) 
- redir_uri = redir_uri.child(urllib.quote(uri_arg).decode('utf8')) + redir_uri = redir_uri.child(urlquote(uri_arg)) # add back all the query args that AREN'T "?uri=" for k, values in req.args.items(): if k != b"uri": From 5d77282784b88dbcb3445d279b9c9d0afd6f6db2 Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Tue, 12 Jan 2021 14:40:33 -0500 Subject: [PATCH 142/213] Ported to Python 3. --- src/allmydata/test/web/test_root.py | 12 ++++++++++++ src/allmydata/util/_python3.py | 1 + 2 files changed, 13 insertions(+) diff --git a/src/allmydata/test/web/test_root.py b/src/allmydata/test/web/test_root.py index 497440d1c..ca3cc695d 100644 --- a/src/allmydata/test/web/test_root.py +++ b/src/allmydata/test/web/test_root.py @@ -1,3 +1,15 @@ +""" +Ported to Python 3. +""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 + import time from urllib.parse import ( diff --git a/src/allmydata/util/_python3.py b/src/allmydata/util/_python3.py index 7cd80335c..575a69cb7 100644 --- a/src/allmydata/util/_python3.py +++ b/src/allmydata/util/_python3.py @@ -190,6 +190,7 @@ PORTED_TEST_MODULES = [ "allmydata.test.web.test_introducer", "allmydata.test.web.test_logs", "allmydata.test.web.test_private", + "allmydata.test.web.test_root", "allmydata.test.web.test_status", "allmydata.test.web.test_util", "allmydata.test.web.test_webish", From 6b0849490ad8cbbb23041b16e3850be6e837d30b Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Tue, 12 Jan 2021 14:40:46 -0500 Subject: [PATCH 143/213] News file. 
--- newsfragments/3589.minor | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 newsfragments/3589.minor diff --git a/newsfragments/3589.minor b/newsfragments/3589.minor new file mode 100644 index 000000000..e69de29bb From aace119790f92f691c586067f3554a842656c148 Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Wed, 13 Jan 2021 09:55:54 -0500 Subject: [PATCH 144/213] Fix Python 3 issue with combining bytes and unicode. --- src/allmydata/web/root.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/allmydata/web/root.py b/src/allmydata/web/root.py index 5829da51e..e3d49cd66 100644 --- a/src/allmydata/web/root.py +++ b/src/allmydata/web/root.py @@ -243,9 +243,10 @@ class Root(MultiFormatResource): self.putChild(b"statistics", status.Statistics(client.stats_provider)) static_dir = resource_filename("allmydata.web", "static") for filen in os.listdir(static_dir): + child_path = filen if isinstance(filen, unicode): - filen = filen.encode("utf-8") - self.putChild(filen, static.File(os.path.join(static_dir, filen))) + child_path = filen.encode("utf-8") + self.putChild(child_path, static.File(os.path.join(static_dir, filen))) self.putChild(b"report_incident", IncidentReporter()) From c5669e16e0ba0943e83fd7f3e39a2569eb582743 Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Wed, 13 Jan 2021 09:56:08 -0500 Subject: [PATCH 145/213] Fix flake. 
--- src/allmydata/test/web/test_private.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/allmydata/test/web/test_private.py b/src/allmydata/test/web/test_private.py index 293796b1c..b426b4d93 100644 --- a/src/allmydata/test/web/test_private.py +++ b/src/allmydata/test/web/test_private.py @@ -15,8 +15,6 @@ from future.utils import PY2 if PY2: from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 -from future.builtins import str - from testtools.matchers import ( Equals, ) From 20e90b4b65cbe67a5bda54fb4a1a52fcf2de5a3e Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Wed, 13 Jan 2021 10:21:00 -0500 Subject: [PATCH 146/213] Set --- tox.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index c61331885..110be7a51 100644 --- a/tox.ini +++ b/tox.ini @@ -77,7 +77,7 @@ setenv = COVERAGE_PROCESS_START=.coveragerc commands = # NOTE: 'run with "py.test --keep-tempdir -s -v integration/" to debug failures' - py.test --coverage -v {posargs:integration} + py.test --timeout=1800 --coverage -v {posargs:integration} coverage combine coverage report From 9ca17d780ebfc972cf5b6b319d5303dab0a3c4ce Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Wed, 13 Jan 2021 10:21:06 -0500 Subject: [PATCH 147/213] Add some overall timeout, and timeout on specific test that seems to be the issue somehow. 
--- integration/test_web.py | 3 +++ setup.py | 1 + 2 files changed, 4 insertions(+) diff --git a/integration/test_web.py b/integration/test_web.py index fe2137ff3..a16bf2e71 100644 --- a/integration/test_web.py +++ b/integration/test_web.py @@ -21,6 +21,8 @@ import requests import html5lib from bs4 import BeautifulSoup +import pytest + def test_index(alice): """ @@ -175,6 +177,7 @@ def test_deep_stats(alice): time.sleep(.5) +@pytest.mark.timeout(60) def test_status(alice): """ confirm we get something sensible from /status and the various sub-types diff --git a/setup.py b/setup.py index 952a921bc..5dc68d367 100644 --- a/setup.py +++ b/setup.py @@ -396,6 +396,7 @@ setup(name="tahoe-lafs", # also set in __init__.py "junitxml", "tenacity", "paramiko", + "pytest-timeout", ] + tor_requires + i2p_requires, "tor": tor_requires, "i2p": i2p_requires, From a2dab7c89fed217603116b97fd7d5ca8d0426823 Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Thu, 14 Jan 2021 09:40:10 -0500 Subject: [PATCH 148/213] Only do this on Python 3. --- src/allmydata/web/root.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/allmydata/web/root.py b/src/allmydata/web/root.py index e3d49cd66..b1ba501a9 100644 --- a/src/allmydata/web/root.py +++ b/src/allmydata/web/root.py @@ -1,3 +1,4 @@ +from future.utils import PY3 from past.builtins import unicode import os @@ -244,7 +245,7 @@ class Root(MultiFormatResource): static_dir = resource_filename("allmydata.web", "static") for filen in os.listdir(static_dir): child_path = filen - if isinstance(filen, unicode): + if PY3: child_path = filen.encode("utf-8") self.putChild(child_path, static.File(os.path.join(static_dir, filen))) From 42b31a28099ed7f5c732c4bfc6a71db46b303559 Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Thu, 14 Jan 2021 15:58:18 -0500 Subject: [PATCH 149/213] Fix flake. 
--- src/allmydata/web/root.py | 1 - 1 file changed, 1 deletion(-) diff --git a/src/allmydata/web/root.py b/src/allmydata/web/root.py index b1ba501a9..0ef6b00d2 100644 --- a/src/allmydata/web/root.py +++ b/src/allmydata/web/root.py @@ -1,5 +1,4 @@ from future.utils import PY3 -from past.builtins import unicode import os import time From a01078ddec86e1d193c7940087d6506d376028d8 Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Fri, 15 Jan 2021 15:00:57 -0500 Subject: [PATCH 150/213] Switch to one of our Docker images for typecheck CI --- .circleci/config.yml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index ff14d6dd3..29b55ad5f 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -481,13 +481,14 @@ jobs: typechecks: docker: - <<: *DOCKERHUB_AUTH - image: "jaraco/multipy-tox" + image: "tahoelafsci/ubuntu:18.04-py3" steps: - "checkout" - run: name: "Validate Types" - command: tox -e typechecks + command: | + /tmp/venv/bin/tox -e typechecks build-image: &BUILD_IMAGE # This is a template for a job to build a Docker image that has as much of From c7a4cdb44d801034de1e068f10aa3d8df8a4950f Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Fri, 15 Jan 2021 15:25:07 -0500 Subject: [PATCH 151/213] Rely on main branch of foolscap for typechecks. 
--- tox.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index 706899ebe..2873bded8 100644 --- a/tox.ini +++ b/tox.ini @@ -118,7 +118,7 @@ skip_install = True deps = mypy git+https://github.com/Shoobx/mypy-zope - git+https://github.com/jaraco/foolscap@bugfix/75-use-metaclass + git+https://github.com/warner/foolscap commands = mypy src From 621de4d882a8a0df0bb555897380dc9e7f111302 Mon Sep 17 00:00:00 2001 From: Sajith Sasidharan Date: Mon, 18 Jan 2021 10:55:18 -0500 Subject: [PATCH 152/213] Add newsfragment --- newsfragments/3591.minor | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 newsfragments/3591.minor diff --git a/newsfragments/3591.minor b/newsfragments/3591.minor new file mode 100644 index 000000000..e69de29bb From fa1a8e8371a2a6567901e8b5dcd7259f0e1e7352 Mon Sep 17 00:00:00 2001 From: Sajith Sasidharan Date: Mon, 18 Jan 2021 10:57:31 -0500 Subject: [PATCH 153/213] Upgrade pip used in GitHub Actions From pip 20.1+ onward, "pip cache dir" can be used to find location of pip cache, and this is useful across all three major OSes supported by GitHub Actions. 
--- .github/workflows/ci.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index fd5049104..50a1a3d2f 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -43,7 +43,7 @@ jobs: - name: Install Python packages run: | - pip install --upgrade codecov tox setuptools + pip install --upgrade codecov tox setuptools pip pip list - name: Display tool versions @@ -114,7 +114,7 @@ jobs: - name: Install Python packages run: | - pip install --upgrade tox + pip install --upgrade tox pip pip list - name: Display tool versions @@ -166,7 +166,7 @@ jobs: - name: Install Python packages run: | - pip install --upgrade tox + pip install --upgrade tox pip pip list - name: Display tool versions From 27a122088cdba445465428da6e7680c9a22e6c74 Mon Sep 17 00:00:00 2001 From: Sajith Sasidharan Date: Mon, 18 Jan 2021 11:02:55 -0500 Subject: [PATCH 154/213] Use pip cache on GitHub Actions Using the method outlined in https://github.com/actions/cache/blob/main/examples.md#using-pip-to-get-cache-location --- .github/workflows/ci.yml | 39 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 39 insertions(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 50a1a3d2f..8ca015e07 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -46,6 +46,19 @@ jobs: pip install --upgrade codecov tox setuptools pip pip list + - name: Get pip cache directory + id: pip-cache + run: | + echo "::set-output name=dir::$(pip cache dir)" + + - name: pip cache + uses: actions/cache@v2 + with: + path: ${{ steps.pip-cache.outputs.dir }} + key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }} + restore-keys: | + ${{ runner.os }}-pip- + - name: Display tool versions run: python misc/build_helpers/show-tool-versions.py @@ -117,6 +130,19 @@ jobs: pip install --upgrade tox pip pip list + - name: Get pip cache directory + id: pip-cache + run: | + echo "::set-output name=dir::$(pip cache 
dir)" + + - name: pip cache + uses: actions/cache@v2 + with: + path: ${{ steps.pip-cache.outputs.dir }} + key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }} + restore-keys: | + ${{ runner.os }}-pip- + - name: Display tool versions run: python misc/build_helpers/show-tool-versions.py @@ -169,6 +195,19 @@ jobs: pip install --upgrade tox pip pip list + - name: Get pip cache directory + id: pip-cache + run: | + echo "::set-output name=dir::$(pip cache dir)" + + - name: pip cache + uses: actions/cache@v2 + with: + path: ${{ steps.pip-cache.outputs.dir }} + key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }} + restore-keys: | + ${{ runner.os }}-pip- + - name: Display tool versions run: python misc/build_helpers/show-tool-versions.py From 573ab8768b4443a8e41304708040607974a9f4d4 Mon Sep 17 00:00:00 2001 From: Sajith Sasidharan Date: Mon, 18 Jan 2021 11:08:56 -0500 Subject: [PATCH 155/213] Re-title "use pip cache" step in GitHub Actions --- .github/workflows/ci.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 8ca015e07..51cec9e54 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -51,7 +51,7 @@ jobs: run: | echo "::set-output name=dir::$(pip cache dir)" - - name: pip cache + - name: Use pip cache uses: actions/cache@v2 with: path: ${{ steps.pip-cache.outputs.dir }} @@ -135,7 +135,7 @@ jobs: run: | echo "::set-output name=dir::$(pip cache dir)" - - name: pip cache + - name: Use pip cache uses: actions/cache@v2 with: path: ${{ steps.pip-cache.outputs.dir }} @@ -200,7 +200,7 @@ jobs: run: | echo "::set-output name=dir::$(pip cache dir)" - - name: pip cache + - name: Use pip cache uses: actions/cache@v2 with: path: ${{ steps.pip-cache.outputs.dir }} From f731159cd7608925c5713cb4053067ef81c6bf8b Mon Sep 17 00:00:00 2001 From: Sajith Sasidharan Date: Mon, 18 Jan 2021 13:13:32 -0500 Subject: [PATCH 156/213] Install Python packages after setting up pip cache --- 
.github/workflows/ci.yml | 30 +++++++++++++++--------------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 51cec9e54..c3f32c919 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -41,11 +41,6 @@ jobs: with: python-version: ${{ matrix.python-version }} - - name: Install Python packages - run: | - pip install --upgrade codecov tox setuptools pip - pip list - - name: Get pip cache directory id: pip-cache run: | @@ -59,6 +54,11 @@ jobs: restore-keys: | ${{ runner.os }}-pip- + - name: Install Python packages + run: | + pip install --upgrade codecov tox setuptools pip + pip list + - name: Display tool versions run: python misc/build_helpers/show-tool-versions.py @@ -125,11 +125,6 @@ jobs: with: python-version: ${{ matrix.python-version }} - - name: Install Python packages - run: | - pip install --upgrade tox pip - pip list - - name: Get pip cache directory id: pip-cache run: | @@ -143,6 +138,11 @@ jobs: restore-keys: | ${{ runner.os }}-pip- + - name: Install Python packages + run: | + pip install --upgrade tox pip + pip list + - name: Display tool versions run: python misc/build_helpers/show-tool-versions.py @@ -190,11 +190,6 @@ jobs: with: python-version: ${{ matrix.python-version }} - - name: Install Python packages - run: | - pip install --upgrade tox pip - pip list - - name: Get pip cache directory id: pip-cache run: | @@ -208,6 +203,11 @@ jobs: restore-keys: | ${{ runner.os }}-pip- + - name: Install Python packages + run: | + pip install --upgrade tox pip + pip list + - name: Display tool versions run: python misc/build_helpers/show-tool-versions.py From 8bf068f99152aed31b51ab86b6f3edbfd20baf4a Mon Sep 17 00:00:00 2001 From: Sajith Sasidharan Date: Mon, 18 Jan 2021 13:19:18 -0500 Subject: [PATCH 157/213] What's the pip version on GitHub Actions? There's no need of upgrading pip if GA offers a sufficiently new pip. 
--- .github/workflows/ci.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index c3f32c919..0135d9ab9 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -44,6 +44,7 @@ jobs: - name: Get pip cache directory id: pip-cache run: | + pip --version echo "::set-output name=dir::$(pip cache dir)" - name: Use pip cache From 1f1a30095ea9359a3c3b752bfbc513efa3685dca Mon Sep 17 00:00:00 2001 From: Sajith Sasidharan Date: Mon, 18 Jan 2021 13:22:04 -0500 Subject: [PATCH 158/213] Get pip version for all three GitHub Actions OSes --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 0135d9ab9..1943933b2 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -44,7 +44,6 @@ jobs: - name: Get pip cache directory id: pip-cache run: | - pip --version echo "::set-output name=dir::$(pip cache dir)" - name: Use pip cache @@ -194,6 +193,7 @@ jobs: - name: Get pip cache directory id: pip-cache run: | + pip --version echo "::set-output name=dir::$(pip cache dir)" - name: Use pip cache From 2a1a5cb0a5b232487894dafdfa7200a96f1b1d4b Mon Sep 17 00:00:00 2001 From: Sajith Sasidharan Date: Mon, 18 Jan 2021 13:32:53 -0500 Subject: [PATCH 159/213] GitHub Actions have sufficiently recent pip At the time of writing this commit message, GitHub Actions offers pip v20.3.3 for both ubuntu-latest and windows-latest, and pip v20.3.1 for macos-latest. Those are sufficiently recent pip versions that have "cache dir" sub-command. 
--- .github/workflows/ci.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 1943933b2..c3f32c919 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -193,7 +193,6 @@ jobs: - name: Get pip cache directory id: pip-cache run: | - pip --version echo "::set-output name=dir::$(pip cache dir)" - name: Use pip cache From adbe23fe7aac3a0f8b55b3135d266a531ec0bcc4 Mon Sep 17 00:00:00 2001 From: Sajith Sasidharan Date: Mon, 18 Jan 2021 13:34:25 -0500 Subject: [PATCH 160/213] Add a note about pip version on GitHub Actions --- .github/workflows/ci.yml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index c3f32c919..8ccf07aba 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -41,6 +41,11 @@ jobs: with: python-version: ${{ matrix.python-version }} + # We need "pip cache dir", which became a thing in pip v20.1+. + # At the time of writing this, GitHub Actions offers pip v20.3.3 + # for both ubuntu-latest and windows-latest, and pip v20.3.1 for + # macos-latest. Those are sufficiently recent pip versions that + # have "cache dir" sub-command. 
- name: Get pip cache directory id: pip-cache run: | From 99cca0ea8e7223886b5ee9d184dd70a70141a031 Mon Sep 17 00:00:00 2001 From: Sajith Sasidharan Date: Mon, 18 Jan 2021 13:38:09 -0500 Subject: [PATCH 161/213] No need of upgrading pip on GitHub Actions --- .github/workflows/ci.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 8ccf07aba..afb99c67a 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -61,7 +61,7 @@ jobs: - name: Install Python packages run: | - pip install --upgrade codecov tox setuptools pip + pip install --upgrade codecov tox setuptools pip list - name: Display tool versions @@ -145,7 +145,7 @@ jobs: - name: Install Python packages run: | - pip install --upgrade tox pip + pip install --upgrade tox pip list - name: Display tool versions @@ -210,7 +210,7 @@ jobs: - name: Install Python packages run: | - pip install --upgrade tox pip + pip install --upgrade tox pip list - name: Display tool versions From 9e4ea0c4910c819284b9fc74fd488d522bcc5d47 Mon Sep 17 00:00:00 2001 From: Sajith Sasidharan Date: Mon, 18 Jan 2021 17:54:17 -0500 Subject: [PATCH 162/213] Use fetch-depth of 0 with GitHub Actions Using a fetch-depth of 0 should have the same effect as as `git fetch --prune --unshallow` after doing a shallow checkout. --- .github/workflows/ci.yml | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index afb99c67a..213a87ba1 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -30,11 +30,12 @@ jobs: with: args: install vcpython27 + # See https://github.com/actions/checkout. A fetch-depth of 0 + # fetches all tags and branches. 
- name: Check out Tahoe-LAFS sources uses: actions/checkout@v2 - - - name: Fetch all history for all tags and branches - run: git fetch --prune --unshallow + with: + fetch-depth: 0 - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v1 @@ -121,9 +122,8 @@ jobs: - name: Check out Tahoe-LAFS sources uses: actions/checkout@v2 - - - name: Fetch all history for all tags and branches - run: git fetch --prune --unshallow + with: + fetch-depth: 0 - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v1 @@ -186,9 +186,8 @@ jobs: - name: Check out Tahoe-LAFS sources uses: actions/checkout@v2 - - - name: Fetch all history for all tags and branches - run: git fetch --prune --unshallow + with: + fetch-depth: 0 - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v1 From ed92202762f56c7b37a161064dc4613ec7d9d8d7 Mon Sep 17 00:00:00 2001 From: Sajith Sasidharan Date: Mon, 18 Jan 2021 17:55:07 -0500 Subject: [PATCH 163/213] Updates comments about GitHub cache action --- .github/workflows/ci.yml | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 213a87ba1..ee36833ca 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -42,16 +42,17 @@ jobs: with: python-version: ${{ matrix.python-version }} - # We need "pip cache dir", which became a thing in pip v20.1+. - # At the time of writing this, GitHub Actions offers pip v20.3.3 - # for both ubuntu-latest and windows-latest, and pip v20.3.1 for - # macos-latest. Those are sufficiently recent pip versions that - # have "cache dir" sub-command. + # To use pip caching with GitHub Actions in an OS-independent + # manner, we need `pip cache dir` command, which became + # available since pip v20.1+. At the time of writing this, + # GitHub Actions offers pip v20.3.3 for both ubuntu-latest and + # windows-latest, and pip v20.3.1 for macos-latest. 
- name: Get pip cache directory id: pip-cache run: | echo "::set-output name=dir::$(pip cache dir)" + # See https://github.com/actions/cache - name: Use pip cache uses: actions/cache@v2 with: From afcae42fd6d3f9df21cee943aeb1a65140d7013a Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Fri, 15 Jan 2021 11:52:16 -0500 Subject: [PATCH 164/213] Notice that there's an error on the server, rather than continuing silently. --- integration/test_web.py | 1 + 1 file changed, 1 insertion(+) diff --git a/integration/test_web.py b/integration/test_web.py index fe2137ff3..a322129f8 100644 --- a/integration/test_web.py +++ b/integration/test_web.py @@ -133,6 +133,7 @@ def test_deep_stats(alice): u"file": FILE_CONTENTS, }, ) + resp.raise_for_status() # confirm the file is in the directory resp = requests.get( From 5dd7aa2dfda61156f83e24c37312238a631757c1 Mon Sep 17 00:00:00 2001 From: meejah Date: Tue, 19 Jan 2021 11:25:09 -0700 Subject: [PATCH 165/213] news --- newsfragments/2920.minor | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 newsfragments/2920.minor diff --git a/newsfragments/2920.minor b/newsfragments/2920.minor new file mode 100644 index 000000000..e69de29bb From 3166545509867c158b822794dfc26d716ee8224a Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Tue, 19 Jan 2021 13:52:12 -0500 Subject: [PATCH 166/213] Unit test reproducing the bug in the integration test. 
--- src/allmydata/test/web/test_web.py | 33 ++++++++++++++++++++++++++++++ 1 file changed, 33 insertions(+) diff --git a/src/allmydata/test/web/test_web.py b/src/allmydata/test/web/test_web.py index ce9a40389..c7d866ad1 100644 --- a/src/allmydata/test/web/test_web.py +++ b/src/allmydata/test/web/test_web.py @@ -4757,6 +4757,39 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi op_url = self.webish_url + "/operations/134?t=status&output=JSON" yield self.assertHTTPError(op_url, 404, "unknown/expired handle '134'") + @inlineCallbacks + def test_upload_file_in_directory(self): + """Create a directory, then upload a file into it. + + Unit test reproducer for https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3590 + """ + def req(method, path, **kwargs): + return treq.request(method, self.webish_url + path, persistent=False, + **kwargs) + + response = yield req("POST", "/uri?format=sdmf&t=mkdir&redirect_to_result=true", + browser_like_redirects=True) + + uri = urllib.unquote(response.request.absoluteURI) + assert 'URI:DIR2:' in uri + dircap = uri[uri.find("URI:DIR2:"):].rstrip('/') + dircap_uri = "/uri/{}".format(urllib.quote(dircap)) + + # POST a file into this directory + FILE_CONTENTS = u"a file in a directory" + + body, headers = self.build_form(t="upload", when_done=".", name="file", + file=FILE_CONTENTS) + response = yield req( + "POST", + dircap_uri, + data=body, + headers=headers, + browser_like_redirects=True + ) + if response.code >= 400: + raise Error(response.code, response=response.content()) + def test_incident(self): d = self.POST("/report_incident", details="eek") def _done(res): From 6979cfa205ee13da0358dd92d0971d78c09ffefc Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Tue, 19 Jan 2021 14:28:00 -0500 Subject: [PATCH 167/213] Fix the redirect 'str has no render' bug. 
--- src/allmydata/web/root.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/allmydata/web/root.py b/src/allmydata/web/root.py index 0ef6b00d2..2e82e94ec 100644 --- a/src/allmydata/web/root.py +++ b/src/allmydata/web/root.py @@ -11,7 +11,7 @@ from twisted.web import ( resource, static, ) -from twisted.web.util import redirectTo +from twisted.web.util import redirectTo, Redirect from twisted.python.filepath import FilePath from twisted.web.template import ( Element, @@ -155,7 +155,7 @@ class URIHandler(resource.Resource, object): u = u.replace( path=(s for s in u.path if s), # remove empty segments ) - return redirectTo(u.to_uri().to_text().encode('utf8'), req) + return Redirect(u.to_uri().to_text().encode('utf8')) try: node = self.client.create_node_from_uri(name) return directory.make_handler_for(node, self.client) From 11e4bcf47680f20eb9015df069ed30fd8c71ee0c Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Tue, 19 Jan 2021 14:41:58 -0500 Subject: [PATCH 168/213] Add a direct unit test for FileHandle.get_encryption_key --- src/allmydata/test/test_upload.py | 31 +++++++++++++++++++++++++++++++ 1 file changed, 31 insertions(+) diff --git a/src/allmydata/test/test_upload.py b/src/allmydata/test/test_upload.py index 94d7575c3..664e4cc94 100644 --- a/src/allmydata/test/test_upload.py +++ b/src/allmydata/test/test_upload.py @@ -14,6 +14,9 @@ if PY2: import os, shutil from io import BytesIO +from base64 import ( + b64encode, +) from twisted.trial import unittest from twisted.python.failure import Failure @@ -877,6 +880,34 @@ def is_happy_enough(servertoshnums, h, k): return True +class FileHandleTests(unittest.TestCase): + """ + Tests for ``FileHandle``. + """ + def test_get_encryption_key_convergent(self): + """ + When ``FileHandle`` is initialized with a convergence secret, + ``FileHandle.get_encryption_key`` returns a deterministic result that + is a function of that secret. 
+ """ + secret = b"\x42" * 16 + handle = upload.FileHandle(BytesIO(b"hello world"), secret) + handle.set_default_encoding_parameters({ + "k": 3, + "happy": 5, + "n": 10, + # Remember this is the *max* segment size. In reality, the data + # size is much smaller so the actual segment size incorporated + # into the encryption key is also smaller. + "max_segment_size": 128 * 1024, + }) + + self.assertEqual( + b64encode(self.successResultOf(handle.get_encryption_key())), + b"oBcuR/wKdCgCV2GKKXqiNg==", + ) + + class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin, ShouldFailMixin): From be5cf1a0bea687d21ded82a2bfdc2c6301afae73 Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Tue, 19 Jan 2021 14:42:30 -0500 Subject: [PATCH 169/213] news fragment --- newsfragments/3593.minor | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 newsfragments/3593.minor diff --git a/newsfragments/3593.minor b/newsfragments/3593.minor new file mode 100644 index 000000000..e69de29bb From b0cb50b8973be87b0af3ba553f8cfb530c2b8b90 Mon Sep 17 00:00:00 2001 From: meejah Date: Sun, 20 Dec 2020 19:09:34 -0700 Subject: [PATCH 170/213] write verification instructions, and developer statement --- docs/INSTALL.rst | 37 ++++++++++++++++++++++++++++++- docs/developer-release-signatures | 25 +++++++++++++++++++++ 2 files changed, 61 insertions(+), 1 deletion(-) create mode 100644 docs/developer-release-signatures diff --git a/docs/INSTALL.rst b/docs/INSTALL.rst index 3a724b790..568869407 100644 --- a/docs/INSTALL.rst +++ b/docs/INSTALL.rst @@ -173,7 +173,9 @@ from PyPI with ``venv/bin/pip install tahoe-lafs``. After installation, run Install From a Source Tarball ----------------------------- -You can also install directly from the source tarball URL:: +You can also install directly from the source tarball URL. To verify +signatures, first see verifying_signatures_ and replace the URL in the +following instructions with the local filename. 
% virtualenv venv New python executable in ~/venv/bin/python2.7 @@ -189,6 +191,39 @@ You can also install directly from the source tarball URL:: tahoe-lafs: 1.14.0 ... +.. _verifying_signatures: + +Verifying Signatures +-------------------- + +First download the source tarball and then any signatures. There are several +developers who are expected to produce signatures for a release. *At least +two signatures should be verified*. + +This statement, signed by the existing Tahoe release-signing key, attests to +those developers authorized to sign a Tahoe release: + +.. include:: developer-release-signatures + :code: + +Signatures are made available beside the release. So for example, a release +like ``https://tahoe-lafs.org/downloads/tahoe-lafs-1.16.0.tar.bz2`` might +have signatures ``tahoe-lafs-1.16.0.tar.bz2.meejah.asc`` and +``tahoe-lafs-1.16.0.tar.bz2.warner.asc``. + +To verify the signatures using GnuPG:: + + % gpg --verify tahoe-lafs-1.16.0.tar.bz2.meejah.asc tahoe-lafs-1.16.0.tar.bz2 + gpg: Signature made XXX + gpg: using RSA key 9D5A2BD5688ECB889DEBCD3FC2602803128069A7 + gpg: Good signature from "meejah " [full] + % gpg --verify tahoe-lafs-1.16.0.tar.bz2.warner.asc tahoe-lafs-1.16.0.tar.bz2 + gpg: Signature made XXX + gpg: using RSA key 967EFE06699872411A77DF36D43B4C9C73225AAF + gpg: Good signature from "Brian Warner " [full] + + + Extras ------ diff --git a/docs/developer-release-signatures b/docs/developer-release-signatures new file mode 100644 index 000000000..d79d01fab --- /dev/null +++ b/docs/developer-release-signatures @@ -0,0 +1,25 @@ +TODO: clear-sign this with the release key + + +Any two of the following core Tahoe contributers may sign a +release. They each independantly produce a signature which are made +available beside Tahoe releases after 1.15.0 + +This statement is signed by the previous Tahoe release key. Any future +such statements may be signed by it OR by any two developers (for +example, to add or remove developers from the list). 
+ +meejah +0xC2602803128069A7 +9D5A 2BD5 688E CB88 9DEB CD3F C260 2803 1280 69A7 +https://meejah.ca/meejah.asc + +jean-paul calderone +0x?? +fingerprint +[url for key] + +brian warner +0xD43B4C9C73225AAF +967E FE06 6998 7241 1A77 DF36 D43B 4C9C 7322 5AAF +http://www.lothar.com/warner-gpg.html \ No newline at end of file From 56337c442103e9b76c9ee7351cfa7260b850cf70 Mon Sep 17 00:00:00 2001 From: meejah Date: Sun, 20 Dec 2020 20:29:00 -0700 Subject: [PATCH 171/213] better words --- docs/INSTALL.rst | 4 ++-- docs/developer-release-signatures | 11 +++++++---- 2 files changed, 9 insertions(+), 6 deletions(-) diff --git a/docs/INSTALL.rst b/docs/INSTALL.rst index 568869407..9c67e0ffe 100644 --- a/docs/INSTALL.rst +++ b/docs/INSTALL.rst @@ -197,8 +197,8 @@ Verifying Signatures -------------------- First download the source tarball and then any signatures. There are several -developers who are expected to produce signatures for a release. *At least -two signatures should be verified*. +developers who are able to produce signatures for a release. *At least two +signatures should be found and verified*. This statement, signed by the existing Tahoe release-signing key, attests to those developers authorized to sign a Tahoe release: diff --git a/docs/developer-release-signatures b/docs/developer-release-signatures index d79d01fab..0d916cf6f 100644 --- a/docs/developer-release-signatures +++ b/docs/developer-release-signatures @@ -1,9 +1,12 @@ TODO: clear-sign this with the release key +TODO: update jean-paul's information +January 3, 2021 -Any two of the following core Tahoe contributers may sign a -release. They each independantly produce a signature which are made -available beside Tahoe releases after 1.15.0 +Any of the following core Tahoe contributers may sign a release. Each +release should be signed by at least two developers. 
They each +independantly produce a signature which are made available beside +Tahoe releases after 1.15.0 This statement is signed by the previous Tahoe release key. Any future such statements may be signed by it OR by any two developers (for @@ -22,4 +25,4 @@ fingerprint brian warner 0xD43B4C9C73225AAF 967E FE06 6998 7241 1A77 DF36 D43B 4C9C 7322 5AAF -http://www.lothar.com/warner-gpg.html \ No newline at end of file +http://www.lothar.com/warner-gpg.html From 848fac815b93c57002495b1e9566168db2615364 Mon Sep 17 00:00:00 2001 From: meejah Date: Mon, 4 Jan 2021 14:06:20 -0700 Subject: [PATCH 172/213] spelling --- docs/developer-release-signatures | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/developer-release-signatures b/docs/developer-release-signatures index 0d916cf6f..9f00662f4 100644 --- a/docs/developer-release-signatures +++ b/docs/developer-release-signatures @@ -5,7 +5,7 @@ January 3, 2021 Any of the following core Tahoe contributers may sign a release. Each release should be signed by at least two developers. They each -independantly produce a signature which are made available beside +independently produce a signature which are made available beside Tahoe releases after 1.15.0 This statement is signed by the previous Tahoe release key. Any future From a858d4a7cb9a128cb26f6d692c62e9b7627d5bac Mon Sep 17 00:00:00 2001 From: meejah Date: Mon, 4 Jan 2021 14:15:33 -0700 Subject: [PATCH 173/213] update exarkun's information --- docs/developer-release-signatures | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/developer-release-signatures b/docs/developer-release-signatures index 9f00662f4..b2752e8ca 100644 --- a/docs/developer-release-signatures +++ b/docs/developer-release-signatures @@ -17,9 +17,9 @@ meejah 9D5A 2BD5 688E CB88 9DEB CD3F C260 2803 1280 69A7 https://meejah.ca/meejah.asc -jean-paul calderone -0x?? 
-fingerprint +jean-paul calderone (exarkun) +0xE27B085EDEAA4B1B +96B9 C5DA B2EA 9EB6 7941 9DB7 E27B 085E DEAA 4B1B [url for key] brian warner From 2a3d01a9cc077af2ffd965978b009544cb899ded Mon Sep 17 00:00:00 2001 From: meejah Date: Mon, 4 Jan 2021 14:22:09 -0700 Subject: [PATCH 174/213] url for exarkun's key --- docs/developer-release-signatures | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/developer-release-signatures b/docs/developer-release-signatures index b2752e8ca..ba2a88dc9 100644 --- a/docs/developer-release-signatures +++ b/docs/developer-release-signatures @@ -20,7 +20,7 @@ https://meejah.ca/meejah.asc jean-paul calderone (exarkun) 0xE27B085EDEAA4B1B 96B9 C5DA B2EA 9EB6 7941 9DB7 E27B 085E DEAA 4B1B -[url for key] +http://pgp.mit.edu/pks/lookup?op=get&search=0xE27B085EDEAA4B1B brian warner 0xD43B4C9C73225AAF From 52c2e292d876606dd00191177080dc552d16d554 Mon Sep 17 00:00:00 2001 From: meejah Date: Mon, 4 Jan 2021 14:31:24 -0700 Subject: [PATCH 175/213] news --- newsfragments/3580.minor | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 newsfragments/3580.minor diff --git a/newsfragments/3580.minor b/newsfragments/3580.minor new file mode 100644 index 000000000..e69de29bb From ed9bc93571e44310dad6f9992012f0af0ac13524 Mon Sep 17 00:00:00 2001 From: meejah Date: Tue, 5 Jan 2021 09:28:42 -0700 Subject: [PATCH 176/213] redundant newsfragment --- newsfragments/2920.minor | 0 1 file changed, 0 insertions(+), 0 deletions(-) delete mode 100644 newsfragments/2920.minor diff --git a/newsfragments/2920.minor b/newsfragments/2920.minor deleted file mode 100644 index e69de29bb..000000000 From 91de725d93a8810c98679e3cb8647f615064b9c8 Mon Sep 17 00:00:00 2001 From: meejah Date: Tue, 5 Jan 2021 09:29:10 -0700 Subject: [PATCH 177/213] better url for exarkun's key --- docs/developer-release-signatures | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/developer-release-signatures 
b/docs/developer-release-signatures index ba2a88dc9..5e93e4425 100644 --- a/docs/developer-release-signatures +++ b/docs/developer-release-signatures @@ -20,7 +20,7 @@ https://meejah.ca/meejah.asc jean-paul calderone (exarkun) 0xE27B085EDEAA4B1B 96B9 C5DA B2EA 9EB6 7941 9DB7 E27B 085E DEAA 4B1B -http://pgp.mit.edu/pks/lookup?op=get&search=0xE27B085EDEAA4B1B +https://twistedmatrix.com/~exarkun/E27B085EDEAA4B1B.asc brian warner 0xD43B4C9C73225AAF From a031e6a4b34c986258985410672706101f029d3d Mon Sep 17 00:00:00 2001 From: meejah Date: Tue, 5 Jan 2021 09:33:31 -0700 Subject: [PATCH 178/213] more realistic date, better info --- docs/developer-release-signatures | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/docs/developer-release-signatures b/docs/developer-release-signatures index 5e93e4425..6dd7303fb 100644 --- a/docs/developer-release-signatures +++ b/docs/developer-release-signatures @@ -1,7 +1,5 @@ -TODO: clear-sign this with the release key -TODO: update jean-paul's information -January 3, 2021 +January 8, 2021 Any of the following core Tahoe contributers may sign a release. Each release should be signed by at least two developers. They each From 9957790bb8b163e8e804d399068035c52c7a390a Mon Sep 17 00:00:00 2001 From: meejah Date: Sun, 20 Dec 2020 19:09:34 -0700 Subject: [PATCH 179/213] write verification instructions, and developer statement --- docs/INSTALL.rst | 5 +++-- docs/developer-release-signatures | 2 +- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/docs/INSTALL.rst b/docs/INSTALL.rst index 9c67e0ffe..59d0eb5ea 100644 --- a/docs/INSTALL.rst +++ b/docs/INSTALL.rst @@ -197,8 +197,9 @@ Verifying Signatures -------------------- First download the source tarball and then any signatures. There are several -developers who are able to produce signatures for a release. *At least two -signatures should be found and verified*. +developers who are expected to produce signatures for a release. 
Thus, a +release may have more than one signature. All signatures should be valid and +you should confirm at least one signature. This statement, signed by the existing Tahoe release-signing key, attests to those developers authorized to sign a Tahoe release: diff --git a/docs/developer-release-signatures b/docs/developer-release-signatures index 6dd7303fb..2c9460738 100644 --- a/docs/developer-release-signatures +++ b/docs/developer-release-signatures @@ -23,4 +23,4 @@ https://twistedmatrix.com/~exarkun/E27B085EDEAA4B1B.asc brian warner 0xD43B4C9C73225AAF 967E FE06 6998 7241 1A77 DF36 D43B 4C9C 7322 5AAF -http://www.lothar.com/warner-gpg.html +https://www.lothar.com/warner-gpg.html From 3995c932ef7967a2131c6bc24af28cefef096ad5 Mon Sep 17 00:00:00 2001 From: meejah Date: Sun, 20 Dec 2020 20:29:00 -0700 Subject: [PATCH 180/213] better words --- docs/INSTALL.rst | 6 +++--- newsfragments/2920.minor | 0 2 files changed, 3 insertions(+), 3 deletions(-) create mode 100644 newsfragments/2920.minor diff --git a/docs/INSTALL.rst b/docs/INSTALL.rst index 59d0eb5ea..e47d87bd6 100644 --- a/docs/INSTALL.rst +++ b/docs/INSTALL.rst @@ -197,9 +197,9 @@ Verifying Signatures -------------------- First download the source tarball and then any signatures. There are several -developers who are expected to produce signatures for a release. Thus, a -release may have more than one signature. All signatures should be valid and -you should confirm at least one signature. +developers who are able to produce signatures for a release. A release may +have multiple signatures. All should be valid and you should confirm at least +one of them (ideally, confirm all). 
This statement, signed by the existing Tahoe release-signing key, attests to those developers authorized to sign a Tahoe release: diff --git a/newsfragments/2920.minor b/newsfragments/2920.minor new file mode 100644 index 000000000..e69de29bb From 8c1c682fdd6747d308f0663ab9be3b7b6cdd2be2 Mon Sep 17 00:00:00 2001 From: meejah Date: Tue, 5 Jan 2021 09:28:42 -0700 Subject: [PATCH 181/213] redundant newsfragment --- newsfragments/2920.minor | 0 1 file changed, 0 insertions(+), 0 deletions(-) delete mode 100644 newsfragments/2920.minor diff --git a/newsfragments/2920.minor b/newsfragments/2920.minor deleted file mode 100644 index e69de29bb..000000000 From 8aaf0ee36224b9fb35d800099c0b755c37278c99 Mon Sep 17 00:00:00 2001 From: meejah Date: Tue, 19 Jan 2021 10:23:27 -0700 Subject: [PATCH 182/213] tweak statement --- docs/developer-release-signatures | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/docs/developer-release-signatures b/docs/developer-release-signatures index 2c9460738..8092fb436 100644 --- a/docs/developer-release-signatures +++ b/docs/developer-release-signatures @@ -1,12 +1,12 @@ -January 8, 2021 +January 20, 2021 Any of the following core Tahoe contributers may sign a release. Each -release should be signed by at least two developers. They each -independently produce a signature which are made available beside -Tahoe releases after 1.15.0 +release MUST be signed by at least one developer but MAY have +additional signatures. Each developer independently produces a +signature which is made available beside Tahoe releases after 1.15.0 -This statement is signed by the previous Tahoe release key. Any future +This statement is signed by the existing Tahoe release key. Any future such statements may be signed by it OR by any two developers (for example, to add or remove developers from the list). 
@@ -20,7 +20,7 @@ jean-paul calderone (exarkun) 96B9 C5DA B2EA 9EB6 7941 9DB7 E27B 085E DEAA 4B1B https://twistedmatrix.com/~exarkun/E27B085EDEAA4B1B.asc -brian warner -0xD43B4C9C73225AAF -967E FE06 6998 7241 1A77 DF36 D43B 4C9C 7322 5AAF +brian warner (lothar) +0x863333C265497810 +5810 F125 7F8C F753 7753 895A 8633 33C2 6549 7810 https://www.lothar.com/warner-gpg.html From 407014ec5b51da4e883457bdbae59145be53e1d5 Mon Sep 17 00:00:00 2001 From: meejah Date: Tue, 19 Jan 2021 11:22:18 -0700 Subject: [PATCH 183/213] actually sign statement --- docs/developer-release-signatures | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/docs/developer-release-signatures b/docs/developer-release-signatures index 8092fb436..1b55641d9 100644 --- a/docs/developer-release-signatures +++ b/docs/developer-release-signatures @@ -1,3 +1,6 @@ +-----BEGIN PGP SIGNED MESSAGE----- +Hash: SHA512 + January 20, 2021 @@ -24,3 +27,16 @@ brian warner (lothar) 0x863333C265497810 5810 F125 7F8C F753 7753 895A 8633 33C2 6549 7810 https://www.lothar.com/warner-gpg.html + + +-----BEGIN PGP SIGNATURE----- + +iQEzBAEBCgAdFiEE405i0G0Oac/KQXn/veDTHWhmanoFAmAHIyIACgkQveDTHWhm +anqhqQf/YSbMXL+gwFhAZsjX39EVlbr/Ik7WPPkJW7v1oHybTnwFpFIc52COU1x/ +sqRfk4OyYtz9IBgOPXoWgXu9R4qdK6vYKxEsekcGT9C5l0OyDz8YWXEWgbGK5mvI +aEub9WucD8r2uOQnnW6DtznFuEpvOjtf/+2BU767+bvLsbViW88ocbuLfCqLdOgD +WZT9j3M+Y2Dc56DAJzP/4fkrUSVIofZStYp5u9HBjburgcYIp0g/cyc4xXRoi6Mp +lFTRFv3MIjmoamzSQseoIgP6fi8QRqPrffPrsyqAp+06mJnPhxxFqxtO/ZErmpSa ++BGrLBxdWa8IF9U1A4Fs5nuAzAKMEg== +=E9J+ +-----END PGP SIGNATURE----- From 781deefcde2a45492c056fa0103ca4306ce5aa46 Mon Sep 17 00:00:00 2001 From: meejah Date: Tue, 19 Jan 2021 11:32:53 -0700 Subject: [PATCH 184/213] command-line to sign a tag with official key --- docs/release-checklist.rst | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/docs/release-checklist.rst b/docs/release-checklist.rst index 18c908a99..fedefee51 100644 --- a/docs/release-checklist.rst +++ 
b/docs/release-checklist.rst @@ -118,6 +118,12 @@ Did anyone contribute a hack since the last release? If so, then https://tahoe-lafs.org/hacktahoelafs/ needs to be updated. +Sign Git Tag +```````````` + +- git tag -s -u 0xE34E62D06D0E69CFCA4179FFBDE0D31D68666A7A -m "release Tahoe-LAFS-X.Y.Z" tahoe-lafs-X.Y.Z + + Upload Artifacts ```````````````` From e91d37e64b50579bd75edf23dd767161b699611d Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Tue, 19 Jan 2021 17:13:52 -0500 Subject: [PATCH 185/213] Fix unit test so it's actually testing the real bug. --- src/allmydata/test/web/test_web.py | 22 ++++++++-------------- src/allmydata/web/root.py | 2 +- 2 files changed, 9 insertions(+), 15 deletions(-) diff --git a/src/allmydata/test/web/test_web.py b/src/allmydata/test/web/test_web.py index c7d866ad1..8dcb7d95b 100644 --- a/src/allmydata/test/web/test_web.py +++ b/src/allmydata/test/web/test_web.py @@ -4758,8 +4758,8 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi yield self.assertHTTPError(op_url, 404, "unknown/expired handle '134'") @inlineCallbacks - def test_upload_file_in_directory(self): - """Create a directory, then upload a file into it. + def test_uri_redirect(self): + """URI redirects don't cause failure. 
Unit test reproducer for https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3590 """ @@ -4767,26 +4767,20 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi return treq.request(method, self.webish_url + path, persistent=False, **kwargs) - response = yield req("POST", "/uri?format=sdmf&t=mkdir&redirect_to_result=true", - browser_like_redirects=True) + response = yield req("POST", "/uri?format=sdmf&t=mkdir&redirect_to_result=true") uri = urllib.unquote(response.request.absoluteURI) assert 'URI:DIR2:' in uri dircap = uri[uri.find("URI:DIR2:"):].rstrip('/') - dircap_uri = "/uri/{}".format(urllib.quote(dircap)) + dircap_uri = "/uri/?uri={}&t=json".format(urllib.quote(dircap)) - # POST a file into this directory - FILE_CONTENTS = u"a file in a directory" - - body, headers = self.build_form(t="upload", when_done=".", name="file", - file=FILE_CONTENTS) response = yield req( - "POST", + "GET", dircap_uri, - data=body, - headers=headers, - browser_like_redirects=True ) + self.assertEqual( + response.request.absoluteURI, + self.webish_url + "/uri/{}?t=json".format(urllib.quote(dircap))) if response.code >= 400: raise Error(response.code, response=response.content()) diff --git a/src/allmydata/web/root.py b/src/allmydata/web/root.py index 2e82e94ec..9fb3ac9d3 100644 --- a/src/allmydata/web/root.py +++ b/src/allmydata/web/root.py @@ -147,7 +147,7 @@ class URIHandler(resource.Resource, object): and creates and appropriate handler (depending on the kind of capability it was passed). """ - # this is in case a URI like "/uri/?cap=" is + # this is in case a URI like "/uri/?uri=" is # passed -- we re-direct to the non-trailing-slash version so # that there is just one valid URI for "uri" resource. if not name: From 7d2aa50894bb59043b5e722425d71726f871eb4c Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Tue, 19 Jan 2021 17:15:07 -0500 Subject: [PATCH 186/213] when_done is bad, at least here. 
--- integration/test_web.py | 1 - 1 file changed, 1 deletion(-) diff --git a/integration/test_web.py b/integration/test_web.py index a322129f8..216d80d42 100644 --- a/integration/test_web.py +++ b/integration/test_web.py @@ -127,7 +127,6 @@ def test_deep_stats(alice): dircap_uri, data={ u"t": u"upload", - u"when_done": u".", }, files={ u"file": FILE_CONTENTS, From 53243540cc9ad5310dd9d97de0ca7e1afeed1068 Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Tue, 19 Jan 2021 17:15:58 -0500 Subject: [PATCH 187/213] News file. --- 3590.bugfix | 1 + 1 file changed, 1 insertion(+) create mode 100644 3590.bugfix diff --git a/3590.bugfix b/3590.bugfix new file mode 100644 index 000000000..aa504a5e3 --- /dev/null +++ b/3590.bugfix @@ -0,0 +1 @@ +Fixed issue where redirecting old-style URIs (/uri/?uri=...) didn't work. \ No newline at end of file From 8be3678cb47f0902a94d2ed1b1d651842b738efd Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Wed, 20 Jan 2021 11:22:22 -0500 Subject: [PATCH 188/213] Directly test read_encrypted behavior --- src/allmydata/test/test_upload.py | 69 +++++++++++++++++++++++++++++++ 1 file changed, 69 insertions(+) diff --git a/src/allmydata/test/test_upload.py b/src/allmydata/test/test_upload.py index 94d7575c3..7e41bfc24 100644 --- a/src/allmydata/test/test_upload.py +++ b/src/allmydata/test/test_upload.py @@ -14,6 +14,17 @@ if PY2: import os, shutil from io import BytesIO +from base64 import ( + b64encode, +) + +from hypothesis import ( + given, +) +from hypothesis.strategies import ( + just, + integers, +) from twisted.trial import unittest from twisted.python.failure import Failure @@ -2029,6 +2040,64 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin, f.close() return None + +class EncryptAnUploadableTests(unittest.TestCase): + """ + Tests for ``EncryptAnUploadable``. 
+ """ + def test_same_length(self): + """ + ``EncryptAnUploadable.read_encrypted`` returns ciphertext of the same + length as the underlying plaintext. + """ + plaintext = b"hello world" + uploadable = upload.FileHandle(BytesIO(plaintext), None) + uploadable.set_default_encoding_parameters({ + # These values shouldn't matter. + "k": 3, + "happy": 5, + "n": 10, + "max_segment_size": 128 * 1024, + }) + encrypter = upload.EncryptAnUploadable(uploadable) + ciphertext = b"".join(self.successResultOf(encrypter.read_encrypted(1024, False))) + self.assertEqual(len(ciphertext), len(plaintext)) + + @given(just(b"hello world"), integers(min_value=0, max_value=len(b"hello world"))) + def test_known_result(self, plaintext, split_at): + """ + ``EncryptAnUploadable.read_encrypted`` returns a known-correct ciphertext + string for certain inputs. The ciphertext is independent of the read + sizes. + """ + convergence = b"\x42" * 16 + uploadable = upload.FileHandle(BytesIO(plaintext), convergence) + uploadable.set_default_encoding_parameters({ + # The convergence key is a function of k, n, and max_segment_size + # (among other things). The value for happy doesn't matter + # though. + "k": 3, + "happy": 5, + "n": 10, + "max_segment_size": 128 * 1024, + }) + encrypter = upload.EncryptAnUploadable(uploadable) + def read(n): + return b"".join(self.successResultOf(encrypter.read_encrypted(n, False))) + + # Read the string in one or two pieces to make sure underlying state + # is maintained properly. 
+ first = read(split_at) + second = read(len(plaintext) - split_at) + third = read(1) + ciphertext = first + second + third + + self.assertEqual( + b"Jd2LHCRXozwrEJc=", + b64encode(ciphertext), + ) + + # TODO: # upload with exactly 75 servers (shares_of_happiness) # have a download fail From f75f71cba6e98ac508cf06108523a9d0c1a4842f Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Wed, 20 Jan 2021 11:23:35 -0500 Subject: [PATCH 189/213] news fragment --- newsfragments/3594.minor | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 newsfragments/3594.minor diff --git a/newsfragments/3594.minor b/newsfragments/3594.minor new file mode 100644 index 000000000..e69de29bb From 932481ad47c650cb00070394829a7cc268fdd00e Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Wed, 20 Jan 2021 12:58:03 -0500 Subject: [PATCH 190/213] A helper for doing something repeatedly for a while --- src/allmydata/test/test_deferredutil.py | 55 +++++++++++++++++++++++++ src/allmydata/util/deferredutil.py | 30 ++++++++++++++ 2 files changed, 85 insertions(+) diff --git a/src/allmydata/test/test_deferredutil.py b/src/allmydata/test/test_deferredutil.py index 6ebc93556..2a155089f 100644 --- a/src/allmydata/test/test_deferredutil.py +++ b/src/allmydata/test/test_deferredutil.py @@ -74,3 +74,58 @@ class DeferredUtilTests(unittest.TestCase, deferredutil.WaitForDelayedCallsMixin d = defer.succeed(None) d.addBoth(self.wait_for_delayed_calls) return d + + +class UntilTests(unittest.TestCase): + """ + Tests for ``deferredutil.until``. + """ + def test_exception(self): + """ + If the action raises an exception, the ``Deferred`` returned by ``until`` + fires with a ``Failure``. + """ + self.assertFailure( + deferredutil.until(lambda: 1/0, lambda: True), + ZeroDivisionError, + ) + + def test_stops_on_condition(self): + """ + The action is called repeatedly until ``condition`` returns ``True``. 
+ """ + calls = [] + def action(): + calls.append(None) + + def condition(): + return len(calls) == 3 + + self.assertIs( + self.successResultOf( + deferredutil.until(action, condition), + ), + None, + ) + self.assertEqual(3, len(calls)) + + def test_waits_for_deferred(self): + """ + If the action returns a ``Deferred`` then it is called again when the + ``Deferred`` fires. + """ + counter = [0] + r1 = defer.Deferred() + r2 = defer.Deferred() + results = [r1, r2] + def action(): + counter[0] += 1 + return results.pop(0) + + def condition(): + return False + + deferredutil.until(action, condition) + self.assertEqual([1], counter) + r1.callback(None) + self.assertEqual([2], counter) diff --git a/src/allmydata/util/deferredutil.py b/src/allmydata/util/deferredutil.py index 1d13f61e6..ed2a11ee4 100644 --- a/src/allmydata/util/deferredutil.py +++ b/src/allmydata/util/deferredutil.py @@ -15,7 +15,18 @@ if PY2: import time +try: + from typing import ( + Callable, + Any, + ) +except ImportError: + pass + from foolscap.api import eventually +from eliot.twisted import ( + inline_callbacks, +) from twisted.internet import defer, reactor, error from twisted.python.failure import Failure @@ -201,3 +212,22 @@ class WaitForDelayedCallsMixin(PollMixin): d.addErrback(log.err, "error while waiting for delayed calls") d.addBoth(lambda ign: res) return d + +@inline_callbacks +def until( + action, # type: Callable[[], defer.Deferred[Any]] + condition, # type: Callable[[], bool] +): + # type: (...) -> defer.Deferred[None] + """ + Run a Deferred-returning function until a condition is true. + + :param action: The action to run. + :param condition: The predicate signaling stop. + + :return: A Deferred that fires after the condition signals stop. 
+ """ + while True: + yield action() + if condition(): + break From 12087738d682c051aeeb75a7c90dd78d7b8ebfb0 Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Wed, 20 Jan 2021 13:54:37 -0500 Subject: [PATCH 191/213] Switch from fireEventually to `until` --- src/allmydata/immutable/upload.py | 102 +++++++++++++++++++++--------- src/allmydata/test/test_upload.py | 27 ++++++++ 2 files changed, 98 insertions(+), 31 deletions(-) diff --git a/src/allmydata/immutable/upload.py b/src/allmydata/immutable/upload.py index adcdaed10..fe173b46c 100644 --- a/src/allmydata/immutable/upload.py +++ b/src/allmydata/immutable/upload.py @@ -13,19 +13,33 @@ if PY2: from past.builtins import long, unicode from six import ensure_str +try: + from typing import List +except ImportError: + pass + import os, time, weakref, itertools +from functools import ( + partial, +) + +import attr + from zope.interface import implementer from twisted.python import failure from twisted.internet import defer from twisted.application import service -from foolscap.api import Referenceable, Copyable, RemoteCopy, fireEventually +from foolscap.api import Referenceable, Copyable, RemoteCopy from allmydata.crypto import aes from allmydata.util.hashutil import file_renewal_secret_hash, \ file_cancel_secret_hash, bucket_renewal_secret_hash, \ bucket_cancel_secret_hash, plaintext_hasher, \ storage_index_hash, plaintext_segment_hasher, convergence_hasher -from allmydata.util.deferredutil import timeout_call +from allmydata.util.deferredutil import ( + timeout_call, + until, +) from allmydata import hashtree, uri from allmydata.storage.server import si_b2a from allmydata.immutable import encode @@ -900,13 +914,41 @@ class Tahoe2ServerSelector(log.PrefixingLogMixin): raise UploadUnhappinessError(msg) +@attr.s +class _Accum(object): + """ + Accumulate up to some known amount of ciphertext. + + :ivar remaining: The number of bytes still expected. + :ivar ciphertext: The bytes accumulated so far. 
+ """ + remaining = attr.ib(validator=attr.validators.instance_of(int)) # type: int + ciphertext = attr.ib(default=attr.Factory(list)) # type: List[bytes] + + def extend(self, + size, # type: int + ciphertext, # type: List[bytes] + ): + """ + Accumulate some more ciphertext. + + :param size: The amount of data the new ciphertext represents towards + the goal. This may be more than the actual size of the given + ciphertext if the source has run out of data. + + :param ciphertext: The new ciphertext to accumulate. + """ + self.remaining -= size + self.ciphertext.extend(ciphertext) + + @implementer(IEncryptedUploadable) class EncryptAnUploadable(object): """This is a wrapper that takes an IUploadable and provides IEncryptedUploadable.""" CHUNKSIZE = 50*1024 - def __init__(self, original, log_parent=None, progress=None): + def __init__(self, original, log_parent=None, progress=None, chunk_size=None): precondition(original.default_params_set, "set_default_encoding_parameters not called on %r before wrapping with EncryptAnUploadable" % (original,)) self.original = IUploadable(original) @@ -920,6 +962,8 @@ class EncryptAnUploadable(object): self._ciphertext_bytes_read = 0 self._status = None self._progress = progress + if chunk_size is not None: + self.CHUNKSIZE = chunk_size def set_upload_status(self, upload_status): self._status = IUploadStatus(upload_status) @@ -1026,47 +1070,43 @@ class EncryptAnUploadable(object): # and size d.addCallback(lambda ignored: self.get_size()) d.addCallback(lambda ignored: self._get_encryptor()) - # then fetch and encrypt the plaintext. The unusual structure here - # (passing a Deferred *into* a function) is needed to avoid - # overflowing the stack: Deferreds don't optimize out tail recursion. - # We also pass in a list, to which _read_encrypted will append - # ciphertext. 
- ciphertext = [] - d2 = defer.Deferred() - d.addCallback(lambda ignored: - self._read_encrypted(length, ciphertext, hash_only, d2)) - d.addCallback(lambda ignored: d2) + + accum = _Accum(length) + action = partial(self._read_encrypted, accum, hash_only) + condition = lambda: accum.remaining == 0 + + d.addCallback(lambda ignored: until(action, condition)) + d.addCallback(lambda ignored: accum.ciphertext) return d - def _read_encrypted(self, remaining, ciphertext, hash_only, fire_when_done): - if not remaining: - fire_when_done.callback(ciphertext) - return None + def _read_encrypted(self, + ciphertext_accum, # type: _Accum + hash_only, # type: bool + ): + # type: (...) -> defer.Deferred + """ + Read the next chunk of plaintext, encrypt it, and extend the accumulator + with the resulting ciphertext. + """ # tolerate large length= values without consuming a lot of RAM by # reading just a chunk (say 50kB) at a time. This only really matters # when hash_only==True (i.e. resuming an interrupted upload), since # that's the case where we will be skipping over a lot of data. - size = min(remaining, self.CHUNKSIZE) - remaining = remaining - size + size = min(ciphertext_accum.remaining, self.CHUNKSIZE) + # read a chunk of plaintext.. d = defer.maybeDeferred(self.original.read, size) - # N.B.: if read() is synchronous, then since everything else is - # actually synchronous too, we'd blow the stack unless we stall for a - # tick. Once you accept a Deferred from IUploadable.read(), you must - # be prepared to have it fire immediately too. - d.addCallback(fireEventually) def _good(plaintext): # and encrypt it.. # o/' over the fields we go, hashing all the way, sHA! sHA! sHA! o/' ct = self._hash_and_encrypt_plaintext(plaintext, hash_only) - ciphertext.extend(ct) - self._read_encrypted(remaining, ciphertext, hash_only, - fire_when_done) - def _err(why): - fire_when_done.errback(why) + # Intentionally tell the accumulator about the expected size, not + # the actual size. 
If we run out of data we still want remaining + # to drop otherwise it will never reach 0 and the loop will never + # end. + ciphertext_accum.extend(size, ct) d.addCallback(_good) - d.addErrback(_err) - return None + return d def _hash_and_encrypt_plaintext(self, data, hash_only): assert isinstance(data, (tuple, list)), type(data) diff --git a/src/allmydata/test/test_upload.py b/src/allmydata/test/test_upload.py index 7e41bfc24..07ede2074 100644 --- a/src/allmydata/test/test_upload.py +++ b/src/allmydata/test/test_upload.py @@ -2097,6 +2097,33 @@ class EncryptAnUploadableTests(unittest.TestCase): b64encode(ciphertext), ) + def test_large_read(self): + """ + ``EncryptAnUploadable.read_encrypted`` succeeds even when the requested + data length is much larger than the chunk size. + """ + convergence = b"\x42" * 16 + # 4kB of plaintext + plaintext = b"\xde\xad\xbe\xef" * 1024 + uploadable = upload.FileHandle(BytesIO(plaintext), convergence) + uploadable.set_default_encoding_parameters({ + "k": 3, + "happy": 5, + "n": 10, + "max_segment_size": 128 * 1024, + }) + # Make the chunk size very small so we don't have to operate on a huge + # amount of data to exercise the relevant codepath. + encrypter = upload.EncryptAnUploadable(uploadable, chunk_size=1) + d = encrypter.read_encrypted(len(plaintext), False) + ciphertext = self.successResultOf(d) + self.assertEqual( + list(map(len, ciphertext)), + # Chunk size was specified as 1 above so we will get the whole + # plaintext in one byte chunks. 
+ [1] * len(plaintext), + ) + # TODO: # upload with exactly 75 servers (shares_of_happiness) From 9c91261fa6675e2f92890c9cd1229474e49883db Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Wed, 20 Jan 2021 13:57:01 -0500 Subject: [PATCH 192/213] news fragment --- newsfragments/3595.minor | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 newsfragments/3595.minor diff --git a/newsfragments/3595.minor b/newsfragments/3595.minor new file mode 100644 index 000000000..e69de29bb From 23e52b12377e921bd5cf3b1d15b413cde24aedd3 Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Thu, 21 Jan 2021 09:58:58 -0500 Subject: [PATCH 193/213] Simplify the unit test. --- src/allmydata/test/web/test_web.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/src/allmydata/test/web/test_web.py b/src/allmydata/test/web/test_web.py index 8dcb7d95b..2f000b7a1 100644 --- a/src/allmydata/test/web/test_web.py +++ b/src/allmydata/test/web/test_web.py @@ -4767,11 +4767,9 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi return treq.request(method, self.webish_url + path, persistent=False, **kwargs) - response = yield req("POST", "/uri?format=sdmf&t=mkdir&redirect_to_result=true") - - uri = urllib.unquote(response.request.absoluteURI) - assert 'URI:DIR2:' in uri - dircap = uri[uri.find("URI:DIR2:"):].rstrip('/') + response = yield req("POST", "/uri?format=sdmf&t=mkdir") + dircap = yield response.content() + assert dircap.startswith('URI:DIR2:') dircap_uri = "/uri/?uri={}&t=json".format(urllib.quote(dircap)) response = yield req( From db22291660d3a73a8bda617c33299e6e98ca21b7 Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Thu, 21 Jan 2021 13:54:22 -0500 Subject: [PATCH 194/213] Try to minimally workaround issues causing Windows to block when writing logs. 
--- integration/test_sftp.py | 7 ++++++- integration/test_web.py | 1 + integration/util.py | 20 ++++++++++++++++++++ 3 files changed, 27 insertions(+), 1 deletion(-) diff --git a/integration/test_sftp.py b/integration/test_sftp.py index f1cf92eab..0b1c600d1 100644 --- a/integration/test_sftp.py +++ b/integration/test_sftp.py @@ -23,7 +23,7 @@ from paramiko.rsakey import RSAKey import pytest -from .util import generate_ssh_key +from .util import generate_ssh_key, run_in_thread def connect_sftp(connect_args={"username": "alice", "password": "password"}): @@ -50,6 +50,7 @@ def connect_sftp(connect_args={"username": "alice", "password": "password"}): return sftp +@run_in_thread def test_bad_account_password_ssh_key(alice, tmpdir): """ Can't login with unknown username, wrong password, or wrong SSH pub key. @@ -79,6 +80,7 @@ def test_bad_account_password_ssh_key(alice, tmpdir): }) +@run_in_thread def test_ssh_key_auth(alice): """It's possible to login authenticating with SSH public key.""" key = RSAKey(filename=join(alice.node_dir, "private", "ssh_client_rsa_key")) @@ -88,6 +90,7 @@ def test_ssh_key_auth(alice): assert sftp.listdir() == [] +@run_in_thread def test_read_write_files(alice): """It's possible to upload and download files.""" sftp = connect_sftp() @@ -102,6 +105,7 @@ def test_read_write_files(alice): f.close() +@run_in_thread def test_directories(alice): """ It's possible to create, list directories, and create and remove files in @@ -135,6 +139,7 @@ def test_directories(alice): assert sftp.listdir() == [] +@run_in_thread def test_rename(alice): """Directories and files can be renamed.""" sftp = connect_sftp() diff --git a/integration/test_web.py b/integration/test_web.py index a16bf2e71..fb45807b3 100644 --- a/integration/test_web.py +++ b/integration/test_web.py @@ -178,6 +178,7 @@ def test_deep_stats(alice): @pytest.mark.timeout(60) +@util.run_in_thread def test_status(alice): """ confirm we get something sensible from /status and the various sub-types 
diff --git a/integration/util.py b/integration/util.py index 3d1708bae..2b8f587f2 100644 --- a/integration/util.py +++ b/integration/util.py @@ -13,10 +13,12 @@ from twisted.python.filepath import ( from twisted.internet.defer import Deferred, succeed from twisted.internet.protocol import ProcessProtocol from twisted.internet.error import ProcessExitedAlready, ProcessDone +from twisted.internet.threads import deferToThread import requests from paramiko.rsakey import RSAKey +from boltons.funcutils import wraps from allmydata.util.configutil import ( get_config, @@ -525,3 +527,21 @@ def generate_ssh_key(path): key.write_private_key_file(path) with open(path + ".pub", "wb") as f: f.write(b"%s %s" % (key.get_name(), key.get_base64())) + + +def run_in_thread(f): + """Decorator for integration tests that runs code in a thread. + + Because we're using pytest_twisted, tests are expected to return a Deferred + so reactor can run. If the reactor doesn't run, reads from nodes' stdout + and stderr don't happen. eventually the buffers fill up, and the nodes + block when they try to flush logs. + + We can switch to Twisted APIs (treq instead of requests etc.), but + sometimes it's easier or expedient to just have a block test. So this runs + the test in a thread in a way that still lets the reactor run. + """ + @wraps(f) + def test(*args, **kwargs): + return deferToThread(lambda: f(*args, **kwargs)) + return test From 411ee141e98fc2527797bb461a5c08440b479daf Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Thu, 21 Jan 2021 13:55:51 -0500 Subject: [PATCH 195/213] Fix location for news fragment. 
--- 3590.bugfix => newsfragments/3590.bugfix | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename 3590.bugfix => newsfragments/3590.bugfix (100%) diff --git a/3590.bugfix b/newsfragments/3590.bugfix similarity index 100% rename from 3590.bugfix rename to newsfragments/3590.bugfix From 5a0c913f589de968e7543ba7175386454b509be3 Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Mon, 25 Jan 2021 08:21:39 -0500 Subject: [PATCH 196/213] document the new parameter --- src/allmydata/immutable/upload.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/allmydata/immutable/upload.py b/src/allmydata/immutable/upload.py index fe173b46c..27cc923fd 100644 --- a/src/allmydata/immutable/upload.py +++ b/src/allmydata/immutable/upload.py @@ -949,6 +949,10 @@ class EncryptAnUploadable(object): CHUNKSIZE = 50*1024 def __init__(self, original, log_parent=None, progress=None, chunk_size=None): + """ + :param chunk_size: The number of bytes to read from the uploadable at a + time, or None for some default. 
+ """ precondition(original.default_params_set, "set_default_encoding_parameters not called on %r before wrapping with EncryptAnUploadable" % (original,)) self.original = IUploadable(original) From e0fa2286228a46cd81b82868b56cf096d18d95dc Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Mon, 25 Jan 2021 08:23:40 -0500 Subject: [PATCH 197/213] expand partial/lambda into full functions for clarity --- src/allmydata/immutable/upload.py | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/src/allmydata/immutable/upload.py b/src/allmydata/immutable/upload.py index 27cc923fd..46e01184f 100644 --- a/src/allmydata/immutable/upload.py +++ b/src/allmydata/immutable/upload.py @@ -19,9 +19,6 @@ except ImportError: pass import os, time, weakref, itertools -from functools import ( - partial, -) import attr @@ -1076,8 +1073,18 @@ class EncryptAnUploadable(object): d.addCallback(lambda ignored: self._get_encryptor()) accum = _Accum(length) - action = partial(self._read_encrypted, accum, hash_only) - condition = lambda: accum.remaining == 0 + + def action(): + """ + Read some bytes into the accumulator. + """ + return self._read_encrypted(accum, hash_only) + + def condition(): + """ + Check to see if the accumulator has all the data. + """ + return accum.remaining == 0 d.addCallback(lambda ignored: until(action, condition)) d.addCallback(lambda ignored: accum.ciphertext) From 3b893a56f94dfd57debccc51f2039e0db2e5567f Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Tue, 26 Jan 2021 09:55:13 -0500 Subject: [PATCH 198/213] Just rely on global timeout. 
--- integration/test_web.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/integration/test_web.py b/integration/test_web.py index e89837d30..aab11412f 100644 --- a/integration/test_web.py +++ b/integration/test_web.py @@ -21,8 +21,6 @@ import requests import html5lib from bs4 import BeautifulSoup -import pytest - def test_index(alice): """ @@ -177,7 +175,6 @@ def test_deep_stats(alice): time.sleep(.5) -@pytest.mark.timeout(60) @util.run_in_thread def test_status(alice): """ From 3d2ca566f613a8f238677e71182e025d7a7804f5 Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Tue, 26 Jan 2021 09:57:02 -0500 Subject: [PATCH 199/213] news fragment --- newsfragments/3599.minor | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 newsfragments/3599.minor diff --git a/newsfragments/3599.minor b/newsfragments/3599.minor new file mode 100644 index 000000000..e69de29bb From 0424ba2a487bdc6b3ba6499f6f864e3ffad3e92d Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Tue, 26 Jan 2021 09:57:11 -0500 Subject: [PATCH 200/213] Fix indent. 
--- integration/util.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/integration/util.py b/integration/util.py index 2b8f587f2..e8064934f 100644 --- a/integration/util.py +++ b/integration/util.py @@ -267,7 +267,7 @@ def _create_node(reactor, request, temp_dir, introducer_furl, flog_gatherer, nam '--helper', ] if not storage: - args.append('--no-storage') + args.append('--no-storage') args.append(node_dir) _tahoe_runner_optional_coverage(done_proto, reactor, request, args) From e6b3d59501d4b87021491cf03d05bff1516d68b1 Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Tue, 26 Jan 2021 09:58:08 -0500 Subject: [PATCH 201/213] try to link to the same thing more robustly --- .github/CONTRIBUTING.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/CONTRIBUTING.rst b/.github/CONTRIBUTING.rst index 0c0da9503..b59385aa4 100644 --- a/.github/CONTRIBUTING.rst +++ b/.github/CONTRIBUTING.rst @@ -17,4 +17,4 @@ Examples of contributions include: * `Patch reviews `_ Before authoring or reviewing a patch, -please familiarize yourself with the `Coding Standards `_ and the `Contributor Code of Conduct `_. +please familiarize yourself with the `Coding Standards `_ and the `Contributor Code of Conduct <../docs/CODE_OF_CONDUCT.md>`_. From cc8e613fe3bb1cdec3f84860cfc7c89dee7fb610 Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Tue, 26 Jan 2021 10:00:50 -0500 Subject: [PATCH 202/213] Rephrase. --- newsfragments/3584.bugfix | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/newsfragments/3584.bugfix b/newsfragments/3584.bugfix index 73650f40b..faf57713b 100644 --- a/newsfragments/3584.bugfix +++ b/newsfragments/3584.bugfix @@ -1 +1 @@ -SFTP public key auth likely works better, and SFTP in general was broken in the prerelease. \ No newline at end of file +SFTP public key auth likely works more consistently, and SFTP in general was previously broken. 
\ No newline at end of file From e7ab792c4c81bd33e989499b304e2fdd212e4715 Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Tue, 26 Jan 2021 10:06:17 -0500 Subject: [PATCH 203/213] Explain why Paramiko. --- integration/test_sftp.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/integration/test_sftp.py b/integration/test_sftp.py index 0b1c600d1..ed5b37a31 100644 --- a/integration/test_sftp.py +++ b/integration/test_sftp.py @@ -1,6 +1,13 @@ """ It's possible to create/rename/delete files and directories in Tahoe-LAFS using SFTP. + +These tests use Paramiko, rather than Twisted's Conch, because: + + 1. It's a different implementation, so we're not testing Conch against + itself. + + 2. Its API is much simpler to use. """ from __future__ import unicode_literals From 4e89ab2e66f3fb78c92e3060507f1a50a5a74d69 Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Tue, 26 Jan 2021 10:06:57 -0500 Subject: [PATCH 204/213] Context manager. --- integration/test_sftp.py | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/integration/test_sftp.py b/integration/test_sftp.py index ed5b37a31..6171c7413 100644 --- a/integration/test_sftp.py +++ b/integration/test_sftp.py @@ -101,15 +101,14 @@ def test_ssh_key_auth(alice): def test_read_write_files(alice): """It's possible to upload and download files.""" sftp = connect_sftp() - f = sftp.file("myfile", "wb") - f.write(b"abc") - f.write(b"def") - f.close() - f = sftp.file("myfile", "rb") - assert f.read(4) == b"abcd" - assert f.read(2) == b"ef" - assert f.read(1) == b"" - f.close() + with sftp.file("myfile", "wb") as f: + f.write(b"abc") + f.write(b"def") + + with sftp.file("myfile", "rb") as f: + assert f.read(4) == b"abcd" + assert f.read(2) == b"ef" + assert f.read(1) == b"" @run_in_thread From 6c04ea74977d745fc4649e871c85bc690e9f9263 Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Tue, 26 Jan 2021 10:14:14 -0500 Subject: [PATCH 205/213] Explanatory comment is 
better now. --- integration/util.py | 19 +++++++++++++------ 1 file changed, 13 insertions(+), 6 deletions(-) diff --git a/integration/util.py b/integration/util.py index e8064934f..444c8b05e 100644 --- a/integration/util.py +++ b/integration/util.py @@ -532,14 +532,21 @@ def generate_ssh_key(path): def run_in_thread(f): """Decorator for integration tests that runs code in a thread. - Because we're using pytest_twisted, tests are expected to return a Deferred - so reactor can run. If the reactor doesn't run, reads from nodes' stdout - and stderr don't happen. eventually the buffers fill up, and the nodes - block when they try to flush logs. + Because we're using pytest_twisted, tests that rely on the reactor are + expected to return a Deferred and use async APIs so the reactor can run. + + In the case of the integration test suite, it launches nodes in the + background using Twisted APIs. The nodes stdout and stderr is read via + Twisted code. If the reactor doesn't run, reads don't happen, and + eventually the buffers fill up, and the nodes block when they try to flush + logs. We can switch to Twisted APIs (treq instead of requests etc.), but - sometimes it's easier or expedient to just have a block test. So this runs - the test in a thread in a way that still lets the reactor run. + sometimes it's easier or expedient to just have a blocking test. So this + decorator allows you to run the test in a thread, and the reactor can keep + running in the main thread. + + See https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3597 for tracking bug. """ @wraps(f) def test(*args, **kwargs): From d25a0f1ce29d82efd02d1e845f23caa09a2dac93 Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Tue, 26 Jan 2021 12:40:39 -0500 Subject: [PATCH 206/213] Increase timeout, just to be on the safe side. 
---
 integration/util.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/integration/util.py b/integration/util.py
index 444c8b05e..256fd68c1 100644
--- a/integration/util.py
+++ b/integration/util.py
@@ -30,7 +30,7 @@ from allmydata import client
 import pytest_twisted
 
 
-def block_with_timeout(deferred, reactor, timeout=10):
+def block_with_timeout(deferred, reactor, timeout=120):
     """Block until Deferred has result, but timeout instead of waiting forever."""
     deferred.addTimeout(timeout, reactor)
     return pytest_twisted.blockon(deferred)

From d1be6b4bde63ed31c4d3075f76e04a77799dbcdd Mon Sep 17 00:00:00 2001
From: Itamar Turner-Trauring
Date: Thu, 28 Jan 2021 15:10:57 -0500
Subject: [PATCH 207/213] Prepare for merge.

---
 src/allmydata/frontends/sftpd.py | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/src/allmydata/frontends/sftpd.py b/src/allmydata/frontends/sftpd.py
index 895c93f85..51b161dda 100644
--- a/src/allmydata/frontends/sftpd.py
+++ b/src/allmydata/frontends/sftpd.py
@@ -1,3 +1,6 @@
+"""
+Ported to Python 3.
+"""
 from __future__ import absolute_import
 from __future__ import division
 from __future__ import print_function
@@ -1266,8 +1269,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin):
                (existing_file, userpath, flags, _repr_flags(flags), parent, childname, filenode, metadata),
                level=NOISY)
 
-    _assert((isinstance(userpath, bytes) and isinstance(childname, (str, type(None))) and
-             (metadata is None or 'no-write' in metadata)),
+    _assert((isinstance(userpath, bytes) and isinstance(childname, (str, type(None))) and (metadata is None or 'no-write' in metadata)),
             userpath=userpath, childname=childname, metadata=metadata)
 
     writing = (flags & (FXF_WRITE | FXF_CREAT)) != 0

From 943eabab28d976c80044613d237db36b3efba22a Mon Sep 17 00:00:00 2001
From: Itamar Turner-Trauring
Date: Fri, 29 Jan 2021 10:02:16 -0500
Subject: [PATCH 208/213] Pacify flake8.
--- src/allmydata/frontends/sftpd.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/src/allmydata/frontends/sftpd.py b/src/allmydata/frontends/sftpd.py index b737e0473..bc7196de6 100644 --- a/src/allmydata/frontends/sftpd.py +++ b/src/allmydata/frontends/sftpd.py @@ -11,7 +11,7 @@ if PY2: from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 import six -import heapq, traceback, array, stat, struct +import heapq, traceback, stat, struct from stat import S_IFREG, S_IFDIR from time import time, strftime, localtime @@ -433,10 +433,10 @@ class OverwriteableFileConsumer(PrefixingLogMixin): milestone = end while len(self.milestones) > 0: - (next, d) = self.milestones[0] - if next > milestone: + (next_, d) = self.milestones[0] + if next_ > milestone: return - if noisy: self.log("MILESTONE %r %r" % (next, d), level=NOISY) + if noisy: self.log("MILESTONE %r %r" % (next_, d), level=NOISY) heapq.heappop(self.milestones) eventually_callback(d)(b"reached") @@ -549,8 +549,8 @@ class OverwriteableFileConsumer(PrefixingLogMixin): eventually_callback(self.done)(None) while len(self.milestones) > 0: - (next, d) = self.milestones[0] - if noisy: self.log("MILESTONE FINISH %r %r %r" % (next, d, res), level=NOISY) + (next_, d) = self.milestones[0] + if noisy: self.log("MILESTONE FINISH %r %r %r" % (next_, d, res), level=NOISY) heapq.heappop(self.milestones) # The callback means that the milestone has been reached if # it is ever going to be. Note that the file may have been @@ -691,7 +691,7 @@ class GeneralSFTPFile(PrefixingLogMixin): # not be set before then. 
self.consumer = None - def open(self, parent=None, childname=None, filenode=None, metadata=None): + def open(self, parent=None, childname=None, filenode=None, metadata=None): # noqa: F811 self.log(".open(parent=%r, childname=%r, filenode=%r, metadata=%r)" % (parent, childname, filenode, metadata), level=OPERATIONAL) From ddcb43561d119a87cb58d0ba65d9ae3c6d3d5ac5 Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Mon, 8 Feb 2021 19:49:02 -0500 Subject: [PATCH 209/213] Try to convince Mypy it's okay --- src/allmydata/windows/fixups.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/src/allmydata/windows/fixups.py b/src/allmydata/windows/fixups.py index abf4a8680..bb8e3fd97 100644 --- a/src/allmydata/windows/fixups.py +++ b/src/allmydata/windows/fixups.py @@ -1,5 +1,15 @@ from __future__ import print_function +# This code isn't loadable or sensible except on Windows. Importers all know +# this and are careful. Normally I would just let an import error from ctypes +# explain any mistakes but Mypy also needs some help here. This assert +# explains to it that this module is Windows-only. This prevents errors about +# ctypes.windll and such which only exist when running on Windows. 
+# +# https://mypy.readthedocs.io/en/stable/common_issues.html?highlight=platform#python-version-and-system-platform-checks +from sys import platform +assert platform == "win32" + import codecs, re from functools import partial From 541d7043d7674024e489757f23ee80f3a379316f Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Tue, 9 Feb 2021 10:20:14 -0500 Subject: [PATCH 210/213] Some comments about unicode handling in this UnicodeOutput thing --- src/allmydata/windows/fixups.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/allmydata/windows/fixups.py b/src/allmydata/windows/fixups.py index bb8e3fd97..b02c63b0b 100644 --- a/src/allmydata/windows/fixups.py +++ b/src/allmydata/windows/fixups.py @@ -275,10 +275,15 @@ class UnicodeOutput(object): def write(self, text): try: if self._hConsole is None: + # There is no Windows console available. That means we are + # responsible for encoding the unicode to a byte string to + # write it to a Python file object. if isinstance(text, unicode): text = text.encode('utf-8') self._stream.write(text) else: + # There is a Windows console available. That means Windows is + # responsible for dealing with the unicode itself. if not isinstance(text, unicode): text = str(text).decode('utf-8') remaining = len(text) From 27fcfe94dd371b6aeb6309cd2481ea4037fc2d39 Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Tue, 9 Feb 2021 10:24:46 -0500 Subject: [PATCH 211/213] The code is 3-clause BSD licensed now. 
--- src/allmydata/test/_win_subprocess.py | 34 +++++++++++++++++++++++++++ 1 file changed, 34 insertions(+) diff --git a/src/allmydata/test/_win_subprocess.py b/src/allmydata/test/_win_subprocess.py index cc66f7552..1ca2de1f4 100644 --- a/src/allmydata/test/_win_subprocess.py +++ b/src/allmydata/test/_win_subprocess.py @@ -1,3 +1,37 @@ +# -*- coding: utf-8 -*- + +## Copyright (C) 2021 Valentin Lab +## +## Redistribution and use in source and binary forms, with or without +## modification, are permitted provided that the following conditions +## are met: +## +## 1. Redistributions of source code must retain the above copyright +## notice, this list of conditions and the following disclaimer. +## +## 2. Redistributions in binary form must reproduce the above +## copyright notice, this list of conditions and the following +## disclaimer in the documentation and/or other materials provided +## with the distribution. +## +## 3. Neither the name of the copyright holder nor the names of its +## contributors may be used to endorse or promote products derived +## from this software without specific prior written permission. +## +## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +## "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +## LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +## FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +## COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, +## INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +## (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +## SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +## HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, +## STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +## ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED +## OF THE POSSIBILITY OF SUCH DAMAGE. 
+## + ## issue: https://bugs.python.org/issue19264 import os From b26652cad1cb5d5589b25a387c6ff10f9912c191 Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Tue, 9 Feb 2021 14:36:19 -0500 Subject: [PATCH 212/213] Try to get Mypy to recognize it this way? --- src/allmydata/windows/fixups.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/src/allmydata/windows/fixups.py b/src/allmydata/windows/fixups.py index b02c63b0b..0d1ed2717 100644 --- a/src/allmydata/windows/fixups.py +++ b/src/allmydata/windows/fixups.py @@ -6,9 +6,12 @@ from __future__ import print_function # explains to it that this module is Windows-only. This prevents errors about # ctypes.windll and such which only exist when running on Windows. # +# Beware of the limitations of the Mypy AST analyzer. The check needs to take +# exactly this form or it may not be recognized. +# # https://mypy.readthedocs.io/en/stable/common_issues.html?highlight=platform#python-version-and-system-platform-checks -from sys import platform -assert platform == "win32" +import sys +assert sys.platform == "win32" import codecs, re from functools import partial From 28acc5ccb4cd911fd7a5d0272ccceef1b82ab30b Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Tue, 9 Feb 2021 14:50:29 -0500 Subject: [PATCH 213/213] Duplicate the fix for the other Windows-only module --- src/allmydata/test/_win_subprocess.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/allmydata/test/_win_subprocess.py b/src/allmydata/test/_win_subprocess.py index 1ca2de1f4..fe6960c73 100644 --- a/src/allmydata/test/_win_subprocess.py +++ b/src/allmydata/test/_win_subprocess.py @@ -34,6 +34,10 @@ ## issue: https://bugs.python.org/issue19264 +# See allmydata/windows/fixups.py +import sys +assert sys.platform == "win32" + import os import ctypes import subprocess