From add510701c0809cf89494434c1dccdfc3271df47 Mon Sep 17 00:00:00 2001
From: Itamar Turner-Trauring
Date: Wed, 16 Nov 2022 11:44:51 -0500
Subject: [PATCH 01/43] Run integration tests both with and without HTTP
 storage protocol.

---
 .github/workflows/ci.yml          |  6 +++++-
 integration/util.py               | 17 +++++++++--------
 newsfragments/3937.minor          |  0
 src/allmydata/protocol_switch.py  | 16 ++++++++++++++++
 src/allmydata/testing/__init__.py | 18 ++++++++++++++++++
 5 files changed, 48 insertions(+), 9 deletions(-)
 create mode 100644 newsfragments/3937.minor

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 0327014ca..26574066c 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -229,7 +229,11 @@ jobs:
           # aren't too long. On Windows tox won't pass it through so it has no
           # effect. On Linux it doesn't make a difference one way or another.
           TMPDIR: "/tmp"
-        run: tox -e integration
+        run: |
+          # Run with Foolscap forced:
+          __TAHOE_INTEGRATION_FORCE_FOOLSCAP=1 tox -e integration
+          # Run with Foolscap not forced, which should result in HTTP being used.
+          __TAHOE_INTEGRATION_FORCE_FOOLSCAP=0 tox -e integration

       - name: Upload eliot.log in case of failure
         uses: actions/upload-artifact@v1
diff --git a/integration/util.py b/integration/util.py
index ad9249e45..cde837218 100644
--- a/integration/util.py
+++ b/integration/util.py
@@ -1,14 +1,6 @@
 """
 Ported to Python 3.
 """
-from __future__ import unicode_literals
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-
-from future.utils import PY2
-if PY2:
-    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401

 import sys
 import time
@@ -38,6 +30,7 @@ from allmydata.util.configutil import (
     write_config,
 )
 from allmydata import client
+from allmydata.testing import foolscap_only_for_integration_testing

 import pytest_twisted

@@ -300,6 +293,14 @@ def _create_node(reactor, request, temp_dir, introducer_furl, flog_gatherer, nam
             u'log_gatherer.furl',
             flog_gatherer,
         )
+        force_foolscap = foolscap_only_for_integration_testing()
+        if force_foolscap is not None:
+            set_config(
+                config,
+                'storage',
+                'force_foolscap',
+                str(force_foolscap),
+            )
         write_config(FilePath(config_path), config)
     created_d.addCallback(created)
diff --git a/newsfragments/3937.minor b/newsfragments/3937.minor
new file mode 100644
index 000000000..e69de29bb
diff --git a/src/allmydata/protocol_switch.py b/src/allmydata/protocol_switch.py
index b0af84c33..d88863fdb 100644
--- a/src/allmydata/protocol_switch.py
+++ b/src/allmydata/protocol_switch.py
@@ -30,6 +30,7 @@ from foolscap.api import Tub

 from .storage.http_server import HTTPServer, build_nurl
 from .storage.server import StorageServer
+from .testing import foolscap_only_for_integration_testing


 class _PretendToBeNegotiation(type):
@@ -170,6 +171,21 @@ class _FoolscapOrHttps(Protocol, metaclass=_PretendToBeNegotiation):
         # and later data, otherwise assume HTTPS.
         self._timeout.cancel()
         if self._buffer.startswith(b"GET /id/"):
+            if foolscap_only_for_integration_testing() == False:
+                # Tahoe will prefer HTTP storage protocol over Foolscap when possible.
+                #
+                # If this branch is taken, we are running a test that should
+                # be using HTTP for the storage protocol. As such, we
+                # aggressively disable Foolscap to ensure that HTTP is in fact
+                # going to be used. If we hit this branch, that means our
+                # expectation that HTTP will be used was wrong, suggesting a
+                # bug in either the code or the integration testing setup.
+                #
+                # This branch should never be hit in production!
+                self.transport.loseConnection()
+                print("FOOLSCAP IS DISABLED, I PITY THE FOOLS WHO SEE THIS MESSAGE")
+                return
+
             # We're a Foolscap Negotiation server protocol instance:
             transport = self.transport
             buf = self._buffer
diff --git a/src/allmydata/testing/__init__.py b/src/allmydata/testing/__init__.py
index e69de29bb..119ae4101 100644
--- a/src/allmydata/testing/__init__.py
+++ b/src/allmydata/testing/__init__.py
@@ -0,0 +1,18 @@
+import os
+from typing import Optional
+
+
+def foolscap_only_for_integration_testing() -> Optional[bool]:
+    """
+    Return whether HTTP storage protocol has been disabled / Foolscap
+    forced, for purposes of integration testing.
+
+    This is determined by the __TAHOE_INTEGRATION_FORCE_FOOLSCAP environment
+    variable, which can be 1, 0, or not set, corresponding to results of
+    ``True``, ``False`` and ``None`` (i.e. default).
+    """
+    force_foolscap = os.environ.get("__TAHOE_INTEGRATION_FORCE_FOOLSCAP")
+    if force_foolscap is None:
+        return None
+
+    return bool(int(force_foolscap))

From 7afd821efc826f2ee644ed85369b0bc6b8dbb482 Mon Sep 17 00:00:00 2001
From: Itamar Turner-Trauring
Date: Wed, 16 Nov 2022 13:28:26 -0500
Subject: [PATCH 02/43] Sigh

---
 src/allmydata/test/test_storage_https.py | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/src/allmydata/test/test_storage_https.py b/src/allmydata/test/test_storage_https.py
index bacb40290..a9421c3e5 100644
--- a/src/allmydata/test/test_storage_https.py
+++ b/src/allmydata/test/test_storage_https.py
@@ -179,6 +179,10 @@ class PinningHTTPSValidation(AsyncTestCase):
         response = await self.request(url, certificate)
         self.assertEqual(await response.content(), b"YOYODYNE")

+        # We keep getting TLSMemoryBIOProtocol being left around, so try harder
+        # to wait for it to finish.
+        await deferLater(reactor, 0.01)
+
     @async_to_deferred
     async def test_server_certificate_not_valid_yet(self):
         """

From 4c8e8a74a4920359617bb4471f97eb3817eed37a Mon Sep 17 00:00:00 2001
From: Itamar Turner-Trauring
Date: Thu, 17 Nov 2022 12:25:37 -0500
Subject: [PATCH 03/43] Not needed.

---
 src/allmydata/test/test_storage_https.py | 4 ----
 1 file changed, 4 deletions(-)

diff --git a/src/allmydata/test/test_storage_https.py b/src/allmydata/test/test_storage_https.py
index a9421c3e5..88435bf89 100644
--- a/src/allmydata/test/test_storage_https.py
+++ b/src/allmydata/test/test_storage_https.py
@@ -202,10 +202,6 @@ class PinningHTTPSValidation(AsyncTestCase):
         response = await self.request(url, certificate)
         self.assertEqual(await response.content(), b"YOYODYNE")

-        # We keep getting TLSMemoryBIOProtocol being left around, so try harder
-        # to wait for it to finish.
-        await deferLater(reactor, 0.001)
-
 # A potential attack to test is a private key that doesn't match the
 # certificate... but OpenSSL (quite rightly) won't let you listen with that
 # so I don't know how to test that!
See From f5b24d51e909d4e5bc5a836fb2970ee025faf66f Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Tue, 29 Nov 2022 10:14:08 -0500 Subject: [PATCH 04/43] Add a test for missing Authorization --- src/allmydata/test/test_storage_http.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/src/allmydata/test/test_storage_http.py b/src/allmydata/test/test_storage_http.py index 8dbe18545..de60812e3 100644 --- a/src/allmydata/test/test_storage_http.py +++ b/src/allmydata/test/test_storage_http.py @@ -37,6 +37,7 @@ from twisted.web import http from twisted.web.http_headers import Headers from werkzeug import routing from werkzeug.exceptions import NotFound as WNotFound +from testtools.matchers import Equals from .common import SyncTestCase from ..storage.http_common import get_content_type, CBOR_MIME_TYPE @@ -555,6 +556,20 @@ class GenericHTTPAPITests(SyncTestCase): super(GenericHTTPAPITests, self).setUp() self.http = self.useFixture(HttpTestFixture()) + def test_missing_authentication(self) -> None: + """ + If nothing is given in the ``Authorization`` header at all an + ``Unauthorized`` response is returned. + """ + client = StubTreq(self.http.http_server.get_resource()) + response = self.http.result_of_with_flush( + client.request( + "GET", + "http://127.0.0.1/storage/v1/version", + ), + ) + self.assertThat(response.code, Equals(http.UNAUTHORIZED)) + def test_bad_authentication(self): """ If the wrong swissnum is used, an ``Unauthorized`` response code is From 920467dcea958dee101eec55e6cb67d7118e11ac Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Tue, 29 Nov 2022 10:19:01 -0500 Subject: [PATCH 05/43] Treat missing Authorization as the same as empty Authorization --- src/allmydata/storage/http_server.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/allmydata/storage/http_server.py b/src/allmydata/storage/http_server.py index 3902976ba..96a491882 100644 --- a/src/allmydata/storage/http_server.py +++ b/src/allmydata/storage/http_server.py @@ -100,7 +100,7 @@ def _authorization_decorator(required_secrets): @wraps(f) def route(self, request, *args, **kwargs): if not timing_safe_compare( - request.requestHeaders.getRawHeaders("Authorization", [None])[0].encode( + request.requestHeaders.getRawHeaders("Authorization", [""])[0].encode( "utf-8" ), swissnum_auth_header(self._swissnum), From 57f13a2472c4fef1e99ffc4b8522a88d4be3c14c Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Tue, 29 Nov 2022 10:20:13 -0500 Subject: [PATCH 06/43] news fragment --- newsfragments/3942.minor | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 newsfragments/3942.minor diff --git a/newsfragments/3942.minor b/newsfragments/3942.minor new file mode 100644 index 000000000..e69de29bb From 4367e5a0fcfd5c905195b741eec727eb2416096d Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Wed, 30 Nov 2022 09:28:58 -0500 Subject: [PATCH 07/43] Bump the Twisted dependency so we can do this --- setup.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 768e44e29..a3b3d5b98 100644 --- a/setup.py +++ b/setup.py @@ -96,7 +96,9 @@ install_requires = [ # an sftp extra in Tahoe-LAFS, there is no point in having one. # * Twisted 19.10 introduces Site.getContentFile which we use to get # temporary upload files placed into a per-node temporary directory. 
- "Twisted[tls,conch] >= 19.10.0", + # * Twisted 22.8.0 added support for coroutine-returning functions in many + # places (mainly via `maybeDeferred`) + "Twisted[tls,conch] >= 22.8.0", "PyYAML >= 3.11", From 5cebe91406c5d9db2c4b5ce150f85a3fd50322e7 Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Wed, 30 Nov 2022 09:29:57 -0500 Subject: [PATCH 08/43] update the module docstring --- src/allmydata/test/mutable/test_version.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/allmydata/test/mutable/test_version.py b/src/allmydata/test/mutable/test_version.py index d5c44f204..aa6fb539f 100644 --- a/src/allmydata/test/mutable/test_version.py +++ b/src/allmydata/test/mutable/test_version.py @@ -1,5 +1,6 @@ """ -Ported to Python 3. +Tests related to the way ``allmydata.mutable`` handles different versions +of data for an object. """ from __future__ import print_function from __future__ import absolute_import From 1acf8604eff5227ed372b81eac20bc08677a853a Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Wed, 30 Nov 2022 09:30:08 -0500 Subject: [PATCH 09/43] Remove the Py2/Py3 compatibility header --- src/allmydata/test/mutable/test_version.py | 10 +--------- 1 file changed, 1 insertion(+), 9 deletions(-) diff --git a/src/allmydata/test/mutable/test_version.py b/src/allmydata/test/mutable/test_version.py index aa6fb539f..669baa8db 100644 --- a/src/allmydata/test/mutable/test_version.py +++ b/src/allmydata/test/mutable/test_version.py @@ -2,17 +2,9 @@ Tests related to the way ``allmydata.mutable`` handles different versions of data for an object. """ -from __future__ import print_function -from __future__ import absolute_import -from __future__ import division -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 +from io import StringIO import os -from six.moves import cStringIO as StringIO from twisted.internet import defer from ..common import AsyncTestCase From a11eeaf240d1fde831e571ad5b5df3ebeed97168 Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Wed, 30 Nov 2022 09:30:37 -0500 Subject: [PATCH 10/43] Convert all of the asynchronous functions to use `async` and `await` --- src/allmydata/test/mutable/test_version.py | 546 +++++++++------------ 1 file changed, 228 insertions(+), 318 deletions(-) diff --git a/src/allmydata/test/mutable/test_version.py b/src/allmydata/test/mutable/test_version.py index 669baa8db..d14cc9295 100644 --- a/src/allmydata/test/mutable/test_version.py +++ b/src/allmydata/test/mutable/test_version.py @@ -5,8 +5,8 @@ of data for an object. 
from io import StringIO import os +from typing import Optional -from twisted.internet import defer from ..common import AsyncTestCase from testtools.matchers import ( Equals, @@ -40,343 +40,269 @@ class Version(GridTestMixin, AsyncTestCase, testutil.ShouldFailMixin, \ self.small_data = b"test data" * 10 # 90 B; SDMF - def do_upload_mdmf(self, data=None): + async def do_upload_mdmf(self, data: Optional[bytes] = None) -> MutableFileNode: if data is None: data = self.data - d = self.nm.create_mutable_file(MutableData(data), - version=MDMF_VERSION) - def _then(n): - self.assertThat(n, IsInstance(MutableFileNode)) - self.assertThat(n._protocol_version, Equals(MDMF_VERSION)) - self.mdmf_node = n - return n - d.addCallback(_then) - return d + n = await self.nm.create_mutable_file(MutableData(data), + version=MDMF_VERSION) + self.assertThat(n, IsInstance(MutableFileNode)) + self.assertThat(n._protocol_version, Equals(MDMF_VERSION)) + self.mdmf_node = n + return n - def do_upload_sdmf(self, data=None): + async def do_upload_sdmf(self, data: Optional[bytes] = None) -> MutableFileNode: if data is None: data = self.small_data - d = self.nm.create_mutable_file(MutableData(data)) - def _then(n): - self.assertThat(n, IsInstance(MutableFileNode)) - self.assertThat(n._protocol_version, Equals(SDMF_VERSION)) - self.sdmf_node = n - return n - d.addCallback(_then) - return d + n = await self.nm.create_mutable_file(MutableData(data)) + self.assertThat(n, IsInstance(MutableFileNode)) + self.assertThat(n._protocol_version, Equals(SDMF_VERSION)) + self.sdmf_node = n + return n - def do_upload_empty_sdmf(self): - d = self.nm.create_mutable_file(MutableData(b"")) - def _then(n): - self.assertThat(n, IsInstance(MutableFileNode)) - self.sdmf_zero_length_node = n - self.assertThat(n._protocol_version, Equals(SDMF_VERSION)) - return n - d.addCallback(_then) - return d + async def do_upload_empty_sdmf(self) -> MutableFileNode: + n = await self.nm.create_mutable_file(MutableData(b"")) + self.assertThat(n, IsInstance(MutableFileNode)) + self.sdmf_zero_length_node = n + self.assertThat(n._protocol_version, Equals(SDMF_VERSION)) + return n - def do_upload(self): - d = self.do_upload_mdmf() - d.addCallback(lambda ign: self.do_upload_sdmf()) - return d + async def do_upload(self) -> MutableFileNode: + await self.do_upload_mdmf() + return await self.do_upload_sdmf() - def test_debug(self): - d = self.do_upload_mdmf() - def _debug(n): - fso = debug.FindSharesOptions() - storage_index = base32.b2a(n.get_storage_index()) - fso.si_s = str(storage_index, "utf-8") # command-line options are unicode on Python 3 - fso.nodedirs = [os.path.dirname(abspath_expanduser_unicode(str(storedir))) - for (i,ss,storedir) - in self.iterate_servers()] - fso.stdout = StringIO() - fso.stderr = StringIO() - debug.find_shares(fso) - sharefiles = fso.stdout.getvalue().splitlines() - expected = self.nm.default_encoding_parameters["n"] - self.assertThat(sharefiles, HasLength(expected)) + async def test_debug(self) -> None: + n = await self.do_upload_mdmf() + fso = debug.FindSharesOptions() + storage_index = base32.b2a(n.get_storage_index()) + fso.si_s = str(storage_index, "utf-8") # command-line options are unicode on Python 3 + fso.nodedirs = [os.path.dirname(abspath_expanduser_unicode(str(storedir))) + for (i,ss,storedir) + in self.iterate_servers()] + fso.stdout = StringIO() + fso.stderr = StringIO() + debug.find_shares(fso) + sharefiles = fso.stdout.getvalue().splitlines() + expected = self.nm.default_encoding_parameters["n"] + 
self.assertThat(sharefiles, HasLength(expected)) - do = debug.DumpOptions() - do["filename"] = sharefiles[0] - do.stdout = StringIO() - debug.dump_share(do) - output = do.stdout.getvalue() - lines = set(output.splitlines()) - self.assertTrue("Mutable slot found:" in lines, output) - self.assertTrue(" share_type: MDMF" in lines, output) - self.assertTrue(" num_extra_leases: 0" in lines, output) - self.assertTrue(" MDMF contents:" in lines, output) - self.assertTrue(" seqnum: 1" in lines, output) - self.assertTrue(" required_shares: 3" in lines, output) - self.assertTrue(" total_shares: 10" in lines, output) - self.assertTrue(" segsize: 131073" in lines, output) - self.assertTrue(" datalen: %d" % len(self.data) in lines, output) - vcap = str(n.get_verify_cap().to_string(), "utf-8") - self.assertTrue(" verify-cap: %s" % vcap in lines, output) - cso = debug.CatalogSharesOptions() - cso.nodedirs = fso.nodedirs - cso.stdout = StringIO() - cso.stderr = StringIO() - debug.catalog_shares(cso) - shares = cso.stdout.getvalue().splitlines() - oneshare = shares[0] # all shares should be MDMF - self.failIf(oneshare.startswith("UNKNOWN"), oneshare) - self.assertTrue(oneshare.startswith("MDMF"), oneshare) - fields = oneshare.split() - self.assertThat(fields[0], Equals("MDMF")) - self.assertThat(fields[1].encode("ascii"), Equals(storage_index)) - self.assertThat(fields[2], Equals("3/10")) - self.assertThat(fields[3], Equals("%d" % len(self.data))) - self.assertTrue(fields[4].startswith("#1:"), fields[3]) - # the rest of fields[4] is the roothash, which depends upon - # encryption salts and is not constant. fields[5] is the - # remaining time on the longest lease, which is timing dependent. - # The rest of the line is the quoted pathname to the share. - d.addCallback(_debug) - return d + do = debug.DumpOptions() + do["filename"] = sharefiles[0] + do.stdout = StringIO() + debug.dump_share(do) + output = do.stdout.getvalue() + lines = set(output.splitlines()) + self.assertTrue("Mutable slot found:" in lines, output) + self.assertTrue(" share_type: MDMF" in lines, output) + self.assertTrue(" num_extra_leases: 0" in lines, output) + self.assertTrue(" MDMF contents:" in lines, output) + self.assertTrue(" seqnum: 1" in lines, output) + self.assertTrue(" required_shares: 3" in lines, output) + self.assertTrue(" total_shares: 10" in lines, output) + self.assertTrue(" segsize: 131073" in lines, output) + self.assertTrue(" datalen: %d" % len(self.data) in lines, output) + vcap = str(n.get_verify_cap().to_string(), "utf-8") + self.assertTrue(" verify-cap: %s" % vcap in lines, output) + cso = debug.CatalogSharesOptions() + cso.nodedirs = fso.nodedirs + cso.stdout = StringIO() + cso.stderr = StringIO() + debug.catalog_shares(cso) + shares = cso.stdout.getvalue().splitlines() + oneshare = shares[0] # all shares should be MDMF + self.failIf(oneshare.startswith("UNKNOWN"), oneshare) + self.assertTrue(oneshare.startswith("MDMF"), oneshare) + fields = oneshare.split() + self.assertThat(fields[0], Equals("MDMF")) + self.assertThat(fields[1].encode("ascii"), Equals(storage_index)) + self.assertThat(fields[2], Equals("3/10")) + self.assertThat(fields[3], Equals("%d" % len(self.data))) + self.assertTrue(fields[4].startswith("#1:"), fields[3]) + # the rest of fields[4] is the roothash, which depends upon + # encryption salts and is not constant. fields[5] is the + # remaining time on the longest lease, which is timing dependent. + # The rest of the line is the quoted pathname to the share. 
+ + async def test_get_sequence_number(self) -> None: + await self.do_upload() + bv = await self.mdmf_node.get_best_readable_version() + self.assertThat(bv.get_sequence_number(), Equals(1)) + bv = await self.sdmf_node.get_best_readable_version() + self.assertThat(bv.get_sequence_number(), Equals(1)) - def test_get_sequence_number(self): - d = self.do_upload() - d.addCallback(lambda ign: self.mdmf_node.get_best_readable_version()) - d.addCallback(lambda bv: - self.assertThat(bv.get_sequence_number(), Equals(1))) - d.addCallback(lambda ignored: - self.sdmf_node.get_best_readable_version()) - d.addCallback(lambda bv: - self.assertThat(bv.get_sequence_number(), Equals(1))) # Now update. The sequence number in both cases should be 1 in # both cases. - def _do_update(ignored): - new_data = MutableData(b"foo bar baz" * 100000) - new_small_data = MutableData(b"foo bar baz" * 10) - d1 = self.mdmf_node.overwrite(new_data) - d2 = self.sdmf_node.overwrite(new_small_data) - dl = gatherResults([d1, d2]) - return dl - d.addCallback(_do_update) - d.addCallback(lambda ignored: - self.mdmf_node.get_best_readable_version()) - d.addCallback(lambda bv: - self.assertThat(bv.get_sequence_number(), Equals(2))) - d.addCallback(lambda ignored: - self.sdmf_node.get_best_readable_version()) - d.addCallback(lambda bv: - self.assertThat(bv.get_sequence_number(), Equals(2))) - return d + new_data = MutableData(b"foo bar baz" * 100000) + new_small_data = MutableData(b"foo bar baz" * 10) + d1 = self.mdmf_node.overwrite(new_data) + d2 = self.sdmf_node.overwrite(new_small_data) + await gatherResults([d1, d2]) + bv = await self.mdmf_node.get_best_readable_version() + self.assertThat(bv.get_sequence_number(), Equals(2)) + bv = await self.sdmf_node.get_best_readable_version() + self.assertThat(bv.get_sequence_number(), Equals(2)) - - def test_cap_after_upload(self): + async def test_cap_after_upload(self) -> None: # If we create a new mutable file and upload things to it, and # it's an MDMF file, we should get an MDMF cap back from that # file and should be able to use that. # That's essentially what MDMF node is, so just check that. 
- d = self.do_upload_mdmf() - def _then(ign): - mdmf_uri = self.mdmf_node.get_uri() - cap = uri.from_string(mdmf_uri) - self.assertTrue(isinstance(cap, uri.WriteableMDMFFileURI)) - readonly_mdmf_uri = self.mdmf_node.get_readonly_uri() - cap = uri.from_string(readonly_mdmf_uri) - self.assertTrue(isinstance(cap, uri.ReadonlyMDMFFileURI)) - d.addCallback(_then) - return d + await self.do_upload_mdmf() + mdmf_uri = self.mdmf_node.get_uri() + cap = uri.from_string(mdmf_uri) + self.assertTrue(isinstance(cap, uri.WriteableMDMFFileURI)) + readonly_mdmf_uri = self.mdmf_node.get_readonly_uri() + cap = uri.from_string(readonly_mdmf_uri) + self.assertTrue(isinstance(cap, uri.ReadonlyMDMFFileURI)) - def test_mutable_version(self): + async def test_mutable_version(self) -> None: # assert that getting parameters from the IMutableVersion object # gives us the same data as getting them from the filenode itself - d = self.do_upload() - d.addCallback(lambda ign: self.mdmf_node.get_best_mutable_version()) - def _check_mdmf(bv): - n = self.mdmf_node - self.assertThat(bv.get_writekey(), Equals(n.get_writekey())) - self.assertThat(bv.get_storage_index(), Equals(n.get_storage_index())) - self.assertFalse(bv.is_readonly()) - d.addCallback(_check_mdmf) - d.addCallback(lambda ign: self.sdmf_node.get_best_mutable_version()) - def _check_sdmf(bv): - n = self.sdmf_node - self.assertThat(bv.get_writekey(), Equals(n.get_writekey())) - self.assertThat(bv.get_storage_index(), Equals(n.get_storage_index())) - self.assertFalse(bv.is_readonly()) - d.addCallback(_check_sdmf) - return d + await self.do_upload() + bv = await self.mdmf_node.get_best_mutable_version() + n = self.mdmf_node + self.assertThat(bv.get_writekey(), Equals(n.get_writekey())) + self.assertThat(bv.get_storage_index(), Equals(n.get_storage_index())) + self.assertFalse(bv.is_readonly()) + + bv = await self.sdmf_node.get_best_mutable_version() + n = self.sdmf_node + self.assertThat(bv.get_writekey(), Equals(n.get_writekey())) + self.assertThat(bv.get_storage_index(), Equals(n.get_storage_index())) + self.assertFalse(bv.is_readonly()) - def test_get_readonly_version(self): - d = self.do_upload() - d.addCallback(lambda ign: self.mdmf_node.get_best_readable_version()) - d.addCallback(lambda bv: self.assertTrue(bv.is_readonly())) + async def test_get_readonly_version(self) -> None: + await self.do_upload() + bv = await self.mdmf_node.get_best_readable_version() + self.assertTrue(bv.is_readonly()) # Attempting to get a mutable version of a mutable file from a # filenode initialized with a readcap should return a readonly # version of that same node. 
- d.addCallback(lambda ign: self.mdmf_node.get_readonly()) - d.addCallback(lambda ro: ro.get_best_mutable_version()) - d.addCallback(lambda v: self.assertTrue(v.is_readonly())) + ro = self.mdmf_node.get_readonly() + v = await ro.get_best_mutable_version() + self.assertTrue(v.is_readonly()) - d.addCallback(lambda ign: self.sdmf_node.get_best_readable_version()) - d.addCallback(lambda bv: self.assertTrue(bv.is_readonly())) + bv = await self.sdmf_node.get_best_readable_version() + self.assertTrue(bv.is_readonly()) - d.addCallback(lambda ign: self.sdmf_node.get_readonly()) - d.addCallback(lambda ro: ro.get_best_mutable_version()) - d.addCallback(lambda v: self.assertTrue(v.is_readonly())) - return d + ro = self.sdmf_node.get_readonly() + v = await ro.get_best_mutable_version() + self.assertTrue(v.is_readonly()) - def test_toplevel_overwrite(self): + async def test_toplevel_overwrite(self) -> None: new_data = MutableData(b"foo bar baz" * 100000) new_small_data = MutableData(b"foo bar baz" * 10) - d = self.do_upload() - d.addCallback(lambda ign: self.mdmf_node.overwrite(new_data)) - d.addCallback(lambda ignored: - self.mdmf_node.download_best_version()) - d.addCallback(lambda data: - self.assertThat(data, Equals(b"foo bar baz" * 100000))) - d.addCallback(lambda ignored: - self.sdmf_node.overwrite(new_small_data)) - d.addCallback(lambda ignored: - self.sdmf_node.download_best_version()) - d.addCallback(lambda data: - self.assertThat(data, Equals(b"foo bar baz" * 10))) - return d + await self.do_upload() + await self.mdmf_node.overwrite(new_data) + data = await self.mdmf_node.download_best_version() + self.assertThat(data, Equals(b"foo bar baz" * 100000)) + await self.sdmf_node.overwrite(new_small_data) + data = await self.sdmf_node.download_best_version() + self.assertThat(data, Equals(b"foo bar baz" * 10)) - def test_toplevel_modify(self): - d = self.do_upload() + async def test_toplevel_modify(self) -> None: + await self.do_upload() def modifier(old_contents, servermap, first_time): return old_contents + b"modified" - d.addCallback(lambda ign: self.mdmf_node.modify(modifier)) - d.addCallback(lambda ignored: - self.mdmf_node.download_best_version()) - d.addCallback(lambda data: - self.assertThat(data, Contains(b"modified"))) - d.addCallback(lambda ignored: - self.sdmf_node.modify(modifier)) - d.addCallback(lambda ignored: - self.sdmf_node.download_best_version()) - d.addCallback(lambda data: - self.assertThat(data, Contains(b"modified"))) - return d + await self.mdmf_node.modify(modifier) + data = await self.mdmf_node.download_best_version() + self.assertThat(data, Contains(b"modified")) + await self.sdmf_node.modify(modifier) + data = await self.sdmf_node.download_best_version() + self.assertThat(data, Contains(b"modified")) - def test_version_modify(self): + async def test_version_modify(self) -> None: # TODO: When we can publish multiple versions, alter this test # to modify a version other than the best usable version, then # test to see that the best recoverable version is that. 
- d = self.do_upload() + await self.do_upload() def modifier(old_contents, servermap, first_time): return old_contents + b"modified" - d.addCallback(lambda ign: self.mdmf_node.modify(modifier)) - d.addCallback(lambda ignored: - self.mdmf_node.download_best_version()) - d.addCallback(lambda data: - self.assertThat(data, Contains(b"modified"))) - d.addCallback(lambda ignored: - self.sdmf_node.modify(modifier)) - d.addCallback(lambda ignored: - self.sdmf_node.download_best_version()) - d.addCallback(lambda data: - self.assertThat(data, Contains(b"modified"))) - return d + await self.mdmf_node.modify(modifier) + data = await self.mdmf_node.download_best_version() + self.assertThat(data, Contains(b"modified")) + await self.sdmf_node.modify(modifier) + data = await self.sdmf_node.download_best_version() + self.assertThat(data, Contains(b"modified")) - def test_download_version(self): - d = self.publish_multiple() + async def test_download_version(self) -> None: + await self.publish_multiple() # We want to have two recoverable versions on the grid. - d.addCallback(lambda res: - self._set_versions({0:0,2:0,4:0,6:0,8:0, - 1:1,3:1,5:1,7:1,9:1})) + self._set_versions({0:0,2:0,4:0,6:0,8:0, + 1:1,3:1,5:1,7:1,9:1}) # Now try to download each version. We should get the plaintext # associated with that version. - d.addCallback(lambda ignored: - self._fn.get_servermap(mode=MODE_READ)) - def _got_servermap(smap): - versions = smap.recoverable_versions() - assert len(versions) == 2 + smap = await self._fn.get_servermap(mode=MODE_READ) + versions = smap.recoverable_versions() + assert len(versions) == 2 - self.servermap = smap - self.version1, self.version2 = versions - assert self.version1 != self.version2 + self.servermap = smap + self.version1, self.version2 = versions + assert self.version1 != self.version2 - self.version1_seqnum = self.version1[0] - self.version2_seqnum = self.version2[0] - self.version1_index = self.version1_seqnum - 1 - self.version2_index = self.version2_seqnum - 1 + self.version1_seqnum = self.version1[0] + self.version2_seqnum = self.version2[0] + self.version1_index = self.version1_seqnum - 1 + self.version2_index = self.version2_seqnum - 1 - d.addCallback(_got_servermap) - d.addCallback(lambda ignored: - self._fn.download_version(self.servermap, self.version1)) - d.addCallback(lambda results: - self.assertThat(self.CONTENTS[self.version1_index], - Equals(results))) - d.addCallback(lambda ignored: - self._fn.download_version(self.servermap, self.version2)) - d.addCallback(lambda results: - self.assertThat(self.CONTENTS[self.version2_index], - Equals(results))) - return d + results = await self._fn.download_version(self.servermap, self.version1) + self.assertThat(self.CONTENTS[self.version1_index], + Equals(results)) + results = await self._fn.download_version(self.servermap, self.version2) + self.assertThat(self.CONTENTS[self.version2_index], + Equals(results)) - def test_download_nonexistent_version(self): - d = self.do_upload_mdmf() - d.addCallback(lambda ign: self.mdmf_node.get_servermap(mode=MODE_WRITE)) - def _set_servermap(servermap): - self.servermap = servermap - d.addCallback(_set_servermap) - d.addCallback(lambda ignored: - self.shouldFail(UnrecoverableFileError, "nonexistent version", - None, - self.mdmf_node.download_version, self.servermap, - "not a version")) - return d + async def test_download_nonexistent_version(self) -> None: + await self.do_upload_mdmf() + servermap = await self.mdmf_node.get_servermap(mode=MODE_WRITE) + await 
self.shouldFail(UnrecoverableFileError, "nonexistent version", + None, + self.mdmf_node.download_version, servermap, + "not a version") - def _test_partial_read(self, node, expected, modes, step): - d = node.get_best_readable_version() + async def _test_partial_read(self, node, expected, modes, step) -> None: + version = await node.get_best_readable_version() for (name, offset, length) in modes: - d.addCallback(self._do_partial_read, name, expected, offset, length) + version = await self._do_partial_read(version, name, expected, offset, length) # then read the whole thing, but only a few bytes at a time, and see # that the results are what we expect. - def _read_data(version): - c = consumer.MemoryConsumer() - d2 = defer.succeed(None) - for i in range(0, len(expected), step): - d2.addCallback(lambda ignored, i=i: version.read(c, i, step)) - d2.addCallback(lambda ignored: - self.assertThat(expected, Equals(b"".join(c.chunks)))) - return d2 - d.addCallback(_read_data) - return d - - def _do_partial_read(self, version, name, expected, offset, length): c = consumer.MemoryConsumer() - d = version.read(c, offset, length) + for i in range(0, len(expected), step): + await version.read(c, i, step) + self.assertThat(expected, Equals(b"".join(c.chunks))) + + async def _do_partial_read(self, version, name, expected, offset, length) -> None: + c = consumer.MemoryConsumer() + await version.read(c, offset, length) if length is None: expected_range = expected[offset:] else: expected_range = expected[offset:offset+length] - d.addCallback(lambda ignored: b"".join(c.chunks)) - def _check(results): - if results != expected_range: - print("read([%d]+%s) got %d bytes, not %d" % \ - (offset, length, len(results), len(expected_range))) - print("got: %s ... %s" % (results[:20], results[-20:])) - print("exp: %s ... %s" % (expected_range[:20], expected_range[-20:])) - self.fail("results[%s] != expected_range" % name) - return version # daisy-chained to next call - d.addCallback(_check) - return d + results = b"".join(c.chunks) + if results != expected_range: + print("read([%d]+%s) got %d bytes, not %d" % \ + (offset, length, len(results), len(expected_range))) + print("got: %s ... %s" % (results[:20], results[-20:])) + print("exp: %s ... 
%s" % (expected_range[:20], expected_range[-20:])) + self.fail("results[%s] != expected_range" % name) + return version # daisy-chained to next call - def test_partial_read_mdmf_0(self): + async def test_partial_read_mdmf_0(self) -> None: data = b"" - d = self.do_upload_mdmf(data=data) + result = await self.do_upload_mdmf(data=data) modes = [("all1", 0,0), ("all2", 0,None), ] - d.addCallback(self._test_partial_read, data, modes, 1) - return d + await self._test_partial_read(result, data, modes, 1) - def test_partial_read_mdmf_large(self): + async def test_partial_read_mdmf_large(self) -> None: segment_boundary = mathutil.next_multiple(128 * 1024, 3) modes = [("start_on_segment_boundary", segment_boundary, 50), ("ending_one_byte_after_segment_boundary", segment_boundary-50, 51), @@ -386,20 +312,18 @@ class Version(GridTestMixin, AsyncTestCase, testutil.ShouldFailMixin, \ ("complete_file1", 0, len(self.data)), ("complete_file2", 0, None), ] - d = self.do_upload_mdmf() - d.addCallback(self._test_partial_read, self.data, modes, 10000) - return d + result = await self.do_upload_mdmf() + await self._test_partial_read(result, self.data, modes, 10000) - def test_partial_read_sdmf_0(self): + async def test_partial_read_sdmf_0(self) -> None: data = b"" modes = [("all1", 0,0), ("all2", 0,None), ] - d = self.do_upload_sdmf(data=data) - d.addCallback(self._test_partial_read, data, modes, 1) - return d + result = await self.do_upload_sdmf(data=data) + await self._test_partial_read(result, data, modes, 1) - def test_partial_read_sdmf_2(self): + async def test_partial_read_sdmf_2(self) -> None: data = b"hi" modes = [("one_byte", 0, 1), ("last_byte", 1, 1), @@ -407,11 +331,10 @@ class Version(GridTestMixin, AsyncTestCase, testutil.ShouldFailMixin, \ ("complete_file", 0, 2), ("complete_file2", 0, None), ] - d = self.do_upload_sdmf(data=data) - d.addCallback(self._test_partial_read, data, modes, 1) - return d + result = await self.do_upload_sdmf(data=data) + await self._test_partial_read(result, data, modes, 1) - def test_partial_read_sdmf_90(self): + async def test_partial_read_sdmf_90(self) -> None: modes = [("start_at_middle", 50, 40), ("start_at_middle2", 50, None), ("zero_length_at_start", 0, 0), @@ -420,11 +343,10 @@ class Version(GridTestMixin, AsyncTestCase, testutil.ShouldFailMixin, \ ("complete_file1", 0, None), ("complete_file2", 0, 90), ] - d = self.do_upload_sdmf() - d.addCallback(self._test_partial_read, self.small_data, modes, 10) - return d + result = await self.do_upload_sdmf() + await self._test_partial_read(result, self.small_data, modes, 10) - def test_partial_read_sdmf_100(self): + async def test_partial_read_sdmf_100(self) -> None: data = b"test data "*10 modes = [("start_at_middle", 50, 50), ("start_at_middle2", 50, None), @@ -433,42 +355,30 @@ class Version(GridTestMixin, AsyncTestCase, testutil.ShouldFailMixin, \ ("complete_file1", 0, 100), ("complete_file2", 0, None), ] - d = self.do_upload_sdmf(data=data) - d.addCallback(self._test_partial_read, data, modes, 10) - return d + result = await self.do_upload_sdmf(data=data) + await self._test_partial_read(result, data, modes, 10) + async def _test_read_and_download(self, node, expected) -> None: + version = await node.get_best_readable_version() + c = consumer.MemoryConsumer() + await version.read(c) + self.assertThat(expected, Equals(b"".join(c.chunks))) - def _test_read_and_download(self, node, expected): - d = node.get_best_readable_version() - def _read_data(version): - c = consumer.MemoryConsumer() - c2 = consumer.MemoryConsumer() 
- d2 = defer.succeed(None) - d2.addCallback(lambda ignored: version.read(c)) - d2.addCallback(lambda ignored: - self.assertThat(expected, Equals(b"".join(c.chunks)))) + c2 = consumer.MemoryConsumer() + await version.read(c2, offset=0, size=len(expected)) + self.assertThat(expected, Equals(b"".join(c2.chunks))) - d2.addCallback(lambda ignored: version.read(c2, offset=0, - size=len(expected))) - d2.addCallback(lambda ignored: - self.assertThat(expected, Equals(b"".join(c2.chunks)))) - return d2 - d.addCallback(_read_data) - d.addCallback(lambda ignored: node.download_best_version()) - d.addCallback(lambda data: self.assertThat(expected, Equals(data))) - return d + data = await node.download_best_version() + self.assertThat(expected, Equals(data)) - def test_read_and_download_mdmf(self): - d = self.do_upload_mdmf() - d.addCallback(self._test_read_and_download, self.data) - return d + async def test_read_and_download_mdmf(self) -> None: + result = await self.do_upload_mdmf() + await self._test_read_and_download(result, self.data) - def test_read_and_download_sdmf(self): - d = self.do_upload_sdmf() - d.addCallback(self._test_read_and_download, self.small_data) - return d + async def test_read_and_download_sdmf(self) -> None: + result = await self.do_upload_sdmf() + await self._test_read_and_download(result, self.small_data) - def test_read_and_download_sdmf_zero_length(self): - d = self.do_upload_empty_sdmf() - d.addCallback(self._test_read_and_download, b"") - return d + async def test_read_and_download_sdmf_zero_length(self) -> None: + result = await self.do_upload_empty_sdmf() + await self._test_read_and_download(result, b"") From e72847115be571559167181d5209fa3dccfbd458 Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Wed, 30 Nov 2022 09:37:26 -0500 Subject: [PATCH 11/43] news fragment --- newsfragments/3947.minor | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 newsfragments/3947.minor diff --git a/newsfragments/3947.minor b/newsfragments/3947.minor new file mode 100644 index 000000000..e69de29bb From 156954c621f7b39406831ca18bed00a2dedf8b70 Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Wed, 30 Nov 2022 09:43:01 -0500 Subject: [PATCH 12/43] no longer any need to "daisy chain" this value --- src/allmydata/test/mutable/test_version.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/allmydata/test/mutable/test_version.py b/src/allmydata/test/mutable/test_version.py index d14cc9295..1d9467694 100644 --- a/src/allmydata/test/mutable/test_version.py +++ b/src/allmydata/test/mutable/test_version.py @@ -270,7 +270,7 @@ class Version(GridTestMixin, AsyncTestCase, testutil.ShouldFailMixin, \ async def _test_partial_read(self, node, expected, modes, step) -> None: version = await node.get_best_readable_version() for (name, offset, length) in modes: - version = await self._do_partial_read(version, name, expected, offset, length) + await self._do_partial_read(version, name, expected, offset, length) # then read the whole thing, but only a few bytes at a time, and see # that the results are what we expect. c = consumer.MemoryConsumer() @@ -292,7 +292,6 @@ class Version(GridTestMixin, AsyncTestCase, testutil.ShouldFailMixin, \ print("got: %s ... %s" % (results[:20], results[-20:])) print("exp: %s ... 
%s" % (expected_range[:20], expected_range[-20:])) self.fail("results[%s] != expected_range" % name) - return version # daisy-chained to next call async def test_partial_read_mdmf_0(self) -> None: data = b"" From 05dfa875a771e6ff27006b8fc13aad3dc1709b67 Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Wed, 30 Nov 2022 09:46:13 -0500 Subject: [PATCH 13/43] Quite a mypy warning about formatting bytes into a string --- src/allmydata/test/mutable/test_version.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/allmydata/test/mutable/test_version.py b/src/allmydata/test/mutable/test_version.py index 1d9467694..87050424b 100644 --- a/src/allmydata/test/mutable/test_version.py +++ b/src/allmydata/test/mutable/test_version.py @@ -289,8 +289,8 @@ class Version(GridTestMixin, AsyncTestCase, testutil.ShouldFailMixin, \ if results != expected_range: print("read([%d]+%s) got %d bytes, not %d" % \ (offset, length, len(results), len(expected_range))) - print("got: %s ... %s" % (results[:20], results[-20:])) - print("exp: %s ... %s" % (expected_range[:20], expected_range[-20:])) + print("got: %r ... %r" % (results[:20], results[-20:])) + print("exp: %r ... %r" % (expected_range[:20], expected_range[-20:])) self.fail("results[%s] != expected_range" % name) async def test_partial_read_mdmf_0(self) -> None: From 11fb194d74599dc3f27f31b14ba340acbd3a2615 Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Fri, 9 Dec 2022 14:00:41 -0500 Subject: [PATCH 14/43] kick ci --- newsfragments/3942.minor | 1 + 1 file changed, 1 insertion(+) diff --git a/newsfragments/3942.minor b/newsfragments/3942.minor index e69de29bb..8b1378917 100644 --- a/newsfragments/3942.minor +++ b/newsfragments/3942.minor @@ -0,0 +1 @@ + From 88ee978d98aea740ddf357acf08c85768bd0e950 Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Fri, 9 Dec 2022 14:06:24 -0500 Subject: [PATCH 15/43] Some features we depend on are broken in tox 4 --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 768e44e29..8558abd02 100644 --- a/setup.py +++ b/setup.py @@ -396,7 +396,7 @@ setup(name="tahoe-lafs", # also set in __init__.py "pyflakes == 2.2.0", "coverage ~= 5.0", "mock", - "tox", + "tox ~= 3.0", "pytest", "pytest-twisted", "hypothesis >= 3.6.1", From 6485eb5186190a5e73eb55f05b66a42ffb6655ff Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Fri, 9 Dec 2022 14:07:38 -0500 Subject: [PATCH 16/43] Also constrain tox here --- .circleci/config.yml | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 051e690b7..d7e4f2563 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -133,10 +133,10 @@ jobs: steps: - "checkout" - - run: + - run: &INSTALL_TOX name: "Install tox" command: | - pip install --user tox + pip install --user 'tox~=3.0' - run: name: "Static-ish code checks" @@ -152,9 +152,7 @@ jobs: - "checkout" - run: - name: "Install tox" - command: | - pip install --user tox + <<: *INSTALL_TOX - run: name: "Make PyInstaller executable" From 98e25507df5fdde29b5047c7d325607cb3906b5a Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Mon, 12 Dec 2022 10:43:36 -0500 Subject: [PATCH 17/43] A different approach to forcing foolscap in integration tests. 
--- .github/workflows/ci.yml | 27 +++++++++++++-------------- integration/conftest.py | 16 +++++++--------- integration/util.py | 17 ++++++++--------- src/allmydata/protocol_switch.py | 16 ---------------- src/allmydata/testing/__init__.py | 18 ------------------ 5 files changed, 28 insertions(+), 66 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 41de7baed..afbe5c7a4 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -161,19 +161,21 @@ jobs: strategy: fail-fast: false matrix: - os: - - windows-latest - # 22.04 has some issue with Tor at the moment: - # https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3943 - - ubuntu-20.04 - python-version: - - 3.7 - - 3.9 include: - # On macOS don't bother with 3.7, just to get faster builds. - os: macos-latest python-version: 3.9 - + extra-tox-options: "" + - os: windows-latest + python-version: 3.10 + extra-tox-options: "" + # 22.04 has some issue with Tor at the moment: + # https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3943 + - os: ubuntu-20.04 + python-version: 3.8 + extra-tox-options: "--force-foolscap integration/" + - os: ubuntu-20.04 + python-version: 3.10 + extra-tox-options: "" steps: - name: Install Tor [Ubuntu] @@ -232,10 +234,7 @@ jobs: # effect. On Linux it doesn't make a difference one way or another. TMPDIR: "/tmp" run: | - # Run with Foolscap forced: - __TAHOE_INTEGRATION_FORCE_FOOLSCAP=1 tox -e integration - # Run with Foolscap not forced, which should result in HTTP being used. - __TAHOE_INTEGRATION_FORCE_FOOLSCAP=0 tox -e integration + tox -e integration ${{ matrix.extra-tox-options }} - name: Upload eliot.log in case of failure uses: actions/upload-artifact@v1 diff --git a/integration/conftest.py b/integration/conftest.py index e284b5cba..5cbe9ad6b 100644 --- a/integration/conftest.py +++ b/integration/conftest.py @@ -1,15 +1,6 @@ """ Ported to Python 3. """ -from __future__ import unicode_literals -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - import sys import shutil from time import sleep @@ -66,6 +57,13 @@ def pytest_addoption(parser): "--coverage", action="store_true", dest="coverage", help="Collect coverage statistics", ) + parser.addoption( + "--force-foolscap", action="store_true", default=False, + dest="force_foolscap", + help=("If set, force Foolscap only for the storage protocol. 
" + + "Otherwise HTTP will be used.") + ) + @pytest.fixture(autouse=True, scope='session') def eliot_logging(): diff --git a/integration/util.py b/integration/util.py index cde837218..7d885ee6c 100644 --- a/integration/util.py +++ b/integration/util.py @@ -30,7 +30,6 @@ from allmydata.util.configutil import ( write_config, ) from allmydata import client -from allmydata.testing import foolscap_only_for_integration_testing import pytest_twisted @@ -293,14 +292,14 @@ def _create_node(reactor, request, temp_dir, introducer_furl, flog_gatherer, nam u'log_gatherer.furl', flog_gatherer, ) - force_foolscap = foolscap_only_for_integration_testing() - if force_foolscap is not None: - set_config( - config, - 'storage', - 'force_foolscap', - str(force_foolscap), - ) + force_foolscap = request.config.getoption("force_foolscap") + assert force_foolscap in (True, False) + set_config( + config, + 'storage', + 'force_foolscap', + str(force_foolscap), + ) write_config(FilePath(config_path), config) created_d.addCallback(created) diff --git a/src/allmydata/protocol_switch.py b/src/allmydata/protocol_switch.py index d88863fdb..b0af84c33 100644 --- a/src/allmydata/protocol_switch.py +++ b/src/allmydata/protocol_switch.py @@ -30,7 +30,6 @@ from foolscap.api import Tub from .storage.http_server import HTTPServer, build_nurl from .storage.server import StorageServer -from .testing import foolscap_only_for_integration_testing class _PretendToBeNegotiation(type): @@ -171,21 +170,6 @@ class _FoolscapOrHttps(Protocol, metaclass=_PretendToBeNegotiation): # and later data, otherwise assume HTTPS. self._timeout.cancel() if self._buffer.startswith(b"GET /id/"): - if foolscap_only_for_integration_testing() == False: - # Tahoe will prefer HTTP storage protocol over Foolscap when possible. - # - # If this is branch is taken, we are running a test that should - # be using HTTP for the storage protocol. As such, we - # aggressively disable Foolscap to ensure that HTTP is in fact - # going to be used. If we hit this branch that means our - # expectation that HTTP will be used was wrong, suggesting a - # bug in either the code of the integration testing setup. - # - # This branch should never be hit in production! - self.transport.loseConnection() - print("FOOLSCAP IS DISABLED, I PITY THE FOOLS WHO SEE THIS MESSAGE") - return - # We're a Foolscap Negotiation server protocol instance: transport = self.transport buf = self._buffer diff --git a/src/allmydata/testing/__init__.py b/src/allmydata/testing/__init__.py index 119ae4101..e69de29bb 100644 --- a/src/allmydata/testing/__init__.py +++ b/src/allmydata/testing/__init__.py @@ -1,18 +0,0 @@ -import os -from typing import Optional - - -def foolscap_only_for_integration_testing() -> Optional[bool]: - """ - Return whether HTTP storage protocol has been disabled / Foolscap - forced, for purposes of integration testing. - - This is determined by the __TAHOE_INTEGRATION_FORCE_FOOLSCAP environment - variable, which can be 1, 0, or not set, corresponding to results of - ``True``, ``False`` and ``None`` (i.e. default). - """ - force_foolscap = os.environ.get("__TAHOE_INTEGRATION_FORCE_FOOLSCAP") - if force_foolscap is None: - return None - - return bool(int(force_foolscap)) From c5c616afd5146f8cde9dddead3bdbeb092890992 Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Mon, 12 Dec 2022 10:44:49 -0500 Subject: [PATCH 18/43] Garbage. 
--- src/allmydata/test/test_storage_https.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/src/allmydata/test/test_storage_https.py b/src/allmydata/test/test_storage_https.py index 3d2a31143..a11b0eed5 100644 --- a/src/allmydata/test/test_storage_https.py +++ b/src/allmydata/test/test_storage_https.py @@ -181,10 +181,6 @@ class PinningHTTPSValidation(AsyncTestCase): response = await self.request(url, certificate) self.assertEqual(await response.content(), b"YOYODYNE") - # We keep getting TLSMemoryBIOProtocol being left around, so try harder - # to wait for it to finish. - await deferLater(reactor, 0.01) - @async_to_deferred async def test_server_certificate_not_valid_yet(self): """ From 106df423be0e864100ba2d96cdb91558d206160a Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Mon, 12 Dec 2022 10:52:01 -0500 Subject: [PATCH 19/43] Another approach. --- .github/workflows/ci.yml | 20 +++++++++++++++----- 1 file changed, 15 insertions(+), 5 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 2ffc260df..8c3eaf29e 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -156,18 +156,18 @@ jobs: include: - os: macos-latest python-version: 3.9 - extra-tox-options: "" + force-foolscap: false - os: windows-latest python-version: 3.10 - extra-tox-options: "" + force-foolscap: false # 22.04 has some issue with Tor at the moment: # https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3943 - os: ubuntu-20.04 python-version: 3.8 - extra-tox-options: "--force-foolscap integration/" + force-foolscap: true - os: ubuntu-20.04 python-version: 3.10 - extra-tox-options: "" + force-foolscap: false steps: - name: Install Tor [Ubuntu] @@ -208,14 +208,24 @@ jobs: run: python misc/build_helpers/show-tool-versions.py - name: Run "Python 3 integration tests" + if: "${{ !matrix.force-foolscap }}" env: # On macOS this is necessary to ensure unix socket paths for tor # aren't too long. On Windows tox won't pass it through so it has no # effect. On Linux it doesn't make a difference one way or another. TMPDIR: "/tmp" run: | - tox -e integration ${{ matrix.extra-tox-options }} + tox -e integration + - name: Run "Python 3 integration tests (force Foolscap)" + if: "${{ matrix.force-foolscap }}" + env: + # On macOS this is necessary to ensure unix socket paths for tor + # aren't too long. On Windows tox won't pass it through so it has no + # effect. On Linux it doesn't make a difference one way or another. + TMPDIR: "/tmp" + run: | + tox -e integration -- --force-foolscap integration/ - name: Upload eliot.log in case of failure uses: actions/upload-artifact@v3 if: failure() From 742b352861629a0d1f1b900c4c71d6e1ba22a0f2 Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Mon, 12 Dec 2022 10:52:17 -0500 Subject: [PATCH 20/43] Whitespace. --- .github/workflows/ci.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 8c3eaf29e..e87337a1b 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -226,6 +226,7 @@ jobs: TMPDIR: "/tmp" run: | tox -e integration -- --force-foolscap integration/ + - name: Upload eliot.log in case of failure uses: actions/upload-artifact@v3 if: failure() From d05a1313d1773d8f4bf7041d51f4bca1ab0de4b3 Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Mon, 12 Dec 2022 10:54:23 -0500 Subject: [PATCH 21/43] Don't change versions for now, use strings so it'll be future compatible with 3.10. 
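
Bare scalars like 3.10 are parsed by YAML as the number 3.1, so an
unquoted version would silently select the wrong interpreter. Quoting
keeps the value a string, for example:

    # Unquoted, YAML reads this as the float 3.1:
    python-version: 3.10
    # Quoted, it stays the string "3.10":
    python-version: "3.10"
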
--- .github/workflows/ci.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index e87337a1b..01f0890da 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -155,18 +155,18 @@ jobs: matrix: include: - os: macos-latest - python-version: 3.9 + python-version: "3.9" force-foolscap: false - os: windows-latest - python-version: 3.10 + python-version: "3.9" force-foolscap: false # 22.04 has some issue with Tor at the moment: # https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3943 - os: ubuntu-20.04 - python-version: 3.8 + python-version: "3.7" force-foolscap: true - os: ubuntu-20.04 - python-version: 3.10 + python-version: "3.9" force-foolscap: false steps: From 366cbf90017874ec24d60bd0df3b3d9f75d79182 Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Mon, 12 Dec 2022 10:55:07 -0500 Subject: [PATCH 22/43] Tox is bad? --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 01f0890da..37f41d06b 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -225,7 +225,7 @@ jobs: # effect. On Linux it doesn't make a difference one way or another. TMPDIR: "/tmp" run: | - tox -e integration -- --force-foolscap integration/ + tox -e integration -- --force-foolscap,integration/ - name: Upload eliot.log in case of failure uses: actions/upload-artifact@v3 From 6a1f49551b6683f64b110c2060dcd309c21bdd8d Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Mon, 12 Dec 2022 11:05:09 -0500 Subject: [PATCH 23/43] No, that's not it. --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 37f41d06b..01f0890da 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -225,7 +225,7 @@ jobs: # effect. On Linux it doesn't make a difference one way or another. TMPDIR: "/tmp" run: | - tox -e integration -- --force-foolscap,integration/ + tox -e integration -- --force-foolscap integration/ - name: Upload eliot.log in case of failure uses: actions/upload-artifact@v3 From be3ace7adebffaa5d410e3f8e248b6db08bd7d50 Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Tue, 13 Dec 2022 15:39:04 -0500 Subject: [PATCH 24/43] News file. --- newsfragments/3954.minor | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 newsfragments/3954.minor diff --git a/newsfragments/3954.minor b/newsfragments/3954.minor new file mode 100644 index 000000000..e69de29bb From 6ae40a932d5504edc66176fc0fbdef45998dec77 Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Tue, 13 Dec 2022 15:54:19 -0500 Subject: [PATCH 25/43] A much more reasonable number of HTTP connections. 
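
Twisted's HTTPConnectionPool keeps only two persistent connections per
host by default, which serializes the storage client's many parallel
requests to a single server. Something like:

    from twisted.internet import reactor
    from twisted.web.client import HTTPConnectionPool

    pool = HTTPConnectionPool(reactor)
    pool.maxPersistentPerHost       # 2 by default
    pool.maxPersistentPerHost = 20  # allow many more concurrent requests
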
--- src/allmydata/storage/http_client.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/allmydata/storage/http_client.py b/src/allmydata/storage/http_client.py index 79bf061c9..90bda7fc0 100644 --- a/src/allmydata/storage/http_client.py +++ b/src/allmydata/storage/http_client.py @@ -323,6 +323,7 @@ class StorageClient(object): swissnum = nurl.path[0].encode("ascii") certificate_hash = nurl.user.encode("ascii") pool = HTTPConnectionPool(reactor) + pool.maxPersistentPerHost = 20 if cls.TEST_MODE_REGISTER_HTTP_POOL is not None: cls.TEST_MODE_REGISTER_HTTP_POOL(pool) From 2057f59950fcbd6576d530526d41f9835e42ec7c Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Wed, 14 Dec 2022 08:35:06 -0500 Subject: [PATCH 26/43] news fragment --- newsfragments/3953.minor | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 newsfragments/3953.minor diff --git a/newsfragments/3953.minor b/newsfragments/3953.minor new file mode 100644 index 000000000..e69de29bb From a1cb8893083d06da0c7f1bca760e3333334acac3 Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Wed, 14 Dec 2022 08:35:10 -0500 Subject: [PATCH 27/43] Take typechecks and codechecks out of the GitHub Actions config There's a dedicated job on CircleCI. --- tox.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index db4748033..96eed4e40 100644 --- a/tox.ini +++ b/tox.ini @@ -7,7 +7,7 @@ # the tox-gh-actions package. [gh-actions] python = - 3.7: py37-coverage,typechecks,codechecks + 3.7: py37-coverage 3.8: py38-coverage 3.9: py39-coverage 3.10: py310-coverage From 2677f26455f2b91f13e8c453b91f43d9c08f0527 Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Wed, 14 Dec 2022 08:46:39 -0500 Subject: [PATCH 28/43] news fragment --- newsfragments/3914.minor | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 newsfragments/3914.minor diff --git a/newsfragments/3914.minor b/newsfragments/3914.minor new file mode 100644 index 000000000..e69de29bb From 05c7450376bbbfc4cfe0fb977265b9a1365cf588 Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Wed, 14 Dec 2022 08:47:05 -0500 Subject: [PATCH 29/43] Try to use an upcoming python-cryptography feature to avoid some costs If the key is the wrong number of bits then we don't care about any other validation results because we're just going to reject it. So, check that before applying other validation, if possible. This is untested since the version of python-cryptography that supports it is not released yet and I don't feel like setting up a Rust build tool chain at the moment. --- src/allmydata/crypto/rsa.py | 27 +++++++++++++++++++++++---- 1 file changed, 23 insertions(+), 4 deletions(-) diff --git a/src/allmydata/crypto/rsa.py b/src/allmydata/crypto/rsa.py index 95cf01413..96885cfa1 100644 --- a/src/allmydata/crypto/rsa.py +++ b/src/allmydata/crypto/rsa.py @@ -72,20 +72,39 @@ def create_signing_keypair_from_string(private_key_der): :returns: 2-tuple of (private_key, public_key) """ - priv_key = load_der_private_key( + load = partial( + load_der_private_key, private_key_der, password=None, backend=default_backend(), ) - if not isinstance(priv_key, rsa.RSAPrivateKey): + + try: + # Load it once without the potentially expensive OpenSSL validation + # checks. These have superlinear complexity. We *will* run them just + # below - but first we'll apply our own constant-time checks. 
+        unsafe_priv_key = load(unsafe_skip_rsa_key_validation=True)
+    except TypeError:
+        # cryptography<39 does not support this parameter, so just load the
+        # key with validation...
+        unsafe_priv_key = load()
+        # But avoid *reloading* it since that will run the expensive
+        # validation *again*.
+        load = lambda: unsafe_priv_key
+
+    if not isinstance(unsafe_priv_key, rsa.RSAPrivateKey):
         raise ValueError(
             "Private Key did not decode to an RSA key"
         )
-    if priv_key.key_size != 2048:
+    if unsafe_priv_key.key_size != 2048:
         raise ValueError(
             "Private Key must be 2048 bits"
         )
-    return priv_key, priv_key.public_key()
+
+    # Now re-load it with OpenSSL's validation applied.
+    safe_priv_key = load()
+
+    return safe_priv_key, safe_priv_key.public_key()
 
 
 def der_string_from_signing_key(private_key):

From c014ad55b1aa42295db7295f7a7d99092fee39fd Mon Sep 17 00:00:00 2001
From: Jean-Paul Calderone
Date: Wed, 14 Dec 2022 08:48:02 -0500
Subject: [PATCH 30/43] remove Python 2 boilerplate

---
 src/allmydata/crypto/rsa.py | 9 ++-------
 1 file changed, 2 insertions(+), 7 deletions(-)

diff --git a/src/allmydata/crypto/rsa.py b/src/allmydata/crypto/rsa.py
index 96885cfa1..cdd9a6035 100644
--- a/src/allmydata/crypto/rsa.py
+++ b/src/allmydata/crypto/rsa.py
@@ -12,14 +12,9 @@ on any of their methods.
 
 Ported to Python 3.
 """
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-from __future__ import unicode_literals
+from __future__ import annotations
 
-from future.utils import PY2
-if PY2:
-    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+from functools import partial
 
 from cryptography.exceptions import InvalidSignature
 from cryptography.hazmat.backends import default_backend

From 78e04cc82170f8139b67b419f6cc72e3e75bc477 Mon Sep 17 00:00:00 2001
From: Jean-Paul Calderone
Date: Wed, 21 Dec 2022 06:25:22 -0500
Subject: [PATCH 31/43] Modernize cachix usage; attempt to fix CircleCI
 conditional

CIRCLE_PR_NUMBER documentation may just be wrong. It seems like maybe it
is never set? Try inspecting the source repo value instead.
---
 .circleci/config.yml | 73 ++++++++++++--------------------------------
 .circleci/lib.sh     | 25 +++++++++++++++
 2 files changed, 44 insertions(+), 54 deletions(-)
 create mode 100644 .circleci/lib.sh

diff --git a/.circleci/config.yml b/.circleci/config.yml
index d7e4f2563..4dcf2a2db 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -380,7 +380,7 @@ jobs:
     docker:
       # Run in a highly Nix-capable environment.
       - <<: *DOCKERHUB_AUTH
-        image: "nixos/nix:2.3.16"
+        image: "nixos/nix:2.10.3"
 
     environment:
       # CACHIX_AUTH_TOKEN is manually set in the CircleCI web UI and
@@ -390,27 +390,21 @@ jobs:
 
     steps:
       - "run":
-          # The nixos/nix image does not include ssh. Install it so the
-          # `checkout` step will succeed. We also want cachix for
-          # Nix-friendly caching.
+          # Get cachix for Nix-friendly caching.
          name: "Install Basic Dependencies"
           command: |
+            NIXPKGS="https://github.com/nixos/nixpkgs/archive/nixos-<>.tar.gz"
             nix-env \
-              --file https://github.com/nixos/nixpkgs/archive/nixos-<>.tar.gz \
+              --file $NIXPKGS \
               --install \
-              -A openssh cachix bash
+              -A cachix bash
+            # Activate it for "binary substitution". This sets up
+            # configuration that lets Nix download something from the cache
+            # instead of building it locally, if possible.
+            cachix use "${CACHIX_NAME}"
 
       - "checkout"
 
-      - run:
-          name: "Cachix setup"
-          # Record the store paths that exist before we did much. There's no
-          # reason to cache these, they're either in the image or have to be
-          # retrieved before we can use cachix to restore from cache.
-          command: |
-            cachix use "${CACHIX_NAME}"
-            nix path-info --all > /tmp/store-path-pre-build
-
       - "run":
           # The Nix package doesn't know how to do this part, unfortunately.
          name: "Generate version"
@@ -432,50 +426,21 @@ jobs:
           # build a couple simple little dependencies that don't take
           # advantage of multiple cores and we get a little speedup by doing
           # them in parallel.
-          nix-build --cores 3 --max-jobs 2 --argstr pkgsVersion "nixpkgs-<>"
+          source .circleci/lib.sh
+          cache_if_able nix-build \
+            --cores 3 \
+            --max-jobs 2 \
+            --argstr pkgsVersion "nixpkgs-<>"
 
       - "run":
           name: "Test"
           command: |
             # Let it go somewhat wild for the test suite itself
-            nix-build --cores 8 --argstr pkgsVersion "nixpkgs-<>" tests.nix
-
-      - run:
-          # Send any new store objects to cachix.
-          name: "Push to Cachix"
-          when: "always"
-          command: |
-            # Cribbed from
-            # https://circleci.com/blog/managing-secrets-when-you-have-pull-requests-from-outside-contributors/
-            if [ -n "$CIRCLE_PR_NUMBER" ]; then
-              # I'm sure you're thinking "CIRCLE_PR_NUMBER must just be the
-              # number of the PR being built". Sorry, dear reader, you have
-              # guessed poorly. It is also conditionally set based on whether
-              # this is a PR from a fork or not.
-              #
-              # https://circleci.com/docs/2.0/env-vars/#built-in-environment-variables
-              echo "Skipping Cachix push for forked PR."
-            else
-              # If this *isn't* a build from a fork then we have the Cachix
-              # write key in our environment and we can push any new objects
-              # to Cachix.
-              #
-              # To decide what to push, we inspect the list of store objects
-              # that existed before and after we did most of our work. Any
-              # that are new after the work is probably a useful thing to have
-              # around so push it to the cache. We exclude all derivation
-              # objects (.drv files) because they're cheap to reconstruct and
-              # by the time you know their cache key you've already done all
-              # the work anyway.
-              #
-              # This shell expression for finding the objects and pushing them
-              # was from the Cachix docs:
-              #
-              # https://docs.cachix.org/continuous-integration-setup/circleci.html
-              #
-              # but they seem to have removed it now.
-              bash -c "comm -13 <(sort /tmp/store-path-pre-build | grep -v '\.drv$') <(nix path-info --all | grep -v '\.drv$' | sort) | cachix push $CACHIX_NAME"
-            fi
+            source .circleci/lib.sh
+            cache_if_able nix-build \
+              --cores 8 \
+              --argstr pkgsVersion "nixpkgs-<>" \
+              tests.nix
 
   typechecks:
     docker:

diff --git a/.circleci/lib.sh b/.circleci/lib.sh
new file mode 100644
index 000000000..f3fe07bae
--- /dev/null
+++ b/.circleci/lib.sh
@@ -0,0 +1,25 @@
+# Run a command, enabling cache writes to cachix if possible. The command is
+# accepted as a variable number of positional arguments (like argv).
+function cache_if_able() {
+    # The `cachix watch-exec ...` does our cache population. When it sees
+    # something added to the store (I guess) it pushes it to the named cache.
+    #
+    # We can only *push* to it if we have a CACHIX_AUTH_TOKEN, though.
+    # in-repo jobs will get this from CircleCI configuration but jobs from
+    # forks may not.
+    if [ -v CACHIX_AUTH_TOKEN ]; then
+        echo "Cachix credentials present; will attempt to write to cache."
+        cachix watch-exec "${CACHIX_NAME}" -- "$@"
+    else
+        # If we're building from a forked repository then we're allowed to
+        # not have the credentials (but it's also fine if the owner of the
+        # fork supplied their own).
+        if [ "${CIRCLE_PR_REPONAME}" == "https://github.com/tahoe-lafs/tahoe-lafs" ]; then
+            echo "Required credentials (CACHIX_AUTH_TOKEN) are missing."
+            return 1
+        else
+            echo "Cachix credentials missing; will not attempt cache writes."
+            "$@"
+        fi
+    fi
+}

From 21af00bf83ff8b1f684d965d772c564d7af92e2b Mon Sep 17 00:00:00 2001
From: Jean-Paul Calderone
Date: Wed, 21 Dec 2022 06:27:41 -0500
Subject: [PATCH 32/43] Report the CIRCLE_PR_REPONAME too, because who knows

---
 .circleci/lib.sh | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.circleci/lib.sh b/.circleci/lib.sh
index f3fe07bae..cc7ce5e97 100644
--- a/.circleci/lib.sh
+++ b/.circleci/lib.sh
@@ -7,6 +7,7 @@ function cache_if_able() {
     # We can only *push* to it if we have a CACHIX_AUTH_TOKEN, though.
     # in-repo jobs will get this from CircleCI configuration but jobs from
     # forks may not.
+    echo "Building PR from repo: ${CIRCLE_PR_REPONAME}"
     if [ -v CACHIX_AUTH_TOKEN ]; then
         echo "Cachix credentials present; will attempt to write to cache."
         cachix watch-exec "${CACHIX_NAME}" -- "$@"

From 25eb3ca262e0a2bff842e8eff78284f0723faa42 Mon Sep 17 00:00:00 2001
From: Jean-Paul Calderone
Date: Wed, 21 Dec 2022 06:47:21 -0500
Subject: [PATCH 33/43] Switch to a variable observed in practice

There is apparently no CIRCLE_PR_REPONAME set in the runtime environment,
either, despite what the docs say.
---
 .circleci/lib.sh | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.circleci/lib.sh b/.circleci/lib.sh
index cc7ce5e97..7717cdb18 100644
--- a/.circleci/lib.sh
+++ b/.circleci/lib.sh
@@ -7,7 +7,7 @@ function cache_if_able() {
     # We can only *push* to it if we have a CACHIX_AUTH_TOKEN, though.
     # in-repo jobs will get this from CircleCI configuration but jobs from
     # forks may not.
-    echo "Building PR from repo: ${CIRCLE_PR_REPONAME}"
+    echo "Building PR from user/org: ${CIRCLE_PROJECT_USERNAME}"
     if [ -v CACHIX_AUTH_TOKEN ]; then
         echo "Cachix credentials present; will attempt to write to cache."
         cachix watch-exec "${CACHIX_NAME}" -- "$@"
@@ -15,7 +15,7 @@ function cache_if_able() {
         # If we're building from a forked repository then we're allowed to
         # not have the credentials (but it's also fine if the owner of the
         # fork supplied their own).
-        if [ "${CIRCLE_PR_REPONAME}" == "https://github.com/tahoe-lafs/tahoe-lafs" ]; then
+        if [ "${CIRCLE_PROJECT_USERNAME}" == "tahoe-lafs" ]; then
             echo "Required credentials (CACHIX_AUTH_TOKEN) are missing."
             return 1
         else

From 2da3d43b2e4e7a0b6dff7f2efd7a8bb675a00ced Mon Sep 17 00:00:00 2001
From: Jean-Paul Calderone
Date: Wed, 21 Dec 2022 07:22:37 -0500
Subject: [PATCH 34/43] news fragment

---
 newsfragments/3870.minor | 0
 1 file changed, 0 insertions(+), 0 deletions(-)
 create mode 100644 newsfragments/3870.minor

diff --git a/newsfragments/3870.minor b/newsfragments/3870.minor
new file mode 100644
index 000000000..e69de29bb

From 825fd64dddc860e24fd85dcc891c728ef35779e6 Mon Sep 17 00:00:00 2001
From: Itamar Turner-Trauring
Date: Mon, 9 Jan 2023 10:52:24 -0500
Subject: [PATCH 35/43] News file.
---
 newsfragments/3964.removed | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 newsfragments/3964.removed

diff --git a/newsfragments/3964.removed b/newsfragments/3964.removed
new file mode 100644
index 000000000..1c2c3e544
--- /dev/null
+++ b/newsfragments/3964.removed
@@ -0,0 +1 @@
+Python 3.7 is no longer supported.
\ No newline at end of file

From 1482d419181c76d760359b09867c6d667f0753c9 Mon Sep 17 00:00:00 2001
From: Itamar Turner-Trauring
Date: Mon, 9 Jan 2023 11:01:45 -0500
Subject: [PATCH 36/43] Drop 3.7.

---
 .circleci/config.yml     | 52 ++++------------------------------------
 .github/workflows/ci.yml | 10 +++-----
 README.rst               |  2 +-
 setup.py                 |  9 +++----
 tox.ini                  |  6 +-----
 5 files changed, 13 insertions(+), 66 deletions(-)

diff --git a/.circleci/config.yml b/.circleci/config.yml
index 4dcf2a2db..21f60368c 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -167,12 +167,7 @@ jobs:
       command: |
         dist/Tahoe-LAFS/tahoe --version
 
-  debian-10: &DEBIAN
-    docker:
-      - <<: *DOCKERHUB_AUTH
-        image: "tahoelafsci/debian:10-py3.7"
-        user: "nobody"
-
+  debian-11: &DEBIAN
     environment: &UTF_8_ENVIRONMENT
       # In general, the test suite is not allowed to fail while the job
       # succeeds. But you can set this to "yes" if you want it to be
@@ -184,7 +179,7 @@ jobs:
       # filenames and argv).
       LANG: "en_US.UTF-8"
       # Select a tox environment to run for this job.
-      TAHOE_LAFS_TOX_ENVIRONMENT: "py37"
+      TAHOE_LAFS_TOX_ENVIRONMENT: "py39"
       # Additional arguments to pass to tox.
       TAHOE_LAFS_TOX_ARGS: ""
       # The path in which test artifacts will be placed.
@@ -252,15 +247,11 @@ jobs:
             /tmp/venv/bin/codecov
         fi
 
-  debian-11:
-    <<: *DEBIAN
     docker:
       - <<: *DOCKERHUB_AUTH
         image: "tahoelafsci/debian:11-py3.9"
         user: "nobody"
-    environment:
-      <<: *UTF_8_ENVIRONMENT
-      TAHOE_LAFS_TOX_ENVIRONMENT: "py39"
+
 
   # Restore later using PyPy3.8
   # pypy27-buster:
@@ -312,22 +303,6 @@ jobs:
     - run: *SETUP_VIRTUALENV
     - run: *RUN_TESTS
 
-  ubuntu-18-04: &UBUNTU_18_04
-    <<: *DEBIAN
-    docker:
-      - <<: *DOCKERHUB_AUTH
-        image: "tahoelafsci/ubuntu:18.04-py3.7"
-        user: "nobody"
-
-    environment:
-      <<: *UTF_8_ENVIRONMENT
-      # The default trial args include --rterrors which is incompatible with
-      # this reporter on Python 3. So drop that and just specify the
-      # reporter.
-      TAHOE_LAFS_TRIAL_ARGS: "--reporter=subunitv2-file"
-      TAHOE_LAFS_TOX_ENVIRONMENT: "py37"
-
   ubuntu-20-04:
     <<: *DEBIAN
     docker:
@@ -445,7 +420,7 @@ jobs:
   typechecks:
     docker:
       - <<: *DOCKERHUB_AUTH
-        image: "tahoelafsci/ubuntu:18.04-py3.7"
+        image: "tahoelafsci/ubuntu:20.04-py3.9"
 
     steps:
       - "checkout"
@@ -457,7 +432,7 @@ jobs:
   docs:
     docker:
      - <<: *DOCKERHUB_AUTH
-        image: "tahoelafsci/ubuntu:18.04-py3.7"
+        image: "tahoelafsci/ubuntu:20.04-py3.9"
 
     steps:
       - "checkout"
@@ -508,15 +483,6 @@ jobs:
         docker push tahoelafsci/${DISTRO}:${TAG}-py${PYTHON_VERSION}
 
-  build-image-debian-10:
-    <<: *BUILD_IMAGE
-
-    environment:
-      DISTRO: "debian"
-      TAG: "10"
-      PYTHON_VERSION: "3.7"
-
   build-image-debian-11:
     <<: *BUILD_IMAGE
 
@@ -525,14 +491,6 @@ jobs:
       TAG: "11"
       PYTHON_VERSION: "3.9"
 
-  build-image-ubuntu-18-04:
-    <<: *BUILD_IMAGE
-
-    environment:
-      DISTRO: "ubuntu"
-      TAG: "18.04"
-      PYTHON_VERSION: "3.7"
-
   build-image-ubuntu-20-04:
     <<: *BUILD_IMAGE

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index d7fa3244b..80b312008 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -48,21 +48,20 @@ jobs:
           - windows-latest
           - ubuntu-latest
         python-version:
-          - "3.7"
           - "3.8"
           - "3.9"
           - "3.10"
         include:
-          # On macOS don't bother with 3.7-3.8, just to get faster builds.
+          # On macOS don't bother with 3.8, just to get faster builds.
           - os: macos-latest
             python-version: "3.9"
           - os: macos-latest
             python-version: "3.10"
           # We only support PyPy on Linux at the moment.
-          - os: ubuntu-latest
-            python-version: "pypy-3.7"
           - os: ubuntu-latest
             python-version: "pypy-3.8"
+          - os: ubuntu-latest
+            python-version: "pypy-3.9"
 
     steps:
       # See https://github.com/actions/checkout. A fetch-depth of 0
@@ -162,9 +161,6 @@ jobs:
             force-foolscap: false
           # 22.04 has some issue with Tor at the moment:
           # https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3943
-          - os: ubuntu-20.04
-            python-version: "3.7"
-            force-foolscap: true
           - os: ubuntu-20.04
             python-version: "3.9"
             force-foolscap: false

diff --git a/README.rst b/README.rst
index 317378fae..bbf88610d 100644
--- a/README.rst
+++ b/README.rst
@@ -56,7 +56,7 @@ Once ``tahoe --version`` works, see `How to Run Tahoe-LAFS `__
 🐍 Python 2
 -----------
 
-Python 3.7 or later is now required.
+Python 3.8 or later is required.
 If you are still using Python 2.7, use Tahoe-LAFS version 1.17.1.

diff --git a/setup.py b/setup.py
index 1075e2129..edef7a4c3 100644
--- a/setup.py
+++ b/setup.py
@@ -223,7 +223,7 @@ def run_command(args, cwd=None):
     use_shell = sys.platform == "win32"
     try:
         p = subprocess.Popen(args, stdout=subprocess.PIPE, cwd=cwd, shell=use_shell)
-    except EnvironmentError as e:  # if this gives a SyntaxError, note that Tahoe-LAFS requires Python 3.7+
+    except EnvironmentError as e:  # if this gives a SyntaxError, note that Tahoe-LAFS requires Python 3.8+
         print("Warning: unable to run %r." % (" ".join(args),))
         print(e)
         return None
@@ -374,8 +374,8 @@ setup(name="tahoe-lafs", # also set in __init__.py
       package_dir = {'':'src'},
       packages=find_packages('src') + ['allmydata.test.plugins'],
       classifiers=trove_classifiers,
-      # We support Python 3.7 or later. 3.11 is not supported yet.
-      python_requires=">=3.7, <3.11",
+      # We support Python 3.8 or later. 3.11 is not supported yet.
+      python_requires=">=3.8, <3.11",
       install_requires=install_requires,
       extras_require={
           # Duplicate the Twisted pywin32 dependency here. See
@@ -388,9 +388,6 @@ setup(name="tahoe-lafs", # also set in __init__.py
           ],
           "test": [
               "flake8",
-              # On Python 3.7, importlib_metadata v5 breaks flake8.
-              # https://github.com/python/importlib_metadata/issues/407
-              "importlib_metadata<5; python_version < '3.8'",
               # Pin a specific pyflakes so we don't have different folks
               # disagreeing on what is or is not a lint issue. We can bump
               # this version from time to time, but we will do it

diff --git a/tox.ini b/tox.ini
index 96eed4e40..3e2dacbb2 100644
--- a/tox.ini
+++ b/tox.ini
@@ -7,11 +7,9 @@
 # the tox-gh-actions package.
 [gh-actions]
 python =
-    3.7: py37-coverage
     3.8: py38-coverage
     3.9: py39-coverage
     3.10: py310-coverage
-    pypy-3.7: pypy37
     pypy-3.8: pypy38
     pypy-3.9: pypy39
 
@@ -19,7 +17,7 @@ python =
 twisted = 1
 
 [tox]
-envlist = typechecks,codechecks,py{37,38,39,310}-{coverage},pypy27,pypy37,pypy38,pypy39,integration
+envlist = typechecks,codechecks,py{38,39,310}-{coverage},pypy27,pypy38,pypy39,integration
 minversion = 2.4
 
 [testenv]
@@ -49,8 +47,6 @@ deps =
     # regressions in new releases of this package that cause us the kind of
     # suffering we're trying to avoid with the above pins.
     certifi
-    # VCS hooks support
-    py37,!coverage: pre-commit
 
 # We add usedevelop=False because testing against a true installation gives
 # more useful results.
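
As an aside on the new ``python_requires = ">=3.8, <3.11"`` bound above: pip
enforces it at install time, and the same specifier can be checked against a
running interpreter with the ``packaging`` library. A small sketch, not part
of the patch (the variable names are illustrative):

    import sys
    from packaging.specifiers import SpecifierSet
    from packaging.version import Version

    # Mirrors the python_requires value from setup.py above.
    supported = SpecifierSet(">=3.8, <3.11")
    running = Version("{}.{}.{}".format(*sys.version_info[:3]))
    print(running in supported)  # True on CPython 3.8 through 3.10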
From c4153d54055e0c2e33343de812a77b14f7069439 Mon Sep 17 00:00:00 2001
From: Itamar Turner-Trauring
Date: Mon, 9 Jan 2023 11:03:15 -0500
Subject: [PATCH 37/43] Additional changes.

---
 newsfragments/3964.removed | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/newsfragments/3964.removed b/newsfragments/3964.removed
index 1c2c3e544..d022f94af 100644
--- a/newsfragments/3964.removed
+++ b/newsfragments/3964.removed
@@ -1 +1 @@
-Python 3.7 is no longer supported.
\ No newline at end of file
+Python 3.7 is no longer supported, and Debian 10 and Ubuntu 18.04 are no longer tested.
\ No newline at end of file

From 8c418832bb6ba62fefb1c740bea973cbaf5c07f9 Mon Sep 17 00:00:00 2001
From: Itamar Turner-Trauring
Date: Mon, 9 Jan 2023 11:06:57 -0500
Subject: [PATCH 38/43] Remove references to missing jobs.

---
 .circleci/config.yml | 9 ---------
 1 file changed, 9 deletions(-)

diff --git a/.circleci/config.yml b/.circleci/config.yml
index 21f60368c..834c5f9d2 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -15,16 +15,11 @@ workflows:
   ci:
     jobs:
       # Start with jobs testing various platforms.
-      - "debian-10":
-          {}
       - "debian-11":
           {}
 
       - "ubuntu-20-04":
           {}
-      - "ubuntu-18-04":
-          requires:
-            - "ubuntu-20-04"
 
       # Equivalent to RHEL 8; CentOS 8 is dead.
       - "oraclelinux-8":
@@ -85,12 +80,8 @@ workflows:
       # Contexts are managed in the CircleCI web interface:
       #
      # https://app.circleci.com/settings/organization/github/tahoe-lafs/contexts
-      - "build-image-debian-10": &DOCKERHUB_CONTEXT
-          context: "dockerhub-auth"
       - "build-image-debian-11":
           <<: *DOCKERHUB_CONTEXT
-      - "build-image-ubuntu-18-04":
-          <<: *DOCKERHUB_CONTEXT
       - "build-image-ubuntu-20-04":
           <<: *DOCKERHUB_CONTEXT
       - "build-image-fedora-35":

From 34f5da7246b725589d5dc1ec4febb1bce8c4029e Mon Sep 17 00:00:00 2001
From: Itamar Turner-Trauring
Date: Mon, 9 Jan 2023 11:08:31 -0500
Subject: [PATCH 39/43] And add back necessary anchor.

---
 .circleci/config.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.circleci/config.yml b/.circleci/config.yml
index 834c5f9d2..9080c43ed 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -80,7 +80,7 @@ workflows:
       # Contexts are managed in the CircleCI web interface:
       #
       # https://app.circleci.com/settings/organization/github/tahoe-lafs/contexts
-      - "build-image-debian-11":
+      - "build-image-debian-11": &DOCKERHUB_CONTEXT
           <<: *DOCKERHUB_CONTEXT
       - "build-image-ubuntu-20-04":
           <<: *DOCKERHUB_CONTEXT

From 6bb57e248de0410faf1d7b6ab43efa99a3b1e5eb Mon Sep 17 00:00:00 2001
From: Itamar Turner-Trauring
Date: Mon, 9 Jan 2023 13:09:59 -0500
Subject: [PATCH 40/43] Try to switch Nix off 3.7.

---
 default.nix | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/default.nix b/default.nix
index 5f4db2c78..e4f2dd4d4 100644
--- a/default.nix
+++ b/default.nix
@@ -29,7 +29,7 @@ in
 , pypiData ? sources.pypi-deps-db # the pypi package database snapshot to use
                                   # for dependency resolution
 
-, pythonVersion ? "python37" # a string choosing the python derivation from
+, pythonVersion ? "python39" # a string choosing the python derivation from
                              # nixpkgs to target
 
 , extras ? [ "tor" "i2p" ] # a list of strings identifying tahoe-lafs extras,

From 7b2f19b0fa0ada0f1d5a05056927fc84fa3fa327 Mon Sep 17 00:00:00 2001
From: Itamar Turner-Trauring
Date: Mon, 9 Jan 2023 13:16:04 -0500
Subject: [PATCH 41/43] Switch Nix off 3.7 some more.
---
 tests.nix | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/tests.nix b/tests.nix
index dd477c273..f8ed678f3 100644
--- a/tests.nix
+++ b/tests.nix
@@ -5,7 +5,7 @@ in
 { pkgsVersion ? "nixpkgs-21.11"
 , pkgs ? import sources.${pkgsVersion} { }
 , pypiData ? sources.pypi-deps-db
-, pythonVersion ? "python37"
+, pythonVersion ? "python39"
 , mach-nix ? import sources.mach-nix {
     inherit pkgs pypiData;
     python = pythonVersion;
@@ -21,7 +21,7 @@ let
     inherit pkgs;
     lib = pkgs.lib;
   };
-  tests_require = (mach-lib.extract "python37" ./. "extras_require" ).extras_require.test;
+  tests_require = (mach-lib.extract "python39" ./. "extras_require" ).extras_require.test;
 
   # Get the Tahoe-LAFS package itself. This does not include test
   # requirements and we don't ask for test requirements so that we can just

From b05793e56b3bfcedd75c633fc9186adf02aad48a Mon Sep 17 00:00:00 2001
From: Itamar Turner-Trauring
Date: Mon, 9 Jan 2023 13:45:22 -0500
Subject: [PATCH 42/43] Meaningless tweak to rerun CI.

---
 README.rst | 1 +
 1 file changed, 1 insertion(+)

diff --git a/README.rst b/README.rst
index bbf88610d..56451701a 100644
--- a/README.rst
+++ b/README.rst
@@ -45,6 +45,7 @@ Tahoe-LAFS was first designed in 2007, following the "principle of least authori
 
 Please read more about Tahoe-LAFS architecture `here `__.
 
+
 ✅ Installation
 ---------------

From 046d9cf802e541d77f203ad1d29edbff6f628790 Mon Sep 17 00:00:00 2001
From: Itamar Turner-Trauring
Date: Mon, 9 Jan 2023 14:25:47 -0500
Subject: [PATCH 43/43] Another meaningless tweak.

---
 README.rst | 1 -
 1 file changed, 1 deletion(-)

diff --git a/README.rst b/README.rst
index 56451701a..bbf88610d 100644
--- a/README.rst
+++ b/README.rst
@@ -45,7 +45,6 @@ Tahoe-LAFS was first designed in 2007, following the "principle of least authori
 
 Please read more about Tahoe-LAFS architecture `here `__.
 
-
 ✅ Installation
 ---------------