From 5a88dfd5753ba4243613566f02a878bc435d4e34 Mon Sep 17 00:00:00 2001
From: meejah
Date: Sat, 14 Nov 2020 01:56:03 -0700
Subject: [PATCH 001/172] refactor integration tests and add some for
 grid-manager

---
 integration/conftest.py                  | 172 ++------
 integration/grid.py                      | 507 +++++++++++++++++++++++
 integration/test_servers_of_happiness.py |   7 +-
 integration/test_tor.py                  |  39 +-
 integration/test_web.py                  |  10 +-
 integration/util.py                      |  54 ++-
 6 files changed, 614 insertions(+), 175 deletions(-)
 create mode 100644 integration/grid.py

diff --git a/integration/conftest.py b/integration/conftest.py
index ca18230cd..15450767b 100644
--- a/integration/conftest.py
+++ b/integration/conftest.py
@@ -8,10 +8,6 @@ from os.path import join, exists
 from tempfile import mkdtemp, mktemp
 from functools import partial

-from foolscap.furl import (
-    decode_furl,
-)
-
 from eliot import (
     to_file,
     log_call,
@@ -38,6 +34,11 @@ from util import (
     await_client_ready,
     TahoeProcess,
 )
+from grid import (
+    create_port_allocator,
+    create_flog_gatherer,
+    create_grid,
+)

 # pytest customization hooks

@@ -74,6 +75,12 @@ def reactor():
     return _reactor


+@pytest.fixture(scope='session')
+@log_call(action_type=u"integration:port_allocator", include_result=False)
+def port_allocator(reactor):
+    return create_port_allocator(start_port=45000)
+
+
 @pytest.fixture(scope='session')
 @log_call(action_type=u"integration:temp_dir", include_args=[])
 def temp_dir(request):
@@ -108,137 +115,30 @@ def flog_binary():
 @pytest.fixture(scope='session')
 @log_call(action_type=u"integration:flog_gatherer", include_args=[])
 def flog_gatherer(reactor, temp_dir, flog_binary, request):
-    out_protocol = _CollectOutputProtocol()
-    gather_dir = join(temp_dir, 'flog_gather')
-    reactor.spawnProcess(
-        out_protocol,
-        flog_binary,
-        (
-            'flogtool', 'create-gatherer',
-            '--location', 'tcp:localhost:3117',
-            '--port', '3117',
-            gather_dir,
-        )
+    fg = pytest_twisted.blockon(
+        create_flog_gatherer(reactor, request, temp_dir, flog_binary)
     )
-    pytest_twisted.blockon(out_protocol.done)
-
-    twistd_protocol = _MagicTextProtocol("Gatherer waiting at")
-    twistd_process = reactor.spawnProcess(
-        twistd_protocol,
-        which('twistd')[0],
-        (
-            'twistd', '--nodaemon', '--python',
-            join(gather_dir, 'gatherer.tac'),
-        ),
-        path=gather_dir,
-    )
-    pytest_twisted.blockon(twistd_protocol.magic_seen)
-
-    def cleanup():
-        _cleanup_tahoe_process(twistd_process, twistd_protocol.exited)
-
-        flog_file = mktemp('.flog_dump')
-        flog_protocol = _DumpOutputProtocol(open(flog_file, 'w'))
-        flog_dir = join(temp_dir, 'flog_gather')
-        flogs = [x for x in listdir(flog_dir) if x.endswith('.flog')]
-
-        print("Dumping {} flogtool logfiles to '{}'".format(len(flogs), flog_file))
-        reactor.spawnProcess(
-            flog_protocol,
-            flog_binary,
-            (
-                'flogtool', 'dump', join(temp_dir, 'flog_gather', flogs[0])
-            ),
-        )
-        print("Waiting for flogtool to complete")
-        try:
-            pytest_twisted.blockon(flog_protocol.done)
-        except ProcessTerminated as e:
-            print("flogtool exited unexpectedly: {}".format(str(e)))
-        print("Flogtool completed")
-
-    request.addfinalizer(cleanup)
-
-    with open(join(gather_dir, 'log_gatherer.furl'), 'r') as f:
-        furl = f.read().strip()
-    return furl
+    return fg


 @pytest.fixture(scope='session')
-@log_call(
-    action_type=u"integration:introducer",
-    include_args=["temp_dir", "flog_gatherer"],
-    include_result=False,
-)
-def introducer(reactor, temp_dir, flog_gatherer, request):
-    config = '''
-[node]
-nickname = introducer0
-web.port = 4560
-log_gatherer.furl = {log_furl}
-'''.format(log_furl=flog_gatherer)
-
-    intro_dir = join(temp_dir, 'introducer')
-    print("making introducer", intro_dir)
-
-    if not exists(intro_dir):
-        mkdir(intro_dir)
-        done_proto = _ProcessExitedProtocol()
-        _tahoe_runner_optional_coverage(
-            done_proto,
-            reactor,
-            request,
-            (
-                'create-introducer',
-                '--listen=tcp',
-                '--hostname=localhost',
-                intro_dir,
-            ),
-        )
-        pytest_twisted.blockon(done_proto.done)
-
-    # over-write the config file with our stuff
-    with open(join(intro_dir, 'tahoe.cfg'), 'w') as f:
-        f.write(config)
-
-    # on windows, "tahoe start" means: run forever in the foreground,
-    # but on linux it means daemonize. "tahoe run" is consistent
-    # between platforms.
-    protocol = _MagicTextProtocol('introducer running')
-    transport = _tahoe_runner_optional_coverage(
-        protocol,
-        reactor,
-        request,
-        (
-            'run',
-            intro_dir,
-        ),
+@log_call(action_type=u"integration:grid", include_args=[])
+def grid(reactor, request, temp_dir, flog_gatherer, port_allocator):
+    g = pytest_twisted.blockon(
+        create_grid(reactor, request, temp_dir, flog_gatherer, port_allocator)
     )
-    request.addfinalizer(partial(_cleanup_tahoe_process, transport, protocol.exited))
+    return g

-    pytest_twisted.blockon(protocol.magic_seen)
-    return TahoeProcess(transport, intro_dir)
+
+@pytest.fixture(scope='session')
+def introducer(grid):
+    return grid.introducer


 @pytest.fixture(scope='session')
 @log_call(action_type=u"integration:introducer:furl", include_args=["temp_dir"])
 def introducer_furl(introducer, temp_dir):
-    furl_fname = join(temp_dir, 'introducer', 'private', 'introducer.furl')
-    while not exists(furl_fname):
-        print("Don't see {} yet".format(furl_fname))
-        sleep(.1)
-    furl = open(furl_fname, 'r').read()
-    tubID, location_hints, name = decode_furl(furl)
-    if not location_hints:
-        # If there are no location hints then nothing can ever possibly
-        # connect to it and the only thing that can happen next is something
-        # will hang or time out. So just give up right now.
-        raise ValueError(
-            "Introducer ({!r}) fURL has no location hints!".format(
-                introducer_furl,
-            ),
-        )
-    return furl
+    return introducer.furl


 @pytest.fixture(scope='session')
@@ -317,28 +217,20 @@ def tor_introducer_furl(tor_introducer, temp_dir):
 @pytest.fixture(scope='session')
 @log_call(
     action_type=u"integration:storage_nodes",
-    include_args=["temp_dir", "introducer_furl", "flog_gatherer"],
+    include_args=["grid"],
     include_result=False,
 )
-def storage_nodes(reactor, temp_dir, introducer, introducer_furl, flog_gatherer, request):
+def storage_nodes(grid):
     nodes_d = []
     # start all 5 nodes in parallel
     for x in range(5):
-        name = 'node{}'.format(x)
-        web_port= 9990 + x
-        nodes_d.append(
-            _create_node(
-                reactor, request, temp_dir, introducer_furl, flog_gatherer, name,
-                web_port="tcp:{}:interface=localhost".format(web_port),
-                storage=True,
-            )
-        )
+        #nodes_d.append(grid.add_storage_node())
+        pytest_twisted.blockon(grid.add_storage_node())
+
     nodes_status = pytest_twisted.blockon(DeferredList(nodes_d))
-    nodes = []
-    for ok, process in nodes_status:
-        assert ok, "Storage node creation failed: {}".format(process)
-        nodes.append(process)
-    return nodes
+    for ok, value in nodes_status:
+        assert ok, "Storage node creation failed: {}".format(value)
+    return grid.storage_servers


 @pytest.fixture(scope='session')
diff --git a/integration/grid.py b/integration/grid.py
new file mode 100644
index 000000000..5c3086eea
--- /dev/null
+++ b/integration/grid.py
@@ -0,0 +1,507 @@
+"""
+Classes which directly represent various kinds of Tahoe processes
+that co-operate to form "a Grid".
+
+These methods and objects are used by conftest.py fixtures but may
+also be used as direct helpers for tests that don't want to (or can't)
+rely on 'the' global grid as provided by fixtures like 'alice' or
+'storage_servers'.
+"""
+
+from os import mkdir, listdir, environ
+from os.path import join, exists
+from tempfile import mkdtemp, mktemp
+
+from eliot import (
+    log_call,
+)
+
+from foolscap.furl import (
+    decode_furl,
+)
+
+from twisted.python.procutils import which
+from twisted.internet.defer import (
+    inlineCallbacks,
+    returnValue,
+    maybeDeferred,
+)
+from twisted.internet.task import (
+    deferLater,
+)
+from twisted.internet.interfaces import (
+    IProcessTransport,
+    IProcessProtocol,
+    IProtocol,
+)
+from twisted.internet.endpoints import (
+    TCP4ServerEndpoint,
+)
+from twisted.internet.protocol import (
+    Factory,
+    Protocol,
+)
+
+from util import (
+    _CollectOutputProtocol,
+    _MagicTextProtocol,
+    _DumpOutputProtocol,
+    _ProcessExitedProtocol,
+    _create_node,
+    _run_node,
+    _cleanup_tahoe_process,
+    _tahoe_runner_optional_coverage,
+    TahoeProcess,
+    await_client_ready,
+)
+
+import attr
+import pytest_twisted
+
+
+# further directions:
+# - "Grid" is unused, basically -- tie into the rest?
+#   - could make a Grid instance mandatory for create_* calls
+#   - could instead make create_* calls methods of Grid
+# - Bring more 'util' or 'conftest' code into here
+# - stop()/start()/restart() methods on StorageServer etc
+# - more-complex stuff like config changes (which imply a restart too)?
+
+
+@attr.s
+class FlogGatherer(object):
+    """
+    Flog Gatherer process.
+    """
+
+    process = attr.ib(
+        validator=attr.validators.provides(IProcessTransport)
+    )
+    protocol = attr.ib(
+        validator=attr.validators.provides(IProcessProtocol)
+    )
+    furl = attr.ib()
+
+
+@inlineCallbacks
+def create_flog_gatherer(reactor, request, temp_dir, flog_binary):
+    out_protocol = _CollectOutputProtocol()
+    gather_dir = join(temp_dir, 'flog_gather')
+    reactor.spawnProcess(
+        out_protocol,
+        flog_binary,
+        (
+            'flogtool', 'create-gatherer',
+            '--location', 'tcp:localhost:3117',
+            '--port', '3117',
+            gather_dir,
+        )
+    )
+    yield out_protocol.done
+
+    twistd_protocol = _MagicTextProtocol("Gatherer waiting at")
+    twistd_process = reactor.spawnProcess(
+        twistd_protocol,
+        which('twistd')[0],
+        (
+            'twistd', '--nodaemon', '--python',
+            join(gather_dir, 'gatherer.tac'),
+        ),
+        path=gather_dir,
+    )
+    yield twistd_protocol.magic_seen
+
+    def cleanup():
+        _cleanup_tahoe_process(twistd_process, twistd_protocol.exited)
+
+        flog_file = mktemp('.flog_dump')
+        flog_protocol = _DumpOutputProtocol(open(flog_file, 'w'))
+        flog_dir = join(temp_dir, 'flog_gather')
+        flogs = [x for x in listdir(flog_dir) if x.endswith('.flog')]
+
+        print("Dumping {} flogtool logfiles to '{}'".format(len(flogs), flog_file))
+        reactor.spawnProcess(
+            flog_protocol,
+            flog_binary,
+            (
+                'flogtool', 'dump', join(temp_dir, 'flog_gather', flogs[0])
+            ),
+        )
+        print("Waiting for flogtool to complete")
+        try:
+            pytest_twisted.blockon(flog_protocol.done)
+        except ProcessTerminated as e:
+            print("flogtool exited unexpectedly: {}".format(str(e)))
+        print("Flogtool completed")
+
+    request.addfinalizer(cleanup)
+
+    with open(join(gather_dir, 'log_gatherer.furl'), 'r') as f:
+        furl = f.read().strip()
+    returnValue(
+        FlogGatherer(
+            protocol=twistd_protocol,
+            process=twistd_process,
+            furl=furl,
+        )
+    )
+
+
+@attr.s
+class StorageServer(object):
+    """
+    Represents a Tahoe Storage Server
+    """
+
+    process = attr.ib(
+        validator=attr.validators.instance_of(TahoeProcess)
+    )
+    protocol = attr.ib(
+        validator=attr.validators.provides(IProcessProtocol)
+    )
+
+    @inlineCallbacks
+    def restart(self, reactor, request):
+        """
+        re-start our underlying process by issuing a TERM, waiting and
+        then running again. await_client_ready() will be done as well
+
+        Note that self.process and self.protocol will be new instances
+        after this.
+        """
+        self.process.transport.signalProcess('TERM')
+        yield self.protocol.exited
+        self.process = yield _run_node(
+            reactor, self.process.node_dir, request, None,
+        )
+        self.protocol = self.process.transport._protocol
+
+
+@inlineCallbacks
+def create_storage_server(reactor, request, temp_dir, introducer, flog_gatherer, name, web_port,
+                          needed=2, happy=3, total=4):
+    """
+    Create a new storage server
+    """
+    from util import _create_node
+    node_process = yield _create_node(
+        reactor, request, temp_dir, introducer.furl, flog_gatherer,
+        name, web_port, storage=True, needed=needed, happy=happy, total=total,
+    )
+    storage = StorageServer(
+        process=node_process,
+        protocol=node_process.transport._protocol,
+    )
+    returnValue(storage)
+
+
+@attr.s
+class Client(object):
+    """
+    Represents a Tahoe client
+    """
+
+    process = attr.ib(
+        validator=attr.validators.instance_of(TahoeProcess)
+    )
+    protocol = attr.ib(
+        validator=attr.validators.provides(IProcessProtocol)
+    )
+
+    @inlineCallbacks
+    def restart(self, reactor, request, servers=1):
+        """
+        re-start our underlying process by issuing a TERM, waiting and
+        then running again.
+
+        :param int servers: number of server connections we will wait
+            for before being 'ready'
+
+        Note that self.process and self.protocol will be new instances
+        after this.
+        """
+        self.process.transport.signalProcess('TERM')
+        yield self.protocol.exited
+        process = yield _run_node(
+            reactor, self.process.node_dir, request, None,
+        )
+        self.process = process
+        self.protocol = self.process.transport._protocol
+
+
+    # XXX add stop / start / restart
+    # ...maybe "reconfig" of some kind?
+
+
+@inlineCallbacks
+def create_client(reactor, request, temp_dir, introducer, flog_gatherer, name, web_port,
+                  needed=2, happy=3, total=4):
+    """
+    Create a new client
+    """
+    from util import _create_node
+    node_process = yield _create_node(
+        reactor, request, temp_dir, introducer.furl, flog_gatherer,
+        name, web_port, storage=False, needed=needed, happy=happy, total=total,
+    )
+    returnValue(
+        Client(
+            process=node_process,
+            protocol=node_process.transport._protocol,
+        )
+    )
+
+
+@attr.s
+class Introducer(object):
+    """
+    Represents a running introducer
+    """
+
+    process = attr.ib(
+        validator=attr.validators.instance_of(TahoeProcess)
+    )
+    protocol = attr.ib(
+        validator=attr.validators.provides(IProcessProtocol)
+    )
+    furl = attr.ib()
+
+
+def _validate_furl(furl_fname):
+    """
+    Opens and validates a fURL, ensuring location hints.
+    :returns: the furl
+    :raises: ValueError if no location hints
+    """
+    while not exists(furl_fname):
+        print("Don't see {} yet".format(furl_fname))
+        sleep(.1)
+    furl = open(furl_fname, 'r').read()
+    tubID, location_hints, name = decode_furl(furl)
+    if not location_hints:
+        # If there are no location hints then nothing can ever possibly
+        # connect to it and the only thing that can happen next is something
+        # will hang or time out. So just give up right now.
+        raise ValueError(
+            "Introducer ({!r}) fURL has no location hints!".format(
+                furl,
+            ),
+        )
+    return furl
+
+
+@inlineCallbacks
+@log_call(
+    action_type=u"integration:introducer",
+    include_args=["temp_dir", "flog_gatherer"],
+    include_result=False,
+)
+def create_introducer(reactor, request, temp_dir, flog_gatherer, port):
+    """
+    Run a new Introducer and return an Introducer instance.
+    """
+    config = (
+        '[node]\n'
+        'nickname = introducer{port}\n'
+        'web.port = {port}\n'
+        'log_gatherer.furl = {log_furl}\n'
+    ).format(
+        port=port,
+        log_furl=flog_gatherer.furl,
+    )
+
+    intro_dir = join(temp_dir, 'introducer{}'.format(port))
+
+    if not exists(intro_dir):
+        mkdir(intro_dir)
+        done_proto = _ProcessExitedProtocol()
+        _tahoe_runner_optional_coverage(
+            done_proto,
+            reactor,
+            request,
+            (
+                'create-introducer',
+                '--listen=tcp',
+                '--hostname=localhost',
+                intro_dir,
+            ),
+        )
+        yield done_proto.done
+
+    # over-write the config file with our stuff
+    with open(join(intro_dir, 'tahoe.cfg'), 'w') as f:
+        f.write(config)
+
+    # on windows, "tahoe start" means: run forever in the foreground,
+    # but on linux it means daemonize. "tahoe run" is consistent
+    # between platforms.
+    protocol = _MagicTextProtocol('introducer running')
+    transport = _tahoe_runner_optional_coverage(
+        protocol,
+        reactor,
+        request,
+        (
+            'run',
+            intro_dir,
+        ),
+    )
+
+    def clean():
+        return _cleanup_tahoe_process(transport, protocol.exited)
+    request.addfinalizer(clean)
+
+    yield protocol.magic_seen
+
+    furl_fname = join(intro_dir, 'private', 'introducer.furl')
+    while not exists(furl_fname):
+        print("Don't see {} yet".format(furl_fname))
+        yield deferLater(reactor, .1, lambda: None)
+    furl = _validate_furl(furl_fname)
+
+    returnValue(
+        Introducer(
+            process=TahoeProcess(transport, intro_dir),
+            protocol=protocol,
+            furl=furl,
+        )
+    )
+
+
+@attr.s
+class Grid(object):
+    """
+    Represents an entire Tahoe Grid setup
+
+    A Grid includes an Introducer, Flog Gatherer and some number of
+    Storage Servers.
+    """
+
+    _reactor = attr.ib()
+    _request = attr.ib()
+    _temp_dir = attr.ib()
+    _port_allocator = attr.ib()
+    introducer = attr.ib()
+    flog_gatherer = attr.ib()
+    storage_servers = attr.ib(factory=list)
+    clients = attr.ib(factory=dict)
+
+    @storage_servers.validator
+    def check(self, attribute, value):
+        for server in value:
+            if not isinstance(server, StorageServer):
+                raise ValueError(
+                    "storage_servers must be StorageServer"
+                )
+
+    @inlineCallbacks
+    def add_storage_node(self):
+        """
+        Creates a new storage node, returns a StorageServer instance
+        (which will already be added to our .storage_servers list)
+        """
+        port = yield self._port_allocator()
+        print("make {}".format(port))
+        name = 'node{}'.format(port)
+        web_port = 'tcp:{}:interface=localhost'.format(port)
+        server = yield create_storage_server(
+            self._reactor,
+            self._request,
+            self._temp_dir,
+            self.introducer,
+            self.flog_gatherer,
+            name,
+            web_port,
+        )
+        self.storage_servers.append(server)
+        returnValue(server)
+
+    @inlineCallbacks
+    def add_client(self, name, needed=2, happy=3, total=4):
+        """
+        Create a new client node
+        """
+        port = yield self._port_allocator()
+        web_port = 'tcp:{}:interface=localhost'.format(port)
+        client = yield create_client(
+            self._reactor,
+            self._request,
+            self._temp_dir,
+            self.introducer,
+            self.flog_gatherer,
+            name,
+            web_port,
+            needed=needed,
+            happy=happy,
+            total=total,
+        )
+        self.clients[name] = client
+        yield await_client_ready(client.process)
+        returnValue(client)
+
+
+
+# XXX THINK can we tie a whole *grid* to a single request? (I think
+# that's all that makes sense)
+@inlineCallbacks
+def create_grid(reactor, request, temp_dir, flog_gatherer, port_allocator):
+    """Create a new Grid with a running Introducer.
+    """
+    intro_port = yield port_allocator()
+    introducer = yield create_introducer(reactor, request, temp_dir, flog_gatherer, intro_port)
+    grid = Grid(
+        reactor,
+        request,
+        temp_dir,
+        port_allocator,
+        introducer,
+        flog_gatherer,
+    )
+    returnValue(grid)
+
+
+def create_port_allocator(start_port):
+    """
+    Returns a new port-allocator .. which is a zero-argument function
+    that returns Deferreds that fire with new, sequential ports
+    starting at `start_port` skipping any that already appear to have
+    a listener.
+
+    There can still be a race against other processes allocating ports
+    -- between the time when we check the status of the port and when
+    our subprocess starts up. This *could* be mitigated by instructing
+    the OS to not randomly-allocate ports in some range, and then
+    using that range here (explicitly, ourselves).
+
+    NB once we're Python3-only this could be an async-generator
+    """
+    port = [start_port - 1]
+
+    # import stays here to not interfere with reactor selection -- but
+    # maybe this function should be arranged to be called once from a
+    # fixture (with the reactor)?
+    from twisted.internet import reactor
+
+    class NothingProtocol(Protocol):
+        """
+        I do nothing.
+        """
+
+    def port_generator():
+        print("Checking port {}".format(port))
+        port[0] += 1
+        ep = TCP4ServerEndpoint(reactor, port[0], interface="localhost")
+        d = ep.listen(Factory.forProtocol(NothingProtocol))
+
+        def good(listening_port):
+            unlisten_d = maybeDeferred(listening_port.stopListening)
+            def return_port(_):
+                return port[0]
+            unlisten_d.addBoth(return_port)
+            return unlisten_d
+
+        def try_again(fail):
+            return port_generator()
+
+        d.addCallbacks(good, try_again)
+        return d
+    return port_generator
diff --git a/integration/test_servers_of_happiness.py b/integration/test_servers_of_happiness.py
index 97392bf00..fe3a466eb 100644
--- a/integration/test_servers_of_happiness.py
+++ b/integration/test_servers_of_happiness.py
@@ -38,8 +38,7 @@ def test_upload_immutable(reactor, temp_dir, introducer_furl, flog_gatherer, sto
     try:
         yield proto.done
         assert False, "should raise exception"
-    except Exception as e:
-        assert isinstance(e, ProcessTerminated)
+    except util.ProcessFailed as e:
+        assert "UploadUnhappinessError" in e.output

-    output = proto.output.getvalue()
-    assert "shares could be placed on only" in output
+    assert "shares could be placed on only" in proto.output.getvalue()
diff --git a/integration/test_tor.py b/integration/test_tor.py
index 3d169a88f..db38b13ea 100644
--- a/integration/test_tor.py
+++ b/integration/test_tor.py
@@ -69,25 +69,28 @@ def _create_anonymous_node(reactor, name, control_port, request, temp_dir, flog_
     node_dir = join(temp_dir, name)
     web_port = "tcp:{}:interface=localhost".format(control_port + 2000)

-    if True:
-        print("creating", node_dir)
-        mkdir(node_dir)
-        proto = util._DumpOutputProtocol(None)
-        reactor.spawnProcess(
-            proto,
-            sys.executable,
-            (
-                sys.executable, '-m', 'allmydata.scripts.runner',
-                'create-node',
-                '--nickname', name,
-                '--introducer', introducer_furl,
-                '--hide-ip',
-                '--tor-control-port', 'tcp:localhost:{}'.format(control_port),
-                '--listen', 'tor',
-                node_dir,
-            )
+    if exists(node_dir):
+        raise RuntimeError(
+            "A node already exists in '{}'".format(node_dir)
         )
-        yield proto.done
+    print("creating", node_dir)
+    mkdir(node_dir)
+    proto = util._DumpOutputProtocol(None)
+    reactor.spawnProcess(
+        proto,
+        sys.executable,
+        (
+            sys.executable, '-m', 'allmydata.scripts.runner',
+            'create-node',
+            '--nickname', name,
+            '--introducer', introducer_furl,
+            '--hide-ip',
+            '--tor-control-port', 'tcp:localhost:{}'.format(control_port),
+            '--listen', 'tor',
+            node_dir,
+        )
+    )
+    yield proto.done

     with open(join(node_dir, 'tahoe.cfg'), 'w') as f:
         f.write('''
diff --git a/integration/test_web.py b/integration/test_web.py
index fe2137ff3..6986e74c5 100644
--- a/integration/test_web.py
+++ b/integration/test_web.py
@@ -91,7 +91,7 @@ def test_helper_status(storage_nodes):
     successfully GET the /helper_status page
     """

-    url = util.node_url(storage_nodes[0].node_dir, "helper_status")
+    url = util.node_url(storage_nodes[0].process.node_dir, "helper_status")
     resp = requests.get(url)
    assert resp.status_code >= 200 and resp.status_code < 300
     dom = BeautifulSoup(resp.content, "html5lib")
@@ -412,7 +412,7 @@ def test_storage_info(storage_nodes):
     storage0 = storage_nodes[0]

     requests.get(
-        util.node_url(storage0.node_dir, u"storage"),
+        util.node_url(storage0.process.node_dir, u"storage"),
     )


@@ -423,7 +423,7 @@ def test_storage_info_json(storage_nodes):
     storage0 = storage_nodes[0]

     resp = requests.get(
-        util.node_url(storage0.node_dir, u"storage"),
+        util.node_url(storage0.process.node_dir, u"storage"),
         params={u"t": u"json"},
     )
     data = json.loads(resp.content)
@@ -435,12 +435,12 @@ def test_introducer_info(introducer):
     retrieve and confirm /introducer URI for the introducer
     """
     resp = requests.get(
-        util.node_url(introducer.node_dir, u""),
+        util.node_url(introducer.process.node_dir, u""),
     )
     assert "Introducer" in resp.content

     resp = requests.get(
-        util.node_url(introducer.node_dir, u""),
+        util.node_url(introducer.process.node_dir, u""),
         params={u"t": u"json"},
     )
     data = json.loads(resp.content)
diff --git a/integration/util.py b/integration/util.py
index a64bcbf8e..54898ec4a 100644
--- a/integration/util.py
+++ b/integration/util.py
@@ -5,6 +5,7 @@ from os import mkdir, environ
 from os.path import exists, join
 from six.moves import StringIO
 from functools import partial
+from shutil import rmtree

 from twisted.internet.defer import Deferred, succeed
 from twisted.internet.protocol import ProcessProtocol
@@ -35,15 +36,38 @@ class _ProcessExitedProtocol(ProcessProtocol):
         self.done.callback(None)


+class ProcessFailed(Exception):
+    """
+    A subprocess has failed.
+
+    :ivar ProcessTerminated reason: the original reason from .processExited
+
+    :ivar StringIO output: all stdout and stderr collected to this point.
+    """
+
+    def __init__(self, reason, output):
+        self.reason = reason
+        self.output = output
+
+    def __str__(self):
+        return "{}:\n{}".format(self.reason, self.output)
+
+
 class _CollectOutputProtocol(ProcessProtocol):
     """
     Internal helper. Collects all output (stdout + stderr) into
     self.output, and callback's on done with all of it after the
     process exits (for any reason).
     """
-    def __init__(self):
+    def __init__(self, stdin=None):
         self.done = Deferred()
         self.output = StringIO()
+        self._stdin = stdin
+
+    def connectionMade(self):
+        if self._stdin is not None:
+            self.transport.write(self._stdin)
+            self.transport.closeStdin()

     def processEnded(self, reason):
         if not self.done.called:
@@ -51,7 +75,7 @@ class _CollectOutputProtocol(ProcessProtocol):

     def processExited(self, reason):
         if not isinstance(reason.value, ProcessDone):
-            self.done.errback(reason)
+            self.done.errback(ProcessFailed(reason, self.output.getvalue()))

     def outReceived(self, data):
         self.output.write(data)
@@ -123,13 +147,27 @@ def _cleanup_tahoe_process(tahoe_transport, exited):
     try:
         print("signaling {} with TERM".format(tahoe_transport.pid))
         tahoe_transport.signalProcess('TERM')
-        print("signaled, blocking on exit")
+        print("signaled, blocking on exit {}".format(exited))
         pytest_twisted.blockon(exited)
         print("exited, goodbye")
     except ProcessExitedAlready:
         pass


+def run_tahoe(reactor, request, *args, **kwargs):
+    """
+    Helper to run tahoe with optional coverage.
+
+    :returns: a Deferred that fires when the command is done (or a
+        ProcessFailed exception if it exits non-zero)
+    """
+    stdin = kwargs.get("stdin", None)
+    protocol = _CollectOutputProtocol(stdin=stdin)
+    process = _tahoe_runner_optional_coverage(protocol, reactor, request, args)
+    process.exited = protocol.done
+    return protocol.done
+
+
 def _tahoe_runner_optional_coverage(proto, reactor, request, other_args):
     """
     Internal helper. Calls spawnProcess with `-m
@@ -232,7 +270,7 @@ def _create_node(reactor, request, temp_dir, introducer_furl, flog_gatherer, nam
     if exists(node_dir):
         created_d = succeed(None)
     else:
-        print("creating", node_dir)
+        print("creating: {}".format(node_dir))
         mkdir(node_dir)
         done_proto = _ProcessExitedProtocol()
         args = [
@@ -257,7 +295,7 @@ def _create_node(reactor, request, temp_dir, introducer_furl, flog_gatherer, nam
     def created(_):
         config_path = join(node_dir, 'tahoe.cfg')
         config = get_config(config_path)
-        set_config(config, 'node', 'log_gatherer.furl', flog_gatherer)
+        set_config(config, 'node', 'log_gatherer.furl', flog_gatherer.furl)
         write_config(config_path, config)

     created_d.addCallback(created)
@@ -444,7 +482,7 @@ def web_post(tahoe, uri_fragment, **kwargs):
     return resp.content


-def await_client_ready(tahoe, timeout=10, liveness=60*2):
+def await_client_ready(tahoe, timeout=10, liveness=60*2, servers=1):
     """
     Uses the status API to wait for a client-type node (in `tahoe`, a
     `TahoeProcess` instance usually from a fixture e.g. `alice`) to be
@@ -468,8 +506,8 @@ def await_client_ready(tahoe, timeout=10, liveness=60*2):
             time.sleep(1)
             continue

-        if len(js['servers']) == 0:
-            print("waiting because no servers at all")
+        if len(js['servers']) < servers:
+            print("waiting because fewer than {} server(s)".format(servers))
             time.sleep(1)
             continue
         server_times = [

From 2e2128619335d7b6a87f7fac060544568582232d Mon Sep 17 00:00:00 2001
From: meejah
Date: Mon, 16 Nov 2020 01:19:01 -0700
Subject: [PATCH 002/172] grid-manager tests

---
 integration/test_grid_manager.py | 274 +++++++++++++++++++++++++++++++
 1 file changed, 274 insertions(+)
 create mode 100644 integration/test_grid_manager.py

diff --git a/integration/test_grid_manager.py b/integration/test_grid_manager.py
new file mode 100644
index 000000000..ce426c6d7
--- /dev/null
+++ b/integration/test_grid_manager.py
@@ -0,0 +1,274 @@
+import sys
+import time
+import json
+import shutil
+from os import mkdir, unlink, listdir, utime
+from os.path import join, exists, getmtime
+
+from cryptography.hazmat.primitives.serialization import (
+    Encoding,
+    PublicFormat,
+)
+
+from twisted.internet.utils import (
+    getProcessOutputAndValue,
+)
+from twisted.internet.defer import (
+    inlineCallbacks,
+    returnValue,
+)
+
+from allmydata.crypto import ed25519
+from allmydata.util import base32
+from allmydata.util import configutil
+
+import util
+from grid import (
+    create_grid,
+)
+
+import pytest_twisted
+
+
+@inlineCallbacks
+def _run_gm(reactor, *args, **kwargs):
+    """
+    Run the grid-manager process, passing all arguments as extra CLI
+    args.
+
+    :returns: all process output
+    """
+    output, errput, exit_code = yield getProcessOutputAndValue(
+        sys.executable,
+        ("-m", "allmydata.cli.grid_manager") + args,
+        reactor=reactor,
+        **kwargs
+    )
+    if exit_code != 0:
+        raise util.ProcessFailed(
+            RuntimeError("Exit code {}".format(exit_code)),
+            output + errput,
+        )
+    returnValue(output)
+
+
+@pytest_twisted.inlineCallbacks
+def test_create_certificate(reactor, request):
+    """
+    The Grid Manager produces a valid, correctly-signed certificate.
+    """
+    gm_config = yield _run_gm(reactor, "--config", "-", "create")
+    privkey_bytes = json.loads(gm_config)['private_key'].encode('ascii')
+    privkey, pubkey = ed25519.signing_keypair_from_string(privkey_bytes)
+
+    # Note that zara + her key here are arbitrary and don't match any
+    # "actual" clients in the test-grid; we're just checking that the
+    # Grid Manager signs this properly.
+    gm_config = yield _run_gm(
+        reactor, "--config", "-", "add",
+        "zara", "pub-v0-kzug3ut2m7ziihf3ndpqlquuxeie4foyl36wn54myqc4wmiwe4ga",
+        stdinBytes=gm_config,
+    )
+    zara_cert_bytes = yield _run_gm(
+        reactor, "--config", "-", "sign", "zara", "1",
+        stdinBytes=gm_config,
+    )
+    zara_cert = json.loads(zara_cert_bytes)
+
+    # confirm that zara's certificate is made by the Grid Manager
+    # (.verify returns None on success, raises exception on error)
+    pubkey.verify(
+        base32.a2b(zara_cert['signature'].encode('ascii')),
+        zara_cert['certificate'].encode('ascii'),
+    )
+
+
+@pytest_twisted.inlineCallbacks
+def test_remove_client(reactor, request):
+    """
+    A Grid Manager can add and successfully remove a client
+    """
+    gm_config = yield _run_gm(
+        reactor, "--config", "-", "create",
+    )
+
+    gm_config = yield _run_gm(
+        reactor, "--config", "-", "add",
+        "zara", "pub-v0-kzug3ut2m7ziihf3ndpqlquuxeie4foyl36wn54myqc4wmiwe4ga",
+        stdinBytes=gm_config,
+    )
+    gm_config = yield _run_gm(
+        reactor, "--config", "-", "add",
+        "yakov", "pub-v0-kvxhb3nexybmipkrar2ztfrwp4uxxsmrjzkpzafit3ket4u5yldq",
+        stdinBytes=gm_config,
+    )
+    assert "zara" in json.loads(gm_config)['storage_servers']
+    assert "yakov" in json.loads(gm_config)['storage_servers']
+
+    gm_config = yield _run_gm(
+        reactor, "--config", "-", "remove",
+        "zara",
+        stdinBytes=gm_config,
+    )
+    assert "zara" not in json.loads(gm_config)['storage_servers']
+    assert "yakov" in json.loads(gm_config)['storage_servers']
+
+
+@pytest_twisted.inlineCallbacks
+def test_remove_last_client(reactor, request):
+    """
+    A Grid Manager can remove all clients
+    """
+    gm_config = yield _run_gm(
+        reactor, "--config", "-", "create",
+    )
+
+    gm_config = yield _run_gm(
+        reactor, "--config", "-", "add",
+        "zara", "pub-v0-kzug3ut2m7ziihf3ndpqlquuxeie4foyl36wn54myqc4wmiwe4ga",
+        stdinBytes=gm_config,
+    )
+    assert "zara" in json.loads(gm_config)['storage_servers']
+
+    gm_config = yield _run_gm(
+        reactor, "--config", "-", "remove",
+        "zara",
+        stdinBytes=gm_config,
+    )
+    # there are no storage servers left at all now
+    assert "storage_servers" not in json.loads(gm_config)
+
+
+@pytest_twisted.inlineCallbacks
+def test_add_remove_client_file(reactor, request, temp_dir):
+    """
+    A Grid Manager can add and successfully remove a client (when
+    keeping data on disk)
+    """
+    gmconfig = join(temp_dir, "gmtest")
+    gmconfig_file = join(temp_dir, "gmtest", "config.json")
+    yield _run_gm(
+        reactor, "--config", gmconfig, "create",
+    )
+
+    yield _run_gm(
+        reactor, "--config", gmconfig, "add",
+        "zara", "pub-v0-kzug3ut2m7ziihf3ndpqlquuxeie4foyl36wn54myqc4wmiwe4ga",
+    )
+    yield _run_gm(
+        reactor, "--config", gmconfig, "add",
+        "yakov", "pub-v0-kvxhb3nexybmipkrar2ztfrwp4uxxsmrjzkpzafit3ket4u5yldq",
+    )
+    assert "zara" in json.load(open(gmconfig_file, "r"))['storage_servers']
+    assert "yakov" in json.load(open(gmconfig_file, "r"))['storage_servers']
+
+    yield _run_gm(
+        reactor, "--config", gmconfig, "remove",
+        "zara",
+    )
+    assert "zara" not in json.load(open(gmconfig_file, "r"))['storage_servers']
+    assert "yakov" in json.load(open(gmconfig_file, "r"))['storage_servers']
+
+
+@pytest_twisted.inlineCallbacks
+def test_reject_storage_server(reactor, request, temp_dir, flog_gatherer, port_allocator):
+    """
+    A client with happiness=2 fails to upload to a Grid when it is
+    using Grid Manager and there is only 1 storage server with a valid
+    certificate.
+    """
+    grid = yield create_grid(reactor, request, temp_dir, flog_gatherer, port_allocator)
+    storage0 = yield grid.add_storage_node()
+    storage1 = yield grid.add_storage_node()
+
+    gm_config = yield _run_gm(
+        reactor, "--config", "-", "create",
+    )
+    gm_privkey_bytes = json.loads(gm_config)['private_key'].encode('ascii')
+    gm_privkey, gm_pubkey = ed25519.signing_keypair_from_string(gm_privkey_bytes)
+
+    # create certificate for the first storage-server
+    pubkey_fname = join(storage0.process.node_dir, "node.pubkey")
+    with open(pubkey_fname, 'r') as f:
+        pubkey_str = f.read().strip()
+
+    gm_config = yield _run_gm(
+        reactor, "--config", "-", "add",
+        "storage0", pubkey_str,
+        stdinBytes=gm_config,
+    )
+    assert json.loads(gm_config)['storage_servers'].keys() == ['storage0']
+
+    print("inserting certificate")
+    cert = yield _run_gm(
+        reactor, "--config", "-", "sign", "storage0", "1",
+        stdinBytes=gm_config,
+    )
+    print(cert)
+
+    yield util.run_tahoe(
+        reactor, request, "--node-directory", storage0.process.node_dir,
+        "admin", "add-grid-manager-cert",
+        "--name", "default",
+        "--filename", "-",
+        stdin=cert,
+    )
+
+    # re-start this storage server
+    yield storage0.restart(reactor, request)
+
+    # now only one storage-server has the certificate .. configure
+    # diana to have the grid-manager certificate
+
+    diana = yield grid.add_client("diana", needed=2, happy=2, total=2)
+
+    config = configutil.get_config(join(diana.process.node_dir, "tahoe.cfg"))
+    config.add_section("grid_managers")
+    config.set("grid_managers", "test", ed25519.string_from_verifying_key(gm_pubkey))
+    with open(join(diana.process.node_dir, "tahoe.cfg"), "w") as f:
+        config.write(f)
+
+    yield diana.restart(reactor, request, servers=2)
+
+    # try to put something into the grid, which should fail (because
+    # diana has happy=2 but should only find storage0 to be acceptable
+    # to upload to)
+
+    try:
+        yield util.run_tahoe(
+            reactor, request, "--node-directory", diana.process.node_dir,
+            "put", "-",
+            stdin="some content\n" * 200,
+        )
+        assert False, "Should get a failure"
+    except util.ProcessFailed as e:
+        assert 'UploadUnhappinessError' in e.output
+
+
+@pytest_twisted.inlineCallbacks
+def test_identity(reactor, request, temp_dir):
+    """
+    Dump public key to CLI
+    """
+    gm_config = join(temp_dir, "test_identity")
+    yield _run_gm(
+        reactor, "--config", gm_config, "create",
+    )
+
+    # ask the CLI for the grid-manager pubkey
+    pubkey = yield _run_gm(
+        reactor, "--config", gm_config, "public-identity",
+    )
+    alleged_pubkey = ed25519.verifying_key_from_string(pubkey.strip())
+
+    # load the grid-manager pubkey "ourselves"
+    with open(join(gm_config, "config.json"), "r") as f:
+        real_config = json.load(f)
+    real_privkey, real_pubkey = ed25519.signing_keypair_from_string(
+        real_config["private_key"].encode("ascii"),
+    )
+
+    # confirm the CLI told us the correct thing
+    alleged_bytes = alleged_pubkey.public_bytes(Encoding.Raw, PublicFormat.Raw)
+    real_bytes = real_pubkey.public_bytes(Encoding.Raw, PublicFormat.Raw)
+    assert alleged_bytes == real_bytes, "Keys don't match"

From 8400893976966cb698608811473a438511662428 Mon Sep 17 00:00:00 2001
From: meejah
Date: Mon, 16 Nov 2020 01:30:38 -0700
Subject: [PATCH 003/172] news

---
 newsfragments/3508.minor | 0
 1 file changed, 0 insertions(+), 0 deletions(-)
 create mode 100644 newsfragments/3508.minor

diff --git a/newsfragments/3508.minor b/newsfragments/3508.minor
new file mode 100644
index 000000000..e69de29bb

From 928e61bf224717aba58e4c340f82e0621f040609 Mon Sep 17 00:00:00 2001
From: meejah
Date: Wed, 18 May 2022 12:12:09 -0600
Subject: [PATCH 004/172] Log 'something' when we fail to instantiate a client

---
 newsfragments/3899.bugfix       |  1 +
 src/allmydata/storage_client.py | 11 +++++++----
 2 files changed, 8 insertions(+), 4 deletions(-)
 create mode 100644 newsfragments/3899.bugfix

diff --git a/newsfragments/3899.bugfix b/newsfragments/3899.bugfix
new file mode 100644
index 000000000..a55239c38
--- /dev/null
+++ b/newsfragments/3899.bugfix
@@ -0,0 +1 @@
+Print a useful message when a storage-client cannot be matched to configuration
diff --git a/src/allmydata/storage_client.py b/src/allmydata/storage_client.py
index 68164e697..9056b9e7a 100644
--- a/src/allmydata/storage_client.py
+++ b/src/allmydata/storage_client.py
@@ -667,6 +667,9 @@ def _storage_from_foolscap_plugin(node_config, config, announcement, get_rref):

     :param allmydata.node._Config node_config: The node configuration to
         pass to the plugin.
+
+    :param dict announcement: The storage announcement for the storage
+        server we should build
     """
     plugins = {
         plugin.name: plugin
@@ -687,7 +690,8 @@ def _storage_from_foolscap_plugin(node_config, config, announcement, get_rref):
                     option,
                     get_rref,
                 )
-    raise AnnouncementNotMatched()
+    plugin_names = ", ".join(sorted(list(config.storage_plugins.keys())))
+    raise AnnouncementNotMatched(plugin_names)


 @implementer(IServer)
@@ -761,9 +765,8 @@ class NativeStorageServer(service.MultiService):
                     # able to get the most up-to-date value.
                     self.get_rref,
                 )
-            except AnnouncementNotMatched:
-                # Nope.
-                pass
+            except AnnouncementNotMatched as e:
+                print('No plugin for storage-server "{nickname}" from plugins: {plugins}'.format(nickname=ann.get("nickname", ""), plugins=e.args[0]))
             else:
                 return _FoolscapStorage.from_announcement(
                     self._server_id,

From 21112fd22bb780e8fb06478a1e5b43cbc41fecf1 Mon Sep 17 00:00:00 2001
From: meejah
Date: Wed, 18 May 2022 22:09:21 -0600
Subject: [PATCH 005/172] twisted new-logger, not print()

---
 src/allmydata/storage_client.py | 8 +++++++-
 1 file changed, 7 insertions(+), 1 deletion(-)

diff --git a/src/allmydata/storage_client.py b/src/allmydata/storage_client.py
index 9056b9e7a..3fc18a908 100644
--- a/src/allmydata/storage_client.py
+++ b/src/allmydata/storage_client.py
@@ -52,6 +52,7 @@ from zope.interface import (
 )
 from twisted.internet import defer
 from twisted.application import service
+from twisted.logger import Logger
 from twisted.plugin import (
     getPlugins,
 )
@@ -720,6 +721,7 @@ class NativeStorageServer(service.MultiService):
         }),
         "application-version": "unknown: no get_version()",
     })
+    log = Logger()

     def __init__(self, server_id, ann, tub_maker, handler_overrides, node_config, config=StorageClientConfig()):
         service.MultiService.__init__(self)
@@ -766,7 +768,11 @@ class NativeStorageServer(service.MultiService):
                     self.get_rref,
                 )
             except AnnouncementNotMatched as e:
-                print('No plugin for storage-server "{nickname}" from plugins: {plugins}'.format(nickname=ann.get("nickname", ""), plugins=e.args[0]))
+                self.log.error(
+                    'No plugin for storage-server "{nickname}" from plugins: {plugins}',
+                    nickname=ann.get("nickname", ""),
+                    plugins=e.args[0],
+                )
             else:
                 return _FoolscapStorage.from_announcement(
                     self._server_id,

From 839aaea541a2d9504abe46a1b8ecb97e333f8971 Mon Sep 17 00:00:00 2001
From: meejah
Date: Wed, 8 Jun 2022 21:26:40 -0600
Subject: [PATCH 006/172] let misconfigured servers show up, and display
 information about missing plugins

---
 src/allmydata/storage_client.py | 20 +++++++++++++++++++-
 1 file changed, 19 insertions(+), 1 deletion(-)
diff --git a/src/allmydata/storage_client.py b/src/allmydata/storage_client.py
index 3fc18a908..a3974d4b9 100644
--- a/src/allmydata/storage_client.py
+++ b/src/allmydata/storage_client.py
@@ -661,6 +661,19 @@ class AnnouncementNotMatched(Exception):
     """


+@attr.s(auto_exc=True)
+class MissingPlugin(Exception):
+    """
+    A particular plugin was requested, but is missing
+    """
+
+    plugin_name = attr.ib()
+    nickname = attr.ib()
+
+    def __str__(self):
+        return "Missing plugin '{}' for server '{}'".format(self.plugin_name, self.nickname)
+
+
 def _storage_from_foolscap_plugin(node_config, config, announcement, get_rref):
     """
     Construct an ``IStorageServer`` from the most locally-preferred plugin
@@ -682,7 +695,7 @@ def _storage_from_foolscap_plugin(node_config, config, announcement, get_rref):
     try:
         plugin = plugins[plugin_name]
     except KeyError:
-        raise ValueError("{} not installed".format(plugin_name))
+        raise MissingPlugin(plugin_name, announcement.get(u"nickname", ""))
     for option in storage_options:
         if plugin_name == option[u"name"]:
             furl = option[u"storage-server-FURL"]
@@ -773,6 +786,11 @@ class NativeStorageServer(service.MultiService):
                     nickname=ann.get("nickname", ""),
                     plugins=e.args[0],
                 )
+            except MissingPlugin as e:
+                self.log.failure("Missing plugin")
+                ns = _NullStorage()
+                ns.longname = ''.format(e.args[0])
+                return ns
             else:
                 return _FoolscapStorage.from_announcement(
                     self._server_id,

From 6116b04ff7bd7a910651fb53f94b437f5595243f Mon Sep 17 00:00:00 2001
From: meejah
Date: Fri, 10 Jun 2022 14:08:53 -0600
Subject: [PATCH 007/172] ignore incorrectly packaged autobahn versions

---
 setup.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/setup.py b/setup.py
index c84d0ecde..b14893712 100644
--- a/setup.py
+++ b/setup.py
@@ -114,7 +114,9 @@ install_requires = [
     "attrs >= 18.2.0",

     # WebSocket library for twisted and asyncio
-    "autobahn >= 19.5.2",
+    "autobahn >= 19.5.2, != 22.5.1, != 22.4.2, != 22.4.1",
+    # (the ignored versions above don't have autobahn.twisted.testing
+    # packaged properly)

     # Support for Python 3 transition
     "future >= 0.18.2",

From 671e829f4e5d7a58be75ff1c4ef673b7f7e2fb3d Mon Sep 17 00:00:00 2001
From: Itamar Turner-Trauring
Date: Wed, 27 Jul 2022 12:23:20 -0400
Subject: [PATCH 008/172] We need to pass in the furl here.

---
 integration/util.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/integration/util.py b/integration/util.py
index f84a6aed4..5e644f19d 100644
--- a/integration/util.py
+++ b/integration/util.py
@@ -336,7 +336,7 @@ def _create_node(reactor, request, temp_dir, introducer_furl, flog_gatherer, nam
         config,
         u'node',
         u'log_gatherer.furl',
-        flog_gatherer,
+        flog_gatherer.furl,
     )
     write_config(FilePath(config_path), config)
     created_d.addCallback(created)

From 2999ca45798d466ff8ee8f94eef1ac841f4d93db Mon Sep 17 00:00:00 2001
From: Itamar Turner-Trauring
Date: Wed, 27 Jul 2022 12:23:34 -0400
Subject: [PATCH 009/172] It's bytes now.

---
 integration/test_servers_of_happiness.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/integration/test_servers_of_happiness.py b/integration/test_servers_of_happiness.py
index 3376b91d0..4cbb94654 100644
--- a/integration/test_servers_of_happiness.py
+++ b/integration/test_servers_of_happiness.py
@@ -51,7 +51,7 @@ def test_upload_immutable(reactor, temp_dir, introducer_furl, flog_gatherer, sto
         yield proto.done
         assert False, "should raise exception"
     except util.ProcessFailed as e:
-        assert "UploadUnhappinessError" in e.output
+        assert b"UploadUnhappinessError" in e.output

     output = proto.output.getvalue()
     assert b"shares could be placed on only" in output

From 106b67db5588364727d3f6add1b9942943b34f58 Mon Sep 17 00:00:00 2001
From: Itamar Turner-Trauring
Date: Wed, 27 Jul 2022 12:23:40 -0400
Subject: [PATCH 010/172] It's bytes now.

---
 integration/test_grid_manager.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/integration/test_grid_manager.py b/integration/test_grid_manager.py
index c01773b96..704dee04b 100644
--- a/integration/test_grid_manager.py
+++ b/integration/test_grid_manager.py
@@ -239,7 +239,7 @@ def test_reject_storage_server(reactor, request, temp_dir, flog_gatherer, port_a
         )
         assert False, "Should get a failure"
     except util.ProcessFailed as e:
-        assert 'UploadUnhappinessError' in e.output
+        assert b'UploadUnhappinessError' in e.output

From 02cb4105b3a182c82e8b2a2b66755dcac6385ac8 Mon Sep 17 00:00:00 2001
From: Itamar Turner-Trauring
Date: Fri, 29 Jul 2022 09:43:37 -0400
Subject: [PATCH 011/172] A lot closer to passing grid manager integration
 tests.

---
 integration/test_grid_manager.py  | 6 +++---
 src/allmydata/cli/grid_manager.py | 4 ++++
 src/allmydata/storage_client.py   | 7 ++++---
 3 files changed, 11 insertions(+), 6 deletions(-)

diff --git a/integration/test_grid_manager.py b/integration/test_grid_manager.py
index 704dee04b..63ee827b0 100644
--- a/integration/test_grid_manager.py
+++ b/integration/test_grid_manager.py
@@ -194,7 +194,7 @@ def test_reject_storage_server(reactor, request, temp_dir, flog_gatherer, port_a
         "storage0", pubkey_str,
         stdinBytes=gm_config,
     )
-    assert json.loads(gm_config)['storage_servers'].keys() == ['storage0']
+    assert json.loads(gm_config)['storage_servers'].keys() == {'storage0'}

     print("inserting certificate")
     cert = yield _run_gm(
@@ -221,7 +221,7 @@ def test_reject_storage_server(reactor, request, temp_dir, flog_gatherer, port_a

     config = configutil.get_config(join(diana.process.node_dir, "tahoe.cfg"))
     config.add_section("grid_managers")
-    config.set("grid_managers", "test", ed25519.string_from_verifying_key(gm_pubkey))
+    config.set("grid_managers", "test", str(ed25519.string_from_verifying_key(gm_pubkey), "ascii"))
     with open(join(diana.process.node_dir, "tahoe.cfg"), "w") as f:
         config.write(f)

@@ -235,7 +235,7 @@ def test_reject_storage_server(reactor, request, temp_dir, flog_gatherer, port_a
         yield util.run_tahoe(
             reactor, request, "--node-directory", diana.process.node_dir,
             "put", "-",
-            stdin="some content\n" * 200,
+            stdin=b"some content\n" * 200,
         )
         assert False, "Should get a failure"
     except util.ProcessFailed as e:
diff --git a/src/allmydata/cli/grid_manager.py b/src/allmydata/cli/grid_manager.py
index 4ef53887c..d3a11b62d 100644
--- a/src/allmydata/cli/grid_manager.py
+++ b/src/allmydata/cli/grid_manager.py
@@ -225,3 +225,7 @@ def _config_path_from_option(config: str) -> Optional[FilePath]:
     if config == "-":
         return None
     return FilePath(config)
+
+
+if __name__ == '__main__':
+    grid_manager()
diff --git a/src/allmydata/storage_client.py b/src/allmydata/storage_client.py
index 91073579d..7fe4d6bd2 100644
--- a/src/allmydata/storage_client.py
+++ b/src/allmydata/storage_client.py
@@ -37,8 +37,9 @@ from os import urandom
 import re
 import time
 import hashlib
-
+from io import StringIO
 from configparser import NoSectionError
+import json

 import attr
 from zope.interface import (
@@ -67,7 +68,7 @@ from allmydata.interfaces import (
     IFoolscapStoragePlugin,
 )
 from allmydata.grid_manager import (
-    create_grid_manager_verifier,
+    create_grid_manager_verifier, SignedCertificate
 )
 from allmydata.crypto import (
     ed25519,
@@ -289,7 +290,7 @@ class StorageFarmBroker(service.MultiService):
             handler_overrides = server.get("connections", {})
             gm_verifier = create_grid_manager_verifier(
                 self.storage_client_config.grid_manager_keys,
-                server["ann"].get("grid-manager-certificates", []),
+                [SignedCertificate.load(StringIO(json.dumps(data))) for data in server["ann"].get("grid-manager-certificates", [])],
                 "pub-{}".format(str(server_id, "ascii")),  # server_id is v0- not pub-v0-key .. for reasons?
             )

From ad027aff7656abc098539f4159fa30638581ad35 Mon Sep 17 00:00:00 2001
From: meejah
Date: Fri, 12 Aug 2022 00:34:47 -0600
Subject: [PATCH 012/172] compare bytes to bytes

---
 src/allmydata/grid_manager.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/allmydata/grid_manager.py b/src/allmydata/grid_manager.py
index f502c413e..d776fb7d0 100644
--- a/src/allmydata/grid_manager.py
+++ b/src/allmydata/grid_manager.py
@@ -466,7 +466,7 @@ def create_grid_manager_verifier(keys, certs, public_key, now_fn=None, bad_cert=
         now = now_fn()
         for cert in valid_certs:
             expires = datetime.utcfromtimestamp(cert['expires'])
-            if cert['public_key'].encode("ascii") == public_key:
+            if cert['public_key'] == public_key:
                 if expires > now:
                     # not-expired
                     return True

From cb065aefbd417cc1d546744ff746e25dbe35f999 Mon Sep 17 00:00:00 2001
From: meejah
Date: Fri, 12 Aug 2022 01:17:22 -0600
Subject: [PATCH 013/172] key is bytes

---
 src/allmydata/storage_client.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/allmydata/storage_client.py b/src/allmydata/storage_client.py
index 7fe4d6bd2..49663f141 100644
--- a/src/allmydata/storage_client.py
+++ b/src/allmydata/storage_client.py
@@ -291,7 +291,7 @@ class StorageFarmBroker(service.MultiService):
             gm_verifier = create_grid_manager_verifier(
                 self.storage_client_config.grid_manager_keys,
                 [SignedCertificate.load(StringIO(json.dumps(data))) for data in server["ann"].get("grid-manager-certificates", [])],
-                "pub-{}".format(str(server_id, "ascii")),  # server_id is v0- not pub-v0-key .. for reasons?
+                "pub-{}".format(str(server_id, "ascii")).encode("ascii"),  # server_id is v0- not pub-v0-key .. for reasons?
             )

From 4d779cfe0742fb9e3d75ca33fb3984d33e5e7586 Mon Sep 17 00:00:00 2001
From: meejah
Date: Fri, 12 Aug 2022 01:17:33 -0600
Subject: [PATCH 014/172] more assert

---
 src/allmydata/grid_manager.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/src/allmydata/grid_manager.py b/src/allmydata/grid_manager.py
index d776fb7d0..0201eace5 100644
--- a/src/allmydata/grid_manager.py
+++ b/src/allmydata/grid_manager.py
@@ -466,7 +466,9 @@ def create_grid_manager_verifier(keys, certs, public_key, now_fn=None, bad_cert=
         now = now_fn()
         for cert in valid_certs:
             expires = datetime.utcfromtimestamp(cert['expires'])
-            if cert['public_key'] == public_key:
+            pc = cert['public_key'].encode('ascii')
+            assert type(pc) == type(public_key), "{} isn't {}".format(type(pc), type(public_key))
+            if pc == public_key:
                 if expires > now:
                     # not-expired
                     return True

From 1676e9e7c5e6b9c4208c6b71e6379581ee17e047 Mon Sep 17 00:00:00 2001
From: meejah
Date: Fri, 12 Aug 2022 01:27:01 -0600
Subject: [PATCH 015/172] unused

---
 integration/test_servers_of_happiness.py | 2 --
 1 file changed, 2 deletions(-)

diff --git a/integration/test_servers_of_happiness.py b/integration/test_servers_of_happiness.py
index 4cbb94654..3adc11340 100644
--- a/integration/test_servers_of_happiness.py
+++ b/integration/test_servers_of_happiness.py
@@ -13,8 +13,6 @@ if PY2:
 import sys
 from os.path import join

-from twisted.internet.error import ProcessTerminated
-
 from . import util

 import pytest_twisted

From 9ff863e6cd47ad6c255491b2a6127b944b835f9a Mon Sep 17 00:00:00 2001
From: Itamar Turner-Trauring
Date: Fri, 12 Aug 2022 09:54:12 -0400
Subject: [PATCH 016/172] Fix lint.

---
 integration/test_servers_of_happiness.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/integration/test_servers_of_happiness.py b/integration/test_servers_of_happiness.py
index 4cbb94654..b85eb8e5b 100644
--- a/integration/test_servers_of_happiness.py
+++ b/integration/test_servers_of_happiness.py
@@ -1,4 +1,4 @@
-"""
+P"""
 Ported to Python 3.
 """
 from __future__ import absolute_import
@@ -13,8 +13,6 @@ if PY2:
 import sys
 from os.path import join

-from twisted.internet.error import ProcessTerminated
-
 from . import util

 import pytest_twisted

From 0c6881e6150ffa590aec6775586de0d83d657017 Mon Sep 17 00:00:00 2001
From: Itamar Turner-Trauring
Date: Fri, 12 Aug 2022 09:59:43 -0400
Subject: [PATCH 017/172] Fix race condition.

---
 integration/test_grid_manager.py | 28 +++++++++++++++++++---------
 1 file changed, 19 insertions(+), 9 deletions(-)

diff --git a/integration/test_grid_manager.py b/integration/test_grid_manager.py
index 63ee827b0..866856be7 100644
--- a/integration/test_grid_manager.py
+++ b/integration/test_grid_manager.py
@@ -214,6 +214,9 @@ def test_reject_storage_server(reactor, request, temp_dir, flog_gatherer, port_a
     # re-start this storage server
     yield storage0.restart(reactor, request)

+    import time
+    time.sleep(1)
+
     # now only one storage-server has the certificate .. configure
     # diana to have the grid-manager certificate

@@ -234,15 +237,22 @@ def test_reject_storage_server(reactor, request, temp_dir, flog_gatherer, port_a
     # diana has happy=2 but should only find storage0 to be acceptable
     # to upload to)

-    try:
-        yield util.run_tahoe(
-            reactor, request, "--node-directory", diana.process.node_dir,
-            "put", "-",
-            stdin=b"some content\n" * 200,
-        )
-        assert False, "Should get a failure"
-    except util.ProcessFailed as e:
-        assert b'UploadUnhappinessError' in e.output
+    # Takes a little bit of time for node to connect:
+    for i in range(10):
+        try:
+            yield util.run_tahoe(
+                reactor, request, "--node-directory", diana.process.node_dir,
+                "put", "-",
+                stdin=b"some content\n" * 200,
+            )
+            assert False, "Should get a failure"
+        except util.ProcessFailed as e:
+            if b'UploadUnhappinessError' in e.output:
+                # We're done! We've succeeded.
+                return
+        time.sleep(0.2)
+
+    assert False, "Failed to see one out of two servers"

From 298600969af240a3caff30fb2f2735fa669606ee Mon Sep 17 00:00:00 2001
From: Itamar Turner-Trauring
Date: Fri, 12 Aug 2022 10:06:35 -0400
Subject: [PATCH 018/172] Fix typo.

---
 integration/test_servers_of_happiness.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/integration/test_servers_of_happiness.py b/integration/test_servers_of_happiness.py
index b85eb8e5b..3adc11340 100644
--- a/integration/test_servers_of_happiness.py
+++ b/integration/test_servers_of_happiness.py
@@ -1,4 +1,4 @@
-P"""
+"""
 Ported to Python 3.

From c4a32b65ff7ada59ed4de0324e2634f3dc50bc29 Mon Sep 17 00:00:00 2001
From: meejah
Date: Sat, 13 Aug 2022 11:45:51 -0600
Subject: [PATCH 019/172] actually wait

---
 integration/grid.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/integration/grid.py b/integration/grid.py
index 3cb16c929..eb25d9514 100644
--- a/integration/grid.py
+++ b/integration/grid.py
@@ -175,6 +175,7 @@ class StorageServer(object):
             reactor, self.process.node_dir, request, None,
         )
         self.protocol = self.process.transport._protocol
+        yield await_client_ready(self.process)


 @inlineCallbacks

From 06a5176626dbd14d41d8ab6c3307462be9cc279c Mon Sep 17 00:00:00 2001
From: meejah
Date: Sat, 13 Aug 2022 11:46:02 -0600
Subject: [PATCH 020/172] happy-path grid-manager test

---
 integration/test_grid_manager.py | 70 ++++++++++++++++++++++++++++++++
 1 file changed, 70 insertions(+)

diff --git a/integration/test_grid_manager.py b/integration/test_grid_manager.py
index 63ee827b0..672700e15 100644
--- a/integration/test_grid_manager.py
+++ b/integration/test_grid_manager.py
@@ -242,6 +242,76 @@ def test_reject_storage_server(reactor, request, temp_dir, flog_gatherer, port_a
         assert b'UploadUnhappinessError' in e.output


+@pytest_twisted.inlineCallbacks
+def test_accept_storage_server(reactor, request, temp_dir, flog_gatherer, port_allocator):
+    """
+    Successfully upload to a Grid Manager enabled Grid.
+    """
+    grid = yield create_grid(reactor, request, temp_dir, flog_gatherer, port_allocator)
+    happy0 = yield grid.add_storage_node()
+    happy1 = yield grid.add_storage_node()
+
+    gm_config = yield _run_gm(
+        reactor, "--config", "-", "create",
+    )
+    gm_privkey_bytes = json.loads(gm_config)['private_key'].encode('ascii')
+    gm_privkey, gm_pubkey = ed25519.signing_keypair_from_string(gm_privkey_bytes)
+
+    # create certificate for the storage-servers
+    servers = (
+        ("happy0", happy0),
+        ("happy1", happy1),
+    )
+    for st_name, st in servers:
+        pubkey_fname = join(st.process.node_dir, "node.pubkey")
+        with open(pubkey_fname, 'r') as f:
+            pubkey_str = f.read().strip()
+
+        gm_config = yield _run_gm(
+            reactor, "--config", "-", "add",
+            st_name, pubkey_str,
+            stdinBytes=gm_config,
+        )
+    assert json.loads(gm_config)['storage_servers'].keys() == {'happy0', 'happy1'}
+
+    print("inserting certificates")
+    for st_name, st in servers:
+        cert = yield _run_gm(
+            reactor, "--config", "-", "sign", st_name, "1",
+            stdinBytes=gm_config,
+        )
+
+        yield util.run_tahoe(
+            reactor, request, "--node-directory", st.process.node_dir,
+            "admin", "add-grid-manager-cert",
+            "--name", "default",
+            "--filename", "-",
+            stdin=cert,
+        )
+
+    # re-start the storage servers
+    yield happy0.restart(reactor, request)
+    yield happy1.restart(reactor, request)
+
+    # configure edna to have the grid-manager certificate
+
+    edna = yield grid.add_client("edna", needed=2, happy=2, total=2)
+
+    config = configutil.get_config(join(edna.process.node_dir, "tahoe.cfg"))
+    config.add_section("grid_managers")
+    config.set("grid_managers", "test", str(ed25519.string_from_verifying_key(gm_pubkey), "ascii"))
+    with open(join(edna.process.node_dir, "tahoe.cfg"), "w") as f:
+        config.write(f)
+
+    yield edna.restart(reactor, request, servers=2)
+
+    yield util.run_tahoe(
+        reactor, request, "--node-directory", edna.process.node_dir,
+        "put", "-",
+        stdin=b"some content\n" * 200,
+    )
+
+
 @pytest_twisted.inlineCallbacks
 def test_identity(reactor, request, temp_dir):
     """

From 34dd39bfbf78e627a2ca05e457c444d0e0ee5e8e Mon Sep 17 00:00:00 2001
From: meejah
Date: Sat, 13 Aug 2022 11:51:01 -0600
Subject: [PATCH 021/172] fix race with 'await_client_ready' instead

---
 integration/grid.py              |  1 +
 integration/test_grid_manager.py | 28 +++++++++++-----------------
 2 files changed, 12 insertions(+), 17 deletions(-)

diff --git a/integration/grid.py b/integration/grid.py
index eb25d9514..4e5d8a900 100644
--- a/integration/grid.py
+++ b/integration/grid.py
@@ -228,6 +228,7 @@ class Client(object):
         )
         self.process = process
         self.protocol = self.process.transport._protocol
+        yield await_client_ready(self.process, minimum_number_of_servers=servers)


     # XXX add stop / start / restart
diff --git a/integration/test_grid_manager.py b/integration/test_grid_manager.py
index 866856be7..b24149a3b 100644
--- a/integration/test_grid_manager.py
+++ b/integration/test_grid_manager.py
@@ -214,9 +214,6 @@ def test_reject_storage_server(reactor, request, temp_dir, flog_gatherer, port_a
     # re-start this storage server
     yield storage0.restart(reactor, request)

-    import time
-    time.sleep(1)
-
configure
     # diana to have the grid-manager certificate
@@ -234,20 +231,17 @@ def test_reject_storage_server(reactor, request, temp_dir, flog_gatherer, port_a
     # diana has happy=2 but should only find storage0 to be acceptable
     # to upload to)
 
-    # Takes a little bit of time for node to connect:
-    for i in range(10):
-        try:
-            yield util.run_tahoe(
-                reactor, request, "--node-directory", diana.process.node_dir,
-                "put", "-",
-                stdin=b"some content\n" * 200,
-            )
-            assert False, "Should get a failure"
-        except util.ProcessFailed as e:
-            if b'UploadUnhappinessError' in e.output:
-                # We're done! We've succeeded.
-                return
-            time.sleep(0.2)
+    try:
+        yield util.run_tahoe(
+            reactor, request, "--node-directory", diana.process.node_dir,
+            "put", "-",
+            stdin=b"some content\n" * 200,
+        )
+        assert False, "Should get a failure"
+    except util.ProcessFailed as e:
+        if b'UploadUnhappinessError' in e.output:
+            # We're done! We've succeeded.
+            return
 
     assert False, "Failed to see one out of two servers"
 

From 04b0c30c11343838b246ed276a1fdac230383594 Mon Sep 17 00:00:00 2001
From: meejah
Date: Sun, 25 Sep 2022 14:08:05 -0600
Subject: [PATCH 022/172] clean up comments

---
 integration/test_grid_manager.py | 9 +++++----
 1 file changed, 5 insertions(+), 4 deletions(-)

diff --git a/integration/test_grid_manager.py b/integration/test_grid_manager.py
index b24149a3b..d89f1e8f6 100644
--- a/integration/test_grid_manager.py
+++ b/integration/test_grid_manager.py
@@ -261,7 +261,7 @@ def test_accept_storage_server(reactor, request, temp_dir, flog_gatherer, port_a
     gm_privkey_bytes = json.loads(gm_config)['private_key'].encode('ascii')
     gm_privkey, gm_pubkey = ed25519.signing_keypair_from_string(gm_privkey_bytes)
 
-    # create certificate for the storage-servers
+    # create certificates for all storage-servers
     servers = (
         ("happy0", happy0),
         ("happy1", happy1),
@@ -278,7 +278,8 @@ def test_accept_storage_server(reactor, request, temp_dir, flog_gatherer, port_a
     )
     assert json.loads(gm_config)['storage_servers'].keys() == {'happy0', 'happy1'}
 
-    print("inserting certificates")
+    # add the certificates from the grid-manager to the storage servers
+    print("inserting storage-server certificates")
     for st_name, st in servers:
         cert = yield _run_gm(
             reactor, "--config", "-", "sign", st_name, "1",
@@ -297,8 +298,7 @@ def test_accept_storage_server(reactor, request, temp_dir, flog_gatherer, port_a
     yield happy0.restart(reactor, request)
     yield happy1.restart(reactor, request)
 
-    # configure edna to have the grid-manager certificate
-
+    # configure edna (a client) to have the grid-manager certificate
     edna = yield grid.add_client("edna", needed=2, happy=2, total=2)
 
     config = configutil.get_config(join(edna.process.node_dir, "tahoe.cfg"))
@@ -309,6 +309,7 @@ def test_accept_storage_server(reactor, request, temp_dir, flog_gatherer, port_a
 
     yield edna.restart(reactor, request, servers=2)
 
+    # confirm that Edna will upload to the GridManager-enabled Grid
     yield util.run_tahoe(
         reactor, request, "--node-directory", edna.process.node_dir,
         "put", "-",
         stdin=b"some content\n" * 200,
     )

From af227fb31517b11c1117eb0749e7231afc9d9e62 Mon Sep 17 00:00:00 2001
From: meejah
Date: Mon, 26 Sep 2022 00:02:40 -0600
Subject: [PATCH 023/172] coverage for grid-manager tests

---
 integration/test_grid_manager.py | 53 +++++++++++++++++---------------
 1 file changed, 29 insertions(+), 24 deletions(-)

diff --git a/integration/test_grid_manager.py b/integration/test_grid_manager.py
index d89f1e8f6..0136a11ac 100644
--- a/integration/test_grid_manager.py
+++ b/integration/test_grid_manager.py
@@ -28,16 +28,21 
@@ import pytest_twisted @inlineCallbacks -def _run_gm(reactor, *args, **kwargs): +def _run_gm(reactor, request, *args, **kwargs): """ Run the grid-manager process, passing all arguments as extra CLI args. :returns: all process output """ + if request.config.getoption('coverage'): + base_args = ("-b", "-m", "coverage", "run", "-m", "allmydata.cli.grid_manager") + else: + base_args = ("-m", "allmydata.cli.grid_manager") + output, errput, exit_code = yield getProcessOutputAndValue( sys.executable, - ("-m", "allmydata.cli.grid_manager") + args, + base_args + args, reactor=reactor, **kwargs ) @@ -54,7 +59,7 @@ def test_create_certificate(reactor, request): """ The Grid Manager produces a valid, correctly-signed certificate. """ - gm_config = yield _run_gm(reactor, "--config", "-", "create") + gm_config = yield _run_gm(reactor, request, "--config", "-", "create") privkey_bytes = json.loads(gm_config)['private_key'].encode('ascii') privkey, pubkey = ed25519.signing_keypair_from_string(privkey_bytes) @@ -62,12 +67,12 @@ def test_create_certificate(reactor, request): # "actual" clients in the test-grid; we're just checking that the # Grid Manager signs this properly. gm_config = yield _run_gm( - reactor, "--config", "-", "add", + reactor, request, "--config", "-", "add", "zara", "pub-v0-kzug3ut2m7ziihf3ndpqlquuxeie4foyl36wn54myqc4wmiwe4ga", stdinBytes=gm_config, ) zara_cert_bytes = yield _run_gm( - reactor, "--config", "-", "sign", "zara", "1", + reactor, request, "--config", "-", "sign", "zara", "1", stdinBytes=gm_config, ) zara_cert = json.loads(zara_cert_bytes) @@ -86,16 +91,16 @@ def test_remove_client(reactor, request): A Grid Manager can add and successfully remove a client """ gm_config = yield _run_gm( - reactor, "--config", "-", "create", + reactor, request, "--config", "-", "create", ) gm_config = yield _run_gm( - reactor, "--config", "-", "add", + reactor, request, "--config", "-", "add", "zara", "pub-v0-kzug3ut2m7ziihf3ndpqlquuxeie4foyl36wn54myqc4wmiwe4ga", stdinBytes=gm_config, ) gm_config = yield _run_gm( - reactor, "--config", "-", "add", + reactor, request, "--config", "-", "add", "yakov", "pub-v0-kvxhb3nexybmipkrar2ztfrwp4uxxsmrjzkpzafit3ket4u5yldq", stdinBytes=gm_config, ) @@ -103,7 +108,7 @@ def test_remove_client(reactor, request): assert "yakov" in json.loads(gm_config)['storage_servers'] gm_config = yield _run_gm( - reactor, "--config", "-", "remove", + reactor, request, "--config", "-", "remove", "zara", stdinBytes=gm_config, ) @@ -117,18 +122,18 @@ def test_remove_last_client(reactor, request): A Grid Manager can remove all clients """ gm_config = yield _run_gm( - reactor, "--config", "-", "create", + reactor, request, "--config", "-", "create", ) gm_config = yield _run_gm( - reactor, "--config", "-", "add", + reactor, request, "--config", "-", "add", "zara", "pub-v0-kzug3ut2m7ziihf3ndpqlquuxeie4foyl36wn54myqc4wmiwe4ga", stdinBytes=gm_config, ) assert "zara" in json.loads(gm_config)['storage_servers'] gm_config = yield _run_gm( - reactor, "--config", "-", "remove", + reactor, request, "--config", "-", "remove", "zara", stdinBytes=gm_config, ) @@ -145,22 +150,22 @@ def test_add_remove_client_file(reactor, request, temp_dir): gmconfig = join(temp_dir, "gmtest") gmconfig_file = join(temp_dir, "gmtest", "config.json") yield _run_gm( - reactor, "--config", gmconfig, "create", + reactor, request, "--config", gmconfig, "create", ) yield _run_gm( - reactor, "--config", gmconfig, "add", + reactor, request, "--config", gmconfig, "add", "zara", 
"pub-v0-kzug3ut2m7ziihf3ndpqlquuxeie4foyl36wn54myqc4wmiwe4ga", ) yield _run_gm( - reactor, "--config", gmconfig, "add", + reactor, request, "--config", gmconfig, "add", "yakov", "pub-v0-kvxhb3nexybmipkrar2ztfrwp4uxxsmrjzkpzafit3ket4u5yldq", ) assert "zara" in json.load(open(gmconfig_file, "r"))['storage_servers'] assert "yakov" in json.load(open(gmconfig_file, "r"))['storage_servers'] yield _run_gm( - reactor, "--config", gmconfig, "remove", + reactor, request, "--config", gmconfig, "remove", "zara", ) assert "zara" not in json.load(open(gmconfig_file, "r"))['storage_servers'] @@ -179,7 +184,7 @@ def test_reject_storage_server(reactor, request, temp_dir, flog_gatherer, port_a _ = yield grid.add_storage_node() gm_config = yield _run_gm( - reactor, "--config", "-", "create", + reactor, request, "--config", "-", "create", ) gm_privkey_bytes = json.loads(gm_config)['private_key'].encode('ascii') gm_privkey, gm_pubkey = ed25519.signing_keypair_from_string(gm_privkey_bytes) @@ -190,7 +195,7 @@ def test_reject_storage_server(reactor, request, temp_dir, flog_gatherer, port_a pubkey_str = f.read().strip() gm_config = yield _run_gm( - reactor, "--config", "-", "add", + reactor, request, "--config", "-", "add", "storage0", pubkey_str, stdinBytes=gm_config, ) @@ -198,7 +203,7 @@ def test_reject_storage_server(reactor, request, temp_dir, flog_gatherer, port_a print("inserting certificate") cert = yield _run_gm( - reactor, "--config", "-", "sign", "storage0", "1", + reactor, request, "--config", "-", "sign", "storage0", "1", stdinBytes=gm_config, ) print(cert) @@ -256,7 +261,7 @@ def test_accept_storage_server(reactor, request, temp_dir, flog_gatherer, port_a happy1 = yield grid.add_storage_node() gm_config = yield _run_gm( - reactor, "--config", "-", "create", + reactor, request, "--config", "-", "create", ) gm_privkey_bytes = json.loads(gm_config)['private_key'].encode('ascii') gm_privkey, gm_pubkey = ed25519.signing_keypair_from_string(gm_privkey_bytes) @@ -272,7 +277,7 @@ def test_accept_storage_server(reactor, request, temp_dir, flog_gatherer, port_a pubkey_str = f.read().strip() gm_config = yield _run_gm( - reactor, "--config", "-", "add", + reactor, request, "--config", "-", "add", st_name, pubkey_str, stdinBytes=gm_config, ) @@ -282,7 +287,7 @@ def test_accept_storage_server(reactor, request, temp_dir, flog_gatherer, port_a print("inserting storage-server certificates") for st_name, st in servers: cert = yield _run_gm( - reactor, "--config", "-", "sign", st_name, "1", + reactor, request, "--config", "-", "sign", st_name, "1", stdinBytes=gm_config, ) @@ -324,12 +329,12 @@ def test_identity(reactor, request, temp_dir): """ gm_config = join(temp_dir, "test_identity") yield _run_gm( - reactor, "--config", gm_config, "create", + reactor, request, "--config", gm_config, "create", ) # ask the CLI for the grid-manager pubkey pubkey = yield _run_gm( - reactor, "--config", gm_config, "public-identity", + reactor, request, "--config", gm_config, "public-identity", ) alleged_pubkey = ed25519.verifying_key_from_string(pubkey.strip()) From 8250c5fdd54a909a17f24d99ae2ec89e78fb4600 Mon Sep 17 00:00:00 2001 From: meejah Date: Mon, 26 Sep 2022 15:40:55 -0600 Subject: [PATCH 024/172] edna -> freya --- integration/test_grid_manager.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/integration/test_grid_manager.py b/integration/test_grid_manager.py index 0136a11ac..1856ef435 100644 --- a/integration/test_grid_manager.py +++ b/integration/test_grid_manager.py @@ -303,20 +303,20 @@ 
def test_accept_storage_server(reactor, request, temp_dir, flog_gatherer, port_a yield happy0.restart(reactor, request) yield happy1.restart(reactor, request) - # configure edna (a client) to have the grid-manager certificate - edna = yield grid.add_client("edna", needed=2, happy=2, total=2) + # configure freya (a client) to have the grid-manager certificate + freya = yield grid.add_client("freya", needed=2, happy=2, total=2) - config = configutil.get_config(join(edna.process.node_dir, "tahoe.cfg")) + config = configutil.get_config(join(freya.process.node_dir, "tahoe.cfg")) config.add_section("grid_managers") config.set("grid_managers", "test", str(ed25519.string_from_verifying_key(gm_pubkey), "ascii")) - with open(join(edna.process.node_dir, "tahoe.cfg"), "w") as f: + with open(join(freya.process.node_dir, "tahoe.cfg"), "w") as f: config.write(f) - yield edna.restart(reactor, request, servers=2) + yield freya.restart(reactor, request, servers=2) - # confirm that Edna will upload to the GridManager-enabled Grid + # confirm that Freya will upload to the GridManager-enabled Grid yield util.run_tahoe( - reactor, request, "--node-directory", edna.process.node_dir, + reactor, request, "--node-directory", freya.process.node_dir, "put", "-", stdin=b"some content\n" * 200, ) From c6fc82665c3fcf4dba9809be4405daba33f0d66c Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Tue, 29 Nov 2022 09:33:05 -0500 Subject: [PATCH 025/172] Pull `_make_storage_system` into a free function for easier testing --- src/allmydata/storage_client.py | 153 ++++++++++++++++++-------------- 1 file changed, 85 insertions(+), 68 deletions(-) diff --git a/src/allmydata/storage_client.py b/src/allmydata/storage_client.py index 410bfd28b..5dc4beb22 100644 --- a/src/allmydata/storage_client.py +++ b/src/allmydata/storage_client.py @@ -33,7 +33,7 @@ Ported to Python 3. from __future__ import annotations from six import ensure_text -from typing import Union +from typing import Union, Callable, Optional import re, time, hashlib from os import urandom from configparser import NoSectionError @@ -57,6 +57,7 @@ from twisted.plugin import ( from eliot import ( log_call, ) +from foolscap.ipb import IRemoteReference from foolscap.api import eventually, RemoteException from foolscap.reconnector import ( ReconnectionInfo, @@ -80,6 +81,9 @@ from allmydata.storage.http_client import ( ClientException as HTTPClientException, StorageClientMutables, ReadVector, TestWriteVectors, WriteVector, TestVector, ClientException ) +from .node import _Config + +_log = Logger() ANONYMOUS_STORAGE_NURLS = "anonymous-storage-NURLs" @@ -732,6 +736,85 @@ def _available_space_from_version(version): return available_space +def _make_storage_system( + node_config: _Config, + config: StorageClientConfig, + ann: dict, + server_id: bytes, + get_rref: Callable[[], Optional[IRemoteReference]], +) -> IFoolscapStorageServer: + """ + Create an object for interacting with the storage server described by + the given announcement. + + :param node_config: The node configuration to pass to any configured + storage plugins. + + :param config: Configuration specifying desired storage client behavior. + + :param ann: The storage announcement from the storage server we are meant + to communicate with. + + :param server_id: The unique identifier for the server. + + :param get_rref: A function which returns a remote reference to the + server-side object which implements this storage system, if one is + available (otherwise None). 
+
+    :return: An object enabling communication via Foolscap with the server
+        which generated the announcement.
+    """
+    # Try to match the announcement against a plugin.
+    try:
+        furl, storage_server = _storage_from_foolscap_plugin(
+            node_config,
+            config,
+            ann,
+            # Pass in an accessor for our _rref attribute. The value of
+            # the attribute may change over time as connections are lost
+            # and re-established. The _StorageServer should always be
+            # able to get the most up-to-date value.
+            get_rref,
+        )
+    except AnnouncementNotMatched as e:
+        _log.error(
+            'No plugin for storage-server "{nickname}" from plugins: {plugins}',
+            nickname=ann.get("nickname", "<unknown>"),
+            plugins=e.args[0],
+        )
+    except MissingPlugin as e:
+        _log.failure("Missing plugin")
+        ns = _NullStorage()
+        ns.longname = '<missing plugin "{}">'.format(e.args[0])
+        return ns
+    else:
+        return _FoolscapStorage.from_announcement(
+            server_id,
+            furl,
+            ann,
+            storage_server,
+        )
+
+    # Try to match the announcement against the anonymous access scheme.
+    try:
+        furl = ann[u"anonymous-storage-FURL"]
+    except KeyError:
+        # Nope
+        pass
+    else:
+        # See comment above for the _storage_from_foolscap_plugin case
+        # about passing in get_rref.
+        storage_server = _StorageServer(get_rref=get_rref)
+        return _FoolscapStorage.from_announcement(
+            server_id,
+            furl,
+            ann,
+            storage_server,
+        )
+
+    # Nothing matched so we can't talk to this server.
+    return _null_storage
+
 @implementer(IServer)
 class NativeStorageServer(service.MultiService):
     """I hold information about a storage server that we want to connect to.
@@ -758,7 +841,6 @@ class NativeStorageServer(service.MultiService):
         }),
         "application-version": "unknown: no get_version()",
     })
-    log = Logger()
 
     def __init__(self, server_id, ann, tub_maker, handler_overrides, node_config, config=StorageClientConfig()):
         service.MultiService.__init__(self)
@@ -768,7 +850,7 @@ class NativeStorageServer(service.MultiService):
         self._tub_maker = tub_maker
         self._handler_overrides = handler_overrides
 
-        self._storage = self._make_storage_system(node_config, config, ann)
+        self._storage = _make_storage_system(node_config, config, ann, self._server_id, self.get_rref)
 
         self.last_connect_time = None
         self.last_loss_time = None
@@ -778,71 +860,6 @@ class NativeStorageServer(service.MultiService):
         self._trigger_cb = None
         self._on_status_changed = ObserverList()
 
-    def _make_storage_system(self, node_config, config, ann):
-        """
-        :param allmydata.node._Config node_config: The node configuration to pass
-            to any configured storage plugins.
-
-        :param StorageClientConfig config: Configuration specifying desired
-            storage client behavior.
-
-        :param dict ann: The storage announcement from the storage server we
-            are meant to communicate with.
-
-        :return IFoolscapStorageServer: An object enabling communication via
-            Foolscap with the server which generated the announcement.
-        """
-        # Try to match the announcement against a plugin.
-        try:
-            furl, storage_server = _storage_from_foolscap_plugin(
-                node_config,
-                config,
-                ann,
-                # Pass in an accessor for our _rref attribute. The value of
-                # the attribute may change over time as connections are lost
-                # and re-established. The _StorageServer should always be
-                # able to get the most up-to-date value.
-                self.get_rref,
-            )
-        except AnnouncementNotMatched as e:
-            self.log.error(
-                'No plugin for storage-server "{nickname}" from plugins: {plugins}',
-                nickname=ann.get("nickname", "<unknown>"),
-                plugins=e.args[0],
-            )
-        except MissingPlugin as e:
-            self.log.failure("Missing plugin")
-            ns = _NullStorage()
-            ns.longname = '<missing plugin "{}">'.format(e.args[0])
-            return ns
-        else:
-            return _FoolscapStorage.from_announcement(
-                self._server_id,
-                furl,
-                ann,
-                storage_server,
-            )
-
-        # Try to match the announcement against the anonymous access scheme.
-        try:
-            furl = ann[u"anonymous-storage-FURL"]
-        except KeyError:
-            # Nope
-            pass
-        else:
-            # See comment above for the _storage_from_foolscap_plugin case
-            # about passing in get_rref.
-            storage_server = _StorageServer(get_rref=self.get_rref)
-            return _FoolscapStorage.from_announcement(
-                self._server_id,
-                furl,
-                ann,
-                storage_server,
-            )
-
-        # Nothing matched so we can't talk to this server.
-        return _null_storage
-
     def get_permutation_seed(self):
         return self._storage.permutation_seed
     def get_name(self): # keep methodname short

From c4c9d1389ef10236227d5a58927cebbb0907c3a1 Mon Sep 17 00:00:00 2001
From: Jean-Paul Calderone
Date: Tue, 29 Nov 2022 09:47:10 -0500
Subject: [PATCH 026/172] Try (but fail) to demonstrate the longname behavior

---
 setup.py                                  |  2 +-
 src/allmydata/storage_client.py           |  8 ++++----
 src/allmydata/test/test_storage_client.py | 20 +++++++++++++++++---
 3 files changed, 22 insertions(+), 8 deletions(-)

diff --git a/setup.py b/setup.py
index 78f7042a9..480cb0d88 100644
--- a/setup.py
+++ b/setup.py
@@ -111,7 +111,7 @@ install_requires = [
     "pyrsistent",
 
     # A great way to define types of values.
-    "attrs >= 18.2.0",
+    "attrs >= 20.1.0",
 
     # WebSocket library for twisted and asyncio
     "autobahn >= 22.4.3",

diff --git a/src/allmydata/storage_client.py b/src/allmydata/storage_client.py
index 5dc4beb22..2ecd3bc0c 100644
--- a/src/allmydata/storage_client.py
+++ b/src/allmydata/storage_client.py
@@ -39,6 +39,7 @@ from os import urandom
 from configparser import NoSectionError
 
 import attr
+from attr import define
 from hyperlink import DecodedURL
 from zope.interface import (
     Attribute,
@@ -637,6 +638,7 @@ class _FoolscapStorage(object):
 
 
 @implementer(IFoolscapStorageServer)
+@define
 class _NullStorage(object):
     """
     Abstraction for *not* communicating with a storage server of a type with
@@ -650,7 +652,7 @@ class _NullStorage(object):
     lease_seed = hashlib.sha256(b"").digest()
 
     name = "<unsupported>"
-    longname = "<unsupported>"
+    longname: str = "<unsupported>"
 
     def connect_to(self, tub, got_connection):
         return NonReconnector()
@@ -784,9 +786,7 @@ def _make_storage_system(
         )
     except MissingPlugin as e:
         _log.failure("Missing plugin")
-        ns = _NullStorage()
-        ns.longname = '<missing plugin "{}">'.format(e.args[0])
-        return ns
+        return _NullStorage('<missing plugin "{}">'.format(e.args[0]))
     else:
         return _FoolscapStorage.from_announcement(
             server_id,
diff --git a/src/allmydata/test/test_storage_client.py b/src/allmydata/test/test_storage_client.py
index 1a84f35ec..0c05be2e6 100644
--- a/src/allmydata/test/test_storage_client.py
+++ b/src/allmydata/test/test_storage_client.py
@@ -159,7 +159,7 @@ class GetConnectionStatus(unittest.TestCase):
         self.assertTrue(IConnectionStatus.providedBy(connection_status))
 
 
-class UnrecognizedAnnouncement(unittest.TestCase):
+class UnrecognizedAnnouncement(SyncTestCase):
     """
     Tests for handling of announcements that aren't recognized and don't
    use *anonymous-storage-FURL*.
@@ -169,9 +169,14 @@ class UnrecognizedAnnouncement(unittest.TestCase):
     an announcement generated by a storage server plugin which is not
     loaded in the client.
""" + plugin_name = u"tahoe-lafs-testing-v1" ann = { - u"name": u"tahoe-lafs-testing-v1", - u"any-parameter": 12345, + u"storage-options": [ + { + u"name": plugin_name, + u"any-parameter": 12345, + }, + ], } server_id = b"abc" @@ -234,6 +239,15 @@ class UnrecognizedAnnouncement(unittest.TestCase): server.get_foolscap_write_enabler_seed() server.get_nickname() + def test_longname(self) -> None: + """ + ``NativeStorageServer.get_longname`` describes the missing plugin. + """ + server = self.native_storage_server() + self.assertThat( + server.get_longname(), + Equals(''.format(self.plugin_name)), + ) class PluginMatchedAnnouncement(SyncTestCase): From 347d11a83c3cb184a1f77cc1060f613a01cdb13f Mon Sep 17 00:00:00 2001 From: meejah Date: Fri, 2 Dec 2022 01:27:13 -0700 Subject: [PATCH 027/172] fix test, un-log error --- src/allmydata/test/test_storage_client.py | 25 ++++++++++++++++------- 1 file changed, 18 insertions(+), 7 deletions(-) diff --git a/src/allmydata/test/test_storage_client.py b/src/allmydata/test/test_storage_client.py index 0c05be2e6..04f2c7e29 100644 --- a/src/allmydata/test/test_storage_client.py +++ b/src/allmydata/test/test_storage_client.py @@ -77,6 +77,7 @@ from .common import ( UseNode, SameProcessStreamEndpointAssigner, MemoryIntroducerClient, + flush_logged_errors, ) from .common_web import ( do_http, @@ -92,6 +93,8 @@ from allmydata.storage_client import ( IFoolscapStorageServer, NativeStorageServer, StorageFarmBroker, + StorageClientConfig, + MissingPlugin, _FoolscapStorage, _NullStorage, ) @@ -159,7 +162,7 @@ class GetConnectionStatus(unittest.TestCase): self.assertTrue(IConnectionStatus.providedBy(connection_status)) -class UnrecognizedAnnouncement(SyncTestCase): +class UnrecognizedAnnouncement(unittest.TestCase): """ Tests for handling of announcements that aren't recognized and don't use *anonymous-storage-FURL*. @@ -183,7 +186,7 @@ class UnrecognizedAnnouncement(SyncTestCase): def _tub_maker(self, overrides): return Service() - def native_storage_server(self): + def native_storage_server(self, config=None): """ Make a ``NativeStorageServer`` out of an unrecognizable announcement. """ @@ -192,7 +195,8 @@ class UnrecognizedAnnouncement(SyncTestCase): self.ann, self._tub_maker, {}, - EMPTY_CLIENT_CONFIG, + node_config=EMPTY_CLIENT_CONFIG, + config=config or StorageClientConfig(), ) def test_no_exceptions(self): @@ -243,11 +247,18 @@ class UnrecognizedAnnouncement(SyncTestCase): """ ``NativeStorageServer.get_longname`` describes the missing plugin. 
""" - server = self.native_storage_server() - self.assertThat( - server.get_longname(), - Equals(''.format(self.plugin_name)), + server = self.native_storage_server( + StorageClientConfig( + storage_plugins={ + "nothing": {} + } + ) ) + self.assertEqual( + server.get_longname(), + '', + ) + self.flushLoggedErrors(MissingPlugin) class PluginMatchedAnnouncement(SyncTestCase): From 22a7aacb9da710d8112d5f40a9bf5f7e82b2e138 Mon Sep 17 00:00:00 2001 From: meejah Date: Fri, 2 Dec 2022 10:36:41 -0700 Subject: [PATCH 028/172] flake8 --- src/allmydata/test/test_storage_client.py | 1 - 1 file changed, 1 deletion(-) diff --git a/src/allmydata/test/test_storage_client.py b/src/allmydata/test/test_storage_client.py index 04f2c7e29..a92e6723f 100644 --- a/src/allmydata/test/test_storage_client.py +++ b/src/allmydata/test/test_storage_client.py @@ -77,7 +77,6 @@ from .common import ( UseNode, SameProcessStreamEndpointAssigner, MemoryIntroducerClient, - flush_logged_errors, ) from .common_web import ( do_http, From 58e0e3def7fc25a8ed15f7d2203adf8eed0625e4 Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Thu, 16 Mar 2023 08:52:15 -0400 Subject: [PATCH 029/172] see if this fixes the AttributeError --- integration/grid.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/integration/grid.py b/integration/grid.py index 4e5d8a900..cec30b79b 100644 --- a/integration/grid.py +++ b/integration/grid.py @@ -191,7 +191,11 @@ def create_storage_server(reactor, request, temp_dir, introducer, flog_gatherer, ) storage = StorageServer( process=node_process, - protocol=node_process.transport._protocol, + # node_process is a TahoeProcess. its transport is an + # IProcessTransport. in practice, this means it is a + # twisted.internet._baseprocess.BaseProcess. BaseProcess records the + # process protocol as its proto attribute. 
+ protocol=node_process.transport.proto, ) returnValue(storage) From c9dba4d0a4d3074e3de1ce77c05065e411b6f0b8 Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Thu, 16 Mar 2023 09:09:25 -0400 Subject: [PATCH 030/172] Fix a couple other `_protocol` attributes --- integration/grid.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/integration/grid.py b/integration/grid.py index cec30b79b..9b347cf6f 100644 --- a/integration/grid.py +++ b/integration/grid.py @@ -231,7 +231,7 @@ class Client(object): reactor, self.process.node_dir, request, None, ) self.process = process - self.protocol = self.process.transport._protocol + self.protocol = self.process.transport.proto yield await_client_ready(self.process, minimum_number_of_servers=servers) @@ -253,7 +253,7 @@ def create_client(reactor, request, temp_dir, introducer, flog_gatherer, name, w returnValue( Client( process=node_process, - protocol=node_process.transport._protocol, + protocol=node_process.transport.proto, ) ) From e6832dd71ca1ad70ccf856f8d6f74751c82b5a0c Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Thu, 16 Mar 2023 09:37:54 -0400 Subject: [PATCH 031/172] another one --- integration/grid.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/integration/grid.py b/integration/grid.py index 9b347cf6f..8c7e7624b 100644 --- a/integration/grid.py +++ b/integration/grid.py @@ -174,7 +174,7 @@ class StorageServer(object): self.process = yield _run_node( reactor, self.process.node_dir, request, None, ) - self.protocol = self.process.transport._protocol + self.protocol = self.process.transport.proto yield await_client_ready(self.process) From d8ca0176ab2341d42c3cd808bd9c1a166eec36f6 Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Mon, 3 Jul 2023 11:05:29 -0400 Subject: [PATCH 032/172] Pass the correct arguments in. --- integration/conftest.py | 2 +- integration/test_tor.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/integration/conftest.py b/integration/conftest.py index 643295291..43f16d45b 100644 --- a/integration/conftest.py +++ b/integration/conftest.py @@ -214,7 +214,7 @@ def tor_introducer(reactor, temp_dir, flog_gatherer, request): config = read_config(intro_dir, "tub.port") config.set_config("node", "nickname", "introducer-tor") config.set_config("node", "web.port", "4561") - config.set_config("node", "log_gatherer.furl", flog_gatherer) + config.set_config("node", "log_gatherer.furl", flog_gatherer.furl) # "tahoe run" is consistent across Linux/macOS/Windows, unlike the old # "start" command. diff --git a/integration/test_tor.py b/integration/test_tor.py index 32572276a..4d0ce4f16 100644 --- a/integration/test_tor.py +++ b/integration/test_tor.py @@ -128,7 +128,7 @@ def _create_anonymous_node(reactor, name, control_port, request, temp_dir, flog_ # Which services should this client connect to? 
write_introducer(node_dir, "default", introducer_furl) - util.basic_node_configuration(request, flog_gatherer, node_dir.path) + util.basic_node_configuration(request, flog_gatherer.furl, node_dir.path) config = read_config(node_dir.path, "tub.port") config.set_config("tor", "onion", "true") From f4ed5cb0f347abb680f7ba143119877b7610a604 Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Mon, 3 Jul 2023 11:30:35 -0400 Subject: [PATCH 033/172] Fix lint --- integration/conftest.py | 1 - 1 file changed, 1 deletion(-) diff --git a/integration/conftest.py b/integration/conftest.py index 43f16d45b..6de2e84af 100644 --- a/integration/conftest.py +++ b/integration/conftest.py @@ -30,7 +30,6 @@ import pytest import pytest_twisted from .util import ( - _CollectOutputProtocol, _MagicTextProtocol, _DumpOutputProtocol, _ProcessExitedProtocol, From 76f8ab617276e07f0cab382216d40c9dcf9b81c5 Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Mon, 3 Jul 2023 13:07:56 -0400 Subject: [PATCH 034/172] Set the config the way we were in latest code. --- integration/grid.py | 18 +++++------------- 1 file changed, 5 insertions(+), 13 deletions(-) diff --git a/integration/grid.py b/integration/grid.py index ec8b1e0e0..794639b2f 100644 --- a/integration/grid.py +++ b/integration/grid.py @@ -43,6 +43,7 @@ from twisted.internet.protocol import ( ) from twisted.internet.error import ProcessTerminated +from allmydata.node import read_config from .util import ( _CollectOutputProtocol, _MagicTextProtocol, @@ -306,16 +307,6 @@ def create_introducer(reactor, request, temp_dir, flog_gatherer, port): """ Run a new Introducer and return an Introducer instance. """ - config = ( - '[node]\n' - 'nickname = introducer{port}\n' - 'web.port = {port}\n' - 'log_gatherer.furl = {log_furl}\n' - ).format( - port=port, - log_furl=flog_gatherer.furl, - ) - intro_dir = join(temp_dir, 'introducer{}'.format(port)) if not exists(intro_dir): @@ -334,9 +325,10 @@ def create_introducer(reactor, request, temp_dir, flog_gatherer, port): ) yield done_proto.done - # over-write the config file with our stuff - with open(join(intro_dir, 'tahoe.cfg'), 'w') as f: - f.write(config) + config = read_config(intro_dir, "tub.port") + config.set_config("node", "nickname", f"introducer-{port}") + config.set_config("node", "web.port", f"{port}") + config.set_config("node", "log_gatherer.furl", flog_gatherer.furl) # on windows, "tahoe start" means: run forever in the foreground, # but on linux it means daemonize. "tahoe run" is consistent From 4c8a20c8767bf27881e8151f049dff856780bdb9 Mon Sep 17 00:00:00 2001 From: meejah Date: Wed, 12 Jul 2023 18:33:43 -0600 Subject: [PATCH 035/172] When finalizing a process, we can ignore the case where it isn't running --- integration/grid.py | 3 +-- integration/util.py | 45 +++++++++++++++++++++------------------------ 2 files changed, 22 insertions(+), 26 deletions(-) diff --git a/integration/grid.py b/integration/grid.py index 794639b2f..46fde576e 100644 --- a/integration/grid.py +++ b/integration/grid.py @@ -235,8 +235,7 @@ class Client(object): self.protocol = self.process.transport.proto yield await_client_ready(self.process, minimum_number_of_servers=servers) - - # XXX add stop / start / restart + # XXX add stop / start ? # ...maybe "reconfig" of some kind? 
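For orientation before the util.py hunks that follow: the cleanup behaviour this
patch settles on is "signal TERM if there is still a pid, and treat an
already-gone process as success". A minimal sketch of that pattern (the helper
name terminate_if_running is invented here for illustration; the real helper is
_cleanup_process_async, shown below):

    from twisted.internet.error import ProcessExitedAlready

    def terminate_if_running(transport):
        # A transport whose process already went away reports pid None;
        # "restart" flows legitimately hit this, so it is not an error.
        if transport.pid is None:
            return
        try:
            transport.signalProcess('TERM')
        except ProcessExitedAlready:
            # The OS process exited between the pid check and the signal;
            # the goal (a dead process) is already achieved.
            pass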
diff --git a/integration/util.py b/integration/util.py index 6a3ec57f3..ff54b1831 100644 --- a/integration/util.py +++ b/integration/util.py @@ -177,38 +177,33 @@ class _MagicTextProtocol(ProcessProtocol): sys.stdout.write(self.name + line + "\n") -def _cleanup_process_async(transport: IProcessTransport, allow_missing: bool) -> None: +def _cleanup_process_async(transport: IProcessTransport) -> None: """ If the given process transport seems to still be associated with a running process, send a SIGTERM to that process. :param transport: The transport to use. - :param allow_missing: If ``True`` then it is not an error for the - transport to have no associated process. Otherwise, an exception will - be raised in that case. - :raise: ``ValueError`` if ``allow_missing`` is ``False`` and the transport has no process. """ if transport.pid is None: - if allow_missing: - print("Process already cleaned up and that's okay.") - return - else: - raise ValueError("Process is not running") + # in cases of "restart", we will have registered a finalizer + # that will kill the process -- but already explicitly killed + # it (and then ran again) due to the "restart". So, if the + # process is already killed, our job is done. + print("Process already cleaned up and that's okay.") + return print("signaling {} with TERM".format(transport.pid)) try: transport.signalProcess('TERM') except ProcessExitedAlready: # The transport object thought it still had a process but the real OS # process has already exited. That's fine. We accomplished what we - # wanted to. We don't care about ``allow_missing`` here because - # there's no way we could have known the real OS process already - # exited. + # wanted to. pass -def _cleanup_tahoe_process(tahoe_transport, exited, allow_missing=False): +def _cleanup_tahoe_process(tahoe_transport, exited): """ Terminate the given process with a kill signal (SIGTERM on POSIX, TerminateProcess on Windows). @@ -219,7 +214,7 @@ def _cleanup_tahoe_process(tahoe_transport, exited, allow_missing=False): :return: After the process has exited. """ from twisted.internet import reactor - _cleanup_process_async(tahoe_transport, allow_missing=allow_missing) + _cleanup_process_async(tahoe_transport) print(f"signaled, blocking on exit {exited}") block_with_timeout(exited, reactor) print("exited, goodbye") @@ -282,16 +277,20 @@ class TahoeProcess(object): ) def kill(self): - """Kill the process, block until it's done.""" + """ + Kill the process, block until it's done. + Does nothing if the process is already stopped (or never started). + """ print(f"TahoeProcess.kill({self.transport.pid} / {self.node_dir})") _cleanup_tahoe_process(self.transport, self.transport.exited) def kill_async(self): """ Kill the process, return a Deferred that fires when it's done. + Does nothing if the process is already stopped (or never started). """ print(f"TahoeProcess.kill_async({self.transport.pid} / {self.node_dir})") - _cleanup_process_async(self.transport, allow_missing=False) + _cleanup_process_async(self.transport) return self.transport.exited def restart_async(self, reactor: IReactorProcess, request: Any) -> Deferred: @@ -302,7 +301,7 @@ class TahoeProcess(object): handle requests. """ d = self.kill_async() - d.addCallback(lambda ignored: _run_node(reactor, self.node_dir, request, None, finalize=False)) + d.addCallback(lambda ignored: _run_node(reactor, self.node_dir, request, None)) def got_new_process(proc): # Grab the new transport since the one we had before is no longer # valid after the stop/start cycle. 
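As a usage sketch (a hypothetical test, not part of this series): a caller
drives the kill/re-spawn cycle through the Deferred returned by restart_async,
assuming it fires once the replacement node is serving again:

    import pytest_twisted

    @pytest_twisted.inlineCallbacks
    def test_restart_cycle(reactor, request, alice):
        # terminate the old "tahoe run" process, spawn a fresh one from the
        # same node directory, and wait for it to come back up
        yield alice.process.restart_async(reactor, request)
        assert alice.process.transport.pid is not None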
@@ -314,7 +313,7 @@ class TahoeProcess(object):
         return "<TahoeProcess in '{}'>".format(self._node_dir)
 
 
-def _run_node(reactor, node_dir, request, magic_text, finalize=True):
+def _run_node(reactor, node_dir, request, magic_text):
     """
     Run a tahoe process from its node_dir.
 
@@ -343,8 +342,7 @@ def _run_node(reactor, node_dir, request, magic_text):
         node_dir,
     )
 
-    if finalize:
-        request.addfinalizer(tahoe_process.kill)
+    request.addfinalizer(tahoe_process.kill)
 
     d = protocol.magic_seen
     d.addCallback(lambda ignored: tahoe_process)
@@ -386,8 +384,7 @@ def _create_node(reactor, request, temp_dir, introducer_furl, flog_gatherer, nam
                  magic_text=None,
                  needed=2,
                  happy=3,
-                 total=4,
-                 finalize=True):
+                 total=4):
     """
     Helper to create a single node, run it and return the instance
     spawnProcess returned (ITransport)
@@ -427,7 +424,7 @@ def _create_node(reactor, request, temp_dir, introducer_furl, flog_gatherer, nam
     d = Deferred()
     d.callback(None)
    d.addCallback(lambda _: created_d)
-    d.addCallback(lambda _: _run_node(reactor, node_dir, request, magic_text, finalize=finalize))
+    d.addCallback(lambda _: _run_node(reactor, node_dir, request, magic_text))
     return d
 

From 0b9506dfada0c26d2fd305e9ed339bc5e2d6562c Mon Sep 17 00:00:00 2001
From: meejah
Date: Thu, 13 Jul 2023 17:53:27 -0600
Subject: [PATCH 036/172] try new-enough to avoid a type error

---
 setup.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setup.py b/setup.py
index 7ca2650d5..9ebdae550 100644
--- a/setup.py
+++ b/setup.py
@@ -151,7 +151,7 @@ install_requires = [
     "pycddl >= 0.4",
 
     # Command-line parsing
-    "click >= 7.0",
+    "click >= 8.1.1",
 
     # for pid-file support
     "psutil",

From a4801cc2ebe396dd29b284b2acc8fecd93ec405b Mon Sep 17 00:00:00 2001
From: meejah
Date: Mon, 17 Jul 2023 17:10:45 -0600
Subject: [PATCH 037/172] CI uses tox less than 4

---
 setup.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/setup.py b/setup.py
index 9ebdae550..a2e870e8b 100644
--- a/setup.py
+++ b/setup.py
@@ -436,6 +436,8 @@ setup(name="tahoe-lafs", # also set in __init__.py
             "pytest-timeout",
             # Does our OpenMetrics endpoint adhere to the spec:
             "prometheus-client == 0.11.0",
+            # CI uses "tox<4", change here too if that becomes different
+            "tox < 4",
         ] + tor_requires + i2p_requires,
         "tor": tor_requires,
         "i2p": i2p_requires,

From 6c5cb02ee5e667590d8383944054debada1f1f33 Mon Sep 17 00:00:00 2001
From: meejah
Date: Mon, 17 Jul 2023 17:11:00 -0600
Subject: [PATCH 038/172] shush mypy

---
 src/allmydata/cli/grid_manager.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/allmydata/cli/grid_manager.py b/src/allmydata/cli/grid_manager.py
index dfefeb576..d5a5d7e35 100644
--- a/src/allmydata/cli/grid_manager.py
+++ b/src/allmydata/cli/grid_manager.py
@@ -226,4 +226,4 @@ def _config_path_from_option(config: str) -> Optional[FilePath]:
 
 
 if __name__ == '__main__':
-    grid_manager()
+    grid_manager()  # type: ignore

From 45898ff8b8ae6218e52397d1d3c55ad9d71fed2e Mon Sep 17 00:00:00 2001
From: meejah
Date: Mon, 24 Jul 2023 20:08:41 -0600
Subject: [PATCH 039/172] refactor: make sftp tests (etc) work with 'grid'
 refactoring

---
 integration/conftest.py          | 67 ++++------------------
 integration/grid.py              | 97 ++++++++++++++++++++++++++++----
 integration/test_get_put.py      | 30 +++++-----
 integration/test_grid_manager.py |  4 +-
 integration/test_sftp.py         | 17 +++---
 integration/test_vectors.py      | 16 +++---
 integration/test_web.py          | 40 ++++++-------
 integration/util.py              |  3 +-
 8 files changed, 151 insertions(+), 123 deletions(-)

diff --git a/integration/conftest.py 
b/integration/conftest.py index 6de2e84af..837b54aa1 100644 --- a/integration/conftest.py +++ b/integration/conftest.py @@ -162,6 +162,10 @@ def flog_gatherer(reactor, temp_dir, flog_binary, request): @pytest.fixture(scope='session') @log_call(action_type=u"integration:grid", include_args=[]) def grid(reactor, request, temp_dir, flog_gatherer, port_allocator): + # XXX think: this creates an "empty" grid (introducer, no nodes); + # do we want to ensure it has some minimum storage-nodes at least? + # (that is, semantically does it make sense that 'a grid' is + # essentially empty, or not?) g = pytest_twisted.blockon( create_grid(reactor, request, temp_dir, flog_gatherer, port_allocator) ) @@ -271,64 +275,17 @@ def storage_nodes(grid): assert ok, "Storage node creation failed: {}".format(value) return grid.storage_servers -@pytest.fixture(scope="session") -def alice_sftp_client_key_path(temp_dir): - # The client SSH key path is typically going to be somewhere else (~/.ssh, - # typically), but for convenience sake for testing we'll put it inside node. - return join(temp_dir, "alice", "private", "ssh_client_rsa_key") @pytest.fixture(scope='session') @log_call(action_type=u"integration:alice", include_args=[], include_result=False) -def alice( - reactor, - temp_dir, - introducer_furl, - flog_gatherer, - storage_nodes, - alice_sftp_client_key_path, - request, -): - process = pytest_twisted.blockon( - _create_node( - reactor, request, temp_dir, introducer_furl, flog_gatherer, "alice", - web_port="tcp:9980:interface=localhost", - storage=False, - ) - ) - pytest_twisted.blockon(await_client_ready(process)) - - # 1. Create a new RW directory cap: - cli(process, "create-alias", "test") - rwcap = loads(cli(process, "list-aliases", "--json"))["test"]["readwrite"] - - # 2. Enable SFTP on the node: - host_ssh_key_path = join(process.node_dir, "private", "ssh_host_rsa_key") - accounts_path = join(process.node_dir, "private", "accounts") - with open(join(process.node_dir, "tahoe.cfg"), "a") as f: - f.write("""\ -[sftpd] -enabled = true -port = tcp:8022:interface=127.0.0.1 -host_pubkey_file = {ssh_key_path}.pub -host_privkey_file = {ssh_key_path} -accounts.file = {accounts_path} -""".format(ssh_key_path=host_ssh_key_path, accounts_path=accounts_path)) - generate_ssh_key(host_ssh_key_path) - - # 3. Add a SFTP access file with an SSH key for auth. - generate_ssh_key(alice_sftp_client_key_path) - # Pub key format is "ssh-rsa ". We want the key. - ssh_public_key = open(alice_sftp_client_key_path + ".pub").read().strip().split()[1] - with open(accounts_path, "w") as f: - f.write("""\ -alice-key ssh-rsa {ssh_public_key} {rwcap} -""".format(rwcap=rwcap, ssh_public_key=ssh_public_key)) - - # 4. Restart the node with new SFTP config. 
- pytest_twisted.blockon(process.restart_async(reactor, request)) - pytest_twisted.blockon(await_client_ready(process)) - print(f"Alice pid: {process.transport.pid}") - return process +def alice(reactor, request, grid, storage_nodes): + """ + :returns grid.Client: the associated instance for Alice + """ + alice = pytest_twisted.blockon(grid.add_client("alice")) + pytest_twisted.blockon(alice.add_sftp(reactor, request)) + print(f"Alice pid: {alice.process.transport.pid}") + return alice @pytest.fixture(scope='session') diff --git a/integration/grid.py b/integration/grid.py index 46fde576e..fe3befd3a 100644 --- a/integration/grid.py +++ b/integration/grid.py @@ -10,6 +10,7 @@ rely on 'the' global grid as provided by fixtures like 'alice' or from os import mkdir, listdir from os.path import join, exists +from json import loads from tempfile import mktemp from time import sleep @@ -26,6 +27,7 @@ from twisted.internet.defer import ( inlineCallbacks, returnValue, maybeDeferred, + Deferred, ) from twisted.internet.task import ( deferLater, @@ -54,19 +56,20 @@ from .util import ( _tahoe_runner_optional_coverage, TahoeProcess, await_client_ready, + generate_ssh_key, + cli, + reconfigure, ) import attr import pytest_twisted -# further directions: -# - "Grid" is unused, basically -- tie into the rest? -# - could make a Grid instance mandatory for create_* calls -# - could instead make create_* calls methods of Grid -# - Bring more 'util' or 'conftest' code into here -# - stop()/start()/restart() methods on StorageServer etc -# - more-complex stuff like config changes (which imply a restart too)? +# currently, we pass a "request" around a bunch but it seems to only +# be for addfinalizer() calls. +# - is "keeping" a request like that okay? What if it's a session-scoped one? +# (i.e. in Grid etc) +# - maybe limit to "a callback to hang your cleanup off of" (instead of request)? @attr.s @@ -170,6 +173,8 @@ class StorageServer(object): Note that self.process and self.protocol will be new instances after this. """ + # XXX per review comments, _can_ we make this "return a new + # instance" instead of mutating? self.process.transport.signalProcess('TERM') yield self.protocol.exited self.process = yield _run_node( @@ -213,6 +218,27 @@ class Client(object): protocol = attr.ib( validator=attr.validators.provides(IProcessProtocol) ) + request = attr.ib() # original request, for addfinalizer() + +## XXX convenience? or confusion? +# @property +# def node_dir(self): +# return self.process.node_dir + + @inlineCallbacks + def reconfigure_zfec(self, reactor, request, zfec_params, convergence=None, max_segment_size=None): + """ + Reconfigure the ZFEC parameters for this node + """ + # XXX this is a stop-gap to keep tests running "as is" + # -> we should fix the tests so that they create a new client + # in the grid with the required parameters, instead of + # re-configuring Alice (or whomever) + + rtn = yield Deferred.fromCoroutine( + reconfigure(reactor, self.request, self.process, zfec_params, convergence, max_segment_size) + ) + return rtn @inlineCallbacks def restart(self, reactor, request, servers=1): @@ -226,6 +252,8 @@ class Client(object): Note that self.process and self.protocol will be new instances after this. """ + # XXX similar to above, can we make this return a new instance + # instead of mutating? 
self.process.transport.signalProcess('TERM') yield self.protocol.exited process = yield _run_node( @@ -235,8 +263,55 @@ class Client(object): self.protocol = self.process.transport.proto yield await_client_ready(self.process, minimum_number_of_servers=servers) - # XXX add stop / start ? - # ...maybe "reconfig" of some kind? + @inlineCallbacks + def add_sftp(self, reactor, request): + """ + """ + # if other things need to add or change configuration, further + # refactoring could be useful here (i.e. move reconfigure + # parts to their own functions) + + # XXX why do we need an alias? + # 1. Create a new RW directory cap: + cli(self.process, "create-alias", "test") + rwcap = loads(cli(self.process, "list-aliases", "--json"))["test"]["readwrite"] + + # 2. Enable SFTP on the node: + host_ssh_key_path = join(self.process.node_dir, "private", "ssh_host_rsa_key") + sftp_client_key_path = join(self.process.node_dir, "private", "ssh_client_rsa_key") + accounts_path = join(self.process.node_dir, "private", "accounts") + with open(join(self.process.node_dir, "tahoe.cfg"), "a") as f: + f.write( + ("\n\n[sftpd]\n" + "enabled = true\n" + "port = tcp:8022:interface=127.0.0.1\n" + "host_pubkey_file = {ssh_key_path}.pub\n" + "host_privkey_file = {ssh_key_path}\n" + "accounts.file = {accounts_path}\n").format( + ssh_key_path=host_ssh_key_path, + accounts_path=accounts_path, + ) + ) + generate_ssh_key(host_ssh_key_path) + + # 3. Add a SFTP access file with an SSH key for auth. + generate_ssh_key(sftp_client_key_path) + # Pub key format is "ssh-rsa ". We want the key. + with open(sftp_client_key_path + ".pub") as pubkey_file: + ssh_public_key = pubkey_file.read().strip().split()[1] + with open(accounts_path, "w") as f: + f.write( + "alice-key ssh-rsa {ssh_public_key} {rwcap}\n".format( + rwcap=rwcap, + ssh_public_key=ssh_public_key, + ) + ) + + # 4. Restart the node with new SFTP config. + print("restarting for SFTP") + yield self.restart(reactor, request) + print("restart done") + # XXX i think this is broken because we're "waiting for ready" during first bootstrap? or something? @inlineCallbacks @@ -254,6 +329,7 @@ def create_client(reactor, request, temp_dir, introducer, flog_gatherer, name, w Client( process=node_process, protocol=node_process.transport.proto, + request=request, ) ) @@ -370,7 +446,7 @@ class Grid(object): Represents an entire Tahoe Grid setup A Grid includes an Introducer, Flog Gatherer and some number of - Storage Servers. + Storage Servers. Optionally includes Clients. """ _reactor = attr.ib() @@ -436,7 +512,6 @@ class Grid(object): returnValue(client) - # XXX THINK can we tie a whole *grid* to a single request? 
(I think # that's all that makes sense) @inlineCallbacks diff --git a/integration/test_get_put.py b/integration/test_get_put.py index e30a34f97..536185ef8 100644 --- a/integration/test_get_put.py +++ b/integration/test_get_put.py @@ -8,9 +8,8 @@ from subprocess import Popen, PIPE, check_output, check_call import pytest from twisted.internet import reactor from twisted.internet.threads import blockingCallFromThread -from twisted.internet.defer import Deferred -from .util import run_in_thread, cli, reconfigure +from .util import run_in_thread, cli DATA = b"abc123 this is not utf-8 decodable \xff\x00\x33 \x11" try: @@ -23,7 +22,7 @@ else: @pytest.fixture(scope="session") def get_put_alias(alice): - cli(alice, "create-alias", "getput") + cli(alice.process, "create-alias", "getput") def read_bytes(path): @@ -39,14 +38,14 @@ def test_put_from_stdin(alice, get_put_alias, tmpdir): """ tempfile = str(tmpdir.join("file")) p = Popen( - ["tahoe", "--node-directory", alice.node_dir, "put", "-", "getput:fromstdin"], + ["tahoe", "--node-directory", alice.process.node_dir, "put", "-", "getput:fromstdin"], stdin=PIPE ) p.stdin.write(DATA) p.stdin.close() assert p.wait() == 0 - cli(alice, "get", "getput:fromstdin", tempfile) + cli(alice.process, "get", "getput:fromstdin", tempfile) assert read_bytes(tempfile) == DATA @@ -58,10 +57,10 @@ def test_get_to_stdout(alice, get_put_alias, tmpdir): tempfile = tmpdir.join("file") with tempfile.open("wb") as f: f.write(DATA) - cli(alice, "put", str(tempfile), "getput:tostdout") + cli(alice.process, "put", str(tempfile), "getput:tostdout") p = Popen( - ["tahoe", "--node-directory", alice.node_dir, "get", "getput:tostdout", "-"], + ["tahoe", "--node-directory", alice.process.node_dir, "get", "getput:tostdout", "-"], stdout=PIPE ) assert p.stdout.read() == DATA @@ -78,11 +77,11 @@ def test_large_file(alice, get_put_alias, tmp_path): tempfile = tmp_path / "file" with tempfile.open("wb") as f: f.write(DATA * 1_000_000) - cli(alice, "put", str(tempfile), "getput:largefile") + cli(alice.process, "put", str(tempfile), "getput:largefile") outfile = tmp_path / "out" check_call( - ["tahoe", "--node-directory", alice.node_dir, "get", "getput:largefile", str(outfile)], + ["tahoe", "--node-directory", alice.process.node_dir, "get", "getput:largefile", str(outfile)], ) assert outfile.read_bytes() == tempfile.read_bytes() @@ -104,31 +103,30 @@ def test_upload_download_immutable_different_default_max_segment_size(alice, get def set_segment_size(segment_size): return blockingCallFromThread( reactor, - lambda: Deferred.fromCoroutine(reconfigure( + lambda: alice.reconfigure_zfec( reactor, request, - alice, (1, 1, 1), None, max_segment_size=segment_size - )) + ) ) # 1. Upload file 1 with default segment size set to 1MB set_segment_size(1024 * 1024) - cli(alice, "put", str(tempfile), "getput:seg1024kb") + cli(alice.process, "put", str(tempfile), "getput:seg1024kb") # 2. Download file 1 with default segment size set to 128KB set_segment_size(128 * 1024) assert large_data == check_output( - ["tahoe", "--node-directory", alice.node_dir, "get", "getput:seg1024kb", "-"] + ["tahoe", "--node-directory", alice.process.node_dir, "get", "getput:seg1024kb", "-"] ) # 3. Upload file 2 with default segment size set to 128KB - cli(alice, "put", str(tempfile), "getput:seg128kb") + cli(alice.process, "put", str(tempfile), "getput:seg128kb") # 4. 
Download file 2 with default segment size set to 1MB
     set_segment_size(1024 * 1024)
     assert large_data == check_output(
-        ["tahoe", "--node-directory", alice.node_dir, "get", "getput:seg128kb", "-"]
+        ["tahoe", "--node-directory", alice.process.node_dir, "get", "getput:seg128kb", "-"]
     )
 
diff --git a/integration/test_grid_manager.py b/integration/test_grid_manager.py
index 1856ef435..437fe7455 100644
--- a/integration/test_grid_manager.py
+++ b/integration/test_grid_manager.py
@@ -173,7 +173,7 @@ def test_add_remove_client_file(reactor, request, temp_dir):
 
 
 @pytest_twisted.inlineCallbacks
-def test_reject_storage_server(reactor, request, temp_dir, flog_gatherer, port_allocator):
+def _test_reject_storage_server(reactor, request, temp_dir, flog_gatherer, port_allocator):
     """
     A client with happiness=2 fails to upload to a Grid when it is
     using Grid Manager and there is only 1 storage server with a valid
@@ -252,7 +252,7 @@ def test_reject_storage_server(reactor, request, temp_dir, flog_gatherer, port_a
 
 
 @pytest_twisted.inlineCallbacks
-def test_accept_storage_server(reactor, request, temp_dir, flog_gatherer, port_allocator):
+def _test_accept_storage_server(reactor, request, temp_dir, flog_gatherer, port_allocator):
     """
     Successfully upload to a Grid Manager enabled Grid.
     """
diff --git a/integration/test_sftp.py b/integration/test_sftp.py
index 3fdbb56d7..01ddfdf8a 100644
--- a/integration/test_sftp.py
+++ b/integration/test_sftp.py
@@ -72,7 +72,7 @@ def test_bad_account_password_ssh_key(alice, tmpdir):
     another_key = os.path.join(str(tmpdir), "ssh_key")
     generate_ssh_key(another_key)
 
-    good_key = RSAKey(filename=os.path.join(alice.node_dir, "private", "ssh_client_rsa_key"))
+    good_key = RSAKey(filename=os.path.join(alice.process.node_dir, "private", "ssh_client_rsa_key"))
     bad_key = RSAKey(filename=another_key)
 
     # Wrong key:
@@ -87,17 +87,16 @@ def test_bad_account_password_ssh_key(alice, tmpdir):
         "username": "someoneelse",
         "pkey": good_key,
     })
-def sftp_client_key(node):
+
+def sftp_client_key(client):
+    """
+    :return RSAKey: the RSA client key associated with this grid.Client
+    """
+    # XXX move to Client / grid.py?
     return RSAKey(
-        filename=os.path.join(node.node_dir, "private", "ssh_client_rsa_key"),
+        filename=os.path.join(client.process.node_dir, "private", "ssh_client_rsa_key"),
     )
 
-def test_sftp_client_key_exists(alice, alice_sftp_client_key_path):
-    """
-    Weakly validate the sftp client key fixture by asserting that *something*
-    exists at the supposed key path.
-    """
-    assert os.path.exists(alice_sftp_client_key_path)
 
 @run_in_thread
 def test_ssh_key_auth(alice):
diff --git a/integration/test_vectors.py b/integration/test_vectors.py
index 6e7b5746a..13a451d1c 100644
--- a/integration/test_vectors.py
+++ b/integration/test_vectors.py
@@ -15,7 +15,8 @@ from pytest_twisted import ensureDeferred
 
 from . import vectors
 from .vectors import parameters
-from .util import reconfigure, upload, TahoeProcess
+from .util import reconfigure, upload
+from .grid import Client
 
 @mark.parametrize('convergence', parameters.CONVERGENCE_SECRETS)
 def test_convergence(convergence):
@@ -36,11 +37,11 @@ async def test_capability(reactor, request, alice, case, expected):
     computed value. 
""" # rewrite alice's config to match params and convergence - await reconfigure( - reactor, request, alice, (1, case.params.required, case.params.total), case.convergence, case.segment_size) + await alice.reconfigure_zfec( + reactor, request, (1, case.params.required, case.params.total), case.convergence, case.segment_size) # upload data in the correct format - actual = upload(alice, case.fmt, case.data) + actual = upload(alice.process, case.fmt, case.data) # compare the resulting cap to the expected result assert actual == expected @@ -82,7 +83,7 @@ async def skiptest_generate(reactor, request, alice): async def generate( reactor, request, - alice: TahoeProcess, + alice: Client, cases: Iterator[vectors.Case], ) -> AsyncGenerator[[vectors.Case, str], None]: """ @@ -106,10 +107,9 @@ async def generate( # reliability of this generator, be happy if we can put shares anywhere happy = 1 for case in cases: - await reconfigure( + await alice.reconfigure_zfec( reactor, request, - alice, (happy, case.params.required, case.params.total), case.convergence, case.segment_size @@ -117,5 +117,5 @@ async def generate( # Give the format a chance to make an RSA key if it needs it. case = evolve(case, fmt=case.fmt.customize()) - cap = upload(alice, case.fmt, case.data) + cap = upload(alice.process, case.fmt, case.data) yield case, cap diff --git a/integration/test_web.py b/integration/test_web.py index b863a27fe..01f69bca0 100644 --- a/integration/test_web.py +++ b/integration/test_web.py @@ -33,7 +33,7 @@ def test_index(alice): """ we can download the index file """ - util.web_get(alice, u"") + util.web_get(alice.process, u"") @run_in_thread @@ -41,7 +41,7 @@ def test_index_json(alice): """ we can download the index file as json """ - data = util.web_get(alice, u"", params={u"t": u"json"}) + data = util.web_get(alice.process, u"", params={u"t": u"json"}) # it should be valid json json.loads(data) @@ -55,7 +55,7 @@ def test_upload_download(alice): FILE_CONTENTS = u"some contents" readcap = util.web_post( - alice, u"uri", + alice.process, u"uri", data={ u"t": u"upload", u"format": u"mdmf", @@ -67,7 +67,7 @@ def test_upload_download(alice): readcap = readcap.strip() data = util.web_get( - alice, u"uri", + alice.process, u"uri", params={ u"uri": readcap, u"filename": u"boom", @@ -85,11 +85,11 @@ def test_put(alice): FILE_CONTENTS = b"added via PUT" * 20 resp = requests.put( - util.node_url(alice.node_dir, u"uri"), + util.node_url(alice.process.node_dir, u"uri"), data=FILE_CONTENTS, ) cap = allmydata.uri.from_string(resp.text.strip().encode('ascii')) - cfg = alice.get_config() + cfg = alice.process.get_config() assert isinstance(cap, allmydata.uri.CHKFileURI) assert cap.size == len(FILE_CONTENTS) assert cap.total_shares == int(cfg.get_config("client", "shares.total")) @@ -116,7 +116,7 @@ def test_deep_stats(alice): URIs work """ resp = requests.post( - util.node_url(alice.node_dir, "uri"), + util.node_url(alice.process.node_dir, "uri"), params={ "format": "sdmf", "t": "mkdir", @@ -130,7 +130,7 @@ def test_deep_stats(alice): uri = url_unquote(resp.url) assert 'URI:DIR2:' in uri dircap = uri[uri.find("URI:DIR2:"):].rstrip('/') - dircap_uri = util.node_url(alice.node_dir, "uri/{}".format(url_quote(dircap))) + dircap_uri = util.node_url(alice.process.node_dir, "uri/{}".format(url_quote(dircap))) # POST a file into this directory FILE_CONTENTS = u"a file in a directory" @@ -176,7 +176,7 @@ def test_deep_stats(alice): while tries > 0: tries -= 1 resp = requests.get( - util.node_url(alice.node_dir, 
u"operations/something_random"), + util.node_url(alice.process.node_dir, u"operations/something_random"), ) d = json.loads(resp.content) if d['size-literal-files'] == len(FILE_CONTENTS): @@ -201,21 +201,21 @@ def test_status(alice): FILE_CONTENTS = u"all the Important Data of alice\n" * 1200 resp = requests.put( - util.node_url(alice.node_dir, u"uri"), + util.node_url(alice.process.node_dir, u"uri"), data=FILE_CONTENTS, ) cap = resp.text.strip() print("Uploaded data, cap={}".format(cap)) resp = requests.get( - util.node_url(alice.node_dir, u"uri/{}".format(url_quote(cap))), + util.node_url(alice.process.node_dir, u"uri/{}".format(url_quote(cap))), ) print("Downloaded {} bytes of data".format(len(resp.content))) assert str(resp.content, "ascii") == FILE_CONTENTS resp = requests.get( - util.node_url(alice.node_dir, "status"), + util.node_url(alice.process.node_dir, "status"), ) dom = html5lib.parse(resp.content) @@ -229,7 +229,7 @@ def test_status(alice): for href in hrefs: if href == u"/" or not href: continue - resp = requests.get(util.node_url(alice.node_dir, href)) + resp = requests.get(util.node_url(alice.process.node_dir, href)) if href.startswith(u"/status/up"): assert b"File Upload Status" in resp.content if b"Total Size: %d" % (len(FILE_CONTENTS),) in resp.content: @@ -241,7 +241,7 @@ def test_status(alice): # download the specialized event information resp = requests.get( - util.node_url(alice.node_dir, u"{}/event_json".format(href)), + util.node_url(alice.process.node_dir, u"{}/event_json".format(href)), ) js = json.loads(resp.content) # there's usually just one "read" operation, but this can handle many .. @@ -264,14 +264,14 @@ async def test_directory_deep_check(reactor, request, alice): required = 2 total = 4 - await util.reconfigure(reactor, request, alice, (happy, required, total), convergence=None) + await alice.reconfigure_zfec(reactor, request, (happy, required, total), convergence=None) await deferToThread(_test_directory_deep_check_blocking, alice) def _test_directory_deep_check_blocking(alice): # create a directory resp = requests.post( - util.node_url(alice.node_dir, u"uri"), + util.node_url(alice.process.node_dir, u"uri"), params={ u"t": u"mkdir", u"redirect_to_result": u"true", @@ -320,7 +320,7 @@ def _test_directory_deep_check_blocking(alice): print("Uploaded data1, cap={}".format(cap1)) resp = requests.get( - util.node_url(alice.node_dir, u"uri/{}".format(url_quote(cap0))), + util.node_url(alice.process.node_dir, u"uri/{}".format(url_quote(cap0))), params={u"t": u"info"}, ) @@ -484,14 +484,14 @@ def test_mkdir_with_children(alice): # create a file to put in our directory FILE_CONTENTS = u"some file contents\n" * 500 resp = requests.put( - util.node_url(alice.node_dir, u"uri"), + util.node_url(alice.process.node_dir, u"uri"), data=FILE_CONTENTS, ) filecap = resp.content.strip() # create a (sub) directory to put in our directory resp = requests.post( - util.node_url(alice.node_dir, u"uri"), + util.node_url(alice.process.node_dir, u"uri"), params={ u"t": u"mkdir", } @@ -534,7 +534,7 @@ def test_mkdir_with_children(alice): # create a new directory with one file and one sub-dir (all-at-once) resp = util.web_post( - alice, u"uri", + alice.process, u"uri", params={u"t": "mkdir-with-children"}, data=json.dumps(meta), ) diff --git a/integration/util.py b/integration/util.py index ff54b1831..b614a84bd 100644 --- a/integration/util.py +++ b/integration/util.py @@ -741,7 +741,6 @@ class SSK: def load(cls, params: dict) -> SSK: assert params.keys() == {"format", "mutable", 
"key"} return cls(params["format"], params["key"].encode("ascii")) - def customize(self) -> SSK: """ Return an SSK with a newly generated random RSA key. @@ -780,7 +779,7 @@ def upload(alice: TahoeProcess, fmt: CHK | SSK, data: bytes) -> str: f.write(data) f.flush() with fmt.to_argv() as fmt_argv: - argv = [alice, "put"] + fmt_argv + [f.name] + argv = [alice.process, "put"] + fmt_argv + [f.name] return cli(*argv).decode("utf-8").strip() From 6f9b9a3ac1123ca3eb9ecce85e98cc75dc6ccd89 Mon Sep 17 00:00:00 2001 From: meejah Date: Mon, 24 Jul 2023 20:12:01 -0600 Subject: [PATCH 040/172] only use original request --- integration/grid.py | 2 +- integration/test_get_put.py | 1 - integration/test_vectors.py | 3 +-- integration/test_web.py | 2 +- 4 files changed, 3 insertions(+), 5 deletions(-) diff --git a/integration/grid.py b/integration/grid.py index fe3befd3a..5ce4179ec 100644 --- a/integration/grid.py +++ b/integration/grid.py @@ -226,7 +226,7 @@ class Client(object): # return self.process.node_dir @inlineCallbacks - def reconfigure_zfec(self, reactor, request, zfec_params, convergence=None, max_segment_size=None): + def reconfigure_zfec(self, reactor, zfec_params, convergence=None, max_segment_size=None): """ Reconfigure the ZFEC parameters for this node """ diff --git a/integration/test_get_put.py b/integration/test_get_put.py index 536185ef8..2f6642493 100644 --- a/integration/test_get_put.py +++ b/integration/test_get_put.py @@ -105,7 +105,6 @@ def test_upload_download_immutable_different_default_max_segment_size(alice, get reactor, lambda: alice.reconfigure_zfec( reactor, - request, (1, 1, 1), None, max_segment_size=segment_size diff --git a/integration/test_vectors.py b/integration/test_vectors.py index 13a451d1c..bd5def8c5 100644 --- a/integration/test_vectors.py +++ b/integration/test_vectors.py @@ -38,7 +38,7 @@ async def test_capability(reactor, request, alice, case, expected): """ # rewrite alice's config to match params and convergence await alice.reconfigure_zfec( - reactor, request, (1, case.params.required, case.params.total), case.convergence, case.segment_size) + reactor, (1, case.params.required, case.params.total), case.convergence, case.segment_size) # upload data in the correct format actual = upload(alice.process, case.fmt, case.data) @@ -109,7 +109,6 @@ async def generate( for case in cases: await alice.reconfigure_zfec( reactor, - request, (happy, case.params.required, case.params.total), case.convergence, case.segment_size diff --git a/integration/test_web.py b/integration/test_web.py index 01f69bca0..08c6e6217 100644 --- a/integration/test_web.py +++ b/integration/test_web.py @@ -264,7 +264,7 @@ async def test_directory_deep_check(reactor, request, alice): required = 2 total = 4 - await alice.reconfigure_zfec(reactor, request, (happy, required, total), convergence=None) + await alice.reconfigure_zfec(reactor, (happy, required, total), convergence=None) await deferToThread(_test_directory_deep_check_blocking, alice) From 849f4ed2a57da1e2dd19b668dccba5967534224c Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Fri, 28 Jul 2023 11:14:09 -0400 Subject: [PATCH 041/172] More annotations. 
--- src/allmydata/storage/http_client.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/src/allmydata/storage/http_client.py b/src/allmydata/storage/http_client.py index 765e94319..79f6cfa89 100644 --- a/src/allmydata/storage/http_client.py +++ b/src/allmydata/storage/http_client.py @@ -72,7 +72,7 @@ except ImportError: pass -def _encode_si(si): # type: (bytes) -> str +def _encode_si(si: bytes) -> str: """Encode the storage index into Unicode string.""" return str(si_b2a(si), "ascii") @@ -80,7 +80,7 @@ def _encode_si(si): # type: (bytes) -> str class ClientException(Exception): """An unexpected response code from the server.""" - def __init__(self, code, *additional_args): + def __init__(self, code: int, *additional_args): Exception.__init__(self, code, *additional_args) self.code = code @@ -93,7 +93,7 @@ register_exception_extractor(ClientException, lambda e: {"response_code": e.code # Tags are of the form #6.nnn, where the number is documented at # https://www.iana.org/assignments/cbor-tags/cbor-tags.xhtml. Notably, #6.258 # indicates a set. -_SCHEMAS = { +_SCHEMAS : Mapping[str,Schema] = { "get_version": Schema( # Note that the single-quoted (`'`) string keys in this schema # represent *byte* strings - per the CDDL specification. Text strings @@ -155,7 +155,7 @@ class _LengthLimitedCollector: timeout_on_silence: IDelayedCall f: BytesIO = field(factory=BytesIO) - def __call__(self, data: bytes): + def __call__(self, data: bytes) -> None: self.timeout_on_silence.reset(60) self.remaining_length -= len(data) if self.remaining_length < 0: @@ -164,7 +164,7 @@ class _LengthLimitedCollector: def limited_content( - response, + response: IResponse, clock: IReactorTime, max_length: int = 30 * 1024 * 1024, ) -> Deferred[BinaryIO]: @@ -300,11 +300,11 @@ class _StorageClientHTTPSPolicy: expected_spki_hash: bytes # IPolicyForHTTPS - def creatorForNetloc(self, hostname, port): + def creatorForNetloc(self, hostname: str, port: int) -> _StorageClientHTTPSPolicy: return self # IOpenSSLClientConnectionCreator - def clientConnectionForTLS(self, tlsProtocol): + def clientConnectionForTLS(self, tlsProtocol: object) -> SSL.Connection: return SSL.Connection( _TLSContextFactory(self.expected_spki_hash).getContext(), None ) @@ -344,7 +344,7 @@ class StorageClientFactory: cls.TEST_MODE_REGISTER_HTTP_POOL = callback @classmethod - def stop_test_mode(cls): + def stop_test_mode(cls) -> None: """Stop testing mode.""" cls.TEST_MODE_REGISTER_HTTP_POOL = None @@ -437,7 +437,7 @@ class StorageClient(object): """Get a URL relative to the base URL.""" return self._base_url.click(path) - def _get_headers(self, headers): # type: (Optional[Headers]) -> Headers + def _get_headers(self, headers: Optional[Headers]) -> Headers: """Return the basic headers to be used by default.""" if headers is None: headers = Headers() @@ -565,7 +565,7 @@ class StorageClient(object): ).read() raise ClientException(response.code, response.phrase, data) - def shutdown(self) -> Deferred: + def shutdown(self) -> Deferred[object]: """Shutdown any connections.""" return self._pool.closeCachedConnections() From 2b7f3d1707b6e91ea8e517bcfa3a6cf892d1715e Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Fri, 28 Jul 2023 11:28:13 -0400 Subject: [PATCH 042/172] Add type annotations to `_authorization_decorator`. 
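
ParamSpec and Concatenate are what make this signature expressible: the
decorated handler receives (self, request, secrets, *args), while the route
Klein actually calls receives only (self, request, *args); the decorator
supplies the secrets itself. A reduced sketch of that shape, with all names
invented for illustration:

    from typing import Callable, Dict, TypeVar
    from typing_extensions import Concatenate, ParamSpec

    P = ParamSpec("P")
    T = TypeVar("T")

    def inject_secrets(
        f: Callable[Concatenate[Dict[str, bytes], P], T]
    ) -> Callable[P, T]:
        # the returned callable no longer takes the secrets argument;
        # we parse it and pass it along ourselves
        def wrapper(*args: P.args, **kwargs: P.kwargs) -> T:
            secrets = {"upload": b"..."}  # stand-in for real header parsing
            return f(secrets, *args, **kwargs)
        return wrapper

Without Concatenate there is no way to say "the same parameters, minus the one
we inject", and the decorator would erase the handler's types.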
--- src/allmydata/storage/http_server.py | 44 +++++++++++++++++++------ src/allmydata/test/test_storage_http.py | 3 +- 2 files changed, 36 insertions(+), 11 deletions(-) diff --git a/src/allmydata/storage/http_server.py b/src/allmydata/storage/http_server.py index 3ff98e933..78ed1a974 100644 --- a/src/allmydata/storage/http_server.py +++ b/src/allmydata/storage/http_server.py @@ -4,7 +4,8 @@ HTTP server for storage. from __future__ import annotations -from typing import Any, Callable, Union, cast, Optional +from typing import Any, Callable, Union, cast, Optional, TypeVar, Sequence +from typing_extensions import ParamSpec, Concatenate from functools import wraps from base64 import b64decode import binascii @@ -27,6 +28,7 @@ from twisted.internet.defer import Deferred from twisted.internet.ssl import CertificateOptions, Certificate, PrivateCertificate from twisted.internet.interfaces import IReactorFromThreads from twisted.web.server import Site, Request +from twisted.web.iweb import IRequest from twisted.protocols.tls import TLSMemoryBIOFactory from twisted.python.filepath import FilePath @@ -68,7 +70,7 @@ class ClientSecretsException(Exception): def _extract_secrets( - header_values: list[str], required_secrets: set[Secrets] + header_values: Sequence[str], required_secrets: set[Secrets] ) -> dict[Secrets, bytes]: """ Given list of values of ``X-Tahoe-Authorization`` headers, and required @@ -102,18 +104,32 @@ def _extract_secrets( return result -def _authorization_decorator(required_secrets): +P = ParamSpec("P") +T = TypeVar("T") + + +def _authorization_decorator( + required_secrets: set[Secrets], +) -> Callable[ + [Callable[Concatenate[BaseApp, Request, dict[Secrets, bytes], P], T]], + Callable[Concatenate[BaseApp, Request, P], T], +]: """ 1. Check the ``Authorization`` header matches server swissnum. 2. Extract ``X-Tahoe-Authorization`` headers and pass them in. 3. Log the request and response. """ - def decorator(f): + def decorator( + f: Callable[Concatenate[BaseApp, Request, dict[Secrets, bytes], P], T] + ) -> Callable[Concatenate[BaseApp, Request, P], T]: @wraps(f) - def route(self, request, *args, **kwargs): - # Don't set text/html content type by default: - request.defaultContentType = None + def route( + self: BaseApp, request: Request, *args: P.args, **kwargs: P.kwargs + ) -> T: + # Don't set text/html content type by default. + # None is actually supported, see https://github.com/twisted/twisted/issues/11902 + request.defaultContentType = None # type: ignore[assignment] with start_action( action_type="allmydata:storage:http-server:handle-request", @@ -584,7 +600,13 @@ async def read_encoded( return cbor2.load(request.content) -class HTTPServer(object): +class BaseApp: + """Base class for ``HTTPServer`` and testing equivalent.""" + + _swissnum: bytes + + +class HTTPServer(BaseApp): """ A HTTP interface to the storage server. 
""" @@ -641,7 +663,6 @@ class HTTPServer(object): # https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3861 raise _HTTPError(http.NOT_ACCEPTABLE) - ##### Generic APIs ##### @_authorized_route(_app, set(), "/storage/v1/version", methods=["GET"]) @@ -874,7 +895,10 @@ class HTTPServer(object): async def mutable_read_test_write(self, request, authorization, storage_index): """Read/test/write combined operation for mutables.""" rtw_request = await read_encoded( - self._reactor, request, _SCHEMAS["mutable_read_test_write"], max_size=2**48 + self._reactor, + request, + _SCHEMAS["mutable_read_test_write"], + max_size=2**48, ) secrets = ( authorization[Secrets.WRITE_ENABLER], diff --git a/src/allmydata/test/test_storage_http.py b/src/allmydata/test/test_storage_http.py index 48ca072bc..1a334034d 100644 --- a/src/allmydata/test/test_storage_http.py +++ b/src/allmydata/test/test_storage_http.py @@ -62,6 +62,7 @@ from ..storage.http_server import ( _add_error_handling, read_encoded, _SCHEMAS as SERVER_SCHEMAS, + BaseApp, ) from ..storage.http_client import ( StorageClient, @@ -257,7 +258,7 @@ def gen_bytes(length: int) -> bytes: return result -class TestApp(object): +class TestApp(BaseApp): """HTTP API for testing purposes.""" clock: IReactorTime From 919e6b339d0eaf7019f231ad916b2f7ac25cef48 Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Fri, 28 Jul 2023 12:58:22 -0400 Subject: [PATCH 043/172] Add type annotation to _authorized_route --- src/allmydata/storage/http_server.py | 77 ++++++++++++++++++------- src/allmydata/test/test_storage_http.py | 2 +- 2 files changed, 56 insertions(+), 23 deletions(-) diff --git a/src/allmydata/storage/http_server.py b/src/allmydata/storage/http_server.py index 78ed1a974..7ceb8328c 100644 --- a/src/allmydata/storage/http_server.py +++ b/src/allmydata/storage/http_server.py @@ -4,7 +4,7 @@ HTTP server for storage. from __future__ import annotations -from typing import Any, Callable, Union, cast, Optional, TypeVar, Sequence +from typing import Any, Callable, Union, cast, Optional, TypeVar, Sequence, Protocol from typing_extensions import ParamSpec, Concatenate from functools import wraps from base64 import b64decode @@ -16,7 +16,7 @@ import mmap from eliot import start_action from cryptography.x509 import Certificate as CryptoCertificate from zope.interface import implementer -from klein import Klein +from klein import Klein, KleinRenderable from twisted.web import http from twisted.internet.interfaces import ( IListeningPort, @@ -104,15 +104,23 @@ def _extract_secrets( return result +class BaseApp(Protocol): + """Protocol for ``HTTPServer`` and testing equivalent.""" + + _swissnum: bytes + + P = ParamSpec("P") T = TypeVar("T") +SecretsDict = dict[Secrets, bytes] +App = TypeVar("App", bound=BaseApp) def _authorization_decorator( required_secrets: set[Secrets], ) -> Callable[ - [Callable[Concatenate[BaseApp, Request, dict[Secrets, bytes], P], T]], - Callable[Concatenate[BaseApp, Request, P], T], + [Callable[Concatenate[App, Request, SecretsDict, P], T]], + Callable[Concatenate[App, Request, P], T], ]: """ 1. Check the ``Authorization`` header matches server swissnum. 
@@ -121,11 +129,14 @@ def _authorization_decorator( """ def decorator( - f: Callable[Concatenate[BaseApp, Request, dict[Secrets, bytes], P], T] - ) -> Callable[Concatenate[BaseApp, Request, P], T]: + f: Callable[Concatenate[App, Request, SecretsDict, P], T] + ) -> Callable[Concatenate[App, Request, P], T]: @wraps(f) def route( - self: BaseApp, request: Request, *args: P.args, **kwargs: P.kwargs + self: App, + request: Request, + *args: P.args, + **kwargs: P.kwargs, ) -> T: # Don't set text/html content type by default. # None is actually supported, see https://github.com/twisted/twisted/issues/11902 @@ -179,7 +190,22 @@ def _authorization_decorator( return decorator -def _authorized_route(app, required_secrets, *route_args, **route_kwargs): +def _authorized_route( + klein_app: Klein, + required_secrets: set[Secrets], + url: str, + *route_args: Any, + branch: bool = False, + **route_kwargs: Any, +) -> Callable[ + [ + Callable[ + Concatenate[App, Request, SecretsDict, P], + KleinRenderable, + ] + ], + Callable[..., KleinRenderable], +]: """ Like Klein's @route, but with additional support for checking the ``Authorization`` header as well as ``X-Tahoe-Authorization`` headers. The @@ -189,12 +215,23 @@ def _authorized_route(app, required_secrets, *route_args, **route_kwargs): :param required_secrets: Set of required ``Secret`` types. """ - def decorator(f): - @app.route(*route_args, **route_kwargs) + def decorator( + f: Callable[ + Concatenate[App, Request, SecretsDict, P], + KleinRenderable, + ] + ) -> Callable[..., KleinRenderable]: + @klein_app.route(url, *route_args, branch=branch, **route_kwargs) # type: ignore[arg-type] @_authorization_decorator(required_secrets) @wraps(f) - def handle_route(*args, **kwargs): - return f(*args, **kwargs) + def handle_route( + app: App, + request: Request, + secrets: SecretsDict, + *args: P.args, + **kwargs: P.kwargs, + ) -> KleinRenderable: + return f(app, request, secrets, *args, **kwargs) return handle_route @@ -367,7 +404,7 @@ class _ReadAllProducer: start: int = field(default=0) @classmethod - def produce_to(cls, request: Request, read_data: ReadData) -> Deferred: + def produce_to(cls, request: Request, read_data: ReadData) -> Deferred[bytes]: """ Create and register the producer, returning ``Deferred`` that should be returned from a HTTP server endpoint. @@ -600,12 +637,6 @@ async def read_encoded( return cbor2.load(request.content) -class BaseApp: - """Base class for ``HTTPServer`` and testing equivalent.""" - - _swissnum: bytes - - class HTTPServer(BaseApp): """ A HTTP interface to the storage server. @@ -637,7 +668,7 @@ class HTTPServer(BaseApp): """Return twisted.web ``Resource`` for this object.""" return self._app.resource() - def _send_encoded(self, request, data): + def _send_encoded(self, request: Request, data: object) -> Deferred[bytes]: """ Return encoded data suitable for writing as the HTTP body response, by default using CBOR. 
@@ -666,7 +697,7 @@ class HTTPServer(BaseApp): ##### Generic APIs ##### @_authorized_route(_app, set(), "/storage/v1/version", methods=["GET"]) - def version(self, request, authorization): + def version(self, request: Request, authorization: SecretsDict) -> KleinRenderable: """Return version information.""" return self._send_encoded(request, self._get_version()) @@ -698,7 +729,9 @@ class HTTPServer(BaseApp): methods=["POST"], ) @async_to_deferred - async def allocate_buckets(self, request, authorization, storage_index): + async def allocate_buckets( + self, request: Request, authorization: SecretsDict, storage_index: bytes + ) -> KleinRenderable: """Allocate buckets.""" upload_secret = authorization[Secrets.UPLOAD] # It's just a list of up to ~256 shares, shouldn't use many bytes. diff --git a/src/allmydata/test/test_storage_http.py b/src/allmydata/test/test_storage_http.py index 1a334034d..e7b8059ee 100644 --- a/src/allmydata/test/test_storage_http.py +++ b/src/allmydata/test/test_storage_http.py @@ -266,7 +266,7 @@ class TestApp(BaseApp): _add_error_handling(_app) _swissnum = SWISSNUM_FOR_TEST # Match what the test client is using - @_authorized_route(_app, {}, "/noop", methods=["GET"]) + @_authorized_route(_app, set(), "/noop", methods=["GET"]) def noop(self, request, authorization): return "noop" From d669099a3515b4ff8c1524bc43f8fcd74782560a Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Fri, 28 Jul 2023 13:28:02 -0400 Subject: [PATCH 044/172] Add more type annotations. --- src/allmydata/storage/http_server.py | 69 ++++++++++++++++++++++------ 1 file changed, 55 insertions(+), 14 deletions(-) diff --git a/src/allmydata/storage/http_server.py b/src/allmydata/storage/http_server.py index 7ceb8328c..0cf3d25f4 100644 --- a/src/allmydata/storage/http_server.py +++ b/src/allmydata/storage/http_server.py @@ -770,7 +770,13 @@ class HTTPServer(BaseApp): "/storage/v1/immutable///abort", methods=["PUT"], ) - def abort_share_upload(self, request, authorization, storage_index, share_number): + def abort_share_upload( + self, + request: Request, + authorization: SecretsDict, + storage_index: bytes, + share_number: int, + ) -> KleinRenderable: """Abort an in-progress immutable share upload.""" try: bucket = self._uploads.get_write_bucket( @@ -801,7 +807,13 @@ class HTTPServer(BaseApp): "/storage/v1/immutable//", methods=["PATCH"], ) - def write_share_data(self, request, authorization, storage_index, share_number): + def write_share_data( + self, + request: Request, + authorization: SecretsDict, + storage_index: bytes, + share_number: int, + ) -> KleinRenderable: """Write data to an in-progress immutable upload.""" content_range = parse_content_range_header(request.getHeader("content-range")) if content_range is None or content_range.units != "bytes": @@ -811,14 +823,17 @@ class HTTPServer(BaseApp): bucket = self._uploads.get_write_bucket( storage_index, share_number, authorization[Secrets.UPLOAD] ) - offset = content_range.start - remaining = content_range.stop - content_range.start + offset = content_range.start or 0 + # We don't support an unspecified stop for the range: + assert content_range.stop is not None + # Missing body makes no sense: + assert request.content is not None + remaining = content_range.stop - offset finished = False while remaining > 0: data = request.content.read(min(remaining, 65536)) assert data, "uploaded data length doesn't match range" - try: finished = bucket.write(offset, data) except ConflictingWriteError: @@ -844,7 +859,9 @@ class HTTPServer(BaseApp): 
"/storage/v1/immutable//shares", methods=["GET"], ) - def list_shares(self, request, authorization, storage_index): + def list_shares( + self, request: Request, authorization: SecretsDict, storage_index: bytes + ) -> KleinRenderable: """ List shares for the given storage index. """ @@ -857,7 +874,13 @@ class HTTPServer(BaseApp): "/storage/v1/immutable//", methods=["GET"], ) - def read_share_chunk(self, request, authorization, storage_index, share_number): + def read_share_chunk( + self, + request: Request, + authorization: SecretsDict, + storage_index: bytes, + share_number: int, + ) -> KleinRenderable: """Read a chunk for an already uploaded immutable.""" request.setHeader("content-type", "application/octet-stream") try: @@ -874,7 +897,9 @@ class HTTPServer(BaseApp): "/storage/v1/lease/", methods=["PUT"], ) - def add_or_renew_lease(self, request, authorization, storage_index): + def add_or_renew_lease( + self, request: Request, authorization: SecretsDict, storage_index: bytes + ) -> KleinRenderable: """Update the lease for an immutable or mutable share.""" if not list(self._storage_server.get_shares(storage_index)): raise _HTTPError(http.NOT_FOUND) @@ -897,8 +922,12 @@ class HTTPServer(BaseApp): ) @async_to_deferred async def advise_corrupt_share_immutable( - self, request, authorization, storage_index, share_number - ): + self, + request: Request, + authorization: SecretsDict, + storage_index: bytes, + share_number: int, + ) -> KleinRenderable: """Indicate that given share is corrupt, with a text reason.""" try: bucket = self._storage_server.get_buckets(storage_index)[share_number] @@ -925,7 +954,9 @@ class HTTPServer(BaseApp): methods=["POST"], ) @async_to_deferred - async def mutable_read_test_write(self, request, authorization, storage_index): + async def mutable_read_test_write( + self, request: Request, authorization: SecretsDict, storage_index: bytes + ) -> KleinRenderable: """Read/test/write combined operation for mutables.""" rtw_request = await read_encoded( self._reactor, @@ -967,7 +998,13 @@ class HTTPServer(BaseApp): "/storage/v1/mutable//", methods=["GET"], ) - def read_mutable_chunk(self, request, authorization, storage_index, share_number): + def read_mutable_chunk( + self, + request: Request, + authorization: SecretsDict, + storage_index: bytes, + share_number: int, + ) -> KleinRenderable: """Read a chunk from a mutable.""" request.setHeader("content-type", "application/octet-stream") @@ -1007,8 +1044,12 @@ class HTTPServer(BaseApp): ) @async_to_deferred async def advise_corrupt_share_mutable( - self, request, authorization, storage_index, share_number - ): + self, + request: Request, + authorization: SecretsDict, + storage_index: bytes, + share_number: int, + ) -> KleinRenderable: """Indicate that given share is corrupt, with a text reason.""" if share_number not in { shnum for (shnum, _) in self._storage_server.get_shares(storage_index) From 0d0e32646fe305637d4cebedd8c9e4427db9fedd Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Fri, 28 Jul 2023 13:42:00 -0400 Subject: [PATCH 045/172] More type annotations. 
--- src/allmydata/storage/http_server.py | 28 ++++++++++++++++------------ 1 file changed, 16 insertions(+), 12 deletions(-) diff --git a/src/allmydata/storage/http_server.py b/src/allmydata/storage/http_server.py index 0cf3d25f4..ce07d8f2e 100644 --- a/src/allmydata/storage/http_server.py +++ b/src/allmydata/storage/http_server.py @@ -31,6 +31,7 @@ from twisted.web.server import Site, Request from twisted.web.iweb import IRequest from twisted.protocols.tls import TLSMemoryBIOFactory from twisted.python.filepath import FilePath +from twisted.python.failure import Failure from attrs import define, field, Factory from werkzeug.http import ( @@ -287,7 +288,7 @@ class UploadsInProgress(object): except (KeyError, IndexError): raise _HTTPError(http.NOT_FOUND) - def remove_write_bucket(self, bucket: BucketWriter): + def remove_write_bucket(self, bucket: BucketWriter) -> None: """Stop tracking the given ``BucketWriter``.""" try: storage_index, share_number = self._bucketwriters.pop(bucket) @@ -303,7 +304,7 @@ class UploadsInProgress(object): def validate_upload_secret( self, storage_index: bytes, share_number: int, upload_secret: bytes - ): + ) -> None: """ Raise an unauthorized-HTTP-response exception if the given storage_index+share_number have a different upload secret than the @@ -325,7 +326,7 @@ class StorageIndexConverter(BaseConverter): regex = "[" + str(rfc3548_alphabet, "ascii") + "]{26}" - def to_python(self, value): + def to_python(self, value: str) -> bytes: try: return si_a2b(value.encode("ascii")) except (AssertionError, binascii.Error, ValueError): @@ -413,7 +414,7 @@ class _ReadAllProducer: request.registerProducer(producer, False) return producer.result - def resumeProducing(self): + def resumeProducing(self) -> None: data = self.read_data(self.start, 65536) if not data: self.request.unregisterProducer() @@ -424,10 +425,10 @@ class _ReadAllProducer: self.request.write(data) self.start += len(data) - def pauseProducing(self): + def pauseProducing(self) -> None: pass - def stopProducing(self): + def stopProducing(self) -> None: pass @@ -445,7 +446,7 @@ class _ReadRangeProducer: start: int remaining: int - def resumeProducing(self): + def resumeProducing(self) -> None: if self.result is None or self.request is None: return @@ -482,10 +483,10 @@ class _ReadRangeProducer: if self.remaining == 0: self.stopProducing() - def pauseProducing(self): + def pauseProducing(self) -> None: pass - def stopProducing(self): + def stopProducing(self) -> None: if self.request is not None: self.request.unregisterProducer() self.request = None @@ -564,12 +565,13 @@ def read_range( return d -def _add_error_handling(app: Klein): +def _add_error_handling(app: Klein) -> None: """Add exception handlers to a Klein app.""" @app.handle_errors(_HTTPError) - def _http_error(_, request, failure): + def _http_error(self: Any, request: IRequest, failure: Failure) -> KleinRenderable: """Handle ``_HTTPError`` exceptions.""" + assert isinstance(failure.value, _HTTPError) request.setResponseCode(failure.value.code) if failure.value.body is not None: return failure.value.body @@ -577,7 +579,9 @@ def _add_error_handling(app: Klein): return b"" @app.handle_errors(CDDLValidationError) - def _cddl_validation_error(_, request, failure): + def _cddl_validation_error( + self: Any, request: IRequest, failure: Failure + ) -> KleinRenderable: """Handle CDDL validation errors.""" request.setResponseCode(http.BAD_REQUEST) return str(failure.value).encode("utf-8") From 00b7e7e17862335edd08b0b38b28f30f64b8f993 Mon Sep 17 00:00:00 2001 
From: Itamar Turner-Trauring Date: Fri, 28 Jul 2023 13:48:43 -0400 Subject: [PATCH 046/172] More type annotations. --- src/allmydata/storage/http_server.py | 11 ++++++++--- src/allmydata/test/test_storage_https.py | 6 ++++-- 2 files changed, 12 insertions(+), 5 deletions(-) diff --git a/src/allmydata/storage/http_server.py b/src/allmydata/storage/http_server.py index ce07d8f2e..cf0e6dbb4 100644 --- a/src/allmydata/storage/http_server.py +++ b/src/allmydata/storage/http_server.py @@ -17,11 +17,13 @@ from eliot import start_action from cryptography.x509 import Certificate as CryptoCertificate from zope.interface import implementer from klein import Klein, KleinRenderable +from klein.resource import KleinResource from twisted.web import http from twisted.internet.interfaces import ( IListeningPort, IStreamServerEndpoint, IPullProducer, + IProtocolFactory, ) from twisted.internet.address import IPv4Address, IPv6Address from twisted.internet.defer import Deferred @@ -668,7 +670,7 @@ class HTTPServer(BaseApp): self._uploads.remove_write_bucket ) - def get_resource(self): + def get_resource(self) -> KleinResource: """Return twisted.web ``Resource`` for this object.""" return self._app.resource() @@ -1085,7 +1087,10 @@ class _TLSEndpointWrapper(object): @classmethod def from_paths( - cls, endpoint, private_key_path: FilePath, cert_path: FilePath + cls: type[_TLSEndpointWrapper], + endpoint: IStreamServerEndpoint, + private_key_path: FilePath, + cert_path: FilePath, ) -> "_TLSEndpointWrapper": """ Create an endpoint with the given private key and certificate paths on @@ -1100,7 +1105,7 @@ class _TLSEndpointWrapper(object): ) return cls(endpoint=endpoint, context_factory=certificate_options) - def listen(self, factory): + def listen(self, factory: IProtocolFactory) -> Deferred[IListeningPort]: return self.endpoint.listen( TLSMemoryBIOFactory(self.context_factory, False, factory) ) diff --git a/src/allmydata/test/test_storage_https.py b/src/allmydata/test/test_storage_https.py index a11b0eed5..0e0bbcc95 100644 --- a/src/allmydata/test/test_storage_https.py +++ b/src/allmydata/test/test_storage_https.py @@ -109,9 +109,11 @@ class PinningHTTPSValidation(AsyncTestCase): root.isLeaf = True listening_port = await endpoint.listen(Site(root)) try: - yield f"https://127.0.0.1:{listening_port.getHost().port}/" + yield f"https://127.0.0.1:{listening_port.getHost().port}/" # type: ignore[attr-defined] finally: - await listening_port.stopListening() + result = listening_port.stopListening() + if result is not None: + await result def request(self, url: str, expected_certificate: x509.Certificate): """ From f8e9631f532da65c46ef3d04039b34469b6ab11a Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Fri, 28 Jul 2023 13:49:21 -0400 Subject: [PATCH 047/172] News fragment. --- newsfragments/4052.minor | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 newsfragments/4052.minor diff --git a/newsfragments/4052.minor b/newsfragments/4052.minor new file mode 100644 index 000000000..e69de29bb From 176fac7360f3797cb637d8fd9d90ba05c3fbe548 Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Fri, 28 Jul 2023 14:20:05 -0400 Subject: [PATCH 048/172] Work in Python 3.8. 
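
``from __future__ import annotations`` (PEP 563) only defers evaluation of
annotations, so ``dict[Secrets, bytes]`` is fine inside a signature even on
3.8; a module-level alias, however, is an ordinary runtime expression, and
subscripting the built-in ``dict`` only became legal in Python 3.9 (PEP 585).
Hence ``typing.Dict`` for the alias. A minimal illustration (the alias and
function names are invented):

    from __future__ import annotations
    from typing import Dict

    def f(x: dict[str, int]) -> None:   # fine on 3.8: never evaluated
        ...

    # Alias = dict[str, int]   # TypeError on 3.8: 'type' object is not subscriptable
    Alias = Dict[str, int]      # works on 3.8 and later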
--- src/allmydata/storage/http_server.py | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/src/allmydata/storage/http_server.py b/src/allmydata/storage/http_server.py index cf0e6dbb4..66b0dd6de 100644 --- a/src/allmydata/storage/http_server.py +++ b/src/allmydata/storage/http_server.py @@ -4,7 +4,17 @@ HTTP server for storage. from __future__ import annotations -from typing import Any, Callable, Union, cast, Optional, TypeVar, Sequence, Protocol +from typing import ( + Any, + Callable, + Union, + cast, + Optional, + TypeVar, + Sequence, + Protocol, + Dict, +) from typing_extensions import ParamSpec, Concatenate from functools import wraps from base64 import b64decode @@ -115,7 +125,7 @@ class BaseApp(Protocol): P = ParamSpec("P") T = TypeVar("T") -SecretsDict = dict[Secrets, bytes] +SecretsDict = Dict[Secrets, bytes] App = TypeVar("App", bound=BaseApp) From 050ef6cca3d19b20f76b7d4bf80b2d82f30f2af6 Mon Sep 17 00:00:00 2001 From: meejah Date: Sat, 29 Jul 2023 04:04:05 -0600 Subject: [PATCH 049/172] tor-tests work; refactor ports --- integration/conftest.py | 49 ++++++++++++++++++++++++++++++++++++----- integration/test_i2p.py | 7 +++--- integration/test_tor.py | 15 ++++++------- integration/util.py | 2 +- 4 files changed, 55 insertions(+), 18 deletions(-) diff --git a/integration/conftest.py b/integration/conftest.py index 837b54aa1..92483da65 100644 --- a/integration/conftest.py +++ b/integration/conftest.py @@ -7,6 +7,7 @@ from __future__ import annotations import os import sys import shutil +from attr import define from time import sleep from os import mkdir, environ from os.path import join, exists @@ -189,7 +190,7 @@ def introducer_furl(introducer, temp_dir): include_args=["temp_dir", "flog_gatherer"], include_result=False, ) -def tor_introducer(reactor, temp_dir, flog_gatherer, request): +def tor_introducer(reactor, temp_dir, flog_gatherer, request, tor_network): intro_dir = join(temp_dir, 'introducer_tor') print("making Tor introducer in {}".format(intro_dir)) print("(this can take tens of seconds to allocate Onion address)") @@ -203,9 +204,7 @@ def tor_introducer(reactor, temp_dir, flog_gatherer, request): request, ( 'create-introducer', - # The control port should agree with the configuration of the - # Tor network we bootstrap with chutney. - '--tor-control-port', 'tcp:localhost:8007', + '--tor-control-port', tor_network.client_control_endpoint, '--hide-ip', '--listen=tor', intro_dir, @@ -306,6 +305,21 @@ def bob(reactor, temp_dir, introducer_furl, flog_gatherer, storage_nodes, reques @pytest.mark.skipif(sys.platform.startswith('win'), 'Tor tests are unstable on Windows') def chutney(reactor, temp_dir: str) -> tuple[str, dict[str, str]]: + """ + Instantiate the "networks/hs-v3" Chutney configuration for a local + Tor network. + + This provides a small, local Tor network that can run v3 Onion + Services. This has 10 tor processes: 3 authorities, 5 + exits+relays, a client (and one service-hosting node we don't use). + + We pin a Chutney revision, so things shouldn't change. Currently, + the ONLY node that exposes a valid SocksPort is "008c" (the + client) on 9008. + + The control ports start at 8000 (so the ControlPort for the one + client node is 8008). + """ # Try to find Chutney already installed in the environment. 
try: import chutney @@ -363,7 +377,24 @@ def chutney(reactor, temp_dir: str) -> tuple[str, dict[str, str]]: ) pytest_twisted.blockon(proto.done) - return (chutney_dir, {"PYTHONPATH": join(chutney_dir, "lib")}) + return chutney_dir, {"PYTHONPATH": join(chutney_dir, "lib")} + + +@define +class ChutneyTorNetwork: + """ + Represents a running Chutney (tor) network. Returned by the + "tor_network" fixture. + """ + dir: FilePath + environ: dict + client_control_port: int + + @property + def client_control_endpoint(self) -> str: + print("CONTROL", "tcp:localhost:{}".format(self.client_control_port)) + return "tcp:localhost:{}".format(self.client_control_port) + @pytest.fixture(scope='session') @@ -422,3 +453,11 @@ def tor_network(reactor, temp_dir, chutney, request): pytest_twisted.blockon(chutney(("status", basic_network))) except ProcessTerminated: print("Chutney.TorNet status failed (continuing)") + + # the "8008" comes from configuring "networks/basic" in chutney + # and then examining "net/nodes/008c/torrc" for ControlPort value + return ChutneyTorNetwork( + chutney_root, + chutney_env, + 8008, + ) diff --git a/integration/test_i2p.py b/integration/test_i2p.py index 2ee603573..ea3ddb62b 100644 --- a/integration/test_i2p.py +++ b/integration/test_i2p.py @@ -132,8 +132,8 @@ def i2p_introducer_furl(i2p_introducer, temp_dir): @pytest_twisted.inlineCallbacks @pytest.mark.skip("I2P tests are not functioning at all, for unknown reasons") def test_i2p_service_storage(reactor, request, temp_dir, flog_gatherer, i2p_network, i2p_introducer_furl): - yield _create_anonymous_node(reactor, 'carol_i2p', 8008, request, temp_dir, flog_gatherer, i2p_network, i2p_introducer_furl) - yield _create_anonymous_node(reactor, 'dave_i2p', 8009, request, temp_dir, flog_gatherer, i2p_network, i2p_introducer_furl) + yield _create_anonymous_node(reactor, 'carol_i2p', request, temp_dir, flog_gatherer, i2p_network, i2p_introducer_furl) + yield _create_anonymous_node(reactor, 'dave_i2p', request, temp_dir, flog_gatherer, i2p_network, i2p_introducer_furl) # ensure both nodes are connected to "a grid" by uploading # something via carol, and retrieve it using dave. gold_path = join(temp_dir, "gold") @@ -179,9 +179,8 @@ def test_i2p_service_storage(reactor, request, temp_dir, flog_gatherer, i2p_netw @pytest_twisted.inlineCallbacks -def _create_anonymous_node(reactor, name, control_port, request, temp_dir, flog_gatherer, i2p_network, introducer_furl): +def _create_anonymous_node(reactor, name, request, temp_dir, flog_gatherer, i2p_network, introducer_furl): node_dir = FilePath(temp_dir).child(name) - web_port = "tcp:{}:interface=localhost".format(control_port + 2000) print("creating", node_dir.path) node_dir.makedirs() diff --git a/integration/test_tor.py b/integration/test_tor.py index 4d0ce4f16..d7fed5790 100644 --- a/integration/test_tor.py +++ b/integration/test_tor.py @@ -38,8 +38,8 @@ def test_onion_service_storage(reactor, request, temp_dir, flog_gatherer, tor_ne The two nodes can talk to the introducer and each other: we upload to one node, read from the other. 
""" - carol = yield _create_anonymous_node(reactor, 'carol', 8008, request, temp_dir, flog_gatherer, tor_network, tor_introducer_furl, 2) - dave = yield _create_anonymous_node(reactor, 'dave', 8009, request, temp_dir, flog_gatherer, tor_network, tor_introducer_furl, 2) + carol = yield _create_anonymous_node(reactor, 'carol', 8100, request, temp_dir, flog_gatherer, tor_network, tor_introducer_furl, 2) + dave = yield _create_anonymous_node(reactor, 'dave', 8101, request, temp_dir, flog_gatherer, tor_network, tor_introducer_furl, 2) yield util.await_client_ready(carol, minimum_number_of_servers=2, timeout=600) yield util.await_client_ready(dave, minimum_number_of_servers=2, timeout=600) yield upload_to_one_download_from_the_other(reactor, temp_dir, carol, dave) @@ -94,9 +94,8 @@ async def upload_to_one_download_from_the_other(reactor, temp_dir, upload_to: ut @pytest_twisted.inlineCallbacks -def _create_anonymous_node(reactor, name, control_port, request, temp_dir, flog_gatherer, tor_network, introducer_furl, shares_total: int) -> util.TahoeProcess: +def _create_anonymous_node(reactor, name, web_port, request, temp_dir, flog_gatherer, tor_network, introducer_furl, shares_total: int) -> util.TahoeProcess: node_dir = FilePath(temp_dir).child(name) - web_port = "tcp:{}:interface=localhost".format(control_port + 2000) if node_dir.exists(): raise RuntimeError( "A node already exists in '{}'".format(node_dir) @@ -111,10 +110,10 @@ def _create_anonymous_node(reactor, name, control_port, request, temp_dir, flog_ sys.executable, '-b', '-m', 'allmydata.scripts.runner', 'create-node', '--nickname', name, - '--webport', web_port, + '--webport', str(web_port), '--introducer', introducer_furl, '--hide-ip', - '--tor-control-port', 'tcp:localhost:{}'.format(control_port), + '--tor-control-port', tor_network.client_control_endpoint, '--listen', 'tor', '--shares-needed', '1', '--shares-happy', '1', @@ -133,7 +132,7 @@ def _create_anonymous_node(reactor, name, control_port, request, temp_dir, flog_ config = read_config(node_dir.path, "tub.port") config.set_config("tor", "onion", "true") config.set_config("tor", "onion.external_port", "3457") - config.set_config("tor", "control.port", f"tcp:port={control_port}:host=127.0.0.1") + config.set_config("tor", "control.port", tor_network.client_control_endpoint) config.set_config("tor", "onion.private_key_file", "private/tor_onion.privkey") print("running") @@ -159,7 +158,7 @@ def test_anonymous_client(reactor, request, temp_dir, flog_gatherer, tor_network ) yield util.await_client_ready(normie) - anonymoose = yield _create_anonymous_node(reactor, 'anonymoose', 8008, request, temp_dir, flog_gatherer, tor_network, introducer_furl, 1) + anonymoose = yield _create_anonymous_node(reactor, 'anonymoose', 8102, request, temp_dir, flog_gatherer, tor_network, introducer_furl, 1) yield util.await_client_ready(anonymoose, minimum_number_of_servers=1, timeout=600) yield upload_to_one_download_from_the_other(reactor, temp_dir, normie, anonymoose) diff --git a/integration/util.py b/integration/util.py index b614a84bd..909def8ef 100644 --- a/integration/util.py +++ b/integration/util.py @@ -659,7 +659,7 @@ def await_client_ready(tahoe, timeout=10, liveness=60*2, minimum_number_of_serve print( f"Now: {time.ctime()}\n" - f"Server last-received-data: {[time.ctime(s['last_received_data']) for s in servers]}" + f"Server last-received-data: {[s['last_received_data'] for s in servers]}" ) server_times = [ From 01a87d85be5a11f40015d651ea1244ffb3a5a487 Mon Sep 17 00:00:00 2001 From: meejah 
Date: Sat, 29 Jul 2023 04:08:52 -0600 Subject: [PATCH 050/172] refactor: actually parallel --- integration/conftest.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/integration/conftest.py b/integration/conftest.py index 92483da65..aa85a38cd 100644 --- a/integration/conftest.py +++ b/integration/conftest.py @@ -266,8 +266,7 @@ def storage_nodes(grid): nodes_d = [] # start all 5 nodes in parallel for x in range(5): - #nodes_d.append(grid.add_storage_node()) - pytest_twisted.blockon(grid.add_storage_node()) + nodes_d.append(grid.add_storage_node()) nodes_status = pytest_twisted.blockon(DeferredList(nodes_d)) for ok, value in nodes_status: From e565b9e28c00138eed1cf3cfdb064c23ddad9ffc Mon Sep 17 00:00:00 2001 From: meejah Date: Sat, 29 Jul 2023 04:14:39 -0600 Subject: [PATCH 051/172] no, we can't --- integration/grid.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/integration/grid.py b/integration/grid.py index 5ce4179ec..064319f74 100644 --- a/integration/grid.py +++ b/integration/grid.py @@ -173,8 +173,6 @@ class StorageServer(object): Note that self.process and self.protocol will be new instances after this. """ - # XXX per review comments, _can_ we make this "return a new - # instance" instead of mutating? self.process.transport.signalProcess('TERM') yield self.protocol.exited self.process = yield _run_node( From c4ac548cba2c397774a5d2af3f09d1bf0a642dbc Mon Sep 17 00:00:00 2001 From: meejah Date: Sat, 29 Jul 2023 13:08:01 -0600 Subject: [PATCH 052/172] reactor from fixture --- integration/conftest.py | 2 +- integration/grid.py | 7 +------ 2 files changed, 2 insertions(+), 7 deletions(-) diff --git a/integration/conftest.py b/integration/conftest.py index d2024ce98..04dc400a2 100644 --- a/integration/conftest.py +++ b/integration/conftest.py @@ -117,7 +117,7 @@ def reactor(): @pytest.fixture(scope='session') @log_call(action_type=u"integration:port_allocator", include_result=False) def port_allocator(reactor): - return create_port_allocator(start_port=45000) + return create_port_allocator(reactor, start_port=45000) @pytest.fixture(scope='session') diff --git a/integration/grid.py b/integration/grid.py index 064319f74..343bd779f 100644 --- a/integration/grid.py +++ b/integration/grid.py @@ -529,7 +529,7 @@ def create_grid(reactor, request, temp_dir, flog_gatherer, port_allocator): returnValue(grid) -def create_port_allocator(start_port): +def create_port_allocator(reactor, start_port): """ Returns a new port-allocator .. which is a zero-argument function that returns Deferreds that fire with new, sequential ports @@ -546,11 +546,6 @@ def create_port_allocator(start_port): """ port = [start_port - 1] - # import stays here to not interfere with reactor selection -- but - # maybe this function should be arranged to be called once from a - # fixture (with the reactor)? - from twisted.internet import reactor - class NothingProtocol(Protocol): """ I do nothing. 
From fe96defa2b2e6f7934f97bf76f0b651b1c20b191 Mon Sep 17 00:00:00 2001 From: meejah Date: Sat, 29 Jul 2023 13:15:21 -0600 Subject: [PATCH 053/172] use existing port-allocator instead --- integration/conftest.py | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/integration/conftest.py b/integration/conftest.py index 04dc400a2..46f5a0a44 100644 --- a/integration/conftest.py +++ b/integration/conftest.py @@ -21,7 +21,7 @@ from eliot import ( from twisted.python.filepath import FilePath from twisted.python.procutils import which -from twisted.internet.defer import DeferredList +from twisted.internet.defer import DeferredList, succeed from twisted.internet.error import ( ProcessExitedAlready, ProcessTerminated, @@ -117,7 +117,16 @@ def reactor(): @pytest.fixture(scope='session') @log_call(action_type=u"integration:port_allocator", include_result=False) def port_allocator(reactor): - return create_port_allocator(reactor, start_port=45000) + from allmydata.util.iputil import allocate_tcp_port + + # these will appear basically random, which can make especially + # manual debugging harder but we're re-using code instead of + # writing our own...so, win? + def allocate(): + port = allocate_tcp_port() + return succeed(port) + return allocate + #return create_port_allocator(reactor, start_port=45000) @pytest.fixture(scope='session') From 7a8752c969d8dc64e3e68ba944f0bf98b4e33f48 Mon Sep 17 00:00:00 2001 From: meejah Date: Sat, 29 Jul 2023 13:18:23 -0600 Subject: [PATCH 054/172] docstring, remove duplicate port-allocator --- integration/conftest.py | 4 +--- integration/grid.py | 52 ++++++----------------------------------- 2 files changed, 8 insertions(+), 48 deletions(-) diff --git a/integration/conftest.py b/integration/conftest.py index 46f5a0a44..55a0bbbb5 100644 --- a/integration/conftest.py +++ b/integration/conftest.py @@ -47,6 +47,7 @@ from .grid import ( create_grid, ) from allmydata.node import read_config +from allmydata.util.iputil import allocate_tcp_port # No reason for HTTP requests to take longer than four minutes in the # integration tests. See allmydata/scripts/common_http.py for usage. @@ -117,8 +118,6 @@ def reactor(): @pytest.fixture(scope='session') @log_call(action_type=u"integration:port_allocator", include_result=False) def port_allocator(reactor): - from allmydata.util.iputil import allocate_tcp_port - # these will appear basically random, which can make especially # manual debugging harder but we're re-using code instead of # writing our own...so, win? @@ -126,7 +125,6 @@ def port_allocator(reactor): port = allocate_tcp_port() return succeed(port) return allocate - #return create_port_allocator(reactor, start_port=45000) @pytest.fixture(scope='session') diff --git a/integration/grid.py b/integration/grid.py index 343bd779f..79b5b45ad 100644 --- a/integration/grid.py +++ b/integration/grid.py @@ -510,11 +510,16 @@ class Grid(object): returnValue(client) -# XXX THINK can we tie a whole *grid* to a single request? (I think -# that's all that makes sense) +# A grid is now forever tied to its original 'request' which is where +# it must hang finalizers off of. The "main" one is a session-level +# fixture so it'll live the life of the tests but it could be +# per-function Grid too. @inlineCallbacks def create_grid(reactor, request, temp_dir, flog_gatherer, port_allocator): """ + Create a new grid. This will have one Introducer but zero + storage-servers or clients; those must be added by a test or + subsequent fixtures. 
""" intro_port = yield port_allocator() introducer = yield create_introducer(reactor, request, temp_dir, flog_gatherer, intro_port) @@ -527,46 +532,3 @@ def create_grid(reactor, request, temp_dir, flog_gatherer, port_allocator): flog_gatherer, ) returnValue(grid) - - -def create_port_allocator(reactor, start_port): - """ - Returns a new port-allocator .. which is a zero-argument function - that returns Deferreds that fire with new, sequential ports - starting at `start_port` skipping any that already appear to have - a listener. - - There can still be a race against other processes allocating ports - -- between the time when we check the status of the port and when - our subprocess starts up. This *could* be mitigated by instructing - the OS to not randomly-allocate ports in some range, and then - using that range here (explicitly, ourselves). - - NB once we're Python3-only this could be an async-generator - """ - port = [start_port - 1] - - class NothingProtocol(Protocol): - """ - I do nothing. - """ - - def port_generator(): - print("Checking port {}".format(port)) - port[0] += 1 - ep = TCP4ServerEndpoint(reactor, port[0], interface="localhost") - d = ep.listen(Factory.forProtocol(NothingProtocol)) - - def good(listening_port): - unlisten_d = maybeDeferred(listening_port.stopListening) - def return_port(_): - return port[0] - unlisten_d.addBoth(return_port) - return unlisten_d - - def try_again(fail): - return port_generator() - - d.addCallbacks(good, try_again) - return d - return port_generator From 67d5c82e103f49fb1d624e3ad6908de885c01842 Mon Sep 17 00:00:00 2001 From: meejah Date: Sat, 29 Jul 2023 13:34:12 -0600 Subject: [PATCH 055/172] codechecks / linter --- integration/conftest.py | 4 ---- integration/grid.py | 8 -------- integration/test_i2p.py | 9 ++++++--- integration/test_vectors.py | 2 +- 4 files changed, 7 insertions(+), 16 deletions(-) diff --git a/integration/conftest.py b/integration/conftest.py index 55a0bbbb5..a26d2043d 100644 --- a/integration/conftest.py +++ b/integration/conftest.py @@ -12,7 +12,6 @@ from time import sleep from os import mkdir, environ from os.path import join, exists from tempfile import mkdtemp -from json import loads from eliot import ( to_file, @@ -37,12 +36,9 @@ from .util import ( _create_node, _tahoe_runner_optional_coverage, await_client_ready, - cli, - generate_ssh_key, block_with_timeout, ) from .grid import ( - create_port_allocator, create_flog_gatherer, create_grid, ) diff --git a/integration/grid.py b/integration/grid.py index 79b5b45ad..94f8c3d7f 100644 --- a/integration/grid.py +++ b/integration/grid.py @@ -26,7 +26,6 @@ from twisted.python.procutils import which from twisted.internet.defer import ( inlineCallbacks, returnValue, - maybeDeferred, Deferred, ) from twisted.internet.task import ( @@ -36,13 +35,6 @@ from twisted.internet.interfaces import ( IProcessTransport, IProcessProtocol, ) -from twisted.internet.endpoints import ( - TCP4ServerEndpoint, -) -from twisted.internet.protocol import ( - Factory, - Protocol, -) from twisted.internet.error import ProcessTerminated from allmydata.node import read_config diff --git a/integration/test_i2p.py b/integration/test_i2p.py index ea3ddb62b..c99c469fa 100644 --- a/integration/test_i2p.py +++ b/integration/test_i2p.py @@ -24,6 +24,7 @@ from allmydata.test.common import ( write_introducer, ) from allmydata.node import read_config +from allmydata.util.iputil import allocate_tcp_port if which("docker") is None: @@ -132,8 +133,10 @@ def i2p_introducer_furl(i2p_introducer, temp_dir): 
@pytest_twisted.inlineCallbacks @pytest.mark.skip("I2P tests are not functioning at all, for unknown reasons") def test_i2p_service_storage(reactor, request, temp_dir, flog_gatherer, i2p_network, i2p_introducer_furl): - yield _create_anonymous_node(reactor, 'carol_i2p', request, temp_dir, flog_gatherer, i2p_network, i2p_introducer_furl) - yield _create_anonymous_node(reactor, 'dave_i2p', request, temp_dir, flog_gatherer, i2p_network, i2p_introducer_furl) + web_port0 = allocate_tcp_port() + web_port1 = allocate_tcp_port() + yield _create_anonymous_node(reactor, 'carol_i2p', web_port0, request, temp_dir, flog_gatherer, i2p_network, i2p_introducer_furl) + yield _create_anonymous_node(reactor, 'dave_i2p', web_port1, request, temp_dir, flog_gatherer, i2p_network, i2p_introducer_furl) # ensure both nodes are connected to "a grid" by uploading # something via carol, and retrieve it using dave. gold_path = join(temp_dir, "gold") @@ -179,7 +182,7 @@ def test_i2p_service_storage(reactor, request, temp_dir, flog_gatherer, i2p_netw @pytest_twisted.inlineCallbacks -def _create_anonymous_node(reactor, name, request, temp_dir, flog_gatherer, i2p_network, introducer_furl): +def _create_anonymous_node(reactor, name, web_port, request, temp_dir, flog_gatherer, i2p_network, introducer_furl): node_dir = FilePath(temp_dir).child(name) print("creating", node_dir.path) diff --git a/integration/test_vectors.py b/integration/test_vectors.py index bd5def8c5..1bcbcffa4 100644 --- a/integration/test_vectors.py +++ b/integration/test_vectors.py @@ -15,7 +15,7 @@ from pytest_twisted import ensureDeferred from . import vectors from .vectors import parameters -from .util import reconfigure, upload +from .util import upload from .grid import Client @mark.parametrize('convergence', parameters.CONVERGENCE_SECRETS) From 112770aeb31a7f95e59718c54ea59c69d842c1d7 Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Mon, 31 Jul 2023 11:07:37 -0400 Subject: [PATCH 056/172] Don't hardcode tox --- setup.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/setup.py b/setup.py index a2e870e8b..86873ad53 100644 --- a/setup.py +++ b/setup.py @@ -435,9 +435,7 @@ setup(name="tahoe-lafs", # also set in __init__.py "paramiko < 2.9", "pytest-timeout", # Does our OpenMetrics endpoint adhere to the spec: - "prometheus-client == 0.11.0", - # CI uses "tox<4", change here too if that becomes different - "tox < 4", + "prometheus-client == 0.11.0" ] + tor_requires + i2p_requires, "tor": tor_requires, "i2p": i2p_requires, From e545ab4a8022c52ee3a450ab501eedc470491d50 Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Tue, 1 Aug 2023 15:31:38 -0400 Subject: [PATCH 057/172] More accurate type --- src/allmydata/storage/http_client.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/allmydata/storage/http_client.py b/src/allmydata/storage/http_client.py index 79f6cfa89..75b6eab22 100644 --- a/src/allmydata/storage/http_client.py +++ b/src/allmydata/storage/http_client.py @@ -41,6 +41,7 @@ from twisted.internet.interfaces import ( IDelayedCall, ) from twisted.internet.ssl import CertificateOptions +from twisted.protocols.tls import TLSMemoryBIOProtocol from twisted.web.client import Agent, HTTPConnectionPool from zope.interface import implementer from hyperlink import DecodedURL @@ -304,7 +305,7 @@ class _StorageClientHTTPSPolicy: return self # IOpenSSLClientConnectionCreator - def clientConnectionForTLS(self, tlsProtocol: object) -> SSL.Connection: + def clientConnectionForTLS(self, tlsProtocol: 
TLSMemoryBIOProtocol) -> SSL.Connection: return SSL.Connection( _TLSContextFactory(self.expected_spki_hash).getContext(), None ) From 009f063067a156ddab95bb3f554c43244cc05fc1 Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Tue, 1 Aug 2023 15:34:40 -0400 Subject: [PATCH 058/172] Stricter type checking --- src/allmydata/storage/http_client.py | 14 ++++++++++---- src/allmydata/storage_client.py | 2 +- 2 files changed, 11 insertions(+), 5 deletions(-) diff --git a/src/allmydata/storage/http_client.py b/src/allmydata/storage/http_client.py index 75b6eab22..b508c07fd 100644 --- a/src/allmydata/storage/http_client.py +++ b/src/allmydata/storage/http_client.py @@ -81,9 +81,13 @@ def _encode_si(si: bytes) -> str: class ClientException(Exception): """An unexpected response code from the server.""" - def __init__(self, code: int, *additional_args): - Exception.__init__(self, code, *additional_args) + def __init__( + self, code: int, message: Optional[str] = None, body: Optional[bytes] = None + ): + Exception.__init__(self, code, message, body) self.code = code + self.message = message + self.body = body register_exception_extractor(ClientException, lambda e: {"response_code": e.code}) @@ -94,7 +98,7 @@ register_exception_extractor(ClientException, lambda e: {"response_code": e.code # Tags are of the form #6.nnn, where the number is documented at # https://www.iana.org/assignments/cbor-tags/cbor-tags.xhtml. Notably, #6.258 # indicates a set. -_SCHEMAS : Mapping[str,Schema] = { +_SCHEMAS: Mapping[str, Schema] = { "get_version": Schema( # Note that the single-quoted (`'`) string keys in this schema # represent *byte* strings - per the CDDL specification. Text strings @@ -305,7 +309,9 @@ class _StorageClientHTTPSPolicy: return self # IOpenSSLClientConnectionCreator - def clientConnectionForTLS(self, tlsProtocol: TLSMemoryBIOProtocol) -> SSL.Connection: + def clientConnectionForTLS( + self, tlsProtocol: TLSMemoryBIOProtocol + ) -> SSL.Connection: return SSL.Connection( _TLSContextFactory(self.expected_spki_hash).getContext(), None ) diff --git a/src/allmydata/storage_client.py b/src/allmydata/storage_client.py index 4efc845b4..69ae2c22b 100644 --- a/src/allmydata/storage_client.py +++ b/src/allmydata/storage_client.py @@ -1428,7 +1428,7 @@ class _FakeRemoteReference(object): result = yield getattr(self.local_object, action)(*args, **kwargs) defer.returnValue(result) except HTTPClientException as e: - raise RemoteException(e.args) + raise RemoteException((e.code, e.message, e.body)) @attr.s From 14ebeba07d527a189a5cdf93c0f85392302b2ca1 Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Tue, 1 Aug 2023 15:28:49 -0400 Subject: [PATCH 059/172] avoid re-computing the current time inside this loop It could lead to funny behavior if we cross a boundary at just the wrong time. Also the debug print could be misleading in such a case. 
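
Concretely, the old code called time.time() separately for the debug print and
once per element inside the liveness comparison, so a server could look alive
in the printout but dead in the filter (or vice versa) if those calls
straddled the cutoff. Snapshotting the clock once makes every comparison use
the same instant. A reduced sketch of the fixed shape (names invented):

    import time

    def alive_servers(last_seen, liveness):
        # one clock snapshot for the whole decision, so every server is
        # measured against the same cutoff
        now = time.time()
        return [t for t in last_seen if now - t <= liveness]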
--- integration/util.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/integration/util.py b/integration/util.py index 31d351bc1..756489120 100644 --- a/integration/util.py +++ b/integration/util.py @@ -622,8 +622,9 @@ def await_client_ready(tahoe, timeout=10, liveness=60*2, minimum_number_of_serve time.sleep(1) continue + now = time.time() print( - f"Now: {time.ctime()}\n" + f"Now: {time.ctime(now)}\n" f"Server last-received-data: {[time.ctime(s['last_received_data']) for s in servers]}" ) @@ -633,7 +634,7 @@ def await_client_ready(tahoe, timeout=10, liveness=60*2, minimum_number_of_serve ] # check that all times are 'recent enough' (it's OK if _some_ servers # are down, we just want to make sure a sufficient number are up) - if len([time.time() - t <= liveness for t in server_times if t is not None]) < minimum_number_of_servers: + if len([now - t <= liveness for t in server_times if t is not None]) < minimum_number_of_servers: print("waiting because at least one server too old") time.sleep(1) continue From a0b78a134e05e33d90e802011b8dfd428d8d358d Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Tue, 1 Aug 2023 15:49:30 -0400 Subject: [PATCH 060/172] Leave a hint about what successful "bootstrap" looks like --- integration/conftest.py | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) diff --git a/integration/conftest.py b/integration/conftest.py index c94c05429..9a7a47ec4 100644 --- a/integration/conftest.py +++ b/integration/conftest.py @@ -575,6 +575,32 @@ def tor_network(reactor, temp_dir, chutney, request): request.addfinalizer(cleanup) pytest_twisted.blockon(chutney(("start", basic_network))) + + # Wait for the nodes to "bootstrap" - ie, form a network among themselves. + # Successful bootstrap is reported with a message something like: + # + # Everything bootstrapped after 151 sec + # Bootstrap finished: 151 seconds + # Node status: + # test000a : 100, done , Done + # test001a : 100, done , Done + # test002a : 100, done , Done + # test003r : 100, done , Done + # test004r : 100, done , Done + # test005r : 100, done , Done + # test006r : 100, done , Done + # test007r : 100, done , Done + # test008c : 100, done , Done + # test009c : 100, done , Done + # Published dir info: + # test000a : 100, all nodes , desc md md_cons ns_cons , Dir info cached + # test001a : 100, all nodes , desc md md_cons ns_cons , Dir info cached + # test002a : 100, all nodes , desc md md_cons ns_cons , Dir info cached + # test003r : 100, all nodes , desc md md_cons ns_cons , Dir info cached + # test004r : 100, all nodes , desc md md_cons ns_cons , Dir info cached + # test005r : 100, all nodes , desc md md_cons ns_cons , Dir info cached + # test006r : 100, all nodes , desc md md_cons ns_cons , Dir info cached + # test007r : 100, all nodes , desc md md_cons ns_cons , Dir info cached pytest_twisted.blockon(chutney(("wait_for_bootstrap", basic_network))) # print some useful stuff From 871df0b1b4e5d5264c9cd244b02eac09a690510f Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Tue, 1 Aug 2023 15:49:44 -0400 Subject: [PATCH 061/172] Dump some more details about what we're waiting for --- integration/util.py | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/integration/util.py b/integration/util.py index 756489120..768741bd8 100644 --- a/integration/util.py +++ b/integration/util.py @@ -623,18 +623,22 @@ def await_client_ready(tahoe, timeout=10, liveness=60*2, minimum_number_of_serve continue now = time.time() - print( - f"Now: 
{time.ctime(now)}\n" - f"Server last-received-data: {[time.ctime(s['last_received_data']) for s in servers]}" - ) - server_times = [ server['last_received_data'] - for server in servers + for server + in servers + if server['last_received_data'] is not None ] + print( + f"Now: {time.ctime(now)}\n" + f"Liveness required: {liveness}\n" + f"Server last-received-data: {[time.ctime(s) for s in server_times]}\n" + f"Server ages: {[now - s for s in server_times]}\n" + ) + # check that all times are 'recent enough' (it's OK if _some_ servers # are down, we just want to make sure a sufficient number are up) - if len([now - t <= liveness for t in server_times if t is not None]) < minimum_number_of_servers: + if len([now - t <= liveness for t in server_times]) < minimum_number_of_servers: print("waiting because at least one server too old") time.sleep(1) continue From 9d670e54e29cb2c249100576949539cddc53b7f8 Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Tue, 1 Aug 2023 15:56:02 -0400 Subject: [PATCH 062/172] Get the liveness filter condition right --- integration/util.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/integration/util.py b/integration/util.py index 768741bd8..7e10b4315 100644 --- a/integration/util.py +++ b/integration/util.py @@ -638,8 +638,12 @@ def await_client_ready(tahoe, timeout=10, liveness=60*2, minimum_number_of_serve # check that all times are 'recent enough' (it's OK if _some_ servers # are down, we just want to make sure a sufficient number are up) - if len([now - t <= liveness for t in server_times]) < minimum_number_of_servers: - print("waiting because at least one server too old") + alive = [t for t in server_times if now - t <= liveness] + if len(alive) < minimum_number_of_servers: + print( + f"waiting because we found {len(alive)} servers " + f"and want {minimum_number_of_servers}" + ) time.sleep(1) continue From b8ee7a4e98e0968d6bc2d8a60f8638a83b1e04ce Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Tue, 1 Aug 2023 15:56:20 -0400 Subject: [PATCH 063/172] news fragment --- newsfragments/4055.minor | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 newsfragments/4055.minor diff --git a/newsfragments/4055.minor b/newsfragments/4055.minor new file mode 100644 index 000000000..e69de29bb From e3f30d8e58fa73dbd3a2af870cb1b2c1252eb184 Mon Sep 17 00:00:00 2001 From: meejah Date: Wed, 2 Aug 2023 14:48:36 -0600 Subject: [PATCH 064/172] fix comments about tor/chutney in integration config --- integration/conftest.py | 29 +++++++++++++++++------------ 1 file changed, 17 insertions(+), 12 deletions(-) diff --git a/integration/conftest.py b/integration/conftest.py index 171310570..89de83cdb 100644 --- a/integration/conftest.py +++ b/integration/conftest.py @@ -308,19 +308,10 @@ def bob(reactor, temp_dir, introducer_furl, flog_gatherer, storage_nodes, reques 'Tor tests are unstable on Windows') def chutney(reactor, temp_dir: str) -> tuple[str, dict[str, str]]: """ - Instantiate the "networks/hs-v3" Chutney configuration for a local - Tor network. + Install the Chutney software that is required to run a small local Tor grid. - This provides a small, local Tor network that can run v3 Onion - Services. This has 10 tor processes: 3 authorities, 5 - exits+relays, a client (and one service-hosting node we don't use). - - We pin a Chutney revision, so things shouldn't change. Currently, - the ONLY node that exposes a valid SocksPort is "008c" (the - client) on 9008. 
-
-    The control ports start at 8000 (so the ControlPort for the one
-    client node is 8008).
+    (Chutney lacks the normal "python stuff", so we can't just declare
+    it in Tox or similar dependencies.)
     """
     # Try to find Chutney already installed in the environment.
     try:
@@ -404,6 +395,20 @@ def tor_network(reactor, temp_dir, chutney, request):
     """
     Build a basic Tor network.
 
+    Instantiate the "networks/basic" Chutney configuration for a local
+    Tor network.
+
+    This provides a small, local Tor network that can run v3 Onion
+    Services. It has 3 authorities, 5 relays and 2 clients.
+
+    The 'chutney' fixture pins a Chutney git revision, so things
+    shouldn't change. This network has two clients, which are the only
+    nodes with valid SocksPort configuration ("008c" and "009c", ports
+    9008 and 9009).
+
+    The control ports start at 8000 (so the ControlPorts for the client
+    nodes are 8008 and 8009).
+
     :param chutney: The root directory of a Chutney checkout and a dict
         of additional environment variables to set so a Python process
         can use it.

From 8ec7f5485a1836c8a79f689a0b609d94aa0caf88 Mon Sep 17 00:00:00 2001
From: meejah
Date: Wed, 2 Aug 2023 14:48:59 -0600
Subject: [PATCH 065/172] upload() needs the actual alice fixture

---
 integration/test_vectors.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/integration/test_vectors.py b/integration/test_vectors.py
index 1bcbcffa4..f53ec1741 100644
--- a/integration/test_vectors.py
+++ b/integration/test_vectors.py
@@ -41,7 +41,7 @@ async def test_capability(reactor, request, alice, case, expected):
         reactor, (1, case.params.required, case.params.total), case.convergence, case.segment_size)
 
     # upload data in the correct format
-    actual = upload(alice.process, case.fmt, case.data)
+    actual = upload(alice, case.fmt, case.data)
 
     # compare the resulting cap to the expected result
     assert actual == expected

From bd0bfa4ab7c3cc366503ef088b8497f60dd4388b Mon Sep 17 00:00:00 2001
From: meejah
Date: Wed, 2 Aug 2023 14:49:36 -0600
Subject: [PATCH 066/172] define -> frozen

Co-authored-by: Jean-Paul Calderone
---
 integration/conftest.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/integration/conftest.py b/integration/conftest.py
index 171310570..36cda8f45 100644
--- a/integration/conftest.py
+++ b/integration/conftest.py
@@ -382,7 +382,7 @@ def chutney(reactor, temp_dir: str) -> tuple[str, dict[str, str]]:
     return chutney_dir, {"PYTHONPATH": join(chutney_dir, "lib")}
 
 
-@define
+@frozen
 class ChutneyTorNetwork:
     """
     Represents a running Chutney (tor) network. Returned by the

From 7127ae62a942a329d08c982a49883f3f2ed38ee5 Mon Sep 17 00:00:00 2001
From: meejah
Date: Wed, 2 Aug 2023 14:50:04 -0600
Subject: [PATCH 067/172] fix types

Co-authored-by: Jean-Paul Calderone
---
 integration/conftest.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/integration/conftest.py b/integration/conftest.py
index 36cda8f45..52bffff61 100644
--- a/integration/conftest.py
+++ b/integration/conftest.py
@@ -389,7 +389,7 @@ class ChutneyTorNetwork:
     "tor_network" fixture.
""" dir: FilePath - environ: dict + environ: Mapping[str, str] client_control_port: int @property From 3e2c784e7794de806280bf1c627ae4884c3ef508 Mon Sep 17 00:00:00 2001 From: meejah Date: Wed, 2 Aug 2023 14:58:04 -0600 Subject: [PATCH 068/172] likely to be more-right --- integration/grid.py | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/integration/grid.py b/integration/grid.py index 94f8c3d7f..00f0dd826 100644 --- a/integration/grid.py +++ b/integration/grid.py @@ -116,13 +116,14 @@ def create_flog_gatherer(reactor, request, temp_dir, flog_binary): flogs = [x for x in listdir(flog_dir) if x.endswith('.flog')] print("Dumping {} flogtool logfiles to '{}'".format(len(flogs), flog_file)) - reactor.spawnProcess( - flog_protocol, - flog_binary, - ( - 'flogtool', 'dump', join(temp_dir, 'flog_gather', flogs[0]) - ), - ) + for flog_path in flogs: + reactor.spawnProcess( + flog_protocol, + flog_binary, + ( + 'flogtool', 'dump', join(temp_dir, 'flog_gather', flog_path) + ), + ) print("Waiting for flogtool to complete") try: pytest_twisted.blockon(flog_protocol.done) From 63f4c6fcc6c421a96827558c569a08065a3dc2a6 Mon Sep 17 00:00:00 2001 From: meejah Date: Wed, 2 Aug 2023 14:58:55 -0600 Subject: [PATCH 069/172] import to top-level --- integration/grid.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/integration/grid.py b/integration/grid.py index 00f0dd826..b9af7ed5d 100644 --- a/integration/grid.py +++ b/integration/grid.py @@ -51,6 +51,7 @@ from .util import ( generate_ssh_key, cli, reconfigure, + _create_node, ) import attr @@ -181,7 +182,6 @@ def create_storage_server(reactor, request, temp_dir, introducer, flog_gatherer, """ Create a new storage server """ - from .util import _create_node node_process = yield _create_node( reactor, request, temp_dir, introducer.furl, flog_gatherer, name, web_port, storage=True, needed=needed, happy=happy, total=total, From f77b6c433778ed91b7c41abf6b2c1ddb3e5dc94a Mon Sep 17 00:00:00 2001 From: meejah Date: Wed, 2 Aug 2023 15:12:38 -0600 Subject: [PATCH 070/172] fix XXX comment + add docstring --- integration/conftest.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/integration/conftest.py b/integration/conftest.py index 89de83cdb..f7fb5f093 100644 --- a/integration/conftest.py +++ b/integration/conftest.py @@ -166,10 +166,12 @@ def flog_gatherer(reactor, temp_dir, flog_binary, request): @pytest.fixture(scope='session') @log_call(action_type=u"integration:grid", include_args=[]) def grid(reactor, request, temp_dir, flog_gatherer, port_allocator): - # XXX think: this creates an "empty" grid (introducer, no nodes); - # do we want to ensure it has some minimum storage-nodes at least? - # (that is, semantically does it make sense that 'a grid' is - # essentially empty, or not?) + """ + Provides a new Grid with a single Introducer and flog-gathering process. + + Notably does _not_ provide storage servers; use the storage_nodes + fixture if your tests need a Grid that can be used for puts / gets. 
+ """ g = pytest_twisted.blockon( create_grid(reactor, request, temp_dir, flog_gatherer, port_allocator) ) From 8b175383af0ce7d4c835bb2a029797730fb4d646 Mon Sep 17 00:00:00 2001 From: meejah Date: Wed, 2 Aug 2023 15:15:33 -0600 Subject: [PATCH 071/172] flake8 --- integration/conftest.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/integration/conftest.py b/integration/conftest.py index be467bb34..313ff36c2 100644 --- a/integration/conftest.py +++ b/integration/conftest.py @@ -7,7 +7,7 @@ from __future__ import annotations import os import sys import shutil -from attr import define +from attr import frozen from time import sleep from os import mkdir, environ from os.path import join, exists @@ -28,6 +28,7 @@ from twisted.internet.error import ( import pytest import pytest_twisted +from typing import Mapping from .util import ( _MagicTextProtocol, From f663581ed32e2d0f1206074cca21151d227bf3bc Mon Sep 17 00:00:00 2001 From: meejah Date: Wed, 2 Aug 2023 16:27:18 -0600 Subject: [PATCH 072/172] temporarily remove new provides() usage --- integration/grid.py | 22 +++++++++++++--------- 1 file changed, 13 insertions(+), 9 deletions(-) diff --git a/integration/grid.py b/integration/grid.py index b9af7ed5d..c39b9cff9 100644 --- a/integration/grid.py +++ b/integration/grid.py @@ -31,10 +31,8 @@ from twisted.internet.defer import ( from twisted.internet.task import ( deferLater, ) -from twisted.internet.interfaces import ( - IProcessTransport, - IProcessProtocol, -) +from twisted.internet.protocol import ProcessProtocol # see ticket 4056 +from twisted.internet.process import Process # see ticket 4056 from twisted.internet.error import ProcessTerminated from allmydata.node import read_config @@ -71,11 +69,17 @@ class FlogGatherer(object): Flog Gatherer process. 
""" + # it would be best to use attr.validators.provides() here with the + # corresponding Twisted interface (IProcessTransport, + # IProcessProtocol) but that is deprecated; please replace with + # our own "provides" as part of + # https://tahoe-lafs.org/trac/tahoe-lafs/ticket/4056#ticket + # insisting on a subclass is narrower than necessary process = attr.ib( - validator=attr.validators.provides(IProcessTransport) + validator=attr.validators.instance_of(Process) ) protocol = attr.ib( - validator=attr.validators.provides(IProcessProtocol) + validator=attr.validators.instance_of(ProcessProtocol) ) furl = attr.ib() @@ -155,7 +159,7 @@ class StorageServer(object): validator=attr.validators.instance_of(TahoeProcess) ) protocol = attr.ib( - validator=attr.validators.provides(IProcessProtocol) + validator=attr.validators.instance_of(ProcessProtocol) ) @inlineCallbacks @@ -207,7 +211,7 @@ class Client(object): validator=attr.validators.instance_of(TahoeProcess) ) protocol = attr.ib( - validator=attr.validators.provides(IProcessProtocol) + validator=attr.validators.instance_of(ProcessProtocol) ) request = attr.ib() # original request, for addfinalizer() @@ -335,7 +339,7 @@ class Introducer(object): validator=attr.validators.instance_of(TahoeProcess) ) protocol = attr.ib( - validator=attr.validators.provides(IProcessProtocol) + validator=attr.validators.instance_of(ProcessProtocol) ) furl = attr.ib() From d0208bc099a3c500a50fceae1fbe1785c0144725 Mon Sep 17 00:00:00 2001 From: meejah Date: Wed, 2 Aug 2023 16:52:29 -0600 Subject: [PATCH 073/172] different Process instance on different platforms --- integration/grid.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/integration/grid.py b/integration/grid.py index c39b9cff9..524da730f 100644 --- a/integration/grid.py +++ b/integration/grid.py @@ -32,7 +32,6 @@ from twisted.internet.task import ( deferLater, ) from twisted.internet.protocol import ProcessProtocol # see ticket 4056 -from twisted.internet.process import Process # see ticket 4056 from twisted.internet.error import ProcessTerminated from allmydata.node import read_config @@ -75,9 +74,7 @@ class FlogGatherer(object): # our own "provides" as part of # https://tahoe-lafs.org/trac/tahoe-lafs/ticket/4056#ticket # insisting on a subclass is narrower than necessary - process = attr.ib( - validator=attr.validators.instance_of(Process) - ) + process = attr.ib() protocol = attr.ib( validator=attr.validators.instance_of(ProcessProtocol) ) From 4710e7b1772eaeaa8e1bc1138a4d36e6cf69ef87 Mon Sep 17 00:00:00 2001 From: meejah Date: Wed, 2 Aug 2023 17:07:09 -0600 Subject: [PATCH 074/172] provide our own provides() validator --- integration/grid.py | 28 ++++++++++------ src/allmydata/storage_client.py | 3 +- src/allmydata/util/attrs_provides.py | 50 ++++++++++++++++++++++++++++ 3 files changed, 69 insertions(+), 12 deletions(-) create mode 100644 src/allmydata/util/attrs_provides.py diff --git a/integration/grid.py b/integration/grid.py index 524da730f..03c3bb6e2 100644 --- a/integration/grid.py +++ b/integration/grid.py @@ -31,9 +31,15 @@ from twisted.internet.defer import ( from twisted.internet.task import ( deferLater, ) -from twisted.internet.protocol import ProcessProtocol # see ticket 4056 +from twisted.internet.interfaces import ( + IProcessTransport, + IProcessProtocol, +) from twisted.internet.error import ProcessTerminated +from allmydata.util.attrs_provides import ( + provides, +) from allmydata.node import read_config from .util import ( _CollectOutputProtocol, @@ -68,15 
+74,15 @@ class FlogGatherer(object):
     Flog Gatherer process.
     """
 
-    # it would be best to use attr.validators.provides() here with the
-    # corresponding Twisted interface (IProcessTransport,
-    # IProcessProtocol) but that is deprecated; please replace with
-    # our own "provides" as part of
+    # it would be best to use attr.validators.provides() here but that
+    # is deprecated; please replace with our own "provides" as part of
     # https://tahoe-lafs.org/trac/tahoe-lafs/ticket/4056#ticket
-    # insisting on a subclass is narrower than necessary
-    process = attr.ib()
+    # for now, insisting on a subclass which is narrower than necessary
+    process = attr.ib(
+        validator=provides(IProcessTransport)
+    )
     protocol = attr.ib(
-        validator=attr.validators.instance_of(ProcessProtocol)
+        validator=provides(IProcessProtocol)
     )
 
     furl = attr.ib()
@@ -156,7 +162,7 @@ class StorageServer(object):
         validator=attr.validators.instance_of(TahoeProcess)
     )
     protocol = attr.ib(
-        validator=attr.validators.instance_of(ProcessProtocol)
+        validator=provides(IProcessProtocol)
     )
 
     @inlineCallbacks
@@ -208,7 +214,7 @@ class Client(object):
         validator=attr.validators.instance_of(TahoeProcess)
    )
     protocol = attr.ib(
-        validator=attr.validators.instance_of(ProcessProtocol)
+        validator=provides(IProcessProtocol)
     )
     request = attr.ib()  # original request, for addfinalizer()
@@ -336,7 +342,7 @@ class Introducer(object):
         validator=attr.validators.instance_of(TahoeProcess)
     )
     protocol = attr.ib(
-        validator=attr.validators.instance_of(ProcessProtocol)
+        validator=provides(IProcessProtocol)
     )
 
     furl = attr.ib()
diff --git a/src/allmydata/storage_client.py b/src/allmydata/storage_client.py
index d205edf08..8de3a9ca9 100644
--- a/src/allmydata/storage_client.py
+++ b/src/allmydata/storage_client.py
@@ -88,6 +88,7 @@ from allmydata.util.rrefutil import add_version_to_remote_reference
 from allmydata.util.hashutil import permute_server_hash
 from allmydata.util.dictutil import BytesKeyDict, UnicodeKeyDict
 from allmydata.util.deferredutil import async_to_deferred, race
+from allmydata.util.attrs_provides import provides
 from allmydata.storage.http_client import (
     StorageClient, StorageClientImmutables, StorageClientGeneral,
     ClientException as HTTPClientException, StorageClientMutables,
@@ -659,7 +660,7 @@ class _FoolscapStorage(object):
     permutation_seed = attr.ib()
     tubid = attr.ib()
 
-    storage_server = attr.ib(validator=attr.validators.provides(IStorageServer))
+    storage_server = attr.ib(validator=provides(IStorageServer))
 
     _furl = attr.ib()
     _short_description = attr.ib()
diff --git a/src/allmydata/util/attrs_provides.py b/src/allmydata/util/attrs_provides.py
new file mode 100644
index 000000000..4282c3d38
--- /dev/null
+++ b/src/allmydata/util/attrs_provides.py
@@ -0,0 +1,50 @@
+"""
+Utilities related to attrs
+
+Handling for zope.interface is deprecated in attrs so we copy the
+relevant support method here since we depend on zope.interface anyway
+"""
+
+from attr._make import attrs, attrib
+
+
+@attrs(repr=False, slots=True, hash=True)
+class _ProvidesValidator:
+    interface = attrib()
+
+    def __call__(self, inst, attr, value):
+        """
+        We use a callable class to be able to change the ``__repr__``.
+        """
+        if not self.interface.providedBy(value):
+            raise TypeError(
+                "'{name}' must provide {interface!r} which {value!r} "
+                "doesn't.".format(
+                    name=attr.name, interface=self.interface, value=value
+                ),
+                attr,
+                self.interface,
+                value,
+            )
+
+    def __repr__(self):
+        return "<provides validator for interface {interface!r}>".format(
+            interface=self.interface
+        )
+
+
+def provides(interface):
+    """
+    A validator that raises a `TypeError` if the initializer is called
+    with an object that does not provide the requested *interface* (checks are
+    performed using ``interface.providedBy(value)`` (see `zope.interface
+    <https://zope.interface.readthedocs.io/en/latest/>`_).
+
+    :param interface: The interface to check for.
+    :type interface: ``zope.interface.Interface``
+
+    :raises TypeError: With a human readable error message, the attribute
+        (of type `attrs.Attribute`), the expected interface, and the
+        value it got.
+    """
+    return _ProvidesValidator(interface)

From cbf3eebc78aeb6a907a6e8a1d3dab84a4e5402ad Mon Sep 17 00:00:00 2001
From: meejah
Date: Wed, 2 Aug 2023 17:08:28 -0600
Subject: [PATCH 075/172] news

---
 newsfragments/4056.bugfix | 3 +++
 1 file changed, 3 insertions(+)
 create mode 100644 newsfragments/4056.bugfix

diff --git a/newsfragments/4056.bugfix b/newsfragments/4056.bugfix
new file mode 100644
index 000000000..1f94de0da
--- /dev/null
+++ b/newsfragments/4056.bugfix
@@ -0,0 +1,3 @@
+Provide our own copy of attrs' "provides()" validator
+
+This validator is deprecated and slated for removal; that project's suggestion is to copy the code to our project.

From 08b594b8be7437b53c7ad71999f130a6a11b54d8 Mon Sep 17 00:00:00 2001
From: Jean-Paul Calderone
Date: Mon, 7 Aug 2023 10:18:45 -0400
Subject: [PATCH 076/172] Declare the Windows orb for easier Windows
 environment setup

---
 .circleci/config.yml | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/.circleci/config.yml b/.circleci/config.yml
index 0c831af04..6b2d84d92 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -11,6 +11,10 @@
 #
 version: 2.1

+orbs:
+  # Pull in CircleCI support for a Windows executor
+  windows: "circleci/windows@5.0.0"
+
 # Every job that pushes a Docker image from Docker Hub must authenticate to
 # it.  Define a couple yaml anchors that can be used to supply the necessary
 # credentials.

From 814ba4c88b58c088b5445420ba35de3ed18f1477 Mon Sep 17 00:00:00 2001
From: Jean-Paul Calderone
Date: Mon, 7 Aug 2023 10:19:07 -0400
Subject: [PATCH 077/172] Add Windows executor holding the Windows test
 environment config

---
 .circleci/config.yml | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/.circleci/config.yml b/.circleci/config.yml
index 6b2d84d92..2375368ca 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -531,6 +531,12 @@ jobs:
 #           PYTHON_VERSION: "2"
 
 executors:
+  windows:
+    # Choose a Windows environment that closest matches our testing
+    # requirements and goals.
+    # https://circleci.com/developer/orbs/orb/circleci/windows#executors-server-2022
+    executor: "win/server-2022@2023.06.1"
+
   nix:
     docker:
       # Run in a highly Nix-capable environment.
From f8db7818128347d6e50dcbba93a59663597fe0d4 Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Mon, 7 Aug 2023 10:19:25 -0400 Subject: [PATCH 078/172] Add a simple test job to see if the other pieces work --- .circleci/config.yml | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/.circleci/config.yml b/.circleci/config.yml index 2375368ca..307d3ca69 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -116,6 +116,8 @@ workflows: - "another-locale": {} + - "windows-server-2022" + - "integration": # Run even the slow integration tests here. We need the `--` to # sneak past tox and get to pytest. @@ -137,6 +139,11 @@ workflows: when: "<< pipeline.parameters.build-images >>" jobs: + windows-server-2022: + steps: + - "run": | + Write-Host 'Hello, world.' + codechecks: docker: - <<: *DOCKERHUB_AUTH From d050faac92865a25c4b74f816f5b9796b586715f Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Mon, 7 Aug 2023 10:27:52 -0400 Subject: [PATCH 079/172] make the config match the schema --- .circleci/config.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 307d3ca69..07bdc7087 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -140,6 +140,7 @@ workflows: jobs: windows-server-2022: + executor: "windows" steps: - "run": | Write-Host 'Hello, world.' @@ -542,7 +543,8 @@ executors: # Choose a Windows environment that closest matches our testing # requirements and goals. # https://circleci.com/developer/orbs/orb/circleci/windows#executors-server-2022 - executor: "win/server-2022@2023.06.1" + machine: + image: "win/server-2022@2023.06.1" nix: docker: From a8d582237c89c7829c93cc4a744f10a63e17e177 Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Mon, 7 Aug 2023 10:31:16 -0400 Subject: [PATCH 080/172] dump the useless orb --- .circleci/config.yml | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 07bdc7087..b9866ad91 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -11,10 +11,6 @@ # version: 2.1 -orbs: - # Pull in CircleCI support for a Windows executor - windows: "circleci/windows@5.0.0" - # Every job that pushes a Docker image from Docker Hub must authenticate to # it. Define a couple yaml anchors that can be used to supply the necessary # credentials. @@ -544,7 +540,7 @@ executors: # requirements and goals. # https://circleci.com/developer/orbs/orb/circleci/windows#executors-server-2022 machine: - image: "win/server-2022@2023.06.1" + image: "windows-server-2022-gui:2023.06.1"" nix: docker: From f826914c589f7e0505a77d0cff3f6b24b2c2b669 Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Mon, 7 Aug 2023 10:31:32 -0400 Subject: [PATCH 081/172] syntax --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index b9866ad91..6e46d89a5 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -540,7 +540,7 @@ executors: # requirements and goals. 
# https://circleci.com/developer/orbs/orb/circleci/windows#executors-server-2022 machine: - image: "windows-server-2022-gui:2023.06.1"" + image: "windows-server-2022-gui:2023.06.1" nix: docker: From 4abbadda47faed2d2279787fad6058a83756eb31 Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Mon, 7 Aug 2023 10:32:31 -0400 Subject: [PATCH 082/172] Try a different tag https://circleci.com/developer/machine/image/windows-server-2022-gui says "2023.06.1" is a valid tag but real execution says "Job was rejected because resource class medium, image windows-server-2022-gui:2023.06.1 is not a valid resource class". Is it even complaining about the image tag? Or is it complaining about the resource class? I don't know. I didn't touch the resource class though. --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 6e46d89a5..e4c061da5 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -540,7 +540,7 @@ executors: # requirements and goals. # https://circleci.com/developer/orbs/orb/circleci/windows#executors-server-2022 machine: - image: "windows-server-2022-gui:2023.06.1" + image: "windows-server-2022-gui:current" nix: docker: From bd9d2e08ef4746516d46e168db20b60b7f859dc6 Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Mon, 7 Aug 2023 10:35:11 -0400 Subject: [PATCH 083/172] eh? --- .circleci/config.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index e4c061da5..63bec2dde 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -136,7 +136,8 @@ workflows: jobs: windows-server-2022: - executor: "windows" + executor: + name: "windows" steps: - "run": | Write-Host 'Hello, world.' From 77c677ffc0ab957a0bb089d98f62c3eb9e78d7e3 Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Mon, 7 Aug 2023 10:37:23 -0400 Subject: [PATCH 084/172] just like the example --- .circleci/config.yml | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 63bec2dde..e1be57175 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -136,8 +136,10 @@ workflows: jobs: windows-server-2022: - executor: - name: "windows" + machine: + image: "windows-server-2022-gui:current" + shell: "powershell.exe -ExecutionPolicy Bypass" + resource_class: "windows.medium" steps: - "run": | Write-Host 'Hello, world.' From 422d4ee9ccfd16e3334a7a0ec743380cd89d1b3b Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Mon, 7 Aug 2023 10:37:55 -0400 Subject: [PATCH 085/172] previous rev started an environment, try to get a little closer to the one we want a more precise tag that won't shift around underneath us --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index e1be57175..363c0ee6e 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -137,7 +137,7 @@ workflows: jobs: windows-server-2022: machine: - image: "windows-server-2022-gui:current" + image: "windows-server-2022-gui:2023.06.1" shell: "powershell.exe -ExecutionPolicy Bypass" resource_class: "windows.medium" steps: From 6400a396615905ebf8fa4ac564f7d966d9d3866e Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Mon, 7 Aug 2023 10:39:13 -0400 Subject: [PATCH 086/172] so ... can we use an executor? 
--- .circleci/config.yml | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 363c0ee6e..368156bb3 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -136,10 +136,7 @@ workflows: jobs: windows-server-2022: - machine: - image: "windows-server-2022-gui:2023.06.1" - shell: "powershell.exe -ExecutionPolicy Bypass" - resource_class: "windows.medium" + executor: "windows" steps: - "run": | Write-Host 'Hello, world.' @@ -543,7 +540,9 @@ executors: # requirements and goals. # https://circleci.com/developer/orbs/orb/circleci/windows#executors-server-2022 machine: - image: "windows-server-2022-gui:current" + image: "windows-server-2022-gui:2023.06.1" + shell: "powershell.exe -ExecutionPolicy Bypass" + resource_class: "windows.medium" nix: docker: From d369dc0f2cdeeca255f53f5418540d3783d9f35f Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Mon, 7 Aug 2023 10:43:08 -0400 Subject: [PATCH 087/172] try to install tox --- .circleci/config.yml | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 368156bb3..f86b46636 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -138,8 +138,10 @@ jobs: windows-server-2022: executor: "windows" steps: - - "run": | - Write-Host 'Hello, world.' + - "checkout" + + - "run": + <<: *INSTALL_TOX codechecks: docker: From e6e38128bc1ce1a6630eb95eeadcd6433662cb8e Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Mon, 7 Aug 2023 10:43:58 -0400 Subject: [PATCH 088/172] yaml syntax --- .circleci/config.yml | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index f86b46636..e70daec41 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -135,14 +135,6 @@ workflows: when: "<< pipeline.parameters.build-images >>" jobs: - windows-server-2022: - executor: "windows" - steps: - - "checkout" - - - "run": - <<: *INSTALL_TOX - codechecks: docker: - <<: *DOCKERHUB_AUTH @@ -161,6 +153,14 @@ jobs: command: | ~/.local/bin/tox -e codechecks + windows-server-2022: + executor: "windows" + steps: + - "checkout" + + - "run": + <<: *INSTALL_TOX + pyinstaller: docker: - <<: *DOCKERHUB_AUTH From 862bda9e631770a06ffa3d9c86b717d2e2b9e0a3 Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Mon, 7 Aug 2023 10:47:36 -0400 Subject: [PATCH 089/172] attempt to do something useful --- .circleci/config.yml | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/.circleci/config.yml b/.circleci/config.yml index e70daec41..5856af47d 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -161,6 +161,16 @@ jobs: - "run": <<: *INSTALL_TOX + - "run": + name: "Display tool versions" + command: | + python misc/build_helpers/show-tool-versions.py + + - "run": + name: "Run Unit Tests" + command: | + python -m tox + pyinstaller: docker: - <<: *DOCKERHUB_AUTH From e2fe1af3d9303ba17e0c60b3e5d2bba43e4340a1 Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Mon, 7 Aug 2023 11:03:16 -0400 Subject: [PATCH 090/172] Configure Hypothesis for the Windows job --- .circleci/config.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.circleci/config.yml b/.circleci/config.yml index 5856af47d..19d70adca 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -155,6 +155,9 @@ jobs: windows-server-2022: executor: "windows" + environment: + TAHOE_LAFS_HYPOTHESIS_PROFILE: "ci" + steps: - "checkout" From 
3c8a11d46822a9e7508567b970579a2d66276bf3 Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Mon, 7 Aug 2023 11:12:47 -0400 Subject: [PATCH 091/172] pick a Python to support on Windows --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 19d70adca..e9e5810e5 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -172,7 +172,7 @@ jobs: - "run": name: "Run Unit Tests" command: | - python -m tox + python -m tox -e py311-coverage pyinstaller: docker: From b440065952cfe9c98d65ab7728571a93aba50a6e Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Mon, 7 Aug 2023 12:59:44 -0400 Subject: [PATCH 092/172] avoid trying to call os.getuid on windows --- src/allmydata/test/cli/test_grid_manager.py | 6 ++++-- src/allmydata/test/common.py | 4 ++++ src/allmydata/test/test_client.py | 3 ++- src/allmydata/test/test_node.py | 7 ++++--- 4 files changed, 14 insertions(+), 6 deletions(-) diff --git a/src/allmydata/test/cli/test_grid_manager.py b/src/allmydata/test/cli/test_grid_manager.py index 604cd6b7b..b44b322d2 100644 --- a/src/allmydata/test/cli/test_grid_manager.py +++ b/src/allmydata/test/cli/test_grid_manager.py @@ -23,6 +23,9 @@ import click.testing from ..common_util import ( run_cli, ) +from ..common import ( + superuser, +) from twisted.internet.defer import ( inlineCallbacks, ) @@ -34,7 +37,6 @@ from twisted.python.runtime import ( ) from allmydata.util import jsonbytes as json - class GridManagerCommandLine(TestCase): """ Test the mechanics of the `grid-manager` command @@ -223,7 +225,7 @@ class GridManagerCommandLine(TestCase): ) @skipIf(platform.isWindows(), "We don't know how to set permissions on Windows.") - @skipIf(os.getuid() == 0, "cannot test as superuser with all permissions") + @skipIf(superuser, "cannot test as superuser with all permissions") def test_sign_bad_perms(self): """ Error reported if we can't create certificate file diff --git a/src/allmydata/test/common.py b/src/allmydata/test/common.py index db2921e86..d61bc28f1 100644 --- a/src/allmydata/test/common.py +++ b/src/allmydata/test/common.py @@ -117,6 +117,10 @@ from subprocess import ( PIPE, ) +# Is the process running as an OS user with elevated privileges (ie, root)? +# We only know how to determine this for POSIX systems. 
+superuser = getattr(os, "getuid", lambda: -1)() == 0 + EMPTY_CLIENT_CONFIG = config_from_string( "/dev/null", "tub.port", diff --git a/src/allmydata/test/test_client.py b/src/allmydata/test/test_client.py index 86c95a310..c0cce2809 100644 --- a/src/allmydata/test/test_client.py +++ b/src/allmydata/test/test_client.py @@ -77,6 +77,7 @@ from allmydata.scripts.common import ( from foolscap.api import flushEventualQueue import allmydata.test.common_util as testutil from .common import ( + superuser, EMPTY_CLIENT_CONFIG, SyncTestCase, AsyncBrokenTestCase, @@ -151,7 +152,7 @@ class Basic(testutil.ReallyEqualMixin, unittest.TestCase): # EnvironmentError when reading a file that really exists), on # windows, please fix this @skipIf(platform.isWindows(), "We don't know how to set permissions on Windows.") - @skipIf(os.getuid() == 0, "cannot test as superuser with all permissions") + @skipIf(superuser, "cannot test as superuser with all permissions") def test_unreadable_config(self): basedir = "test_client.Basic.test_unreadable_config" os.mkdir(basedir) diff --git a/src/allmydata/test/test_node.py b/src/allmydata/test/test_node.py index 1469ec5b2..90da877fb 100644 --- a/src/allmydata/test/test_node.py +++ b/src/allmydata/test/test_node.py @@ -62,6 +62,7 @@ from .common import ( ConstantAddresses, SameProcessStreamEndpointAssigner, UseNode, + superuser, ) def port_numbers(): @@ -325,7 +326,7 @@ class TestCase(testutil.SignalMixin, unittest.TestCase): self.assertEqual(config.items("nosuch", default), default) @skipIf(platform.isWindows(), "We don't know how to set permissions on Windows.") - @skipIf(os.getuid() == 0, "cannot test as superuser with all permissions") + @skipIf(superuser, "cannot test as superuser with all permissions") def test_private_config_unreadable(self): """ Asking for inaccessible private config is an error @@ -341,7 +342,7 @@ class TestCase(testutil.SignalMixin, unittest.TestCase): config.get_or_create_private_config("foo") @skipIf(platform.isWindows(), "We don't know how to set permissions on Windows.") - @skipIf(os.getuid() == 0, "cannot test as superuser with all permissions") + @skipIf(superuser, "cannot test as superuser with all permissions") def test_private_config_unreadable_preexisting(self): """ error if reading private config data fails @@ -398,7 +399,7 @@ class TestCase(testutil.SignalMixin, unittest.TestCase): self.assertEqual(len(counter), 1) # don't call unless necessary self.assertEqual(value, "newer") - @skipIf(os.getuid() == 0, "cannot test as superuser with all permissions") + @skipIf(superuser, "cannot test as superuser with all permissions") def test_write_config_unwritable_file(self): """ Existing behavior merely logs any errors upon writing From e92e7faeea00c1dacef75d36bf1815b5bdf296ad Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Mon, 7 Aug 2023 15:25:58 -0400 Subject: [PATCH 093/172] how much difference does this make --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index e9e5810e5..d204a5531 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -557,7 +557,7 @@ executors: machine: image: "windows-server-2022-gui:2023.06.1" shell: "powershell.exe -ExecutionPolicy Bypass" - resource_class: "windows.medium" + resource_class: "windows.large" nix: docker: From 9f5173e7302bb1258f64e03b5f8506587a816a4f Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Mon, 7 Aug 2023 15:58:33 -0400 Subject: [PATCH 094/172] attempt to report coverage results to 
coveralls --- .circleci/config.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.circleci/config.yml b/.circleci/config.yml index d204a5531..431b2c70a 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -174,6 +174,12 @@ jobs: command: | python -m tox -e py311-coverage + - "run": + name: "Upload Coverage" + command: | + python -m pip install coveralls + python -m coveralls + pyinstaller: docker: - <<: *DOCKERHUB_AUTH From f649968ab5442dc1fa76554084cae99fca660a24 Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Mon, 7 Aug 2023 16:21:27 -0400 Subject: [PATCH 095/172] configure the coveralls tool so it can upload the data --- .coveralls.yml | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 .coveralls.yml diff --git a/.coveralls.yml b/.coveralls.yml new file mode 100644 index 000000000..1486cf5b3 --- /dev/null +++ b/.coveralls.yml @@ -0,0 +1,2 @@ +service_name: "circleci" +repo_token: "JPf16rLB7T2yjgATIxFzTsEgMdN1UNq6o" From 371f82bb4da019df5581ed0c41e7d500b902ac3f Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Mon, 7 Aug 2023 16:21:50 -0400 Subject: [PATCH 096/172] avoid problems with trial and ENOSPC I don't know if these will show up in this environment ... just copy/pasted from the GitHub Actions config. --- .circleci/config.yml | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 431b2c70a..2f580192d 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -172,7 +172,13 @@ jobs: - "run": name: "Run Unit Tests" command: | - python -m tox -e py311-coverage + # On Windows, a non-blocking pipe might respond (when emulating + # Unix-y API) with ENOSPC to indicate buffer full. Trial doesn't + # handle this well, so it breaks test runs. To attempt to solve + # this, we pipe the output through passthrough.py that will + # hopefully be able to do the right thing by using Windows APIs. + python -m pip install twisted pywin32 + python -m tox -e py311-coverage | python misc/windows-enospc/passthrough.py - "run": name: "Upload Coverage" From 4c0b72b353615499f5eb3213d65bdc7644982b8c Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Mon, 7 Aug 2023 16:38:56 -0400 Subject: [PATCH 097/172] Delightfully, this deterministically breaks in the CircleCI env --- .circleci/config.yml | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 2f580192d..431b2c70a 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -172,13 +172,7 @@ jobs: - "run": name: "Run Unit Tests" command: | - # On Windows, a non-blocking pipe might respond (when emulating - # Unix-y API) with ENOSPC to indicate buffer full. Trial doesn't - # handle this well, so it breaks test runs. To attempt to solve - # this, we pipe the output through passthrough.py that will - # hopefully be able to do the right thing by using Windows APIs. 
-          python -m pip install twisted pywin32
-          python -m tox -e py311-coverage | python misc/windows-enospc/passthrough.py
+          python -m tox -e py311-coverage

From e17c8f618ea9d9a2f9fd169d75f714401121c35e Mon Sep 17 00:00:00 2001
From: Jean-Paul Calderone
Date: Mon, 7 Aug 2023 16:39:52 -0400
Subject: [PATCH 098/172] run a quick subset of the tests to more quickly test
 the following bits

---
 .circleci/config.yml | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/.circleci/config.yml b/.circleci/config.yml
index 431b2c70a..42e59075b 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -171,6 +171,8 @@ jobs:
 
     - "run":
         name: "Run Unit Tests"
+        environment:
+          TEST_SUITE: "allmydata.test.test_uri"
        command: |
          python -m tox -e py311-coverage

From b092dd57cf62b174f731e10c231d26235e72ec1f Mon Sep 17 00:00:00 2001
From: Jean-Paul Calderone
Date: Mon, 7 Aug 2023 17:02:55 -0400
Subject: [PATCH 099/172] coveralls failed to find .coveralls.yml ...

also tox.ini overrides TEST_SUITE :/ Set it in the right place

---
 .circleci/config.yml | 5 +++--
 .coveralls.yml       | 2 --
 tox.ini              | 2 +-
 3 files changed, 4 insertions(+), 5 deletions(-)
 delete mode 100644 .coveralls.yml

diff --git a/.circleci/config.yml b/.circleci/config.yml
index 42e59075b..077896d92 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -171,15 +171,16 @@ jobs:
 
     - "run":
         name: "Run Unit Tests"
-        environment:
-          TEST_SUITE: "allmydata.test.test_uri"
         command: |
           python -m tox -e py311-coverage
 
     - "run":
         name: "Upload Coverage"
+        environment:
+          COVERALLS_REPO_TOKEN: "JPf16rLB7T2yjgATIxFzTsEgMdN1UNq6o"
         command: |
           python -m pip install coveralls
+          python -m coveralls debug
           python -m coveralls
diff --git a/.coveralls.yml b/.coveralls.yml
deleted file mode 100644
index 1486cf5b3..000000000
--- a/.coveralls.yml
+++ /dev/null
@@ -1,2 +0,0 @@
-service_name: "circleci"
-repo_token: "JPf16rLB7T2yjgATIxFzTsEgMdN1UNq6o"
diff --git a/tox.ini b/tox.ini
index 67a089b0c..18d7767a2 100644
--- a/tox.ini
+++ b/tox.ini
@@ -55,7 +55,7 @@ extras =
 setenv =
   # Define TEST_SUITE in the environment as an aid to constructing the
   # correct test command below.
-  TEST_SUITE = allmydata
+  TEST_SUITE = allmydata.test.test_uri
 
 commands =
   # As an aid to debugging, dump all of the Python packages and their

From a261c1f2025966bdc944f8023185697ed1816895 Mon Sep 17 00:00:00 2001
From: Jean-Paul Calderone
Date: Mon, 7 Aug 2023 17:11:54 -0400
Subject: [PATCH 100/172] try to match the paths from circleci windows for
 coverage path rewriting

---
 .coveragerc | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/.coveragerc b/.coveragerc
index d09554cad..32b803586 100644
--- a/.coveragerc
+++ b/.coveragerc
@@ -19,9 +19,11 @@ skip_covered = True
 source =
 # It looks like this in the checkout
     src/
-# It looks like this in the Windows build environment
+# It looks like this in the GitHub Actions Windows build environment
     D:/a/tahoe-lafs/tahoe-lafs/.tox/py*-coverage/Lib/site-packages/
 # Although sometimes it looks like this instead. Also it looks like this on macOS.
     .tox/py*-coverage/lib/python*/site-packages/
+# And on the CircleCI Windows build environment...
+ .tox/py*-coverage/Lib/site-packages/ # On some Linux CI jobs it looks like this /tmp/tahoe-lafs.tox/py*-coverage/lib/python*/site-packages/ From 5c22bf95b41958d1fbcae5362fb6deea0eed98e9 Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Mon, 7 Aug 2023 17:12:06 -0400 Subject: [PATCH 101/172] maybe we don't need the debug info now --- .circleci/config.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 077896d92..fe1f5f8cb 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -180,7 +180,6 @@ jobs: COVERALLS_REPO_TOKEN: "JPf16rLB7T2yjgATIxFzTsEgMdN1UNq6o" command: | python -m pip install coveralls - python -m coveralls debug python -m coveralls pyinstaller: From 7bc1f9300f3da31f5b283b0de20f30b4d9cdd05d Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Tue, 8 Aug 2023 08:24:24 -0400 Subject: [PATCH 102/172] try to get test results loaded into circleci --- .circleci/config.yml | 20 ++++++++++++++++++-- 1 file changed, 18 insertions(+), 2 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index fe1f5f8cb..ede1cdb8f 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -170,9 +170,17 @@ jobs: python misc/build_helpers/show-tool-versions.py - "run": - name: "Run Unit Tests" + name: "Install Dependencies" command: | - python -m tox -e py311-coverage + python -m pip install .[testenv] .[test] + + - "run": + name: "Run Unit Tests" + environment: + SUBUNITREPORTER_OUTPUT_PATH: "test-results.subunit2" + PYTHONUNBUFFERED: "1" + command: | + python -b -m coverage run -m twisted.trial --reporter=subunitv2-file --rterrors allmydata - "run": name: "Upload Coverage" @@ -182,6 +190,14 @@ jobs: python -m pip install coveralls python -m coveralls + - "run": + name: "Convert Result Log" + command: | + Get-Content -Path test-results.subunit2 -Raw | subunit2junitxml | Out-File -FilePath test-results.xml + + - "store_artifacts": + path: "test-results.xml" + pyinstaller: docker: - <<: *DOCKERHUB_AUTH From 3f37f9aee5975453a5b0d9d4f8a978c5080a2a05 Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Tue, 8 Aug 2023 08:44:15 -0400 Subject: [PATCH 103/172] try to force UTF-8 to make subunitreporter work --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index ede1cdb8f..1946c9346 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -180,7 +180,7 @@ jobs: SUBUNITREPORTER_OUTPUT_PATH: "test-results.subunit2" PYTHONUNBUFFERED: "1" command: | - python -b -m coverage run -m twisted.trial --reporter=subunitv2-file --rterrors allmydata + python -X utf8 -b -m coverage run -m twisted.trial --reporter=subunitv2-file --rterrors allmydata - "run": name: "Upload Coverage" From b03cf0b37b5ca2dbe443aee2c142b8d6ddd4d295 Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Tue, 8 Aug 2023 09:14:06 -0400 Subject: [PATCH 104/172] send the test results to the place circleci expects for processing --- .circleci/config.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.circleci/config.yml b/.circleci/config.yml index 1946c9346..e8815842e 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -195,6 +195,9 @@ jobs: command: | Get-Content -Path test-results.subunit2 -Raw | subunit2junitxml | Out-File -FilePath test-results.xml + - "store_test_results: + path: "test-results.xml" + - "store_artifacts": path: "test-results.xml" From 3a8480126b1813ad6eab3ac93df191176bc1a085 Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone 
Date: Tue, 8 Aug 2023 09:19:20 -0400 Subject: [PATCH 105/172] syntax --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index e8815842e..845ac3662 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -195,7 +195,7 @@ jobs: command: | Get-Content -Path test-results.subunit2 -Raw | subunit2junitxml | Out-File -FilePath test-results.xml - - "store_test_results: + - "store_test_results": path: "test-results.xml" - "store_artifacts": From 286fb206d812f62bd7af08b3332bd3f927e948df Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Tue, 8 Aug 2023 09:59:23 -0400 Subject: [PATCH 106/172] try uncorrupting the xml file --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 845ac3662..74bf3019d 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -193,7 +193,7 @@ jobs: - "run": name: "Convert Result Log" command: | - Get-Content -Path test-results.subunit2 -Raw | subunit2junitxml | Out-File -FilePath test-results.xml + subunit2junitxml test-results.xml - "store_test_results": path: "test-results.xml" From ab9db4964c0f4fa671d78240d1b015c891f39379 Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Tue, 8 Aug 2023 10:28:16 -0400 Subject: [PATCH 107/172] another stab --- .circleci/config.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 74bf3019d..91d46414d 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -180,7 +180,7 @@ jobs: SUBUNITREPORTER_OUTPUT_PATH: "test-results.subunit2" PYTHONUNBUFFERED: "1" command: | - python -X utf8 -b -m coverage run -m twisted.trial --reporter=subunitv2-file --rterrors allmydata + python -X utf8 -b -m coverage run -m twisted.trial --reporter=subunitv2-file --rterrors allmydata.test.test_uri - "run": name: "Upload Coverage" @@ -193,7 +193,7 @@ jobs: - "run": name: "Convert Result Log" command: | - subunit2junitxml test-results.xml + Start-Process subunit2junitxml -Wait -RedirectStandardInput test-results.subunit2 -RedirectStandardOutput test-results.xml - "store_test_results": path: "test-results.xml" From 2649027c74b53499f2bc0315186a14667bac72b8 Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Tue, 8 Aug 2023 10:38:16 -0400 Subject: [PATCH 108/172] back to the complete test suite --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 91d46414d..d7e576298 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -180,7 +180,7 @@ jobs: SUBUNITREPORTER_OUTPUT_PATH: "test-results.subunit2" PYTHONUNBUFFERED: "1" command: | - python -X utf8 -b -m coverage run -m twisted.trial --reporter=subunitv2-file --rterrors allmydata.test.test_uri + python -X utf8 -b -m coverage run -m twisted.trial --reporter=subunitv2-file --rterrors allmydata - "run": name: "Upload Coverage" From 208531cddc5db10c419b6509c3223ed3a297b4e2 Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Tue, 8 Aug 2023 10:38:33 -0400 Subject: [PATCH 109/172] should work without `-X utf8` now --- .circleci/config.yml | 2 +- setup.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index d7e576298..13936321a 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -180,7 +180,7 @@ jobs: SUBUNITREPORTER_OUTPUT_PATH: "test-results.subunit2" PYTHONUNBUFFERED: "1" 
        command: |
-          python -X utf8 -b -m coverage run -m twisted.trial --reporter=subunitv2-file --rterrors allmydata
+          python -b -m coverage run -m twisted.trial --reporter=subunitv2-file --rterrors allmydata
diff --git a/setup.py b/setup.py
index 86873ad53..433721d2a 100644
--- a/setup.py
+++ b/setup.py
@@ -413,7 +413,7 @@ setup(name="tahoe-lafs", # also set in __init__.py
         "pip==22.0.3",
         "wheel==0.37.1",
         "setuptools==60.9.1",
-        "subunitreporter==22.2.0",
+        "subunitreporter==23.8.0",
         "python-subunit==1.4.2",
         "junitxml==0.7",
         "coverage==7.2.5",

From 122655842cd3dd25af9a722639a338caca0fe188 Mon Sep 17 00:00:00 2001
From: Jean-Paul Calderone
Date: Tue, 8 Aug 2023 11:06:31 -0400
Subject: [PATCH 110/172] try to expose the other test run artifacts

---
 .circleci/config.yml | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/.circleci/config.yml b/.circleci/config.yml
index 13936321a..4e4e183a5 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -199,7 +199,10 @@ jobs:
         path: "test-results.xml"
 
     - "store_artifacts":
-        path: "test-results.xml"
+        path: "_trial_temp/test.log"
+
+    - "store_artifacts":
+        path: "eliot.log"

From 085a823dfdfa12a3911a6173ed8659724141af5f Mon Sep 17 00:00:00 2001
From: Jean-Paul Calderone
Date: Tue, 8 Aug 2023 11:55:34 -0400
Subject: [PATCH 111/172] put back the full test suite for tox runs

---
 tox.ini | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tox.ini b/tox.ini
index 18d7767a2..67a089b0c 100644
--- a/tox.ini
+++ b/tox.ini
@@ -55,7 +55,7 @@ extras =
 setenv =
   # Define TEST_SUITE in the environment as an aid to constructing the
   # correct test command below.
-  TEST_SUITE = allmydata.test.test_uri
+  TEST_SUITE = allmydata
 
 commands =
   # As an aid to debugging, dump all of the Python packages and their

From 23628fffd84f6a358c22fccf1ca661ac10e9cfb7 Mon Sep 17 00:00:00 2001
From: Jean-Paul Calderone
Date: Tue, 8 Aug 2023 12:12:45 -0400
Subject: [PATCH 112/172] Try to parameterize the Python version for Windows
 tests

And instantiate the job with two different Python versions

---
 .circleci/config.yml | 25 +++++++++++++++++++------
 1 file changed, 19 insertions(+), 6 deletions(-)

diff --git a/.circleci/config.yml b/.circleci/config.yml
index 4e4e183a5..4181025cc 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -112,7 +112,13 @@ workflows:
 
       - "another-locale":
           {}
 
-      - "windows-server-2022"
+      - "windows-server-2022":
+          name: "Windows Server 2022, Python <<matrix.pythonVersion>>"
+          matrix:
+            parameters:
+              pythonVersion:
+                - "3.9"
+                - "3.11"
 
      - "integration":
          # Run even the slow integration tests here.  We need the `--` to
          # sneak past tox and get to pytest.
@@ -154,6 +160,13 @@ jobs:
 
   windows-server-2022:
+    parameters:
+      pythonVersion:
+        description: >-
+          An argument to pass to the `py` launcher to choose a Python version.
+        
type: "string"
+        default: ""
+
     executor: "windows"
     environment:
       TAHOE_LAFS_HYPOTHESIS_PROFILE: "ci"
 
     steps:
       - "checkout"
 
       - "run":
           name: "Display tool versions"
           command: |
-            python misc/build_helpers/show-tool-versions.py
+            py -<<parameters.pythonVersion>> misc/build_helpers/show-tool-versions.py
 
       - "run":
           name: "Install Dependencies"
           command: |
-            python -m pip install .[testenv] .[test]
+            py -<<parameters.pythonVersion>> -m pip install .[testenv] .[test]
 
       - "run":
           name: "Run Unit Tests"
           environment:
             SUBUNITREPORTER_OUTPUT_PATH: "test-results.subunit2"
             PYTHONUNBUFFERED: "1"
           command: |
-            python -b -m coverage run -m twisted.trial --reporter=subunitv2-file --rterrors allmydata
+            py -<<parameters.pythonVersion>> -b -m coverage run -m twisted.trial --reporter=subunitv2-file --rterrors allmydata
 
       - "run":
           name: "Upload Coverage"
           environment:
             COVERALLS_REPO_TOKEN: "JPf16rLB7T2yjgATIxFzTsEgMdN1UNq6o"
           command: |
-            python -m pip install coveralls
-            python -m coveralls
+            py -<<parameters.pythonVersion>> -m pip install coveralls
+            py -<<parameters.pythonVersion>> -m coveralls

From 67cc25df11d8c202de4ff9addd3b9c798e492009 Mon Sep 17 00:00:00 2001
From: Jean-Paul Calderone
Date: Tue, 8 Aug 2023 12:13:31 -0400
Subject: [PATCH 113/172] drop Windows unit tests from GitHub Actions

This drops Python 3.8 and Python 3.10 Windows coverage.

---
 .github/workflows/ci.yml | 7 -------
 1 file changed, 7 deletions(-)

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index d3862ffad..0f38b0291 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -44,13 +44,6 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        os:
-          - windows-latest
-        python-version:
-          - "3.8"
-          - "3.9"
-          - "3.10"
-          - "3.11"
         include:
           # On macOS don't bother with 3.8, just to get faster builds.
           - os: macos-12

From 8c4d99f812de8254ce610bb9451fcdae4b4edcca Mon Sep 17 00:00:00 2001
From: Jean-Paul Calderone
Date: Tue, 8 Aug 2023 12:22:18 -0400
Subject: [PATCH 114/172] try to do "parallel" coveralls reporting and finish
 it

---
 .circleci/config.yml | 34 +++++++++++++++++++++++++++++++++-
 1 file changed, 33 insertions(+), 1 deletion(-)

diff --git a/.circleci/config.yml b/.circleci/config.yml
index 4181025cc..f7665cc29 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -24,6 +24,11 @@ version: 2.1
 dockerhub-context-template: &DOCKERHUB_CONTEXT
   context: "dockerhub-auth"
 
+# Required environment for using the coveralls tool to upload partial coverage
+# reports and then finish the process.
+coveralls-environment: &COVERALLS_ENVIRONMENT
+  COVERALLS_REPO_TOKEN: "JPf16rLB7T2yjgATIxFzTsEgMdN1UNq6o"
+
 # Next is a Docker executor template that gets the credentials from the
 # environment and supplies them to the executor.
dockerhub-auth-template: &DOCKERHUB_AUTH @@ -134,6 +139,11 @@ workflows: - "docs": {} + - "finish-coverage-report": + requires: + - "Windows Server 2022, Python 3.9" + - "Windows Server 2022, Python 3.11" + images: <<: *IMAGES @@ -141,6 +151,20 @@ workflows: when: "<< pipeline.parameters.build-images >>" jobs: + finish-coverage-report: + docker: + - <<: *DOCKERHUB_AUTH + image: "python:3-slim" + + steps: + - run: + name: "Indicate completion to coveralls.io" + environment: + <<: *COVERALLS_ENVIRONMENT + command: | + pip install coveralls==3.2.0 + python -m coveralls --finish + codechecks: docker: - <<: *DOCKERHUB_AUTH @@ -198,7 +222,15 @@ jobs: - "run": name: "Upload Coverage" environment: - COVERALLS_REPO_TOKEN: "JPf16rLB7T2yjgATIxFzTsEgMdN1UNq6o" + <<: *COVERALLS_ENVIRONMENT + # Mark the data as just one piece of many because we have more + # than one instance of this job (two on Windows now, some on other + # platforms later) which collects and reports coverage. This is + # necessary to cause Coveralls to merge multiple coverage results + # into a single report. Note the merge only happens when we + # "finish" a particular build, as identified by its "build_num" + # (aka "service_number"). + COVERALLS_PARALLEL: "true" command: | py -<> -m pip install coveralls py -<> -m coveralls From df05ed3c8f45ef880113c75ca2825e77e4deb0e4 Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Tue, 8 Aug 2023 13:53:17 -0400 Subject: [PATCH 115/172] maybe ... not gonna use tox at all here? --- .circleci/config.yml | 3 --- 1 file changed, 3 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index f7665cc29..66e8c1553 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -198,9 +198,6 @@ jobs: steps: - "checkout" - - "run": - <<: *INSTALL_TOX - - "run": name: "Display tool versions" command: | From 72739b0606c20c5a1cc0223dd7c1415d103ba7cb Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Tue, 8 Aug 2023 13:57:17 -0400 Subject: [PATCH 116/172] try to be sure we can do the conversion --- .circleci/config.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.circleci/config.yml b/.circleci/config.yml index 66e8c1553..85cdfb815 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -235,6 +235,10 @@ jobs: - "run": name: "Convert Result Log" command: | + # The Python for which we installed subunit is not necessarily on + # %PATH% so (possibly) re-install it with the default Python. + python -m pip install subunit2junitxml junitxml + Start-Process subunit2junitxml -Wait -RedirectStandardInput test-results.subunit2 -RedirectStandardOutput test-results.xml - "store_test_results": From 179f7b4bcb3f2021cfba2a0231ef0b5fc97fe6aa Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Tue, 8 Aug 2023 14:16:55 -0400 Subject: [PATCH 117/172] get the package name right ... sigh ... --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 85cdfb815..b5f081441 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -237,7 +237,7 @@ jobs: command: | # The Python for which we installed subunit is not necessarily on # %PATH% so (possibly) re-install it with the default Python. 
- python -m pip install subunit2junitxml junitxml + python -m pip install subunit junitxml Start-Process subunit2junitxml -Wait -RedirectStandardInput test-results.subunit2 -RedirectStandardOutput test-results.xml From fd6c7c880d9589e6b09a33630e441aa28f9b2736 Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Tue, 8 Aug 2023 15:39:17 -0400 Subject: [PATCH 118/172] try to run the program a different way --- .circleci/config.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index b5f081441..c9cb657b6 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -238,8 +238,7 @@ jobs: # The Python for which we installed subunit is not necessarily on # %PATH% so (possibly) re-install it with the default Python. python -m pip install subunit junitxml - - Start-Process subunit2junitxml -Wait -RedirectStandardInput test-results.subunit2 -RedirectStandardOutput test-results.xml + subunit2junitxml --output-to=test-results.xml test-results.subunit2 - "store_test_results": path: "test-results.xml" From 1a7a552e0dd773f25f982c14e9ff61e39ce00de1 Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Tue, 8 Aug 2023 16:05:22 -0400 Subject: [PATCH 119/172] where's my test output file --- .circleci/config.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.circleci/config.yml b/.circleci/config.yml index c9cb657b6..b164b42f1 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -237,6 +237,7 @@ jobs: command: | # The Python for which we installed subunit is not necessarily on # %PATH% so (possibly) re-install it with the default Python. + Set-PSDebug -Trace 2 python -m pip install subunit junitxml subunit2junitxml --output-to=test-results.xml test-results.subunit2 From 9c43a99c53deac29e28c8a408469257aa64f492b Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Tue, 8 Aug 2023 17:02:34 -0400 Subject: [PATCH 120/172] maybe python-subunit is less broken --- .circleci/config.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index b164b42f1..a13ce40d2 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -237,8 +237,7 @@ jobs: command: | # The Python for which we installed subunit is not necessarily on # %PATH% so (possibly) re-install it with the default Python. 
-            Set-PSDebug -Trace 2
-            python -m pip install subunit junitxml
+            python -m pip install python-subunit junitxml
             subunit2junitxml --output-to=test-results.xml test-results.subunit2
 
       - "store_test_results":

From 7a389bb3149cc6245d8373eacb38aac9976533dd Mon Sep 17 00:00:00 2001
From: Jean-Paul Calderone
Date: Wed, 9 Aug 2023 09:23:43 -0400
Subject: [PATCH 121/172] it's cpython

---
 .circleci/config.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.circleci/config.yml b/.circleci/config.yml
index a13ce40d2..a1092741e 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -118,7 +118,7 @@ workflows:
           {}
 
       - "windows-server-2022":
-          name: "Windows Server 2022, Python <<matrix.pythonVersion>>"
+          name: "Windows Server 2022, CPython <<matrix.pythonVersion>>"
          matrix:
            parameters:
              pythonVersion:

From d56ac6d6a2becf03d0fe3387340dfad19512d7f0 Mon Sep 17 00:00:00 2001
From: Jean-Paul Calderone
Date: Wed, 9 Aug 2023 09:24:37 -0400
Subject: [PATCH 122/172] note about error behavior of subunit2junitxml

---
 .circleci/config.yml | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/.circleci/config.yml b/.circleci/config.yml
index a1092741e..12acea784 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -238,6 +238,11 @@ jobs:
             # The Python for which we installed subunit is not necessarily on
             # %PATH% so (possibly) re-install it with the default Python.
             python -m pip install python-subunit junitxml
+
+            # subunit2junitxml exits with error if the result stream it is
+            # converting has test failures in it!  So this step might fail.
+            # Since the step in which we actually _ran_ the tests won't fail
+            # even if there are test failures, this is a good thing for now.
             subunit2junitxml --output-to=test-results.xml test-results.subunit2

From e04340f30a2b45ddb293c01ba8852a547733303b Mon Sep 17 00:00:00 2001
From: Jean-Paul Calderone
Date: Wed, 9 Aug 2023 09:28:42 -0400
Subject: [PATCH 123/172] supposedly this will work

---
 .circleci/config.yml | 8 ++++++--
 1 file changed, 6 insertions(+), 2 deletions(-)

diff --git a/.circleci/config.yml b/.circleci/config.yml
index 12acea784..d4a2530c3 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -141,8 +141,12 @@ workflows:
 
       - "finish-coverage-report":
           requires:
-            - "Windows Server 2022, Python 3.9"
-            - "Windows Server 2022, Python 3.11"
+            # Referencing the job by "alias" (as CircleCI calls the mapping
+            # key) instead of the value of its "name" property causes us to
+            # require every instance of the job from its matrix expansion.  So
+            # this requirement is enough to require every Windows Server 2022
+            # job.
+            - "windows-server-2022"

From d8df6d12d783ea6e0362cc62437b7355fa61af9a Mon Sep 17 00:00:00 2001
From: Jean-Paul Calderone
Date: Wed, 9 Aug 2023 09:34:32 -0400
Subject: [PATCH 124/172] pin/upgrade coveralls

---
 .circleci/config.yml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.circleci/config.yml b/.circleci/config.yml
index d4a2530c3..6e795c4b9 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -166,7 +166,7 @@ jobs:
           environment:
             <<: *COVERALLS_ENVIRONMENT
           command: |
-            pip install coveralls==3.2.0
+            pip install coveralls==3.3.1
             python -m coveralls --finish
 
   codechecks:
@@ -233,7 +233,7 @@ jobs:
             # (aka "service_number").
COVERALLS_PARALLEL: "true" command: | - py -<> -m pip install coveralls + py -<> -m pip install coveralls==3.3.1 py -<> -m coveralls - "run": From 7ebb3a2eadbd34cf0bd1a02cc2b2bf2e58551b2d Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Wed, 9 Aug 2023 09:34:40 -0400 Subject: [PATCH 125/172] some comments --- .circleci/config.yml | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/.circleci/config.yml b/.circleci/config.yml index 6e795c4b9..1e45c5235 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -215,9 +215,18 @@ jobs: - "run": name: "Run Unit Tests" environment: + # Configure the results location for the subunitv2-file reporter + # from subunitreporter SUBUNITREPORTER_OUTPUT_PATH: "test-results.subunit2" + + # Try to get prompt output from the reporter to avoid no-output + # timeouts. PYTHONUNBUFFERED: "1" + command: | + # Run the test suite under coverage measurement using the + # parameterized version of Python, writing subunitv2-format + # results to the file given in the environment. py -<> -b -m coverage run -m twisted.trial --reporter=subunitv2-file --rterrors allmydata - "run": From 3e1f62fd7b3b6cf8d8ff226048cf36e122345f70 Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Wed, 9 Aug 2023 09:36:32 -0400 Subject: [PATCH 126/172] cut down the test suite for faster testing, again --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 1e45c5235..dc73c18e7 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -227,7 +227,7 @@ jobs: # Run the test suite under coverage measurement using the # parameterized version of Python, writing subunitv2-format # results to the file given in the environment. - py -<> -b -m coverage run -m twisted.trial --reporter=subunitv2-file --rterrors allmydata + py -<> -b -m coverage run -m twisted.trial --reporter=subunitv2-file --rterrors allmydata.test.test_uri - "run": name: "Upload Coverage" From e27c2e97411fcaba8d0bb105c1636fe1816bd406 Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Wed, 9 Aug 2023 09:45:29 -0400 Subject: [PATCH 127/172] where's the coverage? --- .circleci/config.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.circleci/config.yml b/.circleci/config.yml index dc73c18e7..8dcb4c150 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -243,6 +243,7 @@ jobs: COVERALLS_PARALLEL: "true" command: | py -<> -m pip install coveralls==3.3.1 + py -<> -m coveralls debug py -<> -m coveralls - "run": From 38d6e5d8408e59c00f88d206639da241434dd4a3 Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Wed, 9 Aug 2023 09:56:43 -0400 Subject: [PATCH 128/172] merge the "parallel" coverage files before invoking coveralls --- .circleci/config.yml | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 8dcb4c150..487182ec3 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -243,7 +243,15 @@ jobs: COVERALLS_PARALLEL: "true" command: | py -<> -m pip install coveralls==3.3.1 - py -<> -m coveralls debug + + # .coveragerc sets parallel = True so we don't have a `.coverage` + # file but a `.coverage.` file (or maybe more than + # one, but probably not). coveralls can't work with these so + # merge them before invoking it. + py -<> -m coverage combine + + # Now coveralls will be able to find the data, so have it do the + # upload. 
             py -<<parameters.pythonVersion>> -m coveralls

From 18c5f090518f694a684aae3948a89df0b7ffec29 Mon Sep 17 00:00:00 2001
From: Jean-Paul Calderone
Date: Wed, 9 Aug 2023 09:56:57 -0400
Subject: [PATCH 129/172] upload the coverage results to circleci too

---
 .circleci/config.yml | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/.circleci/config.yml b/.circleci/config.yml
index 487182ec3..55f881e4b 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -276,6 +276,9 @@ jobs:
       - "store_artifacts":
           path: "eliot.log"
 
+      - "store_artifacts":
+          path: ".coverage"
+
   pyinstaller:
     docker:
       - <<: *DOCKERHUB_AUTH

From 571ded8680d09ab5a22b0a326f6b9944b843878f Mon Sep 17 00:00:00 2001
From: Jean-Paul Calderone
Date: Wed, 9 Aug 2023 10:18:05 -0400
Subject: [PATCH 130/172] try to get the coveralls we already have instead of
 installing again

---
 .circleci/config.yml | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/.circleci/config.yml b/.circleci/config.yml
index 55f881e4b..865020c1c 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -258,14 +258,15 @@ jobs:
           name: "Convert Result Log"
           command: |
             # The Python for which we installed subunit is not necessarily on
-            # %PATH% so (possibly) re-install it with the default Python.
-            python -m pip install python-subunit junitxml
+            # %PATH% so put it there.
+            $p = py -<<parameters.pythonVersion>> -c "import sys; print(sys.prefix)"
+            $env:PATH = "$env:PATH;$p\Scripts"
 
             # subunit2junitxml exits with error if the result stream it is
             # converting has test failures in it!  So this step might fail.
             # Since the step in which we actually _ran_ the tests won't fail
             # even if there are test failures, this is a good thing for now.
-            subunit2junitxml --output-to=test-results.xml test-results.subunit2
+            subunit2junitxml.exe --output-to=test-results.xml test-results.subunit2
 
       - "store_test_results":
           path: "test-results.xml"

From 4c16744199b87e7ba2700219e3a56992f851820c Mon Sep 17 00:00:00 2001
From: Jean-Paul Calderone
Date: Wed, 9 Aug 2023 10:24:29 -0400
Subject: [PATCH 131/172] try to settle %PATH% once and for all at the start
 of the job

---
 .circleci/config.yml | 28 +++++++++++++++++-----------
 1 file changed, 17 insertions(+), 11 deletions(-)

diff --git a/.circleci/config.yml b/.circleci/config.yml
index 865020c1c..12d3536e0 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -202,15 +202,26 @@ jobs:
     steps:
       - "checkout"
 
+      - "run":
+          name: "Fix $env:PATH"
+          command: |
+            # The Python for which we installed subunit is not necessarily on
+            # %PATH% so put it there.
+            # gets tools from packages we install.
+            $p = py -<<parameters.pythonVersion>> -c "import sys; print(sys.prefix)"
+            New-Item $Profile.CurrentUserAllHosts -Force
+            # $p gets "python" on PATH and $p\Scripts
+            Add-Content -Path $Profile.CurrentUserAllHosts -Value '$env:PATH = "$p;$p\Scripts;$env:PATH"'
+
       - "run":
           name: "Display tool versions"
           command: |
-            py -<<parameters.pythonVersion>> misc/build_helpers/show-tool-versions.py
+            python misc/build_helpers/show-tool-versions.py
 
       - "run":
           name: "Install Dependencies"
           command: |
-            py -<<parameters.pythonVersion>> -m pip install .[testenv] .[test]
+            python -m pip install .[testenv] .[test]
 
       - "run":
           name: "Run Unit Tests"
           environment:
@@ -238,7 +238,7 @@ jobs:
             # Run the test suite under coverage measurement using the
             # parameterized version of Python, writing subunitv2-format
             # results to the file given in the environment.
-            py -<<parameters.pythonVersion>> -b -m coverage run -m twisted.trial --reporter=subunitv2-file --rterrors allmydata.test.test_uri
+            python -b -m coverage run -m twisted.trial --reporter=subunitv2-file --rterrors allmydata.test.test_uri
 
       - "run":
           name: "Upload Coverage"
           environment:
@@ -253,17 +253,17 @@ jobs:
             COVERALLS_PARALLEL: "true"
           command: |
-            py -<<parameters.pythonVersion>> -m pip install coveralls==3.3.1
+            python -m pip install coveralls==3.3.1
 
             # .coveragerc sets parallel = True so we don't have a `.coverage`
             # file but a `.coverage.<something>` file (or maybe more than
             # one, but probably not).  coveralls can't work with these so
             # merge them before invoking it.
-            py -<<parameters.pythonVersion>> -m coverage combine
+            python -m coverage combine
 
             # Now coveralls will be able to find the data, so have it do the
             # upload.
-            py -<<parameters.pythonVersion>> -m coveralls
+            python -m coveralls
 
       - "run":
           name: "Convert Result Log"
           command: |
-            # The Python for which we installed subunit is not necessarily on
-            # %PATH% so put it there.
-            $p = py -<<parameters.pythonVersion>> -c "import sys; print(sys.prefix)"
-            $env:PATH = "$env:PATH;$p\Scripts"
-
             # subunit2junitxml exits with error if the result stream it is
             # converting has test failures in it!  So this step might fail.
             # Since the step in which we actually _ran_ the tests won't fail
             # even if there are test failures, this is a good thing for now.
             subunit2junitxml.exe --output-to=test-results.xml test-results.subunit2

From 139a329b38c9457b7875731dedc713f89a469324 Mon Sep 17 00:00:00 2001
From: Jean-Paul Calderone
Date: Wed, 9 Aug 2023 10:29:18 -0400
Subject: [PATCH 132/172] debug PATH setup

---
 .circleci/config.yml | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/.circleci/config.yml b/.circleci/config.yml
index 12d3536e0..9a1f8cd41 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -213,6 +213,11 @@ jobs:
             # $p gets "python" on PATH and $p\Scripts
             Add-Content -Path $Profile.CurrentUserAllHosts -Value '$env:PATH = "$p;$p\Scripts;$env:PATH"'
 
+      - "run":
+          name: "Reveal $env:PATH"
+          command: |
+            $env:PATH
+
       - "run":
           name: "Display tool versions"
           command: |

From 66177ae28e653d8134ae2d2da425a44b5cd5b931 Mon Sep 17 00:00:00 2001
From: Jean-Paul Calderone
Date: Wed, 9 Aug 2023 10:44:00 -0400
Subject: [PATCH 133/172] how about this impressive construction?

previous version was constructing the value string wrong

---
 .circleci/config.yml | 10 +++++++---
 1 file changed, 7 insertions(+), 3 deletions(-)

diff --git a/.circleci/config.yml b/.circleci/config.yml
index 9a1f8cd41..e530ddeb5 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -207,11 +207,15 @@ jobs:
           command: |
             # The Python for which we installed subunit is not necessarily on
             # %PATH% so put it there.
-            # gets tools from packages we install.
             $p = py -<<parameters.pythonVersion>> -c "import sys; print(sys.prefix)"
+            $q = py -<<parameters.pythonVersion>> -c "import sysconfig; print(sysconfig.get_path('scripts'))"
+
             New-Item $Profile.CurrentUserAllHosts -Force
-            # $p gets "python" on PATH and $p\Scripts
-            Add-Content -Path $Profile.CurrentUserAllHosts -Value '$env:PATH = "$p;$p\Scripts;$env:PATH"'
+            # $p gets "python" on PATH and $q gets tools from packages we
+            # install.  Note we carefully construct the string so that
+            # $env:PATH is not substituted now but $p and $q are.  ` is the
+            # PowerShell string escape character.
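+            # For example, with a hypothetical CPython installed at C:\Python39,
+            # the profile line written below works out to:
+            #   $env:PATH = "C:\Python39;C:\Python39\Scripts;$env:PATH"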
+ Add-Content -Path $Profile.CurrentUserAllHosts -Value "`$env:PATH = `"$p;$q;`$env:PATH`"' - "run": name: "Reveal $env:PATH" From 0995b77020116d72abe3390a387231f124a1e599 Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Wed, 9 Aug 2023 10:44:17 -0400 Subject: [PATCH 134/172] try stripping the interpreter-specific prefix from our paths --- .circleci/config.yml | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index e530ddeb5..501e28b9a 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -271,8 +271,10 @@ jobs: python -m coverage combine # Now coveralls will be able to find the data, so have it do the - # upload. - python -m coveralls + # upload. Also, have it strip the system config-specific prefix + # from all of the source paths. + $prefix = python -c "import sysconfig; print(sysconfig.get_path('purelib'))" + python -m coveralls --basedir $prefix - "run": name: "Convert Result Log" From f17939009466780d1b4d114cc4e886d160047f78 Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Wed, 9 Aug 2023 10:52:10 -0400 Subject: [PATCH 135/172] fix quoting bug --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 501e28b9a..050871832 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -215,7 +215,7 @@ jobs: # install. Note we carefully construct the string so that # $env:PATH is not substituted now but $p and $q are. ` is the # PowerShell string escape character. - Add-Content -Path $Profile.CurrentUserAllHosts -Value "`$env:PATH = `"$p;$q;`$env:PATH`"' + Add-Content -Path $Profile.CurrentUserAllHosts -Value "`$env:PATH = `"$p;$q;`$env:PATH`" - "run": name: "Reveal $env:PATH" From e1269c836d39571aae4594be7e1cd9f419aa7fc1 Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Wed, 9 Aug 2023 10:55:49 -0400 Subject: [PATCH 136/172] try again with closing quote --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 050871832..55be820a7 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -215,7 +215,7 @@ jobs: # install. Note we carefully construct the string so that # $env:PATH is not substituted now but $p and $q are. ` is the # PowerShell string escape character. - Add-Content -Path $Profile.CurrentUserAllHosts -Value "`$env:PATH = `"$p;$q;`$env:PATH`" + Add-Content -Path $Profile.CurrentUserAllHosts -Value "`$env:PATH = `"$p;$q;`$env:PATH`"" - "run": name: "Reveal $env:PATH" From 89506a6f828f8da8e845b377fa4a8d4470ca076e Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Wed, 9 Aug 2023 11:03:33 -0400 Subject: [PATCH 137/172] back to the full test suite --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 55be820a7..b7f3ffd85 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -247,7 +247,7 @@ jobs: # Run the test suite under coverage measurement using the # parameterized version of Python, writing subunitv2-format # results to the file given in the environment. 
-            python -b -m coverage run -m twisted.trial --reporter=subunitv2-file --rterrors allmydata.test.test_uri
+            python -b -m coverage run -m twisted.trial --reporter=subunitv2-file --rterrors allmydata

From e072fb60b85f23d801fed8086608bf26db4f0dd6 Mon Sep 17 00:00:00 2001
From: Jean-Paul Calderone
Date: Wed, 9 Aug 2023 11:20:28 -0400
Subject: [PATCH 138/172] fix the comment above PATH manipulation

---
 .circleci/config.yml | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/.circleci/config.yml b/.circleci/config.yml
index b7f3ffd85..465a97a6b 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -205,8 +205,11 @@ jobs:
       - "run":
           name: "Fix $env:PATH"
           command: |
-            # The Python for which we installed subunit is not necessarily on
-            # %PATH% so put it there.
+            # The Python this job is parameterized is not necessarily the one
+            # at the front of $env:PATH.  Modify $env:PATH so that it is so we
+            # can just say "python" in the rest of the steps.  Also get the
+            # related Scripts directory so tools from packages we install are
+            # also available.
             $p = py -<<parameters.pythonVersion>> -c "import sys; print(sys.prefix)"
             $q = py -<<parameters.pythonVersion>> -c "import sysconfig; print(sysconfig.get_path('scripts'))"

From 35d731adf02fc3705d95884f1816b61bd6ac502c Mon Sep 17 00:00:00 2001
From: Jean-Paul Calderone
Date: Wed, 9 Aug 2023 11:20:38 -0400
Subject: [PATCH 139/172] remove the debug step

---
 .circleci/config.yml | 5 -----
 1 file changed, 5 deletions(-)

diff --git a/.circleci/config.yml b/.circleci/config.yml
index 465a97a6b..dbd275a76 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -220,11 +220,6 @@ jobs:
             # PowerShell string escape character.
             Add-Content -Path $Profile.CurrentUserAllHosts -Value "`$env:PATH = `"$p;$q;`$env:PATH`""
 
-      - "run":
-          name: "Reveal $env:PATH"
-          command: |
-            $env:PATH
-
       - "run":
           name: "Display tool versions"
           command: |

From ce8a6d49c7c7e03edfe7f9893a958690b7e33089 Mon Sep 17 00:00:00 2001
From: Jean-Paul Calderone
Date: Wed, 9 Aug 2023 11:30:39 -0400
Subject: [PATCH 140/172] Attempt to cache packages downloaded with pip for
 Windows jobs

---
 .circleci/config.yml | 14 ++++++++++++++
 1 file changed, 14 insertions(+)

diff --git a/.circleci/config.yml b/.circleci/config.yml
index dbd275a76..629488137 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -198,10 +198,18 @@ jobs:
     executor: "windows"
     environment:
       TAHOE_LAFS_HYPOTHESIS_PROFILE: "ci"
+
+      # Tell pip where its download cache lives.  This must agree with the
+      # "save_cache" step below or caching won't really work right.
+      PIP_CACHE_DIR: "pip-cache"
 
     steps:
       - "checkout"
 
+      - "restore_cache":
+          keys:
+            - "pip-packages-v1-{{ checksum \"setup.py\" }}"
+            - "pip-packages-v1-"
+
       - "run":
           name: "Fix $env:PATH"
           command: |
@@ -231,6 +239,12 @@ jobs:
           command: |
             python -m pip install .[testenv] .[test]
 
+      - "save_cache":
+          paths:
+            # Make sure this agrees with PIP_CACHE_DIR in the environment.
+ - "pip-cache" + key: "pip-packages-v1-{{ checksum \"setup.py\" }}" + - "run": name: "Run Unit Tests" environment: From fa72ac795166091304be631dc0911c2c9356e4cd Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Wed, 9 Aug 2023 12:00:56 -0400 Subject: [PATCH 141/172] a couple more comments about the windows job steps --- .circleci/config.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.circleci/config.yml b/.circleci/config.yml index 629488137..d06d9a0a6 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -197,7 +197,11 @@ jobs: executor: "windows" environment: + # Tweak Hypothesis to make its behavior more suitable for the CI + # environment. This should improve reproducibility and lessen the + # effects of variable compute resources. TAHOE_LAFS_HYPOTHESIS_PROFILE: "ci" + # Tell pip where its download cache lives. This must agree with the # "save_cache" step below or caching won't really work right. PIP_CACHE_DIR: "pip-cache" @@ -205,6 +209,8 @@ jobs: steps: - "checkout" + # If possible, restore a pip download cache to save us from having to + # download all our Python dependencies from PyPI. - "restore_cache": keys: - "pip-packages-v1-{{ checksum \"setup.py\" }}" From c5cac7b5a7e7a3c5145c3893b6cdeab77bd64854 Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Wed, 9 Aug 2023 12:01:02 -0400 Subject: [PATCH 142/172] get rid of the partial cache key CircleCI docs don't clearly explain what happens after a partial cache key match and reconstructing our cache is sufficiently cheap that it's probably not worth the complexity / uncertainty. --- .circleci/config.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index d06d9a0a6..7febae61a 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -214,7 +214,6 @@ jobs: - "restore_cache": keys: - "pip-packages-v1-{{ checksum \"setup.py\" }}" - - "pip-packages-v1-" - "run": name: "Fix $env:PATH" From 4db44dc1787dc0c98b4e6513728df3ee7faf2287 Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Wed, 9 Aug 2023 12:34:09 -0400 Subject: [PATCH 143/172] Attempt to cache all the wheels --- .circleci/config.yml | 33 ++++++++++++++++++++++++++++++--- 1 file changed, 30 insertions(+), 3 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 7febae61a..e20587f75 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -206,6 +206,11 @@ jobs: # "save_cache" step below or caching won't really work right. PIP_CACHE_DIR: "pip-cache" + # And tell pip where it can find out cached wheelhouse for fast wheel + # installation, even for projects that don't distribute wheels. This + # must also agree with the "save_cache" step below. + PIP_FIND_LINKS: "wheelhouse" + steps: - "checkout" @@ -213,7 +218,11 @@ jobs: # download all our Python dependencies from PyPI. - "restore_cache": keys: - - "pip-packages-v1-{{ checksum \"setup.py\" }}" + # The download cache and/or the wheelhouse may contain Python + # version-specific binary packages so include the Python version + # in this key, as well as the canonical source of our + # dependencies. + - "pip-packages-v1-{{ parameters.pythonVersion }}-{{ checksum \"setup.py\" }}" - "run": name: "Fix $env:PATH" @@ -239,16 +248,34 @@ jobs: python misc/build_helpers/show-tool-versions.py - "run": - name: "Install Dependencies" + # It's faster to install a wheel than a source package. 
If we don't + # have a cached wheelhouse then build all of the wheels and dump + # them into a directory where they can become a cached wheelhouse. + # We would have built these wheels during installation anyway so it + # doesn't cost us anything extra and saves us effort next time. + name: "(Maybe) Build Wheels" command: | - python -m pip install .[testenv] .[test] + if ((Test-Path .\wheelhouse) -and (Test-Path .\wheelhouse\*)) { + echo "Found populated wheelhouse, skipping wheel building." + } else { + python -m pip wheel --wheel-dir $env:PIP_FIND_LINKS .[testenv] .[test] + } - "save_cache": paths: # Make sure this agrees with PIP_CACHE_DIR in the environment. - "pip-cache" + - "wheelhouse" key: "pip-packages-v1-{{ checksum \"setup.py\" }}" + - "run": + name: "Install Dependencies" + environment: + # By this point we should no longer need an index. + PIP_NO_INDEX: "1" + command: | + python -m pip install .[testenv] .[test] + - "run": name: "Run Unit Tests" environment: From 65d76c2e3c471efc192ddb1a0f6f67ddec9673b0 Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Wed, 9 Aug 2023 12:53:07 -0400 Subject: [PATCH 144/172] must install `wheel` to build wheels with `pip wheel` it seems --- .circleci/config.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.circleci/config.yml b/.circleci/config.yml index e20587f75..4d2091e79 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -258,6 +258,7 @@ jobs: if ((Test-Path .\wheelhouse) -and (Test-Path .\wheelhouse\*)) { echo "Found populated wheelhouse, skipping wheel building." } else { + python -m pip install wheel python -m pip wheel --wheel-dir $env:PIP_FIND_LINKS .[testenv] .[test] } From a0389e83cc2e61bdade4d581a71f7aa0e33b7d1c Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Wed, 9 Aug 2023 12:54:02 -0400 Subject: [PATCH 145/172] use the *correct* templating system for this value --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 4d2091e79..9923a3fcc 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -222,7 +222,7 @@ jobs: # version-specific binary packages so include the Python version # in this key, as well as the canonical source of our # dependencies. - - "pip-packages-v1-{{ parameters.pythonVersion }}-{{ checksum \"setup.py\" }}" + - "pip-packages-v1-<< parameters.pythonVersion >>-{{ checksum \"setup.py\" }}" - "run": name: "Fix $env:PATH" From 14135ea3f0aa2d30c009ef9176170a732e28d5c9 Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Wed, 9 Aug 2023 13:07:03 -0400 Subject: [PATCH 146/172] make sure the two mentions of the cache key agree --- .circleci/config.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 9923a3fcc..d892a0efd 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -222,7 +222,7 @@ jobs: # version-specific binary packages so include the Python version # in this key, as well as the canonical source of our # dependencies. - - "pip-packages-v1-<< parameters.pythonVersion >>-{{ checksum \"setup.py\" }}" + - &CACHE_KEY "pip-packages-v1-<< parameters.pythonVersion >>-{{ checksum \"setup.py\" }}" - "run": name: "Fix $env:PATH" @@ -267,7 +267,7 @@ jobs: # Make sure this agrees with PIP_CACHE_DIR in the environment. 
- "pip-cache" - "wheelhouse" - key: "pip-packages-v1-{{ checksum \"setup.py\" }}" + key: *CACHE_KEY - "run": name: "Install Dependencies" From a73b6d99c4564e0275c315f7c7627c92c21ea732 Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Wed, 9 Aug 2023 14:10:58 -0400 Subject: [PATCH 147/172] we end up using the coveralls tool to clean up these paths and I stopped using tox so they don't look like this anymore --- .coveragerc | 2 -- 1 file changed, 2 deletions(-) diff --git a/.coveragerc b/.coveragerc index 32b803586..5b41f9ce3 100644 --- a/.coveragerc +++ b/.coveragerc @@ -23,7 +23,5 @@ source = D:/a/tahoe-lafs/tahoe-lafs/.tox/py*-coverage/Lib/site-packages/ # Although sometimes it looks like this instead. Also it looks like this on macOS. .tox/py*-coverage/lib/python*/site-packages/ -# And on the CircleCI Windows build envronment... - .tox/py*-coverage/Lib/site-packages/ # On some Linux CI jobs it looks like this /tmp/tahoe-lafs.tox/py*-coverage/lib/python*/site-packages/ From 27b97dc1d8b8f56887dc2508d91ec6239d25dafd Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Wed, 9 Aug 2023 15:01:07 -0400 Subject: [PATCH 148/172] bump it --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 86873ad53..433721d2a 100644 --- a/setup.py +++ b/setup.py @@ -413,7 +413,7 @@ setup(name="tahoe-lafs", # also set in __init__.py "pip==22.0.3", "wheel==0.37.1", "setuptools==60.9.1", - "subunitreporter==22.2.0", + "subunitreporter==23.8.0", "python-subunit==1.4.2", "junitxml==0.7", "coverage==7.2.5", From d93d6122f78f586306a129eb09d13c81b11a2b90 Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Wed, 9 Aug 2023 15:01:16 -0400 Subject: [PATCH 149/172] news fragment --- newsfragments/4059.minor | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 newsfragments/4059.minor diff --git a/newsfragments/4059.minor b/newsfragments/4059.minor new file mode 100644 index 000000000..e69de29bb From a95a6b88a92f50d1c33e37e1af6bf6d743d16dde Mon Sep 17 00:00:00 2001 From: Jean-Paul Calderone Date: Wed, 9 Aug 2023 17:04:26 -0400 Subject: [PATCH 150/172] note motivation for our choice of these python versions --- .circleci/config.yml | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/.circleci/config.yml b/.circleci/config.yml index d892a0efd..d327ecbc7 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -121,6 +121,13 @@ workflows: name: "Windows Server 2022, CPython <>" matrix: parameters: + # Run the job for a number of CPython versions. These are the + # two versions installed on the version of the Windows VM image + # we specify (in the executor). This is handy since it means we + # don't have to do any Python installation work. We pin the + # Windows VM image so these shouldn't shuffle around beneath us + # but if we want to update that image or get different versions + # of Python, we probably have to do something here. 
pythonVersion: - "3.9" - "3.11" From c7f6b6484d033d0b184bb46afec2e134cc389346 Mon Sep 17 00:00:00 2001 From: meejah Date: Wed, 9 Aug 2023 15:15:20 -0600 Subject: [PATCH 151/172] spelling --- newsfragments/4056.bugfix | 2 +- src/allmydata/storage_client.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/newsfragments/4056.bugfix b/newsfragments/4056.bugfix index 1f94de0da..7e637b48c 100644 --- a/newsfragments/4056.bugfix +++ b/newsfragments/4056.bugfix @@ -1,3 +1,3 @@ -Provide our own copy of attrs' "provides()" validor +Provide our own copy of attrs' "provides()" validator This validator is deprecated and slated for removal; that project's suggestion is to copy the code to our project. diff --git a/src/allmydata/storage_client.py b/src/allmydata/storage_client.py index 8de3a9ca9..c59db0817 100644 --- a/src/allmydata/storage_client.py +++ b/src/allmydata/storage_client.py @@ -88,7 +88,7 @@ from allmydata.util.rrefutil import add_version_to_remote_reference from allmydata.util.hashutil import permute_server_hash from allmydata.util.dictutil import BytesKeyDict, UnicodeKeyDict from allmydata.util.deferredutil import async_to_deferred, race -from allmydata.util.attr_provides import provides +from allmydata.util.attrs_provides import provides from allmydata.storage.http_client import ( StorageClient, StorageClientImmutables, StorageClientGeneral, ClientException as HTTPClientException, StorageClientMutables, From 9758569cffb9f62a5597a330d653cde7e8357169 Mon Sep 17 00:00:00 2001 From: meejah Date: Wed, 9 Aug 2023 15:16:07 -0600 Subject: [PATCH 152/172] obsolete comment --- integration/grid.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/integration/grid.py b/integration/grid.py index 03c3bb6e2..b97c22bf7 100644 --- a/integration/grid.py +++ b/integration/grid.py @@ -73,11 +73,6 @@ class FlogGatherer(object): """ Flog Gatherer process. 
""" - - # it would be best to use attr.validators.provides() here but that - # is deprecated; please replace with our own "provides" as part of - # https://tahoe-lafs.org/trac/tahoe-lafs/ticket/4056#ticket - # for now, insisting on a subclass which is narrower than necessary process = attr.ib( validator=provides(IProcessTransport) ) From 295e816d4ee2cfc27130f96994e0742849390009 Mon Sep 17 00:00:00 2001 From: meejah Date: Wed, 9 Aug 2023 21:49:37 -0600 Subject: [PATCH 153/172] spell --- src/allmydata/storage_client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/allmydata/storage_client.py b/src/allmydata/storage_client.py index 1f6b41b1c..9739091dc 100644 --- a/src/allmydata/storage_client.py +++ b/src/allmydata/storage_client.py @@ -761,7 +761,7 @@ class AnnouncementNotMatched(Exception): @attr.s(auto_exc=True) class MissingPlugin(Exception): """ - A particular plugin was request, but is missing + A particular plugin was requested but is missing """ plugin_name = attr.ib() From cf4fe0061cdddd254a850efc1f2949f0b49447f9 Mon Sep 17 00:00:00 2001 From: meejah Date: Wed, 9 Aug 2023 22:27:55 -0600 Subject: [PATCH 154/172] refactor where plugins are loaded; use this to error early for users --- src/allmydata/client.py | 5 +++ src/allmydata/node.py | 2 + src/allmydata/scripts/tahoe_run.py | 14 +++++++ src/allmydata/storage_client.py | 67 +++++++++++++++++++++--------- 4 files changed, 69 insertions(+), 19 deletions(-) diff --git a/src/allmydata/client.py b/src/allmydata/client.py index aff2d5815..cfc0977a1 100644 --- a/src/allmydata/client.py +++ b/src/allmydata/client.py @@ -483,6 +483,11 @@ def create_storage_farm_broker(config: _Config, default_connection_handlers, foo storage_client_config = storage_client.StorageClientConfig.from_node_config( config, ) + # ensure that we can at least load all plugins that the + # configuration mentions; doing this early (i.e. before creating + # storage-clients themselves) allows us to exit in case of a + # problem. 
+ storage_client_config.get_configured_storage_plugins() def tub_creator(handler_overrides=None, **kwargs): return node.create_tub( diff --git a/src/allmydata/node.py b/src/allmydata/node.py index 6c3082b50..5b06cb963 100644 --- a/src/allmydata/node.py +++ b/src/allmydata/node.py @@ -30,10 +30,12 @@ from twisted.python.filepath import ( from twisted.python import log as twlog from twisted.application import service from twisted.python.failure import Failure +from twisted.plugin import getPlugins from foolscap.api import Tub import foolscap.logging.log +from allmydata.interfaces import IFoolscapStoragePlugin from allmydata.util import log from allmydata.util import fileutil, iputil from allmydata.util.fileutil import abspath_expanduser_unicode diff --git a/src/allmydata/scripts/tahoe_run.py b/src/allmydata/scripts/tahoe_run.py index ff3ff9efd..eba5ae329 100644 --- a/src/allmydata/scripts/tahoe_run.py +++ b/src/allmydata/scripts/tahoe_run.py @@ -42,6 +42,9 @@ from allmydata.util.pid import ( from allmydata.storage.crawler import ( MigratePickleFileError, ) +from allmydata.storage_client import ( + MissingPlugin, +) from allmydata.node import ( PortAssignmentRequired, PrivacyError, @@ -197,6 +200,17 @@ class DaemonizeTheRealService(Service, HookMixin): self.basedir, ) ) + elif reason.check(MissingPlugin): + self.stderr.write( + "Missing Plugin\n" + "The configuration requests a plugin:\n" + "\n {}\n\n" + "...which cannot be found.\n" + "This typically means that some software hasn't been installed or the plugin couldn't be instantiated.\n\n" + .format( + reason.value.plugin_name, + ) + ) else: self.stderr.write("\nUnknown error, here's the traceback:\n") reason.printTraceback(self.stderr) diff --git a/src/allmydata/storage_client.py b/src/allmydata/storage_client.py index 9739091dc..24abe2a18 100644 --- a/src/allmydata/storage_client.py +++ b/src/allmydata/storage_client.py @@ -187,6 +187,30 @@ class StorageClientConfig(object): grid_manager_keys, ) + def get_configured_storage_plugins(self): + """ + :returns Dict[str, IFoolscapStoragePlugin]: a dict mapping names + to instances for all available plugins + + :raises MissingPlugin: if the configuration asks for a plugin + for which there is no corresponding instance (e.g. it is + not installed). 
+ """ + plugins = { + plugin.name: plugin + for plugin + in getPlugins(IFoolscapStoragePlugin) + } + + configured = dict() + for plugin_name in self.storage_plugins: + try: + plugin = plugins[plugin_name] + except KeyError: + raise MissingPlugin(plugin_name) + configured[plugin_name] = plugin + return configured + @implementer(IStorageBroker) class StorageFarmBroker(service.MultiService): @@ -765,10 +789,9 @@ class MissingPlugin(Exception): """ plugin_name = attr.ib() - nickname = attr.ib() def __str__(self): - return "Missing plugin '{}' for server '{}'".format(self.plugin_name, self.nickname) + return "Missing plugin '{}'".format(self.plugin_name) def _storage_from_foolscap_plugin(node_config, config, announcement, get_rref): @@ -782,26 +805,32 @@ def _storage_from_foolscap_plugin(node_config, config, announcement, get_rref): :param dict announcement: The storage announcement for the storage server we should build """ - plugins = { - plugin.name: plugin - for plugin - in getPlugins(IFoolscapStoragePlugin) - } storage_options = announcement.get(u"storage-options", []) - for plugin_name, plugin_config in list(config.storage_plugins.items()): + plugins = config.get_configured_storage_plugins() + + # for every storage-option that we have enabled locally (in order + # of preference), see if the announcement asks for such a thing. + # if it does, great: we return that storage-client + # otherwise we've run out of options... + + for options in storage_options: try: - plugin = plugins[plugin_name] + plugin = plugins[options[u"name"]] except KeyError: - raise MissingPlugin(plugin_name, announcement.get(u"nickname", "")) - for option in storage_options: - if plugin_name == option[u"name"]: - furl = option[u"storage-server-FURL"] - return furl, plugin.get_storage_client( - node_config, - option, - get_rref, - ) - plugin_names = ", ".join(sorted(list(config.storage_plugins.keys()))) + # we didn't configure this kind of plugin locally, so + # consider the next announced option + continue + + furl = options[u"storage-server-FURL"] + return furl, plugin.get_storage_client( + node_config, + options, + get_rref, + ) + + # none of the storage options in the announcement are configured + # locally; we can't make a storage-client. + plugin_names = ", ".join(sorted(plugins)) raise AnnouncementNotMatched(plugin_names) From d7cfb5dde9d11b52e8525d5666fbac355ba8eb1b Mon Sep 17 00:00:00 2001 From: meejah Date: Wed, 9 Aug 2023 23:21:28 -0600 Subject: [PATCH 155/172] show WebUI feedback when announcement-match fails --- src/allmydata/storage_client.py | 21 ++++++++++----------- 1 file changed, 10 insertions(+), 11 deletions(-) diff --git a/src/allmydata/storage_client.py b/src/allmydata/storage_client.py index 24abe2a18..1b7b92acb 100644 --- a/src/allmydata/storage_client.py +++ b/src/allmydata/storage_client.py @@ -830,7 +830,7 @@ def _storage_from_foolscap_plugin(node_config, config, announcement, get_rref): # none of the storage options in the announcement are configured # locally; we can't make a storage-client. - plugin_names = ", ".join(sorted(plugins)) + plugin_names = ", ".join(sorted(option["name"] for option in storage_options)) raise AnnouncementNotMatched(plugin_names) @@ -872,6 +872,7 @@ def _make_storage_system( :return: An object enabling communication via Foolscap with the server which generated the announcement. """ + unmatched = None # Try to match the announcement against a plugin. 
     try:
         furl, storage_server = _storage_from_foolscap_plugin(
@@ -885,14 +886,10 @@ def _make_storage_system(
             get_rref,
         )
     except AnnouncementNotMatched as e:
-        _log.error(
-            'No plugin for storage-server "{nickname}" from plugins: {plugins}',
-            nickname=ann.get("nickname", ""),
-            plugins=e.args[0],
-        )
-    except MissingPlugin as e:
-        _log.failure("Missing plugin")
-        return _NullStorage('<missing plugin - {}>'.format(e.args[0]))
+        # show a more-specific error to the user for this server
+        # (Note this will only be shown if the server _doesn't_ offer
+        # anonymous service, which will match below)
+        unmatched = _NullStorage('{}: missing plugin "{}"'.format(server_id.decode("utf8"), str(e)))
     else:
         return _FoolscapStorage.from_announcement(
             server_id,
@@ -918,8 +915,10 @@ def _make_storage_system(
             storage_server,
         )
 
-    # Nothing matched so we can't talk to this server.
-    return _null_storage
+    # Nothing matched so we can't talk to this server.  If we have a
+    # specific reason in "unmatched", use it; otherwise the generic
+    # one
+    return unmatched or _null_storage
 
 @implementer(IServer)
 class NativeStorageServer(service.MultiService):

From 09ea172b940c607a990e6cd5d4bbb9f98075795e Mon Sep 17 00:00:00 2001
From: meejah
Date: Thu, 10 Aug 2023 12:06:29 -0600
Subject: [PATCH 157/172] reformat multiline strings; don't output
 "storage.plugins = None"

---
 src/allmydata/test/common.py | 42 ++++++++++++++++++++----------------
 1 file changed, 23 insertions(+), 19 deletions(-)

diff --git a/src/allmydata/test/common.py b/src/allmydata/test/common.py
index d61bc28f1..744c17efa 100644
--- a/src/allmydata/test/common.py
+++ b/src/allmydata/test/common.py
@@ -307,13 +307,18 @@ class UseNode(object):
         if self.plugin_config is None:
             plugin_config_section = ""
         else:
-            plugin_config_section = """
-[storageclient.plugins.{storage_plugin}]
-{config}
-""".format(
-    storage_plugin=self.storage_plugin,
-    config=format_config_items(self.plugin_config),
-)
+            plugin_config_section =
+                "[storageclient.plugins.{storage_plugin}]\n"
+                "{config}\n"
+                .format(
+                    storage_plugin=self.storage_plugin,
+                    config=format_config_items(self.plugin_config),
+                )
+
+        if self.storage_plugin is None:
+            plugins = ""
+        else:
+            plugins = "storage.plugins = {}".format(self.storage_plugin)
 
         write_introducer(
             self.basedir,
@@ -340,18 +345,17 @@ class UseNode(object):
         self.config = config_from_string(
             self.basedir.asTextMode().path,
             "tub.port",
-"""
-[node]
-{node_config}
-
-[client]
-storage.plugins = {storage_plugin}
-{plugin_config_section}
-""".format(
-    storage_plugin=self.storage_plugin,
-    node_config=format_config_items(node_config),
-    plugin_config_section=plugin_config_section,
-)
+            "[node]\n"
+            "{node_config}\n"
+            "\n"
+            "[client]\n"
+            "{plugins}\n"
+            "{plugin_config_section}\n"
+            .format(
+                plugins=plugins,
+                node_config=format_config_items(node_config),
+                plugin_config_section=plugin_config_section,
+            )
         )
 
     def create_node(self):

From b07d9e90cbcd557821a75a1fd7571e2c169dee73 Mon Sep 17 00:00:00 2001
From: meejah
Date: Thu, 10 Aug 2023 12:07:03 -0600
Subject: [PATCH 158/172] correct test

---
 src/allmydata/test/test_storage_client.py | 22 +++++++++-------------
 1 file changed, 9 insertions(+), 13 deletions(-)

diff --git a/src/allmydata/test/test_storage_client.py b/src/allmydata/test/test_storage_client.py
index e3b192a96..d719f227b 100644
--- a/src/allmydata/test/test_storage_client.py
+++ b/src/allmydata/test/test_storage_client.py
@@ -243,22 +243,18 @@ class UnrecognizedAnnouncement(unittest.TestCase):
         server.get_foolscap_write_enabler_seed()
         server.get_nickname()
 
-    def test_longname(self) -> None:
+    def test_missing_plugin(self) -> None:
         """
-        ``NativeStorageServer.get_longname`` describes the missing plugin.
+        An exception is produced if the plugin is missing
         """
-        server = self.native_storage_server(
-            StorageClientConfig(
-                storage_plugins={
-                    "nothing": {}
-                }
+        with self.assertRaises(MissingPlugin):
+            _ = self.native_storage_server(
+                StorageClientConfig(
+                    storage_plugins={
+                        "nothing": {}
+                    }
+                )
             )
-        )
-        self.assertEqual(
-            server.get_longname(),
-            '<missing plugin - nothing>',
-        )
-        self.flushLoggedErrors(MissingPlugin)
 
 
 class PluginMatchedAnnouncement(SyncTestCase):

From e3e5b4bc8d5deacad91a3c25243b9f71eec3a63d Mon Sep 17 00:00:00 2001
From: meejah
Date: Thu, 10 Aug 2023 12:19:11 -0600
Subject: [PATCH 159/172] typo

---
 src/allmydata/test/common.py | 13 ++++++-------
 1 file changed, 6 insertions(+), 7 deletions(-)

diff --git a/src/allmydata/test/common.py b/src/allmydata/test/common.py
index 744c17efa..1186bd540 100644
--- a/src/allmydata/test/common.py
+++ b/src/allmydata/test/common.py
@@ -307,13 +307,12 @@ class UseNode(object):
         if self.plugin_config is None:
             plugin_config_section = ""
         else:
-            plugin_config_section =
-                "[storageclient.plugins.{storage_plugin}]\n"
-                "{config}\n"
-                .format(
-                    storage_plugin=self.storage_plugin,
-                    config=format_config_items(self.plugin_config),
-                )
+            plugin_config_section = (
+                "[storageclient.plugins.{storage_plugin}]\n"
+                "{config}\n").format(
+                    storage_plugin=self.storage_plugin,
+                    config=format_config_items(self.plugin_config),
+                )

From 60e873bbe48f94af3079a1a60e0d5159b73e4c87 Mon Sep 17 00:00:00 2001
From: meejah
Date: Thu, 10 Aug 2023 13:22:35 -0600
Subject: [PATCH 160/172] unused

---
 src/allmydata/node.py | 2 --
 1 file changed, 2 deletions(-)

diff --git a/src/allmydata/node.py b/src/allmydata/node.py
index 5b06cb963..6c3082b50 100644
--- a/src/allmydata/node.py
+++ b/src/allmydata/node.py
@@ -30,12 +30,10 @@ from twisted.python.filepath import (
 from twisted.python import log as twlog
 from twisted.application import service
 from twisted.python.failure import Failure
-from twisted.plugin import getPlugins
 
 from foolscap.api import Tub
 import foolscap.logging.log
 
-from allmydata.interfaces import IFoolscapStoragePlugin
 from allmydata.util import log
 from allmydata.util import fileutil, iputil
 from allmydata.util.fileutil import abspath_expanduser_unicode

From 7322d8c0e60ecd33da155d7055c8917fc34aa83a Mon Sep 17 00:00:00 2001
From: meejah
Date: Thu, 10 Aug 2023 14:28:55 -0600
Subject: [PATCH 161/172] better news

---
 newsfragments/3899.bugfix | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/newsfragments/3899.bugfix b/newsfragments/3899.bugfix
index a55239c38..55d4fabd4 100644
--- a/newsfragments/3899.bugfix
+++ b/newsfragments/3899.bugfix
@@ -1 +1,4 @@
-Print a useful message when a storage-client cannot be matched to configuration
+Provide better feedback from plugin configuration errors
+
+Local errors now print a useful message and exit.
+Announcements that only contain invalid / unusable plugins now show a message in the Welcome page.
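The behaviour introduced by the preceding patches can be illustrated with a
minimal sketch (not part of the series itself; "example-plugin" is a
hypothetical plugin name assumed not to be installed):

    from allmydata.storage_client import StorageClientConfig, MissingPlugin

    # A client configuration that asks for a plugin that cannot be found.
    config = StorageClientConfig(storage_plugins={"example-plugin": {}})

    try:
        # Raises MissingPlugin immediately instead of failing much later,
        # while processing storage announcements.
        config.get_configured_storage_plugins()
    except MissingPlugin as e:
        print(e)  # Missing plugin 'example-plugin'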
From f51d49faa54c0fff3b2146bb6630e83da53484c5 Mon Sep 17 00:00:00 2001
From: meejah
Date: Fri, 11 Aug 2023 09:03:30 -0600
Subject: [PATCH 162/172] typing

Co-authored-by: Jean-Paul Calderone

---
 src/allmydata/storage_client.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/allmydata/storage_client.py b/src/allmydata/storage_client.py
index 1b7b92acb..7040ecd16 100644
--- a/src/allmydata/storage_client.py
+++ b/src/allmydata/storage_client.py
@@ -187,7 +187,7 @@ class StorageClientConfig(object):
             grid_manager_keys,
         )
 
-    def get_configured_storage_plugins(self):
+    def get_configured_storage_plugins(self) -> dict[str, IFoolscapStoragePlugin]:
         """
         :returns Dict[str, IFoolscapStoragePlugin]: a dict mapping names
             to instances for all available plugins

From d81b64ba9e2dd8aa84a2812f702ef55cd1698f52 Mon Sep 17 00:00:00 2001
From: meejah
Date: Fri, 11 Aug 2023 09:05:16 -0600
Subject: [PATCH 163/172] docstring

Co-authored-by: Jean-Paul Calderone

---
 src/allmydata/storage_client.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/allmydata/storage_client.py b/src/allmydata/storage_client.py
index 7040ecd16..a6a5336c6 100644
--- a/src/allmydata/storage_client.py
+++ b/src/allmydata/storage_client.py
@@ -189,8 +189,8 @@ class StorageClientConfig(object):
 
     def get_configured_storage_plugins(self) -> dict[str, IFoolscapStoragePlugin]:
         """
-        :returns Dict[str, IFoolscapStoragePlugin]: a dict mapping names
-            to instances for all available plugins
+        :returns: a mapping from names to instances for all available
+            plugins
 
         :raises MissingPlugin: if the configuration asks for a plugin
             for which there is no corresponding instance (e.g. it is

From c27b330984afdfc612f90707ccc1ffc2e2473042 Mon Sep 17 00:00:00 2001
From: meejah
Date: Fri, 11 Aug 2023 09:07:00 -0600
Subject: [PATCH 164/172] more robust comparison

Co-authored-by: Jean-Paul Calderone

---
 src/allmydata/test/test_storage_client.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/allmydata/test/test_storage_client.py b/src/allmydata/test/test_storage_client.py
index d719f227b..328e90499 100644
--- a/src/allmydata/test/test_storage_client.py
+++ b/src/allmydata/test/test_storage_client.py
@@ -196,7 +196,7 @@ class UnrecognizedAnnouncement(unittest.TestCase):
             self._tub_maker,
             {},
             node_config=EMPTY_CLIENT_CONFIG,
-            config=config or StorageClientConfig(),
+            config=config if config is not None else StorageClientConfig(),
         )
 
     def test_no_exceptions(self):

From a0769f59dce7b3d70f2e4833b0e4405d8ad8e472 Mon Sep 17 00:00:00 2001
From: meejah
Date: Fri, 11 Aug 2023 09:07:18 -0600
Subject: [PATCH 165/172] naming

Co-authored-by: Jean-Paul Calderone

---
 src/allmydata/test/test_storage_client.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/allmydata/test/test_storage_client.py b/src/allmydata/test/test_storage_client.py
index 328e90499..5b2f80712 100644
--- a/src/allmydata/test/test_storage_client.py
+++ b/src/allmydata/test/test_storage_client.py
@@ -251,7 +251,7 @@ class UnrecognizedAnnouncement(unittest.TestCase):
             _ = self.native_storage_server(
                 StorageClientConfig(
                     storage_plugins={
-                        "nothing": {}
+                        "missing-plugin-name": {}
                     }
                 )
             )

From 2e76d554e2a1b6ecd090eedce64645a84e890710 Mon Sep 17 00:00:00 2001
From: meejah
Date: Fri, 11 Aug 2023 09:08:03 -0600
Subject: [PATCH 166/172] don't explicitly drop return

Co-authored-by: Jean-Paul Calderone

---
 src/allmydata/test/test_storage_client.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/allmydata/test/test_storage_client.py b/src/allmydata/test/test_storage_client.py
index 5b2f80712..f8db402d0 100644
--- a/src/allmydata/test/test_storage_client.py
+++ b/src/allmydata/test/test_storage_client.py
@@ -248,7 +248,7 @@ class UnrecognizedAnnouncement(unittest.TestCase):
         An exception is produced if the plugin is missing
         """
         with self.assertRaises(MissingPlugin):
-            _ = self.native_storage_server(
+            self.native_storage_server(
                 StorageClientConfig(
                     storage_plugins={
                         "missing-plugin-name": {}

From 375ee54c80bff6cb4327eec54e753a086055c4e5 Mon Sep 17 00:00:00 2001
From: meejah
Date: Fri, 11 Aug 2023 09:08:19 -0600
Subject: [PATCH 167/172] typing

Co-authored-by: Jean-Paul Calderone

---
 src/allmydata/test/test_storage_client.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/allmydata/test/test_storage_client.py b/src/allmydata/test/test_storage_client.py
index f8db402d0..97ce9fe68 100644
--- a/src/allmydata/test/test_storage_client.py
+++ b/src/allmydata/test/test_storage_client.py
@@ -186,7 +186,7 @@ class UnrecognizedAnnouncement(unittest.TestCase):
     def _tub_maker(self, overrides):
         return Service()
 
-    def native_storage_server(self, config=None):
+    def native_storage_server(self, config: Optional[StorageClientConfig] = None) -> NativeStorageServer:
         """
         Make a ``NativeStorageServer`` out of an unrecognizable announcement.
         """

From c03076fe213382af5e754724f072fc50f9b61f49 Mon Sep 17 00:00:00 2001
From: meejah
Date: Fri, 11 Aug 2023 19:18:23 -0600
Subject: [PATCH 168/172] don't need fallback

---
 src/allmydata/storage_client.py | 11 +++++------
 1 file changed, 5 insertions(+), 6 deletions(-)

diff --git a/src/allmydata/storage_client.py b/src/allmydata/storage_client.py
index 1b7b92acb..2a3b1dbad 100644
--- a/src/allmydata/storage_client.py
+++ b/src/allmydata/storage_client.py
@@ -772,8 +772,6 @@ class NonReconnector(object):
     def getReconnectionInfo(self):
         return ReconnectionInfo()
 
-_null_storage = _NullStorage()
-
 
 class AnnouncementNotMatched(Exception):
     """
@@ -915,10 +913,11 @@ def _make_storage_system(
             storage_server,
         )
 
-    # Nothing matched so we can't talk to this server.  If we have a
-    # specific reason in "unmatched", use it; otherwise the generic
-    # one
-    return unmatched or _null_storage
+    # Nothing matched so we can't talk to this server.  (There should
+    # not be a way to get here without this local being valid)
+    assert unmatched is not None, "Expected unmatched plugin error"
+    return unmatched
+
 
 @implementer(IServer)
 class NativeStorageServer(service.MultiService):

From ffa589d6f827476ff7c7b98a7db52f34be4cf996 Mon Sep 17 00:00:00 2001
From: meejah
Date: Fri, 11 Aug 2023 21:19:02 -0600
Subject: [PATCH 169/172] import error

---
 src/allmydata/test/test_storage_client.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/allmydata/test/test_storage_client.py b/src/allmydata/test/test_storage_client.py
index 97ce9fe68..604884eba 100644
--- a/src/allmydata/test/test_storage_client.py
+++ b/src/allmydata/test/test_storage_client.py
@@ -8,7 +8,7 @@ from json import (
     loads,
 )
 import hashlib
-from typing import Union, Any
+from typing import Union, Any, Optional
 
 from hyperlink import DecodedURL
 from fixtures import (

From 356a1d0f792ae2c5ea65105f7e9ffb0eb1321aa0 Mon Sep 17 00:00:00 2001
From: meejah
Date: Fri, 11 Aug 2023 22:01:21 -0600
Subject: [PATCH 170/172] don't know why dict_keys are so confusing to mypy

---
 src/allmydata/storage_client.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/allmydata/storage_client.py b/src/allmydata/storage_client.py
index c95d72dbf..75e717037 100644
--- a/src/allmydata/storage_client.py
+++ b/src/allmydata/storage_client.py
@@ -209,7 +209,7 @@ class StorageClientConfig(object):
             except KeyError:
                 raise MissingPlugin(plugin_name)
             configured[plugin_name] = plugin
-        return configured
+        return configured  # type: ignore

From a5b95273d7b3b420be6bc57ec9c4cd56897425d5 Mon Sep 17 00:00:00 2001
From: meejah
Date: Fri, 11 Aug 2023 23:47:24 -0600
Subject: [PATCH 171/172] typing is .. good?

---
 src/allmydata/storage_client.py | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/src/allmydata/storage_client.py b/src/allmydata/storage_client.py
index 75e717037..d35cd788b 100644
--- a/src/allmydata/storage_client.py
+++ b/src/allmydata/storage_client.py
@@ -33,7 +33,7 @@ Ported to Python 3.
 from __future__ import annotations
 
 from six import ensure_text
-from typing import Union, Callable, Any, Optional, cast
+from typing import Union, Callable, Any, Optional, cast, Dict
 from os import urandom
 import re
 import time
@@ -202,14 +202,15 @@ class StorageClientConfig(object):
             in getPlugins(IFoolscapStoragePlugin)
         }
 
-        configured = dict()
+        # mypy doesn't like "str" in place of Any ...
+        configured: Dict[Any, IFoolscapStoragePlugin] = dict()
         for plugin_name in self.storage_plugins:
             try:
                 plugin = plugins[plugin_name]
             except KeyError:
                 raise MissingPlugin(plugin_name)
             configured[plugin_name] = plugin
-        return configured  # type: ignore
+        return configured

From ad44958f0223c92b0133b4b65325ae540a54dd8a Mon Sep 17 00:00:00 2001
From: meejah
Date: Sat, 12 Aug 2023 00:35:49 -0600
Subject: [PATCH 172/172] more kinds of whitespace

---
 src/allmydata/test/cli/test_run.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/allmydata/test/cli/test_run.py b/src/allmydata/test/cli/test_run.py
index 2adcfea19..ae0f92131 100644
--- a/src/allmydata/test/cli/test_run.py
+++ b/src/allmydata/test/cli/test_run.py
@@ -264,7 +264,7 @@ class RunTests(SyncTestCase):
         self.assertThat(runs, Equals([]))
         self.assertThat(result_code, Equals(1))
 
-    good_file_content_re = re.compile(r"\w[0-9]*\w[0-9]*\w")
+    good_file_content_re = re.compile(r"\s[0-9]*\s[0-9]*\s", re.M)
 
     @given(text())
     def test_pidfile_contents(self, content):
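To illustrate what the revised pidfile pattern accepts, a minimal sketch (not
part of the series; the sample pid and start-time values are invented):

    import re

    good_file_content_re = re.compile(r"\s[0-9]*\s[0-9]*\s", re.M)

    # Spaces around the two numbers match...
    assert good_file_content_re.search(" 4721 1692401562 ")
    # ...and so do newlines, the extra "kinds of whitespace" this commit allows.
    assert good_file_content_re.search("\n4721\n1692401562\n")

Note that re.M only changes the meaning of the ^ and $ anchors, which this
pattern does not use.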