Mirror of https://github.com/tahoe-lafs/tahoe-lafs.git (synced 2025-03-25 13:07:37 +00:00)
Commit 61dc1e4d43: Merge remote-tracking branch 'origin/master' into 4009-more-logging
.github/workflows/ci.yml (vendored): 4 changed lines
@@ -169,12 +169,12 @@ jobs:
           python-version: "3.9"
           force-foolscap: false
         - os: windows-latest
-          python-version: "3.9"
+          python-version: "3.11"
           force-foolscap: false
         # 22.04 has some issue with Tor at the moment:
         # https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3943
         - os: ubuntu-20.04
-          python-version: "3.11"
+          python-version: "3.10"
           force-foolscap: false
     steps:
@@ -9,4 +9,10 @@ select = [
    # Make sure we bind closure variables in a loop (equivalent to pylint
    # cell-var-from-loop):
    "B023",
+    # Don't silence exceptions in finally by accident:
+    "B012",
+    # Don't use mutable default arguments:
+    "B006",
+    # Errors from PyLint:
+    "PLE",
]
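
The B006 rule enabled here is what drives most of the Python hunks below, which swap mutable default arguments for `None` sentinels. A minimal sketch of the pitfall and the fix, using hypothetical names (`remember_bad`, `remember_good`) rather than code from this commit:

    # The default dict is created once, at function definition time, so every
    # call that omits `cache` shares (and mutates) the same object.
    def remember_bad(key, value, cache={}):  # noqa: B006 - deliberately wrong
        cache[key] = value
        return cache

    # The fix applied throughout this commit: default to None, allocate per call.
    def remember_good(key, value, cache=None):
        if cache is None:
            cache = {}
        cache[key] = value
        return cache

    assert remember_bad("a", 1) is remember_bad("b", 2)        # same shared dict
    assert remember_good("a", 1) is not remember_good("b", 2)  # fresh dict each call
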
@@ -48,12 +48,16 @@ from .util import (
     generate_ssh_key,
     block_with_timeout,
 )
+from allmydata.node import read_config


-# No reason for HTTP requests to take longer than two minutes in the
+# No reason for HTTP requests to take longer than four minutes in the
 # integration tests. See allmydata/scripts/common_http.py for usage.
-os.environ["__TAHOE_CLI_HTTP_TIMEOUT"] = "120"
+os.environ["__TAHOE_CLI_HTTP_TIMEOUT"] = "240"

 # Make Foolscap logging go into Twisted logging, so that integration test logs
 # include extra information
 # (https://github.com/warner/foolscap/blob/latest-release/doc/logging.rst):
 os.environ["FLOGTOTWISTED"] = "1"

 # pytest customization hooks

@@ -161,7 +165,7 @@ def flog_gatherer(reactor, temp_dir, flog_binary, request):
     )
     pytest_twisted.blockon(out_protocol.done)

-    twistd_protocol = _MagicTextProtocol("Gatherer waiting at")
+    twistd_protocol = _MagicTextProtocol("Gatherer waiting at", "gatherer")
     twistd_process = reactor.spawnProcess(
         twistd_protocol,
         which('twistd')[0],
@@ -212,13 +216,6 @@ def flog_gatherer(reactor, temp_dir, flog_binary, request):
     include_result=False,
 )
 def introducer(reactor, temp_dir, flog_gatherer, request):
-    config = '''
-[node]
-nickname = introducer0
-web.port = 4560
-log_gatherer.furl = {log_furl}
-'''.format(log_furl=flog_gatherer)
-
     intro_dir = join(temp_dir, 'introducer')
     print("making introducer", intro_dir)

@@ -238,13 +235,14 @@ log_gatherer.furl = {log_furl}
     )
     pytest_twisted.blockon(done_proto.done)

-    # over-write the config file with our stuff
-    with open(join(intro_dir, 'tahoe.cfg'), 'w') as f:
-        f.write(config)
+    config = read_config(intro_dir, "tub.port")
+    config.set_config("node", "nickname", "introducer-tor")
+    config.set_config("node", "web.port", "4562")
+    config.set_config("node", "log_gatherer.furl", flog_gatherer)

     # "tahoe run" is consistent across Linux/macOS/Windows, unlike the old
     # "start" command.
-    protocol = _MagicTextProtocol('introducer running')
+    protocol = _MagicTextProtocol('introducer running', "introducer")
     transport = _tahoe_runner_optional_coverage(
         protocol,
         reactor,
@@ -288,15 +286,9 @@ def introducer_furl(introducer, temp_dir):
     include_result=False,
 )
 def tor_introducer(reactor, temp_dir, flog_gatherer, request):
-    config = '''
-[node]
-nickname = introducer_tor
-web.port = 4561
-log_gatherer.furl = {log_furl}
-'''.format(log_furl=flog_gatherer)
-
     intro_dir = join(temp_dir, 'introducer_tor')
-    print("making introducer", intro_dir)
+    print("making Tor introducer in {}".format(intro_dir))
+    print("(this can take tens of seconds to allocate Onion address)")

     if not exists(intro_dir):
         mkdir(intro_dir)
@@ -307,20 +299,25 @@ log_gatherer.furl = {log_furl}
         request,
         (
             'create-introducer',
-            '--tor-control-port', 'tcp:localhost:8010',
+            # The control port should agree with the configuration of the
+            # Tor network we bootstrap with chutney.
+            '--tor-control-port', 'tcp:localhost:8007',
             '--hide-ip',
             '--listen=tor',
             intro_dir,
         ),
     )
     pytest_twisted.blockon(done_proto.done)

-    # over-write the config file with our stuff
-    with open(join(intro_dir, 'tahoe.cfg'), 'w') as f:
-        f.write(config)
+    # adjust a few settings
+    config = read_config(intro_dir, "tub.port")
+    config.set_config("node", "nickname", "introducer-tor")
+    config.set_config("node", "web.port", "4561")
+    config.set_config("node", "log_gatherer.furl", flog_gatherer)

     # "tahoe run" is consistent across Linux/macOS/Windows, unlike the old
     # "start" command.
-    protocol = _MagicTextProtocol('introducer running')
+    protocol = _MagicTextProtocol('introducer running', "tor_introducer")
     transport = _tahoe_runner_optional_coverage(
         protocol,
         reactor,
@@ -339,7 +336,9 @@ log_gatherer.furl = {log_furl}
             pass
     request.addfinalizer(cleanup)

+    print("Waiting for introducer to be ready...")
     pytest_twisted.blockon(protocol.magic_seen)
+    print("Introducer ready.")
     return transport


@@ -350,6 +349,7 @@ def tor_introducer_furl(tor_introducer, temp_dir):
         print("Don't see {} yet".format(furl_fname))
         sleep(.1)
     furl = open(furl_fname, 'r').read()
+    print(f"Found Tor introducer furl: {furl} in {furl_fname}")
     return furl

@@ -495,7 +495,7 @@ def chutney(reactor, temp_dir: str) -> tuple[str, dict[str, str]]:
         'git',
         (
             'git', 'clone',
-            'https://git.torproject.org/chutney.git',
+            'https://gitlab.torproject.org/tpo/core/chutney.git',
             chutney_dir,
         ),
         env=environ,
@@ -511,7 +511,7 @@ def chutney(reactor, temp_dir: str) -> tuple[str, dict[str, str]]:
         (
             'git', '-C', chutney_dir,
             'reset', '--hard',
-            'c825cba0bcd813c644c6ac069deeb7347d3200ee'
+            'c4f6789ad2558dcbfeb7d024c6481d8112bfb6c2'
         ),
         env=environ,
     )
@@ -538,6 +538,10 @@ def tor_network(reactor, temp_dir, chutney, request):

     env = environ.copy()
     env.update(chutney_env)
+    env.update({
+        # default is 60, probably too short for reliable automated use.
+        "CHUTNEY_START_TIME": "600",
+    })
     chutney_argv = (sys.executable, '-m', 'chutney.TorNet')
     def chutney(argv):
         proto = _DumpOutputProtocol(None)
@@ -551,17 +555,9 @@ def tor_network(reactor, temp_dir, chutney, request):
         return proto.done

     # now, as per Chutney's README, we have to create the network
     # ./chutney configure networks/basic
-    # ./chutney start networks/basic
     pytest_twisted.blockon(chutney(("configure", basic_network)))
-    pytest_twisted.blockon(chutney(("start", basic_network)))
-
-    # print some useful stuff
-    try:
-        pytest_twisted.blockon(chutney(("status", basic_network)))
-    except ProcessTerminated:
-        print("Chutney.TorNet status failed (continuing)")

+    # before we start the network, ensure we will tear down at the end
     def cleanup():
         print("Tearing down Chutney Tor network")
         try:
@@ -570,5 +566,13 @@ def tor_network(reactor, temp_dir, chutney, request):
             # If this doesn't exit cleanly, that's fine, that shouldn't fail
             # the test suite.
             pass

     request.addfinalizer(cleanup)

+    pytest_twisted.blockon(chutney(("start", basic_network)))
+    pytest_twisted.blockon(chutney(("wait_for_bootstrap", basic_network)))
+
+    # print some useful stuff
+    try:
+        pytest_twisted.blockon(chutney(("status", basic_network)))
+    except ProcessTerminated:
+        print("Chutney.TorNet status failed (continuing)")

@@ -23,6 +23,8 @@ from twisted.internet.error import ProcessExitedAlready
 from allmydata.test.common import (
     write_introducer,
 )
+from allmydata.node import read_config


 if which("docker") is None:
     pytest.skip('Skipping I2P tests since Docker is unavailable', allow_module_level=True)
@@ -35,7 +37,7 @@ if sys.platform.startswith('win'):
 @pytest.fixture
 def i2p_network(reactor, temp_dir, request):
     """Fixture to start up local i2pd."""
-    proto = util._MagicTextProtocol("ephemeral keys")
+    proto = util._MagicTextProtocol("ephemeral keys", "i2pd")
     reactor.spawnProcess(
         proto,
         which("docker"),
@@ -68,13 +70,6 @@ def i2p_network(reactor, temp_dir, request):
     include_result=False,
 )
 def i2p_introducer(reactor, temp_dir, flog_gatherer, request):
-    config = '''
-[node]
-nickname = introducer_i2p
-web.port = 4561
-log_gatherer.furl = {log_furl}
-'''.format(log_furl=flog_gatherer)
-
     intro_dir = join(temp_dir, 'introducer_i2p')
     print("making introducer", intro_dir)

@@ -94,12 +89,14 @@ log_gatherer.furl = {log_furl}
     pytest_twisted.blockon(done_proto.done)

-    # over-write the config file with our stuff
-    with open(join(intro_dir, 'tahoe.cfg'), 'w') as f:
-        f.write(config)
+    config = read_config(intro_dir, "tub.port")
+    config.set_config("node", "nickname", "introducer_i2p")
+    config.set_config("node", "web.port", "4563")
+    config.set_config("node", "log_gatherer.furl", flog_gatherer)

     # "tahoe run" is consistent across Linux/macOS/Windows, unlike the old
     # "start" command.
-    protocol = util._MagicTextProtocol('introducer running')
+    protocol = util._MagicTextProtocol('introducer running', "introducer")
     transport = util._tahoe_runner_optional_coverage(
         protocol,
         reactor,
@@ -133,6 +130,7 @@ def i2p_introducer_furl(i2p_introducer, temp_dir):


 @pytest_twisted.inlineCallbacks
+@pytest.mark.skip("I2P tests are not functioning at all, for unknown reasons")
 def test_i2p_service_storage(reactor, request, temp_dir, flog_gatherer, i2p_network, i2p_introducer_furl):
     yield _create_anonymous_node(reactor, 'carol_i2p', 8008, request, temp_dir, flog_gatherer, i2p_network, i2p_introducer_furl)
     yield _create_anonymous_node(reactor, 'dave_i2p', 8009, request, temp_dir, flog_gatherer, i2p_network, i2p_introducer_furl)
@@ -18,6 +18,7 @@ from twisted.python.filepath import (
 from allmydata.test.common import (
     write_introducer,
 )
+from allmydata.client import read_config

 # see "conftest.py" for the fixtures (e.g. "tor_network")

@@ -32,8 +33,8 @@ if sys.platform.startswith('win'):
 def test_onion_service_storage(reactor, request, temp_dir, flog_gatherer, tor_network, tor_introducer_furl):
     carol = yield _create_anonymous_node(reactor, 'carol', 8008, request, temp_dir, flog_gatherer, tor_network, tor_introducer_furl)
     dave = yield _create_anonymous_node(reactor, 'dave', 8009, request, temp_dir, flog_gatherer, tor_network, tor_introducer_furl)
-    yield util.await_client_ready(carol, minimum_number_of_servers=2)
-    yield util.await_client_ready(dave, minimum_number_of_servers=2)
+    yield util.await_client_ready(carol, minimum_number_of_servers=2, timeout=600)
+    yield util.await_client_ready(dave, minimum_number_of_servers=2, timeout=600)

     # ensure both nodes are connected to "a grid" by uploading
     # something via carol, and retrieve it using dave.
@@ -60,7 +61,7 @@ def test_onion_service_storage(reactor, request, temp_dir, flog_gatherer, tor_ne
     )
     yield proto.done
     cap = proto.output.getvalue().strip().split()[-1]
-    print("TEH CAP!", cap)
+    print("capability: {}".format(cap))

     proto = util._CollectOutputProtocol(capture_stderr=False)
     reactor.spawnProcess(
@@ -85,7 +86,7 @@ def _create_anonymous_node(reactor, name, control_port, request, temp_dir, flog_
     web_port = "tcp:{}:interface=localhost".format(control_port + 2000)

     if True:
-        print("creating", node_dir.path)
+        print(f"creating {node_dir.path} with introducer {introducer_furl}")
         node_dir.makedirs()
         proto = util._DumpOutputProtocol(None)
         reactor.spawnProcess(
@@ -95,10 +96,14 @@ def _create_anonymous_node(reactor, name, control_port, request, temp_dir, flog_
             sys.executable, '-b', '-m', 'allmydata.scripts.runner',
             'create-node',
             '--nickname', name,
+            '--webport', web_port,
+            '--introducer', introducer_furl,
             '--hide-ip',
             '--tor-control-port', 'tcp:localhost:{}'.format(control_port),
             '--listen', 'tor',
+            '--shares-needed', '1',
+            '--shares-happy', '1',
+            '--shares-total', '2',
             node_dir.path,
         ),
         env=environ,
@@ -108,35 +113,13 @@ def _create_anonymous_node(reactor, name, control_port, request, temp_dir, flog_

     # Which services should this client connect to?
     write_introducer(node_dir, "default", introducer_furl)
-    with node_dir.child('tahoe.cfg').open('w') as f:
-        node_config = '''
-[node]
-nickname = %(name)s
-web.port = %(web_port)s
-web.static = public_html
-log_gatherer.furl = %(log_furl)s
-
-[tor]
-control.port = tcp:localhost:%(control_port)d
-onion.external_port = 3457
-onion.local_port = %(local_port)d
-onion = true
-onion.private_key_file = private/tor_onion.privkey
-
-[client]
-shares.needed = 1
-shares.happy = 1
-shares.total = 2
-
-''' % {
-            'name': name,
-            'web_port': web_port,
-            'log_furl': flog_gatherer,
-            'control_port': control_port,
-            'local_port': control_port + 1000,
-        }
-        node_config = node_config.encode("utf-8")
-        f.write(node_config)
+    config = read_config(node_dir.path, "tub.port")
+    config.set_config("node", "log_gatherer.furl", flog_gatherer)
+    config.set_config("tor", "onion", "true")
+    config.set_config("tor", "onion.external_port", "3457")
+    config.set_config("tor", "control.port", f"tcp:port={control_port}:host=127.0.0.1")
+    config.set_config("tor", "onion.private_key_file", "private/tor_onion.privkey")

     print("running")
     result = yield util._run_node(reactor, node_dir.path, request, None)

@@ -12,7 +12,7 @@ import sys
 import time
 import json
 from os import mkdir, environ
-from os.path import exists, join
+from os.path import exists, join, basename
 from io import StringIO, BytesIO
 from subprocess import check_output

@@ -93,7 +93,6 @@ class _CollectOutputProtocol(ProcessProtocol):
         self.output.write(data)

     def errReceived(self, data):
-        print("ERR: {!r}".format(data))
         if self.capture_stderr:
             self.output.write(data)

@@ -129,8 +128,9 @@ class _MagicTextProtocol(ProcessProtocol):
     and then .callback()s on self.done and .errback's if the process exits
     """

-    def __init__(self, magic_text):
+    def __init__(self, magic_text: str, name: str) -> None:
         self.magic_seen = Deferred()
+        self.name = f"{name}: "
         self.exited = Deferred()
         self._magic_text = magic_text
         self._output = StringIO()
@@ -140,7 +140,7 @@ class _MagicTextProtocol(ProcessProtocol):

     def outReceived(self, data):
         data = str(data, sys.stdout.encoding)
-        sys.stdout.write(data)
+        sys.stdout.write(self.name + data)
         self._output.write(data)
         if not self.magic_seen.called and self._magic_text in self._output.getvalue():
             print("Saw '{}' in the logs".format(self._magic_text))
@@ -148,7 +148,7 @@ class _MagicTextProtocol(ProcessProtocol):

     def errReceived(self, data):
         data = str(data, sys.stderr.encoding)
-        sys.stdout.write(data)
+        sys.stdout.write(self.name + data)


 def _cleanup_process_async(transport: IProcessTransport, allow_missing: bool) -> None:
|
||||
"""
|
||||
if magic_text is None:
|
||||
magic_text = "client running"
|
||||
protocol = _MagicTextProtocol(magic_text)
|
||||
protocol = _MagicTextProtocol(magic_text, basename(node_dir))
|
||||
|
||||
# "tahoe run" is consistent across Linux/macOS/Windows, unlike the old
|
||||
# "start" command.
|
||||
@ -605,19 +605,27 @@ def await_client_ready(tahoe, timeout=10, liveness=60*2, minimum_number_of_serve
|
||||
print("waiting because '{}'".format(e))
|
||||
time.sleep(1)
|
||||
continue
|
||||
servers = js['servers']
|
||||
|
||||
if len(js['servers']) < minimum_number_of_servers:
|
||||
print("waiting because insufficient servers")
|
||||
if len(servers) < minimum_number_of_servers:
|
||||
print(f"waiting because {servers} is fewer than required ({minimum_number_of_servers})")
|
||||
time.sleep(1)
|
||||
continue
|
||||
|
||||
print(
|
||||
f"Now: {time.ctime()}\n"
|
||||
f"Server last-received-data: {[time.ctime(s['last_received_data']) for s in servers]}"
|
||||
)
|
||||
|
||||
server_times = [
|
||||
server['last_received_data']
|
||||
for server in js['servers']
|
||||
for server in servers
|
||||
]
|
||||
# if any times are null/None that server has never been
|
||||
# contacted (so it's down still, probably)
|
||||
if any(t is None for t in server_times):
|
||||
print("waiting because at least one server not contacted")
|
||||
never_received_data = server_times.count(None)
|
||||
if never_received_data > 0:
|
||||
print(f"waiting because {never_received_data} server(s) not contacted")
|
||||
time.sleep(1)
|
||||
continue
|
||||
|
||||
|
newsfragments/3999.bugfix (new file): 1 line
@@ -0,0 +1 @@
+A bug where Introducer nodes configured to listen on Tor or I2P would not actually do so has been fixed.
newsfragments/4015.minor (new file, empty)
newsfragments/4018.minor (new file, empty)
newsfragments/4022.minor (new file, empty)
setup.py: 3 changed lines
@@ -141,7 +141,6 @@ install_requires = [

    # HTTP server and client
    "klein",

-    # 2.2.0 has a bug: https://github.com/pallets/werkzeug/issues/2465
    # 2.3.x has an incompatibility with Klein: https://github.com/twisted/klein/pull/575
    "werkzeug != 2.2.0, < 2.3",
@@ -419,7 +418,7 @@ setup(name="tahoe-lafs", # also set in __init__.py
            "subunitreporter==22.2.0",
            "python-subunit==1.4.2",
            "junitxml==0.7",
-            "coverage ~= 5.0",
+            "coverage==7.2.5",
        ],

        # Here are the library dependencies of the test suite.

@@ -7,7 +7,7 @@ import os
 import stat
 import time
 import weakref
-from typing import Optional
+from typing import Optional, Iterable
 from base64 import urlsafe_b64encode
 from functools import partial
 # On Python 2 this will be the backported package:
@@ -189,7 +189,7 @@ class Terminator(service.Service):
         return service.Service.stopService(self)


-def read_config(basedir, portnumfile, generated_files=[]):
+def read_config(basedir, portnumfile, generated_files: Iterable=()):
     """
     Read and validate configuration for a client-style Node. See
     :method:`allmydata.node.read_config` for parameter meanings (the
@@ -1103,7 +1103,7 @@ class _Client(node.Node, pollmixin.PollMixin):
         # may get an opaque node if there were any problems.
         return self.nodemaker.create_from_cap(write_uri, read_uri, deep_immutable=deep_immutable, name=name)

-    def create_dirnode(self, initial_children={}, version=None):
+    def create_dirnode(self, initial_children=None, version=None):
         d = self.nodemaker.create_new_mutable_directory(initial_children, version=version)
         return d

@@ -678,8 +678,10 @@ class DirectoryNode(object):
         return d

     # XXX: Too many arguments? Worthwhile to break into mutable/immutable?
-    def create_subdirectory(self, namex, initial_children={}, overwrite=True,
+    def create_subdirectory(self, namex, initial_children=None, overwrite=True,
                             mutable=True, mutable_version=None, metadata=None):
+        if initial_children is None:
+            initial_children = {}
         name = normalize(namex)
         if self.is_readonly():
             return defer.fail(NotWriteableError())

@@ -332,7 +332,7 @@ class IncompleteHashTree(CompleteBinaryTreeMixin, list):
             name += " (leaf [%d] of %d)" % (leafnum, numleaves)
         return name

-    def set_hashes(self, hashes={}, leaves={}):
+    def set_hashes(self, hashes=None, leaves=None):
         """Add a bunch of hashes to the tree.

         I will validate these to the best of my ability. If I already have a
@@ -382,7 +382,10 @@ class IncompleteHashTree(CompleteBinaryTreeMixin, list):
         corrupted or one of the received hashes was corrupted. If it raises
         NotEnoughHashesError, then the otherhashes dictionary was incomplete.
         """

+        if hashes is None:
+            hashes = {}
+        if leaves is None:
+            leaves = {}
         assert isinstance(hashes, dict)
         for h in hashes.values():
             assert isinstance(h, bytes)

@@ -1391,7 +1391,9 @@ class CHKUploader(object):
     def get_upload_status(self):
         return self._upload_status

-def read_this_many_bytes(uploadable, size, prepend_data=[]):
+def read_this_many_bytes(uploadable, size, prepend_data=None):
+    if prepend_data is None:
+        prepend_data = []
     if size == 0:
         return defer.succeed([])
     d = uploadable.read(size)

@@ -1447,7 +1447,7 @@ class IDirectoryNode(IFilesystemNode):
        is a file, or if must_be_file is True and the child is a directory,
        I raise ChildOfWrongTypeError."""

-    def create_subdirectory(name, initial_children={}, overwrite=True,
+    def create_subdirectory(name, initial_children=None, overwrite=True,
                             mutable=True, mutable_version=None, metadata=None):
        """I create and attach a directory at the given name. The new
        directory can be empty, or it can be populated with children
@@ -2586,7 +2586,7 @@ class IClient(Interface):
        @return: a Deferred that fires with an IMutableFileNode instance.
        """

-    def create_dirnode(initial_children={}):
+    def create_dirnode(initial_children=None):
        """Create a new unattached dirnode, possibly with initial children.

        @param initial_children: dict with keys that are unicode child names,
@@ -2641,7 +2641,7 @@ class INodeMaker(Interface):
        for use by unit tests, to create mutable files that are smaller than
        usual."""

-    def create_new_mutable_directory(initial_children={}):
+    def create_new_mutable_directory(initial_children=None):
        """I create a new mutable directory, and return a Deferred that will
        fire with the IDirectoryNode instance when it is ready. If
        initial_children= is provided (a dict mapping unicode child name to

@@ -68,10 +68,6 @@ def create_introducer(basedir=u"."):
         default_connection_handlers, foolscap_connection_handlers = create_connection_handlers(config, i2p_provider, tor_provider)
         tub_options = create_tub_options(config)

-        # we don't remember these because the Introducer doesn't make
-        # outbound connections.
-        i2p_provider = None
-        tor_provider = None
         main_tub = create_main_tub(
             config, tub_options, default_connection_handlers,
             foolscap_connection_handlers, i2p_provider, tor_provider,
@@ -83,6 +79,8 @@ def create_introducer(basedir=u"."):
             i2p_provider,
             tor_provider,
         )
+        i2p_provider.setServiceParent(node)
+        tor_provider.setServiceParent(node)
         return defer.succeed(node)
     except Exception:
         return Failure()

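
These two hunks are the substance of the 3999 bugfix noted in the newsfragment above: the Tor/I2P providers are no longer discarded before the Tub is created, and attaching them to the node makes Twisted start their listeners along with the node's other services. A minimal, self-contained sketch of that service-parent behaviour (illustrative names, not Tahoe code):

    from twisted.application import service

    class ListenerProvider(service.Service):
        """Stand-in for a Tor/I2P provider; its listener starts with the node."""
        def startService(self):
            service.Service.startService(self)
            print("listener started")  # the real providers launch/configure Tor or I2P here

    node = service.MultiService()
    provider = ListenerProvider()
    provider.setServiceParent(node)  # register the provider as a child service of the node
    node.startService()              # starting the node now starts the provider as well
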
@@ -17,7 +17,7 @@ import errno
 from base64 import b32decode, b32encode
 from errno import ENOENT, EPERM
 from warnings import warn
-from typing import Union
+from typing import Union, Iterable

 import attr

@@ -172,7 +172,7 @@ def create_node_dir(basedir, readme_text):
         f.write(readme_text)


-def read_config(basedir, portnumfile, generated_files=[], _valid_config=None):
+def read_config(basedir, portnumfile, generated_files: Iterable = (), _valid_config=None):
     """
     Read and validate configuration.

@@ -741,7 +741,7 @@ def create_connection_handlers(config, i2p_provider, tor_provider):


 def create_tub(tub_options, default_connection_handlers, foolscap_connection_handlers,
-               handler_overrides={}, force_foolscap=False, **kwargs):
+               handler_overrides=None, force_foolscap=False, **kwargs):
     """
     Create a Tub with the right options and handlers. It will be
     ephemeral unless the caller provides certFile= in kwargs
@@ -755,6 +755,8 @@ def create_tub(tub_options, default_connection_handlers, foolscap_connection_han
     :param bool force_foolscap: If True, only allow Foolscap, not just HTTPS
         storage protocol.
     """
+    if handler_overrides is None:
+        handler_overrides = {}
     # We listen simultaneously for both Foolscap and HTTPS on the same port,
     # so we have to create a special Foolscap Tub for that to work:
     if force_foolscap:
@@ -922,7 +924,7 @@ def tub_listen_on(i2p_provider, tor_provider, tub, tubport, location):
 def create_main_tub(config, tub_options,
                     default_connection_handlers, foolscap_connection_handlers,
                     i2p_provider, tor_provider,
-                    handler_overrides={}, cert_filename="node.pem"):
+                    handler_overrides=None, cert_filename="node.pem"):
     """
     Creates a 'main' Foolscap Tub, typically for use as the top-level
     access point for a running Node.
@@ -943,6 +945,8 @@ def create_main_tub(config, tub_options,
     :param tor_provider: None, or a _Provider instance if txtorcon +
         Tor are installed.
     """
+    if handler_overrides is None:
+        handler_overrides = {}
     portlocation = _tub_portlocation(
         config,
         iputil.get_local_addresses_sync,

@@ -135,8 +135,9 @@ class NodeMaker(object):
         d.addCallback(lambda res: n)
         return d

-    def create_new_mutable_directory(self, initial_children={}, version=None):
-        # initial_children must have metadata (i.e. {} instead of None)
+    def create_new_mutable_directory(self, initial_children=None, version=None):
+        if initial_children is None:
+            initial_children = {}
         for (name, (node, metadata)) in initial_children.items():
             precondition(isinstance(metadata, dict),
                          "create_new_mutable_directory requires metadata to be a dict, not None", metadata)

@@ -70,7 +70,8 @@ class MemoryWormholeServer(object):
         appid: str,
         relay_url: str,
         reactor: Any,
-        versions: Any={},
+        # Unfortunately we need a mutable default to match the real API
+        versions: Any={}, # noqa: B006
         delegate: Optional[Any]=None,
         journal: Optional[Any]=None,
         tor: Optional[Any]=None,

@@ -819,8 +819,8 @@ class SystemTestMixin(pollmixin.PollMixin, testutil.StallMixin):
             helper_furl = f.read()

         self.helper_furl = helper_furl
-        if self.numclients >= 4:
-            with open(os.path.join(basedirs[3], 'tahoe.cfg'), 'a+') as f:
+        if self.numclients >= 2:
+            with open(os.path.join(basedirs[1], 'tahoe.cfg'), 'a+') as f:
                 f.write(
                     "[client]\n"
                     "helper.furl = {}\n".format(helper_furl)
@@ -836,9 +836,9 @@ class SystemTestMixin(pollmixin.PollMixin, testutil.StallMixin):
         log.msg("CONNECTED")
         # now find out where the web port was
         self.webish_url = self.clients[0].getServiceNamed("webish").getURL()
-        if self.numclients >=4:
+        if self.numclients >=2:
             # and the helper-using webport
-            self.helper_webish_url = self.clients[3].getServiceNamed("webish").getURL()
+            self.helper_webish_url = self.clients[1].getServiceNamed("webish").getURL()

     def _generate_config(self, which, basedir, force_foolscap=False):
         config = {}
@@ -854,10 +854,10 @@ class SystemTestMixin(pollmixin.PollMixin, testutil.StallMixin):
             ("node", "tub.location"): allclients,

             # client 0 runs a webserver and a helper
-            # client 3 runs a webserver but no helper
-            ("node", "web.port"): {0, 3},
+            # client 1 runs a webserver but no helper
+            ("node", "web.port"): {0, 1},
             ("node", "timeout.keepalive"): {0},
-            ("node", "timeout.disconnect"): {3},
+            ("node", "timeout.disconnect"): {1},

             ("helper", "enabled"): {0},
         }

@@ -476,7 +476,7 @@ class GridTestMixin(object):
         ])

     def set_up_grid(self, num_clients=1, num_servers=10,
-                    client_config_hooks={}, oneshare=False):
+                    client_config_hooks=None, oneshare=False):
         """
         Create a Tahoe-LAFS storage grid.

@@ -489,6 +489,8 @@ class GridTestMixin(object):

         :return: ``None``
         """
+        if client_config_hooks is None:
+            client_config_hooks = {}
         # self.basedir must be set
         port_assigner = SameProcessStreamEndpointAssigner()
         port_assigner.setUp()

@@ -1,20 +1,13 @@
 """
 Ported to Python 3.
 """
-from __future__ import print_function
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import unicode_literals
-
-from future.utils import PY2
-if PY2:
-    # Don't import bytes since it causes issues on (so far unported) modules on Python 2.
-    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, dict, list, object, range, max, min, str # noqa: F401
+from __future__ import annotations

 from past.builtins import chr as byteschr, long
 from six import ensure_text

 import os, re, sys, time, json
+from typing import Optional

 from bs4 import BeautifulSoup

@@ -56,10 +49,12 @@ from .common_util import run_cli_unicode


 class RunBinTahoeMixin(object):
-    def run_bintahoe(self, args, stdin=None, python_options=[], env=None):
+    def run_bintahoe(self, args, stdin=None, python_options:Optional[list[str]]=None, env=None):
         # test_runner.run_bintahoe has better unicode support but doesn't
         # support env yet and is also synchronous.  If we could get rid of
         # this in favor of that, though, it would probably be an improvement.
+        if python_options is None:
+            python_options = []
         command = sys.executable
         argv = python_options + ["-b", "-m", "allmydata.scripts.runner"] + args

@@ -787,7 +782,7 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase):

     def test_filesystem(self):
         self.data = LARGE_DATA
-        d = self.set_up_nodes()
+        d = self.set_up_nodes(2)
         def _new_happy_semantics(ign):
             for c in self.clients:
                 c.encoding_params['happy'] = 1
@@ -1088,7 +1083,9 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase):
         headers["content-type"] = "multipart/form-data; boundary=%s" % str(sepbase, "ascii")
         return self.POST2(urlpath, body, headers, use_helper)

-    def POST2(self, urlpath, body=b"", headers={}, use_helper=False):
+    def POST2(self, urlpath, body=b"", headers=None, use_helper=False):
+        if headers is None:
+            headers = {}
         if use_helper:
             url = self.helper_webish_url + urlpath
         else:
@@ -1409,7 +1406,7 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase):
             rc,out,err = yield run_cli(verb, *args, nodeargs=nodeargs, **kwargs)
             defer.returnValue((out,err))

-        def _check_ls(out_and_err, expected_children, unexpected_children=[]):
+        def _check_ls(out_and_err, expected_children, unexpected_children=()):
             (out, err) = out_and_err
             self.failUnlessEqual(err, "")
             for s in expected_children:
@@ -1749,6 +1746,10 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase):

         return d

+    # In CI this test can be very slow, so give it a longer timeout:
+    test_filesystem.timeout = 360 # type: ignore[attr-defined]
+

     def test_filesystem_with_cli_in_subprocess(self):
         # We do this in a separate test so that test_filesystem doesn't skip if we can't run bin/tahoe.

@@ -565,7 +565,9 @@ class WebMixin(TimezoneMixin):
         returnValue(data)

     @inlineCallbacks
-    def HEAD(self, urlpath, return_response=False, headers={}):
+    def HEAD(self, urlpath, return_response=False, headers=None):
+        if headers is None:
+            headers = {}
         url = self.webish_url + urlpath
         response = yield treq.request("head", url, persistent=False,
                                       headers=headers)
@@ -573,7 +575,9 @@ class WebMixin(TimezoneMixin):
             raise Error(response.code, response="")
         returnValue( ("", response.code, response.headers) )

-    def PUT(self, urlpath, data, headers={}):
+    def PUT(self, urlpath, data, headers=None):
+        if headers is None:
+            headers = {}
         url = self.webish_url + urlpath
         return do_http("put", url, data=data, headers=headers)

@@ -618,7 +622,9 @@ class WebMixin(TimezoneMixin):
         body, headers = self.build_form(**fields)
         return self.POST2(urlpath, body, headers)

-    def POST2(self, urlpath, body="", headers={}, followRedirect=False):
+    def POST2(self, urlpath, body="", headers=None, followRedirect=False):
+        if headers is None:
+            headers = {}
         url = self.webish_url + urlpath
         if isinstance(body, str):
             body = body.encode("utf-8")

@@ -25,7 +25,7 @@ class DBError(Exception):


 def get_db(dbfile, stderr=sys.stderr,
-           create_version=(None, None), updaters={}, just_create=False, dbname="db",
+           create_version=(None, None), updaters=None, just_create=False, dbname="db",
           ):
    """Open or create the given db file. The parent directory must exist.
    create_version=(SCHEMA, VERNUM), and SCHEMA must have a 'version' table.
@@ -33,6 +33,8 @@ def get_db(dbfile, stderr=sys.stderr,
    to get from ver=1 to ver=2. Returns a (sqlite3,db) tuple, or raises
    DBError.
    """
+    if updaters is None:
+        updaters = {}
    must_create = not os.path.exists(dbfile)
    try:
        db = sqlite3.connect(dbfile)
