"""
Ported to Python 3.
"""
from __future__ import annotations
import os
import sys
import shutil
from time import sleep
from os import mkdir, listdir, environ
from os.path import join, exists
from tempfile import mkdtemp, mktemp
from functools import partial
from json import loads
from foolscap.furl import (
    decode_furl,
)
from eliot import (
    to_file,
    log_call,
)
from twisted.python.filepath import FilePath
from twisted.python.procutils import which
from twisted.internet.defer import DeferredList
from twisted.internet.error import (
    ProcessExitedAlready,
    ProcessTerminated,
)
import pytest
import pytest_twisted
from .util import (
    _CollectOutputProtocol,
    _MagicTextProtocol,
    _DumpOutputProtocol,
    _ProcessExitedProtocol,
    _create_node,
    _cleanup_tahoe_process,
    _tahoe_runner_optional_coverage,
    await_client_ready,
    TahoeProcess,
    cli,
    generate_ssh_key,
    block_with_timeout,
)

# No reason for HTTP requests to take longer than two minutes in the
# integration tests. See allmydata/scripts/common_http.py for usage.
os.environ["__TAHOE_CLI_HTTP_TIMEOUT"] = "120"


# pytest customization hooks

def pytest_addoption(parser):
    parser.addoption(
        "--keep-tempdir", action="store_true", dest="keep",
        help="Keep the tmpdir with the client directories (introducer, etc)",
    )
    parser.addoption(
        "--coverage", action="store_true", dest="coverage",
        help="Collect coverage statistics",
    )
    parser.addoption(
        "--force-foolscap", action="store_true", default=False,
        dest="force_foolscap",
        help=("If set, force Foolscap only for the storage protocol. " +
              "Otherwise HTTP will be used.")
    )
    parser.addoption(
        "--runslow", action="store_true", default=False,
        dest="runslow",
        help="If set, run tests marked as slow.",
    )


def pytest_collection_modifyitems(session, config, items):
    if not config.option.runslow:
        # The --runslow option was not given; keep only collected items not
        # marked as slow.
        items[:] = [
            item
            for item
            in items
            if item.get_closest_marker("slow") is None
        ]
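
# Tests opt in to the slow-test filtering above with the standard pytest
# marker, for example (hypothetical test, shown only as a usage sketch):
#
#     @pytest.mark.slow
#     def test_something_expensive(alice):
#         ...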


@pytest.fixture(autouse=True, scope='session')
def eliot_logging():
    with open("integration.eliot.json", "w") as f:
        to_file(f)
        yield
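
# The resulting integration.eliot.json is newline-delimited JSON.  If the
# eliot-prettyprint tool that ships with Eliot is available, it can render the
# log readably, roughly:
#
#     eliot-prettyprint < integration.eliot.json
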
# I've mostly defined these fixtures from "easiest" to "most
# complicated", and the dependencies basically go "down the
# page". They're all session-scoped which has the "pro" that we only
# set up the grid once, but the "con" that each test has to be a
# little careful they're not stepping on toes etc :/


@pytest.fixture(scope='session')
@log_call(action_type=u"integration:reactor", include_result=False)
def reactor():
    # this is a fixture in case we might want to try different
    # reactors for some reason.
    from twisted.internet import reactor as _reactor
    return _reactor


@pytest.fixture(scope='session')
@log_call(action_type=u"integration:temp_dir", include_args=[])
def temp_dir(request) -> str:
    """
    Invoke like 'py.test --keep-tempdir ...' to avoid deleting the temp-dir
    """
    tmp = mkdtemp(prefix="tahoe")
    if request.config.getoption('keep'):
        print("\nWill retain tempdir '{}'".format(tmp))

    # I'm leaving this in and always calling it so that the tempdir
    # path is (also) printed out near the end of the run
    def cleanup():
        if request.config.getoption('keep'):
            print("Keeping tempdir '{}'".format(tmp))
        else:
            try:
                shutil.rmtree(tmp, ignore_errors=True)
            except Exception as e:
                print("Failed to remove tmpdir: {}".format(e))
    request.addfinalizer(cleanup)

    return tmp


@pytest.fixture(scope='session')
@log_call(action_type=u"integration:flog_binary", include_args=[])
def flog_binary():
    return which('flogtool')[0]


@pytest.fixture(scope='session')
@log_call(action_type=u"integration:flog_gatherer", include_args=[])
def flog_gatherer(reactor, temp_dir, flog_binary, request):
    out_protocol = _CollectOutputProtocol()
    gather_dir = join(temp_dir, 'flog_gather')
    reactor.spawnProcess(
        out_protocol,
        flog_binary,
        (
            'flogtool', 'create-gatherer',
            '--location', 'tcp:localhost:3117',
            '--port', '3117',
            gather_dir,
        ),
        env=environ,
    )
    pytest_twisted.blockon(out_protocol.done)

    twistd_protocol = _MagicTextProtocol("Gatherer waiting at")
    twistd_process = reactor.spawnProcess(
        twistd_protocol,
        which('twistd')[0],
        (
            'twistd', '--nodaemon', '--python',
            join(gather_dir, 'gatherer.tac'),
        ),
        path=gather_dir,
        env=environ,
    )
    pytest_twisted.blockon(twistd_protocol.magic_seen)

    def cleanup():
        _cleanup_tahoe_process(twistd_process, twistd_protocol.exited)

        flog_file = mktemp('.flog_dump')
        flog_protocol = _DumpOutputProtocol(open(flog_file, 'w'))
        flog_dir = join(temp_dir, 'flog_gather')
        flogs = [x for x in listdir(flog_dir) if x.endswith('.flog')]

        print("Dumping {} flogtool logfiles to '{}'".format(len(flogs), flog_file))
        reactor.spawnProcess(
            flog_protocol,
            flog_binary,
            (
                'flogtool', 'dump', join(temp_dir, 'flog_gather', flogs[0])
            ),
            env=environ,
        )
        print("Waiting for flogtool to complete")
        try:
            block_with_timeout(flog_protocol.done, reactor)
        except ProcessTerminated as e:
            print("flogtool exited unexpectedly: {}".format(str(e)))
        print("Flogtool completed")

    request.addfinalizer(cleanup)

    with open(join(gather_dir, 'log_gatherer.furl'), 'r') as f:
        furl = f.read().strip()
    return furl
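
# The fURL returned here is written into each node's tahoe.cfg as
# log_gatherer.furl (see the introducer fixtures and _create_node calls
# below), so every node ships its logs to this gatherer.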


@pytest.fixture(scope='session')
@log_call(
    action_type=u"integration:introducer",
    include_args=["temp_dir", "flog_gatherer"],
    include_result=False,
)
def introducer(reactor, temp_dir, flog_gatherer, request):
    config = '''
[node]
nickname = introducer0
web.port = 4560
log_gatherer.furl = {log_furl}
'''.format(log_furl=flog_gatherer)

    intro_dir = join(temp_dir, 'introducer')
    print("making introducer", intro_dir)

    if not exists(intro_dir):
        mkdir(intro_dir)
        done_proto = _ProcessExitedProtocol()
        _tahoe_runner_optional_coverage(
            done_proto,
            reactor,
            request,
            (
                'create-introducer',
                '--listen=tcp',
                '--hostname=localhost',
                intro_dir,
            ),
        )
        pytest_twisted.blockon(done_proto.done)

    # over-write the config file with our stuff
    with open(join(intro_dir, 'tahoe.cfg'), 'w') as f:
        f.write(config)

    # "tahoe run" is consistent across Linux/macOS/Windows, unlike the old
    # "start" command.
    protocol = _MagicTextProtocol('introducer running')
    transport = _tahoe_runner_optional_coverage(
        protocol,
        reactor,
        request,
        (
            'run',
            intro_dir,
        ),
    )
    request.addfinalizer(partial(_cleanup_tahoe_process, transport, protocol.exited))

    pytest_twisted.blockon(protocol.magic_seen)
    return TahoeProcess(transport, intro_dir)


@pytest.fixture(scope='session')
@log_call(action_type=u"integration:introducer:furl", include_args=["temp_dir"])
def introducer_furl(introducer, temp_dir):
    furl_fname = join(temp_dir, 'introducer', 'private', 'introducer.furl')
    while not exists(furl_fname):
        print("Don't see {} yet".format(furl_fname))
        sleep(.1)
    furl = open(furl_fname, 'r').read()

    tubID, location_hints, name = decode_furl(furl)
    if not location_hints:
        # If there are no location hints then nothing can ever possibly
        # connect to it and the only thing that can happen next is something
        # will hang or time out. So just give up right now.
        raise ValueError(
            "Introducer ({!r}) fURL has no location hints!".format(
                furl,
            ),
        )
    return furl
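
# For reference, a foolscap fURL generally looks like
# "pb://<tub-id>@<connection-hint>[,<hint>...]/<swissnum>"; decode_furl()
# above splits it into the tub ID, the location hints, and the name.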


@pytest.fixture(scope='session')
@log_call(
    action_type=u"integration:tor:introducer",
    include_args=["temp_dir", "flog_gatherer"],
    include_result=False,
)
def tor_introducer(reactor, temp_dir, flog_gatherer, request):
    config = '''
[node]
nickname = introducer_tor
web.port = 4561
log_gatherer.furl = {log_furl}
'''.format(log_furl=flog_gatherer)

    intro_dir = join(temp_dir, 'introducer_tor')
    print("making introducer", intro_dir)

    if not exists(intro_dir):
        mkdir(intro_dir)
        done_proto = _ProcessExitedProtocol()
        _tahoe_runner_optional_coverage(
            done_proto,
            reactor,
            request,
            (
                'create-introducer',
                '--tor-control-port', 'tcp:localhost:8010',
                '--listen=tor',
                intro_dir,
            ),
        )
        pytest_twisted.blockon(done_proto.done)

    # over-write the config file with our stuff
    with open(join(intro_dir, 'tahoe.cfg'), 'w') as f:
        f.write(config)

    # "tahoe run" is consistent across Linux/macOS/Windows, unlike the old
    # "start" command.
    protocol = _MagicTextProtocol('introducer running')
    transport = _tahoe_runner_optional_coverage(
        protocol,
        reactor,
        request,
        (
            'run',
            intro_dir,
        ),
    )

    def cleanup():
        try:
            transport.signalProcess('TERM')
            block_with_timeout(protocol.exited, reactor)
        except ProcessExitedAlready:
            pass
    request.addfinalizer(cleanup)

    pytest_twisted.blockon(protocol.magic_seen)
    return transport


@pytest.fixture(scope='session')
def tor_introducer_furl(tor_introducer, temp_dir):
    furl_fname = join(temp_dir, 'introducer_tor', 'private', 'introducer.furl')
    while not exists(furl_fname):
        print("Don't see {} yet".format(furl_fname))
        sleep(.1)
    furl = open(furl_fname, 'r').read()
    return furl


@pytest.fixture(scope='session')
@log_call(
    action_type=u"integration:storage_nodes",
    include_args=["temp_dir", "introducer_furl", "flog_gatherer"],
    include_result=False,
)
def storage_nodes(reactor, temp_dir, introducer, introducer_furl, flog_gatherer, request):
    nodes_d = []
    # start all 5 nodes in parallel
    for x in range(5):
        name = 'node{}'.format(x)
        web_port = 9990 + x
        nodes_d.append(
            _create_node(
                reactor, request, temp_dir, introducer_furl, flog_gatherer, name,
                web_port="tcp:{}:interface=localhost".format(web_port),
                storage=True,
            )
        )
    nodes_status = pytest_twisted.blockon(DeferredList(nodes_d))
    nodes = []
    for ok, process in nodes_status:
        assert ok, "Storage node creation failed: {}".format(process)
        nodes.append(process)
    return nodes
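
# A test that just needs a running grid can depend on this fixture directly,
# for example (hypothetical test, shown only as a usage sketch):
#
#     def test_grid_is_up(storage_nodes):
#         assert len(storage_nodes) == 5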


@pytest.fixture(scope="session")
def alice_sftp_client_key_path(temp_dir):
    # The client SSH key would normally live somewhere else (~/.ssh,
    # typically), but for convenience in testing we put it inside the node.
    return join(temp_dir, "alice", "private", "ssh_client_rsa_key")


@pytest.fixture(scope='session')
@log_call(action_type=u"integration:alice", include_args=[], include_result=False)
def alice(
        reactor,
        temp_dir,
        introducer_furl,
        flog_gatherer,
        storage_nodes,
        alice_sftp_client_key_path,
        request,
):
    process = pytest_twisted.blockon(
        _create_node(
            reactor, request, temp_dir, introducer_furl, flog_gatherer, "alice",
            web_port="tcp:9980:interface=localhost",
            storage=False,
            # We're going to kill this ourselves, so no need for finalizer to
            # do it:
            finalize=False,
        )
    )
    pytest_twisted.blockon(await_client_ready(process))

    # 1. Create a new RW directory cap:
    cli(process, "create-alias", "test")
    rwcap = loads(cli(process, "list-aliases", "--json"))["test"]["readwrite"]

    # 2. Enable SFTP on the node:
    host_ssh_key_path = join(process.node_dir, "private", "ssh_host_rsa_key")
    accounts_path = join(process.node_dir, "private", "accounts")
    with open(join(process.node_dir, "tahoe.cfg"), "a") as f:
        f.write("""\
[sftpd]
enabled = true
port = tcp:8022:interface=127.0.0.1
host_pubkey_file = {ssh_key_path}.pub
host_privkey_file = {ssh_key_path}
accounts.file = {accounts_path}
""".format(ssh_key_path=host_ssh_key_path, accounts_path=accounts_path))
    generate_ssh_key(host_ssh_key_path)

    # 3. Add an SFTP access file with an SSH key for auth.
    generate_ssh_key(alice_sftp_client_key_path)
    # Pub key format is "ssh-rsa <thekey> <username>". We want the key.
    ssh_public_key = open(alice_sftp_client_key_path + ".pub").read().strip().split()[1]
    with open(accounts_path, "w") as f:
        f.write("""\
alice-key ssh-rsa {ssh_public_key} {rwcap}
""".format(rwcap=rwcap, ssh_public_key=ssh_public_key))

    # 4. Restart the node with new SFTP config.
    pytest_twisted.blockon(process.restart_async(reactor, request))
    pytest_twisted.blockon(await_client_ready(process))
    print(f"Alice pid: {process.transport.pid}")
    return process
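
# A test can reach Alice's SFTP endpoint with the client key generated above,
# e.g. via Paramiko (a sketch only; it assumes Paramiko is installed and is
# not something this fixture does itself):
#
#     import paramiko
#     key = paramiko.RSAKey.from_private_key_file(alice_sftp_client_key_path)
#     client = paramiko.SSHClient()
#     client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
#     client.connect("127.0.0.1", port=8022, username="alice-key", pkey=key)
#     sftp = client.open_sftp()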


@pytest.fixture(scope='session')
@log_call(action_type=u"integration:bob", include_args=[], include_result=False)
def bob(reactor, temp_dir, introducer_furl, flog_gatherer, storage_nodes, request):
    process = pytest_twisted.blockon(
        _create_node(
            reactor, request, temp_dir, introducer_furl, flog_gatherer, "bob",
            web_port="tcp:9981:interface=localhost",
            storage=False,
        )
    )
    pytest_twisted.blockon(await_client_ready(process))
    return process


@pytest.fixture(scope='session')
@pytest.mark.skipif(sys.platform.startswith('win'),
                    reason='Tor tests are unstable on Windows')
def chutney(reactor, temp_dir: str) -> tuple[str, dict[str, str]]:
    # Try to find Chutney already installed in the environment.
    try:
        import chutney
    except ImportError:
        # Nope, we'll get our own in a moment.
        pass
    else:
        # We already have one, just use it.
        return (
            # from `checkout/lib/chutney/__init__.py` we want to get back to
            # `checkout` because that's the parent of the directory with all
            # of the network definitions. So, great-grand-parent.
            FilePath(chutney.__file__).parent().parent().parent().path,
            # There's nothing to add to the environment.
            {},
        )

    chutney_dir = join(temp_dir, 'chutney')
    mkdir(chutney_dir)

    missing = [exe for exe in ["tor", "tor-gencert"] if not which(exe)]
    if missing:
        pytest.skip(f"Some command-line tools not found: {missing}")

    # XXX yuck! should add a setup.py to chutney so we can at least
    # "pip install <path to tarball>" and/or depend on chutney in "pip
    # install -e .[dev]" (i.e. in the 'dev' extra)
    #
    # https://trac.torproject.org/projects/tor/ticket/20343
    proto = _DumpOutputProtocol(None)
    reactor.spawnProcess(
        proto,
        'git',
        (
            'git', 'clone',
            'https://git.torproject.org/chutney.git',
            chutney_dir,
        ),
        env=environ,
    )
    pytest_twisted.blockon(proto.done)

    # XXX: Here we reset Chutney to a specific revision known to work,
    # since there are no stability guarantees or releases yet.
    proto = _DumpOutputProtocol(None)
    reactor.spawnProcess(
        proto,
        'git',
        (
            'git', '-C', chutney_dir,
            'reset', '--hard',
            'c825cba0bcd813c644c6ac069deeb7347d3200ee'
        ),
        env=environ,
    )
    pytest_twisted.blockon(proto.done)

    return (chutney_dir, {"PYTHONPATH": join(chutney_dir, "lib")})


@pytest.fixture(scope='session')
@pytest.mark.skipif(sys.platform.startswith('win'),
                    reason='Tor tests are unstable on Windows')
def tor_network(reactor, temp_dir, chutney, request):
    """
    Build a basic Tor network.

    :param chutney: The root directory of a Chutney checkout and a dict of
        additional environment variables to set so a Python process can use
        it.

    :return: None
    """
    chutney_root, chutney_env = chutney
    basic_network = join(chutney_root, 'networks', 'basic')

    env = environ.copy()
    env.update(chutney_env)
    chutney_argv = (sys.executable, '-m', 'chutney.TorNet')

    def chutney(argv):
        proto = _DumpOutputProtocol(None)
        reactor.spawnProcess(
            proto,
            sys.executable,
            chutney_argv + argv,
            path=join(chutney_root),
            env=env,
        )
        return proto.done

    # now, as per Chutney's README, we have to create the network
    # ./chutney configure networks/basic
    # ./chutney start networks/basic
    pytest_twisted.blockon(chutney(("configure", basic_network)))
    pytest_twisted.blockon(chutney(("start", basic_network)))

    # print some useful stuff
    try:
        pytest_twisted.blockon(chutney(("status", basic_network)))
    except ProcessTerminated:
        print("Chutney.TorNet status failed (continuing)")

    def cleanup():
        print("Tearing down Chutney Tor network")
        try:
            block_with_timeout(chutney(("stop", basic_network)), reactor)
        except ProcessTerminated:
            # If this doesn't exit cleanly, that's fine, that shouldn't fail
            # the test suite.
            pass
    request.addfinalizer(cleanup)