"""
|
|
|
|
Ported to Python 3.
|
|
|
|
"""
|
2023-03-27 15:24:32 +00:00
|
|
|
|
|
|
|
from __future__ import annotations
|
|
|
|
|
2016-08-22 23:36:56 +00:00
|
|
|
import sys
|
|
|
|
import shutil
|
2019-02-15 16:50:14 +00:00
|
|
|
from time import sleep
|
2019-08-02 22:49:50 +00:00
|
|
|
from os import mkdir, listdir, environ
|
2019-02-15 16:50:14 +00:00
|
|
|
from os.path import join, exists
|
2016-08-22 23:36:56 +00:00
|
|
|
from tempfile import mkdtemp, mktemp
|
2019-02-15 17:41:45 +00:00
|
|
|
from functools import partial
|
2021-01-07 16:25:26 +00:00
|
|
|
from json import loads
|
2016-08-22 23:36:56 +00:00
|
|
|
|
2020-10-16 15:26:05 +00:00
|
|
|
from foolscap.furl import (
|
|
|
|
decode_furl,
|
|
|
|
)
|
|
|
|
|
2019-02-27 14:12:00 +00:00
|
|
|
from eliot import (
|
|
|
|
to_file,
|
|
|
|
log_call,
|
|
|
|
)
|
|
|
|
|
2023-03-27 15:24:32 +00:00
|
|
|
from twisted.python.filepath import FilePath
|
2017-01-09 17:58:41 +00:00
|
|
|
from twisted.python.procutils import which
|
2019-08-09 01:13:02 +00:00
|
|
|
from twisted.internet.defer import DeferredList
|
2019-01-24 19:48:09 +00:00
|
|
|
from twisted.internet.error import (
|
|
|
|
ProcessExitedAlready,
|
|
|
|
ProcessTerminated,
|
|
|
|
)
|
2016-08-22 23:36:56 +00:00
|
|
|
|
|
|
|
import pytest
|
2019-02-05 16:03:35 +00:00
|
|
|
import pytest_twisted
|
2016-08-22 23:36:56 +00:00
|
|
|
|
2021-05-07 14:06:12 +00:00
|
|
|
from .util import (
|
2019-02-15 16:50:14 +00:00
|
|
|
_CollectOutputProtocol,
|
|
|
|
_MagicTextProtocol,
|
|
|
|
_DumpOutputProtocol,
|
|
|
|
_ProcessExitedProtocol,
|
|
|
|
_create_node,
|
2019-08-08 21:28:54 +00:00
|
|
|
_cleanup_tahoe_process,
|
2019-08-07 20:03:16 +00:00
|
|
|
_tahoe_runner_optional_coverage,
|
2019-08-09 01:13:02 +00:00
|
|
|
await_client_ready,
|
2019-08-12 06:42:58 +00:00
|
|
|
TahoeProcess,
|
2021-01-07 16:25:26 +00:00
|
|
|
cli,
|
2021-01-12 18:58:28 +00:00
|
|
|
generate_ssh_key,
|
|
|
|
block_with_timeout,
|
2019-02-15 16:50:14 +00:00
|
|
|
)
|
2023-03-27 17:32:40 +00:00
|
|
|
from allmydata.node import read_config
|
2016-10-06 05:03:35 +00:00
|
|
|
|
2016-08-22 23:36:56 +00:00
|
|
|
# pytest customization hooks
|
|
|
|
|
|
|
|

def pytest_addoption(parser):
    parser.addoption(
        "--keep-tempdir", action="store_true", dest="keep",
        help="Keep the tmpdir with the client directories (introducer, etc)",
    )
    parser.addoption(
        "--coverage", action="store_true", dest="coverage",
        help="Collect coverage statistics",
    )
    parser.addoption(
        "--force-foolscap", action="store_true", default=False,
        dest="force_foolscap",
        help=("If set, force Foolscap only for the storage protocol. " +
              "Otherwise HTTP will be used.")
    )
    parser.addoption(
        "--runslow", action="store_true", default=False,
        dest="runslow",
        help="If set, run tests marked as slow.",
    )


def pytest_collection_modifyitems(session, config, items):
    if not config.option.runslow:
        # The --runslow option was not given; keep only collected items not
        # marked as slow.
        items[:] = [
            item
            for item
            in items
            if item.get_closest_marker("slow") is None
        ]
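
# For illustration, a test opts into the slow handling above via the standard
# pytest marker mechanism; roughly like this (the test name and body are
# hypothetical -- only the "slow" marker name and --runslow come from this
# file):
#
#     @pytest.mark.slow
#     def test_large_upload(alice):
#         ...   # collected only when `pytest --runslow` is given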


@pytest.fixture(autouse=True, scope='session')
def eliot_logging():
    with open("integration.eliot.json", "w") as f:
        to_file(f)
        yield
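
# The Eliot log written above is newline-delimited JSON, one message per line.
# A minimal sketch of inspecting it after a run (assuming the file exists and
# that the usual Eliot field names are present):
#
#     import json
#     with open("integration.eliot.json") as f:
#         for line in f:
#             msg = json.loads(line)
#             print(msg.get("action_type") or msg.get("message_type"))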


# I've mostly defined these fixtures from "easiest" to "most
# complicated", and the dependencies basically go "down the
# page". They're all session-scoped which has the "pro" that we only
# set up the grid once, but the "con" that each test has to be a
# little careful they're not stepping on toes etc :/
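#
# A test pulls in whatever slice of the grid it needs just by naming the
# fixtures; for example (a hypothetical test, not one defined here):
#
#     def test_alice_writes_bob_reads(alice, bob, temp_dir):
#         ...   # pytest builds introducer, storage_nodes, etc. on demand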


@pytest.fixture(scope='session')
@log_call(action_type=u"integration:reactor", include_result=False)
def reactor():
    # this is a fixture in case we might want to try different
    # reactors for some reason.
    from twisted.internet import reactor as _reactor
    return _reactor


@pytest.fixture(scope='session')
@log_call(action_type=u"integration:temp_dir", include_args=[])
def temp_dir(request) -> str:
    """
    Invoke like 'py.test --keep-tempdir ...' to avoid deleting the temp-dir
    """
    tmp = mkdtemp(prefix="tahoe")
    if request.config.getoption('keep'):
        print("\nWill retain tempdir '{}'".format(tmp))

    # I'm leaving this in and always calling it so that the tempdir
    # path is (also) printed out near the end of the run
    def cleanup():
        if request.config.getoption('keep'):
            print("Keeping tempdir '{}'".format(tmp))
        else:
            try:
                shutil.rmtree(tmp, ignore_errors=True)
            except Exception as e:
                print("Failed to remove tmpdir: {}".format(e))
    request.addfinalizer(cleanup)

    return tmp


@pytest.fixture(scope='session')
@log_call(action_type=u"integration:flog_binary", include_args=[])
def flog_binary():
    return which('flogtool')[0]


@pytest.fixture(scope='session')
@log_call(action_type=u"integration:flog_gatherer", include_args=[])
def flog_gatherer(reactor, temp_dir, flog_binary, request):
    out_protocol = _CollectOutputProtocol()
    gather_dir = join(temp_dir, 'flog_gather')
    reactor.spawnProcess(
        out_protocol,
        flog_binary,
        (
            'flogtool', 'create-gatherer',
            '--location', 'tcp:localhost:3117',
            '--port', '3117',
            gather_dir,
        ),
        env=environ,
    )
    pytest_twisted.blockon(out_protocol.done)

    twistd_protocol = _MagicTextProtocol("Gatherer waiting at")
    twistd_process = reactor.spawnProcess(
        twistd_protocol,
        which('twistd')[0],
        (
            'twistd', '--nodaemon', '--python',
            join(gather_dir, 'gatherer.tac'),
        ),
        path=gather_dir,
        env=environ,
    )
    pytest_twisted.blockon(twistd_protocol.magic_seen)

    def cleanup():
        _cleanup_tahoe_process(twistd_process, twistd_protocol.exited)

        flog_file = mktemp('.flog_dump')
        flog_protocol = _DumpOutputProtocol(open(flog_file, 'w'))
        flog_dir = join(temp_dir, 'flog_gather')
        flogs = [x for x in listdir(flog_dir) if x.endswith('.flog')]

        print("Dumping {} flogtool logfiles to '{}'".format(len(flogs), flog_file))
        reactor.spawnProcess(
            flog_protocol,
            flog_binary,
            (
                'flogtool', 'dump', join(temp_dir, 'flog_gather', flogs[0])
            ),
            env=environ,
        )
        print("Waiting for flogtool to complete")
        try:
            block_with_timeout(flog_protocol.done, reactor)
        except ProcessTerminated as e:
            print("flogtool exited unexpectedly: {}".format(str(e)))
        print("Flogtool completed")

    request.addfinalizer(cleanup)

    with open(join(gather_dir, 'log_gatherer.furl'), 'r') as f:
        furl = f.read().strip()
    return furl


@pytest.fixture(scope='session')
@log_call(
    action_type=u"integration:introducer",
    include_args=["temp_dir", "flog_gatherer"],
    include_result=False,
)
def introducer(reactor, temp_dir, flog_gatherer, request):
    intro_dir = join(temp_dir, 'introducer')
    print("making introducer", intro_dir)

    if not exists(intro_dir):
        mkdir(intro_dir)
        done_proto = _ProcessExitedProtocol()
        _tahoe_runner_optional_coverage(
            done_proto,
            reactor,
            request,
            (
                'create-introducer',
                '--listen=tcp',
                '--hostname=localhost',
                intro_dir,
            ),
        )
        pytest_twisted.blockon(done_proto.done)

        # adjust a few settings; set_config() persists the changes to
        # tahoe.cfg itself, so there is no need to over-write the config
        # file by hand afterwards.
        config = read_config(intro_dir, "tub.port")
        config.set_config("node", "nickname", "introducer-tor")
        config.set_config("node", "web.port", "4561")
        config.set_config("node", "log_gatherer.furl", flog_gatherer)

    # "tahoe run" is consistent across Linux/macOS/Windows, unlike the old
    # "start" command.
    protocol = _MagicTextProtocol('introducer running')
    transport = _tahoe_runner_optional_coverage(
        protocol,
        reactor,
        request,
        (
            'run',
            intro_dir,
        ),
    )
    request.addfinalizer(partial(_cleanup_tahoe_process, transport, protocol.exited))

    pytest_twisted.blockon(protocol.magic_seen)
    return TahoeProcess(transport, intro_dir)
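
# For reference, the set_config() calls above leave a [node] section in the
# introducer's tahoe.cfg roughly like the following (anything not set above is
# whatever "create-introducer" generated; the furl value is a placeholder):
#
#     [node]
#     nickname = introducer-tor
#     web.port = 4561
#     log_gatherer.furl = pb://...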


@pytest.fixture(scope='session')
@log_call(action_type=u"integration:introducer:furl", include_args=["temp_dir"])
def introducer_furl(introducer, temp_dir):
    furl_fname = join(temp_dir, 'introducer', 'private', 'introducer.furl')
    while not exists(furl_fname):
        print("Don't see {} yet".format(furl_fname))
        sleep(.1)
    furl = open(furl_fname, 'r').read()
    tubID, location_hints, name = decode_furl(furl)
    if not location_hints:
        # If there are no location hints then nothing can ever possibly
        # connect to it and the only thing that can happen next is something
        # will hang or time out. So just give up right now.
        raise ValueError(
            "Introducer ({!r}) fURL has no location hints!".format(
                furl,
            ),
        )
    return furl
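
# A Foolscap fURL has the shape "pb://<tub-id>@<location-hint>,<hint>/<swissnum>".
# decode_furl() splits it into exactly those three pieces, so the guard above
# is roughly equivalent to this sketch (operating on any furl string):
#
#     tub_id, hints, name = decode_furl(furl)
#     if not hints:
#         raise ValueError("nobody can ever connect to {!r}".format(furl))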


@pytest.fixture(scope='session')
@log_call(
    action_type=u"integration:tor:introducer",
    include_args=["temp_dir", "flog_gatherer"],
    include_result=False,
)
def tor_introducer(reactor, temp_dir, flog_gatherer, request):
    intro_dir = join(temp_dir, 'introducer_tor')
    print("making Tor introducer in {}".format(intro_dir))
    print("(this can take tens of seconds to allocate Onion address)")

    if not exists(intro_dir):
        mkdir(intro_dir)
        done_proto = _ProcessExitedProtocol()
        _tahoe_runner_optional_coverage(
            done_proto,
            reactor,
            request,
            (
                'create-introducer',
                '--tor-control-port', 'tcp:localhost:8007',
                '--hide-ip',
                '--listen=tor',
                intro_dir,
            ),
        )
        pytest_twisted.blockon(done_proto.done)

        # adjust a few settings
        config = read_config(intro_dir, "tub.port")
        config.set_config("node", "nickname", "introducer-tor")
        config.set_config("node", "web.port", "4561")
        config.set_config("node", "log_gatherer.furl", flog_gatherer)

    # "tahoe run" is consistent across Linux/macOS/Windows, unlike the old
    # "start" command.
    protocol = _MagicTextProtocol('introducer running')
    transport = _tahoe_runner_optional_coverage(
        protocol,
        reactor,
        request,
        (
            'run',
            intro_dir,
        ),
    )

    def cleanup():
        try:
            transport.signalProcess('TERM')
            block_with_timeout(protocol.exited, reactor)
        except ProcessExitedAlready:
            pass
    request.addfinalizer(cleanup)

    print("Waiting for introducer to be ready...")
    pytest_twisted.blockon(protocol.magic_seen)
    print("Introducer ready.")
    return transport


@pytest.fixture(scope='session')
def tor_introducer_furl(tor_introducer, temp_dir):
    furl_fname = join(temp_dir, 'introducer_tor', 'private', 'introducer.furl')
    while not exists(furl_fname):
        print("Don't see {} yet".format(furl_fname))
        sleep(.1)
    furl = open(furl_fname, 'r').read()
    print(f"Found Tor introducer furl: {furl} in {furl_fname}")
    return furl


@pytest.fixture(scope='session')
@log_call(
    action_type=u"integration:storage_nodes",
    include_args=["temp_dir", "introducer_furl", "flog_gatherer"],
    include_result=False,
)
def storage_nodes(reactor, temp_dir, introducer, introducer_furl, flog_gatherer, request):
    nodes_d = []
    # start all 5 nodes in parallel
    for x in range(5):
        name = 'node{}'.format(x)
        web_port = 9990 + x
        nodes_d.append(
            _create_node(
                reactor, request, temp_dir, introducer_furl, flog_gatherer, name,
                web_port="tcp:{}:interface=localhost".format(web_port),
                storage=True,
            )
        )
    nodes_status = pytest_twisted.blockon(DeferredList(nodes_d))
    nodes = []
    for ok, process in nodes_status:
        assert ok, "Storage node creation failed: {}".format(process)
        nodes.append(process)
    return nodes


@pytest.fixture(scope="session")
def alice_sftp_client_key_path(temp_dir):
    # The client SSH key path would typically live somewhere else (~/.ssh,
    # usually), but for convenience in testing we keep it inside the node's
    # private directory.
    return join(temp_dir, "alice", "private", "ssh_client_rsa_key")


@pytest.fixture(scope='session')
@log_call(action_type=u"integration:alice", include_args=[], include_result=False)
def alice(
        reactor,
        temp_dir,
        introducer_furl,
        flog_gatherer,
        storage_nodes,
        alice_sftp_client_key_path,
        request,
):
    process = pytest_twisted.blockon(
        _create_node(
            reactor, request, temp_dir, introducer_furl, flog_gatherer, "alice",
            web_port="tcp:9980:interface=localhost",
            storage=False,
            # We're going to kill this ourselves, so no need for finalizer to
            # do it:
            finalize=False,
        )
    )
    pytest_twisted.blockon(await_client_ready(process))

    # 1. Create a new RW directory cap:
    cli(process, "create-alias", "test")
    rwcap = loads(cli(process, "list-aliases", "--json"))["test"]["readwrite"]

    # 2. Enable SFTP on the node:
    host_ssh_key_path = join(process.node_dir, "private", "ssh_host_rsa_key")
    accounts_path = join(process.node_dir, "private", "accounts")
    with open(join(process.node_dir, "tahoe.cfg"), "a") as f:
        f.write("""\
[sftpd]
enabled = true
port = tcp:8022:interface=127.0.0.1
host_pubkey_file = {ssh_key_path}.pub
host_privkey_file = {ssh_key_path}
accounts.file = {accounts_path}
""".format(ssh_key_path=host_ssh_key_path, accounts_path=accounts_path))
    generate_ssh_key(host_ssh_key_path)

    # 3. Add an SFTP access file with an SSH key for auth.
    generate_ssh_key(alice_sftp_client_key_path)
    # Pub key format is "ssh-rsa <thekey> <username>". We want the key.
    ssh_public_key = open(alice_sftp_client_key_path + ".pub").read().strip().split()[1]
    with open(accounts_path, "w") as f:
        f.write("""\
alice-key ssh-rsa {ssh_public_key} {rwcap}
""".format(rwcap=rwcap, ssh_public_key=ssh_public_key))

    # 4. Restart the node with new SFTP config.
    pytest_twisted.blockon(process.restart_async(reactor, request))
    pytest_twisted.blockon(await_client_ready(process))
    print(f"Alice pid: {process.transport.pid}")
    return process
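
# The SFTP endpoint configured above listens on 127.0.0.1:8022 and accepts the
# "alice-key" account with the client key at alice_sftp_client_key_path. As a
# rough sketch (assuming paramiko is available -- it is not imported or
# required by this file), a test could connect like this:
#
#     import paramiko
#     key = paramiko.RSAKey.from_private_key_file(alice_sftp_client_key_path)
#     transport = paramiko.Transport(("localhost", 8022))
#     transport.connect(username="alice-key", pkey=key)
#     sftp = paramiko.SFTPClient.from_transport(transport)
#     print(sftp.listdir("."))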


@pytest.fixture(scope='session')
@log_call(action_type=u"integration:bob", include_args=[], include_result=False)
def bob(reactor, temp_dir, introducer_furl, flog_gatherer, storage_nodes, request):
    process = pytest_twisted.blockon(
        _create_node(
            reactor, request, temp_dir, introducer_furl, flog_gatherer, "bob",
            web_port="tcp:9981:interface=localhost",
            storage=False,
        )
    )
    pytest_twisted.blockon(await_client_ready(process))
    return process


@pytest.fixture(scope='session')
@pytest.mark.skipif(sys.platform.startswith('win'),
                    reason='Tor tests are unstable on Windows')
def chutney(reactor, temp_dir: str) -> tuple[str, dict[str, str]]:
    # Try to find Chutney already installed in the environment.
    try:
        import chutney
    except ImportError:
        # Nope, we'll get our own in a moment.
        pass
    else:
        # We already have one, just use it.
        return (
            # from `checkout/lib/chutney/__init__.py` we want to get back to
            # `checkout` because that's the parent of the directory with all
            # of the network definitions. So, great-grand-parent.
            FilePath(chutney.__file__).parent().parent().parent().path,
            # There's nothing to add to the environment.
            {},
        )

    chutney_dir = join(temp_dir, 'chutney')
    mkdir(chutney_dir)

    missing = [exe for exe in ["tor", "tor-gencert"] if not which(exe)]
    if missing:
        pytest.skip(f"Some command-line tools not found: {missing}")

    # XXX yuck! should add a setup.py to chutney so we can at least
    # "pip install <path to tarball>" and/or depend on chutney in "pip
    # install -e .[dev]" (i.e. in the 'dev' extra)
    #
    # https://trac.torproject.org/projects/tor/ticket/20343
    proto = _DumpOutputProtocol(None)
    reactor.spawnProcess(
        proto,
        'git',
        (
            'git', 'clone',
            'https://gitlab.torproject.org/tpo/core/chutney.git',
            chutney_dir,
        ),
        env=environ,
    )
    pytest_twisted.blockon(proto.done)

    # XXX: Here we reset Chutney to a specific revision known to work,
    # since there are no stability guarantees or releases yet.
    proto = _DumpOutputProtocol(None)
    reactor.spawnProcess(
        proto,
        'git',
        (
            'git', '-C', chutney_dir,
            'reset', '--hard',
            'c4f6789ad2558dcbfeb7d024c6481d8112bfb6c2'
        ),
        env=environ,
    )
    pytest_twisted.blockon(proto.done)

    return (
        chutney_dir,
        {
            "PYTHONPATH": join(chutney_dir, "lib"),
            "CHUTNEY_START_TIME": "200",  # default is 60
        }
    )


@pytest.fixture(scope='session')
@pytest.mark.skipif(sys.platform.startswith('win'),
                    reason='Tor tests are unstable on Windows')
def tor_network(reactor, temp_dir, chutney, request):
    """
    Build a basic Tor network.

    :param chutney: The root directory of a Chutney checkout and a dict of
        additional environment variables to set so a Python process can use
        it.

    :return: None
    """
    chutney_root, chutney_env = chutney
    basic_network = join(chutney_root, 'networks', 'basic')

    env = environ.copy()
    env.update(chutney_env)
    chutney_argv = (sys.executable, '-m', 'chutney.TorNet')

    def chutney(argv):
        proto = _DumpOutputProtocol(None)
        reactor.spawnProcess(
            proto,
            sys.executable,
            chutney_argv + argv,
            path=join(chutney_root),
            env=env,
        )
        return proto.done

    # now, as per Chutney's README, we have to create the network
    pytest_twisted.blockon(chutney(("configure", basic_network)))

    # register the teardown before starting the network, so it gets torn
    # down even if a later step fails
    def cleanup():
        print("Tearing down Chutney Tor network")
        try:
            block_with_timeout(chutney(("stop", basic_network)), reactor)
        except ProcessTerminated:
            # If this doesn't exit cleanly, that's fine, that shouldn't fail
            # the test suite.
            pass
    request.addfinalizer(cleanup)

    pytest_twisted.blockon(chutney(("start", basic_network)))
    pytest_twisted.blockon(chutney(("wait_for_bootstrap", basic_network)))

    # print some useful stuff
    try:
        pytest_twisted.blockon(chutney(("status", basic_network)))
    except ProcessTerminated:
        print("Chutney.TorNet status failed (continuing)")