"""
Ported to Python 3.
"""

from __future__ import unicode_literals
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from future.utils import PY2
if PY2:
    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401

import sys
import shutil
from time import sleep
from os import mkdir, listdir, environ
from os.path import join, exists
from tempfile import mkdtemp, mktemp
from functools import partial
from json import loads

from foolscap.furl import (
    decode_furl,
)

from eliot import (
    to_file,
    log_call,
)

from twisted.python.procutils import which
from twisted.internet.defer import DeferredList
from twisted.internet.error import (
    ProcessExitedAlready,
    ProcessTerminated,
)

import pytest
import pytest_twisted

from .util import (
    _CollectOutputProtocol,
    _MagicTextProtocol,
    _DumpOutputProtocol,
    _ProcessExitedProtocol,
    _create_node,
    _cleanup_tahoe_process,
    _tahoe_runner_optional_coverage,
    await_client_ready,
    TahoeProcess,
    cli,
    _run_node,
    generate_ssh_key,
    block_with_timeout,
)


# pytest customization hooks

def pytest_addoption(parser):
    parser.addoption(
        "--keep-tempdir", action="store_true", dest="keep",
        help="Keep the tmpdir with the client directories (introducer, etc)",
    )
    parser.addoption(
        "--coverage", action="store_true", dest="coverage",
        help="Collect coverage statistics",
    )


@pytest.fixture(autouse=True, scope='session')
def eliot_logging():
    with open("integration.eliot.json", "w") as f:
        to_file(f)
        yield


# I've mostly defined these fixtures from "easiest" to "most
# complicated", and the dependencies basically go "down the
# page". They're all session-scoped which has the "pro" that we only
# set up the grid once, but the "con" that each test has to be a
# little careful they're not stepping on toes etc :/
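#
# As a (hypothetical) illustration, a test module only needs to request the
# client fixtures it uses; the grid is built once and shared for the rest of
# the session:
#
#     def test_upload_roundtrip(alice, bob):
#         ...  # both clients talk to the same introducer and storage nodes

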
@pytest.fixture(scope='session')
@log_call(action_type=u"integration:reactor", include_result=False)
def reactor():
    # this is a fixture in case we might want to try different
    # reactors for some reason.
    from twisted.internet import reactor as _reactor
    return _reactor


@pytest.fixture(scope='session')
@log_call(action_type=u"integration:temp_dir", include_args=[])
def temp_dir(request):
    """
    Invoke like 'py.test --keep-tempdir ...' to avoid deleting the temp-dir
    """
    tmp = mkdtemp(prefix="tahoe")
    if request.config.getoption('keep'):
        print("\nWill retain tempdir '{}'".format(tmp))

    # I'm leaving this in and always calling it so that the tempdir
    # path is (also) printed out near the end of the run
    def cleanup():
        if request.config.getoption('keep'):
            print("Keeping tempdir '{}'".format(tmp))
        else:
            try:
                shutil.rmtree(tmp, ignore_errors=True)
            except Exception as e:
                print("Failed to remove tmpdir: {}".format(e))
    request.addfinalizer(cleanup)

    return tmp


@pytest.fixture(scope='session')
@log_call(action_type=u"integration:flog_binary", include_args=[])
def flog_binary():
    return which('flogtool')[0]


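# The flog_gatherer fixture stands up a Foolscap log-gatherer ("flogtool
# create-gatherer" plus a twistd process) and returns its fURL; the nodes
# created below are pointed at it via "log_gatherer.furl" in their tahoe.cfg,
# and the collected .flog files are dumped at teardown.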
@pytest.fixture(scope='session')
@log_call(action_type=u"integration:flog_gatherer", include_args=[])
def flog_gatherer(reactor, temp_dir, flog_binary, request):
    out_protocol = _CollectOutputProtocol()
    gather_dir = join(temp_dir, 'flog_gather')
    reactor.spawnProcess(
        out_protocol,
        flog_binary,
        (
            'flogtool', 'create-gatherer',
            '--location', 'tcp:localhost:3117',
            '--port', '3117',
            gather_dir,
        )
    )
    pytest_twisted.blockon(out_protocol.done)

    twistd_protocol = _MagicTextProtocol("Gatherer waiting at")
    twistd_process = reactor.spawnProcess(
        twistd_protocol,
        which('twistd')[0],
        (
            'twistd', '--nodaemon', '--python',
            join(gather_dir, 'gatherer.tac'),
        ),
        path=gather_dir,
    )
    pytest_twisted.blockon(twistd_protocol.magic_seen)

    def cleanup():
        _cleanup_tahoe_process(twistd_process, twistd_protocol.exited)

        flog_file = mktemp('.flog_dump')
        flog_protocol = _DumpOutputProtocol(open(flog_file, 'w'))
        flog_dir = join(temp_dir, 'flog_gather')
        flogs = [x for x in listdir(flog_dir) if x.endswith('.flog')]

        print("Dumping {} flogtool logfiles to '{}'".format(len(flogs), flog_file))
        reactor.spawnProcess(
            flog_protocol,
            flog_binary,
            (
                'flogtool', 'dump', join(temp_dir, 'flog_gather', flogs[0])
            ),
        )
        print("Waiting for flogtool to complete")
        try:
            block_with_timeout(flog_protocol.done, reactor)
        except ProcessTerminated as e:
            print("flogtool exited unexpectedly: {}".format(str(e)))
        print("Flogtool completed")

    request.addfinalizer(cleanup)

    with open(join(gather_dir, 'log_gatherer.furl'), 'r') as f:
        furl = f.read().strip()
    return furl


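# The introducer is the node every client and storage server contacts first;
# its fURL (written to private/introducer.furl) is how the rest of the grid
# finds it.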
@pytest.fixture(scope='session')
@log_call(
    action_type=u"integration:introducer",
    include_args=["temp_dir", "flog_gatherer"],
    include_result=False,
)
def introducer(reactor, temp_dir, flog_gatherer, request):
    config = '''
[node]
nickname = introducer0
web.port = 4560
log_gatherer.furl = {log_furl}
'''.format(log_furl=flog_gatherer)

    intro_dir = join(temp_dir, 'introducer')
    print("making introducer", intro_dir)

    if not exists(intro_dir):
        mkdir(intro_dir)
        done_proto = _ProcessExitedProtocol()
        _tahoe_runner_optional_coverage(
            done_proto,
            reactor,
            request,
            (
                'create-introducer',
                '--listen=tcp',
                '--hostname=localhost',
                intro_dir,
            ),
        )
        pytest_twisted.blockon(done_proto.done)

        # over-write the config file with our stuff
        with open(join(intro_dir, 'tahoe.cfg'), 'w') as f:
            f.write(config)

    # "tahoe run" is consistent across Linux/macOS/Windows, unlike the old
    # "start" command.
    protocol = _MagicTextProtocol('introducer running')
    transport = _tahoe_runner_optional_coverage(
        protocol,
        reactor,
        request,
        (
            'run',
            intro_dir,
        ),
    )
    request.addfinalizer(partial(_cleanup_tahoe_process, transport, protocol.exited))

    pytest_twisted.blockon(protocol.magic_seen)
    return TahoeProcess(transport, intro_dir)


@pytest.fixture(scope='session')
@log_call(action_type=u"integration:introducer:furl", include_args=["temp_dir"])
def introducer_furl(introducer, temp_dir):
    furl_fname = join(temp_dir, 'introducer', 'private', 'introducer.furl')
    while not exists(furl_fname):
        print("Don't see {} yet".format(furl_fname))
        sleep(.1)
    furl = open(furl_fname, 'r').read()
    tubID, location_hints, name = decode_furl(furl)
    if not location_hints:
        # If there are no location hints then nothing can ever possibly
        # connect to it and the only thing that can happen next is something
        # will hang or time out. So just give up right now.
        raise ValueError(
            "Introducer ({!r}) fURL has no location hints!".format(
                furl,
            ),
        )
    return furl


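# The Tor variants below mirror the plain introducer/introducer_furl pair,
# but listen via Tor and expect a local Tor control port at
# tcp:localhost:8010.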
@pytest.fixture(scope='session')
@log_call(
    action_type=u"integration:tor:introducer",
    include_args=["temp_dir", "flog_gatherer"],
    include_result=False,
)
def tor_introducer(reactor, temp_dir, flog_gatherer, request):
    config = '''
[node]
nickname = introducer_tor
web.port = 4561
log_gatherer.furl = {log_furl}
'''.format(log_furl=flog_gatherer)

    intro_dir = join(temp_dir, 'introducer_tor')
    print("making introducer", intro_dir)

    if not exists(intro_dir):
        mkdir(intro_dir)
        done_proto = _ProcessExitedProtocol()
        _tahoe_runner_optional_coverage(
            done_proto,
            reactor,
            request,
            (
                'create-introducer',
                '--tor-control-port', 'tcp:localhost:8010',
                '--listen=tor',
                intro_dir,
            ),
        )
        pytest_twisted.blockon(done_proto.done)

        # over-write the config file with our stuff
        with open(join(intro_dir, 'tahoe.cfg'), 'w') as f:
            f.write(config)

    # "tahoe run" is consistent across Linux/macOS/Windows, unlike the old
    # "start" command.
    protocol = _MagicTextProtocol('introducer running')
    transport = _tahoe_runner_optional_coverage(
        protocol,
        reactor,
        request,
        (
            'run',
            intro_dir,
        ),
    )

    def cleanup():
        try:
            transport.signalProcess('TERM')
            block_with_timeout(protocol.exited, reactor)
        except ProcessExitedAlready:
            pass
    request.addfinalizer(cleanup)

    pytest_twisted.blockon(protocol.magic_seen)
    return transport


@pytest.fixture(scope='session')
def tor_introducer_furl(tor_introducer, temp_dir):
    furl_fname = join(temp_dir, 'introducer_tor', 'private', 'introducer.furl')
    while not exists(furl_fname):
        print("Don't see {} yet".format(furl_fname))
        sleep(.1)
    furl = open(furl_fname, 'r').read()
    return furl


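# Five storage servers are created in parallel, each on its own localhost web
# port (9990-9994); together with the introducer they form the grid that the
# client fixtures below join.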
@pytest.fixture(scope='session')
@log_call(
    action_type=u"integration:storage_nodes",
    include_args=["temp_dir", "introducer_furl", "flog_gatherer"],
    include_result=False,
)
def storage_nodes(reactor, temp_dir, introducer, introducer_furl, flog_gatherer, request):
    nodes_d = []
    # start all 5 nodes in parallel
    for x in range(5):
        name = 'node{}'.format(x)
        web_port = 9990 + x
        nodes_d.append(
            _create_node(
                reactor, request, temp_dir, introducer_furl, flog_gatherer, name,
                web_port="tcp:{}:interface=localhost".format(web_port),
                storage=True,
            )
        )
    nodes_status = pytest_twisted.blockon(DeferredList(nodes_d))
    nodes = []
    for ok, process in nodes_status:
        assert ok, "Storage node creation failed: {}".format(process)
        nodes.append(process)
    return nodes


@pytest.fixture(scope="session")
def alice_sftp_client_key_path(temp_dir):
    # The client SSH key path would typically be somewhere else (~/.ssh,
    # usually), but for convenience when testing we put it inside the node
    # directory.
    return join(temp_dir, "alice", "private", "ssh_client_rsa_key")


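# "alice" is a non-storage client with SFTP enabled: the fixture creates a
# "test:" alias, appends an [sftpd] section listening on tcp:8022, and maps
# an SSH public key to that alias's read-write cap in the accounts file.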
@pytest.fixture(scope='session')
@log_call(action_type=u"integration:alice", include_args=[], include_result=False)
def alice(
        reactor,
        temp_dir,
        introducer_furl,
        flog_gatherer,
        storage_nodes,
        alice_sftp_client_key_path,
        request,
):
    process = pytest_twisted.blockon(
        _create_node(
            reactor, request, temp_dir, introducer_furl, flog_gatherer, "alice",
            web_port="tcp:9980:interface=localhost",
            storage=False,
            # We're going to kill this ourselves, so no need for finalizer to
            # do it:
            finalize=False,
        )
    )
    await_client_ready(process)

    # 1. Create a new RW directory cap:
    cli(process, "create-alias", "test")
    rwcap = loads(cli(process, "list-aliases", "--json"))["test"]["readwrite"]

    # 2. Enable SFTP on the node:
    host_ssh_key_path = join(process.node_dir, "private", "ssh_host_rsa_key")
    accounts_path = join(process.node_dir, "private", "accounts")
    with open(join(process.node_dir, "tahoe.cfg"), "a") as f:
        f.write("""\
[sftpd]
enabled = true
port = tcp:8022:interface=127.0.0.1
host_pubkey_file = {ssh_key_path}.pub
host_privkey_file = {ssh_key_path}
accounts.file = {accounts_path}
""".format(ssh_key_path=host_ssh_key_path, accounts_path=accounts_path))
    generate_ssh_key(host_ssh_key_path)

    # 3. Add an SFTP access file with an SSH key for auth.
    generate_ssh_key(alice_sftp_client_key_path)
    # Pub key format is "ssh-rsa <thekey> <username>". We want the key.
    ssh_public_key = open(alice_sftp_client_key_path + ".pub").read().strip().split()[1]
    with open(accounts_path, "w") as f:
        f.write("""\
alice-key ssh-rsa {ssh_public_key} {rwcap}
""".format(rwcap=rwcap, ssh_public_key=ssh_public_key))

    # 4. Restart the node with the new SFTP config.
    process.kill()
    pytest_twisted.blockon(_run_node(reactor, process.node_dir, request, None))

    await_client_ready(process)
    return process


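# A test could then reach alice's SFTP endpoint with an SSH client library.
# The following is only a hypothetical sketch (paramiko is assumed to be
# available; it is not imported by this module):
#
#     import paramiko
#     key = paramiko.RSAKey.from_private_key_file(alice_sftp_client_key_path)
#     transport = paramiko.Transport(("localhost", 8022))
#     transport.connect(username="alice-key", pkey=key)
#     sftp = paramiko.SFTPClient.from_transport(transport)

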
@pytest.fixture(scope='session')
@log_call(action_type=u"integration:bob", include_args=[], include_result=False)
def bob(reactor, temp_dir, introducer_furl, flog_gatherer, storage_nodes, request):
    process = pytest_twisted.blockon(
        _create_node(
            reactor, request, temp_dir, introducer_furl, flog_gatherer, "bob",
            web_port="tcp:9981:interface=localhost",
            storage=False,
        )
    )
    await_client_ready(process)
    return process


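# The chutney fixture clones the Tor Project's "chutney" tool into the temp
# dir; tor_network (below) uses it to stand up a small local Tor test network
# for the Tor-based tests.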
@pytest.fixture(scope='session')
@pytest.mark.skipif(sys.platform.startswith('win'),
                    reason='Tor tests are unstable on Windows')
def chutney(reactor, temp_dir):

    chutney_dir = join(temp_dir, 'chutney')
    mkdir(chutney_dir)

    # TODO:

    # check for 'tor' binary explicitly and emit a "skip" if we can't
    # find it

    # XXX yuck! should add a setup.py to chutney so we can at least
    # "pip install <path to tarball>" and/or depend on chutney in "pip
    # install -e .[dev]" (i.e. in the 'dev' extra)
    #
    # https://trac.torproject.org/projects/tor/ticket/20343
    proto = _DumpOutputProtocol(None)
    reactor.spawnProcess(
        proto,
        'git',
        (
            'git', 'clone',
            'https://git.torproject.org/chutney.git',
            chutney_dir,
        ),
        env=environ,
    )
    pytest_twisted.blockon(proto.done)

    # XXX: Here we reset Chutney to the last revision known to work
    # with Python 2, as a workaround for Chutney moving to Python 3.
    # When this is no longer necessary, we will have to drop this and
    # add '--depth=1' back to the above 'git clone' subprocess.
    proto = _DumpOutputProtocol(None)
    reactor.spawnProcess(
        proto,
        'git',
        (
            'git', '-C', chutney_dir,
            'reset', '--hard',
            '99bd06c7554b9113af8c0877b6eca4ceb95dcbaa'
        ),
        env=environ,
    )
    pytest_twisted.blockon(proto.done)

    return chutney_dir


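# tor_network configures and starts chutney's "networks/basic" network,
# registers a finalizer that stops it again, and returns the path to the
# chutney script at the root of the checkout.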
@pytest.fixture(scope='session')
@pytest.mark.skipif(sys.platform.startswith('win'),
                    reason='Tor tests are unstable on Windows')
def tor_network(reactor, temp_dir, chutney, request):

    # this is the actual "chutney" script at the root of a chutney checkout
    chutney_dir = chutney
    chut = join(chutney_dir, 'chutney')

    # now, as per Chutney's README, we have to create the network
    # ./chutney configure networks/basic
    # ./chutney start networks/basic

    env = environ.copy()
    env.update({"PYTHONPATH": join(chutney_dir, "lib")})
    proto = _DumpOutputProtocol(None)
    reactor.spawnProcess(
        proto,
        sys.executable,
        (
            sys.executable, '-m', 'chutney.TorNet', 'configure',
            join(chutney_dir, 'networks', 'basic'),
        ),
        path=join(chutney_dir),
        env=env,
    )
    pytest_twisted.blockon(proto.done)

    proto = _DumpOutputProtocol(None)
    reactor.spawnProcess(
        proto,
        sys.executable,
        (
            sys.executable, '-m', 'chutney.TorNet', 'start',
            join(chutney_dir, 'networks', 'basic'),
        ),
        path=join(chutney_dir),
        env=env,
    )
    pytest_twisted.blockon(proto.done)

    # print some useful stuff
    proto = _CollectOutputProtocol()
    reactor.spawnProcess(
        proto,
        sys.executable,
        (
            sys.executable, '-m', 'chutney.TorNet', 'status',
            join(chutney_dir, 'networks', 'basic'),
        ),
        path=join(chutney_dir),
        env=env,
    )
    try:
        pytest_twisted.blockon(proto.done)
    except ProcessTerminated:
        print("Chutney.TorNet status failed (continuing):")
        print(proto.output.getvalue())

    def cleanup():
        print("Tearing down Chutney Tor network")
        proto = _CollectOutputProtocol()
        reactor.spawnProcess(
            proto,
            sys.executable,
            (
                sys.executable, '-m', 'chutney.TorNet', 'stop',
                join(chutney_dir, 'networks', 'basic'),
            ),
            path=join(chutney_dir),
            env=env,
        )
        try:
            block_with_timeout(proto.done, reactor)
        except ProcessTerminated:
            # If this doesn't exit cleanly, that's fine, that shouldn't fail
            # the test suite.
            pass

    request.addfinalizer(cleanup)

    return chut