Merge remote-tracking branch 'origin/master' into integration/storage-economics

This commit is contained in:
Jean-Paul Calderone 2019-08-16 15:39:31 -04:00
commit 21bf7fc25c
48 changed files with 999 additions and 847 deletions

View File

@ -40,7 +40,7 @@ export PIP_FIND_LINKS="file://${WHEELHOUSE_PATH}"
"${PIP}" \
wheel \
--wheel-dir "${WHEELHOUSE_PATH}" \
"${PROJECT_ROOT}"[test,tor,i2p] \
"${PROJECT_ROOT}"[test] \
${BASIC_DEPS} \
${TEST_DEPS} \
${REPORTING_DEPS}

View File

@ -43,6 +43,17 @@ else
JUNITXML=""
fi
# A prefix for the test command that ensures it will exit after no more than a
# certain amount of time. Ideally, we would only enforce a "silent" period
# timeout but there isn't obviously a ready-made tool for that. The test
# suite only takes about 5 - 6 minutes on CircleCI right now. 15 minutes
# seems like a moderately safe window.
#
# This is primarily aimed at catching hangs on the PyPy job which runs for
# about 21 minutes and then gets killed by CircleCI in a way that fails the
# job and bypasses our "allowed failure" logic.
TIMEOUT="timeout --kill-after 1m 15m"
# Run the test suite as a non-root user. This is the expected usage; some
# small areas of the test suite assume non-root privileges (such as unreadable
# files being unreadable).
@ -63,7 +74,7 @@ else
alternative="false"
fi
${BOOTSTRAP_VENV}/bin/tox \
${TIMEOUT} ${BOOTSTRAP_VENV}/bin/tox \
-c ${PROJECT_ROOT}/tox.ini \
--workdir /tmp/tahoe-lafs.tox \
-e "${TAHOE_LAFS_TOX_ENVIRONMENT}" \

View File

@ -8,3 +8,5 @@ source =
omit =
*/allmydata/test/*
*/allmydata/_version.py
parallel = True
branch = True

View File

@ -193,6 +193,17 @@ You can also install directly from the source tarball URL::
tahoe-lafs: 1.13.0
...
Extras
------
Tahoe-LAFS provides some functionality only when explicitly requested at installation time.
It does this using the "extras" feature of setuptools.
You can request these extra features when running the ``pip install`` command like this::
% venv/bin/pip install tahoe-lafs[tor]
This example enables support for listening and connecting using Tor.
The Tahoe-LAFS documentation for specific features which require an explicit install-time step will mention the "extra" that must be requested.
Hacking On Tahoe-LAFS
---------------------

View File

@ -3,7 +3,7 @@ from __future__ import print_function
import sys
import shutil
from time import sleep
from os import mkdir, listdir
from os import mkdir, listdir, environ
from os.path import join, exists
from tempfile import mkdtemp, mktemp
from functools import partial
@ -15,6 +15,7 @@ from eliot import (
)
from twisted.python.procutils import which
from twisted.internet.defer import DeferredList
from twisted.internet.error import (
ProcessExitedAlready,
ProcessTerminated,
@ -30,7 +31,9 @@ from util import (
_ProcessExitedProtocol,
_create_node,
_run_node,
_cleanup_twistd_process,
_cleanup_tahoe_process,
_tahoe_runner_optional_coverage,
await_client_ready,
)
@ -41,6 +44,10 @@ def pytest_addoption(parser):
"--keep-tempdir", action="store_true", dest="keep",
help="Keep the tmpdir with the client directories (introducer, etc)",
)
parser.addoption(
"--coverage", action="store_true", dest="coverage",
help="Collect coverage statistics",
)
@pytest.fixture(autouse=True, scope='session')
def eliot_logging():
@ -125,7 +132,7 @@ def flog_gatherer(reactor, temp_dir, flog_binary, request):
pytest_twisted.blockon(twistd_protocol.magic_seen)
def cleanup():
_cleanup_twistd_process(twistd_process, twistd_protocol.exited)
_cleanup_tahoe_process(twistd_process, twistd_protocol.exited)
flog_file = mktemp('.flog_dump')
flog_protocol = _DumpOutputProtocol(open(flog_file, 'w'))
@ -174,11 +181,11 @@ log_gatherer.furl = {log_furl}
if not exists(intro_dir):
mkdir(intro_dir)
done_proto = _ProcessExitedProtocol()
reactor.spawnProcess(
_tahoe_runner_optional_coverage(
done_proto,
sys.executable,
reactor,
request,
(
sys.executable, '-m', 'allmydata.scripts.runner',
'create-introducer',
'--listen=tcp',
'--hostname=localhost',
@ -195,16 +202,16 @@ log_gatherer.furl = {log_furl}
# but on linux it means daemonize. "tahoe run" is consistent
# between platforms.
protocol = _MagicTextProtocol('introducer running')
process = reactor.spawnProcess(
process = _tahoe_runner_optional_coverage(
protocol,
sys.executable,
reactor,
request,
(
sys.executable, '-m', 'allmydata.scripts.runner',
'run',
intro_dir,
),
)
request.addfinalizer(partial(_cleanup_twistd_process, process, protocol.exited))
request.addfinalizer(partial(_cleanup_tahoe_process, process, protocol.exited))
pytest_twisted.blockon(protocol.magic_seen)
return process
@ -241,11 +248,11 @@ log_gatherer.furl = {log_furl}
if not exists(intro_dir):
mkdir(intro_dir)
done_proto = _ProcessExitedProtocol()
reactor.spawnProcess(
_tahoe_runner_optional_coverage(
done_proto,
sys.executable,
reactor,
request,
(
sys.executable, '-m', 'allmydata.scripts.runner',
'create-introducer',
'--tor-control-port', 'tcp:localhost:8010',
'--listen=tor',
@ -262,11 +269,11 @@ log_gatherer.furl = {log_furl}
# but on linux it means daemonize. "tahoe run" is consistent
# between platforms.
protocol = _MagicTextProtocol('introducer running')
process = reactor.spawnProcess(
transport = _tahoe_runner_optional_coverage(
protocol,
sys.executable,
reactor,
request,
(
sys.executable, '-m', 'allmydata.scripts.runner',
'run',
intro_dir,
),
@ -274,14 +281,14 @@ log_gatherer.furl = {log_furl}
def cleanup():
try:
process.signalProcess('TERM')
transport.signalProcess('TERM')
pytest_twisted.blockon(protocol.exited)
except ProcessExitedAlready:
pass
request.addfinalizer(cleanup)
pytest_twisted.blockon(protocol.magic_seen)
return process
return transport
@pytest.fixture(scope='session')
@ -301,20 +308,22 @@ def tor_introducer_furl(tor_introducer, temp_dir):
include_result=False,
)
def storage_nodes(reactor, temp_dir, introducer, introducer_furl, flog_gatherer, request):
nodes = []
nodes_d = []
# start all 5 nodes in parallel
for x in range(5):
name = 'node{}'.format(x)
# tub_port = 9900 + x
nodes.append(
pytest_twisted.blockon(
nodes_d.append(
_create_node(
reactor, request, temp_dir, introducer_furl, flog_gatherer, name,
web_port=None, storage=True,
)
)
)
#nodes = pytest_twisted.blockon(DeferredList(nodes))
nodes_status = pytest_twisted.blockon(DeferredList(nodes_d))
nodes = []
for ok, process in nodes_status:
assert ok, "Storage node creation failed: {}".format(process)
nodes.append(process)
return nodes
@ -333,6 +342,7 @@ def alice(reactor, temp_dir, introducer_furl, flog_gatherer, storage_nodes, requ
storage=False,
)
)
await_client_ready(process)
return process
@ -351,6 +361,7 @@ def bob(reactor, temp_dir, introducer_furl, flog_gatherer, storage_nodes, reques
storage=False,
)
)
await_client_ready(process)
return process
@ -363,13 +374,12 @@ def alice_invite(reactor, alice, temp_dir, request):
# FIXME XXX by the time we see "client running" in the logs, the
# storage servers aren't "really" ready to roll yet (uploads fairly
# consistently fail if we don't hack in this pause...)
import time ; time.sleep(5)
proto = _CollectOutputProtocol()
reactor.spawnProcess(
_tahoe_runner_optional_coverage(
proto,
sys.executable,
reactor,
request,
[
sys.executable, '-m', 'allmydata.scripts.runner',
'magic-folder', 'create',
'--poll-interval', '2',
'--basedir', node_dir, 'magik:', 'alice',
@ -380,11 +390,11 @@ def alice_invite(reactor, alice, temp_dir, request):
with start_action(action_type=u"integration:alice:magic_folder:invite") as a:
proto = _CollectOutputProtocol()
reactor.spawnProcess(
_tahoe_runner_optional_coverage(
proto,
sys.executable,
reactor,
request,
[
sys.executable, '-m', 'allmydata.scripts.runner',
'magic-folder', 'invite',
'--basedir', node_dir, 'magik:', 'bob',
]
@ -397,13 +407,14 @@ def alice_invite(reactor, alice, temp_dir, request):
# before magic-folder works, we have to stop and restart (this is
# crappy for the tests -- can we fix it in magic-folder?)
try:
alice.signalProcess('TERM')
pytest_twisted.blockon(alice.exited)
alice.transport.signalProcess('TERM')
pytest_twisted.blockon(alice.transport.exited)
except ProcessExitedAlready:
pass
with start_action(action_type=u"integration:alice:magic_folder:magic-text"):
magic_text = 'Completed initial Magic Folder scan successfully'
pytest_twisted.blockon(_run_node(reactor, node_dir, request, magic_text))
await_client_ready(alice)
return invite
@ -416,13 +427,13 @@ def magic_folder(reactor, alice_invite, alice, bob, temp_dir, request):
print("pairing magic-folder")
bob_dir = join(temp_dir, 'bob')
proto = _CollectOutputProtocol()
reactor.spawnProcess(
_tahoe_runner_optional_coverage(
proto,
sys.executable,
reactor,
request,
[
sys.executable, '-m', 'allmydata.scripts.runner',
'magic-folder', 'join',
'--poll-interval', '2',
'--poll-interval', '1',
'--basedir', bob_dir,
alice_invite,
join(temp_dir, 'magic-bob'),
@ -434,13 +445,14 @@ def magic_folder(reactor, alice_invite, alice, bob, temp_dir, request):
# crappy for the tests -- can we fix it in magic-folder?)
try:
print("Sending TERM to Bob")
bob.signalProcess('TERM')
pytest_twisted.blockon(bob.exited)
bob.transport.signalProcess('TERM')
pytest_twisted.blockon(bob.transport.exited)
except ProcessExitedAlready:
pass
magic_text = 'Completed initial Magic Folder scan successfully'
pytest_twisted.blockon(_run_node(reactor, bob_dir, request, magic_text))
await_client_ready(bob)
return (join(temp_dir, 'magic-alice'), join(temp_dir, 'magic-bob'))
@ -462,12 +474,13 @@ def chutney(reactor, temp_dir):
proto = _DumpOutputProtocol(None)
reactor.spawnProcess(
proto,
'/usr/bin/git',
'git',
(
'/usr/bin/git', 'clone', '--depth=1',
'git', 'clone', '--depth=1',
'https://git.torproject.org/chutney.git',
chutney_dir,
)
),
env=environ,
)
pytest_twisted.blockon(proto.done)
return chutney_dir
@ -483,6 +496,8 @@ def tor_network(reactor, temp_dir, chutney, request):
# ./chutney configure networks/basic
# ./chutney start networks/basic
env = environ.copy()
env.update({"PYTHONPATH": join(chutney_dir, "lib")})
proto = _DumpOutputProtocol(None)
reactor.spawnProcess(
proto,
@ -492,7 +507,7 @@ def tor_network(reactor, temp_dir, chutney, request):
join(chutney_dir, 'networks', 'basic'),
),
path=join(chutney_dir),
env={"PYTHONPATH": join(chutney_dir, "lib")},
env=env,
)
pytest_twisted.blockon(proto.done)
@ -505,7 +520,7 @@ def tor_network(reactor, temp_dir, chutney, request):
join(chutney_dir, 'networks', 'basic'),
),
path=join(chutney_dir),
env={"PYTHONPATH": join(chutney_dir, "lib")},
env=env,
)
pytest_twisted.blockon(proto.done)
@ -519,7 +534,7 @@ def tor_network(reactor, temp_dir, chutney, request):
join(chutney_dir, 'networks', 'basic'),
),
path=join(chutney_dir),
env={"PYTHONPATH": join(chutney_dir, "lib")},
env=env,
)
try:
pytest_twisted.blockon(proto.done)
@ -538,7 +553,7 @@ def tor_network(reactor, temp_dir, chutney, request):
join(chutney_dir, 'networks', 'basic'),
),
path=join(chutney_dir),
env={"PYTHONPATH": join(chutney_dir, "lib")},
env=env,
)
pytest_twisted.blockon(proto.done)
request.addfinalizer(cleanup)

View File

@ -336,10 +336,10 @@ def test_edmond_uploads_then_restarts(reactor, request, temp_dir, introducer_fur
assert created, "Didn't create a magic-folder"
# to actually-start the magic-folder we have to re-start
edmond.signalProcess('TERM')
yield edmond._protocol.exited
time.sleep(1)
edmond = yield util._run_node(reactor, edmond._node_dir, request, 'Completed initial Magic Folder scan successfully')
edmond.transport.signalProcess('TERM')
yield edmond.transport.exited
edmond = yield util._run_node(reactor, edmond.node_dir, request, 'Completed initial Magic Folder scan successfully')
util.await_client_ready(edmond)
# add a thing to the magic-folder
with open(join(magic_folder, "its_a_file"), "w") as f:
@ -383,10 +383,11 @@ def test_edmond_uploads_then_restarts(reactor, request, temp_dir, introducer_fur
# re-starting edmond right now would "normally" trigger the 2880 bug
# kill edmond
edmond.signalProcess('TERM')
yield edmond._protocol.exited
edmond.transport.signalProcess('TERM')
yield edmond.transport.exited
time.sleep(1)
edmond = yield util._run_node(reactor, edmond._node_dir, request, 'Completed initial Magic Folder scan successfully')
edmond = yield util._run_node(reactor, edmond.node_dir, request, 'Completed initial Magic Folder scan successfully')
util.await_client_ready(edmond)
# XXX how can we say for sure if we've waited long enough? look at
# tail of logs for magic-folder ... somethingsomething?
@ -408,7 +409,7 @@ def test_alice_adds_files_while_bob_is_offline(reactor, request, temp_dir, magic
bob_node_dir = join(temp_dir, "bob")
# Take Bob offline.
yield util.cli(reactor, bob_node_dir, "stop")
yield util.cli(request, reactor, bob_node_dir, "stop")
# Create a couple files in Alice's local directory.
some_files = list(
@ -422,7 +423,7 @@ def test_alice_adds_files_while_bob_is_offline(reactor, request, temp_dir, magic
good = False
for i in range(15):
status = yield util.magic_folder_cli(reactor, alice_node_dir, "status")
status = yield util.magic_folder_cli(request, reactor, alice_node_dir, "status")
good = status.count(".added-while-offline (36 B): good, version=0") == len(some_files) * 2
if good:
# We saw each file as having a local good state and a remote good

View File

@ -12,7 +12,7 @@ import pytest_twisted
@pytest_twisted.inlineCallbacks
def test_upload_immutable(reactor, temp_dir, introducer_furl, flog_gatherer, storage_nodes, request):
yield util._create_node(
edna = yield util._create_node(
reactor, request, temp_dir, introducer_furl, flog_gatherer, "edna",
web_port="tcp:9983:interface=localhost",
storage=False,
@ -20,13 +20,10 @@ def test_upload_immutable(reactor, temp_dir, introducer_furl, flog_gatherer, sto
happy=7,
total=10,
)
util.await_client_ready(edna)
node_dir = join(temp_dir, 'edna')
print("waiting 10 seconds unil we're maybe ready")
yield task.deferLater(reactor, 10, lambda: None)
# upload a file, which should fail because we don't have 7
# storage servers (but happiness is set to 7)
proto = util._CollectOutputProtocol()

View File

@ -1,5 +1,6 @@
import sys
import time
import json
from os import mkdir
from os.path import exists, join
from six.moves import StringIO
@ -9,6 +10,8 @@ from twisted.internet.defer import Deferred, succeed
from twisted.internet.protocol import ProcessProtocol
from twisted.internet.error import ProcessExitedAlready, ProcessDone
import requests
from allmydata.util.configutil import (
get_config,
set_config,
@ -106,19 +109,19 @@ class _MagicTextProtocol(ProcessProtocol):
sys.stdout.write(data)
def _cleanup_twistd_process(twistd_process, exited):
def _cleanup_tahoe_process(tahoe_transport, exited):
"""
Terminate the given process with a kill signal (SIGKILL on POSIX,
TerminateProcess on Windows).
:param twistd_process: The `IProcessTransport` representing the process.
:param tahoe_transport: The `IProcessTransport` representing the process.
:param exited: A `Deferred` which fires when the process has exited.
:return: After the process has exited.
"""
try:
print("signaling {} with KILL".format(twistd_process.pid))
twistd_process.signalProcess('KILL')
print("signaling {} with TERM".format(tahoe_transport.pid))
tahoe_transport.signalProcess('TERM')
print("signaled, blocking on exit")
pytest_twisted.blockon(exited)
print("exited, goodbye")
@ -126,7 +129,48 @@ def _cleanup_twistd_process(twistd_process, exited):
pass
def _tahoe_runner_optional_coverage(proto, reactor, request, other_args):
"""
Internal helper. Calls spawnProcess with `-m
allmydata.scripts.runner` and `other_args`, optionally inserting a
`--coverage` option if the `request` indicates we should.
"""
if request.config.getoption('coverage'):
args = [sys.executable, '-m', 'coverage', 'run', '-m', 'allmydata.scripts.runner', '--coverage']
else:
args = [sys.executable, '-m', 'allmydata.scripts.runner']
args += other_args
return reactor.spawnProcess(
proto,
sys.executable,
args,
)
class TahoeProcess(object):
    """
    A running Tahoe process, with associated information.

    Bundles the process transport together with the node's base
    directory so fixtures and helpers (e.g. ``await_client_ready``)
    can find the node's config and web port.
    """

    def __init__(self, process_transport, node_dir):
        # The IProcessTransport instance for the spawned process.
        self._process_transport = process_transport
        # Path to the node's base directory.
        self._node_dir = node_dir

    @property
    def transport(self):
        """The `IProcessTransport` for the running process."""
        return self._process_transport

    @property
    def node_dir(self):
        """The node's base directory (a path string)."""
        return self._node_dir

    def __repr__(self):
        # Helpful in test logs: identifies which node a process belongs to.
        return "<TahoeProcess in '{}'>".format(self._node_dir)
def _run_node(reactor, node_dir, request, magic_text):
"""
Run a tahoe process from its node_dir.
:returns: a TahoeProcess for this node
"""
if magic_text is None:
magic_text = "client running"
protocol = _MagicTextProtocol(magic_text)
@ -134,27 +178,29 @@ def _run_node(reactor, node_dir, request, magic_text):
# on windows, "tahoe start" means: run forever in the foreground,
# but on linux it means daemonize. "tahoe run" is consistent
# between platforms.
process = reactor.spawnProcess(
transport = _tahoe_runner_optional_coverage(
protocol,
sys.executable,
(
sys.executable, '-m', 'allmydata.scripts.runner',
reactor,
request,
[
'--eliot-destination', 'file:{}/logs/eliot.json'.format(node_dir),
'run',
node_dir,
),
],
)
process.exited = protocol.exited
transport.exited = protocol.exited
request.addfinalizer(partial(_cleanup_twistd_process, process, protocol.exited))
request.addfinalizer(partial(_cleanup_tahoe_process, transport, protocol.exited))
# we return the 'process' ITransport instance
# XXX abusing the Deferred; should use .when_magic_seen() or something?
# XXX abusing the Deferred; should use .when_magic_seen() pattern
def got_proto(proto):
process._protocol = proto
process._node_dir = node_dir
return process
transport._protocol = proto
return TahoeProcess(
transport,
node_dir,
)
protocol.magic_seen.addCallback(got_proto)
return protocol.magic_seen
@ -179,7 +225,6 @@ def _create_node(reactor, request, temp_dir, introducer_furl, flog_gatherer, nam
mkdir(node_dir)
done_proto = _ProcessExitedProtocol()
args = [
sys.executable, '-m', 'allmydata.scripts.runner',
'create-node',
'--nickname', name,
'--introducer', introducer_furl,
@ -189,16 +234,13 @@ def _create_node(reactor, request, temp_dir, introducer_furl, flog_gatherer, nam
'--shares-needed', unicode(needed),
'--shares-happy', unicode(happy),
'--shares-total', unicode(total),
'--helper',
]
if not storage:
args.append('--no-storage')
args.append(node_dir)
reactor.spawnProcess(
done_proto,
sys.executable,
args,
)
_tahoe_runner_optional_coverage(done_proto, reactor, request, args)
created_d = done_proto.done
def created(_):
@ -331,17 +373,118 @@ def await_file_vanishes(path, timeout=10):
raise FileShouldVanishException(path, timeout)
def cli(reactor, node_dir, *argv):
def cli(request, reactor, node_dir, *argv):
"""
Run a tahoe CLI subcommand for a given node, optionally running
under coverage if '--coverage' was supplied.
"""
proto = _CollectOutputProtocol()
reactor.spawnProcess(
proto,
sys.executable,
[
sys.executable, '-m', 'allmydata.scripts.runner',
'--node-directory', node_dir,
] + list(argv),
_tahoe_runner_optional_coverage(
proto, reactor, request,
['--node-directory', node_dir] + list(argv),
)
return proto.done
def magic_folder_cli(reactor, node_dir, *argv):
return cli(reactor, node_dir, "magic-folder", *argv)
def node_url(node_dir, uri_fragment):
    """
    Build a fully qualified URL for the node rooted at ``node_dir`` by
    reading its ``node.url`` file and appending ``uri_fragment``.
    """
    with open(join(node_dir, "node.url"), "r") as f:
        base_url = f.read().strip()
    return base_url + uri_fragment
def _check_status(response):
"""
Check the response code is a 2xx (raise an exception otherwise)
"""
if response.status_code < 200 or response.status_code >= 300:
raise ValueError(
"Expected a 2xx code, got {}".format(response.status_code)
)
def web_get(node_dir, uri_fragment, **kwargs):
    """
    Issue an HTTP GET against the web port of ``node_dir`` at
    ``uri_fragment`` (i.e. ``http://localhost:<webport>/<uri_fragment>``),
    forwarding any ``kwargs`` to ``requests.get``. Returns the raw response
    body; raises ``ValueError`` for a non-2xx status.
    """
    response = requests.get(node_url(node_dir, uri_fragment), **kwargs)
    _check_status(response)
    return response.content
def web_post(node_dir, uri_fragment, **kwargs):
    """
    Issue an HTTP POST against the web port of ``node_dir`` at
    ``uri_fragment`` (i.e. ``http://localhost:<webport>/<uri_fragment>``),
    forwarding any ``kwargs`` to ``requests.post``. Returns the raw response
    body; raises ``ValueError`` for a non-2xx status.
    """
    response = requests.post(node_url(node_dir, uri_fragment), **kwargs)
    _check_status(response)
    return response.content
def await_client_ready(process, timeout=10, liveness=60*2):
    """
    Uses the status API to wait for a client-type node (a `TahoeProcess`)
    to be 'ready'. A client is deemed ready if:

      - it answers ``http://<node_url>/?t=json``
      - there is at least one storage-server connected
      - every storage-server has a "last_received_data" and it is
        within the last `liveness` seconds

    We will try for up to `timeout` seconds for the above conditions
    to be true. Otherwise, a ``RuntimeError`` is raised.

    :param process: a TahoeProcess whose status endpoint to poll
    :param timeout: total seconds to keep retrying before giving up
    :param liveness: maximum age (seconds) of each server's
        last_received_data for the server to count as alive

    :return: ``True`` once the node is ready
    """
    start = time.time()
    while (time.time() - start) < float(timeout):
        try:
            data = web_get(process.node_dir, u"", params={u"t": u"json"})
            js = json.loads(data)
        except Exception as e:
            # node not answering (yet); retry until the deadline
            print("waiting because '{}'".format(e))
            time.sleep(1)
            continue

        if len(js['servers']) == 0:
            print("waiting because no servers at all")
            time.sleep(1)
            continue
        server_times = [
            server['last_received_data']
            for server in js['servers']
        ]
        # if any times are null/None that server has never been
        # contacted (so it's down still, probably)
        if any(t is None for t in server_times):
            print("waiting because at least one server not contacted")
            time.sleep(1)
            continue

        # check that all times are 'recent enough'; sample the clock once
        # so every server is judged against the same instant
        now = time.time()
        if any(now - t > liveness for t in server_times):
            print("waiting because at least one server too old")
            time.sleep(1)
            continue

        # we have a status with at least one server, and all servers
        # have been contacted recently
        return True
    # we only fall out of the loop when we've timed out
    raise RuntimeError(
        "Waited {} seconds for {} to be 'ready' but it never was".format(
            timeout,
            process.node_dir,
        )
    )
def magic_folder_cli(request, reactor, node_dir, *argv):
    """
    Run a ``tahoe magic-folder`` sub-command for the node at ``node_dir``;
    see `cli` for details (including optional coverage collection).
    """
    return cli(request, reactor, node_dir, *(("magic-folder",) + argv))

View File

@ -1,6 +1,6 @@
from __future__ import print_function
import sys, os, io
import sys, os, io, re
from twisted.internet import reactor, protocol, task, defer
from twisted.python.procutils import which
from twisted.python import usage
@ -12,6 +12,7 @@ from twisted.python import usage
class Options(usage.Options):
optParameters = [
["warnings", None, None, "file to write warnings into at end of test run"],
["package", None, None, "Python package to which to restrict warning collection"]
]
def parseArgs(self, command, *args):
@ -19,7 +20,7 @@ class Options(usage.Options):
self["args"] = list(args)
description = """Run as:
PYTHONWARNINGS=default::DeprecationWarning python run-deprecations.py [--warnings=STDERRFILE] COMMAND ARGS..
PYTHONWARNINGS=default::DeprecationWarning python run-deprecations.py [--warnings=STDERRFILE] [--package=PYTHONPACKAGE ] COMMAND ARGS..
"""
class RunPP(protocol.ProcessProtocol):
@ -34,6 +35,34 @@ class RunPP(protocol.ProcessProtocol):
rc = reason.value.exitCode
self.d.callback((signal, rc))
def make_matcher(options):
    """
    Make a function that matches a line with a relevant deprecation.

    A deprecation warning line looks something like this::

      somepath/foo/bar/baz.py:43: DeprecationWarning: Foo is deprecated, try bar instead.

    Sadly there is no guarantee warnings begin at the beginning of a line
    since they are written to output without coordination with whatever
    other Python code is running in the process, so the match may start
    anywhere in the line.

    :return: A one-argument callable that accepts a string and returns
        ``True`` if it contains an interesting warning and ``False``
        otherwise.
    """
    warning_pattern = r".*\.py[oc]?:\d+:"  # (Pending)?DeprecationWarning: .*"
    package = options["package"]
    if package:
        # Only count warnings raised from files under the named package.
        warning_pattern = r".*/{}/{}".format(re.escape(package), warning_pattern)
    compiled = re.compile(warning_pattern)
    return lambda line: compiled.match(line) is not None
@defer.inlineCallbacks
def run_command(main):
config = Options()
@ -63,6 +92,8 @@ def run_command(main):
reactor.spawnProcess(pp, exe, [exe] + config["args"], env=None)
(signal, rc) = yield pp.d
match = make_matcher(config)
# maintain ordering, but ignore duplicates (for some reason, either the
# 'warnings' module or twisted.python.deprecate isn't quashing them)
already = set()
@ -75,12 +106,12 @@ def run_command(main):
pp.stdout.seek(0)
for line in pp.stdout.readlines():
if "DeprecationWarning" in line:
if match(line):
add(line) # includes newline
pp.stderr.seek(0)
for line in pp.stderr.readlines():
if "DeprecationWarning" in line:
if match(line):
add(line)
if warnings:

0
newsfragments/2283.minor Normal file
View File

View File

@ -0,0 +1 @@
Tahoe-LAFS no longer makes start-up time assertions about the versions of its dependencies. It is the responsibility of the administrator of the installation to ensure the correct version of dependencies are supplied.

0
newsfragments/2766.minor Normal file
View File

0
newsfragments/3232.minor Normal file
View File

0
newsfragments/3233.minor Normal file
View File

1
newsfragments/3234.other Normal file
View File

@ -0,0 +1 @@
Collect coverage information from integration tests

View File

@ -0,0 +1 @@
Enable the helper when creating a node with `tahoe create-node --helper`

1
newsfragments/3237.minor Normal file
View File

@ -0,0 +1 @@
Wait for integration-test clients to be ready using status-API

0
newsfragments/3238.minor Normal file
View File

0
newsfragments/3239.minor Normal file
View File

0
newsfragments/3240.minor Normal file
View File

126
setup.py
View File

@ -30,18 +30,104 @@ def read_version_py(infname):
VERSION_PY_FILENAME = 'src/allmydata/_version.py'
version = read_version_py(VERSION_PY_FILENAME)
# Tahoe's dependencies are managed by the find_links= entry in setup.cfg and
# the _auto_deps.install_requires list, which is used in the call to setup()
# below.
adglobals = {}
auto_deps_fn = "src/allmydata/_auto_deps.py"
if sys.version_info[0] >= 3:
exec(compile(open(auto_deps_fn, 'rb').read(), auto_deps_fn, "exec"),
adglobals, adglobals)
else:
execfile(auto_deps_fn, adglobals)
install_requires = adglobals['install_requires']
setup_requires = adglobals['setup_requires']
install_requires = [
# we don't need much out of setuptools but the version checking stuff
# needs pkg_resources and PEP 440 version specifiers.
"setuptools >= 28.8.0",
"zfec >= 1.1.0",
# zope.interface >= 3.6.0 is required for Twisted >= 12.1.0.
# zope.interface 3.6.3 and 3.6.4 are incompatible with Nevow (#1435).
"zope.interface >= 3.6.0, != 3.6.3, != 3.6.4",
# * foolscap < 0.5.1 had a performance bug which spent O(N**2) CPU for
# transferring large mutable files of size N.
# * foolscap < 0.6 is incompatible with Twisted 10.2.0.
# * foolscap 0.6.1 quiets a DeprecationWarning.
# * foolscap < 0.6.3 is incompatible with Twisted 11.1.0 and newer.
# * foolscap 0.8.0 generates 2048-bit RSA-with-SHA-256 signatures,
# rather than 1024-bit RSA-with-MD5. This also allows us to work
# with a FIPS build of OpenSSL.
# * foolscap >= 0.12.3 provides tcp/tor/i2p connection handlers we need,
# and allocate_tcp_port
# * foolscap >= 0.12.5 has ConnectionInfo and ReconnectionInfo
# * foolscap >= 0.12.6 has an i2p.sam_endpoint() that takes kwargs
"foolscap >= 0.12.6",
# * On Linux we need at least Twisted 10.1.0 for inotify support
# used by the drop-upload frontend.
# * We also need Twisted 10.1.0 for the FTP frontend in order for
# Twisted's FTP server to support asynchronous close.
# * The SFTP frontend depends on Twisted 11.0.0 to fix the SSH server
# rekeying bug <https://twistedmatrix.com/trac/ticket/4395>
# * The FTP frontend depends on Twisted >= 11.1.0 for
# filepath.Permissions
# * Nevow 0.11.1 depends on Twisted >= 13.0.0.
# * The SFTP frontend and manhole depend on the conch extra. However, we
# can't explicitly declare that without an undesirable dependency on gmpy,
# as explained in ticket #2740.
# * Due to a setuptools bug, we need to declare a dependency on the tls
# extra even though we only depend on it via foolscap.
# * Twisted >= 15.1.0 is the first version that provided the [tls] extra.
# * Twisted-16.1.0 fixes https://twistedmatrix.com/trac/ticket/8223,
# which otherwise causes test_system to fail (DirtyReactorError, due to
# leftover timers)
# * Twisted-16.4.0 introduces `python -m twisted.trial` which is needed
# for coverage testing
# * Twisted 16.6.0 drops the undesirable gmpy dependency from the conch
# extra, letting us use that extra instead of trying to duplicate its
# dependencies here. Twisted[conch] >18.7 introduces a dependency on
# bcrypt. It is nice to avoid that if the user ends up with an older
# version of Twisted. That's hard to express except by using the extra.
#
# In a perfect world, Twisted[conch] would be a dependency of an "sftp"
# extra. However, pip fails to resolve all of the
# dependencies when asked for Twisted[tls] *and* Twisted[conch].
# Specifically, "Twisted[conch]" (as the later requirement) is ignored.
# If there were a Tahoe-LAFS sftp extra that depended on
# Twisted[conch] and install_requires only included Twisted[tls] then
# `pip install tahoe-lafs[sftp]` would not install requirements
# specified by Twisted[conch]. Since this would be the *whole point* of
# an sftp extra in Tahoe-LAFS, there is no point in having one.
"Twisted[tls,conch] >= 16.6.0",
# We need Nevow >= 0.11.1 which can be installed using pip.
"Nevow >= 0.11.1",
"PyYAML >= 3.11",
"six >= 1.10.0",
# for 'tahoe invite' and 'tahoe join'
"magic-wormhole >= 0.10.2",
# Eliot is contemplating dropping Python 2 support. Stick to a version we
# know works on Python 2.7.
"eliot ~= 1.7",
# A great way to define types of values.
"attrs >= 18.2.0",
# WebSocket library for twisted and asyncio
"autobahn >= 19.5.2",
]
setup_requires = [
'setuptools >= 28.8.0', # for PEP-440 style versions
]
tor_requires = [
# This is exactly what `foolscap[tor]` means but pip resolves the pair of
# dependencies "foolscap[i2p] foolscap[tor]" to "foolscap[i2p]" so we lose
# this if we don't declare it ourselves!
"txtorcon >= 0.17.0",
]
i2p_requires = [
# See the comment in tor_requires.
"txi2p >= 0.3.2",
]
if len(sys.argv) > 1 and sys.argv[1] == '--fakedependency':
del sys.argv[1]
@ -265,10 +351,6 @@ setup(name="tahoe-lafs", # also set in __init__.py
"coverage",
"mock",
"tox",
"foolscap[tor] >= 0.12.5",
"txtorcon >= 0.17.0", # in case pip's resolver doesn't work
"foolscap[i2p] >= 0.12.6",
"txi2p >= 0.3.2", # in case pip's resolver doesn't work
"pytest",
"pytest-twisted",
"hypothesis >= 3.6.1",
@ -276,15 +358,9 @@ setup(name="tahoe-lafs", # also set in __init__.py
"towncrier",
"testtools",
"fixtures",
],
"tor": [
"foolscap[tor] >= 0.12.5",
"txtorcon >= 0.17.0", # in case pip's resolver doesn't work
],
"i2p": [
"foolscap[i2p] >= 0.12.6",
"txi2p >= 0.3.2", # in case pip's resolver doesn't work
],
] + tor_requires + i2p_requires,
"tor": tor_requires,
"i2p": i2p_requires,
},
package_data={"allmydata.web": ["*.xhtml",
"static/*.js", "static/*.png", "static/*.css",

View File

@ -3,15 +3,14 @@ Decentralized storage grid.
community web site: U{https://tahoe-lafs.org/}
"""
import six
class PackagingError(EnvironmentError):
"""
Raised when there is an error in packaging of Tahoe-LAFS or its
dependencies which makes it impossible to proceed safely.
"""
pass
__all__ = [
"__version__",
"full_version",
"branch",
"__appname__",
"__full_version__",
]
__version__ = "unknown"
try:
@ -38,470 +37,3 @@ __appname__ = "tahoe-lafs"
# in the "application" part of the Tahoe versioning scheme:
# https://tahoe-lafs.org/trac/tahoe-lafs/wiki/Versioning
__full_version__ = __appname__ + '/' + str(__version__)
import os, platform, re, subprocess, sys, traceback
_distributor_id_cmdline_re = re.compile("(?:Distributor ID:)\s*(.*)", re.I)
_release_cmdline_re = re.compile("(?:Release:)\s*(.*)", re.I)
_distributor_id_file_re = re.compile("(?:DISTRIB_ID\s*=)\s*(.*)", re.I)
_release_file_re = re.compile("(?:DISTRIB_RELEASE\s*=)\s*(.*)", re.I)
_distname = None
_version = None
def get_linux_distro():
    """ Tries to determine the name of the Linux OS distribution name.

    First, try to parse a file named "/etc/lsb-release".  If it exists, and
    contains the "DISTRIB_ID=" line and the "DISTRIB_RELEASE=" line, then return
    the strings parsed from that file.

    If that doesn't work, then invoke platform.dist().

    If that doesn't work, then try to execute "lsb_release", as standardized in
    2001:

    http://refspecs.freestandards.org/LSB_1.0.0/gLSB/lsbrelease.html

    The current version of the standard is here:

    http://refspecs.freestandards.org/LSB_3.2.0/LSB-Core-generic/LSB-Core-generic/lsbrelease.html

    and return the strings that lsb_release emitted, as strings.

    Returns a tuple (distname,version). Distname is what LSB calls a
    "distributor id", e.g. "Ubuntu".  Version is what LSB calls a "release",
    e.g. "8.04".

    A version of this has been submitted to python as a patch for the standard
    library module "platform":

    http://bugs.python.org/issue3937
    """
    global _distname,_version
    # Results are memoized in module globals; return the cached pair once
    # both pieces have been discovered.
    if _distname and _version:
        return (_distname, _version)

    # Strategy 1: parse /etc/lsb-release directly.
    try:
        etclsbrel = open("/etc/lsb-release", "rU")
        for line in etclsbrel:
            m = _distributor_id_file_re.search(line)
            if m:
                _distname = m.group(1).strip()
                if _distname and _version:
                    return (_distname, _version)
            m = _release_file_re.search(line)
            if m:
                _version = m.group(1).strip()
                if _distname and _version:
                    return (_distname, _version)
    except EnvironmentError:
        pass

    # Strategy 2: platform.dist() (Python 2 only; removed in Python 3.8).
    (_distname, _version) = platform.dist()[:2]
    if _distname and _version:
        return (_distname, _version)

    # Strategy 3: run the lsb_release executable, if present.
    if os.path.isfile("/usr/bin/lsb_release") or os.path.isfile("/bin/lsb_release"):
        try:
            p = subprocess.Popen(["lsb_release", "--all"], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            rc = p.wait()
            if rc == 0:
                for line in p.stdout.readlines():
                    m = _distributor_id_cmdline_re.search(line)
                    if m:
                        _distname = m.group(1).strip()
                        if _distname and _version:
                            return (_distname, _version)

                    m = _release_cmdline_re.search(p.stdout.read())
                    if m:
                        _version = m.group(1).strip()
                        if _distname and _version:
                            return (_distname, _version)
        except EnvironmentError:
            pass

    # Strategy 4: Arch Linux ships no lsb-release file but has a marker file.
    if os.path.exists("/etc/arch-release"):
        return ("Arch_Linux", "")

    # Fall through: return whatever (possibly None) pieces were found.
    return (_distname,_version)
def get_platform():
    """
    Our version of platform.platform(), telling us both less and more than
    the Python Standard Library's version does: kernel version details are
    omitted, but a more detailed and correct rendition of the Linux
    distribution and distribution-version is included.
    """
    system = platform.system()
    if "linux" not in system.lower():
        return platform.platform()
    distro = "_".join(get_linux_distro())
    arch = "_".join([piece for piece in platform.architecture() if piece])
    return "-".join([system, distro, platform.machine(), arch])
from allmydata.util import verlib
def normalized_version(verstr, what=None):
    """
    Parse *verstr* into a comparable ``verlib.NormalizedVersion``.

    :param verstr: the version string to parse; if it is not already in
        normalized form, a normalization is suggested first.
    :param what: optional human-readable description of where the version
        string came from, used in the failure message.

    :raises verlib.IrrationalVersionError: re-raised unchanged when the
        string cannot even be approximated to a rational version.
    :raises PackagingError: for any other parse failure; the original
        traceback is preserved via ``six.reraise``.  Note that
        ``StandardError`` is Python 2 only.
    """
    try:
        suggested = verlib.suggest_normalized_version(verstr) or verstr
        return verlib.NormalizedVersion(suggested)
    except verlib.IrrationalVersionError:
        raise
    except StandardError:
        cls, value, trace = sys.exc_info()
        # Substitute a more informative exception while keeping the original
        # traceback attached.
        new_exc = PackagingError("could not parse %s due to %s: %s"
                                 % (what or repr(verstr), cls.__name__, value))
        six.reraise(cls, new_exc, trace)
def get_openssl_version():
    """
    Report the OpenSSL version triple pyOpenSSL is linked against, or
    ``("unknown", None, None)`` if pyOpenSSL is unavailable or probing
    fails for any reason.
    """
    try:
        from OpenSSL import SSL
    except Exception:
        return ("unknown", None, None)
    try:
        return extract_openssl_version(SSL)
    except Exception:
        return ("unknown", None, None)
def extract_openssl_version(ssl_module):
    """
    Extract ``(version, None, comment-or-None)`` from a pyOpenSSL SSL module.

    The leading "OpenSSL " prefix is stripped from the reported version
    string; the first word becomes the version and the remainder (plus a
    ", no heartbeats" marker when the build disabled heartbeats) becomes
    the comment.
    """
    reported = ssl_module.SSLeay_version(ssl_module.SSLEAY_VERSION)
    prefix = 'OpenSSL '
    if reported.startswith(prefix):
        reported = reported[len(prefix):]
    version, _, comment = reported.partition(' ')
    no_heartbeats = False
    try:
        cflags = ssl_module.SSLeay_version(ssl_module.SSLEAY_CFLAGS)
        no_heartbeats = '-DOPENSSL_NO_HEARTBEATS' in cflags.split(' ')
    except Exception:
        # Best-effort probe; older builds may not expose CFLAGS.
        pass
    if no_heartbeats:
        comment += ", no heartbeats"
    return (version, None, comment or None)
def get_package_versions_and_locations():
    """
    Import each of our dependencies and determine the version and install
    location actually in use.

    :return: a 2-tuple ``(packages, cross_check_errors)`` where ``packages``
        is a list of ``(name, (version, location, comment))`` tuples and
        ``cross_check_errors`` is a list of warning strings from comparing
        the imported versions against what pkg_resources reports.
    """
    import warnings
    from _auto_deps import package_imports, global_deprecation_messages, deprecation_messages, \
        runtime_warning_messages, warning_imports, ignorable

    def package_dir(srcfile):
        # The installation directory of a package, given one of its source
        # files: strip the filename and the package directory itself.
        return os.path.dirname(os.path.dirname(os.path.normcase(os.path.realpath(srcfile))))

    # pkg_resources.require returns the distribution that pkg_resources attempted to put
    # on sys.path, which can differ from the one that we actually import due to #1258,
    # or any other bug that causes sys.path to be set up incorrectly. Therefore we
    # must import the packages in order to check their versions and paths.

    # This is to suppress all UserWarnings and various DeprecationWarnings and RuntimeWarnings
    # (listed in _auto_deps.py).
    warnings.filterwarnings("ignore", category=UserWarning, append=True)
    for msg in global_deprecation_messages + deprecation_messages:
        warnings.filterwarnings("ignore", category=DeprecationWarning, message=msg, append=True)
    for msg in runtime_warning_messages:
        warnings.filterwarnings("ignore", category=RuntimeWarning, message=msg, append=True)
    try:
        for modulename in warning_imports:
            try:
                __import__(modulename)
            except ImportError:
                pass
    finally:
        # Leave suppressions for UserWarnings and global_deprecation_messages active.
        for _ in runtime_warning_messages + deprecation_messages:
            warnings.filters.pop()

    packages = []
    pkg_resources_vers_and_locs = dict()

    if not hasattr(sys, 'frozen'):
        import pkg_resources
        from _auto_deps import install_requires

        pkg_resources_vers_and_locs = dict([(p.project_name.lower(), (str(p.version), p.location))
                                            for p in pkg_resources.require(install_requires)])

    def get_version(module):
        # Best-effort version probe: prefer __version__, fall back to
        # ``version`` (joining tuple forms), else 'unknown'.
        if hasattr(module, '__version__'):
            return str(getattr(module, '__version__'))
        elif hasattr(module, 'version'):
            ver = getattr(module, 'version')
            if isinstance(ver, tuple):
                return '.'.join(map(str, ver))
            else:
                return str(ver)
        else:
            return 'unknown'

    for pkgname, modulename in [(__appname__, 'allmydata')] + package_imports:
        if modulename:
            try:
                __import__(modulename)
                module = sys.modules[modulename]
            except ImportError:
                # Record the failure (type, message, last traceback frame)
                # in place of a version so callers can report it.
                etype, emsg, etrace = sys.exc_info()
                trace_info = (etype, str(emsg), ([None] + traceback.extract_tb(etrace))[-1])
                packages.append( (pkgname, (None, None, trace_info)) )
            else:
                comment = None
                if pkgname == __appname__:
                    comment = "%s: %s" % (branch, full_version)
                elif pkgname == 'setuptools' and hasattr(module, '_distribute'):
                    # distribute does not report its version in any module variables
                    comment = 'distribute'
                ver = get_version(module)
                loc = package_dir(module.__file__)
                if ver == "unknown" and pkgname in pkg_resources_vers_and_locs:
                    # Trust pkg_resources' version only when it agrees on
                    # the install location.
                    (pr_ver, pr_loc) = pkg_resources_vers_and_locs[pkgname]
                    if loc == os.path.normcase(os.path.realpath(pr_loc)):
                        ver = pr_ver
                packages.append( (pkgname, (ver, loc, comment)) )
        elif pkgname == 'python':
            packages.append( (pkgname, (platform.python_version(), sys.executable, None)) )
        elif pkgname == 'platform':
            packages.append( (pkgname, (get_platform(), None, None)) )
        elif pkgname == 'OpenSSL':
            packages.append( (pkgname, get_openssl_version()) )

    cross_check_errors = []

    if len(pkg_resources_vers_and_locs) > 0:
        imported_packages = set([p.lower() for (p, _) in packages])
        extra_packages = []

        # Anything pkg_resources knows about that we never imported is
        # reported as an extra entry (unless explicitly ignorable).
        for pr_name, (pr_ver, pr_loc) in pkg_resources_vers_and_locs.iteritems():
            if pr_name not in imported_packages and pr_name not in ignorable:
                extra_packages.append( (pr_name, (pr_ver, pr_loc, "according to pkg_resources")) )

        cross_check_errors = cross_check(pkg_resources_vers_and_locs, packages)
        packages += extra_packages

    return packages, cross_check_errors
def split_requirement(req):
    """
    Split up a single requirement string into the different version constraint pieces.

    This is like req.split(",") except it doesn't split on , found inside [].

    :return: A list of the split up pieces.
    """
    pieces = []
    current = []
    inside_brackets = False
    for character in req:
        if character == '[':
            inside_brackets = True
        elif character == ']':
            inside_brackets = False
        if character == ',' and not inside_brackets:
            # A comma outside the extras brackets ends the current piece.
            pieces.append(''.join(current))
            current = []
        else:
            current.append(character)
    pieces.append(''.join(current))
    return pieces
def check_requirement(req, vers_and_locs):
    """
    Check that the dependency described by the requirement string *req* is
    satisfied by the imported-package information in *vers_and_locs*.

    :param req: a requirement string such as ``"zfec >= 1.1.0"``.
    :param vers_and_locs: dict mapping lowercase package name to
        ``(version, location, comment)``.

    :raises PackagingError: if no version info is available for the package
        or the requirement is not satisfied.
    :raises ImportError: if the package failed to import (version is None).
    """
    # We support only conjunctions of <=, >=, and !=
    reqlist = split_requirement(req)
    # The package name is whatever precedes the first operator or extras
    # bracket in the first piece.
    name = reqlist[0].split('<=')[0].split('>=')[0].split('!=')[0].strip(' ').split('[')[0]
    if name not in vers_and_locs:
        raise PackagingError("no version info for %s" % (name,))
    if req.strip(' ') == name:
        # A bare name with no version constraints: presence is enough.
        return
    (actual, location, comment) = vers_and_locs[name]
    if actual is None:
        # comment is (type, message, (filename, line number, function name, text)) for the original ImportError
        raise ImportError("for requirement %r: %s" % (req, comment))
    if actual == 'unknown':
        # Cannot tell the version; give it the benefit of the doubt.
        return
    try:
        actualver = normalized_version(actual, what="actual version %r of %s from %r" %
                                               (actual, name, location))
        matched = match_requirement(req, reqlist, actualver)
    except verlib.IrrationalVersionError:
        # meh, it probably doesn't matter
        return
    if not matched:
        msg = ("We require %s, but could only find version %s.\n" % (req, actual))
        if location and location != 'unknown':
            msg += "The version we found is from %r.\n" % (location,)
        msg += ("To resolve this problem, uninstall that version, either using your\n"
                "operating system's package manager or by moving aside the directory.")
        raise PackagingError(msg)
def match_requirement(req, reqlist, actualver):
    """
    Return True if *actualver* satisfies every constraint piece in *reqlist*.

    :param req: the original requirement string (used only in messages).
    :param reqlist: the constraint pieces produced by split_requirement(req).
    :param actualver: a normalized version to test against each piece.

    :raises PackagingError: if a piece uses an unsupported operator.
    """
    for r in reqlist:
        # Try each supported operator in turn: <=, then >=, then !=.
        s = r.split('<=')
        if len(s) == 2:
            required = s[1].strip(' ')
            if not (actualver <= normalized_version(required, what="required maximum version %r in %r" % (required, req))):
                return False  # maximum requirement not met
        else:
            s = r.split('>=')
            if len(s) == 2:
                required = s[1].strip(' ')
                if not (actualver >= normalized_version(required, what="required minimum version %r in %r" % (required, req))):
                    return False  # minimum requirement not met
            else:
                s = r.split('!=')
                if len(s) == 2:
                    required = s[1].strip(' ')
                    if not (actualver != normalized_version(required, what="excluded version %r in %r" % (required, req))):
                        return False  # not-equal requirement not met
                else:
                    raise PackagingError("no version info or could not understand requirement %r" % (req,))
    return True
def cross_check(pkg_resources_vers_and_locs, imported_vers_and_locs_list):
    """This function returns a list of errors due to any failed cross-checks."""
    from _auto_deps import not_import_versionable

    errors = []
    # These cannot be checked against pkg_resources at all.
    not_pkg_resourceable = ['python', 'platform', __appname__.lower(), 'openssl']

    for name, (imp_ver, imp_loc, imp_comment) in imported_vers_and_locs_list:
        name = name.lower()
        if name not in not_pkg_resourceable:
            if name not in pkg_resources_vers_and_locs:
                if name == "setuptools" and "distribute" in pkg_resources_vers_and_locs:
                    # pkg_resources may know setuptools under its
                    # 'distribute' fork name; a version mismatch is then
                    # expected, but the install locations must agree.
                    pr_ver, pr_loc = pkg_resources_vers_and_locs["distribute"]
                    if not (os.path.normpath(os.path.realpath(pr_loc)) == os.path.normpath(os.path.realpath(imp_loc))
                            and imp_comment == "distribute"):
                        errors.append("Warning: dependency 'setuptools' found to be version %r of 'distribute' from %r "
                                      "by pkg_resources, but 'import setuptools' gave version %r [%s] from %r. "
                                      "A version mismatch is expected, but a location mismatch is not."
                                      % (pr_ver, pr_loc, imp_ver, imp_comment or 'probably *not* distribute', imp_loc))
                else:
                    errors.append("Warning: dependency %r (version %r imported from %r) was not found by pkg_resources."
                                  % (name, imp_ver, imp_loc))
                continue

            pr_ver, pr_loc = pkg_resources_vers_and_locs[name]
            if imp_ver is None and imp_loc is None:
                # The import failed entirely; report what pkg_resources
                # thought we should have gotten.
                errors.append("Warning: dependency %r could not be imported. pkg_resources thought it should be possible "
                              "to import version %r from %r.\nThe exception trace was %r."
                              % (name, pr_ver, pr_loc, imp_comment))
                continue

            # If the pkg_resources version is identical to the imported version, don't attempt
            # to normalize them, since it is unnecessary and may fail (ticket #2499).
            if imp_ver != 'unknown' and pr_ver == imp_ver:
                continue

            try:
                pr_normver = normalized_version(pr_ver)
            except verlib.IrrationalVersionError:
                continue
            except Exception as e:
                errors.append("Warning: version number %r found for dependency %r by pkg_resources could not be parsed. "
                              "The version found by import was %r from %r. "
                              "pkg_resources thought it should be found at %r. "
                              "The exception was %s: %s"
                              % (pr_ver, name, imp_ver, imp_loc, pr_loc, e.__class__.__name__, e))
            else:
                if imp_ver == 'unknown':
                    if name not in not_import_versionable:
                        errors.append("Warning: unexpectedly could not find a version number for dependency %r imported from %r. "
                                      "pkg_resources thought it should be version %r at %r."
                                      % (name, imp_loc, pr_ver, pr_loc))
                else:
                    try:
                        imp_normver = normalized_version(imp_ver)
                    except verlib.IrrationalVersionError:
                        continue
                    except Exception as e:
                        errors.append("Warning: version number %r found for dependency %r (imported from %r) could not be parsed. "
                                      "pkg_resources thought it should be version %r at %r. "
                                      "The exception was %s: %s"
                                      % (imp_ver, name, imp_loc, pr_ver, pr_loc, e.__class__.__name__, e))
                    else:
                        if pr_ver == 'unknown' or (pr_normver != imp_normver):
                            # A version mismatch is only reported when the
                            # install locations differ as well.
                            if not os.path.normpath(os.path.realpath(pr_loc)) == os.path.normpath(os.path.realpath(imp_loc)):
                                errors.append("Warning: dependency %r found to have version number %r (normalized to %r, from %r) "
                                              "by pkg_resources, but version %r (normalized to %r, from %r) by import."
                                              % (name, pr_ver, str(pr_normver), pr_loc, imp_ver, str(imp_normver), imp_loc))

    return errors
# Computed once at import time; consumed by the get_package_* helpers below.
_vers_and_locs_list, _cross_check_errors = get_package_versions_and_locations()
def get_error_string(errors, debug=False):
    """
    Render a list of error strings into one message, optionally appending
    environment details (PYTHONPATH, install_requires, sys.path) that are
    useful for debugging packaging problems.
    """
    from allmydata._auto_deps import install_requires

    msg = "\n%s\n" % ("\n".join(errors),)
    if debug:
        details = (
            "\n"
            "For debugging purposes, the PYTHONPATH was\n"
            " %r\n"
            "install_requires was\n"
            " %r\n"
            "sys.path after importing pkg_resources was\n"
            " %s\n"
            % (os.environ.get('PYTHONPATH'), install_requires, (os.pathsep+"\n ").join(sys.path))
        )
        msg = msg + details
    return msg
def check_all_requirements():
    """
    Check every entry in install_requires against the discovered package
    versions, raising PackagingError (with all accumulated errors and
    cross-check warnings) if any check fails.
    """
    from allmydata._auto_deps import install_requires

    fatal_errors = []

    # We require at least 2.6 on all platforms.
    # (On Python 3, we'll have failed long before this point.)
    if sys.version_info < (2, 6):
        try:
            version_string = ".".join(map(str, sys.version_info))
        except Exception:
            version_string = repr(sys.version_info)
        fatal_errors.append("Tahoe-LAFS currently requires Python v2.6 or greater (but less than v3), not %s"
                            % (version_string,))

    vers_and_locs = dict(_vers_and_locs_list)
    for requirement in install_requires:
        try:
            check_requirement(requirement, vers_and_locs)
        except (ImportError, PackagingError) as e:
            fatal_errors.append("%s: %s" % (e.__class__.__name__, e))

    if fatal_errors:
        raise PackagingError(get_error_string(fatal_errors + _cross_check_errors, debug=True))

# Validate the environment once, at import time.
check_all_requirements()
def get_package_versions():
    """Map each known package name to its discovered version string."""
    return {name: version for name, (version, _location, _comment) in _vers_and_locs_list}
def get_package_locations():
    """Map each known package name to its discovered install location."""
    return {name: location for name, (_version, location, _comment) in _vers_and_locs_list}
def get_package_versions_string(show_paths=False, debug=False):
    """
    Render the discovered package versions, one line per package, optionally
    with install paths, followed by any cross-check warnings.
    """
    def describe(name, version, location, comment):
        # "name: version [comment] (location)" with the optional parts
        # included on demand.
        pieces = [str(name) + ": " + str(version)]
        if comment:
            pieces.append(" [%s]" % str(comment))
        if show_paths:
            pieces.append(" (%s)" % str(location))
        return "".join(pieces)

    lines = [
        describe(name, version, location, comment)
        for name, (version, location, comment) in _vers_and_locs_list
    ]
    output = "\n".join(lines) + "\n"

    if _cross_check_errors:
        output += get_error_string(_cross_check_errors, debug=debug)

    return output

View File

@ -4,114 +4,6 @@
# It is ok to import modules from the Python Standard Library if they are
# always available, or the import is protected by try...except ImportError.
# The semantics for requirement specs changed incompatibly in setuptools 8,
# which now follows PEP 440. The requirements used in this file must be valid
# under both the old and new semantics. That can be achieved by limiting
# requirement specs to one of the following forms:
#
# * >= X, <= Y where X < Y
# * >= X, != Y, != Z, ... where X < Y < Z...
#
# (In addition, check_requirement in allmydata/__init__.py only supports
# >=, <= and != operators.)
install_requires = [
# we don't need much out of setuptools, but the __init__.py stuff does
# need pkg_resources . We use >=11.3 here because that's what
# "cryptography" requires (which is a sub-dependency of TLS-using
# packages), so there's no point in requiring less.
"setuptools >= 28.8.0",
"zfec >= 1.1.0",
# zope.interface >= 3.6.0 is required for Twisted >= 12.1.0.
# zope.interface 3.6.3 and 3.6.4 are incompatible with Nevow (#1435).
"zope.interface >= 3.6.0, != 3.6.3, != 3.6.4",
# * foolscap < 0.5.1 had a performance bug which spent O(N**2) CPU for
# transferring large mutable files of size N.
# * foolscap < 0.6 is incompatible with Twisted 10.2.0.
# * foolscap 0.6.1 quiets a DeprecationWarning.
# * foolscap < 0.6.3 is incompatible with Twisted 11.1.0 and newer.
# * foolscap 0.8.0 generates 2048-bit RSA-with-SHA-256 signatures,
# rather than 1024-bit RSA-with-MD5. This also allows us to work
# with a FIPS build of OpenSSL.
# * foolscap >= 0.12.3 provides tcp/tor/i2p connection handlers we need,
# and allocate_tcp_port
# * foolscap >= 0.12.5 has ConnectionInfo and ReconnectionInfo
# * foolscap >= 0.12.6 has an i2p.sam_endpoint() that takes kwargs
"foolscap >= 0.12.6",
# cryptography>2.3 because of CVE-2018-10903
'cryptography >= 2.3',
"service-identity", # this is needed to suppress complaints about being unable to verify certs
"characteristic >= 14.0.0", # latest service-identity depends on this version
"pyasn1 >= 0.1.8", # latest pyasn1-modules depends on this version
"pyasn1-modules >= 0.0.5", # service-identity depends on this
# * On Linux we need at least Twisted 10.1.0 for inotify support
# used by the drop-upload frontend.
# * We also need Twisted 10.1.0 for the FTP frontend in order for
# Twisted's FTP server to support asynchronous close.
# * The SFTP frontend depends on Twisted 11.0.0 to fix the SSH server
# rekeying bug <https://twistedmatrix.com/trac/ticket/4395>
# * The FTP frontend depends on Twisted >= 11.1.0 for
# filepath.Permissions
# * Nevow 0.11.1 depends on Twisted >= 13.0.0.
# * The SFTP frontend and manhole depend on the conch extra. However, we
# can't explicitly declare that without an undesirable dependency on gmpy,
# as explained in ticket #2740.
# * Due to a setuptools bug, we need to declare a dependency on the tls
# extra even though we only depend on it via foolscap.
# * Twisted >= 15.1.0 is the first version that provided the [tls] extra.
# * Twisted-16.1.0 fixes https://twistedmatrix.com/trac/ticket/8223,
# which otherwise causes test_system to fail (DirtyReactorError, due to
# leftover timers)
# * Twisted-16.4.0 introduces `python -m twisted.trial` which is needed
# for coverage testing
# * Twisted 16.6.0 drops the undesirable gmpy dependency from the conch
# extra, letting us use that extra instead of trying to duplicate its
# dependencies here. Twisted[conch] >18.7 introduces a dependency on
# bcrypt. It is nice to avoid that if the user ends up with an older
# version of Twisted. That's hard to express except by using the extra.
"Twisted[tls,conch] >= 16.6.0",
# We need Nevow >= 0.11.1 which can be installed using pip.
"Nevow >= 0.11.1",
# * pyOpenSSL is required in order for foolscap to provide secure connections.
# Since foolscap doesn't reliably declare this dependency in a machine-readable
# way, we need to declare a dependency on pyOpenSSL ourselves. Tahoe-LAFS does
# not *directly* depend on pyOpenSSL.
# * pyOpenSSL >= 0.13 is needed in order to avoid
# <https://tahoe-lafs.org/trac/tahoe-lafs/ticket/2005>, and also to check the
# version of OpenSSL that pyOpenSSL is using.
# * pyOpenSSL >= 0.14 is needed in order to avoid
# <https://tahoe-lafs.org/trac/tahoe-lafs/ticket/2474>.
"pyOpenSSL >= 0.14",
"PyYAML >= 3.11",
"six >= 1.10.0",
# for 'tahoe invite' and 'tahoe join'
"magic-wormhole >= 0.10.2",
# Eliot is contemplating dropping Python 2 support. Stick to a version we
# know works on Python 2.7. Because we don't have support for `==`
# constraints, pin 1.7.x this way. I feel pretty safe betting that we
# won't end up stuck on Eliot 1.7.100 with a critical fix only present in
# 1.7.101. And if we do, I know how to deal with that situation.
"eliot >= 1.7.0, <= 1.7.100",
# A great way to define types of values.
"attrs >= 18.2.0",
# WebSocket library for twisted and asyncio
"autobahn >= 19.5.2",
]
# Includes some indirect dependencies, but does not include allmydata.
# These are in the order they should be listed by --version, etc.
package_imports = [
@ -159,11 +51,6 @@ ignorable = [
]
setup_requires = [
'setuptools >= 28.8.0', # for PEP-440 style versions
]
# These are suppressed globally:
global_deprecation_messages = [

View File

@ -17,7 +17,7 @@ from twisted.application import service
from twisted.python.failure import Failure
from foolscap.api import Tub, app_versions
import foolscap.logging.log
from allmydata import get_package_versions, get_package_versions_string
from allmydata.version_checks import get_package_versions, get_package_versions_string
from allmydata.util import log
from allmydata.util import fileutil, iputil
from allmydata.util.assertutil import _assert

View File

@ -180,6 +180,7 @@ class CreateNodeOptions(CreateClientOptions):
optFlags = [
("no-storage", None, "Do not offer storage service to other nodes."),
("storage-dir", None, "Path where the storage will be placed."),
("helper", None, "Enable helper"),
] + TOR_FLAGS + I2P_FLAGS
synopsis = "[options] [NODEDIR]"
@ -334,6 +335,9 @@ def write_client_config(c, config):
c.write("[helper]\n")
c.write("# Shall this node run a helper service that clients can use?\n")
if config.get("helper"):
c.write("enabled = true\n")
else:
c.write("enabled = false\n")
c.write("\n")

View File

@ -6,6 +6,7 @@ from six.moves import StringIO
from twisted.python import usage
from twisted.internet import defer, task, threads
from allmydata.version_checks import get_package_versions_string
from allmydata.scripts.common import get_default_nodedir
from allmydata.scripts import debug, create_node, cli, \
stats_gatherer, admin, magic_folder_cli, tahoe_daemonize, tahoe_start, \
@ -76,13 +77,11 @@ class Options(usage.Options):
]
def opt_version(self):
import allmydata
print(allmydata.get_package_versions_string(debug=True), file=self.stdout)
print(get_package_versions_string(debug=True), file=self.stdout)
self.no_command_needed = True
def opt_version_and_path(self):
import allmydata
print(allmydata.get_package_versions_string(show_paths=True, debug=True), file=self.stdout)
print(get_package_versions_string(show_paths=True, debug=True), file=self.stdout)
self.no_command_needed = True
opt_eliot_destination = opt_eliot_destination
@ -194,7 +193,51 @@ def run():
# doesn't return: calls sys.exit(rc)
task.react(_run_with_reactor)
def _setup_coverage(reactor):
    """
    Arrange for coverage to be collected if the 'coverage' package is
    installed.

    Does nothing unless ``--coverage`` appears in sys.argv; when present,
    the flag is removed from sys.argv as a side effect.

    :raises RuntimeError: if ``--coverage`` was given but the 'coverage'
        package cannot be imported.
    """
    # can we put this _setup_coverage call after we hit
    # argument-parsing?
    if '--coverage' not in sys.argv:
        return
    sys.argv.remove('--coverage')

    try:
        import coverage
    except ImportError:
        # Fixed typo in the user-facing message: 'coveage' -> 'coverage'.
        raise RuntimeError(
            "The 'coverage' package must be installed to use --coverage"
        )

    # this doesn't change the shell's notion of the environment, but
    # it makes the test in process_startup() succeed, which is the
    # goal here.
    os.environ["COVERAGE_PROCESS_START"] = '.coveragerc'

    # maybe-start the global coverage, unless it already got started
    cov = coverage.process_startup()
    if cov is None:
        cov = coverage.process_startup.coverage

    def write_coverage_data():
        """
        Make sure that coverage has stopped; internally, it depends on
        atexit handlers running which doesn't always happen (Twisted's
        shutdown hook also won't run if os._exit() is called, but it
        runs more-often than atexit handlers).
        """
        cov.stop()
        cov.save()
    reactor.addSystemEventTrigger('after', 'shutdown', write_coverage_data)
def _run_with_reactor(reactor):
_setup_coverage(reactor)
d = defer.maybeDeferred(parse_or_exit_with_explanation, sys.argv[1:])
d.addCallback(_maybe_enable_eliot_logging, reactor)
d.addCallback(dispatch)

View File

@ -114,7 +114,7 @@ def check(options):
class FakeTransport(object):
disconnecting = False
class DeepCheckOutput(LineOnlyReceiver):
class DeepCheckOutput(LineOnlyReceiver, object):
delimiter = "\n"
def __init__(self, streamer, options):
self.streamer = streamer
@ -173,7 +173,7 @@ class DeepCheckOutput(LineOnlyReceiver):
print("done: %d objects checked, %d healthy, %d unhealthy" \
% (self.num_objects, self.files_healthy, self.files_unhealthy), file=stdout)
class DeepCheckAndRepairOutput(LineOnlyReceiver):
class DeepCheckAndRepairOutput(LineOnlyReceiver, object):
delimiter = "\n"
def __init__(self, streamer, options):
self.streamer = streamer
@ -271,7 +271,7 @@ class DeepCheckAndRepairOutput(LineOnlyReceiver):
% (self.post_repair_files_healthy,
self.post_repair_files_unhealthy), file=stdout)
class DeepCheckStreamer(LineOnlyReceiver):
class DeepCheckStreamer(LineOnlyReceiver, object):
def deepcheck_location(self, options, where):
stdout = options.stdout

View File

@ -12,7 +12,7 @@ from allmydata.util.encodingutil import quote_output, quote_path
class FakeTransport(object):
disconnecting = False
class ManifestStreamer(LineOnlyReceiver):
class ManifestStreamer(LineOnlyReceiver, object):
delimiter = "\n"
def __init__(self):

View File

@ -1,6 +1,6 @@
from foolscap.logging.incident import IncidentQualifier
class NonQualifier(IncidentQualifier):
class NonQualifier(IncidentQualifier, object):
def check_event(self, ev):
return False

View File

@ -15,7 +15,7 @@ from io import BytesIO
from twisted.internet import protocol, defer
class _EverythingGetter(protocol.ProcessProtocol):
class _EverythingGetter(protocol.ProcessProtocol, object):
def __init__(self, deferred, stdinBytes=None):
self.deferred = deferred

View File

@ -270,4 +270,3 @@ while True:
f.write("directories-written: %d\n" % directories_written)
f.close()
os.rename(stats_out+".tmp", stats_out)

View File

@ -14,7 +14,7 @@ from allmydata.util.encodingutil import get_filesystem_encoding
from foolscap.api import Tub, fireEventually, flushEventualQueue
from twisted.python import log, procutils
class StallableHTTPGetterDiscarder(tw_client.HTTPPageGetter):
class StallableHTTPGetterDiscarder(tw_client.HTTPPageGetter, object):
full_speed_ahead = False
_bytes_so_far = 0
stalled = None
@ -41,7 +41,7 @@ class StallableHTTPGetterDiscarder(tw_client.HTTPPageGetter):
self.stalled = None
return tw_client.HTTPPageGetter.handleResponseEnd(self)
class StallableDiscardingHTTPClientFactory(tw_client.HTTPClientFactory):
class StallableDiscardingHTTPClientFactory(tw_client.HTTPClientFactory, object):
protocol = StallableHTTPGetterDiscarder
def discardPage(url, stall=False, *args, **kwargs):
@ -477,7 +477,7 @@ this file are ignored.
return d
class ClientWatcher(protocol.ProcessProtocol):
class ClientWatcher(protocol.ProcessProtocol, object):
ended = False
def outReceived(self, data):
print("OUT:", data)
@ -504,4 +504,3 @@ if __name__ == '__main__':
# removed each time we run.
sf = SystemFramework("_test_memory", mode)
sf.run()

View File

@ -2,7 +2,7 @@ import os.path
from six.moves import cStringIO as StringIO
import urllib, sys
import re
from mock import patch
from mock import patch, Mock
from twisted.trial import unittest
from twisted.python.monkey import MonkeyPatcher
@ -525,7 +525,8 @@ class CLI(CLITestMixin, unittest.TestCase):
self.failUnlessEqual(exitcode, 1)
def fake_react(f):
d = f("reactor")
reactor = Mock()
d = f(reactor)
# normally this Deferred would be errbacked with SystemExit, but
# since we mocked out sys.exit, it will be fired with None. So
# it's safe to drop it on the floor.

View File

@ -49,7 +49,7 @@ from ..util.eliotutil import (
inline_callbacks,
)
class Expect(Protocol):
class Expect(Protocol, object):
def __init__(self):
self._expectations = []
@ -79,7 +79,7 @@ class Expect(Protocol):
d.errback(reason)
class _ProcessProtocolAdapter(ProcessProtocol):
class _ProcessProtocolAdapter(ProcessProtocol, object):
def __init__(self, fds):
self._fds = fds
@ -218,7 +218,7 @@ class CLINodeAPI(object):
return stopping
class _WaitForEnd(ProcessProtocol):
class _WaitForEnd(ProcessProtocol, object):
def __init__(self, ended):
self._ended = ended

View File

@ -42,6 +42,9 @@ import allmydata.util.log
from allmydata.node import OldConfigError, OldConfigOptionError, UnescapedHashError, _Config, create_node_dir
from allmydata.frontends.auth import NeedRootcapLookupScheme
from allmydata.version_checks import (
get_package_versions_string,
)
from allmydata import client
from allmydata.storage_client import (
StorageClientConfig,
@ -622,7 +625,7 @@ class Basic(testutil.ReallyEqualMixin, testutil.NonASCIIPathMixin, unittest.Test
self.failIfEqual(str(allmydata.__version__), "unknown")
self.failUnless("." in str(allmydata.__full_version__),
"non-numeric version in '%s'" % allmydata.__version__)
all_versions = allmydata.get_package_versions_string()
all_versions = get_package_versions_string()
self.failUnless(allmydata.__appname__ in all_versions)
# also test stats
stats = c.get_stats()

View File

@ -1,29 +0,0 @@
from twisted.trial import unittest
from twisted.python.monkey import MonkeyPatcher
import allmydata
import __builtin__
class T(unittest.TestCase):
    """
    Tests for the dependency-version discovery in ``allmydata``.
    """
    def test_report_import_error(self):
        """
        When a dependency fails to import,
        get_package_versions_and_locations reports the ImportError text in
        that package's comment field and records a matching warning.
        """
        marker = "wheeeyo"
        real_import_func = __import__
        def raiseIE_from_this_particular_func(name, *args):
            # Fail only the 'foolscap' import; delegate everything else to
            # the real __import__.
            if name == "foolscap":
                raise ImportError(marker + " foolscap cant be imported")
            else:
                return real_import_func(name, *args)

        # Let's run as little code as possible with __import__ patched.
        patcher = MonkeyPatcher((__builtin__, '__import__', raiseIE_from_this_particular_func))
        vers_and_locs, errors = patcher.runWithPatches(allmydata.get_package_versions_and_locations)

        foolscap_stuffs = [stuff for (pkg, stuff) in vers_and_locs if pkg == 'foolscap']
        self.failUnlessEqual(len(foolscap_stuffs), 1)
        # The comment slot carries the ImportError details.
        comment = str(foolscap_stuffs[0][2])
        self.failUnlessIn(marker, comment)
        self.failUnlessIn('raiseIE_from_this_particular_func', comment)
        self.failUnless([e for e in errors if "dependency \'foolscap\' could not be imported" in e])

View File

@ -1,31 +1,71 @@
"""
Tests to check for Python2 regressions
"""
from twisted.trial import unittest
from inspect import isclass
from twisted.python.modules import getModule
class PythonTwoRegressions(unittest.TestCase):
"""
A test class to hold Python2 regression tests.
"""
from testtools import (
TestCase,
)
from testtools.matchers import (
Equals,
)
def is_new_style(self, cls):
"""check for being a new-style class"""
# another test could be: issubclass(value, type)
has_class_attr = hasattr(cls, '__class__')
dict_or_slots = '__dict__' in dir(cls) or hasattr(cls, '__slots__')
return has_class_attr and dict_or_slots
# Modules excluded from the class scan below — presumably because they are
# platform-specific (Windows/watchdog) or otherwise unimportable in the test
# environment; TODO confirm against the modules themselves.
BLACKLIST = {
    "allmydata.test.check_load",
    "allmydata.watchdog._watchdog_541",
    "allmydata.watchdog.inotify",
    "allmydata.windows.inotify",
    "allmydata.windows.registry",
    "allmydata.windows.tahoesvc",
}
def is_new_style(cls):
    """
    :return bool: ``True`` if and only if the given class is "new style".
    """
    # New-style classes — and only they — are instances of ``type``.
    return isinstance(cls, type)
def defined_here(cls, where):
    """
    :return bool: ``True`` if and only if the given class was defined in a
        module with the given name.

    :note: Classes can lie about where they are defined.  Try not to do that.
    """
    home = cls.__module__
    return home == where
class PythonTwoRegressions(TestCase):
    """
    Regression tests for Python 2 behaviors related to Python 3 porting.
    """
    def test_new_style_classes(self):
        """
        All classes in Tahoe-LAFS are new-style.
        """
        newstyle = set()
        classic = set()
        for mod in getModule("allmydata").walkModules():
            if mod.name in BLACKLIST:
                continue

            # iterAttributes will only work on loaded modules.  So, load it.
            mod.load()

            for attr in mod.iterAttributes():
                value = attr.load()
                # Note: only classes actually defined in this module are
                # classified; strings and other non-class attributes are
                # filtered out by isclass() rather than aborting the scan.
                if isclass(value) and defined_here(value, mod.name):
                    if is_new_style(value):
                        newstyle.add(value)
                    else:
                        classic.add(value)

        self.assertThat(
            classic,
            Equals(set()),
            "Expected to find no classic classes.",
        )

View File

@ -23,6 +23,7 @@ from allmydata.util import fileutil, pollmixin
from allmydata.util.encodingutil import unicode_to_argv, unicode_to_output, \
get_filesystem_encoding
from allmydata.test import common_util
from allmydata.version_checks import normalized_version
import allmydata
from allmydata import __appname__
from .common_util import parse_cli, run_cli
@ -112,8 +113,6 @@ class BinTahoe(common_util.SignalMixin, unittest.TestCase, RunBinTahoeMixin):
def test_path(self):
d = self.run_bintahoe(["--version-and-path"])
def _cb(res):
from allmydata import normalized_version
out, err, rc_or_sig = res
self.failUnlessEqual(rc_or_sig, 0, str(res))

View File

@ -12,18 +12,15 @@ from allmydata.util import deferredutil
conch_interfaces = None
sftp = None
sftpd = None
have_pycrypto = False
try:
from Crypto import Util
Util # hush pyflakes
have_pycrypto = True
except ImportError:
pass
if have_pycrypto:
try:
from twisted.conch import interfaces as conch_interfaces
from twisted.conch.ssh import filetransfer as sftp
from allmydata.frontends import sftpd
except ImportError as e:
conch_unavailable_reason = e
else:
conch_unavailable_reason = None
from allmydata.interfaces import IDirectoryNode, ExistingChildError, NoSuchChildError
from allmydata.mutable.common import NotWriteableError
@ -38,8 +35,10 @@ from allmydata.test.common_util import ReallyEqualMixin
class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCase):
"""This is a no-network unit test of the SFTPUserHandler and the abstractions it uses."""
if not have_pycrypto:
skip = "SFTP support requires pycrypto, which is not installed"
if conch_unavailable_reason:
skip = "SFTP support requires Twisted Conch which is not available: {}".format(
conch_unavailable_reason,
)
def shouldFailWithSFTPError(self, expected_code, which, callable, *args, **kwargs):
assert isinstance(expected_code, int), repr(expected_code)

View File

@ -1,12 +1,16 @@
import sys
import pkg_resources
from pkg_resources import Requirement
from operator import (
setitem,
)
from twisted.trial import unittest
from allmydata import check_requirement, cross_check, get_package_versions_and_locations, \
extract_openssl_version, PackagingError
from allmydata.version_checks import (
_cross_check as cross_check,
_extract_openssl_version as extract_openssl_version,
_get_package_versions_and_locations as get_package_versions_and_locations,
)
from allmydata.util.verlib import NormalizedVersion as V, \
IrrationalVersionError, \
suggest_normalized_version as suggest
@ -28,43 +32,6 @@ class MockSSL(object):
class CheckRequirement(unittest.TestCase):
def test_check_requirement(self):
self._check_success("setuptools >= 0.6c6", {"setuptools": ("0.6", "", None)})
self._check_success("setuptools >= 0.6c6", {"setuptools": ("0.6", "", "distribute")})
self._check_success("pycrypto >= 2.1.0, != 2.2, != 2.4", {"pycrypto": ("2.1.0", "", None)})
self._check_success("pycrypto >= 2.1.0, != 2.2, != 2.4", {"pycrypto": ("2.3.0", "", None)})
self._check_success("pycrypto >= 2.1.0, != 2.2, != 2.4", {"pycrypto": ("2.4.1", "", None)})
self._check_success("Twisted >= 11.0.0, <= 12.2.0", {"Twisted": ("11.0.0", "", None)})
self._check_success("Twisted >= 11.0.0, <= 12.2.0", {"Twisted": ("12.2.0", "", None)})
self._check_success("zope.interface", {"zope.interface": ("unknown", "", None)})
self._check_success("mock", {"mock": ("0.6.0", "", None)})
self._check_success("foo >= 1.0", {"foo": ("1.0", "", None), "bar": ("2.0", "", None)})
self._check_success("foolscap[secure_connections] >= 0.6.0", {"foolscap": ("0.7.0", "", None)})
self._check_failure("foolscap[secure_connections] >= 0.6.0", {"foolscap": ("0.5.1", "", None)})
self._check_failure("pycrypto >= 2.1.0, != 2.2, != 2.4", {"pycrypto": ("2.2.0", "", None)})
self._check_failure("pycrypto >= 2.1.0, != 2.2, != 2.4", {"pycrypto": ("2.0.0", "", None)})
self._check_failure("Twisted >= 11.0.0, <= 12.2.0", {"Twisted": ("10.2.0", "", None)})
self._check_failure("Twisted >= 11.0.0, <= 12.2.0", {"Twisted": ("13.0.0", "", None)})
self._check_failure("foo >= 1.0", {})
self.failUnlessRaises(ImportError, check_requirement,
"foo >= 1.0", {"foo": (None, None, "foomodule")})
def _check_success(self, req, vers_and_locs):
check_requirement(req, vers_and_locs)
for pkg, ver in vers_and_locs.items():
self.failUnless(ver[0] in Requirement.parse(req), str((ver, req)))
def _check_failure(self, req, vers_and_locs):
self.failUnlessRaises(PackagingError, check_requirement, req, vers_and_locs)
for pkg, ver in vers_and_locs.items():
self.failIf(ver[0] in Requirement.parse(req), str((ver, req)))
def test_packages_from_pkg_resources(self):
if hasattr(sys, 'frozen'):
raise unittest.SkipTest("This test doesn't apply to frozen builds.")
@ -270,3 +237,26 @@ class VersionTestCase(unittest.TestCase):
# zetuptoolz
self.failUnlessEqual(suggest('0.6c16dev3'), '0.6c16.dev3')
class T(unittest.TestCase):
def test_report_import_error(self):
"""
get_package_versions_and_locations reports a dependency if a dependency
cannot be imported.
"""
# Make sure we don't leave the system in a bad state.
self.addCleanup(
lambda foolscap=sys.modules["foolscap"]: setitem(
sys.modules,
"foolscap",
foolscap,
),
)
# Make it look like Foolscap isn't installed.
sys.modules["foolscap"] = None
vers_and_locs, errors = get_package_versions_and_locations()
foolscap_stuffs = [stuff for (pkg, stuff) in vers_and_locs if pkg == 'foolscap']
self.failUnlessEqual(len(foolscap_stuffs), 1)
self.failUnless([e for e in errors if "dependency \'foolscap\' could not be imported" in e])

View File

@ -1,7 +1,7 @@
import exceptions, os
from repr import Repr
class BetterRepr(Repr):
class BetterRepr(Repr, object):
def __init__(self):
Repr.__init__(self)

View File

@ -19,7 +19,7 @@ class SingleFileError(Exception):
"""You are not permitted to add a job to a full pipeline."""
class ExpandableDeferredList(defer.Deferred):
class ExpandableDeferredList(defer.Deferred, object):
# like DeferredList(fireOnOneErrback=True) with a built-in
# gatherResults(), but you can add new Deferreds until you close it. This
# gives you a chance to add don't-complain-about-unhandled-error errbacks

View File

@ -0,0 +1,389 @@
"""
Produce reports about the versions of Python software in use by Tahoe-LAFS
for debugging and auditing purposes.
"""
__all__ = [
"PackagingError",
"get_package_versions",
"get_package_versions_string",
"normalized_version",
]
import os, platform, re, subprocess, sys, traceback, pkg_resources
import six
from . import (
__appname__,
full_version,
branch,
)
from .util import (
verlib,
)
# The declared install-time requirements of this application, read from the
# installed distribution's metadata; used below for pkg_resources lookups
# and cross-checks.
_INSTALL_REQUIRES = list(
    str(req)
    for req
    in pkg_resources.get_distribution(__appname__).requires()
)
class PackagingError(EnvironmentError):
    """
    Raised when the packaging of Tahoe-LAFS or one of its dependencies is
    broken in a way that makes it unsafe to continue running.
    """
def get_package_versions():
    """
    Return a dict mapping package name to its recorded version string.

    The data comes from the module-level ``_vers_and_locs_list`` computed at
    import time; location and comment fields are discarded.
    """
    # Dict comprehension instead of dict() around a throwaway list (C404).
    return {name: version for name, (version, _location, _comment) in _vers_and_locs_list}
def get_package_versions_string(show_paths=False, debug=False):
    """
    Render the recorded package versions as a human-readable string.

    :param bool show_paths: if true, append each package's location.
    :param bool debug: if true, include extra diagnostics with any
        cross-check error report.
    """
    lines = []
    for name, (version, location, comment) in _vers_and_locs_list:
        entry = str(name) + ": " + str(version)
        if comment:
            entry += " [%s]" % str(comment)
        if show_paths:
            entry += " (%s)" % str(location)
        lines.append(entry)
    output = "\n".join(lines) + "\n"
    # Surface any packaging inconsistencies discovered at import time.
    if _cross_check_errors:
        output += _get_error_string(_cross_check_errors, debug=debug)
    return output
_distributor_id_cmdline_re = re.compile("(?:Distributor ID:)\s*(.*)", re.I)
_release_cmdline_re = re.compile("(?:Release:)\s*(.*)", re.I)
_distributor_id_file_re = re.compile("(?:DISTRIB_ID\s*=)\s*(.*)", re.I)
_release_file_re = re.compile("(?:DISTRIB_RELEASE\s*=)\s*(.*)", re.I)
_distname = None
_version = None
def normalized_version(verstr, what=None):
    """
    Parse ``verstr`` into a comparable ``verlib.NormalizedVersion``.

    :param verstr: the version string to normalize.
    :param what: optional description of the version's origin, used in the
        error message.

    :raises verlib.IrrationalVersionError: if the version is hopeless.
    :raises PackagingError: for any other parse failure (re-raised with the
        original traceback via ``six.reraise``).
    """
    try:
        suggested = verlib.suggest_normalized_version(verstr) or verstr
        return verlib.NormalizedVersion(suggested)
    except verlib.IrrationalVersionError:
        raise
    except Exception:
        # Was ``except StandardError:``, which exists only on Python 2 and
        # would raise NameError under Python 3; Exception keeps the intent.
        cls, value, trace = sys.exc_info()
        new_exc = PackagingError("could not parse %s due to %s: %s"
                                 % (what or repr(verstr), cls.__name__, value))
        six.reraise(cls, new_exc, trace)
def _get_error_string(errors, debug=False):
msg = "\n%s\n" % ("\n".join(errors),)
if debug:
msg += (
"\n"
"For debugging purposes, the PYTHONPATH was\n"
" %r\n"
"install_requires was\n"
" %r\n"
"sys.path after importing pkg_resources was\n"
" %s\n"
% (
os.environ.get('PYTHONPATH'),
_INSTALL_REQUIRES,
(os.pathsep+"\n ").join(sys.path),
)
)
return msg
def _cross_check(pkg_resources_vers_and_locs, imported_vers_and_locs_list):
    """
    Compare the version/location pkg_resources reports for each dependency
    against what was observed by actually importing it.

    :param pkg_resources_vers_and_locs: dict mapping lowercased project
        names to ``(version, location)`` pairs from pkg_resources.
    :param imported_vers_and_locs_list: list of ``(name, (version, location,
        comment))`` tuples produced by importing each package.

    :return list: warning strings for every failed cross-check; empty if
        everything agrees.
    """
    # Deferred, implicit-relative import (Python 2 style); _auto_deps lists
    # packages whose modules are known not to expose a version attribute.
    from _auto_deps import not_import_versionable
    errors = []
    # These entries are synthesized (not real distributions), so
    # pkg_resources knows nothing about them.
    not_pkg_resourceable = ['python', 'platform', __appname__.lower(), 'openssl']
    for name, (imp_ver, imp_loc, imp_comment) in imported_vers_and_locs_list:
        name = name.lower()
        if name not in not_pkg_resourceable:
            if name not in pkg_resources_vers_and_locs:
                # "distribute" historically masqueraded as setuptools: a
                # version mismatch is expected there, a location mismatch
                # is not.
                if name == "setuptools" and "distribute" in pkg_resources_vers_and_locs:
                    pr_ver, pr_loc = pkg_resources_vers_and_locs["distribute"]
                    if not (os.path.normpath(os.path.realpath(pr_loc)) == os.path.normpath(os.path.realpath(imp_loc))
                            and imp_comment == "distribute"):
                        errors.append("Warning: dependency 'setuptools' found to be version %r of 'distribute' from %r "
                                      "by pkg_resources, but 'import setuptools' gave version %r [%s] from %r. "
                                      "A version mismatch is expected, but a location mismatch is not."
                                      % (pr_ver, pr_loc, imp_ver, imp_comment or 'probably *not* distribute', imp_loc))
                else:
                    errors.append("Warning: dependency %r (version %r imported from %r) was not found by pkg_resources."
                                  % (name, imp_ver, imp_loc))
                continue
            pr_ver, pr_loc = pkg_resources_vers_and_locs[name]
            # (None, None) signals that the import itself failed; imp_comment
            # then carries the exception trace info.
            if imp_ver is None and imp_loc is None:
                errors.append("Warning: dependency %r could not be imported. pkg_resources thought it should be possible "
                              "to import version %r from %r.\nThe exception trace was %r."
                              % (name, pr_ver, pr_loc, imp_comment))
                continue
            # If the pkg_resources version is identical to the imported version, don't attempt
            # to normalize them, since it is unnecessary and may fail (ticket #2499).
            if imp_ver != 'unknown' and pr_ver == imp_ver:
                continue
            try:
                pr_normver = normalized_version(pr_ver)
            except verlib.IrrationalVersionError:
                # Hopelessly unparseable; nothing useful to compare.
                continue
            except Exception as e:
                errors.append("Warning: version number %r found for dependency %r by pkg_resources could not be parsed. "
                              "The version found by import was %r from %r. "
                              "pkg_resources thought it should be found at %r. "
                              "The exception was %s: %s"
                              % (pr_ver, name, imp_ver, imp_loc, pr_loc, e.__class__.__name__, e))
            else:
                if imp_ver == 'unknown':
                    if name not in not_import_versionable:
                        errors.append("Warning: unexpectedly could not find a version number for dependency %r imported from %r. "
                                      "pkg_resources thought it should be version %r at %r."
                                      % (name, imp_loc, pr_ver, pr_loc))
                else:
                    try:
                        imp_normver = normalized_version(imp_ver)
                    except verlib.IrrationalVersionError:
                        continue
                    except Exception as e:
                        errors.append("Warning: version number %r found for dependency %r (imported from %r) could not be parsed. "
                                      "pkg_resources thought it should be version %r at %r. "
                                      "The exception was %s: %s"
                                      % (imp_ver, name, imp_loc, pr_ver, pr_loc, e.__class__.__name__, e))
                    else:
                        # Only complain about a version mismatch when the two
                        # copies really live in different places.
                        if pr_ver == 'unknown' or (pr_normver != imp_normver):
                            if not os.path.normpath(os.path.realpath(pr_loc)) == os.path.normpath(os.path.realpath(imp_loc)):
                                errors.append("Warning: dependency %r found to have version number %r (normalized to %r, from %r) "
                                              "by pkg_resources, but version %r (normalized to %r, from %r) by import."
                                              % (name, pr_ver, str(pr_normver), pr_loc, imp_ver, str(imp_normver), imp_loc))
    return errors
def _get_openssl_version():
try:
from OpenSSL import SSL
return _extract_openssl_version(SSL)
except Exception:
return ("unknown", None, None)
def _extract_openssl_version(ssl_module):
openssl_version = ssl_module.SSLeay_version(ssl_module.SSLEAY_VERSION)
if openssl_version.startswith('OpenSSL '):
openssl_version = openssl_version[8 :]
(version, _, comment) = openssl_version.partition(' ')
try:
openssl_cflags = ssl_module.SSLeay_version(ssl_module.SSLEAY_CFLAGS)
if '-DOPENSSL_NO_HEARTBEATS' in openssl_cflags.split(' '):
comment += ", no heartbeats"
except Exception:
pass
return (version, None, comment if comment else None)
def _get_linux_distro():
    """
    Try to determine the name and version of the Linux distribution.

    First, parse "/etc/lsb-release" if it exists; if it contains both a
    "DISTRIB_ID=" line and a "DISTRIB_RELEASE=" line, return the strings
    parsed from that file.

    If that doesn't work, invoke platform.dist().

    If that doesn't work, try to execute "lsb_release", as standardized in
    2001:

        http://refspecs.freestandards.org/LSB_1.0.0/gLSB/lsbrelease.html

    (current version of the standard:

        http://refspecs.freestandards.org/LSB_3.2.0/LSB-Core-generic/LSB-Core-generic/lsbrelease.html

    ) and return the strings that lsb_release emitted.

    :return: a ``(distname, version)`` tuple.  ``distname`` is what LSB
        calls a "distributor id", e.g. "Ubuntu"; ``version`` is what LSB
        calls a "release", e.g. "8.04".

    A version of this has been submitted to python as a patch for the
    standard library module "platform":

        http://bugs.python.org/issue3937
    """
    # Results are cached in module globals; recompute only while unknown.
    global _distname,_version
    if _distname and _version:
        return (_distname, _version)
    try:
        # NOTE(review): this file handle is never closed.
        etclsbrel = open("/etc/lsb-release", "rU")
        for line in etclsbrel:
            m = _distributor_id_file_re.search(line)
            if m:
                _distname = m.group(1).strip()
                if _distname and _version:
                    return (_distname, _version)
            m = _release_file_re.search(line)
            if m:
                _version = m.group(1).strip()
                if _distname and _version:
                    return (_distname, _version)
    except EnvironmentError:
        pass
    # NOTE(review): platform.dist() was removed in Python 3.8; this module
    # appears to target Python 2, where it still exists.
    (_distname, _version) = platform.dist()[:2]
    if _distname and _version:
        return (_distname, _version)
    if os.path.isfile("/usr/bin/lsb_release") or os.path.isfile("/bin/lsb_release"):
        try:
            p = subprocess.Popen(["lsb_release", "--all"], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            rc = p.wait()
            if rc == 0:
                for line in p.stdout.readlines():
                    m = _distributor_id_cmdline_re.search(line)
                    if m:
                        _distname = m.group(1).strip()
                        if _distname and _version:
                            return (_distname, _version)
                # NOTE(review): readlines() above already drained stdout, so
                # this read() presumably returns "" -- confirm whether the
                # Release line is really meant to be searched per-line too.
                m = _release_cmdline_re.search(p.stdout.read())
                if m:
                    _version = m.group(1).strip()
                    if _distname and _version:
                        return (_distname, _version)
        except EnvironmentError:
            pass
    if os.path.exists("/etc/arch-release"):
        # Arch has no lsb-release metadata by default.
        return ("Arch_Linux", "")
    return (_distname,_version)
def _get_platform():
    """
    Our version of platform.platform(), telling us both less and more than
    the Python Standard Library's version does.

    We omit details such as the Linux kernel version number, but we add a
    more detailed and correct rendition of the Linux distribution and
    distribution-version.
    """
    if "linux" not in platform.system().lower():
        return platform.platform()
    pieces = [
        platform.system(),
        "_".join(_get_linux_distro()),
        platform.machine(),
        "_".join([arch for arch in platform.architecture() if arch]),
    ]
    return "-".join(pieces)
def _get_package_versions_and_locations():
    """
    Import each interesting package and record its version and location.

    :return: a ``(packages, cross_check_errors)`` pair where ``packages`` is
        a list of ``(name, (version, location, comment))`` tuples and
        ``cross_check_errors`` is the warning list from ``_cross_check``.
    """
    import warnings
    # Implicit relative import (Python 2 style); _auto_deps describes which
    # modules to import and which warnings to suppress while doing so.
    from _auto_deps import package_imports, global_deprecation_messages, deprecation_messages, \
        runtime_warning_messages, warning_imports, ignorable
    def package_dir(srcfile):
        # The package directory is two levels above the module file
        # (<pkgdir>/<package>/<module>.py).
        return os.path.dirname(os.path.dirname(os.path.normcase(os.path.realpath(srcfile))))
    # pkg_resources.require returns the distribution that pkg_resources attempted to put
    # on sys.path, which can differ from the one that we actually import due to #1258,
    # or any other bug that causes sys.path to be set up incorrectly. Therefore we
    # must import the packages in order to check their versions and paths.
    # This is to suppress all UserWarnings and various DeprecationWarnings and RuntimeWarnings
    # (listed in _auto_deps.py).
    warnings.filterwarnings("ignore", category=UserWarning, append=True)
    for msg in global_deprecation_messages + deprecation_messages:
        warnings.filterwarnings("ignore", category=DeprecationWarning, message=msg, append=True)
    for msg in runtime_warning_messages:
        warnings.filterwarnings("ignore", category=RuntimeWarning, message=msg, append=True)
    try:
        for modulename in warning_imports:
            try:
                __import__(modulename)
            except ImportError:
                pass
    finally:
        # Leave suppressions for UserWarnings and global_deprecation_messages active.
        # (appended filters are popped in reverse; relies on append=True above)
        for _ in runtime_warning_messages + deprecation_messages:
            warnings.filters.pop()
    packages = []
    pkg_resources_vers_and_locs = dict()
    # Frozen (e.g. py2exe) builds have no pkg_resources metadata to consult.
    if not hasattr(sys, 'frozen'):
        pkg_resources_vers_and_locs = {
            p.project_name.lower(): (str(p.version), p.location)
            for p
            in pkg_resources.require(_INSTALL_REQUIRES)
        }
    def get_version(module):
        # Try the conventional version attributes before giving up.
        if hasattr(module, '__version__'):
            return str(getattr(module, '__version__'))
        elif hasattr(module, 'version'):
            ver = getattr(module, 'version')
            if isinstance(ver, tuple):
                return '.'.join(map(str, ver))
            else:
                return str(ver)
        else:
            return 'unknown'
    for pkgname, modulename in [(__appname__, 'allmydata')] + package_imports:
        if modulename:
            try:
                __import__(modulename)
                module = sys.modules[modulename]
            except ImportError:
                # Record the failure; _cross_check reports (None, None) pairs.
                etype, emsg, etrace = sys.exc_info()
                trace_info = (etype, str(emsg), ([None] + traceback.extract_tb(etrace))[-1])
                packages.append( (pkgname, (None, None, trace_info)) )
            else:
                comment = None
                if pkgname == __appname__:
                    comment = "%s: %s" % (branch, full_version)
                elif pkgname == 'setuptools' and hasattr(module, '_distribute'):
                    # distribute does not report its version in any module variables
                    comment = 'distribute'
                ver = get_version(module)
                loc = package_dir(module.__file__)
                # Fall back to the pkg_resources version when the module does
                # not expose one and both point at the same location.
                if ver == "unknown" and pkgname in pkg_resources_vers_and_locs:
                    (pr_ver, pr_loc) = pkg_resources_vers_and_locs[pkgname]
                    if loc == os.path.normcase(os.path.realpath(pr_loc)):
                        ver = pr_ver
                packages.append( (pkgname, (ver, loc, comment)) )
        elif pkgname == 'python':
            packages.append( (pkgname, (platform.python_version(), sys.executable, None)) )
        elif pkgname == 'platform':
            packages.append( (pkgname, (_get_platform(), None, None)) )
        elif pkgname == 'OpenSSL':
            packages.append( (pkgname, _get_openssl_version()) )
    cross_check_errors = []
    if len(pkg_resources_vers_and_locs) > 0:
        imported_packages = set([p.lower() for (p, _) in packages])
        extra_packages = []
        # NOTE: .iteritems() is Python 2 only.
        for pr_name, (pr_ver, pr_loc) in pkg_resources_vers_and_locs.iteritems():
            if pr_name not in imported_packages and pr_name not in ignorable:
                extra_packages.append( (pr_name, (pr_ver, pr_loc, "according to pkg_resources")) )
        cross_check_errors = _cross_check(pkg_resources_vers_and_locs, packages)
        packages += extra_packages
    return packages, cross_check_errors
_vers_and_locs_list, _cross_check_errors = _get_package_versions_and_locations()

View File

@ -322,7 +322,7 @@ def humanize_failure(f):
return (f.getTraceback(), http.REQUEST_ENTITY_TOO_LARGE)
return (str(f), None)
class MyExceptionHandler(appserver.DefaultExceptionHandler):
class MyExceptionHandler(appserver.DefaultExceptionHandler, object):
def simple(self, ctx, text, code=http.BAD_REQUEST):
req = IRequest(ctx)
req.setResponseCode(code)
@ -461,7 +461,7 @@ class MultiFormatPage(Page):
class TokenOnlyWebApi(resource.Resource):
class TokenOnlyWebApi(resource.Resource, object):
"""
I provide a rend.Page implementation that only accepts POST calls,
and only if they have a 'token=' arg with the correct

View File

@ -5,7 +5,7 @@ from nevow.static import File as nevow_File
from nevow.util import resource_filename
import allmydata
import json
from allmydata import get_package_versions_string
from allmydata.version_checks import get_package_versions_string
from allmydata.util import idlib
from allmydata.web.common import (
getxmlfile,

View File

@ -7,7 +7,7 @@ from nevow.static import File as nevow_File # TODO: merge with static.File?
from nevow.util import resource_filename
import allmydata # to display import path
from allmydata import get_package_versions_string
from allmydata.version_checks import get_package_versions_string
from allmydata.util import log
from allmydata.interfaces import IFileNode
from allmydata.web import filenode, directory, unlinked, status, operations

View File

@ -24,7 +24,7 @@ from .web.storage_plugins import (
# surgery may induce a dependency upon a particular version of twisted.web
parse_qs = http.parse_qs
class MyRequest(appserver.NevowRequest):
class MyRequest(appserver.NevowRequest, object):
fields = None
_tahoe_request_had_error = None

11
tox.ini
View File

@ -49,9 +49,13 @@ commands =
trial {env:TAHOE_LAFS_TRIAL_ARGS:--rterrors} {posargs:allmydata}
[testenv:integration]
setenv =
COVERAGE_PROCESS_START=.coveragerc
commands =
# NOTE: 'run with "py.test --keep-tempdir -s -v integration/" to debug failures'
py.test -v integration/
py.test --coverage -v integration/
coverage combine
coverage report
[testenv:coverage]
# coverage (with --branch) takes about 65% longer to run
@ -64,6 +68,7 @@ commands =
pip freeze
tahoe --version
coverage run --branch -m twisted.trial {env:TAHOE_LAFS_TRIAL_ARGS:--rterrors --reporter=timing} {posargs:allmydata}
coverage combine
coverage xml
[testenv:codechecks]
@ -97,7 +102,7 @@ commands =
setenv =
PYTHONWARNINGS=default::DeprecationWarning
commands =
python misc/build_helpers/run-deprecations.py --warnings={env:TAHOE_LAFS_WARNINGS_LOG:_trial_temp/deprecation-warnings.log} trial {env:TAHOE_LAFS_TRIAL_ARGS:--rterrors} {posargs:allmydata}
python misc/build_helpers/run-deprecations.py --package allmydata --warnings={env:TAHOE_LAFS_WARNINGS_LOG:_trial_temp/deprecation-warnings.log} trial {env:TAHOE_LAFS_TRIAL_ARGS:--rterrors} {posargs:allmydata}
[testenv:upcoming-deprecations]
setenv =
@ -109,7 +114,7 @@ deps =
git+https://github.com/warner/foolscap
commands =
flogtool --version
python misc/build_helpers/run-deprecations.py --warnings={env:TAHOE_LAFS_WARNINGS_LOG:_trial_temp/deprecation-warnings.log} trial {env:TAHOE_LAFS_TRIAL_ARGS:--rterrors} {posargs:allmydata}
python misc/build_helpers/run-deprecations.py --package allmydata --warnings={env:TAHOE_LAFS_WARNINGS_LOG:_trial_temp/deprecation-warnings.log} trial {env:TAHOE_LAFS_TRIAL_ARGS:--rterrors} {posargs:allmydata}
[testenv:checkmemory]
commands =