Merge 'origin/master' into 3254.status-status-nevow-to-twisted-web

This commit is contained in:
Sajith Sasidharan 2020-07-18 07:53:36 -04:00
commit 0f835e94f5
40 changed files with 1274 additions and 801 deletions

3
.gitignore vendored
View File

@ -44,6 +44,9 @@ zope.interface-*.egg
/docs/_build/ /docs/_build/
/coverage.xml /coverage.xml
/.hypothesis/ /.hypothesis/
/eliot.log
/misc/python3/results.xml
/misc/python3/results.subunit2
# This is the plaintext of the private environment needed for some CircleCI # This is the plaintext of the private environment needed for some CircleCI
# operations. It's never supposed to be checked in. # operations. It's never supposed to be checked in.

View File

@ -1,7 +1,7 @@
sudo: false sudo: false
language: python language: python
cache: pip cache: pip
dist: trusty dist: xenial
before_cache: before_cache:
- rm -f $HOME/.cache/pip/log/debug.log - rm -f $HOME/.cache/pip/log/debug.log
git: git:
@ -16,19 +16,15 @@ install:
- if [ "${TRAVIS_OS_NAME}" = "osx" ]; then export PATH=$HOME/Library/Python/2.7/bin:$PATH; fi - if [ "${TRAVIS_OS_NAME}" = "osx" ]; then export PATH=$HOME/Library/Python/2.7/bin:$PATH; fi
- if [ "${TRAVIS_OS_NAME}" = "osx" ]; then wget https://bootstrap.pypa.io/get-pip.py && sudo python ./get-pip.py; fi - if [ "${TRAVIS_OS_NAME}" = "osx" ]; then wget https://bootstrap.pypa.io/get-pip.py && sudo python ./get-pip.py; fi
- pip list - pip list
- if [ "${TRAVIS_OS_NAME}" = "osx" ]; then pip install --user --upgrade codecov tox setuptools; fi - if [ "${TRAVIS_OS_NAME}" = "osx" ]; then pip install --user --upgrade codecov tox setuptools virtualenv; fi
- if [ "${TRAVIS_OS_NAME}" = "linux" ]; then pip install --upgrade codecov tox setuptools; fi - if [ "${TRAVIS_OS_NAME}" = "linux" ]; then pip install --upgrade codecov tox setuptools virtualenv; fi
- echo $PATH; which python; which pip; which tox - echo $PATH; which python; which pip; which tox
- python misc/build_helpers/show-tool-versions.py - python misc/build_helpers/show-tool-versions.py
script: script:
- | - |
set -eo pipefail set -eo pipefail
if [ "${T}" = "py35" ]; then
python3 -m compileall -f -x tahoe-depgraph.py .
else
tox -e ${T} tox -e ${T}
fi
# To verify that the resultant PyInstaller-generated binary executes # To verify that the resultant PyInstaller-generated binary executes
# cleanly (i.e., that it terminates with an exit code of 0 and isn't # cleanly (i.e., that it terminates with an exit code of 0 and isn't
# failing due to import/packaging-related errors, etc.). # failing due to import/packaging-related errors, etc.).
@ -69,9 +65,8 @@ matrix:
python: '2.7' python: '2.7'
env: T=pyinstaller LANG=en_US.UTF-8 env: T=pyinstaller LANG=en_US.UTF-8
language: generic # "python" is not available on OS-X language: generic # "python" is not available on OS-X
# this is a "lint" job that checks for python3 compatibility
- os: linux - os: linux
python: '3.5' python: '3.6'
env: T=py35 env: T=py36
fast_finish: true fast_finish: true

View File

@ -0,0 +1,13 @@
allmydata.test.mutable.test_exceptions.Exceptions.test_repr
allmydata.test.test_deferredutil.DeferredUtilTests.test_failure
allmydata.test.test_deferredutil.DeferredUtilTests.test_gather_results
allmydata.test.test_deferredutil.DeferredUtilTests.test_success
allmydata.test.test_deferredutil.DeferredUtilTests.test_wait_for_delayed_calls
allmydata.test.test_humanreadable.HumanReadable.test_repr
allmydata.test.test_observer.Observer.test_lazy_oneshot
allmydata.test.test_observer.Observer.test_observerlist
allmydata.test.test_observer.Observer.test_oneshot
allmydata.test.test_observer.Observer.test_oneshot_fireagain
allmydata.test.test_python3.Python3PortingEffortTests.test_finished_porting
allmydata.test.test_python3.Python3PortingEffortTests.test_ported_modules_distinct
allmydata.test.test_python3.Python3PortingEffortTests.test_ported_modules_exist

409
misc/python3/ratchet.py Executable file
View File

@ -0,0 +1,409 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
'''Ratchet up passing tests, or ratchet down failing tests.
Usage:
ratchet.py <"up" or "down"> <junitxml file path> <tracking file path>
This script helps when you expect a large test suite to fail spectacularly in
some environment, and you want to gradually improve the situation with minimal
impact to forward development of the same codebase for other environments. The
initial and primary usecase is porting from Python 2 to Python 3.
The idea is to emit JUnit XML from your test runner, and then invoke ratchet.py
to consume this XML output and operate on a so-called "tracking" file. When
ratcheting up passing tests, the tracking file will contain a list of tests,
one per line, that passed. When ratcheting down, the tracking file contains a
list of failing tests. On each subsequent run, ratchet.py will compare the
prior results in the tracking file with the new results in the XML, and will
report on both welcome and unwelcome changes. It will modify the tracking file
in the case of welcome changes, and therein lies the ratcheting.
The exit codes are:
0 - no changes observed
1 - changes observed, whether welcome or unwelcome
2 - invocation error
If <junitxml file path> does not exist, you'll get a FileNotFoundError:
>>> _test('up', None, None) # doctest: +ELLIPSIS
Traceback (most recent call last):
...
FileNotFoundError: ...
If <tracking file path> does not exist, that's fine:
>>> _test('up', '1', None)
Some tests not required to pass did:
c0.t
Conveniently, they have been added to `<tracking_path>` for you. Perhaps commit that?
Eep! 0 test(s) were required to pass, but instead 1 did. 🐭
Same if you're ratcheting down:
>>> _test('down', '1', None)
All and only tests expected to fail did. 💃
If the test run has the same output as last time, it's all good:
>>> _test('up', '01001110', '01001110')
All and only tests required to pass did. 💃
>>> _test('down', '01001110', '10110001')
All and only tests expected to fail did. 💃
If there's a welcome change, that's noted:
>>> _test('up', '0101', '0100')
Some tests not required to pass did:
c3.t
Conveniently, they have been added to `<tracking_path>` for you. Perhaps commit that?
Eep! 1 test(s) were required to pass, but instead 2 did. 🐭
>>> _test('down', '0011', '1110')
Some tests expected to fail didn't:
c2.t
Conveniently, they have been removed from `<tracking_path>` for you. Perhaps commit that?
Eep! 3 test(s) were expected to fail, but instead 2 did. 🐭
And if there is an unwelcome change, that is noted as well:
>>> _test('up', '1101', '1111')
Some tests required to pass didn't:
c2.t
Eep! 4 test(s) were required to pass, but instead 3 did. 🐭
>>> _test('down', '0000', '1101')
Some tests not expected to fail did:
c2.t
Eep! 3 test(s) were expected to fail, but instead 4 did. 🐭
And if there are both welcome and unwelcome changes, they are both noted:
>>> _test('up', '1101', '1011')
Some tests not required to pass did:
c1.t
Conveniently, they have been added to `<tracking_path>` for you. Perhaps commit that?
Some tests required to pass didn't:
c2.t
Eep! 3 test(s) were required to pass, but instead 3 did. 🐭
>>> _test('down', '0100', '1100')
Some tests not expected to fail did:
c2.t
c3.t
Some tests expected to fail didn't:
c1.t
Conveniently, they have been removed from `<tracking_path>` for you. Perhaps commit that?
Eep! 2 test(s) were expected to fail, but instead 3 did. 🐭
To test ratchet.py itself:
python3 -m doctest ratchet.py
'''
from __future__ import absolute_import, division, print_function, unicode_literals
import io
import os
import re
import sys
import tempfile
import xml.etree.ElementTree as Etree
class JUnitXMLFile(object):
    '''Represent a file containing test results in JUnit XML format.

    >>> eg = _mktemp_junitxml('0100111')
    >>> results = JUnitXMLFile(eg.name).parse()
    >>> results.failed
    ['c0.t', 'c2.t', 'c3.t']
    >>> results.passed
    ['c1.t', 'c4.t', 'c5.t', 'c6.t']
    '''
    def __init__(self, filepath):
        self.filepath = filepath
        self.failed = []
        self.failed_aggregates = {}
        self.stderr_output = []
        self.passed = []
        self._tree = None

    def parse(self):
        # Parsing twice would double-count every testcase, so refuse.
        if self._tree:
            raise RuntimeError('already parsed')
        self._tree = Etree.parse(self.filepath)
        for testcase in self._tree.findall('testcase'):
            self.process_testcase(testcase)
        return self

    def process_testcase(self, case):
        key = self.case_key(case)
        # Look at the children, but throw away any stderr output.
        nonpassing = [child for child in case if child.tag != 'system-err']
        if len(nonpassing) > 1:
            raise RuntimeError(f'multiple results for {key}: {nonpassing}')
        if nonpassing:
            # Exactly one non-stderr child means some kind of failure.
            result = nonpassing.pop()
            self.failed.append(key)
            message = result.get('message')
            self.failed_aggregates.setdefault(message, []).append(key)
        else:
            # No children at all (besides stderr) means the case passed.
            self.passed.append(key)

    @staticmethod
    def case_key(case):
        return f'{case.get("classname")}.{case.get("name")}'

    def report(self, details=False):
        # Show failure messages with the most-affected tests first.
        by_count = sorted(
            self.failed_aggregates.items(),
            key=lambda item: len(item[1]),
            reverse=True,
        )
        for message, cases in by_count:
            print(f'# {message}')
            for name in cases:
                print(f' - {name}')
def load_previous_results(txt):
    '''Read the tracking file at path *txt* and return the set of test names
    it lists, one per line, skipping blank lines and `#` comment lines. A
    missing file yields the empty set (the first-run case).'''
    try:
        # Use a context manager so the handle is closed promptly instead of
        # leaking until garbage collection (the original never closed it).
        with open(txt) as f:
            previous_results = f.read()
    except FileNotFoundError:
        previous_results = ''
    parsed = set()
    for line in previous_results.splitlines():
        if not line or line.startswith('#'):
            continue
        parsed.add(line)
    return parsed
def print_tests(tests):
    '''Print each test name in *tests* on its own line, sorted, indented by
    two spaces.'''
    for name in sorted(tests):
        print(' ', name)
def ratchet_up_passing(tracking_path, tests):
    '''Merge the newly passing *tests* into the tracking file at
    *tracking_path*, keeping everything already listed there (the ratchet
    only ever tightens). Creates the file if it does not exist.'''
    try:
        # Context managers close both handles; the original leaked them.
        with open(tracking_path, 'r') as f:
            old = set(f)
    except FileNotFoundError:
        old = set()
    new = set(t + '\n' for t in tests)
    with open(tracking_path, 'w') as f:
        f.writelines(sorted(old | new))
def ratchet_down_failing(tracking_path, tests):
    '''Rewrite the tracking file at *tracking_path* to contain exactly the
    currently failing *tests*, sorted one per line. Unlike the "up"
    direction, this replaces the file wholesale.'''
    new = set(t + '\n' for t in tests)
    # Context manager closes the handle; the original leaked it.
    with open(tracking_path, 'w') as f:
        f.writelines(sorted(new))
def main(direction, junitxml_path, tracking_path):
    '''Takes a string indicating which direction to ratchet, "up" or "down,"
    and two paths, one to test-runner output in JUnit XML format, the other to
    a file tracking test results (one test case dotted name per line). Walk the
    former looking for the latter, and react appropriately.

    >>> inp = _mktemp_junitxml('0100111')
    >>> out = _mktemp_tracking('0000000')
    >>> _test_main('up', inp.name, out.name)
    Some tests not required to pass did:
      c1.t
      c4.t
      c5.t
      c6.t
    Conveniently, they have been added to `<tracking_path>` for you. Perhaps commit that?
    Eep! 0 test(s) were required to pass, but instead 4 did. 🐭
    '''
    results = JUnitXMLFile(junitxml_path).parse()
    if tracking_path == '...':
        # Shortcut to aid in debugging XML parsing issues.
        results.report()
        return
    previous = load_previous_results(tracking_path)
    # "up" tracks passing tests; "down" tracks failing ones.
    current = set(results.passed if direction == 'up' else results.failed)
    subjunctive = {'up': 'required to pass', 'down': 'expected to fail'}[direction]
    # NOTE(review): `ratchet` is assigned but never used — looks like a
    # leftover; confirm before removing.
    ratchet = None
    # Tests in the current run that were not in the tracking file.
    too_many = current - previous
    if too_many:
        print(f'Some tests not {subjunctive} did:')
        print_tests(too_many)
        if direction == 'up':
            # Too many passing tests is good -- let's do more of those!
            ratchet_up_passing(tracking_path, current)
            print(f'Conveniently, they have been added to `{tracking_path}` for you. Perhaps commit that?')
    # Tests the tracking file expected but the current run did not produce.
    not_enough = previous - current
    if not_enough:
        print(f'Some tests {subjunctive} didn\'t:')
        print_tests(not_enough)
        if direction == 'down':
            # Not enough failing tests is good -- let's do more of those!
            ratchet_down_failing(tracking_path, current)
            print(f'Conveniently, they have been removed from `{tracking_path}` for you. Perhaps commit that?')
    # Any change at all (welcome or not) exits 1 so CI notices; no change
    # exits 0. (Invocation errors exit 2 in the __main__ block.)
    if too_many or not_enough:
        print(f'Eep! {len(previous)} test(s) were {subjunctive}, but instead {len(current)} did. 🐭')
        return 1
    print(f'All and only tests {subjunctive} did. 💃')
    return 0
# When called as an executable ...
if __name__ == '__main__':
    try:
        # Exactly three positional arguments are required; too few raises
        # ValueError from the unpacking, and a bad direction raises it
        # explicitly so both take the usage-message path below.
        direction, junitxml_path, tracking_path = sys.argv[1:4]
        if direction not in ('up', 'down'):
            raise ValueError
    except ValueError:
        # Print the usage header (the first six lines of the module
        # docstring), substituting the actual program name for "ratchet.py".
        doc = '\n'.join(__doc__.splitlines()[:6])
        doc = re.sub(' ratchet.py', f' {sys.argv[0]}', doc)
        print(doc, file=sys.stderr)
        exit_code = 2
    else:
        exit_code = main(direction, junitxml_path, tracking_path)
    # NOTE: sys.exit here means the doctest helpers defined below are only
    # reached when this module is imported, not when run as a script.
    sys.exit(exit_code)
# Helpers for when called under doctest ...

def _test(*a):
    # Build fixture files from the spec strings via _mk, then run the
    # stdout-capturing harness against them.
    return _test_main(*_mk(*a))
def _test_main(direction, junitxml, tracking):
    '''Takes a string 'up' or 'down' and paths to (or open file objects for)
    the JUnit XML and tracking files to use for this test run. Captures and
    emits stdout (slightly modified) for inspection via doctest.'''
    # Accept either a plain path string or a file-ish object with a .name.
    junitxml_path = junitxml.name if hasattr(junitxml, 'name') else junitxml
    tracking_path = tracking.name if hasattr(tracking, 'name') else tracking
    # Swap in a StringIO so main()'s output can be post-processed before
    # doctest compares it.
    old_stdout = sys.stdout
    sys.stdout = io.StringIO()
    try:
        main(direction, junitxml_path, tracking_path)
    finally:
        sys.stdout.seek(0)
        out = sys.stdout.read()
        # The backtick-quoted tracking path is a random temporary-file name;
        # normalize it so the doctest expectations can match.
        out = re.sub('`.*?`', '`<tracking_path>`', out).strip()
        # Restore the real stdout before printing the captured text.
        sys.stdout = old_stdout
        print(out)
class _PotentialFile(object):
    '''Represent a file that we are able to create but which doesn't exist yet,
    and which, if we create it, will be automatically torn down when the test
    run is over.'''
    def __init__(self, filename):
        # Keep a reference to the TemporaryDirectory: its cleanup (removing
        # any file later created at self.name) runs when this object is
        # garbage-collected at the end of the test run.
        self.d = tempfile.TemporaryDirectory()
        self.name = os.path.join(self.d.name, filename)
def _mk(direction, spec_junitxml, spec_tracking):
'''Takes a string 'up' or 'down' and two bit strings specifying the state
of the JUnit XML results file and the tracking file to set up for this test
case. Returns the direction (unharmed) and two file-ish objects.
If a spec string is None the corresponding return value will be a
_PotentialFile object, which has a .name attribute (like a true file
object) that points to a file that does not exist, but could.
The reason not to simply return the path in all cases is that the file
objects are actually temporary file objects that destroy the underlying
file when they go out of scope, and we want to keep the underlying file
around until the end of the test run.'''
if None not in(spec_junitxml, spec_tracking):
if len(spec_junitxml) != len(spec_tracking):
raise ValueError('if both given, must be the same length: `{spec_junitxml}` and `{spec_tracking}`')
if spec_junitxml is None:
junitxml_fp = _PotentialFile('results.xml')
else:
junitxml_fp = _mktemp_junitxml(spec_junitxml)
if spec_tracking is None:
tracking_fp = _PotentialFile('tracking')
else:
tracking_fp = _mktemp_tracking(spec_tracking)
return direction, junitxml_fp, tracking_fp
def _mktemp_junitxml(spec):
'''Test helper to generate a raw JUnit XML file.
>>> fp = _mktemp_junitxml('00101')
>>> open(fp.name).read()[:11]
'<testsuite>'
'''
fp = tempfile.NamedTemporaryFile()
fp.write(b'<testsuite>')
passed = '''\
<testcase classname="c{i}" name="t"></testcase>
'''
failed = '''\
<testcase classname="c{i}" name="t">
<failure>Traceback (most recent call last):
File "/foo/bar/baz/buz.py", line 1, in &lt;module>
NameError: name 'heck' is not defined
</failure>
</testcase>
'''
i = 0
for c in spec:
if c == '0':
out = failed
elif c == '1':
out = passed
else:
raise ValueError(f'bad c: `{c}`')
fp.write(out.format(i=i).encode('utf8'))
i += 1
fp.write(b'</testsuite>')
fp.flush()
return fp
def _mktemp_tracking(spec):
'''Test helper to prefabricate a tracking file.
>>> fp = _mktemp_tracking('01101')
>>> print(open(fp.name).read()[:-1])
c1.t
c2.t
c4.t
'''
fp = tempfile.NamedTemporaryFile()
i = 0
for c in spec:
if c == '0':
pass
elif c == '1':
fp.write(f'c{i}.t\n'.encode('utf8'))
else:
raise ValueError(f'bad c: `{c}`')
i += 1
fp.flush()
return fp

37
misc/python3/ratchet.sh Executable file
View File

@ -0,0 +1,37 @@
#!/usr/bin/env bash
set -euxo pipefail

tracking_filename="ratchet-passing"

# Start somewhere predictable.
cd "$(dirname "$0")"
base=$(pwd)

# Actually, though, trial outputs some things that are only gitignored in the project root.
cd "../.."

# Since both of the next calls are expected to exit non-0, relax our guard.
set +e
SUBUNITREPORTER_OUTPUT_PATH="$base/results.subunit2" trial --reporter subunitv2-file allmydata
subunit2junitxml < "$base/results.subunit2" > "$base/results.xml"
set -e

# Okay, now we're clear.
cd "$base"

# Make sure ratchet.py itself is clean.
python3 -m doctest ratchet.py

# Now see about Tahoe-LAFS (also expected to fail) ...
set +e
python3 ratchet.py up results.xml "$tracking_filename"
code=$?
set -e

# Emit a diff of the tracking file, to aid in the situation where changes are
# not discovered until CI (where TERM might be `dumb`).
# ${TERM:-dumb} guards against TERM being unset, which would abort the
# script under `set -u`; quoting guards against word splitting.
if [ "${TERM:-dumb}" = 'dumb' ]; then
  export TERM=ansi
fi
git diff "$tracking_filename"

exit $code

0
newsfragments/3288.minor Normal file
View File

0
newsfragments/3325.other Normal file
View File

0
newsfragments/3339.other Normal file
View File

0
newsfragments/3340.other Normal file
View File

0
newsfragments/3341.other Normal file
View File

0
newsfragments/3343.other Normal file
View File

View File

@ -15,6 +15,9 @@ self: super: {
# slightly newer version than appears in nixos 19.09 is helpful. # slightly newer version than appears in nixos 19.09 is helpful.
future = python-super.callPackage ./future.nix { }; future = python-super.callPackage ./future.nix { };
# Need version of pyutil that supports Python 3. The version in 19.09
# is too old.
pyutil = python-super.callPackage ./pyutil.nix { };
}; };
}; };
} }

48
nix/pyutil.nix Normal file
View File

@ -0,0 +1,48 @@
# Build pyutil from PyPI. Packaged here because the version in nixos 19.09
# is too old to support Python 3 (see the overlay that calls this file).
{ stdenv
, buildPythonPackage
, fetchPypi
, setuptoolsDarcs
, setuptoolsTrial
, simplejson
, twisted
, isPyPy
}:

buildPythonPackage rec {
  pname = "pyutil";
  version = "3.3.0";

  src = fetchPypi {
    inherit pname version;
    sha256 = "8c4d4bf668c559186389bb9bce99e4b1b871c09ba252a756ccaacd2b8f401848";
  };

  # simplejson is only exercised by the (disabled) test suite.
  buildInputs = [ setuptoolsDarcs setuptoolsTrial ] ++ (if doCheck then [ simplejson ] else []);
  propagatedBuildInputs = [ twisted ];

  # Tests fail because they try to write new code into the twisted
  # package, apparently some kind of plugin.
  doCheck = false;

  prePatch = stdenv.lib.optionalString isPyPy ''
    grep -rl 'utf-8-with-signature-unix' ./ | xargs sed -i -e "s|utf-8-with-signature-unix|utf-8|g"
  '';

  meta = with stdenv.lib; {
    description = "Pyutil, a collection of mature utilities for Python programmers";

    longDescription = ''
      These are a few data structures, classes and functions which
      we've needed over many years of Python programming and which
      seem to be of general use to other Python programmers. Many of
      the modules that have existed in pyutil over the years have
      subsequently been obsoleted by new features added to the
      Python language or its standard library, thus showing that
      we're not alone in wanting tools like these.
    '';

    homepage = "http://allmydata.org/trac/pyutil";
    license = licenses.gpl2Plus;
  };
}

View File

@ -4,7 +4,7 @@
, setuptools, setuptoolsTrial, pyasn1, zope_interface , setuptools, setuptoolsTrial, pyasn1, zope_interface
, service-identity, pyyaml, magic-wormhole, treq, appdirs , service-identity, pyyaml, magic-wormhole, treq, appdirs
, beautifulsoup4, eliot, autobahn, cryptography , beautifulsoup4, eliot, autobahn, cryptography
, html5lib , html5lib, pyutil
}: }:
python.pkgs.buildPythonPackage rec { python.pkgs.buildPythonPackage rec {
version = "1.14.0.dev"; version = "1.14.0.dev";
@ -50,7 +50,7 @@ python.pkgs.buildPythonPackage rec {
setuptoolsTrial pyasn1 zope_interface setuptoolsTrial pyasn1 zope_interface
service-identity pyyaml magic-wormhole treq service-identity pyyaml magic-wormhole treq
eliot autobahn cryptography setuptools eliot autobahn cryptography setuptools
future future pyutil
]; ];
checkInputs = with python.pkgs; [ checkInputs = with python.pkgs; [

View File

@ -124,6 +124,9 @@ install_requires = [
# Support for Python 3 transition # Support for Python 3 transition
"future >= 0.18.2", "future >= 0.18.2",
# Utility code:
"pyutil >= 3.3.0",
] ]
setup_requires = [ setup_requires = [
@ -138,8 +141,10 @@ tor_requires = [
] ]
i2p_requires = [ i2p_requires = [
# See the comment in tor_requires. # txi2p has Python 3 support, but it's unreleased: https://github.com/str4d/txi2p/issues/10.
"txi2p >= 0.3.2", # URL lookups are in PEP-508 (via https://stackoverflow.com/a/54794506).
# Also see the comment in tor_requires.
"txi2p @ git+https://github.com/str4d/txi2p@0611b9a86172cb70d2f5e415a88eee9f230590b3#egg=txi2p",
] ]
if len(sys.argv) > 1 and sys.argv[1] == '--fakedependency': if len(sys.argv) > 1 and sys.argv[1] == '--fakedependency':
@ -350,7 +355,9 @@ setup(name="tahoe-lafs", # also set in __init__.py
package_dir = {'':'src'}, package_dir = {'':'src'},
packages=find_packages('src') + ['allmydata.test.plugins'], packages=find_packages('src') + ['allmydata.test.plugins'],
classifiers=trove_classifiers, classifiers=trove_classifiers,
python_requires="<3.0", # We support Python 2.7, and we're working on support for 3.6 (the
# highest version that PyPy currently supports).
python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <3.7",
install_requires=install_requires, install_requires=install_requires,
extras_require={ extras_require={
# Duplicate the Twisted pywin32 dependency here. See # Duplicate the Twisted pywin32 dependency here. See
@ -379,6 +386,7 @@ setup(name="tahoe-lafs", # also set in __init__.py
"fixtures", "fixtures",
"beautifulsoup4", "beautifulsoup4",
"html5lib", "html5lib",
"junitxml",
] + tor_requires + i2p_requires, ] + tor_requires + i2p_requires,
"tor": tor_requires, "tor": tor_requires,
"i2p": i2p_requires, "i2p": i2p_requires,

View File

@ -222,7 +222,7 @@ class ValidatedReadBucketProxy(log.PrefixingLogMixin):
UEB""" UEB"""
precondition(share_hash_tree[0] is not None, share_hash_tree) precondition(share_hash_tree[0] is not None, share_hash_tree)
prefix = "%d-%s-%s" % (sharenum, bucket, prefix = "%d-%s-%s" % (sharenum, bucket,
base32.b2a_l(share_hash_tree[0][:8], 60)) base32.b2a(share_hash_tree[0][:8])[:12])
log.PrefixingLogMixin.__init__(self, log.PrefixingLogMixin.__init__(self,
facility="tahoe.immutable.download", facility="tahoe.immutable.download",
prefix=prefix) prefix=prefix)
@ -465,7 +465,7 @@ class Checker(log.PrefixingLogMixin):
monitor): monitor):
assert precondition(isinstance(verifycap, CHKFileVerifierURI), verifycap, type(verifycap)) assert precondition(isinstance(verifycap, CHKFileVerifierURI), verifycap, type(verifycap))
prefix = "%s" % base32.b2a_l(verifycap.get_storage_index()[:8], 60) prefix = "%s" % base32.b2a(verifycap.get_storage_index()[:8])[:12]
log.PrefixingLogMixin.__init__(self, facility="tahoe.immutable.checker", prefix=prefix) log.PrefixingLogMixin.__init__(self, facility="tahoe.immutable.checker", prefix=prefix)
self._verifycap = verifycap self._verifycap = verifycap

View File

@ -43,7 +43,7 @@ class ShareFinder(object):
self.overdue_timers = {} self.overdue_timers = {}
self._storage_index = verifycap.storage_index self._storage_index = verifycap.storage_index
self._si_prefix = base32.b2a_l(self._storage_index[:8], 60) self._si_prefix = base32.b2a(self._storage_index[:8])[:12]
self._node_logparent = logparent self._node_logparent = logparent
self._download_status = download_status self._download_status = download_status
self._lp = log.msg(format="ShareFinder[si=%(si)s] starting", self._lp = log.msg(format="ShareFinder[si=%(si)s] starting",

View File

@ -44,7 +44,7 @@ class DownloadNode(object):
assert isinstance(verifycap, uri.CHKFileVerifierURI) assert isinstance(verifycap, uri.CHKFileVerifierURI)
self._verifycap = verifycap self._verifycap = verifycap
self._storage_broker = storage_broker self._storage_broker = storage_broker
self._si_prefix = base32.b2a_l(verifycap.storage_index[:8], 60) self._si_prefix = base32.b2a(verifycap.storage_index[:8])[:12]
self.running = True self.running = True
if terminator: if terminator:
terminator.register(self) # calls self.stop() at stopService() terminator.register(self) # calls self.stop() at stopService()

View File

@ -298,7 +298,7 @@ class BucketReader(Referenceable):
def __repr__(self): def __repr__(self):
return "<%s %s %s>" % (self.__class__.__name__, return "<%s %s %s>" % (self.__class__.__name__,
base32.b2a_l(self.storage_index[:8], 60), base32.b2a(self.storage_index[:8])[:12],
self.shnum) self.shnum)
def remote_read(self, offset, length): def remote_read(self, offset, length):

View File

@ -1,6 +1,6 @@
from __future__ import print_function from __future__ import print_function
import os, signal, sys, time import os, signal, time
from random import randrange from random import randrange
from six.moves import StringIO from six.moves import StringIO
@ -8,7 +8,6 @@ from twisted.internet import reactor, defer
from twisted.python import failure from twisted.python import failure
from twisted.trial import unittest from twisted.trial import unittest
from allmydata.util import fileutil, log
from ..util.assertutil import precondition from ..util.assertutil import precondition
from allmydata.util.encodingutil import (unicode_platform, get_filesystem_encoding, from allmydata.util.encodingutil import (unicode_platform, get_filesystem_encoding,
get_io_encoding) get_io_encoding)
@ -89,39 +88,6 @@ class ReallyEqualMixin(object):
self.assertEqual(type(a), type(b), "a :: %r, b :: %r, %r" % (a, b, msg)) self.assertEqual(type(a), type(b), "a :: %r, b :: %r, %r" % (a, b, msg))
class NonASCIIPathMixin(object):
def mkdir_nonascii(self, dirpath):
# Kludge to work around the fact that buildbot can't remove a directory tree that has
# any non-ASCII directory names on Windows. (#1472)
if sys.platform == "win32":
def _cleanup():
try:
fileutil.rm_dir(dirpath)
finally:
if os.path.exists(dirpath):
msg = ("We were unable to delete a non-ASCII directory %r created by the test. "
"This is liable to cause failures on future builds." % (dirpath,))
print(msg)
log.err(msg)
self.addCleanup(_cleanup)
os.mkdir(dirpath)
def unicode_or_fallback(self, unicode_name, fallback_name, io_as_well=False):
if not unicode_platform():
try:
unicode_name.encode(get_filesystem_encoding())
except UnicodeEncodeError:
return fallback_name
if io_as_well:
try:
unicode_name.encode(get_io_encoding())
except UnicodeEncodeError:
return fallback_name
return unicode_name
class SignalMixin(object): class SignalMixin(object):
# This class is necessary for any code which wants to use Processes # This class is necessary for any code which wants to use Processes
# outside the usual reactor.run() environment. It is copied from # outside the usual reactor.run() environment. It is copied from

View File

@ -0,0 +1,37 @@
"""
Tests for allmydata.util.base32.
"""
import base64
from twisted.trial import unittest
from hypothesis import (
strategies as st,
given,
)
from allmydata.util import base32
class Base32(unittest.TestCase):
@given(input_bytes=st.binary(max_size=100))
def test_a2b_b2a_match_Pythons(self, input_bytes):
encoded = base32.b2a(input_bytes)
x = base64.b32encode(input_bytes).rstrip(b"=").lower()
self.failUnlessEqual(encoded, x)
self.assertIsInstance(encoded, bytes)
self.assertTrue(base32.could_be_base32_encoded(encoded))
self.assertEqual(base32.a2b(encoded), input_bytes)
def test_b2a(self):
self.failUnlessEqual(base32.b2a(b"\x12\x34"), b"ci2a")
def test_b2a_or_none(self):
self.failUnlessEqual(base32.b2a_or_none(None), None)
self.failUnlessEqual(base32.b2a_or_none(b"\x12\x34"), b"ci2a")
def test_a2b(self):
self.failUnlessEqual(base32.a2b(b"ci2a"), b"\x12\x34")
self.failUnlessRaises(AssertionError, base32.a2b, b"b0gus")
self.assertFalse(base32.could_be_base32_encoded(b"b0gus"))

View File

@ -1,9 +1,16 @@
import random, unittest import random, unittest
from past.builtins import chr as byteschr
from hypothesis import (
strategies as st,
given,
)
from allmydata.util import base62, mathutil from allmydata.util import base62, mathutil
def insecurerandstr(n): def insecurerandstr(n):
return ''.join(map(chr, map(random.randrange, [0]*n, [256]*n))) return b''.join(map(byteschr, map(random.randrange, [0]*n, [256]*n)))
class T(unittest.TestCase): class T(unittest.TestCase):
def _test_num_octets_that_encode_to_this_many_chars(self, chars, octets): def _test_num_octets_that_encode_to_this_many_chars(self, chars, octets):
@ -14,6 +21,10 @@ class T(unittest.TestCase):
bs2=base62.a2b(ascii) bs2=base62.a2b(ascii)
assert bs2 == bs, "bs2: %s:%s, bs: %s:%s, ascii: %s:%s" % (len(bs2), repr(bs2), len(bs), repr(bs), len(ascii), repr(ascii)) assert bs2 == bs, "bs2: %s:%s, bs: %s:%s, ascii: %s:%s" % (len(bs2), repr(bs2), len(bs), repr(bs), len(ascii), repr(ascii))
@given(input_bytes=st.binary(max_size=100))
def test_roundtrip(self, input_bytes):
self._test_ende(input_bytes)
def test_num_octets_that_encode_to_this_many_chars(self): def test_num_octets_that_encode_to_this_many_chars(self):
return self._test_num_octets_that_encode_to_this_many_chars(2, 1) return self._test_num_octets_that_encode_to_this_many_chars(2, 1)
return self._test_num_octets_that_encode_to_this_many_chars(3, 2) return self._test_num_octets_that_encode_to_this_many_chars(3, 2)
@ -21,19 +32,19 @@ class T(unittest.TestCase):
return self._test_num_octets_that_encode_to_this_many_chars(6, 4) return self._test_num_octets_that_encode_to_this_many_chars(6, 4)
def test_ende_0x00(self): def test_ende_0x00(self):
return self._test_ende('\x00') return self._test_ende(b'\x00')
def test_ende_0x01(self): def test_ende_0x01(self):
return self._test_ende('\x01') return self._test_ende(b'\x01')
def test_ende_0x0100(self): def test_ende_0x0100(self):
return self._test_ende('\x01\x00') return self._test_ende(b'\x01\x00')
def test_ende_0x000000(self): def test_ende_0x000000(self):
return self._test_ende('\x00\x00\x00') return self._test_ende(b'\x00\x00\x00')
def test_ende_0x010000(self): def test_ende_0x010000(self):
return self._test_ende('\x01\x00\x00') return self._test_ende(b'\x01\x00\x00')
def test_ende_randstr(self): def test_ende_randstr(self):
return self._test_ende(insecurerandstr(2**4)) return self._test_ende(insecurerandstr(2**4))

View File

@ -83,7 +83,7 @@ BASECONFIG_I = ("[client]\n"
"introducer.furl = %s\n" "introducer.furl = %s\n"
) )
class Basic(testutil.ReallyEqualMixin, testutil.NonASCIIPathMixin, unittest.TestCase): class Basic(testutil.ReallyEqualMixin, unittest.TestCase):
def test_loadable(self): def test_loadable(self):
basedir = "test_client.Basic.test_loadable" basedir = "test_client.Basic.test_loadable"
os.mkdir(basedir) os.mkdir(basedir)

View File

@ -0,0 +1,76 @@
"""
Tests for allmydata.util.deferredutil.
Ported to Python 3.
"""
from __future__ import unicode_literals
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from future.utils import PY2
if PY2:
from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401
from twisted.trial import unittest
from twisted.internet import defer, reactor
from twisted.python.failure import Failure
from allmydata.util import deferredutil
class DeferredUtilTests(unittest.TestCase, deferredutil.WaitForDelayedCallsMixin):
    """
    Tests for ``allmydata.util.deferredutil``.
    """

    def test_gather_results(self):
        """
        ``deferredutil.gatherResults`` errbacks the aggregate Deferred when
        any input Deferred fails, and the failure checks as the original
        exception type.
        """
        d1 = defer.Deferred()
        d2 = defer.Deferred()
        res = deferredutil.gatherResults([d1, d2])
        # Fail one input before the other has fired; the aggregate Deferred
        # must errback rather than wait for d2.
        d1.errback(ValueError("BAD"))
        def _callb(res):
            self.fail("Should have errbacked, not resulted in %s" % (res,))
        def _errb(thef):
            # trap() re-raises (failing the test) unless the failure is a
            # ValueError, i.e. the original exception reached us.
            thef.trap(ValueError)
        res.addCallbacks(_callb, _errb)
        return res

    def test_success(self):
        """
        ``DeferredListShouldSucceed`` fires with the list of results once
        every input Deferred has succeeded.
        """
        d1, d2 = defer.Deferred(), defer.Deferred()
        good = []
        bad = []
        dlss = deferredutil.DeferredListShouldSucceed([d1,d2])
        dlss.addCallbacks(good.append, bad.append)
        d1.callback(1)
        d2.callback(2)
        # Results arrive as a single list, in input order.
        self.failUnlessEqual(good, [[1,2]])
        self.failUnlessEqual(bad, [])

    def test_failure(self):
        """
        ``DeferredListShouldSucceed`` errbacks with a Failure that checks as
        the underlying exception when any input Deferred fails.
        """
        d1, d2 = defer.Deferred(), defer.Deferred()
        good = []
        bad = []
        dlss = deferredutil.DeferredListShouldSucceed([d1,d2])
        dlss.addCallbacks(good.append, bad.append)
        # Swallow the per-Deferred failures so Trial does not report them as
        # unhandled errors after the test finishes.
        d1.addErrback(lambda _ignore: None)
        d2.addErrback(lambda _ignore: None)
        d1.callback(1)
        d2.errback(ValueError())
        self.failUnlessEqual(good, [])
        self.failUnlessEqual(len(bad), 1)
        f = bad[0]
        self.failUnless(isinstance(f, Failure))
        self.failUnless(f.check(ValueError))

    def test_wait_for_delayed_calls(self):
        """
        This tests that 'wait_for_delayed_calls' does in fact wait for a
        delayed call that is active when the test returns. If it didn't,
        Trial would report an unclean reactor error for this test.
        """
        def _trigger():
            #print "trigger"
            pass
        # Leave a pending delayed call behind; the mixin added via addBoth
        # below must wait it out before the test's Deferred fires.
        reactor.callLater(0.1, _trigger)
        d = defer.succeed(None)
        d.addBoth(self.wait_for_delayed_calls)
        return d

View File

@ -15,7 +15,7 @@ from twisted.python.failure import Failure
from twisted.python import log from twisted.python import log
from allmydata.util import base32, idlib, mathutil, hashutil from allmydata.util import base32, idlib, mathutil, hashutil
from allmydata.util import assertutil, fileutil, deferredutil, abbreviate from allmydata.util import fileutil, abbreviate
from allmydata.util import limiter, time_format, pollmixin from allmydata.util import limiter, time_format, pollmixin
from allmydata.util import statistics, dictutil, pipeline, yamlutil from allmydata.util import statistics, dictutil, pipeline, yamlutil
from allmydata.util import log as tahoe_log from allmydata.util import log as tahoe_log
@ -36,25 +36,6 @@ def sha256(data):
return binascii.hexlify(hashlib.sha256(data).digest()) return binascii.hexlify(hashlib.sha256(data).digest())
class Base32(unittest.TestCase):
def test_b2a_matches_Pythons(self):
import base64
y = "\x12\x34\x45\x67\x89\x0a\xbc\xde\xf0"
x = base64.b32encode(y)
while x and x[-1] == '=':
x = x[:-1]
x = x.lower()
self.failUnlessEqual(base32.b2a(y), x)
def test_b2a(self):
self.failUnlessEqual(base32.b2a("\x12\x34"), "ci2a")
def test_b2a_or_none(self):
self.failUnlessEqual(base32.b2a_or_none(None), None)
self.failUnlessEqual(base32.b2a_or_none("\x12\x34"), "ci2a")
def test_a2b(self):
self.failUnlessEqual(base32.a2b("ci2a"), "\x12\x34")
self.failUnlessRaises(AssertionError, base32.a2b, "b0gus")
class IDLib(unittest.TestCase): class IDLib(unittest.TestCase):
def test_nodeid_b2a(self): def test_nodeid_b2a(self):
self.failUnlessEqual(idlib.nodeid_b2a("\x00"*20), "a"*32) self.failUnlessEqual(idlib.nodeid_b2a("\x00"*20), "a"*32)
@ -64,97 +45,6 @@ class MyList(list):
pass pass
class Math(unittest.TestCase): class Math(unittest.TestCase):
def test_div_ceil(self):
f = mathutil.div_ceil
self.failUnlessEqual(f(0, 1), 0)
self.failUnlessEqual(f(0, 2), 0)
self.failUnlessEqual(f(0, 3), 0)
self.failUnlessEqual(f(1, 3), 1)
self.failUnlessEqual(f(2, 3), 1)
self.failUnlessEqual(f(3, 3), 1)
self.failUnlessEqual(f(4, 3), 2)
self.failUnlessEqual(f(5, 3), 2)
self.failUnlessEqual(f(6, 3), 2)
self.failUnlessEqual(f(7, 3), 3)
def test_next_multiple(self):
f = mathutil.next_multiple
self.failUnlessEqual(f(5, 1), 5)
self.failUnlessEqual(f(5, 2), 6)
self.failUnlessEqual(f(5, 3), 6)
self.failUnlessEqual(f(5, 4), 8)
self.failUnlessEqual(f(5, 5), 5)
self.failUnlessEqual(f(5, 6), 6)
self.failUnlessEqual(f(32, 1), 32)
self.failUnlessEqual(f(32, 2), 32)
self.failUnlessEqual(f(32, 3), 33)
self.failUnlessEqual(f(32, 4), 32)
self.failUnlessEqual(f(32, 5), 35)
self.failUnlessEqual(f(32, 6), 36)
self.failUnlessEqual(f(32, 7), 35)
self.failUnlessEqual(f(32, 8), 32)
self.failUnlessEqual(f(32, 9), 36)
self.failUnlessEqual(f(32, 10), 40)
self.failUnlessEqual(f(32, 11), 33)
self.failUnlessEqual(f(32, 12), 36)
self.failUnlessEqual(f(32, 13), 39)
self.failUnlessEqual(f(32, 14), 42)
self.failUnlessEqual(f(32, 15), 45)
self.failUnlessEqual(f(32, 16), 32)
self.failUnlessEqual(f(32, 17), 34)
self.failUnlessEqual(f(32, 18), 36)
self.failUnlessEqual(f(32, 589), 589)
def test_pad_size(self):
f = mathutil.pad_size
self.failUnlessEqual(f(0, 4), 0)
self.failUnlessEqual(f(1, 4), 3)
self.failUnlessEqual(f(2, 4), 2)
self.failUnlessEqual(f(3, 4), 1)
self.failUnlessEqual(f(4, 4), 0)
self.failUnlessEqual(f(5, 4), 3)
def test_is_power_of_k(self):
f = mathutil.is_power_of_k
for i in range(1, 100):
if i in (1, 2, 4, 8, 16, 32, 64):
self.failUnless(f(i, 2), "but %d *is* a power of 2" % i)
else:
self.failIf(f(i, 2), "but %d is *not* a power of 2" % i)
for i in range(1, 100):
if i in (1, 3, 9, 27, 81):
self.failUnless(f(i, 3), "but %d *is* a power of 3" % i)
else:
self.failIf(f(i, 3), "but %d is *not* a power of 3" % i)
def test_next_power_of_k(self):
f = mathutil.next_power_of_k
self.failUnlessEqual(f(0,2), 1)
self.failUnlessEqual(f(1,2), 1)
self.failUnlessEqual(f(2,2), 2)
self.failUnlessEqual(f(3,2), 4)
self.failUnlessEqual(f(4,2), 4)
for i in range(5, 8): self.failUnlessEqual(f(i,2), 8, "%d" % i)
for i in range(9, 16): self.failUnlessEqual(f(i,2), 16, "%d" % i)
for i in range(17, 32): self.failUnlessEqual(f(i,2), 32, "%d" % i)
for i in range(33, 64): self.failUnlessEqual(f(i,2), 64, "%d" % i)
for i in range(65, 100): self.failUnlessEqual(f(i,2), 128, "%d" % i)
self.failUnlessEqual(f(0,3), 1)
self.failUnlessEqual(f(1,3), 1)
self.failUnlessEqual(f(2,3), 3)
self.failUnlessEqual(f(3,3), 3)
for i in range(4, 9): self.failUnlessEqual(f(i,3), 9, "%d" % i)
for i in range(10, 27): self.failUnlessEqual(f(i,3), 27, "%d" % i)
for i in range(28, 81): self.failUnlessEqual(f(i,3), 81, "%d" % i)
for i in range(82, 200): self.failUnlessEqual(f(i,3), 243, "%d" % i)
def test_ave(self):
f = mathutil.ave
self.failUnlessEqual(f([1,2,3]), 2)
self.failUnlessEqual(f([0,0,0,4]), 1)
self.failUnlessAlmostEqual(f([0.0, 1.0, 1.0]), .666666666666)
def test_round_sigfigs(self): def test_round_sigfigs(self):
f = mathutil.round_sigfigs f = mathutil.round_sigfigs
self.failUnlessEqual(f(22.0/3, 4), 7.3330000000000002) self.failUnlessEqual(f(22.0/3, 4), 7.3330000000000002)
@ -297,65 +187,6 @@ class Statistics(unittest.TestCase):
self.failUnlessEqual(f(plist, .5, 3), .02734375) self.failUnlessEqual(f(plist, .5, 3), .02734375)
class Asserts(unittest.TestCase):
def should_assert(self, func, *args, **kwargs):
try:
func(*args, **kwargs)
except AssertionError as e:
return str(e)
except Exception as e:
self.fail("assert failed with non-AssertionError: %s" % e)
self.fail("assert was not caught")
def should_not_assert(self, func, *args, **kwargs):
try:
func(*args, **kwargs)
except AssertionError as e:
self.fail("assertion fired when it should not have: %s" % e)
except Exception as e:
self.fail("assertion (which shouldn't have failed) failed with non-AssertionError: %s" % e)
return # we're happy
def test_assert(self):
f = assertutil._assert
self.should_assert(f)
self.should_assert(f, False)
self.should_not_assert(f, True)
m = self.should_assert(f, False, "message")
self.failUnlessEqual(m, "'message' <type 'str'>", m)
m = self.should_assert(f, False, "message1", othermsg=12)
self.failUnlessEqual("'message1' <type 'str'>, othermsg: 12 <type 'int'>", m)
m = self.should_assert(f, False, othermsg="message2")
self.failUnlessEqual("othermsg: 'message2' <type 'str'>", m)
def test_precondition(self):
f = assertutil.precondition
self.should_assert(f)
self.should_assert(f, False)
self.should_not_assert(f, True)
m = self.should_assert(f, False, "message")
self.failUnlessEqual("precondition: 'message' <type 'str'>", m)
m = self.should_assert(f, False, "message1", othermsg=12)
self.failUnlessEqual("precondition: 'message1' <type 'str'>, othermsg: 12 <type 'int'>", m)
m = self.should_assert(f, False, othermsg="message2")
self.failUnlessEqual("precondition: othermsg: 'message2' <type 'str'>", m)
def test_postcondition(self):
f = assertutil.postcondition
self.should_assert(f)
self.should_assert(f, False)
self.should_not_assert(f, True)
m = self.should_assert(f, False, "message")
self.failUnlessEqual("postcondition: 'message' <type 'str'>", m)
m = self.should_assert(f, False, "message1", othermsg=12)
self.failUnlessEqual("postcondition: 'message1' <type 'str'>, othermsg: 12 <type 'int'>", m)
m = self.should_assert(f, False, othermsg="message2")
self.failUnlessEqual("postcondition: othermsg: 'message2' <type 'str'>", m)
class FileUtil(ReallyEqualMixin, unittest.TestCase): class FileUtil(ReallyEqualMixin, unittest.TestCase):
def mkdir(self, basedir, path, mode=0o777): def mkdir(self, basedir, path, mode=0o777):
fn = os.path.join(basedir, path) fn = os.path.join(basedir, path)
@ -750,60 +581,6 @@ class PollMixinTests(unittest.TestCase):
d.addCallbacks(_suc, _err) d.addCallbacks(_suc, _err)
return d return d
class DeferredUtilTests(unittest.TestCase, deferredutil.WaitForDelayedCallsMixin):
def test_gather_results(self):
d1 = defer.Deferred()
d2 = defer.Deferred()
res = deferredutil.gatherResults([d1, d2])
d1.errback(ValueError("BAD"))
def _callb(res):
self.fail("Should have errbacked, not resulted in %s" % (res,))
def _errb(thef):
thef.trap(ValueError)
res.addCallbacks(_callb, _errb)
return res
def test_success(self):
d1, d2 = defer.Deferred(), defer.Deferred()
good = []
bad = []
dlss = deferredutil.DeferredListShouldSucceed([d1,d2])
dlss.addCallbacks(good.append, bad.append)
d1.callback(1)
d2.callback(2)
self.failUnlessEqual(good, [[1,2]])
self.failUnlessEqual(bad, [])
def test_failure(self):
d1, d2 = defer.Deferred(), defer.Deferred()
good = []
bad = []
dlss = deferredutil.DeferredListShouldSucceed([d1,d2])
dlss.addCallbacks(good.append, bad.append)
d1.addErrback(lambda _ignore: None)
d2.addErrback(lambda _ignore: None)
d1.callback(1)
d2.errback(ValueError())
self.failUnlessEqual(good, [])
self.failUnlessEqual(len(bad), 1)
f = bad[0]
self.failUnless(isinstance(f, Failure))
self.failUnless(f.check(ValueError))
def test_wait_for_delayed_calls(self):
"""
This tests that 'wait_for_delayed_calls' does in fact wait for a
delayed call that is active when the test returns. If it didn't,
Trial would report an unclean reactor error for this test.
"""
def _trigger():
#print "trigger"
pass
reactor.callLater(0.1, _trigger)
d = defer.succeed(None)
d.addBoth(self.wait_for_delayed_calls)
return d
class HashUtilTests(unittest.TestCase): class HashUtilTests(unittest.TestCase):

View File

@ -10,6 +10,12 @@ from allmydata.web.status import (
StatusElement, StatusElement,
) )
from zope.interface import implementer
from allmydata.interfaces import IDownloadResults
from allmydata.web.status import DownloadStatusElement
from allmydata.immutable.downloader.status import DownloadStatus
from .common import ( from .common import (
assert_soup_has_favicon, assert_soup_has_favicon,
assert_soup_has_tag_with_content, assert_soup_has_tag_with_content,
@ -70,3 +76,155 @@ class StatusTests(TrialTestCase):
self, soup, u"h2", self, soup, u"h2",
"Recent Operations:" "Recent Operations:"
) )
@implementer(IDownloadResults)
class FakeDownloadResults(object):
    """
    A minimal ``IDownloadResults`` stand-in for rendering tests.

    Each constructor argument is stored verbatim as the attribute of the
    same name; the status page under test reads these attributes directly.
    """

    def __init__(self,
                 file_size=0,
                 servers_used=None,
                 server_problems=None,
                 servermap=None,
                 timings=None):
        """
        See IDownloadResults for parameters.
        """
        # Plain verbatim storage — no validation, no defaults beyond the
        # signature. Assignment order is irrelevant.
        self.timings = timings
        self.servermap = servermap
        self.server_problems = server_problems
        self.servers_used = servers_used
        self.file_size = file_size
class FakeDownloadStatus(DownloadStatus):
    """
    A ``DownloadStatus`` pre-populated with canned data, so that
    ``DownloadStatusElement`` can be rendered without running a real
    download.
    """
    def __init__(self,
                 storage_index = None,
                 file_size = 0,
                 servers_used = None,
                 server_problems = None,
                 servermap = None,
                 timings = None):
        """
        See IDownloadStatus and IDownloadResults for parameters.
        """
        super(FakeDownloadStatus, self).__init__(storage_index, file_size)

        # Extra canned attributes consumed only by get_results() below.
        self.servers_used = servers_used
        self.server_problems = server_problems
        self.servermap = servermap
        self.timings = timings

    def get_results(self):
        # Repackage the canned attributes as an IDownloadResults provider.
        # NOTE(review): assumes DownloadStatus.__init__ stores file_size as
        # self.size — confirm against allmydata.immutable.downloader.status.
        return FakeDownloadResults(self.size,
                                   self.servers_used,
                                   self.server_problems,
                                   self.servermap,
                                   self.timings)
class DownloadStatusElementTests(TrialTestCase):
    """
    Tests for ```allmydata.web.status.DownloadStatusElement```.
    """

    def _render_download_status_element(self, status):
        """
        :param IDownloadStatus status:
        :return: HTML string rendered by DownloadStatusElement
        """
        elem = DownloadStatusElement(status)
        # successResultOf requires the Deferred to have already fired, so
        # flattening is expected to complete synchronously here.
        d = flattenString(None, elem)
        return self.successResultOf(d)

    def test_download_status_element(self):
        """
        See if we can render the page almost fully.
        """
        # The assertions below show servers "s-1".."s-3" rendered as the
        # abbreviated tags [omwtc], [omwte], [omwtg] — presumably a base32
        # abbreviation of the server id; verify against the renderer.
        status = FakeDownloadStatus(
            "si-1", 123,
            ["s-1", "s-2", "s-3"],
            {"s-1": "unknown problem"},
            {"s-1": [1], "s-2": [1,2], "s-3": [2,3]},
            {"fetch_per_server":
             {"s-1": [1], "s-2": [2,3], "s-3": [3,2]}}
        )

        result = self._render_download_status_element(status)
        soup = BeautifulSoup(result, 'html5lib')

        # Page chrome.
        assert_soup_has_favicon(self, soup)

        assert_soup_has_tag_with_content(
            self, soup, u"title", u"Tahoe-LAFS - File Download Status"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"li", u"File Size: 123 bytes"
        )
        assert_soup_has_tag_with_content(
            self, soup, u"li", u"Progress: 0.0%"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"li", u"Servers Used: [omwtc], [omwte], [omwtg]"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"li", u"Server Problems:"
        )
        assert_soup_has_tag_with_content(
            self, soup, u"li", u"[omwtc]: unknown problem"
        )

        # Singular "share" for one share, plural "shares" otherwise.
        assert_soup_has_tag_with_content(self, soup, u"li", u"Servermap:")
        assert_soup_has_tag_with_content(
            self, soup, u"li", u"[omwtc] has share: #1"
        )
        assert_soup_has_tag_with_content(
            self, soup, u"li", u"[omwte] has shares: #1,#2"
        )
        assert_soup_has_tag_with_content(
            self, soup, u"li", u"[omwtg] has shares: #2,#3"
        )

        # Fetch timings are rendered per server, in the order given.
        assert_soup_has_tag_with_content(
            self, soup, u"li", u"Per-Server Segment Fetch Response Times:"
        )
        assert_soup_has_tag_with_content(
            self, soup, u"li", u"[omwtc]: 1.00s"
        )
        assert_soup_has_tag_with_content(
            self, soup, u"li", u"[omwte]: 2.00s, 3.00s"
        )
        assert_soup_has_tag_with_content(
            self, soup, u"li", u"[omwtg]: 3.00s, 2.00s"
        )

    def test_download_status_element_partial(self):
        """
        See if we can render the page with incomplete download status.
        """
        # All-default FakeDownloadStatus: the element must still render,
        # substituting "None" / zero placeholders for missing data.
        status = FakeDownloadStatus()
        result = self._render_download_status_element(status)
        soup = BeautifulSoup(result, 'html5lib')

        assert_soup_has_tag_with_content(
            self, soup, u"li", u"Servermap: None"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"li", u"File Size: 0 bytes"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"li", u"Total: None (None)"
        )

View File

@ -22,6 +22,9 @@ class Util(ShouldFailMixin, testutil.ReallyEqualMixin, unittest.TestCase):
self.failUnlessReallyEqual(common.abbreviate_time(0.00123), "1.2ms") self.failUnlessReallyEqual(common.abbreviate_time(0.00123), "1.2ms")
self.failUnlessReallyEqual(common.abbreviate_time(0.000123), "123us") self.failUnlessReallyEqual(common.abbreviate_time(0.000123), "123us")
self.failUnlessReallyEqual(common.abbreviate_time(-123000), "-123000000000us") self.failUnlessReallyEqual(common.abbreviate_time(-123000), "-123000000000us")
self.failUnlessReallyEqual(common.abbreviate_time(2.5), "2.50s")
self.failUnlessReallyEqual(common.abbreviate_time(0.25), "250ms")
self.failUnlessReallyEqual(common.abbreviate_time(0.0021), "2.1ms")
self.failUnlessReallyEqual(common.abbreviate_time(None), "") self.failUnlessReallyEqual(common.abbreviate_time(None), "")
self.failUnlessReallyEqual(common.abbreviate_time(2.5), "2.50s") self.failUnlessReallyEqual(common.abbreviate_time(2.5), "2.50s")
@ -54,6 +57,9 @@ class Util(ShouldFailMixin, testutil.ReallyEqualMixin, unittest.TestCase):
self.failUnlessReallyEqual(common.abbreviate_rate(1234000), "1.23MBps") self.failUnlessReallyEqual(common.abbreviate_rate(1234000), "1.23MBps")
self.failUnlessReallyEqual(common.abbreviate_rate(12340), "12.3kBps") self.failUnlessReallyEqual(common.abbreviate_rate(12340), "12.3kBps")
self.failUnlessReallyEqual(common.abbreviate_rate(123), "123Bps") self.failUnlessReallyEqual(common.abbreviate_rate(123), "123Bps")
self.failUnlessReallyEqual(common.abbreviate_rate(2500000), "2.50MBps")
self.failUnlessReallyEqual(common.abbreviate_rate(30100), "30.1kBps")
self.failUnlessReallyEqual(common.abbreviate_rate(123), "123Bps")
def test_abbreviate_size(self): def test_abbreviate_size(self):
self.failUnlessReallyEqual(common.abbreviate_size(None), "") self.failUnlessReallyEqual(common.abbreviate_size(None), "")

View File

@ -33,7 +33,6 @@ from allmydata.immutable import upload
from allmydata.immutable.downloader.status import DownloadStatus from allmydata.immutable.downloader.status import DownloadStatus
from allmydata.dirnode import DirectoryNode from allmydata.dirnode import DirectoryNode
from allmydata.nodemaker import NodeMaker from allmydata.nodemaker import NodeMaker
from allmydata.web import status
from allmydata.web.common import WebError, MultiFormatPage from allmydata.web.common import WebError, MultiFormatPage
from allmydata.util import fileutil, base32, hashutil from allmydata.util import fileutil, base32, hashutil
from allmydata.util.consumer import download_to_data from allmydata.util.consumer import download_to_data
@ -1239,18 +1238,6 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
u"Tahoe-LAFS - Mutable File Retrieve Status") u"Tahoe-LAFS - Mutable File Retrieve Status")
return d return d
def test_status_numbers(self):
drrm = status.DownloadResultsRendererMixin()
self.failUnlessReallyEqual(drrm.render_time(None, None), "")
self.failUnlessReallyEqual(drrm.render_time(None, 2.5), "2.50s")
self.failUnlessReallyEqual(drrm.render_time(None, 0.25), "250ms")
self.failUnlessReallyEqual(drrm.render_time(None, 0.0021), "2.1ms")
self.failUnlessReallyEqual(drrm.render_time(None, 0.000123), "123us")
self.failUnlessReallyEqual(drrm.render_rate(None, None), "")
self.failUnlessReallyEqual(drrm.render_rate(None, 2500000), "2.50MBps")
self.failUnlessReallyEqual(drrm.render_rate(None, 30100), "30.1kBps")
self.failUnlessReallyEqual(drrm.render_rate(None, 123), "123Bps")
def test_GET_FILEURL(self): def test_GET_FILEURL(self):
d = self.GET(self.public_url + "/foo/bar.txt") d = self.GET(self.public_url + "/foo/bar.txt")
d.addCallback(self.failUnlessIsBarDotTxt) d.addCallback(self.failUnlessIsBarDotTxt)

View File

@ -15,12 +15,17 @@ if PY2:
# Keep these sorted alphabetically, to reduce merge conflicts: # Keep these sorted alphabetically, to reduce merge conflicts:
PORTED_MODULES = [ PORTED_MODULES = [
"allmydata.util.assertutil",
"allmydata.util.deferredutil",
"allmydata.util.humanreadable", "allmydata.util.humanreadable",
"allmydata.util.mathutil",
"allmydata.util.namespace", "allmydata.util.namespace",
"allmydata.util.pollmixin",
"allmydata.util._python3", "allmydata.util._python3",
] ]
PORTED_TEST_MODULES = [ PORTED_TEST_MODULES = [
"allmydata.test.test_deferredutil",
"allmydata.test.test_humanreadable", "allmydata.test.test_humanreadable",
"allmydata.test.test_python3", "allmydata.test.test_python3",
] ]

View File

@ -1,57 +1,23 @@
""" """
Tests useful in assertion checking, prints out nicely formatted messages too. Backwards compatibility layer, the versions in pyutil are better maintained and
Backwards compatibility layer, the versions in pyutil are better maintained and
have tests.
Ported to Python 3.
""" """
from allmydata.util.humanreadable import hr from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
def _assert(___cond=False, *___args, **___kwargs): from future.utils import PY2
if ___cond: if PY2:
return True from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401
msgbuf=[]
if ___args:
msgbuf.append("%s %s" % tuple(map(hr, (___args[0], type(___args[0]),))))
msgbuf.extend([", %s %s" % tuple(map(hr, (arg, type(arg),))) for arg in ___args[1:]])
if ___kwargs:
msgbuf.append(", %s: %s %s" % ((___kwargs.items()[0][0],) + tuple(map(hr, (___kwargs.items()[0][1], type(___kwargs.items()[0][1]),)))))
else:
if ___kwargs:
msgbuf.append("%s: %s %s" % ((___kwargs.items()[0][0],) + tuple(map(hr, (___kwargs.items()[0][1], type(___kwargs.items()[0][1]),)))))
msgbuf.extend([", %s: %s %s" % tuple(map(hr, (k, v, type(v),))) for k, v in ___kwargs.items()[1:]])
raise AssertionError("".join(msgbuf))
def precondition(___cond=False, *___args, **___kwargs): # The API importers expect:
if ___cond: from pyutil.assertutil import _assert, precondition, postcondition
return True
msgbuf=["precondition", ]
if ___args or ___kwargs:
msgbuf.append(": ")
if ___args:
msgbuf.append("%s %s" % tuple(map(hr, (___args[0], type(___args[0]),))))
msgbuf.extend([", %s %s" % tuple(map(hr, (arg, type(arg),))) for arg in ___args[1:]])
if ___kwargs:
msgbuf.append(", %s: %s %s" % ((___kwargs.items()[0][0],) + tuple(map(hr, (___kwargs.items()[0][1], type(___kwargs.items()[0][1]),)))))
else:
if ___kwargs:
msgbuf.append("%s: %s %s" % ((___kwargs.items()[0][0],) + tuple(map(hr, (___kwargs.items()[0][1], type(___kwargs.items()[0][1]),)))))
msgbuf.extend([", %s: %s %s" % tuple(map(hr, (k, v, type(v),))) for k, v in ___kwargs.items()[1:]])
raise AssertionError("".join(msgbuf)) __all__ = ["_assert", "precondition", "postcondition"]
def postcondition(___cond=False, *___args, **___kwargs):
if ___cond:
return True
msgbuf=["postcondition", ]
if ___args or ___kwargs:
msgbuf.append(": ")
if ___args:
msgbuf.append("%s %s" % tuple(map(hr, (___args[0], type(___args[0]),))))
msgbuf.extend([", %s %s" % tuple(map(hr, (arg, type(arg),))) for arg in ___args[1:]])
if ___kwargs:
msgbuf.append(", %s: %s %s" % ((___kwargs.items()[0][0],) + tuple(map(hr, (___kwargs.items()[0][1], type(___kwargs.items()[0][1]),)))))
else:
if ___kwargs:
msgbuf.append("%s: %s %s" % ((___kwargs.items()[0][0],) + tuple(map(hr, (___kwargs.items()[0][1], type(___kwargs.items()[0][1]),)))))
msgbuf.extend([", %s: %s %s" % tuple(map(hr, (k, v, type(v),))) for k, v in ___kwargs.items()[1:]])
raise AssertionError("".join(msgbuf))

View File

@ -52,13 +52,13 @@ def b2a(os):
@return the contents of os in base-32 encoded form @return the contents of os in base-32 encoded form
""" """
return b2a_l(os, len(os)*8) return _b2a_l(os, len(os)*8)
def b2a_or_none(os): def b2a_or_none(os):
if os is not None: if os is not None:
return b2a(os) return b2a(os)
def b2a_l(os, lengthinbits): def _b2a_l(os, lengthinbits):
""" """
@param os the data to be encoded (a string) @param os the data to be encoded (a string)
@param lengthinbits the number of bits of data in os to be encoded @param lengthinbits the number of bits of data in os to be encoded
@ -204,9 +204,9 @@ def a2b(cs):
precondition(could_be_base32_encoded(cs), "cs is required to be possibly base32 encoded data.", cs=cs) precondition(could_be_base32_encoded(cs), "cs is required to be possibly base32 encoded data.", cs=cs)
precondition(isinstance(cs, six.binary_type), cs) precondition(isinstance(cs, six.binary_type), cs)
return a2b_l(cs, num_octets_that_encode_to_this_many_quintets(len(cs))*8) return _a2b_l(cs, num_octets_that_encode_to_this_many_quintets(len(cs))*8)
def a2b_l(cs, lengthinbits): def _a2b_l(cs, lengthinbits):
""" """
@param lengthinbits the number of bits of data in encoded into cs @param lengthinbits the number of bits of data in encoded into cs
@ -261,5 +261,8 @@ def a2b_l(cs, lengthinbits):
pos = pos * 256 pos = pos * 256
assert len(octets) == numoctets, "len(octets): %s, numoctets: %s, octets: %s" % (len(octets), numoctets, octets,) assert len(octets) == numoctets, "len(octets): %s, numoctets: %s, octets: %s" % (len(octets), numoctets, octets,)
res = ''.join(map(chr, octets)) res = ''.join(map(chr, octets))
precondition(b2a_l(res, lengthinbits) == cs, "cs is required to be the canonical base-32 encoding of some data.", b2a(res), res=res, cs=cs) precondition(_b2a_l(res, lengthinbits) == cs, "cs is required to be the canonical base-32 encoding of some data.", b2a(res), res=res, cs=cs)
return res return res
__all__ = ["b2a", "a2b", "b2a_or_none", "BASE32CHAR_3bits", "BASE32CHAR_1bits", "BASE32CHAR", "BASE32STR_anybytes", "could_be_base32_encoded"]

View File

@ -1,7 +1,21 @@
"""
Utilities for working with Twisted Deferreds.
Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from future.utils import PY2
if PY2:
from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401
import time import time
from foolscap.api import eventually, fireEventually from foolscap.api import eventually
from twisted.internet import defer, reactor, error from twisted.internet import defer, reactor, error
from twisted.python.failure import Failure from twisted.python.failure import Failure
@ -130,7 +144,7 @@ class HookMixin(object):
self._hooks[name] = (d, ignore_count) self._hooks[name] = (d, ignore_count)
return d return d
def _call_hook(self, res, name, async=False): def _call_hook(self, res, name, **kwargs):
""" """
Called to trigger the hook, with argument 'res'. This is a no-op if Called to trigger the hook, with argument 'res'. This is a no-op if
the hook is unset. If the hook's ignore_count is positive, it will be the hook is unset. If the hook's ignore_count is positive, it will be
@ -142,7 +156,10 @@ class HookMixin(object):
which will typically cause the test to also fail. which will typically cause the test to also fail.
'res' is returned so that the current result or failure will be passed 'res' is returned so that the current result or failure will be passed
through. through.
Accepts a single keyword argument, async, defaulting to False.
""" """
async_ = kwargs.get("async", False)
hook = self._hooks[name] hook = self._hooks[name]
if hook is None: if hook is None:
return res # pass on error/result return res # pass on error/result
@ -153,7 +170,7 @@ class HookMixin(object):
self._hooks[name] = (d, ignore_count - 1) self._hooks[name] = (d, ignore_count - 1)
else: else:
self._hooks[name] = None self._hooks[name] = None
if async: if async_:
_with_log(eventually_callback(d), res) _with_log(eventually_callback(d), res)
else: else:
_with_log(d.callback, res) _with_log(d.callback, res)
@ -163,42 +180,6 @@ class HookMixin(object):
log.msg(msg, level=log.NOISY) log.msg(msg, level=log.NOISY)
def async_iterate(process, iterable, *extra_args, **kwargs):
"""
I iterate over the elements of 'iterable' (which may be deferred), eventually
applying 'process' to each one, optionally with 'extra_args' and 'kwargs'.
'process' should return a (possibly deferred) boolean: True to continue the
iteration, False to stop.
I return a Deferred that fires with True if all elements of the iterable
were processed (i.e. 'process' only returned True values); with False if
the iteration was stopped by 'process' returning False; or that fails with
the first failure of either 'process' or the iterator.
"""
iterator = iter(iterable)
d = defer.succeed(None)
def _iterate(ign):
d2 = defer.maybeDeferred(iterator.next)
def _cb(item):
d3 = defer.maybeDeferred(process, item, *extra_args, **kwargs)
def _maybe_iterate(res):
if res:
d4 = fireEventually()
d4.addCallback(_iterate)
return d4
return False
d3.addCallback(_maybe_iterate)
return d3
def _eb(f):
f.trap(StopIteration)
return True
d2.addCallbacks(_cb, _eb)
return d2
d.addCallback(_iterate)
return d
def for_items(cb, mapping): def for_items(cb, mapping):
""" """
For each (key, value) pair in a mapping, I add a callback to cb(None, key, value) For each (key, value) pair in a mapping, I add a callback to cb(None, key, value)

View File

@ -12,7 +12,7 @@ from errno import ENOENT
if sys.platform == "win32": if sys.platform == "win32":
from ctypes import WINFUNCTYPE, WinError, windll, POINTER, byref, c_ulonglong, \ from ctypes import WINFUNCTYPE, WinError, windll, POINTER, byref, c_ulonglong, \
create_unicode_buffer, get_last_error create_unicode_buffer, get_last_error
from ctypes.wintypes import BOOL, DWORD, LPCWSTR, LPWSTR, LPVOID, HANDLE from ctypes.wintypes import BOOL, DWORD, LPCWSTR, LPWSTR, LPVOID
from twisted.python import log from twisted.python import log
@ -538,60 +538,6 @@ def get_available_space(whichdir, reserved_space):
return 0 return 0
if sys.platform == "win32":
# <http://msdn.microsoft.com/en-us/library/aa363858%28v=vs.85%29.aspx>
CreateFileW = WINFUNCTYPE(
HANDLE, LPCWSTR, DWORD, DWORD, LPVOID, DWORD, DWORD, HANDLE,
use_last_error=True
)(("CreateFileW", windll.kernel32))
GENERIC_WRITE = 0x40000000
FILE_SHARE_READ = 0x00000001
FILE_SHARE_WRITE = 0x00000002
OPEN_EXISTING = 3
INVALID_HANDLE_VALUE = 0xFFFFFFFF
# <http://msdn.microsoft.com/en-us/library/aa364439%28v=vs.85%29.aspx>
FlushFileBuffers = WINFUNCTYPE(
BOOL, HANDLE,
use_last_error=True
)(("FlushFileBuffers", windll.kernel32))
# <http://msdn.microsoft.com/en-us/library/ms724211%28v=vs.85%29.aspx>
CloseHandle = WINFUNCTYPE(
BOOL, HANDLE,
use_last_error=True
)(("CloseHandle", windll.kernel32))
# <http://social.msdn.microsoft.com/forums/en-US/netfxbcl/thread/4465cafb-f4ed-434f-89d8-c85ced6ffaa8/>
def flush_volume(path):
abspath = os.path.realpath(path)
if abspath.startswith("\\\\?\\"):
abspath = abspath[4 :]
drive = os.path.splitdrive(abspath)[0]
print("flushing %r" % (drive,))
hVolume = CreateFileW(u"\\\\.\\" + drive,
GENERIC_WRITE,
FILE_SHARE_READ | FILE_SHARE_WRITE,
None,
OPEN_EXISTING,
0,
None
)
if hVolume == INVALID_HANDLE_VALUE:
raise WinError(get_last_error())
if FlushFileBuffers(hVolume) == 0:
raise WinError(get_last_error())
CloseHandle(hVolume)
else:
def flush_volume(path):
# use sync()?
pass
class ConflictError(Exception): class ConflictError(Exception):
pass pass

View File

@ -1,71 +1,28 @@
""" """
A few commonly needed functions. A few commonly needed functions.
Backwards compatibility for direct imports.
Ported to Python 3.
""" """
import math from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
def div_ceil(n, d): from future.utils import PY2
""" if PY2:
The smallest integer k such that k*d >= n. from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401
"""
return (n/d) + (n%d != 0)
def next_multiple(n, k):
"""
The smallest multiple of k which is >= n.
"""
return div_ceil(n, k) * k
def pad_size(n, k): # The API importers expect:
""" from pyutil.mathutil import div_ceil, next_multiple, pad_size, is_power_of_k, next_power_of_k, ave, log_ceil, log_floor
The smallest number that has to be added to n so that n is a multiple of k.
"""
if n%k:
return k - n%k
else:
return 0
def is_power_of_k(n, k):
return k**int(math.log(n, k) + 0.5) == n
def next_power_of_k(n, k):
if n == 0:
x = 0
else:
x = int(math.log(n, k) + 0.5)
if k**x < n:
return k**(x+1)
else:
return k**x
def ave(l):
return sum(l) / len(l)
def log_ceil(n, b):
"""
The smallest integer k such that b^k >= n.
log_ceil(n, 2) is the number of bits needed to store any of n values, e.g.
the number of bits needed to store any of 128 possible values is 7.
"""
p = 1
k = 0
while p < n:
p *= b
k += 1
return k
def log_floor(n, b):
"""
The largest integer k such that b^k <= n.
"""
p = 1
k = 0
while p <= n:
p *= b
k += 1
return k - 1
# This function is not present in pyutil.mathutil:
def round_sigfigs(f, n): def round_sigfigs(f, n):
fmt = "%." + str(n-1) + "e" fmt = "%." + str(n-1) + "e"
return float(fmt % f) return float(fmt % f)
__all__ = ["div_ceil", "next_multiple", "pad_size", "is_power_of_k", "next_power_of_k", "ave", "log_ceil", "log_floor", "round_sigfigs"]

View File

@ -1,4 +1,17 @@
"""
Polling utility that returns Deferred.
Ported to Python 3.
"""
from __future__ import print_function from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from future.utils import PY2
if PY2:
from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401
import time import time
from twisted.internet import task from twisted.internet import task

View File

@ -1,7 +1,7 @@
from twisted.internet import address from twisted.internet import address
from foolscap.api import Violation, RemoteException, DeadReferenceError, \ from foolscap.api import Violation, RemoteException, SturdyRef
SturdyRef
def add_version_to_remote_reference(rref, default): def add_version_to_remote_reference(rref, default):
"""I try to add a .version attribute to the given RemoteReference. I call """I try to add a .version attribute to the given RemoteReference. I call
@ -19,12 +19,6 @@ def add_version_to_remote_reference(rref, default):
d.addCallbacks(_got_version, _no_get_version) d.addCallbacks(_got_version, _no_get_version)
return d return d
def trap_and_discard(f, *errorTypes):
f.trap(*errorTypes)
def trap_deadref(f):
return trap_and_discard(f, DeadReferenceError)
def connection_hints_for_furl(furl): def connection_hints_for_furl(furl):
hints = [] hints = []

View File

@ -297,7 +297,7 @@ def _get_platform():
def _get_package_versions_and_locations(): def _get_package_versions_and_locations():
import warnings import warnings
from _auto_deps import package_imports, global_deprecation_messages, deprecation_messages, \ from ._auto_deps import package_imports, global_deprecation_messages, deprecation_messages, \
runtime_warning_messages, warning_imports, ignorable runtime_warning_messages, warning_imports, ignorable
def package_dir(srcfile): def package_dir(srcfile):

View File

@ -1,53 +1,57 @@
<html xmlns:n="http://nevow.com/ns/nevow/0.1"> <html xmlns:t="http://twistedmatrix.com/ns/twisted.web.template/0.1">
<head> <head>
<title>Tahoe-LAFS - File Download Status</title> <title>Tahoe-LAFS - File Download Status</title>
<link href="/tahoe.css" rel="stylesheet" type="text/css"/> <link href="/tahoe.css" rel="stylesheet" type="text/css"/>
<link href="/icon.png" rel="shortcut icon" /> <link href="/icon.png" rel="shortcut icon" />
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" /> <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
</head> </head>
<body> <body>
<h1>File Download Status</h1> <h1>File Download Status</h1>
<ul> <ul>
<li>Started: <span n:render="started"/></li> <li>Started: <t:transparent t:render="started"/></li>
<li>Storage Index: <span n:render="si"/></li> <li>Storage Index: <t:transparent t:render="si"/></li>
<li>Helper?: <span n:render="helper"/></li> <li>Helper?: <t:transparent t:render="helper"/></li>
<li>Total Size: <span n:render="total_size"/></li> <li>Total Size: <t:transparent t:render="total_size"/></li>
<li>Progress: <span n:render="progress"/></li> <li>Progress: <t:transparent t:render="progress"/></li>
<li>Status: <span n:render="status"/></li> <li>Status: <t:transparent t:render="status"/></li>
</ul> </ul>
<div n:render="events"></div> <div t:render="events"></div>
<div t:render="results">
<div n:render="results">
<h2>Download Results</h2> <h2>Download Results</h2>
<ul> <ul>
<li n:render="servers_used" /> <li t:render="servers_used" />
<li>Servermap: <span n:render="servermap" /></li> <li>Servermap: <t:transparent t:render="servermap" /></li>
<li n:render="problems" /> <li t:render="problems" />
<li>Timings:</li> <li>Timings:</li>
<ul> <ul>
<li>File Size: <span n:render="string" n:data="file_size" /> bytes</li> <li>File Size: <t:transparent t:render="file_size" /> bytes</li>
<li>Total: <span n:render="time" n:data="time_total" /> <li>Total: <t:transparent t:render="time_total" />
(<span n:render="rate" n:data="rate_total" />)</li> (<t:transparent t:render="rate_total" />)</li>
<ul> <ul>
<li>Peer Selection: <span n:render="time" n:data="time_peer_selection" /></li> <li>Peer Selection: <t:transparent t:render="time_peer_selection" /></li>
<li>UEB Fetch: <span n:render="time" n:data="time_uri_extension" /></li> <li>UEB Fetch: <t:transparent t:render="time_uri_extension" /></li>
<li>Hashtree Fetch: <span n:render="time" n:data="time_hashtrees" /></li> <li>Hashtree Fetch: <t:transparent t:render="time_hashtrees" /></li>
<li>Segment Fetch: <span n:render="time" n:data="time_segments" /> <li>Segment Fetch: <t:transparent t:render="time_segments" />
(<span n:render="rate" n:data="rate_segments" />)</li> (<t:transparent t:render="rate_segments" />)</li>
<ul> <ul>
<li>Cumulative Fetching: <span n:render="time" n:data="time_cumulative_fetch" /> <li>Cumulative Fetching: <t:transparent t:render="time_cumulative_fetch" />
(<span n:render="rate" n:data="rate_fetch" />)</li> (<t:transparent t:render="rate_fetch" />)</li>
<li>Cumulative Decoding: <span n:render="time" n:data="time_cumulative_decode" /> <li>Cumulative Decoding: <t:transparent t:render="time_cumulative_decode" />
(<span n:render="rate" n:data="rate_decode" />)</li> (<t:transparent t:render="rate_decode" />)</li>
<li>Cumulative Decrypting: <span n:render="time" n:data="time_cumulative_decrypt" /> <li>Cumulative Decrypting: <t:transparent t:render="time_cumulative_decrypt" />
(<span n:render="rate" n:data="rate_decrypt" />)</li> (<t:transparent t:render="rate_decrypt" />)</li>
</ul> </ul>
<li>Paused by client: <span n:render="time" n:data="time_paused" /></li> <li>Paused by client: <t:transparent t:render="time_paused" /></li>
</ul> </ul>
<li n:render="server_timings" /> <li t:render="server_timings" />
</ul> </ul>
</ul> </ul>
</div> </div>

View File

@ -36,14 +36,6 @@ from allmydata.interfaces import (
IServermapUpdaterStatus, IServermapUpdaterStatus,
) )
class RateAndTimeMixin(object):
def render_time(self, ctx, data):
return abbreviate_time(data)
def render_rate(self, ctx, data):
return abbreviate_rate(data)
class UploadResultsRendererMixin(Element): class UploadResultsRendererMixin(Element):
# this requires a method named 'upload_results' # this requires a method named 'upload_results'
@ -275,130 +267,6 @@ class UploadStatusElement(UploadResultsRendererMixin):
return tag(self._upload_status.get_status()) return tag(self._upload_status.get_status())
class DownloadResultsRendererMixin(RateAndTimeMixin):
# this requires a method named 'download_results'
def render_servermap(self, ctx, data):
d = self.download_results()
d.addCallback(lambda res: res.servermap)
def _render(servermap):
if servermap is None:
return "None"
l = T.ul()
for peerid in sorted(servermap.keys()):
peerid_s = idlib.shortnodeid_b2a(peerid)
shares_s = ",".join(["#%d" % shnum
for shnum in servermap[peerid]])
l[T.li["[%s] has share%s: %s" % (peerid_s,
plural(servermap[peerid]),
shares_s)]]
return l
d.addCallback(_render)
return d
def render_servers_used(self, ctx, data):
d = self.download_results()
d.addCallback(lambda res: res.servers_used)
def _got(servers_used):
if not servers_used:
return ""
peerids_s = ", ".join(["[%s]" % idlib.shortnodeid_b2a(peerid)
for peerid in servers_used])
return T.li["Servers Used: ", peerids_s]
d.addCallback(_got)
return d
def render_problems(self, ctx, data):
d = self.download_results()
d.addCallback(lambda res: res.server_problems)
def _got(server_problems):
if not server_problems:
return ""
l = T.ul()
for peerid in sorted(server_problems.keys()):
peerid_s = idlib.shortnodeid_b2a(peerid)
l[T.li["[%s]: %s" % (peerid_s, server_problems[peerid])]]
return T.li["Server Problems:", l]
d.addCallback(_got)
return d
def data_file_size(self, ctx, data):
d = self.download_results()
d.addCallback(lambda res: res.file_size)
return d
def _get_time(self, name):
d = self.download_results()
d.addCallback(lambda res: res.timings.get(name))
return d
def data_time_total(self, ctx, data):
return self._get_time("total")
def data_time_peer_selection(self, ctx, data):
return self._get_time("peer_selection")
def data_time_uri_extension(self, ctx, data):
return self._get_time("uri_extension")
def data_time_hashtrees(self, ctx, data):
return self._get_time("hashtrees")
def data_time_segments(self, ctx, data):
return self._get_time("segments")
def data_time_cumulative_fetch(self, ctx, data):
return self._get_time("cumulative_fetch")
def data_time_cumulative_decode(self, ctx, data):
return self._get_time("cumulative_decode")
def data_time_cumulative_decrypt(self, ctx, data):
return self._get_time("cumulative_decrypt")
def data_time_paused(self, ctx, data):
return self._get_time("paused")
def _get_rate(self, name):
d = self.download_results()
def _convert(r):
file_size = r.file_size
duration = r.timings.get(name)
return compute_rate(file_size, duration)
d.addCallback(_convert)
return d
def data_rate_total(self, ctx, data):
return self._get_rate("total")
def data_rate_segments(self, ctx, data):
return self._get_rate("segments")
def data_rate_fetch(self, ctx, data):
return self._get_rate("cumulative_fetch")
def data_rate_decode(self, ctx, data):
return self._get_rate("cumulative_decode")
def data_rate_decrypt(self, ctx, data):
return self._get_rate("cumulative_decrypt")
def render_server_timings(self, ctx, data):
d = self.download_results()
d.addCallback(lambda res: res.timings.get("fetch_per_server"))
def _render(per_server):
if per_server is None:
return ""
l = T.ul()
for peerid in sorted(per_server.keys()):
peerid_s = idlib.shortnodeid_b2a(peerid)
times_s = ", ".join([abbreviate_time(t)
for t in per_server[peerid]])
l[T.li["[%s]: %s" % (peerid_s, times_s)]]
return T.li["Per-Server Segment Fetch Response Times: ", l]
d.addCallback(_render)
return d
def _find_overlap(events, start_key, end_key): def _find_overlap(events, start_key, end_key):
""" """
given a list of event dicts, return a new list in which each event given a list of event dicts, return a new list in which each event
@ -547,50 +415,85 @@ class _EventJson(Resource, object):
return json.dumps(data, indent=1) + "\n" return json.dumps(data, indent=1) + "\n"
class DownloadStatusPage(DownloadResultsRendererMixin, rend.Page): class DownloadStatusPage(Resource, object):
docFactory = getxmlfile("download-status.xhtml") """Renders /status/down-%d."""
def __init__(self, data): def __init__(self, download_status):
rend.Page.__init__(self, data) """
self.download_status = data :param IDownloadStatus download_status: stats provider
self.putChild("event_json", _EventJson(self.download_status)) """
super(DownloadStatusPage, self).__init__()
self._download_status = download_status
self.putChild("event_json", _EventJson(self._download_status))
def render_GET(self, req):
elem = DownloadStatusElement(self._download_status)
return renderElement(req, elem)
class DownloadStatusElement(Element):
loader = XMLFile(FilePath(__file__).sibling("download-status.xhtml"))
def __init__(self, download_status):
super(DownloadStatusElement, self).__init__()
self._download_status = download_status
# XXX: fun fact: the `get_results()` method which we wind up
# invoking here (see immutable.downloader.status.DownloadStatus)
# is unimplemented, and simply returns a `None`. As a result,
# `results()` renderer returns an empty tag, and does not invoke
# any of the subsequent renderers. Thus we end up not displaying
# download results on the download status page.
#
# See #3310: https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3310
def download_results(self): def download_results(self):
return defer.maybeDeferred(self.download_status.get_results) return self._download_status.get_results()
def relative_time(self, t): def _relative_time(self, t):
if t is None: if t is None:
return t return t
if self.download_status.first_timestamp is not None: if self._download_status.first_timestamp is not None:
return t - self.download_status.first_timestamp return t - self._download_status.first_timestamp
return t return t
def short_relative_time(self, t):
t = self.relative_time(t) def _short_relative_time(self, t):
t = self._relative_time(t)
if t is None: if t is None:
return "" return ""
return "+%.6fs" % t return "+%.6fs" % t
def render_timeline_link(self, ctx, data):
from nevow import url
return T.a(href=url.URL.fromContext(ctx).child("timeline"))["timeline"]
def _rate_and_time(self, bytes, seconds): def _rate_and_time(self, bytes, seconds):
time_s = self.render_time(None, seconds) time_s = abbreviate_time(seconds)
if seconds != 0: if seconds != 0:
rate = self.render_rate(None, 1.0 * bytes / seconds) rate = abbreviate_rate(1.0 * bytes / seconds)
return T.span(title=rate)[time_s] return tags.span(time_s, title=rate)
return T.span[time_s] return tags.span(time_s)
def render_events(self, ctx, data): # XXX: This method is a candidate for refactoring. It renders
if not self.download_status.storage_index: # four tables from this function. Layout part of those tables
return # could be moved to download-status.xhtml.
srt = self.short_relative_time #
l = T.div() # See #3311: https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3311
@renderer
def events(self, req, tag):
if not self._download_status.get_storage_index():
return tag
t = T.table(align="left", class_="status-download-events") srt = self._short_relative_time
t[T.tr[T.th["serverid"], T.th["sent"], T.th["received"],
T.th["shnums"], T.th["RTT"]]] evtag = tags.div()
for d_ev in self.download_status.dyhb_requests:
# "DYHB Requests" table.
dyhbtag = tags.table(align="left", class_="status-download-events")
dyhbtag(tags.tr(tags.th("serverid"),
tags.th("sent"),
tags.th("received"),
tags.th("shnums"),
tags.th("RTT")))
for d_ev in self._download_status.dyhb_requests:
server = d_ev["server"] server = d_ev["server"]
sent = d_ev["start_time"] sent = d_ev["start_time"]
shnums = d_ev["response_shnums"] shnums = d_ev["response_shnums"]
@ -600,20 +503,32 @@ class DownloadStatusPage(DownloadResultsRendererMixin, rend.Page):
rtt = received - sent rtt = received - sent
if not shnums: if not shnums:
shnums = ["-"] shnums = ["-"]
t[T.tr(style="background: %s" % _color(server))[
[T.td[server.get_name()], T.td[srt(sent)], T.td[srt(received)],
T.td[",".join([str(shnum) for shnum in shnums])],
T.td[self.render_time(None, rtt)],
]]]
l[T.h2["DYHB Requests:"], t] dyhbtag(tags.tr(style="background: %s" % _color(server))(
l[T.br(clear="all")] (tags.td(server.get_name()),
tags.td(srt(sent)),
tags.td(srt(received)),
tags.td(",".join([str(shnum) for shnum in shnums])),
tags.td(abbreviate_time(rtt)),
)))
t = T.table(align="left",class_="status-download-events") evtag(tags.h2("DYHB Requests:"), dyhbtag)
t[T.tr[T.th["range"], T.th["start"], T.th["finish"], T.th["got"], evtag(tags.br(clear="all"))
T.th["time"], T.th["decrypttime"], T.th["pausedtime"],
T.th["speed"]]] # "Read Events" table.
for r_ev in self.download_status.read_events: readtag = tags.table(align="left",class_="status-download-events")
readtag(tags.tr((
tags.th("range"),
tags.th("start"),
tags.th("finish"),
tags.th("got"),
tags.th("time"),
tags.th("decrypttime"),
tags.th("pausedtime"),
tags.th("speed"))))
for r_ev in self._download_status.read_events:
start = r_ev["start"] start = r_ev["start"]
length = r_ev["length"] length = r_ev["length"]
bytes = r_ev["bytes_returned"] bytes = r_ev["bytes_returned"]
@ -623,25 +538,38 @@ class DownloadStatusPage(DownloadResultsRendererMixin, rend.Page):
speed, rtt = "","" speed, rtt = "",""
if r_ev["finish_time"] is not None: if r_ev["finish_time"] is not None:
rtt = r_ev["finish_time"] - r_ev["start_time"] - r_ev["paused_time"] rtt = r_ev["finish_time"] - r_ev["start_time"] - r_ev["paused_time"]
speed = self.render_rate(None, compute_rate(bytes, rtt)) speed = abbreviate_rate(compute_rate(bytes, rtt))
rtt = self.render_time(None, rtt) rtt = abbreviate_time(rtt)
paused = self.render_time(None, r_ev["paused_time"]) paused = abbreviate_time(r_ev["paused_time"])
t[T.tr[T.td["[%d:+%d]" % (start, length)], readtag(tags.tr(
T.td[srt(r_ev["start_time"])], T.td[srt(r_ev["finish_time"])], tags.td("[%d:+%d]" % (start, length)),
T.td[bytes], T.td[rtt], tags.td(srt(r_ev["start_time"])),
T.td[decrypt_time], T.td[paused], tags.td(srt(r_ev["finish_time"])),
T.td[speed], tags.td(str(bytes)),
]] tags.td(rtt),
tags.td(decrypt_time),
tags.td(paused),
tags.td(speed),
))
l[T.h2["Read Events:"], t] evtag(tags.h2("Read Events:"), readtag)
l[T.br(clear="all")] evtag(tags.br(clear="all"))
t = T.table(align="left",class_="status-download-events") # "Segment Events" table.
t[T.tr[T.th["segnum"], T.th["start"], T.th["active"], T.th["finish"], segtag = tags.table(align="left",class_="status-download-events")
T.th["range"],
T.th["decodetime"], T.th["segtime"], T.th["speed"]]] segtag(tags.tr(
for s_ev in self.download_status.segment_events: tags.th("segnum"),
tags.th("start"),
tags.th("active"),
tags.th("finish"),
tags.th("range"),
tags.th("decodetime"),
tags.th("segtime"),
tags.th("speed")))
for s_ev in self._download_status.segment_events:
range_s = "-" range_s = "-"
segtime_s = "-" segtime_s = "-"
speed = "-" speed = "-"
@ -649,10 +577,10 @@ class DownloadStatusPage(DownloadResultsRendererMixin, rend.Page):
if s_ev["finish_time"] is not None: if s_ev["finish_time"] is not None:
if s_ev["success"]: if s_ev["success"]:
segtime = s_ev["finish_time"] - s_ev["active_time"] segtime = s_ev["finish_time"] - s_ev["active_time"]
segtime_s = self.render_time(None, segtime) segtime_s = abbreviate_time(segtime)
seglen = s_ev["segment_length"] seglen = s_ev["segment_length"]
range_s = "[%d:+%d]" % (s_ev["segment_start"], seglen) range_s = "[%d:+%d]" % (s_ev["segment_start"], seglen)
speed = self.render_rate(None, compute_rate(seglen, segtime)) speed = abbreviate_rate(compute_rate(seglen, segtime))
decode_time = self._rate_and_time(seglen, s_ev["decode_time"]) decode_time = self._rate_and_time(seglen, s_ev["decode_time"])
else: else:
# error # error
@ -661,76 +589,213 @@ class DownloadStatusPage(DownloadResultsRendererMixin, rend.Page):
# not finished yet # not finished yet
pass pass
t[T.tr[T.td["seg%d" % s_ev["segment_number"]], segtag(tags.tr(
T.td[srt(s_ev["start_time"])], tags.td("seg%d" % s_ev["segment_number"]),
T.td[srt(s_ev["active_time"])], tags.td(srt(s_ev["start_time"])),
T.td[srt(s_ev["finish_time"])], tags.td(srt(s_ev["active_time"])),
T.td[range_s], tags.td(srt(s_ev["finish_time"])),
T.td[decode_time], tags.td(range_s),
T.td[segtime_s], T.td[speed]]] tags.td(decode_time),
tags.td(segtime_s),
tags.td(speed)))
l[T.h2["Segment Events:"], t] evtag(tags.h2("Segment Events:"), segtag)
l[T.br(clear="all")] evtag(tags.br(clear="all"))
t = T.table(align="left",class_="status-download-events")
t[T.tr[T.th["serverid"], T.th["shnum"], T.th["range"], # "Requests" table.
T.th["txtime"], T.th["rxtime"], reqtab = tags.table(align="left",class_="status-download-events")
T.th["received"], T.th["RTT"]]]
for r_ev in self.download_status.block_requests: reqtab(tags.tr(
tags.th("serverid"),
tags.th("shnum"),
tags.th("range"),
tags.th("txtime"),
tags.th("rxtime"),
tags.th("received"),
tags.th("RTT")))
for r_ev in self._download_status.block_requests:
server = r_ev["server"] server = r_ev["server"]
rtt = None rtt = None
if r_ev["finish_time"] is not None: if r_ev["finish_time"] is not None:
rtt = r_ev["finish_time"] - r_ev["start_time"] rtt = r_ev["finish_time"] - r_ev["start_time"]
color = _color(server) color = _color(server)
t[T.tr(style="background: %s" % color)[ reqtab(tags.tr(style="background: %s" % color)
T.td[server.get_name()], T.td[r_ev["shnum"]], (
T.td["[%d:+%d]" % (r_ev["start"], r_ev["length"])], tags.td(server.get_name()),
T.td[srt(r_ev["start_time"])], T.td[srt(r_ev["finish_time"])], tags.td(str(r_ev["shnum"])),
T.td[r_ev["response_length"] or ""], tags.td("[%d:+%d]" % (r_ev["start"], r_ev["length"])),
T.td[self.render_time(None, rtt)], tags.td(srt(r_ev["start_time"])),
]] tags.td(srt(r_ev["finish_time"])),
tags.td(str(r_ev["response_length"]) or ""),
tags.td(abbreviate_time(rtt)),
))
l[T.h2["Requests:"], t] evtag(tags.h2("Requests:"), reqtab)
l[T.br(clear="all")] evtag(tags.br(clear="all"))
return l return evtag
def render_results(self, ctx, data): @renderer
d = self.download_results() def results(self, req, tag):
def _got_results(results): if self.download_results():
if results: return tag
return ctx.tag
return "" return ""
d.addCallback(_got_results)
return d
def render_started(self, ctx, data): @renderer
started_s = render_time(data.get_started()) def started(self, req, tag):
return started_s + " (%s)" % data.get_started() started_s = render_time(self._download_status.get_started())
return tag(started_s + " (%s)" % self._download_status.get_started())
def render_si(self, ctx, data): @renderer
si_s = base32.b2a_or_none(data.get_storage_index()) def si(self, req, tag):
si_s = base32.b2a_or_none(self._download_status.get_storage_index())
if si_s is None: if si_s is None:
si_s = "(None)" si_s = "(None)"
return si_s return tag(si_s)
def render_helper(self, ctx, data): @renderer
return {True: "Yes", def helper(self, req, tag):
False: "No"}[data.using_helper()] return tag({True: "Yes",
False: "No"}[self._download_status.using_helper()])
def render_total_size(self, ctx, data): @renderer
size = data.get_size() def total_size(self, req, tag):
size = self._download_status.get_size()
if size is None: if size is None:
return "(unknown)" return "(unknown)"
return size return tag(str(size))
def render_progress(self, ctx, data): @renderer
progress = data.get_progress() def progress(self, req, tag):
progress = self._download_status.get_progress()
# TODO: make an ascii-art bar # TODO: make an ascii-art bar
return "%.1f%%" % (100.0 * progress) return tag("%.1f%%" % (100.0 * progress))
def render_status(self, ctx, data): @renderer
return data.get_status() def status(self, req, tag):
return tag(self._download_status.get_status())
@renderer
def servers_used(self, req, tag):
servers_used = self.download_results().servers_used
if not servers_used:
return ""
peerids_s = ", ".join(["[%s]" % idlib.shortnodeid_b2a(peerid)
for peerid in servers_used])
return tags.li("Servers Used: ", peerids_s)
@renderer
def servermap(self, req, tag):
servermap = self.download_results().servermap
if not servermap:
return tag("None")
ul = tags.ul()
for peerid in sorted(servermap.keys()):
peerid_s = idlib.shortnodeid_b2a(peerid)
shares_s = ",".join(["#%d" % shnum
for shnum in servermap[peerid]])
ul(tags.li("[%s] has share%s: %s" % (peerid_s,
plural(servermap[peerid]),
shares_s)))
return ul
@renderer
def problems(self, req, tag):
server_problems = self.download_results().server_problems
if not server_problems:
return ""
ul = tags.ul()
for peerid in sorted(server_problems.keys()):
peerid_s = idlib.shortnodeid_b2a(peerid)
ul(tags.li("[%s]: %s" % (peerid_s, server_problems[peerid])))
return tags.li("Server Problems:", ul)
@renderer
def file_size(self, req, tag):
return tag(str(self.download_results().file_size))
def _get_time(self, name):
if self.download_results().timings:
return self.download_results().timings.get(name)
return None
@renderer
def time_total(self, req, tag):
return tag(str(self._get_time("total")))
@renderer
def time_peer_selection(self, req, tag):
return tag(str(self._get_time("peer_selection")))
@renderer
def time_uri_extension(self, req, tag):
return tag(str(self._get_time("uri_extension")))
@renderer
def time_hashtrees(self, req, tag):
return tag(str(self._get_time("hashtrees")))
@renderer
def time_segments(self, req, tag):
return tag(str(self._get_time("segments")))
@renderer
def time_cumulative_fetch(self, req, tag):
return tag(str(self._get_time("cumulative_fetch")))
@renderer
def time_cumulative_decode(self, req, tag):
return tag(str(self._get_time("cumulative_decode")))
@renderer
def time_cumulative_decrypt(self, req, tag):
return tag(str(self._get_time("cumulative_decrypt")))
@renderer
def time_paused(self, req, tag):
return tag(str(self._get_time("paused")))
def _get_rate(self, name):
r = self.download_results()
file_size = r.file_size
duration = None
if r.timings:
duration = r.timings.get(name)
return compute_rate(file_size, duration)
@renderer
def rate_total(self, req, tag):
return tag(str(self._get_rate("total")))
@renderer
def rate_segments(self, req, tag):
return tag(str(self._get_rate("segments")))
@renderer
def rate_fetch(self, req, tag):
return tag(str(self._get_rate("cumulative_fetch")))
@renderer
def rate_decode(self, req, tag):
return tag(str(self._get_rate("cumulative_decode")))
@renderer
def rate_decrypt(self, req, tag):
return tag(str(self._get_rate("cumulative_decrypt")))
@renderer
def server_timings(self, req, tag):
per_server = self._get_time("fetch_per_server")
if per_server is None:
return ""
ul = tags.ul()
for peerid in sorted(per_server.keys()):
peerid_s = idlib.shortnodeid_b2a(peerid)
times_s = ", ".join([abbreviate_time(t)
for t in per_server[peerid]])
ul(tags.li("[%s]: %s" % (peerid_s, times_s)))
return tags.li("Per-Server Segment Fetch Response Times: ", ul)
class RetrieveStatusPage(MultiFormatResource): class RetrieveStatusPage(MultiFormatResource):

View File

@ -7,7 +7,7 @@
twisted = 1 twisted = 1
[tox] [tox]
envlist = {py27,pypy27}{-coverage,} envlist = {py27,pypy27,py36}{-coverage,}
minversion = 2.4 minversion = 2.4
[testenv] [testenv]
@ -45,8 +45,13 @@ usedevelop = False
# tests. # tests.
extras = test extras = test
commands = commands =
tahoe --version
trial {env:TAHOE_LAFS_TRIAL_ARGS:--rterrors} {posargs:allmydata} trial {env:TAHOE_LAFS_TRIAL_ARGS:--rterrors} {posargs:allmydata}
tahoe --version
[testenv:py36]
# git inside of ratchet.sh needs $HOME.
passenv = HOME
commands = {toxinidir}/misc/python3/ratchet.sh
[testenv:integration] [testenv:integration]
setenv = setenv =