Merge 'origin/master' into 3320.github-actions-windows-integration-tests

This commit is contained in:
Sajith Sasidharan 2020-07-21 13:50:47 -04:00
commit 74a77a48cc
85 changed files with 3102 additions and 1455 deletions

3
.gitignore vendored
View File

@ -44,6 +44,9 @@ zope.interface-*.egg
/docs/_build/
/coverage.xml
/.hypothesis/
/eliot.log
/misc/python3/results.xml
/misc/python3/results.subunit2
# This is the plaintext of the private environment needed for some CircleCI
# operations. It's never supposed to be checked in.

View File

@ -1,7 +1,7 @@
sudo: false
language: python
cache: pip
dist: trusty
dist: xenial
before_cache:
- rm -f $HOME/.cache/pip/log/debug.log
git:
@ -12,30 +12,14 @@ env:
- TAHOE_LAFS_HYPOTHESIS_PROFILE=ci
install:
# ~/.local/bin is on $PATH by default, but on OS-X, --user puts it elsewhere
- if [ "${TRAVIS_OS_NAME}" = "osx" ]; then export PATH=$HOME/Library/Python/2.7/bin:$PATH; fi
- if [ "${TRAVIS_OS_NAME}" = "osx" ]; then wget https://bootstrap.pypa.io/get-pip.py && sudo python ./get-pip.py; fi
- pip list
- if [ "${TRAVIS_OS_NAME}" = "osx" ]; then pip install --user --upgrade codecov tox setuptools; fi
- if [ "${TRAVIS_OS_NAME}" = "linux" ]; then pip install --upgrade codecov tox setuptools; fi
- pip install --upgrade tox setuptools virtualenv
- echo $PATH; which python; which pip; which tox
- python misc/build_helpers/show-tool-versions.py
script:
- |
set -eo pipefail
if [ "${T}" = "py35" ]; then
python3 -m compileall -f -x tahoe-depgraph.py .
else
tox -e ${T}
fi
# To verify that the resultant PyInstaller-generated binary executes
# cleanly (i.e., that it terminates with an exit code of 0 and isn't
# failing due to import/packaging-related errors, etc.).
if [ "${T}" = "pyinstaller" ]; then dist/Tahoe-LAFS/tahoe --version; fi
after_success:
- if [ "${T}" = "coverage" ]; then codecov; fi
tox -e ${T}
notifications:
email: false
@ -50,28 +34,7 @@ notifications:
matrix:
include:
- os: linux
python: '2.7'
env: T=coverage LANG=en_US.UTF-8
- os: linux
python: '2.7'
env: T=codechecks LANG=en_US.UTF-8
- os: linux
python: '2.7'
env: T=pyinstaller LANG=en_US.UTF-8
- os: linux
python: '2.7'
env: T=py27 LANG=C
- os: osx
python: '2.7'
env: T=py27 LANG=en_US.UTF-8
language: generic # "python" is not available on OS-X
- os: osx
python: '2.7'
env: T=pyinstaller LANG=en_US.UTF-8
language: generic # "python" is not available on OS-X
# this is a "lint" job that checks for python3 compatibility
- os: linux
python: '3.5'
env: T=py35
python: '3.6'
env: T=py36
fast_finish: true

View File

@ -6,7 +6,7 @@ default:
PYTHON=python
export PYTHON
PYFLAKES=pyflakes
PYFLAKES=flake8
export PYFLAKES
SOURCES=src/allmydata static misc setup.py

View File

@ -10,7 +10,8 @@ function correctly, preserving your privacy and security.
For full documentation, please see
http://tahoe-lafs.readthedocs.io/en/latest/ .
|readthedocs| |travis| |circleci| |codecov|
|Contributor Covenant| |readthedocs| |travis| |circleci| |codecov|
INSTALLING
==========
@ -105,3 +106,7 @@ slides.
.. |codecov| image:: https://codecov.io/github/tahoe-lafs/tahoe-lafs/coverage.svg?branch=master
:alt: test coverage percentage
:target: https://codecov.io/github/tahoe-lafs/tahoe-lafs?branch=master
.. |Contributor Covenant| image:: https://img.shields.io/badge/Contributor%20Covenant-v2.0%20adopted-ff69b4.svg
:alt: code of conduct
:target: docs/CODE_OF_CONDUCT.md

54
docs/CODE_OF_CONDUCT.md Normal file
View File

@ -0,0 +1,54 @@
# Contributor Code of Conduct
As contributors and maintainers of this project, and in the interest of
fostering an open and welcoming community, we pledge to respect all people who
contribute through reporting issues, posting feature requests, updating
documentation, submitting pull requests or patches, and other activities.
We are committed to making participation in this project a harassment-free
experience for everyone, regardless of level of experience, gender, gender
identity and expression, sexual orientation, disability, personal appearance,
body size, race, ethnicity, age, religion, or nationality.
Examples of unacceptable behavior by participants include:
* The use of sexualized language or imagery
* Personal attacks
* Trolling or insulting/derogatory comments
* Public or private harassment
* Publishing others' private information, such as physical or electronic
addresses, without explicit permission
* Other unethical or unprofessional conduct
Project maintainers have the right and responsibility to remove, edit, or
reject comments, commits, code, wiki edits, issues, and other contributions
that are not aligned to this Code of Conduct, or to ban temporarily or
permanently any contributor for other behaviors that they deem inappropriate,
threatening, offensive, or harmful.
By adopting this Code of Conduct, project maintainers commit themselves to
fairly and consistently applying these principles to every aspect of managing
this project. Project maintainers who do not follow or enforce the Code of
Conduct may be permanently removed from the project team.
This Code of Conduct applies both within project spaces and in public spaces
when an individual is representing the project or its community.
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported by contacting a project maintainer (see below). All
complaints will be reviewed and investigated and will result in a response that
is deemed necessary and appropriate to the circumstances. Maintainers are
obligated to maintain confidentiality with regard to the reporter of an
incident.
The following community members have made themselves available for conduct issues:
- Jean-Paul Calderone (jean-paul at leastauthority dot com)
- meejah (meejah at meejah dot ca)
This Code of Conduct is adapted from the [Contributor Covenant][homepage],
version 1.3.0, available at
[http://contributor-covenant.org/version/1/3/0/][version]
[homepage]: http://contributor-covenant.org
[version]: http://contributor-covenant.org/version/1/3/0/

View File

@ -24,6 +24,7 @@ Contents:
known_issues
../.github/CONTRIBUTING
CODE_OF_CONDUCT
servers
helper

View File

@ -380,13 +380,31 @@ def chutney(reactor, temp_dir):
proto,
'git',
(
'git', 'clone', '--depth=1',
'git', 'clone',
'https://git.torproject.org/chutney.git',
chutney_dir,
),
env=environ,
)
pytest_twisted.blockon(proto.done)
# XXX: Here we reset Chutney to the last revision known to work
# with Python 2, as a workaround for Chutney moving to Python 3.
# When this is no longer necessary, we will have to drop this and
# add '--depth=1' back to the above 'git clone' subprocess.
proto = _DumpOutputProtocol(None)
reactor.spawnProcess(
proto,
'git',
(
'git', '-C', chutney_dir,
'reset', '--hard',
'99bd06c7554b9113af8c0877b6eca4ceb95dcbaa'
),
env=environ,
)
pytest_twisted.blockon(proto.done)
return chutney_dir

View File

@ -219,23 +219,21 @@ def test_status(alice):
found_upload = False
found_download = False
for href in hrefs:
if href.startswith(u"/") or not href:
if href == u"/" or not href:
continue
resp = requests.get(
util.node_url(alice.node_dir, u"status/{}".format(href)),
)
if href.startswith(u'up'):
resp = requests.get(util.node_url(alice.node_dir, href))
if href.startswith(u"/status/up"):
assert "File Upload Status" in resp.content
if "Total Size: {}".format(len(FILE_CONTENTS)) in resp.content:
found_upload = True
elif href.startswith(u'down'):
elif href.startswith(u"/status/down"):
assert "File Download Status" in resp.content
if "Total Size: {}".format(len(FILE_CONTENTS)) in resp.content:
found_download = True
# download the specialized event information
resp = requests.get(
util.node_url(alice.node_dir, u"status/{}/event_json".format(href)),
util.node_url(alice.node_dir, u"{}/event_json".format(href)),
)
js = json.loads(resp.content)
# there's usually just one "read" operation, but this can handle many ..

View File

@ -0,0 +1,13 @@
allmydata.test.mutable.test_exceptions.Exceptions.test_repr
allmydata.test.test_deferredutil.DeferredUtilTests.test_failure
allmydata.test.test_deferredutil.DeferredUtilTests.test_gather_results
allmydata.test.test_deferredutil.DeferredUtilTests.test_success
allmydata.test.test_deferredutil.DeferredUtilTests.test_wait_for_delayed_calls
allmydata.test.test_humanreadable.HumanReadable.test_repr
allmydata.test.test_observer.Observer.test_lazy_oneshot
allmydata.test.test_observer.Observer.test_observerlist
allmydata.test.test_observer.Observer.test_oneshot
allmydata.test.test_observer.Observer.test_oneshot_fireagain
allmydata.test.test_python3.Python3PortingEffortTests.test_finished_porting
allmydata.test.test_python3.Python3PortingEffortTests.test_ported_modules_distinct
allmydata.test.test_python3.Python3PortingEffortTests.test_ported_modules_exist

409
misc/python3/ratchet.py Executable file
View File

@ -0,0 +1,409 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
'''Ratchet up passing tests, or ratchet down failing tests.
Usage:
ratchet.py <"up" or "down"> <junitxml file path> <tracking file path>
This script helps when you expect a large test suite to fail spectacularly in
some environment, and you want to gradually improve the situation with minimal
impact to forward development of the same codebase for other environments. The
initial and primary usecase is porting from Python 2 to Python 3.
The idea is to emit JUnit XML from your test runner, and then invoke ratchet.py
to consume this XML output and operate on a so-called "tracking" file. When
ratcheting up passing tests, the tracking file will contain a list of tests,
one per line, that passed. When ratcheting down, the tracking file contains a
list of failing tests. On each subsequent run, ratchet.py will compare the
prior results in the tracking file with the new results in the XML, and will
report on both welcome and unwelcome changes. It will modify the tracking file
in the case of welcome changes, and therein lies the ratcheting.
The exit codes are:
0 - no changes observed
1 - changes observed, whether welcome or unwelcome
2 - invocation error
If <junitxml file path> does not exist, you'll get a FileNotFoundError:
>>> _test('up', None, None) # doctest: +ELLIPSIS
Traceback (most recent call last):
...
FileNotFoundError: ...
If <tracking file path> does not exist, that's fine:
>>> _test('up', '1', None)
Some tests not required to pass did:
c0.t
Conveniently, they have been added to `<tracking_path>` for you. Perhaps commit that?
Eep! 0 test(s) were required to pass, but instead 1 did. 🐭
Same if you're ratcheting down:
>>> _test('down', '1', None)
All and only tests expected to fail did. 💃
If the test run has the same output as last time, it's all good:
>>> _test('up', '01001110', '01001110')
All and only tests required to pass did. 💃
>>> _test('down', '01001110', '10110001')
All and only tests expected to fail did. 💃
If there's a welcome change, that's noted:
>>> _test('up', '0101', '0100')
Some tests not required to pass did:
c3.t
Conveniently, they have been added to `<tracking_path>` for you. Perhaps commit that?
Eep! 1 test(s) were required to pass, but instead 2 did. 🐭
>>> _test('down', '0011', '1110')
Some tests expected to fail didn't:
c2.t
Conveniently, they have been removed from `<tracking_path>` for you. Perhaps commit that?
Eep! 3 test(s) were expected to fail, but instead 2 did. 🐭
And if there is an unwelcome change, that is noted as well:
>>> _test('up', '1101', '1111')
Some tests required to pass didn't:
c2.t
Eep! 4 test(s) were required to pass, but instead 3 did. 🐭
>>> _test('down', '0000', '1101')
Some tests not expected to fail did:
c2.t
Eep! 3 test(s) were expected to fail, but instead 4 did. 🐭
And if there are both welcome and unwelcome changes, they are both noted:
>>> _test('up', '1101', '1011')
Some tests not required to pass did:
c1.t
Conveniently, they have been added to `<tracking_path>` for you. Perhaps commit that?
Some tests required to pass didn't:
c2.t
Eep! 3 test(s) were required to pass, but instead 3 did. 🐭
>>> _test('down', '0100', '1100')
Some tests not expected to fail did:
c2.t
c3.t
Some tests expected to fail didn't:
c1.t
Conveniently, they have been removed from `<tracking_path>` for you. Perhaps commit that?
Eep! 2 test(s) were expected to fail, but instead 3 did. 🐭
To test ratchet.py itself:
python3 -m doctest ratchet.py
'''
from __future__ import absolute_import, division, print_function, unicode_literals
import io
import os
import re
import sys
import tempfile
import xml.etree.ElementTree as Etree
class JUnitXMLFile(object):
    '''Represent a file containing test results in JUnit XML format.

    >>> eg = _mktemp_junitxml('0100111')
    >>> results = JUnitXMLFile(eg.name).parse()
    >>> results.failed
    ['c0.t', 'c2.t', 'c3.t']
    >>> results.passed
    ['c1.t', 'c4.t', 'c5.t', 'c6.t']
    '''

    def __init__(self, filepath):
        # Path of the JUnit XML document this object will parse.
        self.filepath = filepath
        # Dotted names of failing test cases, in document order.
        self.failed = []
        # Maps a failure message to the list of test names that produced it.
        self.failed_aggregates = {}
        # Reserved for captured stderr; not populated by parse().
        self.stderr_output = []
        # Dotted names of passing test cases, in document order.
        self.passed = []
        # Parsed ElementTree; also serves as the "already parsed" flag.
        self._tree = None

    def parse(self):
        '''Parse the XML document once, classifying every <testcase>.

        Returns self so calls can be chained; raises RuntimeError on a
        second invocation.
        '''
        if self._tree:
            raise RuntimeError('already parsed')
        self._tree = Etree.parse(self.filepath)
        for element in self._tree.findall('testcase'):
            self.process_testcase(element)
        return self

    def process_testcase(self, case):
        '''Sort a single <testcase> element into passed or failed.'''
        key = self.case_key(case)
        # Look at the children but discard captured stderr output; any
        # remaining child element (e.g. <failure>) means a non-pass.
        outcomes = [child for child in case if not child.tag == 'system-err']
        count = len(outcomes)
        if count > 1:
            raise RuntimeError(f'multiple results for {key}: {outcomes}')
        if count == 1:
            outcome = outcomes.pop()
            self.failed.append(key)
            message = outcome.get('message')
            self.failed_aggregates.setdefault(message, []).append(key)
        else:
            self.passed.append(key)

    @staticmethod
    def case_key(case):
        '''Build the dotted test name from a <testcase> element.'''
        return f'{case.get("classname")}.{case.get("name")}'

    def report(self, details=False):
        '''Print each failure message with its tests, most frequent
        message first. (`details` is currently unused.)'''
        by_frequency = sorted(
            self.failed_aggregates.items(),
            key=lambda item: len(item[1]),
            reverse=True)
        for message, cases in by_frequency:
            print(f'# {message}')
            for name in cases:
                print(f' - {name}')
def load_previous_results(txt):
    '''Return the set of test names recorded in the tracking file.

    Blank lines and lines starting with '#' are ignored. A missing
    tracking file is treated as an empty result set.

    :param txt: path to the tracking file (one dotted test name per line).
    :returns: set of test-name strings.
    '''
    try:
        # Use a context manager so the handle is closed promptly; the
        # original `open(txt).read()` left it to the garbage collector.
        with open(txt) as f:
            previous_results = f.read()
    except FileNotFoundError:
        previous_results = ''
    parsed = set()
    for line in previous_results.splitlines():
        # Skip blanks and comment lines.
        if not line or line.startswith('#'):
            continue
        parsed.add(line)
    return parsed
def print_tests(tests):
    '''Print each test name on its own indented line, in sorted order.'''
    for name in sorted(tests):
        print(' ', name)
def ratchet_up_passing(tracking_path, tests):
    '''Merge newly-passing ``tests`` into the tracking file, sorted.

    The file is created if missing; existing entries are kept, so the
    set of required-to-pass tests only ever grows.

    :param tracking_path: path to the tracking file.
    :param tests: iterable of dotted test names that currently pass.
    '''
    try:
        # Context managers close the handles; the original leaked both
        # the read and the write handle.
        with open(tracking_path, 'r') as f:
            old = set(f)
    except FileNotFoundError:
        old = set()
    new = set(t + '\n' for t in tests)
    with open(tracking_path, 'w') as f:
        f.writelines(sorted(old | new))
def ratchet_down_failing(tracking_path, tests):
    '''Overwrite the tracking file with the currently-failing ``tests``.

    Unlike ratcheting up, prior contents are discarded: the expected-to-
    fail list only ever shrinks toward the current failure set.

    :param tracking_path: path to the tracking file.
    :param tests: iterable of dotted test names that currently fail.
    '''
    lines = set(t + '\n' for t in tests)
    # Context manager closes the handle; the original leaked it.
    with open(tracking_path, 'w') as f:
        f.writelines(sorted(lines))
def main(direction, junitxml_path, tracking_path):
    '''Takes a string indicating which direction to ratchet, "up" or "down,"
    and two paths, one to test-runner output in JUnit XML format, the other to
    a file tracking test results (one test case dotted name per line). Walk the
    former looking for the latter, and react appropriately.

    >>> inp = _mktemp_junitxml('0100111')
    >>> out = _mktemp_tracking('0000000')
    >>> _test_main('up', inp.name, out.name)
    Some tests not required to pass did:
      c1.t
      c4.t
      c5.t
      c6.t
    Conveniently, they have been added to `<tracking_path>` for you. Perhaps commit that?
    Eep! 0 test(s) were required to pass, but instead 4 did. 🐭
    '''
    results = JUnitXMLFile(junitxml_path).parse()
    if tracking_path == '...':
        # Shortcut to aid in debugging XML parsing issues.
        results.report()
        return
    previous = load_previous_results(tracking_path)
    # Ratcheting up tracks passing tests; ratcheting down tracks failures.
    current = set(results.passed if direction == 'up' else results.failed)
    subjunctive = {'up': 'required to pass', 'down': 'expected to fail'}[direction]
    # (Removed an unused `ratchet = None` local left over from an earlier
    # revision.)
    too_many = current - previous
    if too_many:
        print(f'Some tests not {subjunctive} did:')
        print_tests(too_many)
        if direction == 'up':
            # Too many passing tests is good -- let's do more of those!
            ratchet_up_passing(tracking_path, current)
            print(f'Conveniently, they have been added to `{tracking_path}` for you. Perhaps commit that?')
    not_enough = previous - current
    if not_enough:
        print(f'Some tests {subjunctive} didn\'t:')
        print_tests(not_enough)
        if direction == 'down':
            # Not enough failing tests is good -- let's do more of those!
            ratchet_down_failing(tracking_path, current)
            print(f'Conveniently, they have been removed from `{tracking_path}` for you. Perhaps commit that?')
    if too_many or not_enough:
        print(f'Eep! {len(previous)} test(s) were {subjunctive}, but instead {len(current)} did. 🐭')
        return 1
    print(f'All and only tests {subjunctive} did. 💃')
    return 0
# When called as an executable ...
if __name__ == '__main__':
    try:
        # Exactly three positional arguments are required; unpacking
        # raises ValueError when fewer are given.
        direction, junitxml_path, tracking_path = sys.argv[1:4]
        if direction not in ('up', 'down'):
            raise ValueError
    except ValueError:
        # Bad invocation: show the usage header (the first six lines of
        # the module docstring) with the actual program name substituted.
        doc = '\n'.join(__doc__.splitlines()[:6])
        doc = re.sub(' ratchet.py', f' {sys.argv[0]}', doc)
        print(doc, file=sys.stderr)
        exit_code = 2
    else:
        exit_code = main(direction, junitxml_path, tracking_path)
    sys.exit(exit_code)
# Helpers for when called under doctest ...
def _test(*a):
    '''Doctest helper: build fixture files from spec strings via _mk(),
    then run _test_main() against them. See _mk() for the arguments.'''
    return _test_main(*_mk(*a))
def _test_main(direction, junitxml, tracking):
    '''Takes a string 'up' or 'down' and paths to (or open file objects for)
    the JUnit XML and tracking files to use for this test run. Captures and
    emits stdout (slightly modified) for inspection via doctest.'''
    # Accept either a path string or a file-like object with a .name.
    junitxml_path = junitxml.name if hasattr(junitxml, 'name') else junitxml
    tracking_path = tracking.name if hasattr(tracking, 'name') else tracking
    # Temporarily swap stdout for a buffer so main()'s output can be
    # normalized before doctest sees it.
    old_stdout = sys.stdout
    sys.stdout = io.StringIO()
    try:
        main(direction, junitxml_path, tracking_path)
    finally:
        sys.stdout.seek(0)
        out = sys.stdout.read()
        # Replace the unpredictable temp-file path (quoted in backticks)
        # so the doctest expectations can match it literally.
        out = re.sub('`.*?`', '`<tracking_path>`', out).strip()
        sys.stdout = old_stdout
    print(out)
class _PotentialFile(object):
    '''Represent a file that we are able to create but which doesn't exist yet,
    and which, if we create it, will be automatically torn down when the test
    run is over.'''
    def __init__(self, filename):
        # The TemporaryDirectory removes itself (and anything created
        # inside it) when this object is garbage collected, giving the
        # automatic teardown promised above.
        self.d = tempfile.TemporaryDirectory()
        # Path of the not-yet-existing file, mirroring a real file
        # object's .name attribute.
        self.name = os.path.join(self.d.name, filename)
def _mk(direction, spec_junitxml, spec_tracking):
    '''Takes a string 'up' or 'down' and two bit strings specifying the state
    of the JUnit XML results file and the tracking file to set up for this test
    case. Returns the direction (unharmed) and two file-ish objects.

    If a spec string is None the corresponding return value will be a
    _PotentialFile object, which has a .name attribute (like a true file
    object) that points to a file that does not exist, but could.

    The reason not to simply return the path in all cases is that the file
    objects are actually temporary file objects that destroy the underlying
    file when they go out of scope, and we want to keep the underlying file
    around until the end of the test run.'''
    if None not in (spec_junitxml, spec_tracking):
        if len(spec_junitxml) != len(spec_tracking):
            # BUG FIX: the message was a plain string missing the `f`
            # prefix, so the spec values were never interpolated.
            raise ValueError(
                f'if both given, must be the same length: '
                f'`{spec_junitxml}` and `{spec_tracking}`')
    if spec_junitxml is None:
        junitxml_fp = _PotentialFile('results.xml')
    else:
        junitxml_fp = _mktemp_junitxml(spec_junitxml)
    if spec_tracking is None:
        tracking_fp = _PotentialFile('tracking')
    else:
        tracking_fp = _mktemp_tracking(spec_tracking)
    return direction, junitxml_fp, tracking_fp
def _mktemp_junitxml(spec):
'''Test helper to generate a raw JUnit XML file.
>>> fp = _mktemp_junitxml('00101')
>>> open(fp.name).read()[:11]
'<testsuite>'
'''
fp = tempfile.NamedTemporaryFile()
fp.write(b'<testsuite>')
passed = '''\
<testcase classname="c{i}" name="t"></testcase>
'''
failed = '''\
<testcase classname="c{i}" name="t">
<failure>Traceback (most recent call last):
File "/foo/bar/baz/buz.py", line 1, in &lt;module>
NameError: name 'heck' is not defined
</failure>
</testcase>
'''
i = 0
for c in spec:
if c == '0':
out = failed
elif c == '1':
out = passed
else:
raise ValueError(f'bad c: `{c}`')
fp.write(out.format(i=i).encode('utf8'))
i += 1
fp.write(b'</testsuite>')
fp.flush()
return fp
def _mktemp_tracking(spec):
'''Test helper to prefabricate a tracking file.
>>> fp = _mktemp_tracking('01101')
>>> print(open(fp.name).read()[:-1])
c1.t
c2.t
c4.t
'''
fp = tempfile.NamedTemporaryFile()
i = 0
for c in spec:
if c == '0':
pass
elif c == '1':
fp.write(f'c{i}.t\n'.encode('utf8'))
else:
raise ValueError(f'bad c: `{c}`')
i += 1
fp.flush()
return fp

37
misc/python3/ratchet.sh Executable file
View File

@ -0,0 +1,37 @@
#!/usr/bin/env bash
# Run the full trial test suite under Python 3, convert the results to
# JUnit XML, and ratchet the passing-test tracking file up.
set -euxo pipefail

tracking_filename="ratchet-passing"

# Start somewhere predictable.
cd "$(dirname "$0")"
base=$(pwd)

# Actually, though, trial outputs some things that are only gitignored in the project root.
cd "../.."

# Since both of the next calls are expected to exit non-0, relax our guard.
set +e
SUBUNITREPORTER_OUTPUT_PATH="$base/results.subunit2" trial --reporter subunitv2-file allmydata
subunit2junitxml < "$base/results.subunit2" > "$base/results.xml"
set -e

# Okay, now we're clear.
cd "$base"

# Make sure ratchet.py itself is clean.
python3 -m doctest ratchet.py

# Now see about Tahoe-LAFS (also expected to fail) ...
set +e
python3 ratchet.py up results.xml "$tracking_filename"
code=$?
set -e

# Emit a diff of the tracking file, to aid in the situation where changes are
# not discovered until CI (where TERM might be `dumb`).
# BUG FIX: $TERM was unquoted and undefaulted; under `set -u` an unset
# TERM (common in CI) aborted the script right here.
if [ "${TERM:-dumb}" = 'dumb' ]; then
  export TERM=ansi
fi
git diff "$tracking_filename"

exit $code

View File

@ -60,7 +60,8 @@ class mymf(modulefinder.ModuleFinder):
self._depgraph[last_caller.__name__].add(fqname)
return r
def load_module(self, fqname, fp, pathname, (suffix, mode, type)):
def load_module(self, fqname, fp, pathname, additional_info):
(suffix, mode, type) = additional_info
r = modulefinder.ModuleFinder.load_module(
self, fqname, fp, pathname, (suffix, mode, type))
if r is not None:
@ -71,7 +72,7 @@ class mymf(modulefinder.ModuleFinder):
return {
'depgraph': {
name: dict.fromkeys(deps, 1)
for name, deps in self._depgraph.iteritems()},
for name, deps in self._depgraph.items()},
'types': self._types,
}
@ -101,20 +102,25 @@ def main(target):
filepath = path
moduleNames.append(reflect.filenameToModuleName(filepath))
with tempfile.NamedTemporaryFile() as tmpfile:
with tempfile.NamedTemporaryFile("w") as tmpfile:
for moduleName in moduleNames:
tmpfile.write('import %s\n' % moduleName)
tmpfile.flush()
mf.run_script(tmpfile.name)
with open('tahoe-deps.json', 'wb') as outfile:
with open('tahoe-deps.json', 'w') as outfile:
json_dump(mf.as_json(), outfile)
outfile.write('\n')
ported_modules_path = os.path.join(target, "src", "allmydata", "ported-modules.txt")
with open(ported_modules_path) as ported_modules:
port_status = dict.fromkeys((line.strip() for line in ported_modules), "ported")
with open('tahoe-ported.json', 'wb') as outfile:
ported_modules_path = os.path.join(target, "src", "allmydata", "util", "_python3.py")
with open(ported_modules_path) as f:
ported_modules = {}
exec(f.read(), ported_modules, ported_modules)
port_status = dict.fromkeys(
ported_modules["PORTED_MODULES"] + ported_modules["PORTED_TEST_MODULES"],
"ported"
)
with open('tahoe-ported.json', 'w') as outfile:
json_dump(port_status, outfile)
outfile.write('\n')

1
newsfragments/2755.other Normal file
View File

@ -0,0 +1 @@
The Tahoe-LAFS project has adopted a formal code of conduct.

0
newsfragments/3247.minor Normal file
View File

0
newsfragments/3254.minor Normal file
View File

0
newsfragments/3287.minor Normal file
View File

0
newsfragments/3288.minor Normal file
View File

1
newsfragments/3313.minor Normal file
View File

@ -0,0 +1 @@
Replace nevow with twisted.web in web.operations.OphandleTable

View File

@ -0,0 +1 @@
allmydata.testing.web, a new module, now offers a supported Python API for testing Tahoe-LAFS web API clients.

0
newsfragments/3324.other Normal file
View File

0
newsfragments/3325.other Normal file
View File

0
newsfragments/3329.other Normal file
View File

0
newsfragments/3330.minor Normal file
View File

0
newsfragments/3331.minor Normal file
View File

0
newsfragments/3332.minor Normal file
View File

0
newsfragments/3333.minor Normal file
View File

0
newsfragments/3334.minor Normal file
View File

0
newsfragments/3335.minor Normal file
View File

0
newsfragments/3338.minor Normal file
View File

0
newsfragments/3339.other Normal file
View File

0
newsfragments/3340.other Normal file
View File

0
newsfragments/3341.other Normal file
View File

0
newsfragments/3343.other Normal file
View File

View File

@ -0,0 +1 @@
Use last known revision of Chutney that is known to work with Python 2 for Tor integration tests.

View File

@ -0,0 +1 @@
Mutable files now use RSA exponent 65537

35
nix/future.nix Normal file
View File

@ -0,0 +1,35 @@
{ lib
, buildPythonPackage
, fetchPypi
}:
buildPythonPackage rec {
pname = "future";
version = "0.18.2";
src = fetchPypi {
inherit pname version;
sha256 = "sha256:0zakvfj87gy6mn1nba06sdha63rn4njm7bhh0wzyrxhcny8avgmi";
};
doCheck = false;
meta = {
description = "Clean single-source support for Python 3 and 2";
longDescription = ''
python-future is the missing compatibility layer between Python 2 and
Python 3. It allows you to use a single, clean Python 3.x-compatible
codebase to support both Python 2 and Python 3 with minimal overhead.
It provides future and past packages with backports and forward ports
of features from Python 3 and 2. It also comes with futurize and
pasteurize, customized 2to3-based scripts that helps you to convert
either Py2 or Py3 code easily to support both Python 2 and 3 in a
single clean Py3-style codebase, module by module.
'';
homepage = https://python-future.org;
downloadPage = https://github.com/PythonCharmers/python-future/releases;
license = with lib.licenses; [ mit ];
maintainers = with lib.maintainers; [ prikhi ];
};
}

View File

@ -10,6 +10,14 @@ self: super: {
# NixOS autobahn package has trollius as a dependency, although
# it is optional. Trollius is unmaintained and fails on CI.
autobahn = python-super.callPackage ./autobahn.nix { };
# Porting to Python 3 is greatly aided by the future package. A
# slightly newer version than appears in nixos 19.09 is helpful.
future = python-super.callPackage ./future.nix { };
# Need version of pyutil that supports Python 3. The version in 19.09
# is too old.
pyutil = python-super.callPackage ./pyutil.nix { };
};
};
}

48
nix/pyutil.nix Normal file
View File

@ -0,0 +1,48 @@
{ stdenv
, buildPythonPackage
, fetchPypi
, setuptoolsDarcs
, setuptoolsTrial
, simplejson
, twisted
, isPyPy
}:
buildPythonPackage rec {
pname = "pyutil";
version = "3.3.0";
src = fetchPypi {
inherit pname version;
sha256 = "8c4d4bf668c559186389bb9bce99e4b1b871c09ba252a756ccaacd2b8f401848";
};
buildInputs = [ setuptoolsDarcs setuptoolsTrial ] ++ (if doCheck then [ simplejson ] else []);
propagatedBuildInputs = [ twisted ];
# Tests fail because they try to write new code into the twisted
# package, apparently some kind of plugin.
doCheck = false;
prePatch = stdenv.lib.optionalString isPyPy ''
grep -rl 'utf-8-with-signature-unix' ./ | xargs sed -i -e "s|utf-8-with-signature-unix|utf-8|g"
'';
meta = with stdenv.lib; {
description = "Pyutil, a collection of mature utilities for Python programmers";
longDescription = ''
These are a few data structures, classes and functions which
we've needed over many years of Python programming and which
seem to be of general use to other Python programmers. Many of
the modules that have existed in pyutil over the years have
subsequently been obsoleted by new features added to the
Python language or its standard library, thus showing that
we're not alone in wanting tools like these.
'';
homepage = "http://allmydata.org/trac/pyutil";
license = licenses.gpl2Plus;
};
}

View File

@ -4,7 +4,7 @@
, setuptools, setuptoolsTrial, pyasn1, zope_interface
, service-identity, pyyaml, magic-wormhole, treq, appdirs
, beautifulsoup4, eliot, autobahn, cryptography
, html5lib
, html5lib, pyutil
}:
python.pkgs.buildPythonPackage rec {
version = "1.14.0.dev";
@ -50,6 +50,7 @@ python.pkgs.buildPythonPackage rec {
setuptoolsTrial pyasn1 zope_interface
service-identity pyyaml magic-wormhole treq
eliot autobahn cryptography setuptools
future pyutil
];
checkInputs = with python.pkgs; [

View File

@ -27,15 +27,29 @@ added_files = [
('src/allmydata/web/static/img/*.png', 'allmydata/web/static/img')]
hidden_imports = [
'__builtin__',
'allmydata.client',
'allmydata.introducer',
'allmydata.stats',
'base64',
'cffi',
'collections',
'commands',
'Crypto',
'functools',
'future.backports.misc',
'itertools',
'math',
'packaging.specifiers',
're',
'reprlib',
'six.moves.html_parser',
'subprocess',
'UserDict',
'UserList',
'UserString',
'yaml',
'zfec'
'zfec',
]
a = Analysis(

View File

@ -5,3 +5,8 @@ install = update_version install
develop = update_version develop
bdist_egg = update_version bdist_egg
bdist_wheel = update_version bdist_wheel
[flake8]
# For now, only use pyflakes errors; flake8 is still helpful because it allows
# ignoring specific errors/warnings when needed.
select = F

View File

@ -54,7 +54,9 @@ install_requires = [
# * foolscap >= 0.12.5 has ConnectionInfo and ReconnectionInfo
# * foolscap >= 0.12.6 has an i2p.sam_endpoint() that takes kwargs
# * foolscap 0.13.2 drops i2p support completely
"foolscap == 0.13.1",
# * foolscap >= 20.4 is necessary for Python 3
"foolscap == 0.13.1 ; python_version < '3.0'",
"foolscap >= 20.4.0 ; python_version > '3.0'",
# * cryptography 2.6 introduced some ed25519 APIs we rely on. Note that
# Twisted[conch] also depends on cryptography and Twisted[tls]
@ -119,6 +121,12 @@ install_requires = [
# WebSocket library for twisted and asyncio
"autobahn >= 19.5.2",
# Support for Python 3 transition
"future >= 0.18.2",
# Utility code:
"pyutil >= 3.3.0",
]
setup_requires = [
@ -133,8 +141,10 @@ tor_requires = [
]
i2p_requires = [
# See the comment in tor_requires.
"txi2p >= 0.3.2",
# txi2p has Python 3 support, but it's unreleased: https://github.com/str4d/txi2p/issues/10.
# URL lookups are in PEP-508 (via https://stackoverflow.com/a/54794506).
# Also see the comment in tor_requires.
"txi2p @ git+https://github.com/str4d/txi2p@0611b9a86172cb70d2f5e415a88eee9f230590b3#egg=txi2p",
]
if len(sys.argv) > 1 and sys.argv[1] == '--fakedependency':
@ -345,7 +355,9 @@ setup(name="tahoe-lafs", # also set in __init__.py
package_dir = {'':'src'},
packages=find_packages('src') + ['allmydata.test.plugins'],
classifiers=trove_classifiers,
python_requires="<3.0",
# We support Python 2.7, and we're working on support for 3.6 (the
# highest version that PyPy currently supports).
python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <3.7",
install_requires=install_requires,
extras_require={
# Duplicate the Twisted pywin32 dependency here. See
@ -353,11 +365,12 @@ setup(name="tahoe-lafs", # also set in __init__.py
# discussion.
':sys_platform=="win32"': ["pywin32 != 226"],
"test": [
"flake8",
# Pin a specific pyflakes so we don't have different folks
# disagreeing on what is or is not a lint issue. We can bump
# this version from time to time, but we will do it
# intentionally.
"pyflakes == 2.1.0",
"pyflakes == 2.2.0",
# coverage 5.0 breaks the integration tests in some opaque way.
# This probably needs to be addressed in a more permanent way
# eventually...
@ -373,6 +386,7 @@ setup(name="tahoe-lafs", # also set in __init__.py
"fixtures",
"beautifulsoup4",
"html5lib",
"junitxml",
] + tor_requires + i2p_requires,
"tor": tor_requires,
"i2p": i2p_requires,

View File

@ -37,3 +37,8 @@ __appname__ = "tahoe-lafs"
# in the "application" part of the Tahoe versioning scheme:
# https://tahoe-lafs.org/trac/tahoe-lafs/wiki/Versioning
__full_version__ = __appname__ + '/' + str(__version__)
# Install Python 3 module locations in Python 2:
from future import standard_library
standard_library.install_aliases()

View File

@ -46,18 +46,8 @@ def create_signing_keypair(key_size):
:returns: 2-tuple of (private_key, public_key)
"""
# Tahoe's original use of pycryptopp would use cryptopp's default
# public_exponent, which is 17
#
# Thus, we are using 17 here as well. However, there are other
# choices; see this for more discussion:
# https://security.stackexchange.com/questions/2335/should-rsa-public-exponent-be-only-in-3-5-17-257-or-65537-due-to-security-c
#
# Another popular choice is 65537. See:
# https://cryptography.io/en/latest/hazmat/primitives/asymmetric/rsa/#cryptography.hazmat.primitives.asymmetric.rsa.generate_private_key
# https://www.daemonology.net/blog/2009-06-11-cryptographic-right-answers.html
priv_key = rsa.generate_private_key(
public_exponent=17,
public_exponent=65537,
key_size=key_size,
backend=default_backend()
)

View File

@ -222,7 +222,7 @@ class ValidatedReadBucketProxy(log.PrefixingLogMixin):
UEB"""
precondition(share_hash_tree[0] is not None, share_hash_tree)
prefix = "%d-%s-%s" % (sharenum, bucket,
base32.b2a_l(share_hash_tree[0][:8], 60))
base32.b2a(share_hash_tree[0][:8])[:12])
log.PrefixingLogMixin.__init__(self,
facility="tahoe.immutable.download",
prefix=prefix)
@ -465,7 +465,7 @@ class Checker(log.PrefixingLogMixin):
monitor):
assert precondition(isinstance(verifycap, CHKFileVerifierURI), verifycap, type(verifycap))
prefix = "%s" % base32.b2a_l(verifycap.get_storage_index()[:8], 60)
prefix = "%s" % base32.b2a(verifycap.get_storage_index()[:8])[:12]
log.PrefixingLogMixin.__init__(self, facility="tahoe.immutable.checker", prefix=prefix)
self._verifycap = verifycap

View File

@ -43,7 +43,7 @@ class ShareFinder(object):
self.overdue_timers = {}
self._storage_index = verifycap.storage_index
self._si_prefix = base32.b2a_l(self._storage_index[:8], 60)
self._si_prefix = base32.b2a(self._storage_index[:8])[:12]
self._node_logparent = logparent
self._download_status = download_status
self._lp = log.msg(format="ShareFinder[si=%(si)s] starting",

View File

@ -44,7 +44,7 @@ class DownloadNode(object):
assert isinstance(verifycap, uri.CHKFileVerifierURI)
self._verifycap = verifycap
self._storage_broker = storage_broker
self._si_prefix = base32.b2a_l(verifycap.storage_index[:8], 60)
self._si_prefix = base32.b2a(verifycap.storage_index[:8])[:12]
self.running = True
if terminator:
terminator.register(self) # calls self.stop() at stopService()

View File

@ -1 +0,0 @@
allmydata.util.namespace

View File

@ -298,7 +298,7 @@ class BucketReader(Referenceable):
def __repr__(self):
return "<%s %s %s>" % (self.__class__.__name__,
base32.b2a_l(self.storage_index[:8], 60),
base32.b2a(self.storage_index[:8])[:12],
self.shnum)
def remote_read(self, offset, length):

View File

@ -1,6 +1,6 @@
from __future__ import print_function
import os, signal, sys, time
import os, signal, time
from random import randrange
from six.moves import StringIO
@ -8,7 +8,6 @@ from twisted.internet import reactor, defer
from twisted.python import failure
from twisted.trial import unittest
from allmydata.util import fileutil, log
from ..util.assertutil import precondition
from allmydata.util.encodingutil import (unicode_platform, get_filesystem_encoding,
get_io_encoding)
@ -89,39 +88,6 @@ class ReallyEqualMixin(object):
self.assertEqual(type(a), type(b), "a :: %r, b :: %r, %r" % (a, b, msg))
class NonASCIIPathMixin(object):
def mkdir_nonascii(self, dirpath):
# Kludge to work around the fact that buildbot can't remove a directory tree that has
# any non-ASCII directory names on Windows. (#1472)
if sys.platform == "win32":
def _cleanup():
try:
fileutil.rm_dir(dirpath)
finally:
if os.path.exists(dirpath):
msg = ("We were unable to delete a non-ASCII directory %r created by the test. "
"This is liable to cause failures on future builds." % (dirpath,))
print(msg)
log.err(msg)
self.addCleanup(_cleanup)
os.mkdir(dirpath)
def unicode_or_fallback(self, unicode_name, fallback_name, io_as_well=False):
if not unicode_platform():
try:
unicode_name.encode(get_filesystem_encoding())
except UnicodeEncodeError:
return fallback_name
if io_as_well:
try:
unicode_name.encode(get_io_encoding())
except UnicodeEncodeError:
return fallback_name
return unicode_name
class SignalMixin(object):
# This class is necessary for any code which wants to use Processes
# outside the usual reactor.run() environment. It is copied from

View File

@ -0,0 +1,37 @@
"""
Tests for allmydata.util.base32.
"""
import base64
from twisted.trial import unittest
from hypothesis import (
strategies as st,
given,
)
from allmydata.util import base32
class Base32(unittest.TestCase):
@given(input_bytes=st.binary(max_size=100))
def test_a2b_b2a_match_Pythons(self, input_bytes):
encoded = base32.b2a(input_bytes)
x = base64.b32encode(input_bytes).rstrip(b"=").lower()
self.failUnlessEqual(encoded, x)
self.assertIsInstance(encoded, bytes)
self.assertTrue(base32.could_be_base32_encoded(encoded))
self.assertEqual(base32.a2b(encoded), input_bytes)
def test_b2a(self):
self.failUnlessEqual(base32.b2a(b"\x12\x34"), b"ci2a")
def test_b2a_or_none(self):
self.failUnlessEqual(base32.b2a_or_none(None), None)
self.failUnlessEqual(base32.b2a_or_none(b"\x12\x34"), b"ci2a")
def test_a2b(self):
self.failUnlessEqual(base32.a2b(b"ci2a"), b"\x12\x34")
self.failUnlessRaises(AssertionError, base32.a2b, b"b0gus")
self.assertFalse(base32.could_be_base32_encoded(b"b0gus"))

View File

@ -1,9 +1,16 @@
import random, unittest
from past.builtins import chr as byteschr
from hypothesis import (
strategies as st,
given,
)
from allmydata.util import base62, mathutil
def insecurerandstr(n):
return ''.join(map(chr, map(random.randrange, [0]*n, [256]*n)))
return b''.join(map(byteschr, map(random.randrange, [0]*n, [256]*n)))
class T(unittest.TestCase):
def _test_num_octets_that_encode_to_this_many_chars(self, chars, octets):
@ -14,6 +21,10 @@ class T(unittest.TestCase):
bs2=base62.a2b(ascii)
assert bs2 == bs, "bs2: %s:%s, bs: %s:%s, ascii: %s:%s" % (len(bs2), repr(bs2), len(bs), repr(bs), len(ascii), repr(ascii))
@given(input_bytes=st.binary(max_size=100))
def test_roundtrip(self, input_bytes):
self._test_ende(input_bytes)
def test_num_octets_that_encode_to_this_many_chars(self):
return self._test_num_octets_that_encode_to_this_many_chars(2, 1)
return self._test_num_octets_that_encode_to_this_many_chars(3, 2)
@ -21,19 +32,19 @@ class T(unittest.TestCase):
return self._test_num_octets_that_encode_to_this_many_chars(6, 4)
def test_ende_0x00(self):
return self._test_ende('\x00')
return self._test_ende(b'\x00')
def test_ende_0x01(self):
return self._test_ende('\x01')
return self._test_ende(b'\x01')
def test_ende_0x0100(self):
return self._test_ende('\x01\x00')
return self._test_ende(b'\x01\x00')
def test_ende_0x000000(self):
return self._test_ende('\x00\x00\x00')
return self._test_ende(b'\x00\x00\x00')
def test_ende_0x010000(self):
return self._test_ende('\x01\x00\x00')
return self._test_ende(b'\x01\x00\x00')
def test_ende_randstr(self):
return self._test_ende(insecurerandstr(2**4))

View File

@ -83,7 +83,7 @@ BASECONFIG_I = ("[client]\n"
"introducer.furl = %s\n"
)
class Basic(testutil.ReallyEqualMixin, testutil.NonASCIIPathMixin, unittest.TestCase):
class Basic(testutil.ReallyEqualMixin, unittest.TestCase):
def test_loadable(self):
basedir = "test_client.Basic.test_loadable"
os.mkdir(basedir)

View File

@ -0,0 +1,76 @@
"""
Tests for allmydata.util.deferredutil.
Ported to Python 3.
"""
from __future__ import unicode_literals
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from future.utils import PY2
if PY2:
from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401
from twisted.trial import unittest
from twisted.internet import defer, reactor
from twisted.python.failure import Failure
from allmydata.util import deferredutil
class DeferredUtilTests(unittest.TestCase, deferredutil.WaitForDelayedCallsMixin):
def test_gather_results(self):
d1 = defer.Deferred()
d2 = defer.Deferred()
res = deferredutil.gatherResults([d1, d2])
d1.errback(ValueError("BAD"))
def _callb(res):
self.fail("Should have errbacked, not resulted in %s" % (res,))
def _errb(thef):
thef.trap(ValueError)
res.addCallbacks(_callb, _errb)
return res
def test_success(self):
d1, d2 = defer.Deferred(), defer.Deferred()
good = []
bad = []
dlss = deferredutil.DeferredListShouldSucceed([d1,d2])
dlss.addCallbacks(good.append, bad.append)
d1.callback(1)
d2.callback(2)
self.failUnlessEqual(good, [[1,2]])
self.failUnlessEqual(bad, [])
def test_failure(self):
d1, d2 = defer.Deferred(), defer.Deferred()
good = []
bad = []
dlss = deferredutil.DeferredListShouldSucceed([d1,d2])
dlss.addCallbacks(good.append, bad.append)
d1.addErrback(lambda _ignore: None)
d2.addErrback(lambda _ignore: None)
d1.callback(1)
d2.errback(ValueError())
self.failUnlessEqual(good, [])
self.failUnlessEqual(len(bad), 1)
f = bad[0]
self.failUnless(isinstance(f, Failure))
self.failUnless(f.check(ValueError))
def test_wait_for_delayed_calls(self):
"""
This tests that 'wait_for_delayed_calls' does in fact wait for a
delayed call that is active when the test returns. If it didn't,
Trial would report an unclean reactor error for this test.
"""
def _trigger():
#print "trigger"
pass
reactor.callLater(0.1, _trigger)
d = defer.succeed(None)
d.addBoth(self.wait_for_delayed_calls)
return d

View File

@ -0,0 +1,64 @@
"""
Tests for allmydata.util.humanreadable.
This module has been ported to Python 3.
"""
from __future__ import unicode_literals
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from future.utils import PY2
if PY2:
from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401
from past.builtins import long
from twisted.trial import unittest
from allmydata.util import humanreadable
def foo(): pass # FYI foo()'s line number is used in the test below
class NoArgumentException(Exception):
def __init__(self):
pass
class HumanReadable(unittest.TestCase):
def test_repr(self):
hr = humanreadable.hr
self.failUnlessEqual(hr(foo), "<foo() at test_humanreadable.py:24>")
self.failUnlessEqual(hr(self.test_repr),
"<bound method HumanReadable.test_repr of <allmydata.test.test_humanreadable.HumanReadable testMethod=test_repr>>")
self.failUnlessEqual(hr(long(1)), "1")
self.assertIn(hr(10**40),
["100000000000000000...000000000000000000",
"100000000000000000...0000000000000000000"])
self.failUnlessEqual(hr(self), "<allmydata.test.test_humanreadable.HumanReadable testMethod=test_repr>")
self.failUnlessEqual(hr([1,2]), "[1, 2]")
self.failUnlessEqual(hr({1:2}), "{1:2}")
try:
raise ValueError
except Exception as e:
self.failUnless(
hr(e) == "<ValueError: ()>" # python-2.4
or hr(e) == "ValueError()") # python-2.5
try:
raise ValueError("oops")
except Exception as e:
self.failUnless(
hr(e) == "<ValueError: 'oops'>" # python-2.4
or hr(e) == "ValueError('oops',)" # python-2.5
or hr(e) == "ValueError(u'oops',)" # python 2 during py3 transition
)
try:
raise NoArgumentException
except Exception as e:
self.failUnless(
hr(e) == "<NoArgumentException>" # python-2.4
or hr(e) == "NoArgumentException()" # python-2.5
or hr(e) == "<NoArgumentException: ()>", hr(e)) # python-3

View File

@ -1,10 +1,16 @@
"""
Tests related to the Python 3 porting effort itself.
"""
from pkg_resources import (
resource_stream,
)
This module has been ported to Python 3.
"""
from __future__ import unicode_literals
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from future.utils import PY2
if PY2:
from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401
from twisted.python.modules import (
getModule,
@ -13,11 +19,18 @@ from twisted.trial.unittest import (
SynchronousTestCase,
)
from allmydata.util._python3 import PORTED_MODULES, PORTED_TEST_MODULES
class Python3PortingEffortTests(SynchronousTestCase):
def test_finished_porting(self):
"""
Tahoe-LAFS has been ported to Python 3.
Once
https://tahoe-lafs.org/trac/tahoe-lafs/milestone/Support%20Python%203
is completed this test should pass (and can be deleted!).
"""
tahoe_lafs_module_names = set(all_module_names("allmydata"))
ported_names = set(ported_module_names())
@ -31,7 +44,10 @@ class Python3PortingEffortTests(SynchronousTestCase):
),
),
)
test_finished_porting.todo = "https://tahoe-lafs.org/trac/tahoe-lafs/milestone/Support%20Python%203 should be completed"
if PY2:
test_finished_porting.skip = "For some reason todo isn't working on Python 2 now"
else:
test_finished_porting.todo = "https://tahoe-lafs.org/trac/tahoe-lafs/milestone/Support%20Python%203 should be completed"
def test_ported_modules_exist(self):
"""
@ -70,18 +86,18 @@ def all_module_names(toplevel):
"""
allmydata = getModule(toplevel)
for module in allmydata.walkModules():
yield module.name.decode("utf-8")
name = module.name
if PY2:
name = name.decode("utf-8")
yield name
def ported_module_names():
"""
:return list[unicode]: A ``set`` of ``unicode`` giving the names of
:return list[unicode]: A ``list`` of ``unicode`` giving the names of
Tahoe-LAFS modules which have been ported to Python 3.
"""
return resource_stream(
"allmydata",
u"ported-modules.txt",
).read().splitlines()
return PORTED_MODULES + PORTED_TEST_MODULES
def unported_report(tahoe_lafs_module_names, ported_names):
@ -100,8 +116,8 @@ def count_lines(module_name):
try:
source = module.filePath.getContent()
except Exception as e:
print(module_name, e)
print((module_name, e))
return 0
lines = source.splitlines()
nonblank = filter(None, lines)
nonblank = [_f for _f in lines if _f]
return len(nonblank)

View File

@ -12,6 +12,17 @@ from twisted.trial import unittest
from twisted.internet import defer
from twisted.application import service
from twisted.web.template import flattenString
# We need to use `nevow.inevow.IRequest` for now for compatibility
# with the code in web/common.py. Once nevow bits are gone from
# web/common.py, we can use `twisted.web.iweb.IRequest` here.
from nevow.inevow import IRequest
from twisted.web.server import Request
from twisted.web.test.requesthelper import DummyChannel
from zope.interface import implementer
from foolscap.api import fireEventually
import itertools
from allmydata import interfaces
@ -36,9 +47,12 @@ from allmydata.mutable.layout import MDMFSlotWriteProxy, MDMFSlotReadProxy, \
SHARE_HASH_CHAIN_SIZE
from allmydata.interfaces import BadWriteEnablerError
from allmydata.test.common import LoggingServiceParent, ShouldFailMixin
from allmydata.test.common_web import WebRenderingMixin
from allmydata.test.no_network import NoNetworkServer
from allmydata.web.storage import StorageStatus, remove_prefix
from allmydata.web.storage import (
StorageStatus,
StorageStatusElement,
remove_prefix
)
from allmydata.storage_client import (
_StorageServer,
)
@ -2972,6 +2986,39 @@ def remove_tags(s):
s = re.sub(r'\s+', ' ', s)
return s
def renderSynchronously(ss):
"""
Return fully rendered HTML document.
:param _StorageStatus ss: a StorageStatus instance.
"""
return unittest.TestCase().successResultOf(renderDeferred(ss))
def renderDeferred(ss):
"""
Return a `Deferred` HTML renderer.
:param _StorageStatus ss: a StorageStatus instance.
"""
elem = StorageStatusElement(ss._storage, ss._nickname)
return flattenString(None, elem)
def renderJSON(resource):
"""Render a JSON from the given resource."""
@implementer(IRequest)
class JSONRequest(Request):
"""
A Request with t=json argument added to it. This is useful to
invoke a Resouce.render_JSON() method.
"""
def __init__(self):
Request.__init__(self, DummyChannel())
self.args = {"t": ["json"]}
self.fields = {}
return resource.render(JSONRequest())
class MyBucketCountingCrawler(BucketCountingCrawler):
def finished_prefix(self, cycle, prefix):
BucketCountingCrawler.finished_prefix(self, cycle, prefix)
@ -3008,7 +3055,7 @@ class BucketCounter(unittest.TestCase, pollmixin.PollMixin):
w = StorageStatus(ss)
# this sample is before the crawler has started doing anything
html = w.renderSynchronously()
html = renderSynchronously(w)
self.failUnlessIn("<h1>Storage Server Status</h1>", html)
s = remove_tags(html)
self.failUnlessIn("Accepting new shares: Yes", s)
@ -3031,7 +3078,7 @@ class BucketCounter(unittest.TestCase, pollmixin.PollMixin):
self.failUnlessEqual(state["last-complete-prefix"],
ss.bucket_counter.prefixes[0])
ss.bucket_counter.cpu_slice = 100.0 # finish as fast as possible
html = w.renderSynchronously()
html = renderSynchronously(w)
s = remove_tags(html)
self.failUnlessIn(" Current crawl ", s)
self.failUnlessIn(" (next work in ", s)
@ -3043,7 +3090,7 @@ class BucketCounter(unittest.TestCase, pollmixin.PollMixin):
d.addCallback(lambda ignored: self.poll(_watch))
def _check2(ignored):
ss.bucket_counter.cpu_slice = orig_cpu_slice
html = w.renderSynchronously()
html = renderSynchronously(w)
s = remove_tags(html)
self.failUnlessIn("Total buckets: 0 (the number of", s)
self.failUnless("Next crawl in 59 minutes" in s or "Next crawl in 60 minutes" in s, s)
@ -3105,20 +3152,20 @@ class BucketCounter(unittest.TestCase, pollmixin.PollMixin):
def _check_1(ignored):
# no ETA is available yet
html = w.renderSynchronously()
html = renderSynchronously(w)
s = remove_tags(html)
self.failUnlessIn("complete (next work", s)
def _check_2(ignored):
# one prefix has finished, so an ETA based upon that elapsed time
# should be available.
html = w.renderSynchronously()
html = renderSynchronously(w)
s = remove_tags(html)
self.failUnlessIn("complete (ETA ", s)
def _check_3(ignored):
# two prefixes have finished
html = w.renderSynchronously()
html = renderSynchronously(w)
s = remove_tags(html)
self.failUnlessIn("complete (ETA ", s)
d.callback("done")
@ -3161,7 +3208,7 @@ class InstrumentedStorageServer(StorageServer):
class No_ST_BLOCKS_StorageServer(StorageServer):
LeaseCheckerClass = No_ST_BLOCKS_LeaseCheckingCrawler
class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin, WebRenderingMixin):
class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
def setUp(self):
self.s = service.MultiService()
@ -3291,7 +3338,7 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin, WebRenderingMixin):
self.failIfEqual(sr2["configured-diskbytes"], None)
self.failIfEqual(sr2["original-sharebytes"], None)
d.addCallback(_after_first_bucket)
d.addCallback(lambda ign: self.render1(webstatus))
d.addCallback(lambda ign: renderDeferred(webstatus))
def _check_html_in_cycle(html):
s = remove_tags(html)
self.failUnlessIn("So far, this cycle has examined "
@ -3366,7 +3413,7 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin, WebRenderingMixin):
self.failUnlessEqual(count_leases(mutable_si_2), 1)
self.failUnlessEqual(count_leases(mutable_si_3), 2)
d.addCallback(_after_first_cycle)
d.addCallback(lambda ign: self.render1(webstatus))
d.addCallback(lambda ign: renderDeferred(webstatus))
def _check_html(html):
s = remove_tags(html)
self.failUnlessIn("recovered: 0 shares, 0 buckets "
@ -3375,7 +3422,7 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin, WebRenderingMixin):
"(2 mutable / 2 immutable),", s)
self.failUnlessIn("but expiration was not enabled", s)
d.addCallback(_check_html)
d.addCallback(lambda ign: self.render_json(webstatus))
d.addCallback(lambda ign: renderJSON(webstatus))
def _check_json(raw):
data = json.loads(raw)
self.failUnlessIn("lease-checker", data)
@ -3466,7 +3513,7 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin, WebRenderingMixin):
d2.addCallback(_after_first_bucket)
return d2
d.addCallback(_after_first_bucket)
d.addCallback(lambda ign: self.render1(webstatus))
d.addCallback(lambda ign: renderDeferred(webstatus))
def _check_html_in_cycle(html):
s = remove_tags(html)
# the first bucket encountered gets deleted, and its prefix
@ -3525,7 +3572,7 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin, WebRenderingMixin):
self.failUnless(rec["configured-diskbytes"] >= 0,
rec["configured-diskbytes"])
d.addCallback(_after_first_cycle)
d.addCallback(lambda ign: self.render1(webstatus))
d.addCallback(lambda ign: renderDeferred(webstatus))
def _check_html(html):
s = remove_tags(html)
self.failUnlessIn("Expiration Enabled: expired leases will be removed", s)
@ -3610,7 +3657,7 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin, WebRenderingMixin):
d2.addCallback(_after_first_bucket)
return d2
d.addCallback(_after_first_bucket)
d.addCallback(lambda ign: self.render1(webstatus))
d.addCallback(lambda ign: renderDeferred(webstatus))
def _check_html_in_cycle(html):
s = remove_tags(html)
# the first bucket encountered gets deleted, and its prefix
@ -3671,7 +3718,7 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin, WebRenderingMixin):
self.failUnless(rec["configured-diskbytes"] >= 0,
rec["configured-diskbytes"])
d.addCallback(_after_first_cycle)
d.addCallback(lambda ign: self.render1(webstatus))
d.addCallback(lambda ign: renderDeferred(webstatus))
def _check_html(html):
s = remove_tags(html)
self.failUnlessIn("Expiration Enabled:"
@ -3733,7 +3780,7 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin, WebRenderingMixin):
self.failUnlessEqual(count_shares(mutable_si_3), 1)
self.failUnlessEqual(count_leases(mutable_si_3), 2)
d.addCallback(_after_first_cycle)
d.addCallback(lambda ign: self.render1(webstatus))
d.addCallback(lambda ign: renderDeferred(webstatus))
def _check_html(html):
s = remove_tags(html)
self.failUnlessIn("The following sharetypes will be expired: immutable.", s)
@ -3790,7 +3837,7 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin, WebRenderingMixin):
self.failUnlessEqual(count_shares(mutable_si_2), 0)
self.failUnlessEqual(count_shares(mutable_si_3), 0)
d.addCallback(_after_first_cycle)
d.addCallback(lambda ign: self.render1(webstatus))
d.addCallback(lambda ign: renderDeferred(webstatus))
def _check_html(html):
s = remove_tags(html)
self.failUnlessIn("The following sharetypes will be expired: mutable.", s)
@ -4012,7 +4059,7 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin, WebRenderingMixin):
self.failUnlessEqual(so_far["corrupt-shares"], [(first_b32, 0)])
d.addCallback(_after_first_bucket)
d.addCallback(lambda ign: self.render_json(w))
d.addCallback(lambda ign: renderJSON(w))
def _check_json(raw):
data = json.loads(raw)
# grr. json turns all dict keys into strings.
@ -4021,7 +4068,7 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin, WebRenderingMixin):
# it also turns all tuples into lists
self.failUnlessEqual(corrupt_shares, [[first_b32, 0]])
d.addCallback(_check_json)
d.addCallback(lambda ign: self.render1(w))
d.addCallback(lambda ign: renderDeferred(w))
def _check_html(html):
s = remove_tags(html)
self.failUnlessIn("Corrupt shares: SI %s shnum 0" % first_b32, s)
@ -4039,14 +4086,14 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin, WebRenderingMixin):
self.failUnlessEqual(rec["examined-shares"], 3)
self.failUnlessEqual(last["corrupt-shares"], [(first_b32, 0)])
d.addCallback(_after_first_cycle)
d.addCallback(lambda ign: self.render_json(w))
d.addCallback(lambda ign: renderJSON(w))
def _check_json_history(raw):
data = json.loads(raw)
last = data["lease-checker"]["history"]["0"]
corrupt_shares = last["corrupt-shares"]
self.failUnlessEqual(corrupt_shares, [[first_b32, 0]])
d.addCallback(_check_json_history)
d.addCallback(lambda ign: self.render1(w))
d.addCallback(lambda ign: renderDeferred(w))
def _check_html_history(html):
s = remove_tags(html)
self.failUnlessIn("Corrupt shares: SI %s shnum 0" % first_b32, s)
@ -4059,11 +4106,8 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin, WebRenderingMixin):
d.addBoth(_cleanup)
return d
def render_json(self, page):
d = self.render1(page, args={"t": ["json"]})
return d
class WebStatus(unittest.TestCase, pollmixin.PollMixin, WebRenderingMixin):
class WebStatus(unittest.TestCase, pollmixin.PollMixin):
def setUp(self):
self.s = service.MultiService()
@ -4073,7 +4117,7 @@ class WebStatus(unittest.TestCase, pollmixin.PollMixin, WebRenderingMixin):
def test_no_server(self):
w = StorageStatus(None)
html = w.renderSynchronously()
html = renderSynchronously(w)
self.failUnlessIn("<h1>No Storage Server Running</h1>", html)
def test_status(self):
@ -4083,7 +4127,7 @@ class WebStatus(unittest.TestCase, pollmixin.PollMixin, WebRenderingMixin):
ss = StorageServer(basedir, nodeid)
ss.setServiceParent(self.s)
w = StorageStatus(ss, "nickname")
d = self.render1(w)
d = renderDeferred(w)
def _check_html(html):
self.failUnlessIn("<h1>Storage Server Status</h1>", html)
s = remove_tags(html)
@ -4092,7 +4136,7 @@ class WebStatus(unittest.TestCase, pollmixin.PollMixin, WebRenderingMixin):
self.failUnlessIn("Accepting new shares: Yes", s)
self.failUnlessIn("Reserved space: - 0 B (0)", s)
d.addCallback(_check_html)
d.addCallback(lambda ign: self.render_json(w))
d.addCallback(lambda ign: renderJSON(w))
def _check_json(raw):
data = json.loads(raw)
s = data["stats"]
@ -4103,9 +4147,6 @@ class WebStatus(unittest.TestCase, pollmixin.PollMixin, WebRenderingMixin):
d.addCallback(_check_json)
return d
def render_json(self, page):
d = self.render1(page, args={"t": ["json"]})
return d
def test_status_no_disk_stats(self):
def call_get_disk_stats(whichdir, reserved_space=0):
@ -4119,7 +4160,7 @@ class WebStatus(unittest.TestCase, pollmixin.PollMixin, WebRenderingMixin):
ss = StorageServer(basedir, "\x00" * 20)
ss.setServiceParent(self.s)
w = StorageStatus(ss)
html = w.renderSynchronously()
html = renderSynchronously(w)
self.failUnlessIn("<h1>Storage Server Status</h1>", html)
s = remove_tags(html)
self.failUnlessIn("Accepting new shares: Yes", s)
@ -4139,7 +4180,7 @@ class WebStatus(unittest.TestCase, pollmixin.PollMixin, WebRenderingMixin):
ss = StorageServer(basedir, "\x00" * 20)
ss.setServiceParent(self.s)
w = StorageStatus(ss)
html = w.renderSynchronously()
html = renderSynchronously(w)
self.failUnlessIn("<h1>Storage Server Status</h1>", html)
s = remove_tags(html)
self.failUnlessIn("Accepting new shares: No", s)
@ -4175,7 +4216,7 @@ class WebStatus(unittest.TestCase, pollmixin.PollMixin, WebRenderingMixin):
ss.setServiceParent(self.s)
w = StorageStatus(ss)
html = w.renderSynchronously()
html = renderSynchronously(w)
self.failUnlessIn("<h1>Storage Server Status</h1>", html)
s = remove_tags(html)
@ -4193,7 +4234,7 @@ class WebStatus(unittest.TestCase, pollmixin.PollMixin, WebRenderingMixin):
ss = StorageServer(basedir, "\x00" * 20, readonly_storage=True)
ss.setServiceParent(self.s)
w = StorageStatus(ss)
html = w.renderSynchronously()
html = renderSynchronously(w)
self.failUnlessIn("<h1>Storage Server Status</h1>", html)
s = remove_tags(html)
self.failUnlessIn("Accepting new shares: No", s)
@ -4204,7 +4245,7 @@ class WebStatus(unittest.TestCase, pollmixin.PollMixin, WebRenderingMixin):
ss = StorageServer(basedir, "\x00" * 20, reserved_space=10e6)
ss.setServiceParent(self.s)
w = StorageStatus(ss)
html = w.renderSynchronously()
html = renderSynchronously(w)
self.failUnlessIn("<h1>Storage Server Status</h1>", html)
s = remove_tags(html)
self.failUnlessIn("Reserved space: - 10.00 MB (10000000)", s)
@ -4215,16 +4256,16 @@ class WebStatus(unittest.TestCase, pollmixin.PollMixin, WebRenderingMixin):
ss = StorageServer(basedir, "\x00" * 20, reserved_space=10e6)
ss.setServiceParent(self.s)
w = StorageStatus(ss)
html = w.renderSynchronously()
html = renderSynchronously(w)
self.failUnlessIn("<h1>Storage Server Status</h1>", html)
s = remove_tags(html)
self.failUnlessIn("Reserved space: - 10.00 MB (10000000)", s)
def test_util(self):
w = StorageStatus(None)
self.failUnlessEqual(w.render_space(None, None), "?")
self.failUnlessEqual(w.render_space(None, 10e6), "10000000")
self.failUnlessEqual(w.render_abbrev_space(None, None), "?")
self.failUnlessEqual(w.render_abbrev_space(None, 10e6), "10.00 MB")
w = StorageStatusElement(None, None)
self.failUnlessEqual(w.render_space(None), "?")
self.failUnlessEqual(w.render_space(10e6), "10000000")
self.failUnlessEqual(w.render_abbrev_space(None), "?")
self.failUnlessEqual(w.render_abbrev_space(10e6), "10.00 MB")
self.failUnlessEqual(remove_prefix("foo.bar", "foo."), "bar")
self.failUnlessEqual(remove_prefix("foo.bar", "baz."), None)

View File

@ -0,0 +1,170 @@
# -*- coding: utf-8 -*-
# Tahoe-LAFS -- secure, distributed storage grid
#
# Copyright © 2020 The Tahoe-LAFS Software Foundation
#
# This file is part of Tahoe-LAFS.
#
# See the docs/about.rst file for licensing information.
"""
Tests for the allmydata.testing helpers
"""
from twisted.internet.defer import (
inlineCallbacks,
)
from allmydata.uri import (
from_string,
CHKFileURI,
)
from allmydata.testing.web import (
create_tahoe_treq_client,
capability_generator,
)
from hyperlink import (
DecodedURL,
)
from hypothesis import (
given,
)
from hypothesis.strategies import (
binary,
)
from testtools import (
TestCase,
)
from testtools.matchers import (
Always,
Equals,
IsInstance,
MatchesStructure,
AfterPreprocessing,
)
from testtools.twistedsupport import (
succeeded,
)
class FakeWebTest(TestCase):
"""
Test the WebUI verified-fakes infrastucture
"""
# Note: do NOT use setUp() because Hypothesis doesn't work
# properly with it. You must instead do all fixture-type work
# yourself in each test.
@given(
content=binary(),
)
def test_create_and_download(self, content):
"""
Upload some content (via 'PUT /uri') and then download it (via
'GET /uri?uri=...')
"""
http_client = create_tahoe_treq_client()
@inlineCallbacks
def do_test():
resp = yield http_client.put("http://example.com/uri", content)
self.assertThat(resp.code, Equals(201))
cap_raw = yield resp.content()
cap = from_string(cap_raw)
self.assertThat(cap, IsInstance(CHKFileURI))
resp = yield http_client.get(
"http://example.com/uri?uri={}".format(cap.to_string())
)
self.assertThat(resp.code, Equals(200))
round_trip_content = yield resp.content()
# using the form "/uri/<cap>" is also valid
resp = yield http_client.get(
"http://example.com/uri/{}".format(cap.to_string())
)
self.assertEqual(resp.code, 200)
round_trip_content = yield resp.content()
self.assertEqual(content, round_trip_content)
self.assertThat(
do_test(),
succeeded(Always()),
)
@given(
content=binary(),
)
def test_duplicate_upload(self, content):
"""
Upload the same content (via 'PUT /uri') twice
"""
http_client = create_tahoe_treq_client()
@inlineCallbacks
def do_test():
resp = yield http_client.put("http://example.com/uri", content)
self.assertEqual(resp.code, 201)
cap_raw = yield resp.content()
self.assertThat(
cap_raw,
AfterPreprocessing(
from_string,
IsInstance(CHKFileURI)
)
)
resp = yield http_client.put("http://example.com/uri", content)
self.assertThat(resp.code, Equals(200))
self.assertThat(
do_test(),
succeeded(Always()),
)
def test_download_missing(self):
"""
Error if we download a capability that doesn't exist
"""
http_client = create_tahoe_treq_client()
cap_gen = capability_generator("URI:CHK:")
uri = DecodedURL.from_text(u"http://example.com/uri?uri={}".format(next(cap_gen)))
resp = http_client.get(uri.to_uri().to_text())
self.assertThat(
resp,
succeeded(
MatchesStructure(
code=Equals(500)
)
)
)
def test_download_no_arg(self):
"""
Error if we GET from "/uri" with no ?uri= query-arg
"""
http_client = create_tahoe_treq_client()
uri = DecodedURL.from_text(u"http://example.com/uri/")
resp = http_client.get(uri.to_uri().to_text())
self.assertThat(
resp,
succeeded(
MatchesStructure(
code=Equals(400)
)
)
)

View File

@ -1,8 +1,5 @@
from __future__ import print_function
def foo(): pass # keep the line number constant
import binascii
import six
import hashlib
@ -17,8 +14,8 @@ from twisted.internet import defer, reactor
from twisted.python.failure import Failure
from twisted.python import log
from allmydata.util import base32, idlib, humanreadable, mathutil, hashutil
from allmydata.util import assertutil, fileutil, deferredutil, abbreviate
from allmydata.util import base32, idlib, mathutil, hashutil
from allmydata.util import fileutil, abbreviate
from allmydata.util import limiter, time_format, pollmixin
from allmydata.util import statistics, dictutil, pipeline, yamlutil
from allmydata.util import log as tahoe_log
@ -39,195 +36,15 @@ def sha256(data):
return binascii.hexlify(hashlib.sha256(data).digest())
class Base32(unittest.TestCase):
    """
    Tests for allmydata.util.base32 (Python 2: inputs and outputs are
    byte strings).
    """

    def test_b2a_matches_Pythons(self):
        # base32.b2a must agree with the stdlib's base64.b32encode
        # modulo padding and case: Tahoe's variant is unpadded and
        # lower-case.
        import base64
        y = "\x12\x34\x45\x67\x89\x0a\xbc\xde\xf0"
        x = base64.b32encode(y)
        while x and x[-1] == '=':
            x = x[:-1]
        x = x.lower()
        self.failUnlessEqual(base32.b2a(y), x)

    def test_b2a(self):
        self.failUnlessEqual(base32.b2a("\x12\x34"), "ci2a")

    def test_b2a_or_none(self):
        # b2a_or_none passes None through instead of raising.
        self.failUnlessEqual(base32.b2a_or_none(None), None)
        self.failUnlessEqual(base32.b2a_or_none("\x12\x34"), "ci2a")

    def test_a2b(self):
        # Round-trip of test_b2a, plus rejection of non-base32 input
        # ('0' is not in the base32 alphabet).
        self.failUnlessEqual(base32.a2b("ci2a"), "\x12\x34")
        self.failUnlessRaises(AssertionError, base32.a2b, "b0gus")
class IDLib(unittest.TestCase):
    """
    Tests for allmydata.util.idlib.
    """
    def test_nodeid_b2a(self):
        # A 20-byte all-zero node id encodes to 32 base32 'a'
        # characters ('a' encodes the value 0).
        self.failUnlessEqual(idlib.nodeid_b2a("\x00"*20), "a"*32)
class NoArgumentException(Exception):
    """
    An exception type whose constructor takes no arguments at all;
    used to exercise rendering of argument-less exceptions.
    """
    def __init__(self):
        # Explicitly initialize the base class with no message.
        super(NoArgumentException, self).__init__()
class HumanReadable(unittest.TestCase):
    """
    Tests for humanreadable.hr(), which renders arbitrary objects as
    short human-readable strings.
    """
    def test_repr(self):
        hr = humanreadable.hr
        # hr() on a function shows its name and source location; "foo"
        # is defined near the top of this file at a fixed line number.
        self.failUnlessEqual(hr(foo), "<foo() at test_util.py:4>")
        self.failUnlessEqual(hr(self.test_repr),
                             "<bound method HumanReadable.test_repr of <allmydata.test.test_util.HumanReadable testMethod=test_repr>>")
        # Python 2 only: long() values render like ints.
        self.failUnlessEqual(hr(long(1)), "1")
        # Very long numbers are elided in the middle.
        self.failUnlessEqual(hr(10**40),
                             "100000000000000000...000000000000000000")
        self.failUnlessEqual(hr(self), "<allmydata.test.test_util.HumanReadable testMethod=test_repr>")
        self.failUnlessEqual(hr([1,2]), "[1, 2]")
        self.failUnlessEqual(hr({1:2}), "{1:2}")
        # Exception rendering differed between python-2.4 and 2.5;
        # either form is accepted.
        try:
            raise ValueError
        except Exception as e:
            self.failUnless(
                hr(e) == "<ValueError: ()>" # python-2.4
                or hr(e) == "ValueError()") # python-2.5
        try:
            raise ValueError("oops")
        except Exception as e:
            self.failUnless(
                hr(e) == "<ValueError: 'oops'>" # python-2.4
                or hr(e) == "ValueError('oops',)") # python-2.5
        try:
            raise NoArgumentException
        except Exception as e:
            self.failUnless(
                hr(e) == "<NoArgumentException>" # python-2.4
                or hr(e) == "NoArgumentException()") # python-2.5
# Legacy abbreviate_time() tests (the same checks also appear in the
# Abbreviate test class later in this file). Each feeds a
# datetime.timedelta and checks the resulting English phrase.
def test_abbrev_time_1s(self):
    diff = timedelta(seconds=1)
    s = abbreviate.abbreviate_time(diff)
    self.assertEqual('1 second ago', s)

def test_abbrev_time_25s(self):
    diff = timedelta(seconds=25)
    s = abbreviate.abbreviate_time(diff)
    self.assertEqual('25 seconds ago', s)

def test_abbrev_time_future_5_minutes(self):
    # Negative timedeltas lie in the future.
    diff = timedelta(minutes=-5)
    s = abbreviate.abbreviate_time(diff)
    self.assertEqual('5 minutes in the future', s)

def test_abbrev_time_hours(self):
    diff = timedelta(hours=4)
    s = abbreviate.abbreviate_time(diff)
    self.assertEqual('4 hours ago', s)

def test_abbrev_time_day(self):
    diff = timedelta(hours=49) # must be more than 2 days
    s = abbreviate.abbreviate_time(diff)
    self.assertEqual('2 days ago', s)

def test_abbrev_time_month(self):
    diff = timedelta(days=91)
    s = abbreviate.abbreviate_time(diff)
    self.assertEqual('3 months ago', s)

def test_abbrev_time_year(self):
    diff = timedelta(weeks=(5 * 52) + 1)
    s = abbreviate.abbreviate_time(diff)
    self.assertEqual('5 years ago', s)
class MyList(list):
    # A trivial list subclass; presumably used elsewhere in this file
    # to check handling of list subclasses -- no uses are visible in
    # this chunk.
    pass
class Math(unittest.TestCase):
    """
    Tests for the integer-arithmetic helpers in
    allmydata.util.mathutil.
    """
    def test_div_ceil(self):
        # div_ceil(n, d) == ceiling(n / d).
        f = mathutil.div_ceil
        self.failUnlessEqual(f(0, 1), 0)
        self.failUnlessEqual(f(0, 2), 0)
        self.failUnlessEqual(f(0, 3), 0)
        self.failUnlessEqual(f(1, 3), 1)
        self.failUnlessEqual(f(2, 3), 1)
        self.failUnlessEqual(f(3, 3), 1)
        self.failUnlessEqual(f(4, 3), 2)
        self.failUnlessEqual(f(5, 3), 2)
        self.failUnlessEqual(f(6, 3), 2)
        self.failUnlessEqual(f(7, 3), 3)

    def test_next_multiple(self):
        # next_multiple(n, k) is the smallest multiple of k that is
        # >= n.
        f = mathutil.next_multiple
        self.failUnlessEqual(f(5, 1), 5)
        self.failUnlessEqual(f(5, 2), 6)
        self.failUnlessEqual(f(5, 3), 6)
        self.failUnlessEqual(f(5, 4), 8)
        self.failUnlessEqual(f(5, 5), 5)
        self.failUnlessEqual(f(5, 6), 6)
        self.failUnlessEqual(f(32, 1), 32)
        self.failUnlessEqual(f(32, 2), 32)
        self.failUnlessEqual(f(32, 3), 33)
        self.failUnlessEqual(f(32, 4), 32)
        self.failUnlessEqual(f(32, 5), 35)
        self.failUnlessEqual(f(32, 6), 36)
        self.failUnlessEqual(f(32, 7), 35)
        self.failUnlessEqual(f(32, 8), 32)
        self.failUnlessEqual(f(32, 9), 36)
        self.failUnlessEqual(f(32, 10), 40)
        self.failUnlessEqual(f(32, 11), 33)
        self.failUnlessEqual(f(32, 12), 36)
        self.failUnlessEqual(f(32, 13), 39)
        self.failUnlessEqual(f(32, 14), 42)
        self.failUnlessEqual(f(32, 15), 45)
        self.failUnlessEqual(f(32, 16), 32)
        self.failUnlessEqual(f(32, 17), 34)
        self.failUnlessEqual(f(32, 18), 36)
        self.failUnlessEqual(f(32, 589), 589)

    def test_pad_size(self):
        # pad_size(n, k) is how many bytes must be added to n to reach
        # the next multiple of k (0 when n is already a multiple).
        f = mathutil.pad_size
        self.failUnlessEqual(f(0, 4), 0)
        self.failUnlessEqual(f(1, 4), 3)
        self.failUnlessEqual(f(2, 4), 2)
        self.failUnlessEqual(f(3, 4), 1)
        self.failUnlessEqual(f(4, 4), 0)
        self.failUnlessEqual(f(5, 4), 3)

    def test_is_power_of_k(self):
        # Exhaustively check small ranges against the known powers of
        # 2 and 3.
        f = mathutil.is_power_of_k
        for i in range(1, 100):
            if i in (1, 2, 4, 8, 16, 32, 64):
                self.failUnless(f(i, 2), "but %d *is* a power of 2" % i)
            else:
                self.failIf(f(i, 2), "but %d is *not* a power of 2" % i)
        for i in range(1, 100):
            if i in (1, 3, 9, 27, 81):
                self.failUnless(f(i, 3), "but %d *is* a power of 3" % i)
            else:
                self.failIf(f(i, 3), "but %d is *not* a power of 3" % i)

    def test_next_power_of_k(self):
        # next_power_of_k(n, k) is the smallest power of k that is
        # >= n (with 1 returned for n <= 1).
        f = mathutil.next_power_of_k
        self.failUnlessEqual(f(0,2), 1)
        self.failUnlessEqual(f(1,2), 1)
        self.failUnlessEqual(f(2,2), 2)
        self.failUnlessEqual(f(3,2), 4)
        self.failUnlessEqual(f(4,2), 4)
        for i in range(5, 8): self.failUnlessEqual(f(i,2), 8, "%d" % i)
        for i in range(9, 16): self.failUnlessEqual(f(i,2), 16, "%d" % i)
        for i in range(17, 32): self.failUnlessEqual(f(i,2), 32, "%d" % i)
        for i in range(33, 64): self.failUnlessEqual(f(i,2), 64, "%d" % i)
        for i in range(65, 100): self.failUnlessEqual(f(i,2), 128, "%d" % i)
        self.failUnlessEqual(f(0,3), 1)
        self.failUnlessEqual(f(1,3), 1)
        self.failUnlessEqual(f(2,3), 3)
        self.failUnlessEqual(f(3,3), 3)
        for i in range(4, 9): self.failUnlessEqual(f(i,3), 9, "%d" % i)
        for i in range(10, 27): self.failUnlessEqual(f(i,3), 27, "%d" % i)
        for i in range(28, 81): self.failUnlessEqual(f(i,3), 81, "%d" % i)
        for i in range(82, 200): self.failUnlessEqual(f(i,3), 243, "%d" % i)

    def test_ave(self):
        # ave() is the arithmetic mean.
        f = mathutil.ave
        self.failUnlessEqual(f([1,2,3]), 2)
        self.failUnlessEqual(f([0,0,0,4]), 1)
        self.failUnlessAlmostEqual(f([0.0, 1.0, 1.0]), .666666666666)
def test_round_sigfigs(self):
f = mathutil.round_sigfigs
self.failUnlessEqual(f(22.0/3, 4), 7.3330000000000002)
@ -370,65 +187,6 @@ class Statistics(unittest.TestCase):
self.failUnlessEqual(f(plist, .5, 3), .02734375)
class Asserts(unittest.TestCase):
    """
    Tests for assertutil._assert / precondition / postcondition.
    (Python 2: the expected messages include `<type 'str'>`.)
    """
    def should_assert(self, func, *args, **kwargs):
        """
        Call func(*args, **kwargs), requiring that it raises
        AssertionError, and return the stringified assertion message.
        """
        try:
            func(*args, **kwargs)
        except AssertionError as e:
            return str(e)
        except Exception as e:
            self.fail("assert failed with non-AssertionError: %s" % e)
        self.fail("assert was not caught")

    def should_not_assert(self, func, *args, **kwargs):
        """
        Call func(*args, **kwargs), requiring that it raises nothing.
        """
        try:
            func(*args, **kwargs)
        except AssertionError as e:
            self.fail("assertion fired when it should not have: %s" % e)
        except Exception as e:
            self.fail("assertion (which shouldn't have failed) failed with non-AssertionError: %s" % e)
        return # we're happy

    def test_assert(self):
        f = assertutil._assert
        # No condition, or a False condition, must assert; True must
        # not.  Positional and keyword message args are rendered with
        # their types.
        self.should_assert(f)
        self.should_assert(f, False)
        self.should_not_assert(f, True)

        m = self.should_assert(f, False, "message")
        self.failUnlessEqual(m, "'message' <type 'str'>", m)
        m = self.should_assert(f, False, "message1", othermsg=12)
        self.failUnlessEqual("'message1' <type 'str'>, othermsg: 12 <type 'int'>", m)
        m = self.should_assert(f, False, othermsg="message2")
        self.failUnlessEqual("othermsg: 'message2' <type 'str'>", m)

    def test_precondition(self):
        # Same shape as test_assert, but messages carry a
        # "precondition: " prefix.
        f = assertutil.precondition
        self.should_assert(f)
        self.should_assert(f, False)
        self.should_not_assert(f, True)

        m = self.should_assert(f, False, "message")
        self.failUnlessEqual("precondition: 'message' <type 'str'>", m)
        m = self.should_assert(f, False, "message1", othermsg=12)
        self.failUnlessEqual("precondition: 'message1' <type 'str'>, othermsg: 12 <type 'int'>", m)
        m = self.should_assert(f, False, othermsg="message2")
        self.failUnlessEqual("precondition: othermsg: 'message2' <type 'str'>", m)

    def test_postcondition(self):
        # Same shape as test_assert, but messages carry a
        # "postcondition: " prefix.
        f = assertutil.postcondition
        self.should_assert(f)
        self.should_assert(f, False)
        self.should_not_assert(f, True)

        m = self.should_assert(f, False, "message")
        self.failUnlessEqual("postcondition: 'message' <type 'str'>", m)
        m = self.should_assert(f, False, "message1", othermsg=12)
        self.failUnlessEqual("postcondition: 'message1' <type 'str'>, othermsg: 12 <type 'int'>", m)
        m = self.should_assert(f, False, othermsg="message2")
        self.failUnlessEqual("postcondition: othermsg: 'message2' <type 'str'>", m)
class FileUtil(ReallyEqualMixin, unittest.TestCase):
def mkdir(self, basedir, path, mode=0o777):
fn = os.path.join(basedir, path)
@ -823,60 +581,6 @@ class PollMixinTests(unittest.TestCase):
d.addCallbacks(_suc, _err)
return d
class DeferredUtilTests(unittest.TestCase, deferredutil.WaitForDelayedCallsMixin):
    """
    Tests for allmydata.util.deferredutil.
    """
    def test_gather_results(self):
        # gatherResults must errback, preserving the original
        # exception type, when any input Deferred fails.
        d1 = defer.Deferred()
        d2 = defer.Deferred()
        res = deferredutil.gatherResults([d1, d2])
        d1.errback(ValueError("BAD"))
        def _callb(res):
            self.fail("Should have errbacked, not resulted in %s" % (res,))
        def _errb(thef):
            thef.trap(ValueError)
        res.addCallbacks(_callb, _errb)
        return res

    def test_success(self):
        # DeferredListShouldSucceed fires its callback with the list
        # of results once every input succeeds.
        d1, d2 = defer.Deferred(), defer.Deferred()
        good = []
        bad = []
        dlss = deferredutil.DeferredListShouldSucceed([d1,d2])
        dlss.addCallbacks(good.append, bad.append)
        d1.callback(1)
        d2.callback(2)
        self.failUnlessEqual(good, [[1,2]])
        self.failUnlessEqual(bad, [])

    def test_failure(self):
        # DeferredListShouldSucceed fires its errback with a Failure
        # wrapping the original exception if any input fails.
        d1, d2 = defer.Deferred(), defer.Deferred()
        good = []
        bad = []
        dlss = deferredutil.DeferredListShouldSucceed([d1,d2])
        dlss.addCallbacks(good.append, bad.append)
        # Silence the inputs' own unhandled-error reporting.
        d1.addErrback(lambda _ignore: None)
        d2.addErrback(lambda _ignore: None)
        d1.callback(1)
        d2.errback(ValueError())
        self.failUnlessEqual(good, [])
        self.failUnlessEqual(len(bad), 1)
        f = bad[0]
        self.failUnless(isinstance(f, Failure))
        self.failUnless(f.check(ValueError))

    def test_wait_for_delayed_calls(self):
        """
        This tests that 'wait_for_delayed_calls' does in fact wait for a
        delayed call that is active when the test returns. If it didn't,
        Trial would report an unclean reactor error for this test.
        """
        def _trigger():
            #print "trigger"
            pass
        reactor.callLater(0.1, _trigger)
        d = defer.succeed(None)
        d.addBoth(self.wait_for_delayed_calls)
        return d
class HashUtilTests(unittest.TestCase):
@ -984,6 +688,41 @@ class HashUtilTests(unittest.TestCase):
)
class Abbreviate(unittest.TestCase):
    """
    Tests for allmydata.util.abbreviate: each test feeds a
    datetime.timedelta to abbreviate_time() and checks the resulting
    English phrase.
    """
    def test_abbrev_time_1s(self):
        diff = timedelta(seconds=1)
        s = abbreviate.abbreviate_time(diff)
        self.assertEqual('1 second ago', s)

    def test_abbrev_time_25s(self):
        diff = timedelta(seconds=25)
        s = abbreviate.abbreviate_time(diff)
        self.assertEqual('25 seconds ago', s)

    def test_abbrev_time_future_5_minutes(self):
        # Negative timedeltas lie in the future.
        diff = timedelta(minutes=-5)
        s = abbreviate.abbreviate_time(diff)
        self.assertEqual('5 minutes in the future', s)

    def test_abbrev_time_hours(self):
        diff = timedelta(hours=4)
        s = abbreviate.abbreviate_time(diff)
        self.assertEqual('4 hours ago', s)

    def test_abbrev_time_day(self):
        diff = timedelta(hours=49) # must be more than 2 days
        s = abbreviate.abbreviate_time(diff)
        self.assertEqual('2 days ago', s)

    def test_abbrev_time_month(self):
        diff = timedelta(days=91)
        s = abbreviate.abbreviate_time(diff)
        self.assertEqual('3 months ago', s)

    def test_abbrev_time_year(self):
        diff = timedelta(weeks=(5 * 52) + 1)
        s = abbreviate.abbreviate_time(diff)
        self.assertEqual('5 years ago', s)
def test_time(self):
a = abbreviate.abbreviate_time
self.failUnlessEqual(a(None), "unknown")

View File

@ -0,0 +1,230 @@
"""
Tests for ```allmydata.web.status```.
"""
from bs4 import BeautifulSoup
from twisted.web.template import flattenString
from allmydata.web.status import (
Status,
StatusElement,
)
from zope.interface import implementer
from allmydata.interfaces import IDownloadResults
from allmydata.web.status import DownloadStatusElement
from allmydata.immutable.downloader.status import DownloadStatus
from .common import (
assert_soup_has_favicon,
assert_soup_has_tag_with_content,
)
from ..common import TrialTestCase
from .test_web import FakeHistory
# Test that status.StatusElement can render HTML.
class StatusTests(TrialTestCase):
    """
    Tests for the "Recent and Active Operations" page
    (``allmydata.web.status.StatusElement``).
    """
    def _render_status_page(self, active, recent):
        """
        Synchronously flatten a StatusElement built from the given
        active/recent operation lists into an HTML byte-string.
        """
        elem = StatusElement(active, recent)
        d = flattenString(None, elem)
        return self.successResultOf(d)

    def test_status_page(self):
        """
        The rendered page carries the expected title, favicon, section
        headers, and a table cell for each kind of operation that
        FakeHistory provides.
        """
        status = Status(FakeHistory())
        doc = self._render_status_page(
            status._get_active_operations(),
            status._get_recent_operations()
        )
        soup = BeautifulSoup(doc, 'html5lib')
        assert_soup_has_favicon(self, soup)
        assert_soup_has_tag_with_content(
            self, soup, u"title",
            u"Tahoe-LAFS - Recent and Active Operations"
        )
        assert_soup_has_tag_with_content(
            self, soup, u"h2",
            u"Active Operations:"
        )
        assert_soup_has_tag_with_content(
            self, soup, u"td",
            u"retrieve"
        )
        assert_soup_has_tag_with_content(
            self, soup, u"td",
            u"publish"
        )
        assert_soup_has_tag_with_content(
            self, soup, u"td",
            u"download"
        )
        assert_soup_has_tag_with_content(
            self, soup, u"td",
            u"upload"
        )
        assert_soup_has_tag_with_content(
            self, soup, u"h2",
            "Recent Operations:"
        )
@implementer(IDownloadResults)
class FakeDownloadResults(object):
    """
    A plain data-holder standing in for real download results.

    See IDownloadResults for the meaning of each attribute.
    """
    def __init__(self,
                 file_size=0,
                 servers_used=None,
                 server_problems=None,
                 servermap=None,
                 timings=None):
        """
        See IDownloadResults for parameters.
        """
        # Store every constructor argument verbatim on the instance.
        fields = dict(
            file_size=file_size,
            servers_used=servers_used,
            server_problems=server_problems,
            servermap=servermap,
            timings=timings,
        )
        for field_name, field_value in fields.items():
            setattr(self, field_name, field_value)
class FakeDownloadStatus(DownloadStatus):
    """
    A DownloadStatus whose get_results() returns canned
    FakeDownloadResults instead of real measurements.
    """
    def __init__(self,
                 storage_index = None,
                 file_size = 0,
                 servers_used = None,
                 server_problems = None,
                 servermap = None,
                 timings = None):
        """
        See IDownloadStatus and IDownloadResults for parameters.
        """
        super(FakeDownloadStatus, self).__init__(storage_index, file_size)

        self.servers_used = servers_used
        self.server_problems = server_problems
        self.servermap = servermap
        self.timings = timings

    def get_results(self):
        # NOTE(review): this reads self.size, which is presumably set
        # from file_size by DownloadStatus.__init__ -- confirm against
        # allmydata.immutable.downloader.status.DownloadStatus.
        return FakeDownloadResults(self.size,
                                   self.servers_used,
                                   self.server_problems,
                                   self.servermap,
                                   self.timings)
class DownloadStatusElementTests(TrialTestCase):
    """
    Tests for ```allmydata.web.status.DownloadStatusElement```.
    """

    def _render_download_status_element(self, status):
        """
        :param IDownloadStatus status:
        :return: HTML string rendered by DownloadStatusElement
        """
        elem = DownloadStatusElement(status)
        d = flattenString(None, elem)
        return self.successResultOf(d)

    def test_download_status_element(self):
        """
        See if we can render the page almost fully.
        """
        # Server names like "[omwtc]" in the expectations below are
        # presumably the abbreviated base32 renderings of the "s-1",
        # "s-2", "s-3" ids supplied here -- confirm against the
        # element's server-id formatting.
        status = FakeDownloadStatus(
            "si-1", 123,
            ["s-1", "s-2", "s-3"],
            {"s-1": "unknown problem"},
            {"s-1": [1], "s-2": [1,2], "s-3": [2,3]},
            {"fetch_per_server":
             {"s-1": [1], "s-2": [2,3], "s-3": [3,2]}}
        )

        result = self._render_download_status_element(status)
        soup = BeautifulSoup(result, 'html5lib')

        assert_soup_has_favicon(self, soup)

        assert_soup_has_tag_with_content(
            self, soup, u"title", u"Tahoe-LAFS - File Download Status"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"li", u"File Size: 123 bytes"
        )
        assert_soup_has_tag_with_content(
            self, soup, u"li", u"Progress: 0.0%"
        )
        assert_soup_has_tag_with_content(
            self, soup, u"li", u"Servers Used: [omwtc], [omwte], [omwtg]"
        )
        assert_soup_has_tag_with_content(
            self, soup, u"li", u"Server Problems:"
        )
        assert_soup_has_tag_with_content(
            self, soup, u"li", u"[omwtc]: unknown problem"
        )
        assert_soup_has_tag_with_content(self, soup, u"li", u"Servermap:")
        assert_soup_has_tag_with_content(
            self, soup, u"li", u"[omwtc] has share: #1"
        )
        assert_soup_has_tag_with_content(
            self, soup, u"li", u"[omwte] has shares: #1,#2"
        )
        assert_soup_has_tag_with_content(
            self, soup, u"li", u"[omwtg] has shares: #2,#3"
        )
        assert_soup_has_tag_with_content(
            self, soup, u"li", u"Per-Server Segment Fetch Response Times:"
        )
        assert_soup_has_tag_with_content(
            self, soup, u"li", u"[omwtc]: 1.00s"
        )
        assert_soup_has_tag_with_content(
            self, soup, u"li", u"[omwte]: 2.00s, 3.00s"
        )
        assert_soup_has_tag_with_content(
            self, soup, u"li", u"[omwtg]: 3.00s, 2.00s"
        )

    def test_download_status_element_partial(self):
        """
        See if we can render the page with incomplete download status.
        """
        # All-default FakeDownloadStatus: missing fields must render
        # as "None"/zero rather than crashing the template.
        status = FakeDownloadStatus()
        result = self._render_download_status_element(status)
        soup = BeautifulSoup(result, 'html5lib')

        assert_soup_has_tag_with_content(
            self, soup, u"li", u"Servermap: None"
        )
        assert_soup_has_tag_with_content(
            self, soup, u"li", u"File Size: 0 bytes"
        )
        assert_soup_has_tag_with_content(
            self, soup, u"li", u"Total: None (None)"
        )

View File

@ -22,6 +22,19 @@ class Util(ShouldFailMixin, testutil.ReallyEqualMixin, unittest.TestCase):
self.failUnlessReallyEqual(common.abbreviate_time(0.00123), "1.2ms")
self.failUnlessReallyEqual(common.abbreviate_time(0.000123), "123us")
self.failUnlessReallyEqual(common.abbreviate_time(-123000), "-123000000000us")
self.failUnlessReallyEqual(common.abbreviate_time(2.5), "2.50s")
self.failUnlessReallyEqual(common.abbreviate_time(0.25), "250ms")
self.failUnlessReallyEqual(common.abbreviate_time(0.0021), "2.1ms")
self.failUnlessReallyEqual(common.abbreviate_time(None), "")
self.failUnlessReallyEqual(common.abbreviate_time(2.5), "2.50s")
self.failUnlessReallyEqual(common.abbreviate_time(0.25), "250ms")
self.failUnlessReallyEqual(common.abbreviate_time(0.0021), "2.1ms")
self.failUnlessReallyEqual(common.abbreviate_time(0.000123), "123us")
self.failUnlessReallyEqual(common.abbreviate_rate(None), "")
self.failUnlessReallyEqual(common.abbreviate_rate(2500000), "2.50MBps")
self.failUnlessReallyEqual(common.abbreviate_rate(30100), "30.1kBps")
self.failUnlessReallyEqual(common.abbreviate_rate(123), "123Bps")
def test_compute_rate(self):
self.failUnlessReallyEqual(common.compute_rate(None, None), None)
@ -44,6 +57,9 @@ class Util(ShouldFailMixin, testutil.ReallyEqualMixin, unittest.TestCase):
self.failUnlessReallyEqual(common.abbreviate_rate(1234000), "1.23MBps")
self.failUnlessReallyEqual(common.abbreviate_rate(12340), "12.3kBps")
self.failUnlessReallyEqual(common.abbreviate_rate(123), "123Bps")
self.failUnlessReallyEqual(common.abbreviate_rate(2500000), "2.50MBps")
self.failUnlessReallyEqual(common.abbreviate_rate(30100), "30.1kBps")
self.failUnlessReallyEqual(common.abbreviate_rate(123), "123Bps")
def test_abbreviate_size(self):
self.failUnlessReallyEqual(common.abbreviate_size(None), "")

View File

@ -33,7 +33,6 @@ from allmydata.immutable import upload
from allmydata.immutable.downloader.status import DownloadStatus
from allmydata.dirnode import DirectoryNode
from allmydata.nodemaker import NodeMaker
from allmydata.web import status
from allmydata.web.common import WebError, MultiFormatPage
from allmydata.util import fileutil, base32, hashutil
from allmydata.util.consumer import download_to_data
@ -954,8 +953,9 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
def test_storage(self):
d = self.GET("/storage")
def _check(res):
self.failUnlessIn('Storage Server Status', res)
self.failUnlessIn(FAVICON_MARKUP, res)
soup = BeautifulSoup(res, 'html5lib')
assert_soup_has_text(self, soup, 'Storage Server Status')
assert_soup_has_favicon(self, soup)
res_u = res.decode('utf-8')
self.failUnlessIn(u'<li>Server Nickname: <span class="nickname mine">fake_nickname \u263A</span></li>', res_u)
d.addCallback(_check)
@ -971,11 +971,11 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
d = self.GET("/status", followRedirect=True)
def _check(res):
self.failUnlessIn('Recent and Active Operations', res)
self.failUnlessIn('"down-%d"' % dl_num, res)
self.failUnlessIn('"up-%d"' % ul_num, res)
self.failUnlessIn('"mapupdate-%d"' % mu_num, res)
self.failUnlessIn('"publish-%d"' % pub_num, res)
self.failUnlessIn('"retrieve-%d"' % ret_num, res)
self.failUnlessIn('"/status/down-%d"' % dl_num, res)
self.failUnlessIn('"/status/up-%d"' % ul_num, res)
self.failUnlessIn('"/status/mapupdate-%d"' % mu_num, res)
self.failUnlessIn('"/status/publish-%d"' % pub_num, res)
self.failUnlessIn('"/status/retrieve-%d"' % ret_num, res)
d.addCallback(_check)
d.addCallback(lambda res: self.GET("/status/?t=json"))
def _check_json(res):
@ -1034,28 +1034,209 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
return d
def test_status_numbers(self):
drrm = status.DownloadResultsRendererMixin()
self.failUnlessReallyEqual(drrm.render_time(None, None), "")
self.failUnlessReallyEqual(drrm.render_time(None, 2.5), "2.50s")
self.failUnlessReallyEqual(drrm.render_time(None, 0.25), "250ms")
self.failUnlessReallyEqual(drrm.render_time(None, 0.0021), "2.1ms")
self.failUnlessReallyEqual(drrm.render_time(None, 0.000123), "123us")
self.failUnlessReallyEqual(drrm.render_rate(None, None), "")
self.failUnlessReallyEqual(drrm.render_rate(None, 2500000), "2.50MBps")
self.failUnlessReallyEqual(drrm.render_rate(None, 30100), "30.1kBps")
self.failUnlessReallyEqual(drrm.render_rate(None, 123), "123Bps")
def test_status_path_nodash_error(self):
    """
    Expect an error, because path is expected to be of the form
    "/status/{up,down,..}-%number", with a hyphen.
    """
    # "nodash" has no '-' separating the kind from the counter, so
    # the status resource must answer 400 Bad Request.
    return self.shouldFail2(error.Error,
                            "test_status_path_nodash",
                            "400 Bad Request",
                            "no '-' in 'nodash'",
                            self.GET,
                            "/status/nodash")
urrm = status.UploadResultsRendererMixin()
self.failUnlessReallyEqual(urrm.render_time(None, None), "")
self.failUnlessReallyEqual(urrm.render_time(None, 2.5), "2.50s")
self.failUnlessReallyEqual(urrm.render_time(None, 0.25), "250ms")
self.failUnlessReallyEqual(urrm.render_time(None, 0.0021), "2.1ms")
self.failUnlessReallyEqual(urrm.render_time(None, 0.000123), "123us")
self.failUnlessReallyEqual(urrm.render_rate(None, None), "")
self.failUnlessReallyEqual(urrm.render_rate(None, 2500000), "2.50MBps")
self.failUnlessReallyEqual(urrm.render_rate(None, 30100), "30.1kBps")
self.failUnlessReallyEqual(urrm.render_rate(None, 123), "123Bps")
def test_status_page_contains_links(self):
    """
    Check that the rendered `/status` page contains all the
    expected links.

    For each operation kind recorded in the node's history, the
    page must contain an anchor pointing at that operation's
    detail page (e.g. "/status/retrieve-0").
    """
    def _check_status_page_links(response):
        (body, status, _) = response
        self.failUnlessReallyEqual(int(status), 200)
        soup = BeautifulSoup(body, 'html5lib')
        h = self.s.get_history()

        # Check for `<a href="/status/retrieve-0">Not started</a>`
        ret_num = h.list_all_retrieve_statuses()[0].get_counter()
        assert_soup_has_tag_with_attributes_and_content(
            self, soup, u"a",
            u"Not started",
            {u"href": u"/status/retrieve-{}".format(ret_num)}
        )

        # Check for `<a href="/status/publish-0">Not started</a></td>`
        pub_num = h.list_all_publish_statuses()[0].get_counter()
        assert_soup_has_tag_with_attributes_and_content(
            self, soup, u"a",
            u"Not started",
            {u"href": u"/status/publish-{}".format(pub_num)}
        )

        # Check for `<a href="/status/mapupdate-0">Not started</a>`
        mu_num = h.list_all_mapupdate_statuses()[0].get_counter()
        assert_soup_has_tag_with_attributes_and_content(
            self, soup, u"a",
            u"Not started",
            {u"href": u"/status/mapupdate-{}".format(mu_num)}
        )

        # Check for `<a href="/status/down-0">fetching segments
        # 2,3; errors on segment 1</a>`: see build_one_ds() above.
        dl_num = h.list_all_download_statuses()[0].get_counter()
        assert_soup_has_tag_with_attributes_and_content(
            self, soup, u"a",
            u"fetching segments 2,3; errors on segment 1",
            {u"href": u"/status/down-{}".format(dl_num)}
        )

        # Check for `<a href="/status/up-0">Not started</a>`
        ul_num = h.list_all_upload_statuses()[0].get_counter()
        assert_soup_has_tag_with_attributes_and_content(
            self, soup, u"a",
            u"Not started",
            {u"href": u"/status/up-{}".format(ul_num)}
        )

    d = self.GET("/status", return_response=True)
    d.addCallback(_check_status_page_links)
    return d
def test_status_path_trailing_slashes(self):
    """
    Test that both `GET /status` and `GET /status/` are treated
    alike, but reject any additional trailing slashes and other
    non-existent child nodes.
    """
    def _check_status(response):
        """
        Assert a 200 response whose body is the "Recent and Active
        Operations" page carrying the favicon.
        """
        (body, status, _) = response
        self.failUnlessReallyEqual(int(status), 200)
        soup = BeautifulSoup(body, 'html5lib')
        assert_soup_has_favicon(self, soup)
        assert_soup_has_tag_with_content(
            self, soup, u"title",
            u"Tahoe-LAFS - Recent and Active Operations"
        )

    # BUG FIX: the original rebound `d` for every request, so only
    # the final Deferred was returned to trial and the earlier
    # requests' assertions were never waited upon (their failures
    # could be lost or surface as dirty-reactor errors).  Chain the
    # requests sequentially instead so every check is awaited.
    d = self.GET("/status", return_response=True)
    d.addCallback(_check_status)
    d.addCallback(lambda ign: self.GET("/status/", return_response=True))
    d.addCallback(_check_status)
    d.addCallback(lambda ign: self.shouldFail2(
        error.Error,
        "test_status_path_trailing_slashes",
        "400 Bad Request",
        "no '-' in ''",
        self.GET,
        "/status//"))
    d.addCallback(lambda ign: self.shouldFail2(
        error.Error,
        "test_status_path_trailing_slashes",
        "400 Bad Request",
        "no '-' in ''",
        self.GET,
        "/status////////"))
    return d
def test_status_path_404_error(self):
    """
    Looking for non-existent statuses under child paths should
    exercises all the iterators in web.status.Status.getChild().

    The test suite (hopefully!) would not have done any setup for
    a very large number of statuses at this point, now or in the
    future, so these all should always return 404.
    """
    # BUG FIX: the original rebound `d` for each GET and returned
    # only the final Deferred, so the earlier 404 checks were never
    # awaited by trial.  Chain them instead: addBoth(should404)
    # converts the expected 404 failure into success, and the
    # following addCallback only fires if that check passed.
    d = self.GET("/status/up-9999999")
    d.addBoth(self.should404, "test_status_path_404_error (up)")
    d.addCallback(lambda ign: self.GET("/status/down-9999999"))
    d.addBoth(self.should404, "test_status_path_404_error (down)")
    d.addCallback(lambda ign: self.GET("/status/mapupdate-9999999"))
    d.addBoth(self.should404, "test_status_path_404_error (mapupdate)")
    d.addCallback(lambda ign: self.GET("/status/publish-9999999"))
    d.addBoth(self.should404, "test_status_path_404_error (publish)")
    d.addCallback(lambda ign: self.GET("/status/retrieve-9999999"))
    d.addBoth(self.should404, "test_status_path_404_error (retrieve)")
    return d
def _check_status_subpath_result(self, result, expected_title):
    """
    Helper to verify that results of "GET /status/up-0" and
    similar are as expected.

    :param result: a (body, status, headers) tuple from GET with
        return_response=True
    :param unicode expected_title: the <title> text the rendered
        page must contain
    """
    body, status, _ = result
    self.failUnlessReallyEqual(int(status), 200)
    soup = BeautifulSoup(body, 'html5lib')
    assert_soup_has_favicon(self, soup)
    assert_soup_has_tag_with_content(
        self, soup, u"title", expected_title
    )
def test_status_up_subpath(self):
    """
    See that "GET /status/up-0" works.
    """
    history = self.s.get_history()
    counter = history.list_all_upload_statuses()[0].get_counter()
    page = self.GET("/status/up-{}".format(counter), return_response=True)
    page.addCallback(
        self._check_status_subpath_result,
        u"Tahoe-LAFS - File Upload Status",
    )
    return page
def test_status_down_subpath(self):
    """
    See that "GET /status/down-0" works.
    """
    history = self.s.get_history()
    counter = history.list_all_download_statuses()[0].get_counter()
    page = self.GET("/status/down-{}".format(counter), return_response=True)
    page.addCallback(
        self._check_status_subpath_result,
        u"Tahoe-LAFS - File Download Status",
    )
    return page
def test_status_mapupdate_subpath(self):
    """
    See that "GET /status/mapupdate-0" works.
    """
    history = self.s.get_history()
    counter = history.list_all_mapupdate_statuses()[0].get_counter()
    page = self.GET("/status/mapupdate-{}".format(counter), return_response=True)
    page.addCallback(
        self._check_status_subpath_result,
        u"Tahoe-LAFS - Mutable File Servermap Update Status",
    )
    return page
def test_status_publish_subpath(self):
    """
    See that "GET /status/publish-0" works.
    """
    history = self.s.get_history()
    counter = history.list_all_publish_statuses()[0].get_counter()
    page = self.GET("/status/publish-{}".format(counter), return_response=True)
    page.addCallback(
        self._check_status_subpath_result,
        u"Tahoe-LAFS - Mutable File Publish Status",
    )
    return page
def test_status_retrieve_subpath(self):
    """
    See that "GET /status/retrieve-0" works.
    """
    history = self.s.get_history()
    counter = history.list_all_retrieve_statuses()[0].get_counter()
    page = self.GET("/status/retrieve-{}".format(counter), return_response=True)
    page.addCallback(
        self._check_status_subpath_result,
        u"Tahoe-LAFS - Mutable File Retrieve Status",
    )
    return page
def test_GET_FILEURL(self):
d = self.GET(self.public_url + "/foo/bar.txt")

View File

View File

@ -0,0 +1,289 @@
# -*- coding: utf-8 -*-
# Tahoe-LAFS -- secure, distributed storage grid
#
# Copyright © 2020 The Tahoe-LAFS Software Foundation
#
# This file is part of Tahoe-LAFS.
#
# See the docs/about.rst file for licensing information.
"""
Test-helpers for clients that use the WebUI.
"""
import hashlib
import attr
from hyperlink import DecodedURL
from twisted.web.resource import (
Resource,
)
from twisted.web.iweb import (
IBodyProducer,
)
from twisted.web import (
http,
)
from twisted.internet.defer import (
succeed,
)
from treq.client import (
HTTPClient,
FileBodyProducer,
)
from treq.testing import (
RequestTraversalAgent,
)
from zope.interface import implementer
import allmydata.uri
from allmydata.util import (
base32,
)
__all__ = (
"create_fake_tahoe_root",
"create_tahoe_treq_client",
)
class _FakeTahoeRoot(Resource, object):
    """
    An in-memory 'fake' of a Tahoe WebUI root. Currently it only
    implements (some of) the `/uri` resource.
    """

    def __init__(self, uri=None):
        """
        :param uri: a Resource to handle the `/uri` tree.
        """
        Resource.__init__(self)  # this is an old-style class :(
        self._uri = uri
        # NOTE(review): if `uri` is left as the default None, this
        # registers None as the child resource -- confirm that all
        # callers pass a real Resource (create_fake_tahoe_root does).
        self.putChild(b"uri", self._uri)

    def add_data(self, kind, data):
        # Delegate to the /uri handler, returning only the
        # capability-string and dropping the "freshly added" flag.
        fresh, cap = self._uri.add_data(kind, data)
        return cap
# Every capability prefix (e.g. "URI:CHK:") that allmydata.uri knows
# about, discovered by scanning the module for attributes that declare
# a BASE_STRING.
KNOWN_CAPABILITIES = [
    candidate.BASE_STRING
    for candidate in (
        getattr(allmydata.uri, attribute_name)
        for attribute_name in dir(allmydata.uri)
    )
    if hasattr(candidate, 'BASE_STRING')
]
def capability_generator(kind):
    """
    Deterministically generates a stream of valid capabilities of the
    given kind. The N, K and size values aren't related to anything
    real.

    :param str kind: the kind of capability, like `URI:CHK`

    :returns: a generator that yields new capablities of a particular
        kind.

    :raises ValueError: if `kind` is not in KNOWN_CAPABILITIES (note
        that, this being a generator function, the check only runs on
        the first iteration, not at call time).
    """
    if kind not in KNOWN_CAPABILITIES:
        raise ValueError(
            # BUG FIX: the closing quote belongs after the kind, not
            # after the parenthetical list of valid kinds.
            "Unknown capability kind '{}' (valid are {})".format(
                kind,
                ", ".join(KNOWN_CAPABILITIES),
            )
        )
    # what we do here is to start with empty hashers for the key and
    # ueb_hash and repeatedly feed() them a zero byte on each
    # iteration .. so the same sequence of capabilities will always be
    # produced. We could add a seed= argument if we wanted to produce
    # different sequences.
    number = 0
    key_hasher = hashlib.new("sha256")
    ueb_hasher = hashlib.new("sha256")  # ueb means "URI Extension Block"

    # capabilities are "prefix:<128-bits-base32>:<256-bits-base32>:N:K:size"
    while True:
        number += 1
        key_hasher.update("\x00")
        ueb_hasher.update("\x00")
        key = base32.b2a(key_hasher.digest()[:16])  # key is 16 bytes
        ueb_hash = base32.b2a(ueb_hasher.digest())  # ueb hash is 32 bytes
        cap = u"{kind}{key}:{ueb_hash}:{n}:{k}:{size}".format(
            kind=kind,
            key=key,
            ueb_hash=ueb_hash,
            n=1,
            k=1,
            size=number * 1000,
        )
        yield cap.encode("ascii")
@attr.s
class _FakeTahoeUriHandler(Resource, object):
    """
    An in-memory fake of (some of) the `/uri` endpoint of a Tahoe
    WebUI
    """

    # handle every path below /uri ourselves instead of dispatching
    # to child resources
    isLeaf = True

    # maps capability-string -> stored bytes
    data = attr.ib(default=attr.Factory(dict))
    # maps capability kind -> its deterministic capability_generator
    capability_generators = attr.ib(default=attr.Factory(dict))

    def _generate_capability(self, kind):
        """
        :param str kind: any valid capability-string type

        :returns: the next capability-string for the given kind
        """
        if kind not in self.capability_generators:
            self.capability_generators[kind] = capability_generator(kind)
        capability = next(self.capability_generators[kind])
        return capability

    def add_data(self, kind, data):
        """
        adds some data to our grid

        :returns: a two-tuple: a bool (True if the data is freshly added) and a capability-string
        """
        if not isinstance(data, bytes):
            raise TypeError("'data' must be bytes")

        # de-duplicate: identical bytes get their existing capability
        # back instead of a new one
        for k in self.data:
            if self.data[k] == data:
                return (False, k)

        cap = self._generate_capability(kind)
        # it should be impossible for this to already be in our data,
        # but check anyway to be sure
        if cap in self.data:
            raise Exception("Internal error; key already exists somehow")
        self.data[cap] = data
        return (True, cap)

    def render_PUT(self, request):
        # PUT /uri stores the request body as an immutable CHK file.
        data = request.content.read()
        fresh, cap = self.add_data("URI:CHK:", data)
        if fresh:
            request.setResponseCode(http.CREATED)  # real code does this for brand-new files
        else:
            request.setResponseCode(http.OK)  # replaced/modified files
        return cap

    def render_POST(self, request):
        # Only "t=mkdir-immutable" is supported; any other t= value
        # raises KeyError out of the dict lookup.
        t = request.args[u"t"][0]
        data = request.content.read()
        type_to_kind = {
            "mkdir-immutable": "URI:DIR2-CHK:"
        }
        kind = type_to_kind[t]
        fresh, cap = self.add_data(kind, data)
        return cap

    def render_GET(self, request):
        # A capability may be given either as "?uri=<cap>" or as a
        # "/uri/<cap>" path segment; the query argument wins.
        uri = DecodedURL.from_text(request.uri.decode('utf8'))
        capability = None
        for arg, value in uri.query:
            if arg == u"uri":
                capability = value
        # it's legal to use the form "/uri/<capability>"
        if capability is None and request.postpath and request.postpath[0]:
            capability = request.postpath[0]

        # if we don't yet have a capability, that's an error
        if capability is None:
            request.setResponseCode(http.BAD_REQUEST)
            return b"GET /uri requires uri="

        # the user gave us a capability; if our Grid doesn't have any
        # data for it, that's an error.
        if capability not in self.data:
            request.setResponseCode(http.BAD_REQUEST)
            # NOTE(review): this returns unicode (str.decode) rather
            # than bytes; test_download_missing observes a 500 for
            # this path -- confirm whether 400-with-bytes was the
            # intent.
            return u"No data for '{}'".format(capability).decode("ascii")

        return self.data[capability]
def create_fake_tahoe_root():
"""
If you wish to pre-populate data into the fake Tahoe grid, retain
a reference to this root by creating it yourself and passing it to
`create_tahoe_treq_client`. For example::
root = create_fake_tahoe_root()
cap_string = root.add_data(...)
client = create_tahoe_treq_client(root)
:returns: an IResource instance that will handle certain Tahoe URI
endpoints similar to a real Tahoe server.
"""
root = _FakeTahoeRoot(
uri=_FakeTahoeUriHandler(),
)
return root
@implementer(IBodyProducer)
class _SynchronousProducer(object):
"""
A partial implementation of an :obj:`IBodyProducer` which produces its
entire payload immediately. There is no way to access to an instance of
this object from :obj:`RequestTraversalAgent` or :obj:`StubTreq`, or even a
:obj:`Resource: passed to :obj:`StubTreq`.
This does not implement the :func:`IBodyProducer.stopProducing` method,
because that is very difficult to trigger. (The request from
`RequestTraversalAgent` would have to be canceled while it is still in the
transmitting state), and the intent is to use `RequestTraversalAgent` to
make synchronous requests.
"""
def __init__(self, body):
"""
Create a synchronous producer with some bytes.
"""
if isinstance(body, FileBodyProducer):
body = body._inputFile.read()
if not isinstance(body, bytes):
raise ValueError(
"'body' must be bytes not '{}'".format(type(body))
)
self.body = body
self.length = len(body)
def startProducing(self, consumer):
"""
Immediately produce all data.
"""
consumer.write(self.body)
return succeed(None)
def create_tahoe_treq_client(root=None):
"""
:param root: an instance created via `create_fake_tahoe_root`. The
caller might want a copy of this to call `.add_data` for example.
:returns: an instance of treq.client.HTTPClient wired up to
in-memory fakes of the Tahoe WebUI. Only a subset of the real
WebUI is available.
"""
if root is None:
root = create_fake_tahoe_root()
client = HTTPClient(
agent=RequestTraversalAgent(root),
data_to_body_producer=_SynchronousProducer,
)
return client

View File

@ -0,0 +1,31 @@
"""
Track the port to Python 3.
This module has been ported to Python 3.
"""
from __future__ import unicode_literals
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from future.utils import PY2
if PY2:
from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401
# Keep these sorted alphabetically, to reduce merge conflicts:
PORTED_MODULES = [
"allmydata.util.assertutil",
"allmydata.util.deferredutil",
"allmydata.util.humanreadable",
"allmydata.util.mathutil",
"allmydata.util.namespace",
"allmydata.util.pollmixin",
"allmydata.util._python3",
]
PORTED_TEST_MODULES = [
"allmydata.test.test_deferredutil",
"allmydata.test.test_humanreadable",
"allmydata.test.test_python3",
]

View File

@ -1,57 +1,23 @@
"""
Tests useful in assertion checking, prints out nicely formated messages too.
Backwards compatibility layer, the versions in pyutil are better maintained and
have tests.
Ported to Python 3.
"""
from allmydata.util.humanreadable import hr
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
def _assert(___cond=False, *___args, **___kwargs):
if ___cond:
return True
msgbuf=[]
if ___args:
msgbuf.append("%s %s" % tuple(map(hr, (___args[0], type(___args[0]),))))
msgbuf.extend([", %s %s" % tuple(map(hr, (arg, type(arg),))) for arg in ___args[1:]])
if ___kwargs:
msgbuf.append(", %s: %s %s" % ((___kwargs.items()[0][0],) + tuple(map(hr, (___kwargs.items()[0][1], type(___kwargs.items()[0][1]),)))))
else:
if ___kwargs:
msgbuf.append("%s: %s %s" % ((___kwargs.items()[0][0],) + tuple(map(hr, (___kwargs.items()[0][1], type(___kwargs.items()[0][1]),)))))
msgbuf.extend([", %s: %s %s" % tuple(map(hr, (k, v, type(v),))) for k, v in ___kwargs.items()[1:]])
from future.utils import PY2
if PY2:
from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401
raise AssertionError("".join(msgbuf))
def precondition(___cond=False, *___args, **___kwargs):
if ___cond:
return True
msgbuf=["precondition", ]
if ___args or ___kwargs:
msgbuf.append(": ")
if ___args:
msgbuf.append("%s %s" % tuple(map(hr, (___args[0], type(___args[0]),))))
msgbuf.extend([", %s %s" % tuple(map(hr, (arg, type(arg),))) for arg in ___args[1:]])
if ___kwargs:
msgbuf.append(", %s: %s %s" % ((___kwargs.items()[0][0],) + tuple(map(hr, (___kwargs.items()[0][1], type(___kwargs.items()[0][1]),)))))
else:
if ___kwargs:
msgbuf.append("%s: %s %s" % ((___kwargs.items()[0][0],) + tuple(map(hr, (___kwargs.items()[0][1], type(___kwargs.items()[0][1]),)))))
msgbuf.extend([", %s: %s %s" % tuple(map(hr, (k, v, type(v),))) for k, v in ___kwargs.items()[1:]])
# The API importers expect:
from pyutil.assertutil import _assert, precondition, postcondition
raise AssertionError("".join(msgbuf))
def postcondition(___cond=False, *___args, **___kwargs):
if ___cond:
return True
msgbuf=["postcondition", ]
if ___args or ___kwargs:
msgbuf.append(": ")
if ___args:
msgbuf.append("%s %s" % tuple(map(hr, (___args[0], type(___args[0]),))))
msgbuf.extend([", %s %s" % tuple(map(hr, (arg, type(arg),))) for arg in ___args[1:]])
if ___kwargs:
msgbuf.append(", %s: %s %s" % ((___kwargs.items()[0][0],) + tuple(map(hr, (___kwargs.items()[0][1], type(___kwargs.items()[0][1]),)))))
else:
if ___kwargs:
msgbuf.append("%s: %s %s" % ((___kwargs.items()[0][0],) + tuple(map(hr, (___kwargs.items()[0][1], type(___kwargs.items()[0][1]),)))))
msgbuf.extend([", %s: %s %s" % tuple(map(hr, (k, v, type(v),))) for k, v in ___kwargs.items()[1:]])
raise AssertionError("".join(msgbuf))
__all__ = ["_assert", "precondition", "postcondition"]

View File

@ -52,13 +52,13 @@ def b2a(os):
@return the contents of os in base-32 encoded form
"""
return b2a_l(os, len(os)*8)
return _b2a_l(os, len(os)*8)
def b2a_or_none(os):
if os is not None:
return b2a(os)
def b2a_l(os, lengthinbits):
def _b2a_l(os, lengthinbits):
"""
@param os the data to be encoded (a string)
@param lengthinbits the number of bits of data in os to be encoded
@ -204,9 +204,9 @@ def a2b(cs):
precondition(could_be_base32_encoded(cs), "cs is required to be possibly base32 encoded data.", cs=cs)
precondition(isinstance(cs, six.binary_type), cs)
return a2b_l(cs, num_octets_that_encode_to_this_many_quintets(len(cs))*8)
return _a2b_l(cs, num_octets_that_encode_to_this_many_quintets(len(cs))*8)
def a2b_l(cs, lengthinbits):
def _a2b_l(cs, lengthinbits):
"""
@param lengthinbits the number of bits of data in encoded into cs
@ -261,5 +261,8 @@ def a2b_l(cs, lengthinbits):
pos = pos * 256
assert len(octets) == numoctets, "len(octets): %s, numoctets: %s, octets: %s" % (len(octets), numoctets, octets,)
res = ''.join(map(chr, octets))
precondition(b2a_l(res, lengthinbits) == cs, "cs is required to be the canonical base-32 encoding of some data.", b2a(res), res=res, cs=cs)
precondition(_b2a_l(res, lengthinbits) == cs, "cs is required to be the canonical base-32 encoding of some data.", b2a(res), res=res, cs=cs)
return res
__all__ = ["b2a", "a2b", "b2a_or_none", "BASE32CHAR_3bits", "BASE32CHAR_1bits", "BASE32CHAR", "BASE32STR_anybytes", "could_be_base32_encoded"]

View File

@ -1,7 +1,21 @@
"""
Utilities for working with Twisted Deferreds.
Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from future.utils import PY2
if PY2:
from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401
import time
from foolscap.api import eventually, fireEventually
from foolscap.api import eventually
from twisted.internet import defer, reactor, error
from twisted.python.failure import Failure
@ -130,7 +144,7 @@ class HookMixin(object):
self._hooks[name] = (d, ignore_count)
return d
def _call_hook(self, res, name, async=False):
def _call_hook(self, res, name, **kwargs):
"""
Called to trigger the hook, with argument 'res'. This is a no-op if
the hook is unset. If the hook's ignore_count is positive, it will be
@ -142,7 +156,10 @@ class HookMixin(object):
which will typically cause the test to also fail.
'res' is returned so that the current result or failure will be passed
through.
Accepts a single keyword argument, async, defaulting to False.
"""
async_ = kwargs.get("async", False)
hook = self._hooks[name]
if hook is None:
return res # pass on error/result
@ -153,7 +170,7 @@ class HookMixin(object):
self._hooks[name] = (d, ignore_count - 1)
else:
self._hooks[name] = None
if async:
if async_:
_with_log(eventually_callback(d), res)
else:
_with_log(d.callback, res)
@ -163,42 +180,6 @@ class HookMixin(object):
log.msg(msg, level=log.NOISY)
def async_iterate(process, iterable, *extra_args, **kwargs):
"""
I iterate over the elements of 'iterable' (which may be deferred), eventually
applying 'process' to each one, optionally with 'extra_args' and 'kwargs'.
'process' should return a (possibly deferred) boolean: True to continue the
iteration, False to stop.
I return a Deferred that fires with True if all elements of the iterable
were processed (i.e. 'process' only returned True values); with False if
the iteration was stopped by 'process' returning False; or that fails with
the first failure of either 'process' or the iterator.
"""
iterator = iter(iterable)
d = defer.succeed(None)
def _iterate(ign):
d2 = defer.maybeDeferred(iterator.next)
def _cb(item):
d3 = defer.maybeDeferred(process, item, *extra_args, **kwargs)
def _maybe_iterate(res):
if res:
d4 = fireEventually()
d4.addCallback(_iterate)
return d4
return False
d3.addCallback(_maybe_iterate)
return d3
def _eb(f):
f.trap(StopIteration)
return True
d2.addCallbacks(_cb, _eb)
return d2
d.addCallback(_iterate)
return d
def for_items(cb, mapping):
"""
For each (key, value) pair in a mapping, I add a callback to cb(None, key, value)

View File

@ -12,7 +12,7 @@ from errno import ENOENT
if sys.platform == "win32":
from ctypes import WINFUNCTYPE, WinError, windll, POINTER, byref, c_ulonglong, \
create_unicode_buffer, get_last_error
from ctypes.wintypes import BOOL, DWORD, LPCWSTR, LPWSTR, LPVOID, HANDLE
from ctypes.wintypes import BOOL, DWORD, LPCWSTR, LPWSTR, LPVOID
from twisted.python import log
@ -538,60 +538,6 @@ def get_available_space(whichdir, reserved_space):
return 0
if sys.platform == "win32":
# <http://msdn.microsoft.com/en-us/library/aa363858%28v=vs.85%29.aspx>
CreateFileW = WINFUNCTYPE(
HANDLE, LPCWSTR, DWORD, DWORD, LPVOID, DWORD, DWORD, HANDLE,
use_last_error=True
)(("CreateFileW", windll.kernel32))
GENERIC_WRITE = 0x40000000
FILE_SHARE_READ = 0x00000001
FILE_SHARE_WRITE = 0x00000002
OPEN_EXISTING = 3
INVALID_HANDLE_VALUE = 0xFFFFFFFF
# <http://msdn.microsoft.com/en-us/library/aa364439%28v=vs.85%29.aspx>
FlushFileBuffers = WINFUNCTYPE(
BOOL, HANDLE,
use_last_error=True
)(("FlushFileBuffers", windll.kernel32))
# <http://msdn.microsoft.com/en-us/library/ms724211%28v=vs.85%29.aspx>
CloseHandle = WINFUNCTYPE(
BOOL, HANDLE,
use_last_error=True
)(("CloseHandle", windll.kernel32))
# <http://social.msdn.microsoft.com/forums/en-US/netfxbcl/thread/4465cafb-f4ed-434f-89d8-c85ced6ffaa8/>
def flush_volume(path):
abspath = os.path.realpath(path)
if abspath.startswith("\\\\?\\"):
abspath = abspath[4 :]
drive = os.path.splitdrive(abspath)[0]
print("flushing %r" % (drive,))
hVolume = CreateFileW(u"\\\\.\\" + drive,
GENERIC_WRITE,
FILE_SHARE_READ | FILE_SHARE_WRITE,
None,
OPEN_EXISTING,
0,
None
)
if hVolume == INVALID_HANDLE_VALUE:
raise WinError(get_last_error())
if FlushFileBuffers(hVolume) == 0:
raise WinError(get_last_error())
CloseHandle(hVolume)
else:
def flush_volume(path):
# use sync()?
pass
class ConflictError(Exception):
pass

View File

@ -1,5 +1,20 @@
import exceptions, os
from repr import Repr
"""
Utilities for turning objects into human-readable strings.
This module has been ported to Python 3.
"""
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from future.utils import PY2
if PY2:
from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401
import os
from reprlib import Repr
class BetterRepr(Repr, object):
def __init__(self):
@ -14,21 +29,21 @@ class BetterRepr(Repr, object):
self.maxother = 300
def repr_function(self, obj, level):
if hasattr(obj, 'func_code'):
return '<' + obj.func_name + '() at ' + os.path.basename(obj.func_code.co_filename) + ':' + str(obj.func_code.co_firstlineno) + '>'
if hasattr(obj, '__code__'):
return '<' + obj.__name__ + '() at ' + os.path.basename(obj.__code__.co_filename) + ':' + str(obj.__code__.co_firstlineno) + '>'
else:
return '<' + obj.func_name + '() at (builtin)'
return '<' + obj.__name__ + '() at (builtin)'
def repr_instance_method(self, obj, level):
if hasattr(obj, 'func_code'):
return '<' + obj.im_class.__name__ + '.' + obj.im_func.__name__ + '() at ' + os.path.basename(obj.im_func.func_code.co_filename) + ':' + str(obj.im_func.func_code.co_firstlineno) + '>'
if hasattr(obj, '__code__'):
return '<' + obj.__self__.__class__.__name__ + '.' + obj.__func__.__name__ + '() at ' + os.path.basename(obj.__func__.__code__.co_filename) + ':' + str(obj.__func__.__code__.co_firstlineno) + '>'
else:
return '<' + obj.im_class.__name__ + '.' + obj.im_func.__name__ + '() at (builtin)'
return '<' + obj.__self__.__class__.__name__ + '.' + obj.__func__.__name__ + '() at (builtin)'
def repr_long(self, obj, level):
s = repr(obj) # XXX Hope this isn't too slow...
if len(s) > self.maxlong:
i = max(0, (self.maxlong-3)/2)
i = max(0, (self.maxlong-3) // 2)
j = max(0, self.maxlong-3-i)
s = s[:i] + '...' + s[len(s)-j:]
if s[-1] == 'L':
@ -43,7 +58,7 @@ class BetterRepr(Repr, object):
on it. If it is an instance of list call self.repr_list() on it. Else
call Repr.repr_instance().
"""
if isinstance(obj, exceptions.Exception):
if isinstance(obj, Exception):
# Don't cut down exception strings so much.
tms = self.maxstring
self.maxstring = max(512, tms * 4)
@ -91,7 +106,7 @@ class BetterRepr(Repr, object):
if level <= 0: return '{...}'
s = ''
n = len(obj)
items = obj.items()[:min(n, self.maxdict)]
items = list(obj.items())[:min(n, self.maxdict)]
items.sort()
for key, val in items:
entry = self.repr1(key, level-1) + ':' + self.repr1(val, level-1)

View File

@ -1,71 +1,28 @@
"""
A few commonly needed functions.
Backwards compatibility for direct imports.
Ported to Python 3.
"""
import math
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
def div_ceil(n, d):
"""
The smallest integer k such that k*d >= n.
"""
return (n/d) + (n%d != 0)
from future.utils import PY2
if PY2:
from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401
def next_multiple(n, k):
"""
The smallest multiple of k which is >= n.
"""
return div_ceil(n, k) * k
def pad_size(n, k):
"""
The smallest number that has to be added to n so that n is a multiple of k.
"""
if n%k:
return k - n%k
else:
return 0
# The API importers expect:
from pyutil.mathutil import div_ceil, next_multiple, pad_size, is_power_of_k, next_power_of_k, ave, log_ceil, log_floor
def is_power_of_k(n, k):
return k**int(math.log(n, k) + 0.5) == n
def next_power_of_k(n, k):
if n == 0:
x = 0
else:
x = int(math.log(n, k) + 0.5)
if k**x < n:
return k**(x+1)
else:
return k**x
def ave(l):
return sum(l) / len(l)
def log_ceil(n, b):
"""
The smallest integer k such that b^k >= n.
log_ceil(n, 2) is the number of bits needed to store any of n values, e.g.
the number of bits needed to store any of 128 possible values is 7.
"""
p = 1
k = 0
while p < n:
p *= b
k += 1
return k
def log_floor(n, b):
"""
The largest integer k such that b^k <= n.
"""
p = 1
k = 0
while p <= n:
p *= b
k += 1
return k - 1
# This function is not present in pyutil.mathutil:
def round_sigfigs(f, n):
fmt = "%." + str(n-1) + "e"
return float(fmt % f)
__all__ = ["div_ceil", "next_multiple", "pad_size", "is_power_of_k", "next_power_of_k", "ave", "log_ceil", "log_floor", "round_sigfigs"]

View File

@ -1,3 +1,6 @@
"""
This module has been ported to Python 3.
"""
class Namespace(object):
pass

View File

@ -1,4 +1,17 @@
"""
Polling utility that returns Deferred.
Ported to Python 3.
"""
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from future.utils import PY2
if PY2:
from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401
import time
from twisted.internet import task

View File

@ -1,7 +1,7 @@
from twisted.internet import address
from foolscap.api import Violation, RemoteException, DeadReferenceError, \
SturdyRef
from foolscap.api import Violation, RemoteException, SturdyRef
def add_version_to_remote_reference(rref, default):
"""I try to add a .version attribute to the given RemoteReference. I call
@ -19,12 +19,6 @@ def add_version_to_remote_reference(rref, default):
d.addCallbacks(_got_version, _no_get_version)
return d
def trap_and_discard(f, *errorTypes):
f.trap(*errorTypes)
def trap_deadref(f):
return trap_and_discard(f, DeadReferenceError)
def connection_hints_for_furl(furl):
hints = []

View File

@ -297,7 +297,7 @@ def _get_platform():
def _get_package_versions_and_locations():
import warnings
from _auto_deps import package_imports, global_deprecation_messages, deprecation_messages, \
from ._auto_deps import package_imports, global_deprecation_messages, deprecation_messages, \
runtime_warning_messages, warning_imports, ignorable
def package_dir(srcfile):

View File

@ -1,58 +1,62 @@
<html xmlns:n="http://nevow.com/ns/nevow/0.1">
<html xmlns:t="http://twistedmatrix.com/ns/twisted.web.template/0.1">
<head>
<title>Tahoe-LAFS - File Download Status</title>
<link href="/tahoe.css" rel="stylesheet" type="text/css"/>
<link href="/icon.png" rel="shortcut icon" />
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
</head>
<body>
<h1>File Download Status</h1>
<h1>File Download Status</h1>
<ul>
<li>Started: <span n:render="started"/></li>
<li>Storage Index: <span n:render="si"/></li>
<li>Helper?: <span n:render="helper"/></li>
<li>Total Size: <span n:render="total_size"/></li>
<li>Progress: <span n:render="progress"/></li>
<li>Status: <span n:render="status"/></li>
</ul>
<div n:render="events"></div>
<div n:render="results">
<h2>Download Results</h2>
<ul>
<li n:render="servers_used" />
<li>Servermap: <span n:render="servermap" /></li>
<li n:render="problems" />
<li>Timings:</li>
<ul>
<li>File Size: <span n:render="string" n:data="file_size" /> bytes</li>
<li>Total: <span n:render="time" n:data="time_total" />
(<span n:render="rate" n:data="rate_total" />)</li>
<ul>
<li>Peer Selection: <span n:render="time" n:data="time_peer_selection" /></li>
<li>UEB Fetch: <span n:render="time" n:data="time_uri_extension" /></li>
<li>Hashtree Fetch: <span n:render="time" n:data="time_hashtrees" /></li>
<li>Segment Fetch: <span n:render="time" n:data="time_segments" />
(<span n:render="rate" n:data="rate_segments" />)</li>
<ul>
<li>Cumulative Fetching: <span n:render="time" n:data="time_cumulative_fetch" />
(<span n:render="rate" n:data="rate_fetch" />)</li>
<li>Cumulative Decoding: <span n:render="time" n:data="time_cumulative_decode" />
(<span n:render="rate" n:data="rate_decode" />)</li>
<li>Cumulative Decrypting: <span n:render="time" n:data="time_cumulative_decrypt" />
(<span n:render="rate" n:data="rate_decrypt" />)</li>
</ul>
<li>Paused by client: <span n:render="time" n:data="time_paused" /></li>
</ul>
<li n:render="server_timings" />
<li>Started: <t:transparent t:render="started"/></li>
<li>Storage Index: <t:transparent t:render="si"/></li>
<li>Helper?: <t:transparent t:render="helper"/></li>
<li>Total Size: <t:transparent t:render="total_size"/></li>
<li>Progress: <t:transparent t:render="progress"/></li>
<li>Status: <t:transparent t:render="status"/></li>
</ul>
</ul>
</div>
<div>Return to the <a href="/">Welcome Page</a></div>
<div t:render="events"></div>
<div t:render="results">
<h2>Download Results</h2>
<ul>
<li t:render="servers_used" />
<li>Servermap: <t:transparent t:render="servermap" /></li>
<li t:render="problems" />
<li>Timings:</li>
<ul>
<li>File Size: <t:transparent t:render="file_size" /> bytes</li>
<li>Total: <t:transparent t:render="time_total" />
(<t:transparent t:render="rate_total" />)</li>
<ul>
<li>Peer Selection: <t:transparent t:render="time_peer_selection" /></li>
<li>UEB Fetch: <t:transparent t:render="time_uri_extension" /></li>
<li>Hashtree Fetch: <t:transparent t:render="time_hashtrees" /></li>
<li>Segment Fetch: <t:transparent t:render="time_segments" />
(<t:transparent t:render="rate_segments" />)</li>
<ul>
<li>Cumulative Fetching: <t:transparent t:render="time_cumulative_fetch" />
(<t:transparent t:render="rate_fetch" />)</li>
<li>Cumulative Decoding: <t:transparent t:render="time_cumulative_decode" />
(<t:transparent t:render="rate_decode" />)</li>
<li>Cumulative Decrypting: <t:transparent t:render="time_cumulative_decrypt" />
(<t:transparent t:render="rate_decrypt" />)</li>
</ul>
<li>Paused by client: <t:transparent t:render="time_paused" /></li>
</ul>
<li t:render="server_timings" />
</ul>
</ul>
</div>
<div>Return to the <a href="/">Welcome Page</a></div>
</body>
</html>

View File

@ -1,19 +1,23 @@
import time
from nevow import rend, url
from nevow.inevow import IRequest
from nevow import url
from twisted.web.template import (
renderer,
tags as T,
)
from twisted.python.failure import Failure
from twisted.internet import reactor, defer
from twisted.web import resource
from twisted.web.http import NOT_FOUND
from twisted.web.html import escape
from twisted.application import service
from allmydata.web.common import WebError, \
get_root, get_arg, boolean_of_arg
from allmydata.web.common import (
WebError,
get_root,
get_arg,
boolean_of_arg,
)
MINUTE = 60
HOUR = 60*MINUTE
@ -21,13 +25,16 @@ DAY = 24*HOUR
(MONITOR, RENDERER, WHEN_ADDED) = range(3)
class OphandleTable(rend.Page, service.Service):
class OphandleTable(resource.Resource, service.Service):
"""Renders /operations/%d."""
name = "operations"
UNCOLLECTED_HANDLE_LIFETIME = 4*DAY
COLLECTED_HANDLE_LIFETIME = 1*DAY
def __init__(self, clock=None):
super(OphandleTable, self).__init__()
# both of these are indexed by ophandle
self.handles = {} # tuple of (monitor, renderer, when_added)
self.timers = {}
@ -45,12 +52,17 @@ class OphandleTable(rend.Page, service.Service):
del self.timers
return service.Service.stopService(self)
def add_monitor(self, ctx, monitor, renderer):
ophandle = get_arg(ctx, "ophandle")
def add_monitor(self, req, monitor, renderer):
"""
:param allmydata.webish.MyRequest req:
:param allmydata.monitor.Monitor monitor:
:param allmydata.web.directory.ManifestResults renderer:
"""
ophandle = get_arg(req, "ophandle")
assert ophandle
now = time.time()
self.handles[ophandle] = (monitor, renderer, now)
retain_for = get_arg(ctx, "retain-for", None)
retain_for = get_arg(req, "retain-for", None)
if retain_for is not None:
self._set_timer(ophandle, int(retain_for))
monitor.when_done().addBoth(self._operation_complete, ophandle)
@ -67,36 +79,42 @@ class OphandleTable(rend.Page, service.Service):
# if we already have a timer, the client must have provided the
# retain-for= value, so don't touch it.
def redirect_to(self, ctx):
ophandle = get_arg(ctx, "ophandle")
def redirect_to(self, req):
"""
:param allmydata.webish.MyRequest req:
"""
ophandle = get_arg(req, "ophandle")
assert ophandle
target = get_root(ctx) + "/operations/" + ophandle
output = get_arg(ctx, "output")
target = get_root(req) + "/operations/" + ophandle
output = get_arg(req, "output")
if output:
target = target + "?output=%s" % output
# XXX: We have to use nevow.url here because nevow.appserver
# is unhappy with anything else; so this gets its own ticket.
# https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3314
return url.URL.fromString(target)
def childFactory(self, ctx, name):
def getChild(self, name, req):
ophandle = name
if ophandle not in self.handles:
raise WebError("unknown/expired handle '%s'" % escape(ophandle),
NOT_FOUND)
(monitor, renderer, when_added) = self.handles[ophandle]
request = IRequest(ctx)
t = get_arg(ctx, "t", "status")
if t == "cancel" and request.method == "POST":
t = get_arg(req, "t", "status")
if t == "cancel" and req.method == "POST":
monitor.cancel()
# return the status anyways, but release the handle
self._release_ophandle(ophandle)
else:
retain_for = get_arg(ctx, "retain-for", None)
retain_for = get_arg(req, "retain-for", None)
if retain_for is not None:
self._set_timer(ophandle, int(retain_for))
if monitor.is_finished():
if boolean_of_arg(get_arg(ctx, "release-after-complete", "false")):
if boolean_of_arg(get_arg(req, "release-after-complete", "false")):
self._release_ophandle(ophandle)
if retain_for is None:
# this GET is collecting the ophandle, so change its timer
@ -123,6 +141,7 @@ class OphandleTable(rend.Page, service.Service):
self.timers.pop(ophandle, None)
self.handles.pop(ophandle, None)
class ReloadMixin(object):
REFRESH_TIME = 1*MINUTE

File diff suppressed because it is too large Load Diff

View File

@ -1,4 +1,4 @@
<html xmlns:n="http://nevow.com/ns/nevow/0.1">
<html xmlns:t="http://twistedmatrix.com/ns/twisted.web.template/0.1">
<head>
<title>Tahoe-LAFS - Recent and Active Operations</title>
<link href="/tahoe.css" rel="stylesheet" type="text/css"/>
@ -11,8 +11,8 @@
<h2>Active Operations:</h2>
<table align="left" class="table-headings-top" n:render="sequence" n:data="active_operations">
<tr n:pattern="header">
<table align="left" class="table-headings-top" t:render="active_operations">
<tr t:render="header">
<th>Type</th>
<th>Storage Index</th>
<th>Helper?</th>
@ -20,21 +20,21 @@
<th>Progress</th>
<th>Status</th>
</tr>
<tr n:pattern="item" n:render="row">
<td><n:slot name="type"/></td>
<td><n:slot name="si"/></td>
<td><n:slot name="helper"/></td>
<td><n:slot name="total_size"/></td>
<td><n:slot name="progress"/></td>
<td><n:slot name="status"/></td>
<tr t:render="item">
<td><t:slot name="type"/></td>
<td><t:slot name="si"/></td>
<td><t:slot name="helper"/></td>
<td><t:slot name="total_size"/></td>
<td><t:slot name="progress"/></td>
<td><t:slot name="status"/></td>
</tr>
<tr n:pattern="empty"><td>No active operations!</td></tr>
<tr t:render="empty"><td>No active operations!</td></tr>
</table>
<br clear="all" />
<h2>Recent Operations:</h2>
<table align="left" class="table-headings-top" n:render="sequence" n:data="recent_operations">
<tr n:pattern="header">
<table align="left" class="table-headings-top" t:render="recent_operations">
<tr t:render="header">
<th>Started</th>
<th>Type</th>
<th>Storage Index</th>
@ -43,16 +43,16 @@
<th>Progress</th>
<th>Status</th>
</tr>
<tr n:pattern="item" n:render="row">
<td><n:slot name="started"/></td>
<td><n:slot name="type"/></td>
<td><n:slot name="si"/></td>
<td><n:slot name="helper"/></td>
<td><n:slot name="total_size"/></td>
<td><n:slot name="progress"/></td>
<td><n:slot name="status"/></td>
<tr t:render="item">
<td><t:slot name="started"/></td>
<td><t:slot name="type"/></td>
<td><t:slot name="si"/></td>
<td><t:slot name="helper"/></td>
<td><t:slot name="total_size"/></td>
<td><t:slot name="progress"/></td>
<td><t:slot name="status"/></td>
</tr>
<tr n:pattern="empty"><td>No recent operations!</td></tr>
<tr t:render="empty"><td>No recent operations!</td></tr>
</table>
<br clear="all" />

View File

@ -1,10 +1,16 @@
import time, json
from nevow import rend, tags as T
from twisted.python.filepath import FilePath
from twisted.web.template import (
Element,
XMLFile,
tags as T,
renderer,
renderElement
)
from allmydata.web.common import (
getxmlfile,
abbreviate_time,
MultiFormatPage,
MultiFormatResource
)
from allmydata.util.abbreviate import abbreviate_space
from allmydata.util import time_format, idlib
@ -16,91 +22,108 @@ def remove_prefix(s, prefix):
return s[len(prefix):]
class StorageStatus(MultiFormatPage):
docFactory = getxmlfile("storage_status.xhtml")
# the default 'data' argument is the StorageServer instance
class StorageStatusElement(Element):
"""Class to render a storage status page."""
loader = XMLFile(FilePath(__file__).sibling("storage_status.xhtml"))
def __init__(self, storage, nickname=""):
rend.Page.__init__(self, storage)
self.storage = storage
self.nickname = nickname
"""
:param _StorageServer storage: data about storage.
:param string nickname: friendly name for storage.
"""
super(StorageStatusElement, self).__init__()
self._storage = storage
self._nickname = nickname
def render_JSON(self, req):
req.setHeader("content-type", "text/plain")
d = {"stats": self.storage.get_stats(),
"bucket-counter": self.storage.bucket_counter.get_state(),
"lease-checker": self.storage.lease_checker.get_state(),
"lease-checker-progress": self.storage.lease_checker.get_progress(),
}
return json.dumps(d, indent=1) + "\n"
@renderer
def nickname(self, req, tag):
return tag(self._nickname)
def data_nickname(self, ctx, storage):
return self.nickname
def data_nodeid(self, ctx, storage):
return idlib.nodeid_b2a(self.storage.my_nodeid)
@renderer
def nodeid(self, req, tag):
return tag(idlib.nodeid_b2a(self._storage.my_nodeid))
def render_storage_running(self, ctx, storage):
if storage:
return ctx.tag
else:
return T.h1["No Storage Server Running"]
def _get_storage_stat(self, key):
"""Get storage server statistics.
def render_bool(self, ctx, data):
return {True: "Yes", False: "No"}[bool(data)]
Storage Server keeps a dict that contains various usage and
latency statistics. The dict looks like this:
def render_abbrev_space(self, ctx, size):
{
'storage_server.accepting_immutable_shares': 1,
'storage_server.allocated': 0,
'storage_server.disk_avail': 106539192320,
'storage_server.disk_free_for_nonroot': 106539192320,
'storage_server.disk_free_for_root': 154415284224,
'storage_server.disk_total': 941088460800,
'storage_server.disk_used': 786673176576,
'storage_server.latencies.add-lease.01_0_percentile': None,
'storage_server.latencies.add-lease.10_0_percentile': None,
...
}
``StorageServer.get_stats()`` returns the above dict. Storage
status page uses a subset of the items in the dict, concerning
disk usage.
:param str key: storage server statistic we want to know.
"""
return self._storage.get_stats().get(key)
def render_abbrev_space(self, size):
if size is None:
return "?"
return u"?"
return abbreviate_space(size)
def render_space(self, ctx, size):
def render_space(self, size):
if size is None:
return "?"
return "%d" % size
return u"?"
return u"%d" % size
def data_stats(self, ctx, data):
# FYI: 'data' appears to be self, rather than the StorageServer
# object in self.original that gets passed to render_* methods. I
# still don't understand Nevow.
@renderer
def storage_stats(self, req, tag):
# Render storage status table that appears near the top of the page.
total = self._get_storage_stat("storage_server.disk_total")
used = self._get_storage_stat("storage_server.disk_used")
free_root = self._get_storage_stat("storage_server.disk_free_for_root")
free_nonroot = self._get_storage_stat("storage_server.disk_free_for_nonroot")
reserved = self._get_storage_stat("storage_server.reserved_space")
available = self._get_storage_stat("storage_server.disk_avail")
# Nevow has nevow.accessors.DictionaryContainer: Any data= directive
# that appears in a context in which the current data is a dictionary
# will be looked up as keys in that dictionary. So if data_stats()
# returns a dictionary, then we can use something like this:
#
# <ul n:data="stats">
# <li>disk_total: <span n:render="abbrev" n:data="disk_total" /></li>
# </ul>
tag.fillSlots(
disk_total = self.render_space(total),
disk_total_abbrev = self.render_abbrev_space(total),
disk_used = self.render_space(used),
disk_used_abbrev = self.render_abbrev_space(used),
disk_free_for_root = self.render_space(free_root),
disk_free_for_root_abbrev = self.render_abbrev_space(free_root),
disk_free_for_nonroot = self.render_space(free_nonroot),
disk_free_for_nonroot_abbrev = self.render_abbrev_space(free_nonroot),
reserved_space = self.render_space(reserved),
reserved_space_abbrev = self.render_abbrev_space(reserved),
disk_avail = self.render_space(available),
disk_avail_abbrev = self.render_abbrev_space(available)
)
return tag
# to use get_stats()["storage_server.disk_total"] . However,
# DictionaryContainer does a raw d[] instead of d.get(), so any
# missing keys will cause an error, even if the renderer can tolerate
# None values. To overcome this, we either need a dict-like object
# that always returns None for unknown keys, or we must pre-populate
# our dict with those missing keys, or we should get rid of data_
# methods that return dicts (or find some way to override Nevow's
# handling of dictionaries).
@renderer
def accepting_immutable_shares(self, req, tag):
accepting = self._get_storage_stat("storage_server.accepting_immutable_shares")
return tag({True: "Yes", False: "No"}[bool(accepting)])
d = dict([ (remove_prefix(k, "storage_server."), v)
for k,v in self.storage.get_stats().items() ])
d.setdefault("disk_total", None)
d.setdefault("disk_used", None)
d.setdefault("disk_free_for_root", None)
d.setdefault("disk_free_for_nonroot", None)
d.setdefault("reserved_space", None)
d.setdefault("disk_avail", None)
return d
def data_last_complete_bucket_count(self, ctx, data):
s = self.storage.bucket_counter.get_state()
@renderer
def last_complete_bucket_count(self, req, tag):
s = self._storage.bucket_counter.get_state()
count = s.get("last-complete-bucket-count")
if count is None:
return "Not computed yet"
return count
return tag("Not computed yet")
return tag(str(count))
def render_count_crawler_status(self, ctx, storage):
p = self.storage.bucket_counter.get_progress()
return ctx.tag[self.format_crawler_progress(p)]
@renderer
def count_crawler_status(self, req, tag):
p = self._storage.bucket_counter.get_progress()
return tag(self.format_crawler_progress(p))
def format_crawler_progress(self, p):
cycletime = p["estimated-time-per-cycle"]
@ -127,56 +150,52 @@ class StorageStatus(MultiFormatPage):
return ["Next crawl in %s" % abbreviate_time(soon),
cycletime_s]
def render_lease_expiration_enabled(self, ctx, data):
lc = self.storage.lease_checker
if lc.expiration_enabled:
return ctx.tag["Enabled: expired leases will be removed"]
else:
return ctx.tag["Disabled: scan-only mode, no leases will be removed"]
@renderer
def storage_running(self, req, tag):
if self._storage:
return tag
return T.h1("No Storage Server Running")
def render_lease_expiration_mode(self, ctx, data):
lc = self.storage.lease_checker
@renderer
def lease_expiration_enabled(self, req, tag):
lc = self._storage.lease_checker
if lc.expiration_enabled:
return tag("Enabled: expired leases will be removed")
else:
return tag("Disabled: scan-only mode, no leases will be removed")
@renderer
def lease_expiration_mode(self, req, tag):
lc = self._storage.lease_checker
if lc.mode == "age":
if lc.override_lease_duration is None:
ctx.tag["Leases will expire naturally, probably 31 days after "
"creation or renewal."]
tag("Leases will expire naturally, probably 31 days after "
"creation or renewal.")
else:
ctx.tag["Leases created or last renewed more than %s ago "
"will be considered expired."
% abbreviate_time(lc.override_lease_duration)]
tag("Leases created or last renewed more than %s ago "
"will be considered expired."
% abbreviate_time(lc.override_lease_duration))
else:
assert lc.mode == "cutoff-date"
localizedutcdate = time.strftime("%d-%b-%Y", time.gmtime(lc.cutoff_date))
isoutcdate = time_format.iso_utc_date(lc.cutoff_date)
ctx.tag["Leases created or last renewed before %s (%s) UTC "
"will be considered expired." % (isoutcdate, localizedutcdate, )]
tag("Leases created or last renewed before %s (%s) UTC "
"will be considered expired."
% (isoutcdate, localizedutcdate, ))
if len(lc.mode) > 2:
ctx.tag[" The following sharetypes will be expired: ",
" ".join(sorted(lc.sharetypes_to_expire)), "."]
return ctx.tag
tag(" The following sharetypes will be expired: ",
" ".join(sorted(lc.sharetypes_to_expire)), ".")
return tag
def format_recovered(self, sr, a):
def maybe(d):
if d is None:
return "?"
return "%d" % d
return "%s shares, %s buckets (%s mutable / %s immutable), %s (%s / %s)" % \
(maybe(sr["%s-shares" % a]),
maybe(sr["%s-buckets" % a]),
maybe(sr["%s-buckets-mutable" % a]),
maybe(sr["%s-buckets-immutable" % a]),
abbreviate_space(sr["%s-diskbytes" % a]),
abbreviate_space(sr["%s-diskbytes-mutable" % a]),
abbreviate_space(sr["%s-diskbytes-immutable" % a]),
)
def render_lease_current_cycle_progress(self, ctx, data):
lc = self.storage.lease_checker
@renderer
def lease_current_cycle_progress(self, req, tag):
lc = self._storage.lease_checker
p = lc.get_progress()
return ctx.tag[self.format_crawler_progress(p)]
return tag(self.format_crawler_progress(p))
def render_lease_current_cycle_results(self, ctx, data):
lc = self.storage.lease_checker
@renderer
def lease_current_cycle_results(self, req, tag):
lc = self._storage.lease_checker
p = lc.get_progress()
if not p["cycle-in-progress"]:
return ""
@ -190,7 +209,7 @@ class StorageStatus(MultiFormatPage):
p = T.ul()
def add(*pieces):
p[T.li[pieces]]
p(T.li(pieces))
def maybe(d):
if d is None:
@ -226,29 +245,29 @@ class StorageStatus(MultiFormatPage):
if so_far["corrupt-shares"]:
add("Corrupt shares:",
T.ul[ [T.li[ ["SI %s shnum %d" % corrupt_share
T.ul( (T.li( ["SI %s shnum %d" % corrupt_share
for corrupt_share in so_far["corrupt-shares"] ]
]]])
))))
return tag("Current cycle:", p)
return ctx.tag["Current cycle:", p]
def render_lease_last_cycle_results(self, ctx, data):
lc = self.storage.lease_checker
@renderer
def lease_last_cycle_results(self, req, tag):
lc = self._storage.lease_checker
h = lc.get_state()["history"]
if not h:
return ""
last = h[max(h.keys())]
start, end = last["cycle-start-finish-times"]
ctx.tag["Last complete cycle (which took %s and finished %s ago)"
" recovered: " % (abbreviate_time(end-start),
abbreviate_time(time.time() - end)),
self.format_recovered(last["space-recovered"], "actual")
]
tag("Last complete cycle (which took %s and finished %s ago)"
" recovered: " % (abbreviate_time(end-start),
abbreviate_time(time.time() - end)),
self.format_recovered(last["space-recovered"], "actual"))
p = T.ul()
def add(*pieces):
p[T.li[pieces]]
p(T.li(pieces))
saw = self.format_recovered(last["space-recovered"], "examined")
add("and saw a total of ", saw)
@ -260,8 +279,42 @@ class StorageStatus(MultiFormatPage):
if last["corrupt-shares"]:
add("Corrupt shares:",
T.ul[ [T.li[ ["SI %s shnum %d" % corrupt_share
T.ul( (T.li( ["SI %s shnum %d" % corrupt_share
for corrupt_share in last["corrupt-shares"] ]
]]])
))))
return ctx.tag[p]
return tag(p)
@staticmethod
def format_recovered(sr, a):
def maybe(d):
if d is None:
return "?"
return "%d" % d
return "%s shares, %s buckets (%s mutable / %s immutable), %s (%s / %s)" % \
(maybe(sr["%s-shares" % a]),
maybe(sr["%s-buckets" % a]),
maybe(sr["%s-buckets-mutable" % a]),
maybe(sr["%s-buckets-immutable" % a]),
abbreviate_space(sr["%s-diskbytes" % a]),
abbreviate_space(sr["%s-diskbytes-mutable" % a]),
abbreviate_space(sr["%s-diskbytes-immutable" % a]),
)
class StorageStatus(MultiFormatResource):
def __init__(self, storage, nickname=""):
super(StorageStatus, self).__init__()
self._storage = storage
self._nickname = nickname
def render_HTML(self, req):
return renderElement(req, StorageStatusElement(self._storage, self._nickname))
def render_JSON(self, req):
req.setHeader("content-type", "text/plain")
d = {"stats": self._storage.get_stats(),
"bucket-counter": self._storage.bucket_counter.get_state(),
"lease-checker": self._storage.lease_checker.get_state(),
"lease-checker-progress": self._storage.lease_checker.get_progress(),
}
return json.dumps(d, indent=1) + "\n"

View File

@ -1,4 +1,4 @@
<html xmlns:n="http://nevow.com/ns/nevow/0.1">
<html xmlns:t="http://twistedmatrix.com/ns/twisted.web.template/0.1">
<head>
<title>Tahoe-LAFS - Storage Server Status</title>
<link href="/tahoe.css" rel="stylesheet" type="text/css"/>
@ -7,19 +7,19 @@
</head>
<body>
<div n:render="storage_running">
<div t:render="storage_running">
<h1>Storage Server Status</h1>
<table n:data="stats">
<table class="storage_status" t:render="storage_stats">
<tr><td>Total disk space:</td>
<td><span n:render="abbrev_space" n:data="disk_total" /></td>
<td>(<span n:render="space" n:data="disk_total" />)</td>
<td><t:slot name="disk_total_abbrev" /></td>
<td>(<t:slot name="disk_total" />)</td>
<td />
</tr>
<tr><td>Disk space used:</td>
<td>- <span n:render="abbrev_space" n:data="disk_used" /></td>
<td>(<span n:render="space" n:data="disk_used" />)</td>
<td>- <t:slot name="disk_used_abbrev" /></td>
<td>(<t:slot name="disk_used" />)</td>
<td />
</tr>
<tr><td />
@ -28,18 +28,18 @@
<td />
</tr>
<tr><td>Disk space free (root):</td>
<td><span n:render="abbrev_space" n:data="disk_free_for_root"/></td>
<td>(<span n:render="space" n:data="disk_free_for_root"/>)</td>
<td><t:slot name="disk_free_for_root_abbrev"/></td>
<td>(<t:slot name="disk_free_for_root"/>)</td>
<td>[see 1]</td>
</tr>
<tr><td>Disk space free (non-root):</td>
<td><span n:render="abbrev_space" n:data="disk_free_for_nonroot" /></td>
<td>(<span n:render="space" n:data="disk_free_for_nonroot" />)</td>
<td><t:slot name="disk_free_for_nonroot_abbrev" /></td>
<td>(<t:slot name="disk_free_for_nonroot" />)</td>
<td>[see 2]</td>
</tr>
<tr><td>Reserved space:</td>
<td>- <span n:render="abbrev_space" n:data="reserved_space" /></td>
<td>(<span n:render="space" n:data="reserved_space" />)</td>
<td>- <t:slot name="reserved_space_abbrev" /></td>
<td>(<t:slot name="reserved_space" />)</td>
<td />
</tr>
<tr><td />
@ -48,23 +48,23 @@
<td />
</tr>
<tr><td>Space Available to Tahoe:</td>
<td><span n:render="abbrev_space" n:data="disk_avail" /></td>
<td>(<span n:render="space" n:data="disk_avail" />)</td>
<td><t:slot name="disk_avail_abbrev" /></td>
<td>(<t:slot name="disk_avail" />)</td>
<td />
</tr>
</table>
<ul>
<li>Server Nickname: <span class="nickname mine" n:render="data" n:data="nickname" /></li>
<li>Server Nodeid: <span class="nodeid mine data-chars" n:render="string" n:data="nodeid" /></li>
<li n:data="stats">Accepting new shares:
<span n:render="bool" n:data="accepting_immutable_shares" /></li>
<li>Server Nickname: <span class="nickname mine"><t:transparent t:render="nickname" /></span></li>
<li>Server Nodeid: <span class="nodeid mine data-chars"> <t:transparent t:render="nodeid" /></span></li>
<li>Accepting new shares:
<span t:render="accepting_immutable_shares" /></li>
<li>Total buckets:
<span n:render="string" n:data="last_complete_bucket_count" />
<span t:render="last_complete_bucket_count" />
(the number of files and directories for which this server is holding
a share)
<ul>
<li n:render="count_crawler_status" />
<li><span t:render="count_crawler_status" /></li>
</ul>
</li>
</ul>
@ -72,11 +72,11 @@
<h2>Lease Expiration Crawler</h2>
<ul>
<li>Expiration <span n:render="lease_expiration_enabled" /></li>
<li n:render="lease_expiration_mode" />
<li n:render="lease_current_cycle_progress" />
<li n:render="lease_current_cycle_results" />
<li n:render="lease_last_cycle_results" />
<li>Expiration <span t:render="lease_expiration_enabled" /></li>
<li t:render="lease_expiration_mode" />
<li t:render="lease_current_cycle_progress" />
<li t:render="lease_current_cycle_results" />
<li t:render="lease_last_cycle_results" />
</ul>
<hr />

View File

@ -2,11 +2,25 @@
import urllib
from twisted.web import http
from twisted.internet import defer
from nevow import rend, url, tags as T
from twisted.python.filepath import FilePath
from twisted.web.resource import Resource
from twisted.web.template import (
XMLFile,
renderer,
renderElement,
tags,
)
from nevow import url
from allmydata.immutable.upload import FileHandle
from allmydata.mutable.publish import MutableFileHandle
from allmydata.web.common import getxmlfile, get_arg, boolean_of_arg, \
convert_children_json, WebError, get_format, get_mutable_type
from allmydata.web.common import (
get_arg,
boolean_of_arg,
convert_children_json,
WebError,
get_format,
get_mutable_type,
)
from allmydata.web import status
def PUTUnlinkedCHK(req, client):
@ -59,34 +73,53 @@ def POSTUnlinkedCHK(req, client):
return d
class UploadResultsPage(status.UploadResultsRendererMixin, rend.Page):
class UploadResultsPage(Resource, object):
"""'POST /uri', to create an unlinked file."""
docFactory = getxmlfile("upload-results.xhtml")
def __init__(self, upload_results):
rend.Page.__init__(self)
self.results = upload_results
"""
:param IUploadResults upload_results: stats provider.
"""
super(UploadResultsPage, self).__init__()
self._upload_results = upload_results
def render_POST(self, req):
elem = UploadResultsElement(self._upload_results)
return renderElement(req, elem)
class UploadResultsElement(status.UploadResultsRendererMixin):
loader = XMLFile(FilePath(__file__).sibling("upload-results.xhtml"))
def __init__(self, upload_results):
super(UploadResultsElement, self).__init__()
self._upload_results = upload_results
def upload_results(self):
return defer.succeed(self.results)
return defer.succeed(self._upload_results)
def data_done(self, ctx, data):
@renderer
def done(self, req, tag):
d = self.upload_results()
d.addCallback(lambda res: "done!")
return d
def data_uri(self, ctx, data):
@renderer
def uri(self, req, tag):
d = self.upload_results()
d.addCallback(lambda res: res.get_uri())
return d
def render_download_link(self, ctx, data):
@renderer
def download_link(self, req, tag):
d = self.upload_results()
d.addCallback(lambda res:
T.a(href="/uri/" + urllib.quote(res.get_uri()))
["/uri/" + res.get_uri()])
tags.a("/uri/" + res.get_uri(),
href="/uri/" + urllib.quote(res.get_uri())))
return d
def POSTUnlinkedSSK(req, client, version):
# "POST /uri", to create an unlinked file.
# SDMF: files are small, and we can only upload data

View File

@ -1,4 +1,4 @@
<html xmlns:n="http://nevow.com/ns/nevow/0.1">
<html xmlns:t="http://twistedmatrix.com/ns/twisted.web.template/0.1">
<head>
<title>Tahoe-LAFS - File Uploaded</title>
<link href="/tahoe.css" rel="stylesheet" type="text/css"/>
@ -7,37 +7,37 @@
</head>
<body>
<h1>Uploading File... <span n:render="string" n:data="done" /></h1>
<h1>Uploading File... <t:transparent t:render="done" /></h1>
<h2>Upload Results:</h2>
<ul>
<li>URI: <tt><span n:render="string" n:data="uri" /></tt></li>
<li>Download link: <span n:render="download_link" /></li>
<li>Sharemap: <span n:render="sharemap" /></li>
<li>Servermap: <span n:render="servermap" /></li>
<li>URI: <tt><span><t:transparent t:render="uri" /></span></tt></li>
<li>Download link: <t:transparent t:render="download_link" /></li>
<li>Sharemap: <t:transparent t:render="sharemap" /></li>
<li>Servermap: <t:transparent t:render="servermap" /></li>
<li>Timings:</li>
<ul>
<li>File Size: <span n:render="string" n:data="file_size" /> bytes</li>
<li>Total: <span n:render="time" n:data="time_total" />
(<span n:render="rate" n:data="rate_total" />)</li>
<li>File Size: <t:transparent t:render="file_size" /> bytes</li>
<li>Total: <t:transparent t:render="time_total" />
(<t:transparent t:render="rate_total" />)</li>
<ul>
<li>Storage Index: <span n:render="time" n:data="time_storage_index" />
(<span n:render="rate" n:data="rate_storage_index" />)</li>
<li>[Contacting Helper]: <span n:render="time" n:data="time_contacting_helper" /></li>
<li>[Upload Ciphertext To Helper]: <span n:render="time" n:data="time_cumulative_fetch" />
(<span n:render="rate" n:data="rate_ciphertext_fetch" />)</li>
<li>Storage Index: <t:transparent t:render="time_storage_index" />
(<t:transparent t:render="rate_storage_index" />)</li>
<li>[Contacting Helper]: <t:transparent t:render="time_contacting_helper" /></li>
<li>[Upload Ciphertext To Helper]: <t:transparent t:render="time_cumulative_fetch" />
(<t:transparent t:render="rate_ciphertext_fetch" />)</li>
<li>Peer Selection: <span n:render="time" n:data="time_peer_selection" /></li>
<li>Encode And Push: <span n:render="time" n:data="time_total_encode_and_push" />
(<span n:render="rate" n:data="rate_encode_and_push" />)</li>
<li>Peer Selection: <t:transparent t:render="time_peer_selection" /></li>
<li>Encode And Push: <t:transparent t:render="time_total_encode_and_push" />
(<t:transparent t:render="rate_encode_and_push" />)</li>
<ul>
<li>Cumulative Encoding: <span n:render="time" n:data="time_cumulative_encoding" />
(<span n:render="rate" n:data="rate_encode" />)</li>
<li>Cumulative Pushing: <span n:render="time" n:data="time_cumulative_sending" />
(<span n:render="rate" n:data="rate_push" />)</li>
<li>Send Hashes And Close: <span n:render="time" n:data="time_hashes_and_close" /></li>
<li>Cumulative Encoding: <t:transparent t:render="time_cumulative_encoding" />
(<t:transparent t:render="rate_encode" />)</li>
<li>Cumulative Pushing: <t:transparent t:render="time_cumulative_sending" />
(<t:transparent t:render="rate_push" />)</li>
<li>Send Hashes And Close: <t:transparent t:render="time_hashes_and_close" /></li>
</ul>
<li>[Helper Total]: <span n:render="time" n:data="time_helper_total" /></li>
<li>[Helper Total]: <t:transparent t:render="time_helper_total" /></li>
</ul>
</ul>
</ul>

View File

@ -1,4 +1,4 @@
<html xmlns:n="http://nevow.com/ns/nevow/0.1">
<html xmlns:t="http://twistedmatrix.com/ns/twisted.web.template/0.1">
<head>
<title>Tahoe-LAFS - File Upload Status</title>
<link href="/tahoe.css" rel="stylesheet" type="text/css"/>
@ -10,46 +10,46 @@
<h1>File Upload Status</h1>
<ul>
<li>Started: <span n:render="started"/></li>
<li>Storage Index: <span n:render="si"/></li>
<li>Helper?: <span n:render="helper"/></li>
<li>Total Size: <span n:render="total_size"/></li>
<li>Progress (Hash): <span n:render="progress_hash"/></li>
<li>Progress (Ciphertext): <span n:render="progress_ciphertext"/></li>
<li>Progress (Encode+Push): <span n:render="progress_encode_push"/></li>
<li>Status: <span n:render="status"/></li>
<li>Started: <t:transparent t:render="started"/></li>
<li>Storage Index: <t:transparent t:render="si"/></li>
<li>Helper?: <t:transparent t:render="helper"/></li>
<li>Total Size: <t:transparent t:render="total_size"/></li>
<li>Progress (Hash): <t:transparent t:render="progress_hash"/></li>
<li>Progress (Ciphertext): <t:transparent t:render="progress_ciphertext"/></li>
<li>Progress (Encode+Push): <t:transparent t:render="progress_encode_push"/></li>
<li>Status: <t:transparent t:render="status"/></li>
</ul>
<div n:render="results">
<div t:render="results">
<h2>Upload Results</h2>
<ul>
<li>Shares Pushed: <span n:render="pushed_shares" /></li>
<li>Shares Already Present: <span n:render="preexisting_shares" /></li>
<li>Sharemap: <span n:render="sharemap" /></li>
<li>Servermap: <span n:render="servermap" /></li>
<li>Shares Pushed: <t:transparent t:render="pushed_shares" /></li>
<li>Shares Already Present: <t:transparent t:render="preexisting_shares" /></li>
<li>Sharemap: <t:transparent t:render="sharemap" /></li>
<li>Servermap: <t:transparent t:render="servermap" /></li>
<li>Timings:</li>
<ul>
<li>File Size: <span n:render="string" n:data="file_size" /> bytes</li>
<li>Total: <span n:render="time" n:data="time_total" />
(<span n:render="rate" n:data="rate_total" />)</li>
<li>File Size: <t:transparent t:render="file_size" /> bytes</li>
<li>Total: <t:transparent t:render="time_total" />
(<t:transparent t:render="rate_total" />)</li>
<ul>
<li>Storage Index: <span n:render="time" n:data="time_storage_index" />
(<span n:render="rate" n:data="rate_storage_index" />)</li>
<li>[Contacting Helper]: <span n:render="time" n:data="time_contacting_helper" /></li>
<li>[Upload Ciphertext To Helper]: <span n:render="time" n:data="time_cumulative_fetch" />
(<span n:render="rate" n:data="rate_ciphertext_fetch" />)</li>
<li>Storage Index: <t:transparent t:render="time_storage_index" />
(<t:transparent t:render="rate_storage_index" />)</li>
<li>[Contacting Helper]: <t:transparent t:render="time_contacting_helper" /></li>
<li>[Upload Ciphertext To Helper]: <t:transparent t:render="time_cumulative_fetch" />
(<t:transparent t:render="rate_ciphertext_fetch" />)</li>
<li>Peer Selection: <span n:render="time" n:data="time_peer_selection" /></li>
<li>Encode And Push: <span n:render="time" n:data="time_total_encode_and_push" />
(<span n:render="rate" n:data="rate_encode_and_push" />)</li>
<li>Peer Selection: <t:transparent t:render="time_peer_selection" /></li>
<li>Encode And Push: <t:transparent t:render="time_total_encode_and_push" />
(<t:transparent t:render="rate_encode_and_push" />)</li>
<ul>
<li>Cumulative Encoding: <span n:render="time" n:data="time_cumulative_encoding" />
(<span n:render="rate" n:data="rate_encode" />)</li>
<li>Cumulative Pushing: <span n:render="time" n:data="time_cumulative_sending" />
(<span n:render="rate" n:data="rate_push" />)</li>
<li>Send Hashes And Close: <span n:render="time" n:data="time_hashes_and_close" /></li>
<li>Cumulative Encoding: <t:transparent t:render="time_cumulative_encoding" />
(<t:transparent t:render="rate_encode" />)</li>
<li>Cumulative Pushing: <t:transparent t:render="time_cumulative_sending" />
(<t:transparent t:render="rate_push" />)</li>
<li>Send Hashes And Close: <t:transparent t:render="time_hashes_and_close" /></li>
</ul>
<li>[Helper Total]: <span n:render="time" n:data="time_helper_total" /></li>
<li>[Helper Total]: <t:transparent t:render="time_helper_total" /></li>
</ul>
</ul>
</ul>

11
tox.ini
View File

@ -7,7 +7,7 @@
twisted = 1
[tox]
envlist = {py27,pypy27}{-coverage,}
envlist = {py27,pypy27,py36}{-coverage,}
minversion = 2.4
[testenv]
@ -45,8 +45,13 @@ usedevelop = False
# tests.
extras = test
commands =
tahoe --version
trial {env:TAHOE_LAFS_TRIAL_ARGS:--rterrors} {posargs:allmydata}
tahoe --version
[testenv:py36]
# git inside of ratchet.sh needs $HOME.
passenv = HOME
commands = {toxinidir}/misc/python3/ratchet.sh
[testenv:integration]
setenv =
@ -75,7 +80,7 @@ commands =
whitelist_externals =
/bin/mv
commands =
pyflakes src static misc setup.py
flake8 src static misc setup.py
python misc/coding_tools/check-umids.py src
python misc/coding_tools/check-debugging.py
python misc/coding_tools/find-trailing-spaces.py -r src static misc setup.py