Merge branch '3342-base32-and-base62-to-python-3' into 3344.netstring-and-hashutil-to-python-3

Commit: 4da8e2ddee
.gitignore (vendored): 3 lines added

@@ -44,6 +44,9 @@ zope.interface-*.egg
 /docs/_build/
 /coverage.xml
 /.hypothesis/
+/eliot.log
+/misc/python3/results.xml
+/misc/python3/results.subunit2

 # This is the plaintext of the private environment needed for some CircleCI
 # operations. It's never supposed to be checked in.
.travis.yml: 15 lines changed

@@ -1,7 +1,7 @@
 sudo: false
 language: python
 cache: pip
-dist: trusty
+dist: xenial
 before_cache:
   - rm -f $HOME/.cache/pip/log/debug.log
 git:
@@ -16,19 +16,15 @@ install:
   - if [ "${TRAVIS_OS_NAME}" = "osx" ]; then export PATH=$HOME/Library/Python/2.7/bin:$PATH; fi
   - if [ "${TRAVIS_OS_NAME}" = "osx" ]; then wget https://bootstrap.pypa.io/get-pip.py && sudo python ./get-pip.py; fi
   - pip list
-  - if [ "${TRAVIS_OS_NAME}" = "osx" ]; then pip install --user --upgrade codecov tox setuptools; fi
-  - if [ "${TRAVIS_OS_NAME}" = "linux" ]; then pip install --upgrade codecov tox setuptools; fi
+  - if [ "${TRAVIS_OS_NAME}" = "osx" ]; then pip install --user --upgrade codecov tox setuptools virtualenv; fi
+  - if [ "${TRAVIS_OS_NAME}" = "linux" ]; then pip install --upgrade codecov tox setuptools virtualenv; fi
   - echo $PATH; which python; which pip; which tox
   - python misc/build_helpers/show-tool-versions.py

 script:
   - |
     set -eo pipefail
-    if [ "${T}" = "py35" ]; then
-      python3 -m compileall -f -x tahoe-depgraph.py .
-    else
     tox -e ${T}
-    fi
     # To verify that the resultant PyInstaller-generated binary executes
     # cleanly (i.e., that it terminates with an exit code of 0 and isn't
     # failing due to import/packaging-related errors, etc.).
@@ -69,9 +65,8 @@ matrix:
     python: '2.7'
     env: T=pyinstaller LANG=en_US.UTF-8
     language: generic # "python" is not available on OS-X
-  # this is a "lint" job that checks for python3 compatibility
   - os: linux
-    python: '3.5'
-    env: T=py35
+    python: '3.6'
+    env: T=py36

   fast_finish: true
misc/python3/ratchet-passing: new file (28 lines)

allmydata.test.mutable.test_exceptions.Exceptions.test_repr
allmydata.test.test_base32.Base32.test_a2b
allmydata.test.test_base32.Base32.test_a2b_b2a_match_Pythons
allmydata.test.test_base32.Base32.test_b2a
allmydata.test.test_base32.Base32.test_b2a_or_none
allmydata.test.test_base62.Base62.test_ende_0x00
allmydata.test.test_base62.Base62.test_ende_0x000000
allmydata.test.test_base62.Base62.test_ende_0x01
allmydata.test.test_base62.Base62.test_ende_0x0100
allmydata.test.test_base62.Base62.test_ende_0x010000
allmydata.test.test_base62.Base62.test_ende_longrandstr
allmydata.test.test_base62.Base62.test_ende_randstr
allmydata.test.test_base62.Base62.test_known_values
allmydata.test.test_base62.Base62.test_num_octets_that_encode_to_this_many_chars
allmydata.test.test_base62.Base62.test_odd_sizes
allmydata.test.test_base62.Base62.test_roundtrip
allmydata.test.test_deferredutil.DeferredUtilTests.test_failure
allmydata.test.test_deferredutil.DeferredUtilTests.test_gather_results
allmydata.test.test_deferredutil.DeferredUtilTests.test_success
allmydata.test.test_deferredutil.DeferredUtilTests.test_wait_for_delayed_calls
allmydata.test.test_humanreadable.HumanReadable.test_repr
allmydata.test.test_observer.Observer.test_lazy_oneshot
allmydata.test.test_observer.Observer.test_observerlist
allmydata.test.test_observer.Observer.test_oneshot
allmydata.test.test_observer.Observer.test_oneshot_fireagain
allmydata.test.test_python3.Python3PortingEffortTests.test_finished_porting
allmydata.test.test_python3.Python3PortingEffortTests.test_ported_modules_distinct
allmydata.test.test_python3.Python3PortingEffortTests.test_ported_modules_exist
misc/python3/ratchet.py: new executable file (409 lines)

#!/usr/bin/env python3
# -*- coding: utf-8 -*-
'''Ratchet up passing tests, or ratchet down failing tests.

Usage:

  ratchet.py <"up" or "down"> <junitxml file path> <tracking file path>

This script helps when you expect a large test suite to fail spectacularly in
some environment, and you want to gradually improve the situation with minimal
impact to forward development of the same codebase for other environments. The
initial and primary use case is porting from Python 2 to Python 3.

The idea is to emit JUnit XML from your test runner, and then invoke ratchet.py
to consume this XML output and operate on a so-called "tracking" file. When
ratcheting up passing tests, the tracking file will contain a list of tests,
one per line, that passed. When ratcheting down, the tracking file contains a
list of failing tests. On each subsequent run, ratchet.py will compare the
prior results in the tracking file with the new results in the XML, and will
report on both welcome and unwelcome changes. It will modify the tracking file
in the case of welcome changes, and therein lies the ratcheting.

The exit codes are:

  0 - no changes observed
  1 - changes observed, whether welcome or unwelcome
  2 - invocation error

If <junitxml file path> does not exist, you'll get a FileNotFoundError:

>>> _test('up', None, None)  # doctest: +ELLIPSIS
Traceback (most recent call last):
  ...
FileNotFoundError: ...

If <tracking file path> does not exist, that's fine:

>>> _test('up', '1', None)
Some tests not required to pass did:
  c0.t
Conveniently, they have been added to `<tracking_path>` for you. Perhaps commit that?
Eep! 0 test(s) were required to pass, but instead 1 did. 🐭

Same if you're ratcheting down:

>>> _test('down', '1', None)
All and only tests expected to fail did. 💃

If the test run has the same output as last time, it's all good:

>>> _test('up', '01001110', '01001110')
All and only tests required to pass did. 💃

>>> _test('down', '01001110', '10110001')
All and only tests expected to fail did. 💃

If there's a welcome change, that's noted:

>>> _test('up', '0101', '0100')
Some tests not required to pass did:
  c3.t
Conveniently, they have been added to `<tracking_path>` for you. Perhaps commit that?
Eep! 1 test(s) were required to pass, but instead 2 did. 🐭

>>> _test('down', '0011', '1110')
Some tests expected to fail didn't:
  c2.t
Conveniently, they have been removed from `<tracking_path>` for you. Perhaps commit that?
Eep! 3 test(s) were expected to fail, but instead 2 did. 🐭

And if there is an unwelcome change, that is noted as well:

>>> _test('up', '1101', '1111')
Some tests required to pass didn't:
  c2.t
Eep! 4 test(s) were required to pass, but instead 3 did. 🐭

>>> _test('down', '0000', '1101')
Some tests not expected to fail did:
  c2.t
Eep! 3 test(s) were expected to fail, but instead 4 did. 🐭

And if there are both welcome and unwelcome changes, they are both noted:

>>> _test('up', '1101', '1011')
Some tests not required to pass did:
  c1.t
Conveniently, they have been added to `<tracking_path>` for you. Perhaps commit that?
Some tests required to pass didn't:
  c2.t
Eep! 3 test(s) were required to pass, but instead 3 did. 🐭

>>> _test('down', '0100', '1100')
Some tests not expected to fail did:
  c2.t
  c3.t
Some tests expected to fail didn't:
  c1.t
Conveniently, they have been removed from `<tracking_path>` for you. Perhaps commit that?
Eep! 2 test(s) were expected to fail, but instead 3 did. 🐭


To test ratchet.py itself:

  python3 -m doctest ratchet.py

'''
from __future__ import absolute_import, division, print_function, unicode_literals

import io
import os
import re
import sys
import tempfile
import xml.etree.ElementTree as Etree


class JUnitXMLFile(object):
    '''Represent a file containing test results in JUnit XML format.

    >>> eg = _mktemp_junitxml('0100111')
    >>> results = JUnitXMLFile(eg.name).parse()
    >>> results.failed
    ['c0.t', 'c2.t', 'c3.t']
    >>> results.passed
    ['c1.t', 'c4.t', 'c5.t', 'c6.t']

    '''

    def __init__(self, filepath):
        self.filepath = filepath
        self.failed = []
        self.failed_aggregates = {}
        self.stderr_output = []
        self.passed = []
        self._tree = None

    def parse(self):
        if self._tree:
            raise RuntimeError('already parsed')
        self._tree = Etree.parse(self.filepath)
        for testcase in self._tree.findall('testcase'):
            self.process_testcase(testcase)
        return self

    def process_testcase(self, case):
        key = self.case_key(case)

        # look at children but throw away stderr output
        nonpassing = [c for c in case if not c.tag == 'system-err']
        n = len(nonpassing)
        if n > 1:
            raise RuntimeError(f'multiple results for {key}: {nonpassing}')
        elif n == 1:
            result = nonpassing.pop()
            self.failed.append(key)
            message = result.get('message')
            self.failed_aggregates.setdefault(message, []).append(key)
        else:
            self.passed.append(key)

    @staticmethod
    def case_key(case):
        return f'{case.get("classname")}.{case.get("name")}'

    def report(self, details=False):
        for k, v in sorted(
                self.failed_aggregates.items(),
                key=lambda i: len(i[1]),
                reverse=True):
            print(f'# {k}')
            for t in v:
                print(f' - {t}')


def load_previous_results(txt):
    try:
        previous_results = open(txt).read()
    except FileNotFoundError:
        previous_results = ''
    parsed = set()
    for line in previous_results.splitlines():
        if not line or line.startswith('#'):
            continue
        parsed.add(line)
    return parsed


def print_tests(tests):
    for test in sorted(tests):
        print(' ', test)


def ratchet_up_passing(tracking_path, tests):
    try:
        old = set(open(tracking_path, 'r'))
    except FileNotFoundError:
        old = set()
    new = set(t + '\n' for t in tests)
    merged = sorted(old | new)
    open(tracking_path, 'w+').writelines(merged)


def ratchet_down_failing(tracking_path, tests):
    new = set(t + '\n' for t in tests)
    open(tracking_path, 'w+').writelines(sorted(new))


def main(direction, junitxml_path, tracking_path):
    '''Takes a string indicating which direction to ratchet, "up" or "down,"
    and two paths, one to test-runner output in JUnit XML format, the other to
    a file tracking test results (one test case dotted name per line). Walk the
    former looking for the latter, and react appropriately.

    >>> inp = _mktemp_junitxml('0100111')
    >>> out = _mktemp_tracking('0000000')
    >>> _test_main('up', inp.name, out.name)
    Some tests not required to pass did:
      c1.t
      c4.t
      c5.t
      c6.t
    Conveniently, they have been added to `<tracking_path>` for you. Perhaps commit that?
    Eep! 0 test(s) were required to pass, but instead 4 did. 🐭

    '''

    results = JUnitXMLFile(junitxml_path).parse()

    if tracking_path == '...':
        # Shortcut to aid in debugging XML parsing issues.
        results.report()
        return

    previous = load_previous_results(tracking_path)
    current = set(results.passed if direction == 'up' else results.failed)

    subjunctive = {'up': 'required to pass', 'down': 'expected to fail'}[direction]
    ratchet = None

    too_many = current - previous
    if too_many:
        print(f'Some tests not {subjunctive} did:')
        print_tests(too_many)
        if direction == 'up':
            # Too many passing tests is good -- let's do more of those!
            ratchet_up_passing(tracking_path, current)
            print(f'Conveniently, they have been added to `{tracking_path}` for you. Perhaps commit that?')

    not_enough = previous - current
    if not_enough:
        print(f'Some tests {subjunctive} didn\'t:')
        print_tests(not_enough)
        if direction == 'down':
            # Not enough failing tests is good -- let's do more of those!
            ratchet_down_failing(tracking_path, current)
            print(f'Conveniently, they have been removed from `{tracking_path}` for you. Perhaps commit that?')

    if too_many or not_enough:
        print(f'Eep! {len(previous)} test(s) were {subjunctive}, but instead {len(current)} did. 🐭')
        return 1

    print(f'All and only tests {subjunctive} did. 💃')
    return 0


# When called as an executable ...

if __name__ == '__main__':
    try:
        direction, junitxml_path, tracking_path = sys.argv[1:4]
        if direction not in ('up', 'down'):
            raise ValueError
    except ValueError:
        doc = '\n'.join(__doc__.splitlines()[:6])
        doc = re.sub(' ratchet.py', f' {sys.argv[0]}', doc)
        print(doc, file=sys.stderr)
        exit_code = 2
    else:
        exit_code = main(direction, junitxml_path, tracking_path)
    sys.exit(exit_code)


# Helpers for when called under doctest ...

def _test(*a):
    return _test_main(*_mk(*a))


def _test_main(direction, junitxml, tracking):
    '''Takes a string 'up' or 'down' and paths to (or open file objects for)
    the JUnit XML and tracking files to use for this test run. Captures and
    emits stdout (slightly modified) for inspection via doctest.'''
    junitxml_path = junitxml.name if hasattr(junitxml, 'name') else junitxml
    tracking_path = tracking.name if hasattr(tracking, 'name') else tracking

    old_stdout = sys.stdout
    sys.stdout = io.StringIO()
    try:
        main(direction, junitxml_path, tracking_path)
    finally:
        sys.stdout.seek(0)
        out = sys.stdout.read()
        out = re.sub('`.*?`', '`<tracking_path>`', out).strip()
        sys.stdout = old_stdout
        print(out)


class _PotentialFile(object):
    '''Represent a file that we are able to create but which doesn't exist yet,
    and which, if we create it, will be automatically torn down when the test
    run is over.'''

    def __init__(self, filename):
        self.d = tempfile.TemporaryDirectory()
        self.name = os.path.join(self.d.name, filename)


def _mk(direction, spec_junitxml, spec_tracking):
    '''Takes a string 'up' or 'down' and two bit strings specifying the state
    of the JUnit XML results file and the tracking file to set up for this test
    case. Returns the direction (unharmed) and two file-ish objects.

    If a spec string is None the corresponding return value will be a
    _PotentialFile object, which has a .name attribute (like a true file
    object) that points to a file that does not exist, but could.

    The reason not to simply return the path in all cases is that the file
    objects are actually temporary file objects that destroy the underlying
    file when they go out of scope, and we want to keep the underlying file
    around until the end of the test run.'''

    if None not in (spec_junitxml, spec_tracking):
        if len(spec_junitxml) != len(spec_tracking):
            raise ValueError(f'if both given, must be the same length: `{spec_junitxml}` and `{spec_tracking}`')
    if spec_junitxml is None:
        junitxml_fp = _PotentialFile('results.xml')
    else:
        junitxml_fp = _mktemp_junitxml(spec_junitxml)
    if spec_tracking is None:
        tracking_fp = _PotentialFile('tracking')
    else:
        tracking_fp = _mktemp_tracking(spec_tracking)
    return direction, junitxml_fp, tracking_fp


def _mktemp_junitxml(spec):
    '''Test helper to generate a raw JUnit XML file.

    >>> fp = _mktemp_junitxml('00101')
    >>> open(fp.name).read()[:11]
    '<testsuite>'

    '''
    fp = tempfile.NamedTemporaryFile()
    fp.write(b'<testsuite>')

    passed = '''\
<testcase classname="c{i}" name="t"></testcase>
'''
    failed = '''\
<testcase classname="c{i}" name="t">
<failure>Traceback (most recent call last):
  File "/foo/bar/baz/buz.py", line 1, in <module>
NameError: name 'heck' is not defined
</failure>
</testcase>
'''

    i = 0
    for c in spec:
        if c == '0':
            out = failed
        elif c == '1':
            out = passed
        else:
            raise ValueError(f'bad c: `{c}`')
        fp.write(out.format(i=i).encode('utf8'))
        i += 1

    fp.write(b'</testsuite>')
    fp.flush()
    return fp


def _mktemp_tracking(spec):
    '''Test helper to prefabricate a tracking file.

    >>> fp = _mktemp_tracking('01101')
    >>> print(open(fp.name).read()[:-1])
    c1.t
    c2.t
    c4.t

    '''
    fp = tempfile.NamedTemporaryFile()

    i = 0
    for c in spec:
        if c == '0':
            pass
        elif c == '1':
            fp.write(f'c{i}.t\n'.encode('utf8'))
        else:
            raise ValueError(f'bad c: `{c}`')
        i += 1

    fp.flush()
    return fp
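Editor's note, not part of the commit: JUnitXMLFile.parse() walks a flat <testsuite> whose <testcase> children carry classname and name attributes, and treats any child element other than system-err (typically a <failure>) as a failed result; that is exactly the shape the _mktemp_junitxml helper fabricates. A minimal sketch of producing such a file by hand follows; variable names here are illustrative only.

# Sketch: build the kind of JUnit XML document that JUnitXMLFile.parse()
# consumes, then note what parse() would report for it.
import tempfile
import xml.etree.ElementTree as Etree

suite = Etree.Element('testsuite')
Etree.SubElement(suite, 'testcase', classname='c0', name='t')        # no children -> counted as passed
failing = Etree.SubElement(suite, 'testcase', classname='c1', name='t')
failure = Etree.SubElement(failing, 'failure', message='NameError')  # non-system-err child -> counted as failed
failure.text = 'Traceback (most recent call last): ...'

with tempfile.NamedTemporaryFile(suffix='.xml', delete=False) as fp:
    Etree.ElementTree(suite).write(fp)

# Given this file, JUnitXMLFile(fp.name).parse() would report
# passed == ['c0.t'] and failed == ['c1.t'].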
misc/python3/ratchet.sh: new executable file (37 lines)

#!/usr/bin/env bash
set -euxo pipefail
tracking_filename="ratchet-passing"

# Start somewhere predictable.
cd "$(dirname $0)"
base=$(pwd)

# Actually, though, trial outputs some things that are only gitignored in the project root.
cd "../.."

# Since both of the next calls are expected to exit non-0, relax our guard.
set +e
SUBUNITREPORTER_OUTPUT_PATH="$base/results.subunit2" trial --reporter subunitv2-file allmydata
subunit2junitxml < "$base/results.subunit2" > "$base/results.xml"
set -e

# Okay, now we're clear.
cd "$base"

# Make sure ratchet.py itself is clean.
python3 -m doctest ratchet.py

# Now see about Tahoe-LAFS (also expected to fail) ...
set +e
python3 ratchet.py up results.xml "$tracking_filename"
code=$?
set -e

# Emit a diff of the tracking file, to aid in the situation where changes are
# not discovered until CI (where TERM might be `dumb`).
if [ $TERM = 'dumb' ]; then
  export TERM=ansi
fi
git diff "$tracking_filename"

exit $code
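Editor's note, not part of the commit: ratchet.sh propagates ratchet.py's exit status via `exit $code`, and the module docstring defines those codes as 0 for no changes, 1 for changes of either kind, and 2 for an invocation error. A hedged sketch of driving the same invocation from Python and branching on the codes; the paths mirror the ones used above but are only examples.

# Sketch: run ratchet.py via subprocess and act on its documented exit codes.
import subprocess
import sys

proc = subprocess.run([
    sys.executable, 'misc/python3/ratchet.py', 'up',
    'misc/python3/results.xml', 'misc/python3/ratchet-passing',
])
if proc.returncode == 0:
    print('no changes observed; the tracking file already matches this run')
elif proc.returncode == 1:
    print('changes observed; review and commit the updated tracking file')
else:
    print('invocation error (bad direction or missing arguments)', file=sys.stderr)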
newsfragments/3325.other: new empty file

newsfragments/3340.other: new empty file
setup.py: 11 lines changed

@@ -141,8 +141,10 @@ tor_requires = [
 ]

 i2p_requires = [
-    # See the comment in tor_requires.
-    "txi2p >= 0.3.2",
+    # txi2p has Python 3 support, but it's unreleased: https://github.com/str4d/txi2p/issues/10.
+    # URL lookups are in PEP-508 (via https://stackoverflow.com/a/54794506).
+    # Also see the comment in tor_requires.
+    "txi2p @ git+https://github.com/str4d/txi2p@0611b9a86172cb70d2f5e415a88eee9f230590b3#egg=txi2p",
 ]

 if len(sys.argv) > 1 and sys.argv[1] == '--fakedependency':
@@ -353,7 +355,9 @@ setup(name="tahoe-lafs", # also set in __init__.py
       package_dir = {'':'src'},
       packages=find_packages('src') + ['allmydata.test.plugins'],
       classifiers=trove_classifiers,
-      python_requires="<3.0",
+      # We support Python 2.7, and we're working on support for 3.6 (the
+      # highest version that PyPy currently supports).
+      python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <3.7",
       install_requires=install_requires,
       extras_require={
           # Duplicate the Twisted pywin32 dependency here. See
@@ -382,6 +386,7 @@ setup(name="tahoe-lafs", # also set in __init__.py
           "fixtures",
           "beautifulsoup4",
           "html5lib",
+          "junitxml",
       ] + tor_requires + i2p_requires,
       "tor": tor_requires,
       "i2p": i2p_requires,
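Editor's note, not part of the commit: the new python_requires value is a PEP 440 specifier set that admits CPython 2.7 and 3.6 while excluding 3.0 through 3.5 and 3.7 and later, matching the comment added above. A quick way to sanity-check what it accepts, sketched with the third-party `packaging` library (which is not a dependency added by this commit):

# Sketch: evaluate the new python_requires string with packaging's
# SpecifierSet, the reference implementation of PEP 440 version specifiers.
from packaging.specifiers import SpecifierSet

spec = SpecifierSet(">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <3.7")

print("2.7.18" in spec)  # True:  Python 2.7 is still supported
print("3.6.8" in spec)   # True:  the version the porting work targets
print("3.5.9" in spec)   # False: excluded by !=3.5.*
print("3.7.0" in spec)   # False: excluded by <3.7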
@@ -1,6 +1,6 @@
 from __future__ import print_function

-import os, signal, sys, time
+import os, signal, time
 from random import randrange
 from six.moves import StringIO

@@ -8,7 +8,6 @@ from twisted.internet import reactor, defer
 from twisted.python import failure
 from twisted.trial import unittest

-from allmydata.util import fileutil, log
 from ..util.assertutil import precondition
 from allmydata.util.encodingutil import (unicode_platform, get_filesystem_encoding,
                                          get_io_encoding)
@@ -89,39 +88,6 @@ class ReallyEqualMixin(object):
         self.assertEqual(type(a), type(b), "a :: %r, b :: %r, %r" % (a, b, msg))


-class NonASCIIPathMixin(object):
-    def mkdir_nonascii(self, dirpath):
-        # Kludge to work around the fact that buildbot can't remove a directory tree that has
-        # any non-ASCII directory names on Windows. (#1472)
-        if sys.platform == "win32":
-            def _cleanup():
-                try:
-                    fileutil.rm_dir(dirpath)
-                finally:
-                    if os.path.exists(dirpath):
-                        msg = ("We were unable to delete a non-ASCII directory %r created by the test. "
-                               "This is liable to cause failures on future builds." % (dirpath,))
-                        print(msg)
-                        log.err(msg)
-            self.addCleanup(_cleanup)
-        os.mkdir(dirpath)
-
-    def unicode_or_fallback(self, unicode_name, fallback_name, io_as_well=False):
-        if not unicode_platform():
-            try:
-                unicode_name.encode(get_filesystem_encoding())
-            except UnicodeEncodeError:
-                return fallback_name
-
-        if io_as_well:
-            try:
-                unicode_name.encode(get_io_encoding())
-            except UnicodeEncodeError:
-                return fallback_name
-
-        return unicode_name
-
-
 class SignalMixin(object):
     # This class is necessary for any code which wants to use Processes
     # outside the usual reactor.run() environment. It is copied from

@@ -27,7 +27,7 @@ from allmydata.util import base62, mathutil
 def insecurerandstr(n):
     return bytes(list(map(random.randrange, [0]*n, [256]*n)))

-class T(unittest.TestCase):
+class Base62(unittest.TestCase):
     def _test_num_octets_that_encode_to_this_many_chars(self, chars, octets):
         assert base62.num_octets_that_encode_to_this_many_chars(chars) == octets, "%s != %s <- %s" % (octets, base62.num_octets_that_encode_to_this_many_chars(chars), chars)

@@ -38,11 +38,29 @@ class T(unittest.TestCase):
         self.assertIsInstance(encoded, bytes)
         self.assertIsInstance(bs, bytes)
         self.assertIsInstance(decoded, bytes)
+        # Encoded string only uses values from the base62 allowed characters:
+        self.assertFalse(set(encoded) - set(base62.chars))
+
     @given(input_bytes=st.binary(max_size=100))
     def test_roundtrip(self, input_bytes):
         self._test_roundtrip(input_bytes)

+    def test_known_values(self):
+        """Known values to ensure the algorithm hasn't changed."""
+
+        def check_expected(plaintext, encoded):
+            result1 = base62.b2a(plaintext)
+            self.assertEqual(encoded, result1)
+            result2 = base62.a2b(encoded)
+            self.assertEqual(plaintext, result2)
+
+        check_expected(b"hello", b'7tQLFHz')
+        check_expected(b"", b'0')
+        check_expected(b"zzz", b'0Xg7e')
+        check_expected(b"\x36\xffWAT", b'49pq4mq')
+        check_expected(b"1234 22323", b'1A0afZe9mxSZpz')
+        check_expected(b"______", b'0TmAuCHJX')
+
     def test_num_octets_that_encode_to_this_many_chars(self):
         return self._test_num_octets_that_encode_to_this_many_chars(2, 1)
         return self._test_num_octets_that_encode_to_this_many_chars(3, 2)

@@ -83,7 +83,7 @@ BASECONFIG_I = ("[client]\n"
                 "introducer.furl = %s\n"
                 )

-class Basic(testutil.ReallyEqualMixin, testutil.NonASCIIPathMixin, unittest.TestCase):
+class Basic(testutil.ReallyEqualMixin, unittest.TestCase):
     def test_loadable(self):
         basedir = "test_client.Basic.test_loadable"
         os.mkdir(basedir)

@@ -12,6 +12,17 @@ from future.utils import PY2
 if PY2:
     from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min  # noqa: F401

+if PY2:
+    def backwardscompat_bytes(b):
+        """
+        Replace Future bytes with native Python 2 bytes, so % works
+        consistently until other modules are ported.
+        """
+        return getattr(b, "__native__", lambda: b)()
+else:
+    def backwardscompat_bytes(b):
+        return b
+
 import base64

 from allmydata.util.assertutil import precondition
@@ -46,16 +57,16 @@ def get_trailing_chars_without_lsbs(N):
         d = {}
     return b''.join(_get_trailing_chars_without_lsbs(N, d=d))

-BASE32CHAR =b'['+get_trailing_chars_without_lsbs(0)+b']'
-BASE32CHAR_4bits =b'['+get_trailing_chars_without_lsbs(1)+b']'
-BASE32CHAR_3bits =b'['+get_trailing_chars_without_lsbs(2)+b']'
-BASE32CHAR_2bits =b'['+get_trailing_chars_without_lsbs(3)+b']'
-BASE32CHAR_1bits =b'['+get_trailing_chars_without_lsbs(4)+b']'
-BASE32STR_1byte = BASE32CHAR+BASE32CHAR_3bits
-BASE32STR_2bytes = BASE32CHAR+b'{3}'+BASE32CHAR_1bits
-BASE32STR_3bytes = BASE32CHAR+b'{4}'+BASE32CHAR_4bits
-BASE32STR_4bytes = BASE32CHAR+b'{6}'+BASE32CHAR_2bits
-BASE32STR_anybytes =b'((?:%s{8})*' % (BASE32CHAR,) + b"(?:|%s|%s|%s|%s))" % (BASE32STR_1byte, BASE32STR_2bytes, BASE32STR_3bytes, BASE32STR_4bytes)
+BASE32CHAR = backwardscompat_bytes(b'['+get_trailing_chars_without_lsbs(0)+b']')
+BASE32CHAR_4bits = backwardscompat_bytes(b'['+get_trailing_chars_without_lsbs(1)+b']')
+BASE32CHAR_3bits = backwardscompat_bytes(b'['+get_trailing_chars_without_lsbs(2)+b']')
+BASE32CHAR_2bits = backwardscompat_bytes(b'['+get_trailing_chars_without_lsbs(3)+b']')
+BASE32CHAR_1bits = backwardscompat_bytes(b'['+get_trailing_chars_without_lsbs(4)+b']')
+BASE32STR_1byte = backwardscompat_bytes(BASE32CHAR+BASE32CHAR_3bits)
+BASE32STR_2bytes = backwardscompat_bytes(BASE32CHAR+b'{3}'+BASE32CHAR_1bits)
+BASE32STR_3bytes = backwardscompat_bytes(BASE32CHAR+b'{4}'+BASE32CHAR_4bits)
+BASE32STR_4bytes = backwardscompat_bytes(BASE32CHAR+b'{6}'+BASE32CHAR_2bits)
+BASE32STR_anybytes = backwardscompat_bytes(bytes(b'((?:%s{8})*') % (BASE32CHAR,) + bytes(b"(?:|%s|%s|%s|%s))") % (BASE32STR_1byte, BASE32STR_2bytes, BASE32STR_3bytes, BASE32STR_4bytes))

 def b2a(os):
     """
@@ -83,7 +94,9 @@ NUM_OS_TO_NUM_QS=(0, 2, 4, 5, 7,)

 NUM_QS_TO_NUM_OS=(0, 1, 1, 2, 2, 3, 3, 4)
 NUM_QS_LEGIT=(1, 0, 1, 0, 1, 1, 0, 1,)
-NUM_QS_TO_NUM_BITS=tuple([x*8 for x in NUM_QS_TO_NUM_OS])
+NUM_QS_TO_NUM_BITS=tuple([_x*8 for _x in NUM_QS_TO_NUM_OS])
+if PY2:
+    del _x

 # A fast way to determine whether a given string *could* be base-32 encoded data, assuming that the
 # original data had 8K bits for a positive integer K.
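Editor's note, not part of the commit: the docstring added above gives the motivation for backwardscompat_bytes(): under Python 2 with the future library's builtins imports in effect, these module-level regex fragments would be "newbytes" objects whose %-formatting does not behave like native bytes, so they are coerced back through __native__(); on Python 3 the helper is a no-op. A standalone sketch of the same pattern, runnable under Python 3, with illustrative sample fragments:

# Sketch of the coercion pattern used above. Plain Python 3 bytes have no
# __native__ attribute, so the helper simply returns its argument unchanged.
def backwardscompat_bytes(b):
    return getattr(b, "__native__", lambda: b)()

BASE32CHAR = backwardscompat_bytes(b'[abc]')                      # illustrative character class
TWO_BYTES = backwardscompat_bytes(BASE32CHAR + b'{3}' + b'[ab]')  # composed fragment
ANY = backwardscompat_bytes(bytes(b'((?:%s{8})*') % (BASE32CHAR,) + b'(?:|%s))' % (TWO_BYTES,))
assert isinstance(ANY, bytes)
print(ANY)  # b'((?:[abc]{8})*(?:|[abc]{3}[ab]))'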
@@ -12,7 +12,7 @@ from errno import ENOENT
 if sys.platform == "win32":
     from ctypes import WINFUNCTYPE, WinError, windll, POINTER, byref, c_ulonglong, \
         create_unicode_buffer, get_last_error
-    from ctypes.wintypes import BOOL, DWORD, LPCWSTR, LPWSTR, LPVOID, HANDLE
+    from ctypes.wintypes import BOOL, DWORD, LPCWSTR, LPWSTR, LPVOID

 from twisted.python import log

@@ -538,60 +538,6 @@ def get_available_space(whichdir, reserved_space):
         return 0


-if sys.platform == "win32":
-    # <http://msdn.microsoft.com/en-us/library/aa363858%28v=vs.85%29.aspx>
-    CreateFileW = WINFUNCTYPE(
-        HANDLE, LPCWSTR, DWORD, DWORD, LPVOID, DWORD, DWORD, HANDLE,
-        use_last_error=True
-    )(("CreateFileW", windll.kernel32))
-
-    GENERIC_WRITE = 0x40000000
-    FILE_SHARE_READ = 0x00000001
-    FILE_SHARE_WRITE = 0x00000002
-    OPEN_EXISTING = 3
-    INVALID_HANDLE_VALUE = 0xFFFFFFFF
-
-    # <http://msdn.microsoft.com/en-us/library/aa364439%28v=vs.85%29.aspx>
-    FlushFileBuffers = WINFUNCTYPE(
-        BOOL, HANDLE,
-        use_last_error=True
-    )(("FlushFileBuffers", windll.kernel32))
-
-    # <http://msdn.microsoft.com/en-us/library/ms724211%28v=vs.85%29.aspx>
-    CloseHandle = WINFUNCTYPE(
-        BOOL, HANDLE,
-        use_last_error=True
-    )(("CloseHandle", windll.kernel32))
-
-    # <http://social.msdn.microsoft.com/forums/en-US/netfxbcl/thread/4465cafb-f4ed-434f-89d8-c85ced6ffaa8/>
-    def flush_volume(path):
-        abspath = os.path.realpath(path)
-        if abspath.startswith("\\\\?\\"):
-            abspath = abspath[4 :]
-        drive = os.path.splitdrive(abspath)[0]
-
-        print("flushing %r" % (drive,))
-        hVolume = CreateFileW(u"\\\\.\\" + drive,
-                              GENERIC_WRITE,
-                              FILE_SHARE_READ | FILE_SHARE_WRITE,
-                              None,
-                              OPEN_EXISTING,
-                              0,
-                              None
-                              )
-        if hVolume == INVALID_HANDLE_VALUE:
-            raise WinError(get_last_error())
-
-        if FlushFileBuffers(hVolume) == 0:
-            raise WinError(get_last_error())
-
-        CloseHandle(hVolume)
-else:
-    def flush_volume(path):
-        # use sync()?
-        pass
-

 class ConflictError(Exception):
     pass

@@ -1,7 +1,7 @@

 from twisted.internet import address
-from foolscap.api import Violation, RemoteException, DeadReferenceError, \
-     SturdyRef
+from foolscap.api import Violation, RemoteException, SturdyRef
+

 def add_version_to_remote_reference(rref, default):
     """I try to add a .version attribute to the given RemoteReference. I call
@@ -19,12 +19,6 @@ def add_version_to_remote_reference(rref, default):
     d.addCallbacks(_got_version, _no_get_version)
     return d

-def trap_and_discard(f, *errorTypes):
-    f.trap(*errorTypes)
-
-def trap_deadref(f):
-    return trap_and_discard(f, DeadReferenceError)
-

 def connection_hints_for_furl(furl):
     hints = []

@@ -297,7 +297,7 @@ def _get_platform():

 def _get_package_versions_and_locations():
     import warnings
-    from _auto_deps import package_imports, global_deprecation_messages, deprecation_messages, \
+    from ._auto_deps import package_imports, global_deprecation_messages, deprecation_messages, \
         runtime_warning_messages, warning_imports, ignorable

     def package_dir(srcfile):
tox.ini: 9 lines changed

@@ -7,7 +7,7 @@
 twisted = 1

 [tox]
-envlist = {py27,pypy27}{-coverage,}
+envlist = {py27,pypy27,py36}{-coverage,}
 minversion = 2.4

 [testenv]
@@ -45,8 +45,13 @@ usedevelop = False
 # tests.
 extras = test
 commands =
-    tahoe --version
     trial {env:TAHOE_LAFS_TRIAL_ARGS:--rterrors} {posargs:allmydata}
+    tahoe --version
+
+[testenv:py36]
+# git inside of ratchet.sh needs $HOME.
+passenv = HOME
+commands = {toxinidir}/misc/python3/ratchet.sh

 [testenv:integration]
 setenv =