Mirror of https://github.com/tahoe-lafs/tahoe-lafs.git (synced 2024-12-19 04:57:54 +00:00)
Merge 'origin/master' into 3733.use-newer-netifaces
Commit f4bba078d5

8  .github/workflows/ci.yml (vendored)
@@ -150,13 +150,15 @@ jobs:
      fail-fast: false
      matrix:
        os:
          - macos-latest
          - windows-latest
          - ubuntu-latest
        python-version:
          - 2.7
          - 3.6
          - 3.9
        include:
          - os: ubuntu-latest
            python-version: 3.6
          - os: macos-latest
            python-version: 2.7

    steps:
@@ -1,13 +1,18 @@
#! /usr/bin/python

# ./check-debugging.py src
"""
Checks for defer.setDebugging().

Runs on Python 3.

Usage: ./check-debugging.py src
"""

from __future__ import print_function

import sys, re, os

ok = True
umids = {}

for starting_point in sys.argv[1:]:
    for root, dirs, files in os.walk(starting_point):
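The truncated walk above is the heart of this checker; for orientation, here is a hedged, standalone sketch of the kind of scan it performs, flagging stray calls to Twisted's defer.setDebugging() left enabled in committed code. The regex and report format below are illustrative assumptions, not the script's actual logic.

    import os
    import re
    import sys

    # Hypothetical pattern: flag any source line that enables Deferred debugging.
    PATTERN = re.compile(r"\bsetDebugging\s*\(")

    ok = True
    for starting_point in sys.argv[1:]:
        for root, dirs, files in os.walk(starting_point):
            for fn in files:
                if not fn.endswith(".py"):
                    continue
                path = os.path.join(root, fn)
                with open(path, "r") as f:
                    for lineno, line in enumerate(f, start=1):
                        if PATTERN.search(line):
                            print("%s:%d: %s" % (path, lineno, line.strip()))
                            ok = False
    sys.exit(0 if ok else 1)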
@@ -1,186 +0,0 @@
#! /usr/bin/python

from __future__ import print_function

import os, sys, compiler
from compiler.ast import Node, For, While, ListComp, AssName, Name, Lambda, Function


def check_source(source):
    return check_thing(compiler.parse, source)

def check_file(path):
    return check_thing(compiler.parseFile, path)

def check_thing(parser, thing):
    try:
        ast = parser(thing)
    except SyntaxError as e:
        return e
    else:
        results = []
        check_ast(ast, results)
        return results

def check_ast(ast, results):
    """Check a node outside a loop."""
    if isinstance(ast, (For, While, ListComp)):
        check_loop(ast, results)
    else:
        for child in ast.getChildNodes():
            if isinstance(ast, Node):
                check_ast(child, results)

def check_loop(ast, results):
    """Check a particular outer loop."""

    # List comprehensions have a poorly designed AST of the form
    # ListComp(exprNode, [ListCompFor(...), ...]), in which the
    # result expression is outside the ListCompFor node even though
    # it is logically inside the loop(s).
    # There may be multiple ListCompFor nodes (in cases such as
    #   [lambda: (a,b) for a in ... for b in ...]
    # ), and that case they are not nested in the AST. But these
    # warts (nonobviously) happen not to matter for our analysis.

    assigned = {}  # maps name to lineno of topmost assignment
    nested = set()
    collect_assigned_and_nested(ast, assigned, nested)

    # For each nested function...
    for funcnode in nested:
        # Check for captured variables in this function.
        captured = set()
        collect_captured(funcnode, assigned, captured, False)
        for name in captured:
            # We want to report the outermost capturing function
            # (since that is where the workaround will need to be
            # added), and the topmost assignment to the variable.
            # Just one report per capturing function per variable
            # will do.
            results.append(make_result(funcnode, name, assigned[name]))

        # Check each node in the function body in case it
        # contains another 'for' loop.
        childnodes = funcnode.getChildNodes()[len(funcnode.defaults):]
        for child in childnodes:
            check_ast(child, results)

def collect_assigned_and_nested(ast, assigned, nested):
    """
    Collect the names assigned in this loop, not including names
    assigned in nested functions. Also collect the nodes of functions
    that are nested one level deep.
    """
    if isinstance(ast, AssName):
        if ast.name not in assigned or assigned[ast.name] > ast.lineno:
            assigned[ast.name] = ast.lineno
    else:
        childnodes = ast.getChildNodes()
        if isinstance(ast, (Lambda, Function)):
            nested.add(ast)

            # The default argument expressions are "outside" the
            # function, even though they are children of the
            # Lambda or Function node.
            childnodes = childnodes[:len(ast.defaults)]

        for child in childnodes:
            if isinstance(ast, Node):
                collect_assigned_and_nested(child, assigned, nested)

def collect_captured(ast, assigned, captured, in_function_yet):
    """Collect any captured variables that are also in assigned."""
    if isinstance(ast, Name):
        if ast.name in assigned:
            captured.add(ast.name)
    else:
        childnodes = ast.getChildNodes()
        if isinstance(ast, (Lambda, Function)):
            # Formal parameters of the function are excluded from
            # captures we care about in subnodes of the function body.
            new_assigned = assigned.copy()
            remove_argnames(ast.argnames, new_assigned)

            if len(new_assigned) > 0:
                for child in childnodes[len(ast.defaults):]:
                    collect_captured(child, new_assigned, captured, True)

            # The default argument expressions are "outside" *this*
            # function, even though they are children of the Lambda or
            # Function node.
            if not in_function_yet:
                return
            childnodes = childnodes[:len(ast.defaults)]

        for child in childnodes:
            if isinstance(ast, Node):
                collect_captured(child, assigned, captured, True)


def remove_argnames(names, fromset):
    for element in names:
        if element in fromset:
            del fromset[element]
        elif isinstance(element, (tuple, list)):
            remove_argnames(element, fromset)


def make_result(funcnode, var_name, var_lineno):
    if hasattr(funcnode, 'name'):
        func_name = 'function %r' % (funcnode.name,)
    else:
        func_name = '<lambda>'
    return (funcnode.lineno, func_name, var_name, var_lineno)

def report(out, path, results):
    for r in results:
        print(path + (":%r %s captures %r assigned at line %d" % r), file=out)

def check(sources, out):
    class Counts(object):
        n = 0
        processed_files = 0
        suspect_files = 0
        error_files = 0
    counts = Counts()

    def _process(path):
        results = check_file(path)
        if isinstance(results, SyntaxError):
            print(path + (" NOT ANALYSED due to syntax error: %s" % results), file=out)
            counts.error_files += 1
        else:
            report(out, path, results)
            counts.n += len(results)
            counts.processed_files += 1
            if len(results) > 0:
                counts.suspect_files += 1

    for source in sources:
        print("Checking %s..." % (source,), file=out)
        if os.path.isfile(source):
            _process(source)
        else:
            for (dirpath, dirnames, filenames) in os.walk(source):
                for fn in filenames:
                    (basename, ext) = os.path.splitext(fn)
                    if ext == '.py':
                        _process(os.path.join(dirpath, fn))

    print("%d suspiciously captured variables in %d out of %d file(s)."
          % (counts.n, counts.suspect_files, counts.processed_files), file=out)
    if counts.error_files > 0:
        print("%d file(s) not processed due to syntax errors."
              % (counts.error_files,), file=out)
    return counts.n


sources = ['src']
if len(sys.argv) > 1:
    sources = sys.argv[1:]
if check(sources, sys.stderr) > 0:
    sys.exit(1)


# TODO: self-tests
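For context, the bug this deleted checker hunted for is Python's late binding of variables captured from a loop: every closure created in the loop body sees the variable's final value. A minimal, self-contained sketch of the problem and of the default-argument workaround the checker's reports point at (the names here are illustrative, not taken from the Tahoe codebase):

    # Each lambda closes over the variable i itself, not its value at creation
    # time, so after the loop every callback returns the final value.
    callbacks = [lambda: i for i in range(3)]
    print([cb() for cb in callbacks])   # prints [2, 2, 2], usually not intended

    # The usual fix: bind the current value as a default argument, which is
    # evaluated once, when the lambda is created.
    callbacks = [lambda i=i: i for i in range(3)]
    print([cb() for cb in callbacks])   # prints [0, 1, 2]

The pylint check enabled later in this diff (cell-var-from-loop) reports the same class of mistake.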
@@ -1,4 +1,10 @@
#! /usr/bin/python
#! /usr/bin/python3

"""
Ensure UMIDS are unique.

This runs on Python 3.
"""

# ./check-umids.py src
@@ -201,7 +201,9 @@ class CPUWatcher(service.MultiService, resource.Resource, Referenceable):
                log.msg("error reading process %s (%s), ignoring" % (pid, name))
                log.err()
        try:
            pickle.dump(self.history, open("history.pickle.tmp", "wb"))
            # Newer protocols won't work in Python 2; when it is dropped,
            # protocol v4 can be used (added in Python 3.4).
            pickle.dump(self.history, open("history.pickle.tmp", "wb"), protocol=2)
            os.rename("history.pickle.tmp", "history.pickle")
        except:
            pass
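The pickle changes repeated throughout this commit all pin protocol 2, the newest format Python 2 can still read, so state files written under Python 3 remain loadable by Python 2 nodes; once Python 2 support is dropped, protocol 4 (Python 3.4+) becomes usable, as the comments note. A small illustrative sketch of the write-then-rename pattern, using placeholder file names rather than Tahoe's:

    import os
    import pickle

    state = {"cycle": 3, "last-complete-prefix": None}   # any picklable state

    # Protocol 2 is the highest protocol Python 2 understands; protocols 3 and 4
    # are Python 3 only (protocol 4 was added in Python 3.4).
    with open("state.pickle.tmp", "wb") as f:
        pickle.dump(state, f, protocol=2)

    # Rename only after a complete write, so readers never see a partial file.
    os.rename("state.pickle.tmp", "state.pickle")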
0  newsfragments/3702.minor (new file)
0  newsfragments/3722.minor (new file)
0  newsfragments/3732.minor (new file)
0  newsfragments/3734.minor (new file)
0  newsfragments/3736.minor (new file)

1  newsfragments/3738.bugfix (new file)
@@ -0,0 +1 @@
Fix regression where uploading files with non-ASCII names failed.

1  newsfragments/3739.bugfix (new file)
@@ -0,0 +1 @@
Fixed annoying UnicodeWarning message on Python 2 when running CLI tools.

0  newsfragments/3741.minor (new file)
@@ -1,3 +1,15 @@
"""
Authentication for frontends.
"""
from __future__ import unicode_literals
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from future.utils import PY2
if PY2:
    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401

from zope.interface import implementer
from twisted.internet import defer
from twisted.cred import error, checkers, credentials
@@ -94,11 +94,17 @@ def do_http(method, url, body=b""):


def format_http_success(resp):
    return "%s %s" % (resp.status, quote_output(resp.reason, quotemarks=False))
    # ensure_text() shouldn't be necessary when Python 2 is dropped.
    return quote_output(
        "%s %s" % (resp.status, six.ensure_text(resp.reason)),
        quotemarks=False)

def format_http_error(msg, resp):
    return "%s: %s %s\n%s" % (msg, resp.status, quote_output(resp.reason, quotemarks=False),
                              quote_output(resp.read(), quotemarks=False))
    # ensure_text() shouldn't be necessary when Python 2 is dropped.
    return quote_output(
        "%s: %s %s\n%s" % (msg, resp.status, six.ensure_text(resp.reason),
                           six.ensure_text(resp.read())),
        quotemarks=False)

def check_http_error(resp, stderr):
    if resp.status < 200 or resp.status >= 300:
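Both rewritten helpers lean on six.ensure_text(), which papers over the bytes-versus-text split between Python 2 and 3 before the value reaches quote_output(). A tiny standalone illustration of its behaviour (not Tahoe code):

    import six

    # ensure_text() always returns the native text type: bytes are decoded
    # (UTF-8 by default) and text values pass through unchanged.
    print(six.ensure_text(b"OK"))           # u'OK' on Python 2, 'OK' on Python 3
    print(six.ensure_text(u"Not Found"))    # already text, returned as-is

    # This lets an HTTP reason phrase or body be interpolated into a text
    # format string on either interpreter without mixing bytes and unicode.
    reason = six.ensure_text(b"Bad Request")
    print(u"%s %s" % (400, reason))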
@@ -204,7 +204,8 @@ def _setup_coverage(reactor):
    """
    # can we put this _setup_coverage call after we hit
    # argument-parsing?
    if '--coverage' not in sys.argv:
    # ensure_str() only necessary on Python 2.
    if six.ensure_str('--coverage') not in sys.argv:
        return
    sys.argv.remove('--coverage')
@@ -252,7 +252,9 @@ class ShareCrawler(service.MultiService):
        self.state["last-complete-prefix"] = last_complete_prefix
        tmpfile = self.statefile + ".tmp"
        with open(tmpfile, "wb") as f:
            pickle.dump(self.state, f)
            # Newer protocols won't work in Python 2; when it is dropped,
            # protocol v4 can be used (added in Python 3.4).
            pickle.dump(self.state, f, protocol=2)
        fileutil.move_into_place(tmpfile, self.statefile)

    def startService(self):
@@ -95,7 +95,9 @@ class LeaseCheckingCrawler(ShareCrawler):
        if not os.path.exists(self.historyfile):
            history = {}  # cyclenum -> dict
            with open(self.historyfile, "wb") as f:
                pickle.dump(history, f)
                # Newer protocols won't work in Python 2; when it is dropped,
                # protocol v4 can be used (added in Python 3.4).
                pickle.dump(history, f, protocol=2)

    def create_empty_cycle_dict(self):
        recovered = self.create_empty_recovered_dict()
@@ -319,7 +321,9 @@ class LeaseCheckingCrawler(ShareCrawler):
            oldcycles = sorted(history.keys())
            del history[oldcycles[0]]
            with open(self.historyfile, "wb") as f:
                pickle.dump(history, f)
                # Newer protocols won't work in Python 2; when it is dropped,
                # protocol v4 can be used (added in Python 3.4).
                pickle.dump(history, f, protocol=2)

    def get_state(self):
        """In addition to the crawler state described in
@@ -55,6 +55,11 @@ class Cp(GridTestMixin, CLITestMixin, unittest.TestCase):
        d.addCallback(lambda res: self.do_cli("get", "tahoe:" + artonwall_arg))
        d.addCallback(lambda rc_out_err: self.assertEqual(rc_out_err[1], DATA1))

        # Version where destination filename is explicitly Unicode too.
        d.addCallback(lambda res: self.do_cli("cp", fn1, "tahoe:" + artonwall_arg + "-2"))
        d.addCallback(lambda res: self.do_cli("get", "tahoe:" + artonwall_arg + "-2"))
        d.addCallback(lambda rc_out_err: self.assertEqual(rc_out_err[1], DATA1))

        d.addCallback(lambda res: self.do_cli("cp", fn2, "tahoe:"))

        d.addCallback(lambda res: self.do_cli("get", "tahoe:Metallica"))
@@ -74,7 +79,7 @@ class Cp(GridTestMixin, CLITestMixin, unittest.TestCase):
            self.failUnlessReallyEqual(rc, 0)
            if PY2:
                out = out.decode(get_io_encoding())
            self.failUnlessReallyEqual(out, u"Metallica\n\u00C4rtonwall\n")
            self.failUnlessReallyEqual(out, u"Metallica\n\u00C4rtonwall\n\u00C4rtonwall-2\n")
            self.assertEqual(len(err), 0, err)
        d.addCallback(_check)
@@ -39,8 +39,10 @@ dBSD8940XU3YW+oeq8e+p3yQ2GinHfeJ3BYQyNQLuMAJ
""")

DUMMY_ACCOUNTS = u"""\
alice password URI:DIR2:aaaaaaaaaaaaaaaaaaaaaaaaaa:1111111111111111111111111111111111111111111111111111
alice herpassword URI:DIR2:aaaaaaaaaaaaaaaaaaaaaaaaaa:1111111111111111111111111111111111111111111111111111
bob sekrit URI:DIR2:bbbbbbbbbbbbbbbbbbbbbbbbbb:2222222222222222222222222222222222222222222222222222

# dennis password URI:DIR2:aaaaaaaaaaaaaaaaaaaaaaaaaa:1111111111111111111111111111111111111111111111111111
carol {key} URI:DIR2:cccccccccccccccccccccccccc:3333333333333333333333333333333333333333333333333333
""".format(key=str(DUMMY_KEY.public().toString("openssh"), "ascii")).encode("ascii")

@@ -54,7 +56,7 @@ class AccountFileCheckerKeyTests(unittest.TestCase):
        abspath = abspath_expanduser_unicode(str(self.account_file.path))
        self.checker = auth.AccountFileChecker(None, abspath)

    def test_unknown_user(self):
    def test_unknown_user_ssh(self):
        """
        AccountFileChecker.requestAvatarId returns a Deferred that fires with
        UnauthorizedLogin if called with an SSHPrivateKey object with a
@@ -65,6 +67,19 @@ class AccountFileCheckerKeyTests(unittest.TestCase):
        avatarId = self.checker.requestAvatarId(key_credentials)
        return self.assertFailure(avatarId, error.UnauthorizedLogin)

    def test_unknown_user_password(self):
        """
        AccountFileChecker.requestAvatarId returns a Deferred that fires with
        UnauthorizedLogin if called with an SSHPrivateKey object with a
        username not present in the account file.

        We use a commented out user, so we're also checking that comments are
        skipped.
        """
        key_credentials = credentials.UsernamePassword(b"dennis", b"password")
        d = self.checker.requestAvatarId(key_credentials)
        return self.assertFailure(d, error.UnauthorizedLogin)

    def test_password_auth_user_with_ssh_key(self):
        """
        AccountFileChecker.requestAvatarId returns a Deferred that fires with
@@ -81,7 +96,21 @@ class AccountFileCheckerKeyTests(unittest.TestCase):
        AccountFileChecker.requestAvatarId returns a Deferred that fires with
        the user if the correct password is given.
        """
        key_credentials = credentials.UsernamePassword(b"alice", b"password")
        key_credentials = credentials.UsernamePassword(b"alice", b"herpassword")
        d = self.checker.requestAvatarId(key_credentials)
        def authenticated(avatarId):
            self.assertEqual(
                (b"alice",
                 b"URI:DIR2:aaaaaaaaaaaaaaaaaaaaaaaaaa:1111111111111111111111111111111111111111111111111111"),
                (avatarId.username, avatarId.rootcap))
        return d

    def test_password_auth_user_with_correct_hashed_password(self):
        """
        AccountFileChecker.requestAvatarId returns a Deferred that fires with
        the user if the correct password is given in hashed form.
        """
        key_credentials = credentials.UsernameHashedPassword(b"alice", b"herpassword")
        d = self.checker.requestAvatarId(key_credentials)
        def authenticated(avatarId):
            self.assertEqual(
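The new tests above follow Twisted's standard pattern for credential checkers: requestAvatarId() returns a Deferred, and trial's assertFailure() verifies that it errbacks with UnauthorizedLogin. A generic sketch of that pattern against Twisted's stock in-memory checker rather than Tahoe's AccountFileChecker (illustration only, not Tahoe code):

    from twisted.cred import checkers, credentials, error
    from twisted.trial import unittest


    class CredCheckerPatternTests(unittest.TestCase):
        """Illustration only; uses Twisted's in-memory checker, not Tahoe's."""

        def setUp(self):
            self.checker = checkers.InMemoryUsernamePasswordDatabaseDontUse(
                alice=b"password")

        def test_unknown_user(self):
            creds = credentials.UsernamePassword(b"dennis", b"password")
            d = self.checker.requestAvatarId(creds)
            # assertFailure succeeds only if d errbacks with UnauthorizedLogin,
            # mirroring test_unknown_user_ssh/_password above.
            return self.assertFailure(d, error.UnauthorizedLogin)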
@@ -52,6 +52,7 @@ PORTED_MODULES = [
    "allmydata.deep_stats",
    "allmydata.dirnode",
    "allmydata.frontends",
    "allmydata.frontends.auth",
    "allmydata.frontends.sftpd",
    "allmydata.hashtree",
    "allmydata.history",
@@ -90,6 +90,7 @@ from allmydata.util.time_format import (
)
from allmydata.util.encodingutil import (
    quote_output,
    quote_output_u,
    to_bytes,
)
from allmydata.util import abbreviate
@@ -324,7 +325,7 @@ def humanize_exception(exc):
        return ("There was already a child by that name, and you asked me "
                "to not replace it.", http.CONFLICT)
    if isinstance(exc, NoSuchChildError):
        quoted_name = quote_output(exc.args[0], encoding="utf-8", quotemarks=False)
        quoted_name = quote_output_u(exc.args[0], quotemarks=False)
        return ("No such child: %s" % quoted_name, http.NOT_FOUND)
    if isinstance(exc, NotEnoughSharesError):
        t = ("NotEnoughSharesError: This indicates that some "
55  tox.ini
@@ -104,17 +104,15 @@ setenv =
commands =
    python --version
    # NOTE: 'run with "py.test --keep-tempdir -s -v integration/" to debug failures'
    python3 -b -m pytest --timeout=1800 --coverage -v {posargs:integration}
    py.test --timeout=1800 --coverage -v {posargs:integration}
    coverage combine
    coverage report


# Once 2.7 is dropped, this can be removed. It just does flake8 with Python 2
# since that can give different results than flake8 on Python 3.
[testenv:codechecks]
basepython = python2.7
# On macOS, git inside of towncrier needs $HOME.
passenv = HOME
whitelist_externals =
    /bin/mv
setenv =
    # Workaround an error when towncrier is run under the VCS hook,
    # https://stackoverflow.com/a/4027726/624787:
@@ -128,10 +126,31 @@ setenv =
    DEFAULT_FILES=src integration static misc setup.py
commands =
    flake8 {posargs:{env:DEFAULT_FILES}}


[testenv:codechecks3]
basepython = python3
deps =
    # Newer versions of PyLint have buggy configuration
    # (https://github.com/PyCQA/pylint/issues/4574), so stick to old version
    # for now.
    pylint < 2.5
# On macOS, git inside of towncrier needs $HOME.
passenv = HOME
whitelist_externals =
    /bin/mv
setenv =
    # If no positional arguments are given, try to run the checks on the
    # entire codebase, including various pieces of supporting code.
    DEFAULT_FILES=src integration static misc setup.py
commands =
    flake8 {posargs:{env:DEFAULT_FILES}}
    python misc/coding_tools/check-umids.py {posargs:{env:DEFAULT_FILES}}
    python misc/coding_tools/check-debugging.py {posargs:{env:DEFAULT_FILES}}
    python misc/coding_tools/find-trailing-spaces.py -r {posargs:{env:DEFAULT_FILES}}
    python misc/coding_tools/check-miscaptures.py {posargs:{env:DEFAULT_FILES}}
    # PyLint has other useful checks, might want to enable them:
    # http://pylint.pycqa.org/en/latest/technical_reference/features.html
    pylint --disable=all --enable=cell-var-from-loop {posargs:{env:DEFAULT_FILES}}

    # If towncrier.check fails, you forgot to add a towncrier news
    # fragment explaining the change in this branch. Create one at
@@ -140,22 +159,16 @@ commands =
    python -m towncrier.check --config towncrier.toml


[testenv:codechecks3]
basepython = python3
setenv =
    # If no positional arguments are given, try to run the checks on the
    # entire codebase, including various pieces of supporting code.
    DEFAULT_FILES=src integration static misc setup.py
commands =
    flake8 {posargs:{env:DEFAULT_FILES}}


[testenv:typechecks]
basepython = python3
skip_install = True
deps =
    mypy
    git+https://github.com/Shoobx/mypy-zope
    mypy-zope
    types-mock
    types-six
    types-PyYAML
    types-pkg_resources
    git+https://github.com/warner/foolscap
    # Twisted 21.2.0 introduces some type hints which we are not yet
    # compatible with.
@@ -166,9 +179,9 @@ commands = mypy src

[testenv:draftnews]
passenv = TAHOE_LAFS_* PIP_* SUBUNITREPORTER_* USERPROFILE HOMEDRIVE HOMEPATH
# see comment in [testenv] about "certifi"
whitelist_externals = mv
deps =
    # see comment in [testenv] about "certifi"
    certifi
    towncrier==21.3.0
commands =
@@ -176,9 +189,11 @@ commands =

[testenv:news]
passenv = TAHOE_LAFS_* PIP_* SUBUNITREPORTER_* USERPROFILE HOMEDRIVE HOMEPATH
# see comment in [testenv] about "certifi"
whitelist_externals = mv
whitelist_externals =
    mv
    git
deps =
    # see comment in [testenv] about "certifi"
    certifi
    towncrier==21.3.0
commands =