Merge remote-tracking branch 'origin/master' into 3579.ftp-python-3
This commit is contained in commit 6fd62eb25e.
@@ -91,6 +91,9 @@ workflows:
- "build-porting-depgraph":
    <<: *DOCKERHUB_CONTEXT
- "typechecks":
    <<: *DOCKERHUB_CONTEXT
images:
# Build the Docker images used by the ci jobs. This makes the ci jobs
# faster and takes various spurious failures out of the critical path.

@@ -475,6 +478,18 @@ jobs:
. /tmp/venv/bin/activate
./misc/python3/depgraph.sh

typechecks:
  docker:
    - <<: *DOCKERHUB_AUTH
      image: "tahoelafsci/ubuntu:18.04-py3"

  steps:
    - "checkout"
    - run:
        name: "Validate Types"
        command: |
          /tmp/venv/bin/tox -e typechecks

build-image: &BUILD_IMAGE
# This is a template for a job to build a Docker image that has as much of
# the setup as we can manage already done and baked in. This cuts down on

2 .github/CONTRIBUTING.rst vendored
@@ -17,4 +17,4 @@ Examples of contributions include:
* `Patch reviews <https://tahoe-lafs.org/trac/tahoe-lafs/wiki/PatchReviewProcess>`_

Before authoring or reviewing a patch,
please familiarize yourself with the `coding standard <https://tahoe-lafs.org/trac/tahoe-lafs/wiki/CodingStandards>`_.
please familiarize yourself with the `Coding Standards <https://tahoe-lafs.org/trac/tahoe-lafs/wiki/CodingStandards>`_ and the `Contributor Code of Conduct <../docs/CODE_OF_CONDUCT.md>`_.

62 .github/workflows/ci.yml vendored
@@ -30,17 +30,37 @@ jobs:
with:
args: install vcpython27

# See https://github.com/actions/checkout. A fetch-depth of 0
# fetches all tags and branches.
- name: Check out Tahoe-LAFS sources
  uses: actions/checkout@v2

- name: Fetch all history for all tags and branches
  run: git fetch --prune --unshallow
  with:
    fetch-depth: 0

- name: Set up Python ${{ matrix.python-version }}
  uses: actions/setup-python@v1
  with:
    python-version: ${{ matrix.python-version }}

# To use pip caching with GitHub Actions in an OS-independent
# manner, we need `pip cache dir` command, which became
# available since pip v20.1+. At the time of writing this,
# GitHub Actions offers pip v20.3.3 for both ubuntu-latest and
# windows-latest, and pip v20.3.1 for macos-latest.
- name: Get pip cache directory
  id: pip-cache
  run: |
    echo "::set-output name=dir::$(pip cache dir)"

# See https://github.com/actions/cache
- name: Use pip cache
  uses: actions/cache@v2
  with:
    path: ${{ steps.pip-cache.outputs.dir }}
    key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}
    restore-keys: |
      ${{ runner.os }}-pip-

- name: Install Python packages
  run: |
    pip install --upgrade codecov tox setuptools

@@ -103,15 +123,27 @@ jobs:
- name: Check out Tahoe-LAFS sources
  uses: actions/checkout@v2

- name: Fetch all history for all tags and branches
  run: git fetch --prune --unshallow
  with:
    fetch-depth: 0

- name: Set up Python ${{ matrix.python-version }}
  uses: actions/setup-python@v1
  with:
    python-version: ${{ matrix.python-version }}

- name: Get pip cache directory
  id: pip-cache
  run: |
    echo "::set-output name=dir::$(pip cache dir)"

- name: Use pip cache
  uses: actions/cache@v2
  with:
    path: ${{ steps.pip-cache.outputs.dir }}
    key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}
    restore-keys: |
      ${{ runner.os }}-pip-

- name: Install Python packages
  run: |
    pip install --upgrade tox

@@ -155,15 +187,27 @@ jobs:
- name: Check out Tahoe-LAFS sources
  uses: actions/checkout@v2

- name: Fetch all history for all tags and branches
  run: git fetch --prune --unshallow
  with:
    fetch-depth: 0

- name: Set up Python ${{ matrix.python-version }}
  uses: actions/setup-python@v1
  with:
    python-version: ${{ matrix.python-version }}

- name: Get pip cache directory
  id: pip-cache
  run: |
    echo "::set-output name=dir::$(pip cache dir)"

- name: Use pip cache
  uses: actions/cache@v2
  with:
    path: ${{ steps.pip-cache.outputs.dir }}
    key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}
    restore-keys: |
      ${{ runner.os }}-pip-

- name: Install Python packages
  run: |
    pip install --upgrade tox
@@ -28,7 +28,7 @@ import os
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = []
extensions = ['recommonmark']

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

@@ -36,7 +36,7 @@ templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
source_suffix = ['.rst', '.md']

# The encoding of source files.
#source_encoding = 'utf-8-sig'

1 docs/contributing.rst Normal file
@@ -0,0 +1 @@
.. include:: ../.github/CONTRIBUTING.rst
@@ -2032,10 +2032,11 @@ potential for surprises when the file store structure is changed.

Tahoe-LAFS provides a mutable file store, but the ways that the store can
change are limited. The only things that can change are:
* the mapping from child names to child objects inside mutable directories
  (by adding a new child, removing an existing child, or changing an
  existing child to point to a different object)
* the contents of mutable files

* the mapping from child names to child objects inside mutable directories
  (by adding a new child, removing an existing child, or changing an
  existing child to point to a different object)
* the contents of mutable files

Obviously if you query for information about the file store and then act
to change it (such as by getting a listing of the contents of a mutable

@@ -23,8 +23,9 @@ Contents:
frontends/download-status

known_issues
../.github/CONTRIBUTING
contributing
CODE_OF_CONDUCT
release-checklist

servers
helper
@@ -40,23 +40,31 @@ Create Branch and Apply Updates
- Create a branch for release-candidates (e.g. `XXXX.release-1.15.0.rc0`)
- run `tox -e news` to produce a new NEWS.txt file (this does a commit)
- create the news for the release

  - newsfragments/<ticket number>.minor
  - commit it

- manually fix NEWS.txt

  - proper title for latest release ("Release 1.15.0" instead of "Release ...post1432")
  - double-check date (maybe release will be in the future)
  - spot-check the release notes (these come from the newsfragments
    files though so don't do heavy editing)
  - commit these changes

- update "relnotes.txt"

  - update all mentions of 1.14.0 -> 1.15.0
  - update "previous release" statement and date
  - summarize major changes
  - commit it

- update "CREDITS"

  - are there any new contributors in this release?
  - one way: git log release-1.14.0.. | grep Author | sort | uniq
  - commit it

- update "docs/known_issues.rst" if appropriate
- update "docs/INSTALL.rst" references to the new release
- Push the branch to github

@@ -82,25 +90,36 @@ they will need to evaluate which contributors' signatures they trust.

- (all steps above are completed)
- sign the release

  - git tag -s -u 0xE34E62D06D0E69CFCA4179FFBDE0D31D68666A7A -m "release Tahoe-LAFS-1.15.0rc0" tahoe-lafs-1.15.0rc0
  - (replace the key-id above with your own)

- build all code locally
- these should all pass:

  - tox -e py27,codechecks,docs,integration

- these can fail (ideally they should not of course):

  - tox -e deprecations,upcoming-deprecations

- build tarballs

  - tox -e tarballs
  - confirm it at least exists:
  - ls dist/ | grep 1.15.0rc0

- inspect and test the tarballs

  - install each in a fresh virtualenv
  - run `tahoe` command

- when satisfied, sign the tarballs:
  - gpg --pinentry=loopback --armor --sign dist/tahoe_lafs-1.15.0rc0-py2-none-any.whl
  - gpg --pinentry=loopback --armor --sign dist/tahoe_lafs-1.15.0rc0.tar.bz2
  - gpg --pinentry=loopback --armor --sign dist/tahoe_lafs-1.15.0rc0.tar.gz
  - gpg --pinentry=loopback --armor --sign dist/tahoe_lafs-1.15.0rc0.zip

  - gpg --pinentry=loopback --armor --detach-sign dist/tahoe_lafs-1.15.0rc0-py2-none-any.whl
  - gpg --pinentry=loopback --armor --detach-sign dist/tahoe_lafs-1.15.0rc0.tar.bz2
  - gpg --pinentry=loopback --armor --detach-sign dist/tahoe_lafs-1.15.0rc0.tar.gz
  - gpg --pinentry=loopback --armor --detach-sign dist/tahoe_lafs-1.15.0rc0.zip


Privileged Contributor

@@ -129,6 +148,7 @@ need to be uploaded to https://tahoe-lafs.org in `~source/downloads`
https://tahoe-lafs.org/downloads/ on the Web.
- scp dist/*1.15.0* username@tahoe-lafs.org:/home/source/downloads
- the following developers have access to do this:

  - exarkun
  - meejah
  - warner

@@ -137,8 +157,9 @@ For the actual release, the tarball and signature files need to be
uploaded to PyPI as well.

- how to do this?
- (original guide says only "twine upload dist/*")
- (original guide says only `twine upload dist/*`)
- the following developers have access to do this:

  - warner
  - exarkun (partial?)
  - meejah (partial?)
@@ -40,7 +40,8 @@ from util import (
TahoeProcess,
cli,
_run_node,
generate_ssh_key
generate_ssh_key,
block_with_timeout,
)

@@ -156,7 +157,7 @@ def flog_gatherer(reactor, temp_dir, flog_binary, request):
)
print("Waiting for flogtool to complete")
try:
pytest_twisted.blockon(flog_protocol.done)
block_with_timeout(flog_protocol.done, reactor)
except ProcessTerminated as e:
print("flogtool exited unexpectedly: {}".format(str(e)))
print("Flogtool completed")

@@ -297,7 +298,7 @@ log_gatherer.furl = {log_furl}
def cleanup():
try:
transport.signalProcess('TERM')
pytest_twisted.blockon(protocol.exited)
block_with_timeout(protocol.exited, reactor)
except ProcessExitedAlready:
pass
request.addfinalizer(cleanup)

@@ -351,6 +352,9 @@ def alice(reactor, temp_dir, introducer_furl, flog_gatherer, storage_nodes, requ
reactor, request, temp_dir, introducer_furl, flog_gatherer, "alice",
web_port="tcp:9980:interface=localhost",
storage=False,
# We're going to kill this ourselves, so no need for finalizer to
# do it:
finalize=False,
)
)
await_client_ready(process)

@@ -533,7 +537,13 @@ def tor_network(reactor, temp_dir, chutney, request):
path=join(chutney_dir),
env=env,
)
pytest_twisted.blockon(proto.done)
try:
block_with_timeout(proto.done, reactor)
except ProcessTerminated:
# If this doesn't exit cleanly, that's fine, that shouldn't fail
# the test suite.
pass

request.addfinalizer(cleanup)

return chut
@@ -1,6 +1,13 @@
"""
It's possible to create/rename/delete files and directories in Tahoe-LAFS using
SFTP.

These tests use Paramiko, rather than Twisted's Conch, because:

1. It's a different implementation, so we're not testing Conch against
   itself.

2. Its API is much simpler to use.
"""

from __future__ import unicode_literals

@@ -23,7 +30,7 @@ from paramiko.rsakey import RSAKey

import pytest

from .util import generate_ssh_key
from .util import generate_ssh_key, run_in_thread


def connect_sftp(connect_args={"username": "alice", "password": "password"}):

@@ -50,6 +57,7 @@ def connect_sftp(connect_args={"username": "alice", "password": "password"}):
return sftp


@run_in_thread
def test_bad_account_password_ssh_key(alice, tmpdir):
"""
Can't login with unknown username, wrong password, or wrong SSH pub key.

@@ -79,6 +87,7 @@ def test_bad_account_password_ssh_key(alice, tmpdir):
})


@run_in_thread
def test_ssh_key_auth(alice):
"""It's possible to login authenticating with SSH public key."""
key = RSAKey(filename=join(alice.node_dir, "private", "ssh_client_rsa_key"))

@@ -88,20 +97,21 @@ def test_ssh_key_auth(alice):
assert sftp.listdir() == []


@run_in_thread
def test_read_write_files(alice):
"""It's possible to upload and download files."""
sftp = connect_sftp()
f = sftp.file("myfile", "wb")
f.write(b"abc")
f.write(b"def")
f.close()
f = sftp.file("myfile", "rb")
assert f.read(4) == b"abcd"
assert f.read(2) == b"ef"
assert f.read(1) == b""
f.close()
with sftp.file("myfile", "wb") as f:
    f.write(b"abc")
    f.write(b"def")

with sftp.file("myfile", "rb") as f:
    assert f.read(4) == b"abcd"
    assert f.read(2) == b"ef"
    assert f.read(1) == b""


@run_in_thread
def test_directories(alice):
"""
It's possible to create, list directories, and create and remove files in

@@ -135,6 +145,7 @@ def test_directories(alice):
assert sftp.listdir() == []


@run_in_thread
def test_rename(alice):
"""Directories and files can be renamed."""
sftp = connect_sftp()
|
@ -127,12 +127,12 @@ def test_deep_stats(alice):
|
||||
dircap_uri,
|
||||
data={
|
||||
u"t": u"upload",
|
||||
u"when_done": u".",
|
||||
},
|
||||
files={
|
||||
u"file": FILE_CONTENTS,
|
||||
},
|
||||
)
|
||||
resp.raise_for_status()
|
||||
|
||||
# confirm the file is in the directory
|
||||
resp = requests.get(
|
||||
@ -175,6 +175,7 @@ def test_deep_stats(alice):
|
||||
time.sleep(.5)
|
||||
|
||||
|
||||
@util.run_in_thread
|
||||
def test_status(alice):
|
||||
"""
|
||||
confirm we get something sensible from /status and the various sub-types
|
||||
|
@@ -5,7 +5,7 @@ from os import mkdir, environ
from os.path import exists, join
from six.moves import StringIO
from functools import partial
from subprocess import check_output, check_call
from subprocess import check_output

from twisted.python.filepath import (
FilePath,

@@ -13,10 +13,12 @@ from twisted.python.filepath import (
from twisted.internet.defer import Deferred, succeed
from twisted.internet.protocol import ProcessProtocol
from twisted.internet.error import ProcessExitedAlready, ProcessDone
from twisted.internet.threads import deferToThread

import requests

from paramiko.rsakey import RSAKey
from boltons.funcutils import wraps

from allmydata.util.configutil import (
get_config,

@@ -28,6 +30,12 @@ from allmydata import client
import pytest_twisted


def block_with_timeout(deferred, reactor, timeout=120):
    """Block until Deferred has result, but timeout instead of waiting forever."""
    deferred.addTimeout(timeout, reactor)
    return pytest_twisted.blockon(deferred)


class _ProcessExitedProtocol(ProcessProtocol):
"""
Internal helper that .callback()s on self.done when the process

@@ -126,11 +134,12 @@ def _cleanup_tahoe_process(tahoe_transport, exited):

:return: After the process has exited.
"""
from twisted.internet import reactor
try:
print("signaling {} with TERM".format(tahoe_transport.pid))
tahoe_transport.signalProcess('TERM')
print("signaled, blocking on exit")
pytest_twisted.blockon(exited)
block_with_timeout(exited, reactor)
print("exited, goodbye")
except ProcessExitedAlready:
pass

@@ -186,7 +195,7 @@ class TahoeProcess(object):
return "<TahoeProcess in '{}'>".format(self._node_dir)


def _run_node(reactor, node_dir, request, magic_text):
def _run_node(reactor, node_dir, request, magic_text, finalize=True):
"""
Run a tahoe process from its node_dir.

@@ -210,7 +219,8 @@ def _run_node(reactor, node_dir, request, magic_text):
)
transport.exited = protocol.exited

request.addfinalizer(partial(_cleanup_tahoe_process, transport, protocol.exited))
if finalize:
    request.addfinalizer(partial(_cleanup_tahoe_process, transport, protocol.exited))

# XXX abusing the Deferred; should use .when_magic_seen() pattern

@@ -229,7 +239,8 @@ def _create_node(reactor, request, temp_dir, introducer_furl, flog_gatherer, nam
magic_text=None,
needed=2,
happy=3,
total=4):
total=4,
finalize=True):
"""
Helper to create a single node, run it and return the instance
spawnProcess returned (ITransport)

@@ -256,7 +267,7 @@ def _create_node(reactor, request, temp_dir, introducer_furl, flog_gatherer, nam
'--helper',
]
if not storage:
args.append('--no-storage')
args.append('--no-storage')
args.append(node_dir)

_tahoe_runner_optional_coverage(done_proto, reactor, request, args)

@@ -277,7 +288,7 @@ def _create_node(reactor, request, temp_dir, introducer_furl, flog_gatherer, nam
d = Deferred()
d.callback(None)
d.addCallback(lambda _: created_d)
d.addCallback(lambda _: _run_node(reactor, node_dir, request, magic_text))
d.addCallback(lambda _: _run_node(reactor, node_dir, request, magic_text, finalize=finalize))
return d

@@ -516,3 +527,28 @@ def generate_ssh_key(path):
key.write_private_key_file(path)
with open(path + ".pub", "wb") as f:
    f.write(b"%s %s" % (key.get_name(), key.get_base64()))


def run_in_thread(f):
    """Decorator for integration tests that runs code in a thread.

    Because we're using pytest_twisted, tests that rely on the reactor are
    expected to return a Deferred and use async APIs so the reactor can run.

    In the case of the integration test suite, it launches nodes in the
    background using Twisted APIs. The nodes stdout and stderr is read via
    Twisted code. If the reactor doesn't run, reads don't happen, and
    eventually the buffers fill up, and the nodes block when they try to flush
    logs.

    We can switch to Twisted APIs (treq instead of requests etc.), but
    sometimes it's easier or expedient to just have a blocking test. So this
    decorator allows you to run the test in a thread, and the reactor can keep
    running in the main thread.

    See https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3597 for tracking bug.
    """
    @wraps(f)
    def test(*args, **kwargs):
        return deferToThread(lambda: f(*args, **kwargs))
    return test
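As a hedged illustration of how the helpers added above are meant to be used (a hypothetical test, not part of this diff): wrapping a blocking integration test with `run_in_thread` lets it call blocking libraries such as `requests` while the Twisted reactor keeps running in the main thread.

```python
# Hypothetical usage sketch (not from the diff): the test body runs in a
# worker thread via run_in_thread, so the reactor is never starved.
import requests

from .util import run_in_thread


@run_in_thread
def test_welcome_page_blocking(alice):
    # Blocking HTTP call against alice's web port (tcp:9980 per the
    # conftest.py fixture above); safe because the reactor keeps running
    # in the main thread while this body executes in a thread.
    resp = requests.get("http://localhost:9980/")
    assert resp.status_code == 200
```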
3 mypy.ini Normal file
@@ -0,0 +1,3 @@
[mypy]
ignore_missing_imports = True
plugins=mypy_zope:plugin

0 newsfragments/2920.minor Normal file

1 newsfragments/3399.feature Normal file
@@ -0,0 +1 @@
Added 'typechecks' environment for tox running mypy and performing static typechecks.

0 newsfragments/3536.minor Normal file

0 newsfragments/3576.minor Normal file

@@ -1 +1 @@
SFTP public key auth likely works better, and SFTP in general was broken in the prerelease.
SFTP public key auth likely works more consistently, and SFTP in general was previously broken.

0 newsfragments/3589.minor Normal file

1 newsfragments/3590.bugfix Normal file
@@ -0,0 +1 @@
Fixed issue where redirecting old-style URIs (/uri/?uri=...) didn't work.

0 newsfragments/3591.minor Normal file

0 newsfragments/3594.minor Normal file

0 newsfragments/3595.minor Normal file

0 newsfragments/3599.minor Normal file
1 setup.py
@@ -396,6 +396,7 @@ setup(name="tahoe-lafs", # also set in __init__.py
"junitxml",
"tenacity",
"paramiko",
"pytest-timeout",
] + tor_requires + i2p_requires,
"tor": tor_requires,
"i2p": i2p_requires,

@@ -14,7 +14,9 @@ __all__ = [

__version__ = "unknown"
try:
from allmydata._version import __version__
# type ignored as it fails in CI
# (https://app.circleci.com/pipelines/github/tahoe-lafs/tahoe-lafs/1647/workflows/60ae95d4-abe8-492c-8a03-1ad3b9e42ed3/jobs/40972)
from allmydata._version import __version__  # type: ignore
except ImportError:
# We're running in a tree that hasn't run update_version, and didn't
# come with a _version.py, so we don't know what our version is.

@@ -24,7 +26,9 @@ except ImportError:
full_version = "unknown"
branch = "unknown"
try:
from allmydata._version import full_version, branch
# type ignored as it fails in CI
# (https://app.circleci.com/pipelines/github/tahoe-lafs/tahoe-lafs/1647/workflows/60ae95d4-abe8-492c-8a03-1ad3b9e42ed3/jobs/40972)
from allmydata._version import full_version, branch  # type: ignore
except ImportError:
# We're running in a tree that hasn't run update_version, and didn't
# come with a _version.py, so we don't know what our full version or
@@ -57,6 +57,10 @@ class CRSEncoder(object):

return defer.succeed((shares, desired_share_ids))

def encode_proposal(self, data, desired_share_ids=None):
    raise NotImplementedError()


@implementer(ICodecDecoder)
class CRSDecoder(object):

@@ -568,7 +568,7 @@ class DirectoryNode(object):
d = self.get_child_and_metadata(childnamex)
return d

def set_uri(self, namex, writecap, readcap, metadata=None, overwrite=True):
def set_uri(self, namex, writecap, readcap=None, metadata=None, overwrite=True):
precondition(isinstance(writecap, (bytes, type(None))), writecap)
precondition(isinstance(readcap, (bytes, type(None))), readcap)

@@ -1269,7 +1269,8 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin):
(existing_file, userpath, flags, _repr_flags(flags), parent, childname, filenode, metadata),
level=NOISY)

_assert((isinstance(userpath, bytes) and isinstance(childname, (str, type(None)) (metadata is None or 'no-write' in metadata)),
_assert((isinstance(userpath, bytes) and isinstance(childname, (str, type(None))) and
         (metadata is None or 'no-write' in metadata)),
        userpath=userpath, childname=childname, metadata=metadata)

writing = (flags & (FXF_WRITE | FXF_CREAT)) != 0
@@ -19,7 +19,7 @@ from twisted.protocols import basic
from allmydata.interfaces import IImmutableFileNode, ICheckable
from allmydata.uri import LiteralFileURI

@implementer(IImmutableFileNode, ICheckable)

class _ImmutableFileNodeBase(object):

def get_write_uri(self):

@@ -56,6 +56,7 @@ class _ImmutableFileNodeBase(object):
return not self == other


@implementer(IImmutableFileNode, ICheckable)
class LiteralFileNode(_ImmutableFileNodeBase):

def __init__(self, filecap):

@@ -141,7 +141,7 @@ class CHKCheckerAndUEBFetcher(object):


@implementer(interfaces.RICHKUploadHelper)
class CHKUploadHelper(Referenceable, upload.CHKUploader):
class CHKUploadHelper(Referenceable, upload.CHKUploader):  # type: ignore # warner/foolscap#78
"""I am the helper-server -side counterpart to AssistedUploader. I handle
peer selection, encoding, and share pushing. I read ciphertext from the
remote AssistedUploader.

@@ -499,10 +499,13 @@ class LocalCiphertextReader(AskUntilSuccessMixin):
# ??. I'm not sure if it makes sense to forward the close message.
return self.call("close")

# https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3561
def set_upload_status(self, upload_status):
    raise NotImplementedError


@implementer(interfaces.RIHelper, interfaces.IStatsProducer)
class Helper(Referenceable):
class Helper(Referenceable):  # type: ignore # warner/foolscap#78
"""
:ivar dict[bytes, CHKUploadHelper] _active_uploads: For any uploads which
have been started but not finished, a mapping from storage index to the
@@ -13,19 +13,30 @@ if PY2:
from past.builtins import long, unicode
from six import ensure_str

try:
    from typing import List
except ImportError:
    pass

import os, time, weakref, itertools

import attr

from zope.interface import implementer
from twisted.python import failure
from twisted.internet import defer
from twisted.application import service
from foolscap.api import Referenceable, Copyable, RemoteCopy, fireEventually
from foolscap.api import Referenceable, Copyable, RemoteCopy

from allmydata.crypto import aes
from allmydata.util.hashutil import file_renewal_secret_hash, \
file_cancel_secret_hash, bucket_renewal_secret_hash, \
bucket_cancel_secret_hash, plaintext_hasher, \
storage_index_hash, plaintext_segment_hasher, convergence_hasher
from allmydata.util.deferredutil import timeout_call
from allmydata.util.deferredutil import (
    timeout_call,
    until,
)
from allmydata import hashtree, uri
from allmydata.storage.server import si_b2a
from allmydata.immutable import encode

@@ -386,6 +397,9 @@ class PeerSelector(object):
)
return self.happiness_mappings

def add_peers(self, peerids=None):
    raise NotImplementedError


class _QueryStatistics(object):

@@ -897,13 +911,45 @@ class Tahoe2ServerSelector(log.PrefixingLogMixin):
raise UploadUnhappinessError(msg)


@attr.s
class _Accum(object):
    """
    Accumulate up to some known amount of ciphertext.

    :ivar remaining: The number of bytes still expected.
    :ivar ciphertext: The bytes accumulated so far.
    """
    remaining = attr.ib(validator=attr.validators.instance_of(int))  # type: int
    ciphertext = attr.ib(default=attr.Factory(list))  # type: List[bytes]

    def extend(self,
               size,        # type: int
               ciphertext,  # type: List[bytes]
    ):
        """
        Accumulate some more ciphertext.

        :param size: The amount of data the new ciphertext represents towards
            the goal.  This may be more than the actual size of the given
            ciphertext if the source has run out of data.

        :param ciphertext: The new ciphertext to accumulate.
        """
        self.remaining -= size
        self.ciphertext.extend(ciphertext)


@implementer(IEncryptedUploadable)
class EncryptAnUploadable(object):
"""This is a wrapper that takes an IUploadable and provides
IEncryptedUploadable."""
CHUNKSIZE = 50*1024

def __init__(self, original, log_parent=None, progress=None):
def __init__(self, original, log_parent=None, progress=None, chunk_size=None):
    """
    :param chunk_size: The number of bytes to read from the uploadable at a
        time, or None for some default.
    """
precondition(original.default_params_set,
             "set_default_encoding_parameters not called on %r before wrapping with EncryptAnUploadable" % (original,))
self.original = IUploadable(original)

@@ -917,6 +963,8 @@ class EncryptAnUploadable(object):
self._ciphertext_bytes_read = 0
self._status = None
self._progress = progress
if chunk_size is not None:
    self.CHUNKSIZE = chunk_size

def set_upload_status(self, upload_status):
self._status = IUploadStatus(upload_status)

@@ -1023,47 +1071,53 @@ class EncryptAnUploadable(object):
# and size
d.addCallback(lambda ignored: self.get_size())
d.addCallback(lambda ignored: self._get_encryptor())
# then fetch and encrypt the plaintext. The unusual structure here
# (passing a Deferred *into* a function) is needed to avoid
# overflowing the stack: Deferreds don't optimize out tail recursion.
# We also pass in a list, to which _read_encrypted will append
# ciphertext.
ciphertext = []
d2 = defer.Deferred()
d.addCallback(lambda ignored:
              self._read_encrypted(length, ciphertext, hash_only, d2))
d.addCallback(lambda ignored: d2)

accum = _Accum(length)

def action():
    """
    Read some bytes into the accumulator.
    """
    return self._read_encrypted(accum, hash_only)

def condition():
    """
    Check to see if the accumulator has all the data.
    """
    return accum.remaining == 0

d.addCallback(lambda ignored: until(action, condition))
d.addCallback(lambda ignored: accum.ciphertext)
return d

def _read_encrypted(self, remaining, ciphertext, hash_only, fire_when_done):
if not remaining:
    fire_when_done.callback(ciphertext)
    return None
def _read_encrypted(self,
                    ciphertext_accum,  # type: _Accum
                    hash_only,         # type: bool
):
    # type: (...) -> defer.Deferred
    """
    Read the next chunk of plaintext, encrypt it, and extend the accumulator
    with the resulting ciphertext.
    """
# tolerate large length= values without consuming a lot of RAM by
# reading just a chunk (say 50kB) at a time. This only really matters
# when hash_only==True (i.e. resuming an interrupted upload), since
# that's the case where we will be skipping over a lot of data.
size = min(remaining, self.CHUNKSIZE)
remaining = remaining - size
size = min(ciphertext_accum.remaining, self.CHUNKSIZE)

# read a chunk of plaintext..
d = defer.maybeDeferred(self.original.read, size)
# N.B.: if read() is synchronous, then since everything else is
# actually synchronous too, we'd blow the stack unless we stall for a
# tick. Once you accept a Deferred from IUploadable.read(), you must
# be prepared to have it fire immediately too.
d.addCallback(fireEventually)
def _good(plaintext):
    # and encrypt it..
    # o/' over the fields we go, hashing all the way, sHA! sHA! sHA! o/'
    ct = self._hash_and_encrypt_plaintext(plaintext, hash_only)
    ciphertext.extend(ct)
    self._read_encrypted(remaining, ciphertext, hash_only,
                         fire_when_done)
def _err(why):
    fire_when_done.errback(why)
    # Intentionally tell the accumulator about the expected size, not
    # the actual size. If we run out of data we still want remaining
    # to drop otherwise it will never reach 0 and the loop will never
    # end.
    ciphertext_accum.extend(size, ct)
d.addCallback(_good)
d.addErrback(_err)
return None
return d

def _hash_and_encrypt_plaintext(self, data, hash_only):
assert isinstance(data, (tuple, list)), type(data)

@@ -1424,7 +1478,7 @@ class LiteralUploader(object):
return self._status

@implementer(RIEncryptedUploadable)
class RemoteEncryptedUploadable(Referenceable):
class RemoteEncryptedUploadable(Referenceable):  # type: ignore # warner/foolscap#78

def __init__(self, encrypted_uploadable, upload_status):
self._eu = IEncryptedUploadable(encrypted_uploadable)
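The comments in the hunk above describe replacing Deferred tail-recursion (passing a Deferred into `_read_encrypted`) with an `_Accum` accumulator driven by `deferredutil.until`. A minimal standalone sketch of that looping pattern follows; the names `read_chunk` and `read_everything` are invented for illustration and do not appear in the diff:

```python
# Illustrative sketch only: loop an action that returns a Deferred until a
# condition holds, instead of recursing through Deferred callbacks.
from twisted.internet import defer

from allmydata.util.deferredutil import until


def read_everything(read_chunk, total_bytes, chunk_size=50 * 1024):
    """read_chunk(n) -> Deferred[list of bytes]; loop until total_bytes consumed."""
    state = {"remaining": total_bytes, "chunks": []}

    def action():
        size = min(state["remaining"], chunk_size)
        d = defer.maybeDeferred(read_chunk, size)

        def accumulate(data):
            state["chunks"].extend(data)
            # Count the requested size, not len(data), so the loop still
            # terminates if the source runs out of data early.
            state["remaining"] -= size
        d.addCallback(accumulate)
        return d

    def condition():
        return state["remaining"] <= 0

    d = until(action, condition)
    d.addCallback(lambda _: state["chunks"])
    return d
```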
@@ -681,7 +681,7 @@ class IURI(Interface):
passing into init_from_string."""


class IVerifierURI(Interface, IURI):
class IVerifierURI(IURI):
def init_from_string(uri):
"""Accept a string (as created by my to_string() method) and populate
this instance with its data. I am not normally called directly,

@@ -748,7 +748,7 @@ class IProgress(Interface):
"Current amount of progress (in percentage)"
)

def set_progress(self, value):
def set_progress(value):
"""
Sets the current amount of progress.

@@ -756,7 +756,7 @@ class IProgress(Interface):
set_progress_total.
"""

def set_progress_total(self, value):
def set_progress_total(value):
"""
Sets the total amount of expected progress

@@ -859,12 +859,6 @@ class IPeerSelector(Interface):
peer selection begins.
"""

def confirm_share_allocation(peerid, shnum):
"""
Confirm that an allocated peer=>share pairing has been
successfully established.
"""

def add_peers(peerids=set):
"""
Update my internal state to include the peers in peerids as

@@ -1824,11 +1818,6 @@ class IEncoder(Interface):
willing to receive data.
"""

def set_size(size):
"""Specify the number of bytes that will be encoded. This must be
peformed before get_serialized_params() can be called.
"""

def set_encrypted_uploadable(u):
"""Provide a source of encrypted upload data. 'u' must implement
IEncryptedUploadable.
@@ -178,9 +178,9 @@ class IntroducerClient(service.Service, Referenceable):
kwargs["facility"] = "tahoe.introducer.client"
return log.msg(*args, **kwargs)

def subscribe_to(self, service_name, cb, *args, **kwargs):
def subscribe_to(self, service_name, callback, *args, **kwargs):
obs = self._local_subscribers.setdefault(service_name, ObserverList())
obs.subscribe(lambda key_s, ann: cb(key_s, ann, *args, **kwargs))
obs.subscribe(lambda key_s, ann: callback(key_s, ann, *args, **kwargs))
self._maybe_subscribe()
for index,(ann,key_s,when) in list(self._inbound_announcements.items()):
precondition(isinstance(key_s, bytes), key_s)

@@ -73,7 +73,7 @@ class IIntroducerClient(Interface):
publish their services to the rest of the world, and I help them learn
about services available on other nodes."""

def publish(service_name, ann, signing_key=None):
def publish(service_name, ann, signing_key):
"""Publish the given announcement dictionary (which must be
JSON-serializable), plus some additional keys, to the world.

@@ -83,8 +83,7 @@ class IIntroducerClient(Interface):
the signing_key, if present, otherwise it is derived from the
'anonymous-storage-FURL' key.

If signing_key= is set to an instance of SigningKey, it will be
used to sign the announcement."""
signing_key (a SigningKey) will be used to sign the announcement."""

def subscribe_to(service_name, callback, *args, **kwargs):
"""Call this if you will eventually want to use services with the
@@ -15,6 +15,12 @@ from past.builtins import long
from six import ensure_text

import time, os.path, textwrap

try:
    from typing import Any, Dict, Union
except ImportError:
    pass

from zope.interface import implementer
from twisted.application import service
from twisted.internet import defer

@@ -147,10 +153,12 @@ class IntroducerService(service.MultiService, Referenceable):
name = "introducer"
# v1 is the original protocol, added in 1.0 (but only advertised starting
# in 1.3), removed in 1.12. v2 is the new signed protocol, added in 1.10
VERSION = { #"http://allmydata.org/tahoe/protocols/introducer/v1": { },
# TODO: reconcile bytes/str for keys
VERSION = {
    #"http://allmydata.org/tahoe/protocols/introducer/v1": { },
    b"http://allmydata.org/tahoe/protocols/introducer/v2": { },
    b"application-version": allmydata.__full_version__.encode("utf-8"),
    }
    }  # type: Dict[Union[bytes, str], Any]

def __init__(self):
service.MultiService.__init__(self)
@@ -564,7 +564,7 @@ class MutableFileNode(object):
return d


def upload(self, new_contents, servermap):
def upload(self, new_contents, servermap, progress=None):
"""
I overwrite the contents of the best recoverable version of this
mutable file with new_contents, using servermap instead of

@@ -951,7 +951,7 @@ class MutableFileVersion(object):
return self._servermap.size_of_version(self._version)


def download_to_data(self, fetch_privkey=False, progress=None):
def download_to_data(self, fetch_privkey=False, progress=None):  # type: ignore # fixme
"""
I return a Deferred that fires with the contents of this
readable object as a byte string.

@@ -1205,3 +1205,7 @@ class MutableFileVersion(object):
self._servermap,
mode=mode)
return u.update()

# https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3562
def get_servermap(self):
    raise NotImplementedError
@@ -23,6 +23,11 @@ from base64 import b32decode, b32encode
from errno import ENOENT, EPERM
from warnings import warn

try:
    from typing import Union
except ImportError:
    pass

import attr

# On Python 2 this will be the backported package.

@@ -273,6 +278,11 @@ def _error_about_old_config_files(basedir, generated_files):
raise e


def ensure_text_and_abspath_expanduser_unicode(basedir):
    # type: (Union[bytes, str]) -> str
    return abspath_expanduser_unicode(ensure_text(basedir))


@attr.s
class _Config(object):
"""

@@ -300,8 +310,8 @@ class _Config(object):
config = attr.ib(validator=attr.validators.instance_of(configparser.ConfigParser))
portnum_fname = attr.ib()
_basedir = attr.ib(
    converter=lambda basedir: abspath_expanduser_unicode(ensure_text(basedir)),
)
    converter=ensure_text_and_abspath_expanduser_unicode,
)  # type: str
config_path = attr.ib(
    validator=attr.validators.optional(
        attr.validators.instance_of(FilePath),

@@ -927,7 +937,6 @@ class Node(service.MultiService):
"""
NODETYPE = "unknown NODETYPE"
CERTFILE = "node.pem"
GENERATED_FILES = []

def __init__(self, config, main_tub, control_tub, i2p_provider, tor_provider):
"""
@@ -1,5 +1,10 @@
from __future__ import print_function

try:
    from allmydata.scripts.types_ import SubCommands
except ImportError:
    pass

from twisted.python import usage
from allmydata.scripts.common import BaseOptions

@@ -79,8 +84,8 @@ def do_admin(options):


subCommands = [
["admin", None, AdminCommand, "admin subcommands: use 'tahoe admin' for a list"],
]
("admin", None, AdminCommand, "admin subcommands: use 'tahoe admin' for a list"),
]  # type: SubCommands

dispatch = {
"admin": do_admin,
@@ -1,6 +1,12 @@
from __future__ import print_function

import os.path, re, fnmatch

try:
    from allmydata.scripts.types_ import SubCommands, Parameters
except ImportError:
    pass

from twisted.python import usage
from allmydata.scripts.common import get_aliases, get_default_nodedir, \
     DEFAULT_ALIAS, BaseOptions

@@ -19,7 +25,7 @@ class FileStoreOptions(BaseOptions):
 "This overrides the URL found in the --node-directory ."],
["dir-cap", None, None,
 "Specify which dirnode URI should be used as the 'tahoe' alias."]
]
]  # type: Parameters

def postOptions(self):
self["quiet"] = self.parent["quiet"]

@@ -455,25 +461,25 @@ class DeepCheckOptions(FileStoreOptions):
Optionally repair any problems found."""

subCommands = [
["mkdir", None, MakeDirectoryOptions, "Create a new directory."],
["add-alias", None, AddAliasOptions, "Add a new alias cap."],
["create-alias", None, CreateAliasOptions, "Create a new alias cap."],
["list-aliases", None, ListAliasesOptions, "List all alias caps."],
["ls", None, ListOptions, "List a directory."],
["get", None, GetOptions, "Retrieve a file from the grid."],
["put", None, PutOptions, "Upload a file into the grid."],
["cp", None, CpOptions, "Copy one or more files or directories."],
["unlink", None, UnlinkOptions, "Unlink a file or directory on the grid."],
["mv", None, MvOptions, "Move a file within the grid."],
["ln", None, LnOptions, "Make an additional link to an existing file or directory."],
["backup", None, BackupOptions, "Make target dir look like local dir."],
["webopen", None, WebopenOptions, "Open a web browser to a grid file or directory."],
["manifest", None, ManifestOptions, "List all files/directories in a subtree."],
["stats", None, StatsOptions, "Print statistics about all files/directories in a subtree."],
["check", None, CheckOptions, "Check a single file or directory."],
["deep-check", None, DeepCheckOptions, "Check all files/directories reachable from a starting point."],
["status", None, TahoeStatusCommand, "Various status information."],
]
("mkdir", None, MakeDirectoryOptions, "Create a new directory."),
("add-alias", None, AddAliasOptions, "Add a new alias cap."),
("create-alias", None, CreateAliasOptions, "Create a new alias cap."),
("list-aliases", None, ListAliasesOptions, "List all alias caps."),
("ls", None, ListOptions, "List a directory."),
("get", None, GetOptions, "Retrieve a file from the grid."),
("put", None, PutOptions, "Upload a file into the grid."),
("cp", None, CpOptions, "Copy one or more files or directories."),
("unlink", None, UnlinkOptions, "Unlink a file or directory on the grid."),
("mv", None, MvOptions, "Move a file within the grid."),
("ln", None, LnOptions, "Make an additional link to an existing file or directory."),
("backup", None, BackupOptions, "Make target dir look like local dir."),
("webopen", None, WebopenOptions, "Open a web browser to a grid file or directory."),
("manifest", None, ManifestOptions, "List all files/directories in a subtree."),
("stats", None, StatsOptions, "Print statistics about all files/directories in a subtree."),
("check", None, CheckOptions, "Check a single file or directory."),
("deep-check", None, DeepCheckOptions, "Check all files/directories reachable from a starting point."),
("status", None, TahoeStatusCommand, "Various status information."),
]  # type: SubCommands

def mkdir(options):
from allmydata.scripts import tahoe_mkdir
@@ -4,6 +4,12 @@ import os, sys, urllib, textwrap
import codecs
from os.path import join

try:
    from typing import Optional
    from .types_ import Parameters
except ImportError:
    pass

from yaml import (
    safe_dump,
)

@@ -41,8 +47,8 @@ class BaseOptions(usage.Options):
def opt_version(self):
raise usage.UsageError("--version not allowed on subcommands")

description = None
description_unwrapped = None
description = None  # type: Optional[str]
description_unwrapped = None  # type: Optional[str]

def __str__(self):
width = int(os.environ.get('COLUMNS', '80'))

@@ -65,7 +71,7 @@ class BasedirOptions(BaseOptions):
optParameters = [
    ["basedir", "C", None, "Specify which Tahoe base directory should be used. [default: %s]"
     % quote_local_unicode_path(_default_nodedir)],
]
]  # type: Parameters

def parseArgs(self, basedir=None):
# This finds the node-directory option correctly even if we are in a subcommand.

@@ -102,7 +108,7 @@ class NoDefaultBasedirOptions(BasedirOptions):

optParameters = [
    ["basedir", "C", None, "Specify which Tahoe base directory should be used."],
]
]  # type: Parameters

# This is overridden in order to ensure we get a "Wrong number of arguments."
# error when more than one argument is given.
@@ -3,6 +3,11 @@ from __future__ import print_function
import os
import json

try:
    from allmydata.scripts.types_ import SubCommands
except ImportError:
    pass

from twisted.internet import reactor, defer
from twisted.python.usage import UsageError
from twisted.python.filepath import (

@@ -492,10 +497,10 @@ def create_introducer(config):


subCommands = [
["create-node", None, CreateNodeOptions, "Create a node that acts as a client, server or both."],
["create-client", None, CreateClientOptions, "Create a client node (with storage initially disabled)."],
["create-introducer", None, CreateIntroducerOptions, "Create an introducer node."],
]
("create-node", None, CreateNodeOptions, "Create a node that acts as a client, server or both."),
("create-client", None, CreateClientOptions, "Create a client node (with storage initially disabled)."),
("create-introducer", None, CreateIntroducerOptions, "Create an introducer node."),
]  # type: SubCommands

dispatch = {
"create-node": create_node,

@@ -1,5 +1,10 @@
from __future__ import print_function

try:
    from allmydata.scripts.types_ import SubCommands
except ImportError:
    pass

from future.utils import bchr

# do not import any allmydata modules at this level. Do that from inside

@@ -1053,8 +1058,8 @@ def do_debug(options):


subCommands = [
["debug", None, DebugCommand, "debug subcommands: use 'tahoe debug' for a list."],
]
("debug", None, DebugCommand, "debug subcommands: use 'tahoe debug' for a list."),
]  # type: SubCommands

dispatch = {
"debug": do_debug,
@@ -4,6 +4,11 @@ import os, sys
from six.moves import StringIO
import six

try:
    from allmydata.scripts.types_ import SubCommands
except ImportError:
    pass

from twisted.python import usage
from twisted.internet import defer, task, threads

@@ -40,8 +45,8 @@ _control_node_dispatch = {
}

process_control_commands = [
["run", None, tahoe_run.RunOptions, "run a node without daemonizing"],
]
("run", None, tahoe_run.RunOptions, "run a node without daemonizing"),
]  # type: SubCommands


class Options(usage.Options):

@@ -98,7 +103,7 @@ class Options(usage.Options):

create_dispatch = {}
for module in (create_node,):
    create_dispatch.update(module.dispatch)
    create_dispatch.update(module.dispatch)  # type: ignore

def parse_options(argv, config=None):
if not config:

@@ -2,6 +2,11 @@ from __future__ import print_function

import json

try:
    from allmydata.scripts.types_ import SubCommands
except ImportError:
    pass

from twisted.python import usage
from twisted.internet import defer, reactor

@@ -103,7 +108,7 @@ def invite(options):
subCommands = [
    ("invite", None, InviteOptions,
     "Invite a new node to this grid"),
]
]  # type: SubCommands

dispatch = {
"invite": invite,
12 src/allmydata/scripts/types_.py Normal file
@@ -0,0 +1,12 @@
from typing import List, Tuple, Type, Sequence, Any
from allmydata.scripts.common import BaseOptions


# Historically, subcommands were implemented as lists, but due to a
# [designed contraint in mypy](https://stackoverflow.com/a/52559625/70170),
# a Tuple is required.
SubCommand = Tuple[str, None, Type[BaseOptions], str]

SubCommands = List[SubCommand]

Parameters = List[Sequence[Any]]
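To show the pattern these aliases enable, here is a condensed, hypothetical subcommand module written against the new `types_` helpers; the module and `ExampleOptions` are invented for illustration and are not part of the diff:

```python
# Hypothetical module illustrating the SubCommands/type-comment pattern
# used throughout the scripts/ changes above.
from __future__ import print_function

try:
    from allmydata.scripts.types_ import SubCommands
except ImportError:
    pass  # typing support is optional at runtime (e.g. Python 2 without 'typing')

from allmydata.scripts.common import BaseOptions


class ExampleOptions(BaseOptions):
    """Options for a made-up 'example' subcommand."""


subCommands = [
    # Tuples (not lists) so mypy can check each entry against SubCommand.
    ("example", None, ExampleOptions, "An illustrative subcommand."),
]  # type: SubCommands
```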
@@ -23,7 +23,7 @@ from allmydata.interfaces import IStatsProducer
@implementer(IStatsProducer)
class CPUUsageMonitor(service.MultiService):
HISTORY_LENGTH = 15
POLL_INTERVAL = 60
POLL_INTERVAL = 60  # type: float

def __init__(self):
service.MultiService.__init__(self)

@@ -19,7 +19,7 @@ import os, time, struct
try:
    import cPickle as pickle
except ImportError:
    import pickle
    import pickle  # type: ignore
from twisted.internet import reactor
from twisted.application import service
from allmydata.storage.common import si_b2a

@@ -202,7 +202,7 @@ class ShareFile(object):


@implementer(RIBucketWriter)
class BucketWriter(Referenceable):
class BucketWriter(Referenceable):  # type: ignore # warner/foolscap#78

def __init__(self, ss, incominghome, finalhome, max_size, lease_info, canary):
self.ss = ss

@@ -301,7 +301,7 @@ class BucketWriter(Referenceable):


@implementer(RIBucketReader)
class BucketReader(Referenceable):
class BucketReader(Referenceable):  # type: ignore # warner/foolscap#78

def __init__(self, ss, sharefname, storage_index=None, shnum=None):
self.ss = ss

@@ -581,7 +581,7 @@ class StorageServer(service.MultiService, Referenceable):
for share in six.viewvalues(shares):
    share.add_or_renew_lease(lease_info)

def slot_testv_and_readv_and_writev(
def slot_testv_and_readv_and_writev(  # type: ignore # warner/foolscap#78
        self,
        storage_index,
        secrets,
@@ -37,6 +37,11 @@ a mean of 10kB and a max of 100MB, so filesize=min(int(1.0/random(.0002)),1e8)
import os, sys, httplib, binascii
import urllib, json, random, time, urlparse

try:
    from typing import Dict
except ImportError:
    pass

# Python 2 compatibility
from future.utils import PY2
if PY2:

@@ -49,13 +54,13 @@ if sys.argv[1] == "--stats":
DELAY = 10
MAXSAMPLES = 6
totals = []
last_stats = {}
last_stats = {}  # type: Dict[str, float]
while True:
    stats = {}
    stats = {}  # type: Dict[str, float]
    for sf in statsfiles:
        for line in open(sf, "r").readlines():
            name, value = line.split(":")
            value = int(value.strip())
            name, str_value = line.split(":")
            value = int(str_value.strip())
            if name not in stats:
                stats[name] = 0
            stats[name] += float(value)

@@ -508,13 +508,13 @@ if __name__ == '__main__':
mode = "upload"
if len(sys.argv) > 1:
    mode = sys.argv[1]
if sys.maxint == 2147483647:
if sys.maxsize == 2147483647:
    bits = "32"
elif sys.maxint == 9223372036854775807:
elif sys.maxsize == 9223372036854775807:
    bits = "64"
else:
    bits = "?"
print("%s-bit system (sys.maxint=%d)" % (bits, sys.maxint))
print("%s-bit system (sys.maxsize=%d)" % (bits, sys.maxsize))
# put the logfile and stats.out in _test_memory/ . These stick around.
# put the nodes and other files in _test_memory/test/ . These are
# removed each time we run.
@@ -406,7 +406,7 @@ class DummyProducer(object):
pass

@implementer(IImmutableFileNode)
class FakeCHKFileNode(object):
class FakeCHKFileNode(object):  # type: ignore # incomplete implementation
"""I provide IImmutableFileNode, but all of my data is stored in a
class-level dictionary."""

@@ -544,7 +544,7 @@ def create_chk_filenode(contents, all_contents):


@implementer(IMutableFileNode, ICheckable)
class FakeMutableFileNode(object):
class FakeMutableFileNode(object):  # type: ignore # incomplete implementation
"""I provide IMutableFileNode, but all of my data is stored in a
class-level dictionary."""


@@ -68,7 +68,7 @@ class Marker(object):

fireNow = partial(defer.succeed, None)

@implementer(IRemoteReference)
@implementer(IRemoteReference)  # type: ignore # warner/foolscap#79
class LocalWrapper(object):
"""
A ``LocalWrapper`` presents the remote reference interface to a local

@@ -213,9 +213,12 @@ class NoNetworkServer(object):
    return _StorageServer(lambda: self.rref)
def get_version(self):
    return self.rref.version
def start_connecting(self, trigger_cb):
    raise NotImplementedError


@implementer(IStorageBroker)
class NoNetworkStorageBroker(object):
class NoNetworkStorageBroker(object):  # type: ignore # missing many methods
def get_servers_for_psi(self, peer_selection_index):
    def _permuted(server):
        seed = server.get_permutation_seed()

@@ -259,7 +262,7 @@ def create_no_network_client(basedir):
return defer.succeed(client)


class _NoNetworkClient(_Client):
class _NoNetworkClient(_Client):  # type: ignore # tahoe-lafs/ticket/3573
"""
Overrides all _Client networking functionality to do nothing.
"""
@@ -47,8 +47,9 @@ class RIDummy(RemoteInterface):
"""


@implementer(IFoolscapStoragePlugin)
# type ignored due to missing stubs for Twisted
# https://twistedmatrix.com/trac/ticket/9717
@implementer(IFoolscapStoragePlugin)  # type: ignore
@attr.s
class DummyStorage(object):
name = attr.ib()

@@ -107,7 +108,7 @@ class GetCounter(Resource, object):

@implementer(RIDummy)
@attr.s(frozen=True)
class DummyStorageServer(object):
class DummyStorageServer(object):  # type: ignore # warner/foolscap#78
get_anonymous_storage_server = attr.ib()

def remote_just_some_method(self):

@@ -116,7 +117,7 @@ class DummyStorageServer(object):

@implementer(IStorageServer)
@attr.s
class DummyStorageClient(object):
class DummyStorageClient(object):  # type: ignore # incomplete implementation
get_rref = attr.ib()
configuration = attr.ib()
announcement = attr.ib()

@@ -62,7 +62,7 @@ class FakeClient(object):


@implementer(IServer)
class FakeServer(object):
class FakeServer(object):  # type: ignore # incomplete implementation

def get_name(self):
    return "fake name"

@@ -75,7 +75,7 @@ class FakeServer(object):


@implementer(ICheckResults)
class FakeCheckResults(object):
class FakeCheckResults(object):  # type: ignore # incomplete implementation

def __init__(self, si=None,
             healthy=False, recoverable=False,

@@ -106,7 +106,7 @@ class FakeCheckResults(object):


@implementer(ICheckAndRepairResults)
class FakeCheckAndRepairResults(object):
class FakeCheckAndRepairResults(object):  # type: ignore # incomplete implementation

def __init__(self, si=None,
             repair_attempted=False,
@@ -74,3 +74,58 @@ class DeferredUtilTests(unittest.TestCase, deferredutil.WaitForDelayedCallsMixin
d = defer.succeed(None)
d.addBoth(self.wait_for_delayed_calls)
return d

class UntilTests(unittest.TestCase):
"""
Tests for ``deferredutil.until``.
"""
def test_exception(self):
"""
If the action raises an exception, the ``Deferred`` returned by ``until``
fires with a ``Failure``.
"""
self.assertFailure(
deferredutil.until(lambda: 1/0, lambda: True),
ZeroDivisionError,
)

def test_stops_on_condition(self):
"""
The action is called repeatedly until ``condition`` returns ``True``.
"""
calls = []
def action():
calls.append(None)

def condition():
return len(calls) == 3

self.assertIs(
self.successResultOf(
deferredutil.until(action, condition),
),
None,
)
self.assertEqual(3, len(calls))

def test_waits_for_deferred(self):
"""
If the action returns a ``Deferred`` then it is called again when the
``Deferred`` fires.
"""
counter = [0]
r1 = defer.Deferred()
r2 = defer.Deferred()
results = [r1, r2]
def action():
counter[0] += 1
return results.pop(0)

def condition():
return False

deferredutil.until(action, condition)
self.assertEqual([1], counter)
r1.callback(None)
self.assertEqual([2], counter)
@@ -1561,7 +1561,7 @@ class Packing(testutil.ReallyEqualMixin, unittest.TestCase):
kids, fn.get_writekey(), deep_immutable=True)

@implementer(IMutableFileNode)
class FakeMutableFile(object):
class FakeMutableFile(object): # type: ignore # incomplete implementation
counter = 0
def __init__(self, initial_contents=b""):
data = self._get_initial_contents(initial_contents)
@@ -1622,7 +1622,7 @@ class FakeNodeMaker(NodeMaker):
def create_mutable_file(self, contents=b"", keysize=None, version=None):
return defer.succeed(FakeMutableFile(contents))

class FakeClient2(_Client):
class FakeClient2(_Client): # type: ignore # tahoe-lafs/ticket/3573
def __init__(self):
self.nodemaker = FakeNodeMaker(None, None, None,
None, None,
@@ -19,6 +19,12 @@ from functools import (
)
import attr

try:
from typing import List
from allmydata.introducer.client import IntroducerClient
except ImportError:
pass

from twisted.internet import defer
from twisted.trial import unittest
from twisted.application import service
@@ -125,7 +131,7 @@ class FakeCHKCheckerAndUEBFetcher(object):
))

class FakeClient(service.MultiService):
introducer_clients = []
introducer_clients = [] # type: List[IntroducerClient]
DEFAULT_ENCODING_PARAMETERS = {"k":25,
"happy": 75,
"n": 100,
@@ -564,7 +564,7 @@ class TestMissingPorts(unittest.TestCase):
config = config_from_string(self.basedir, "portnum", config_data)
with self.assertRaises(PortAssignmentRequired):
_tub_portlocation(config, None, None)
test_listen_on_zero_with_host.todo = native_str(
test_listen_on_zero_with_host.todo = native_str( # type: ignore
"https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3563"
)
@@ -16,6 +16,7 @@ from testtools.matchers import (
BLACKLIST = {
"allmydata.test.check_load",
"allmydata.windows.registry",
"allmydata.scripts.types_",
}

@@ -44,7 +44,7 @@ class Python3PortingEffortTests(SynchronousTestCase):
),
),
)
test_finished_porting.todo = native_str(
test_finished_porting.todo = native_str( # type: ignore
"https://tahoe-lafs.org/trac/tahoe-lafs/milestone/Support%20Python%203 should be completed",
)
@@ -19,18 +19,15 @@ from twisted.python.failure import Failure
from twisted.internet.error import ProcessDone, ProcessTerminated
from allmydata.util import deferredutil

conch_interfaces = None
sftp = None
sftpd = None

try:
from twisted.conch import interfaces as conch_interfaces
from twisted.conch.ssh import filetransfer as sftp
from allmydata.frontends import sftpd
except ImportError as e:
conch_interfaces = sftp = sftpd = None # type: ignore
conch_unavailable_reason = e
else:
conch_unavailable_reason = None
conch_unavailable_reason = None # type: ignore

from allmydata.interfaces import IDirectoryNode, ExistingChildError, NoSuchChildError
from allmydata.mutable.common import NotWriteableError
@@ -105,7 +105,8 @@ from allmydata.interfaces import (

SOME_FURL = "pb://abcde@nowhere/fake"

class NativeStorageServerWithVersion(NativeStorageServer):

class NativeStorageServerWithVersion(NativeStorageServer): # type: ignore # tahoe-lafs/ticket/3573
def __init__(self, version):
# note: these instances won't work for anything other than
# get_available_space() because we don't upcall
@@ -569,7 +570,7 @@ class SpyEndpoint(object):
return d

@implementer(IConnectionHintHandler)
@implementer(IConnectionHintHandler) # type: ignore # warner/foolscap#78
@attr.s
class SpyHandler(object):
"""
@@ -14,6 +14,17 @@ if PY2:

import os, shutil
from io import BytesIO
from base64 import (
b64encode,
)

from hypothesis import (
given,
)
from hypothesis.strategies import (
just,
integers,
)

from twisted.trial import unittest
from twisted.python.failure import Failure
@@ -2029,6 +2040,91 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
f.close()
return None

class EncryptAnUploadableTests(unittest.TestCase):
"""
Tests for ``EncryptAnUploadable``.
"""
def test_same_length(self):
"""
``EncryptAnUploadable.read_encrypted`` returns ciphertext of the same
length as the underlying plaintext.
"""
plaintext = b"hello world"
uploadable = upload.FileHandle(BytesIO(plaintext), None)
uploadable.set_default_encoding_parameters({
# These values shouldn't matter.
"k": 3,
"happy": 5,
"n": 10,
"max_segment_size": 128 * 1024,
})
encrypter = upload.EncryptAnUploadable(uploadable)
ciphertext = b"".join(self.successResultOf(encrypter.read_encrypted(1024, False)))
self.assertEqual(len(ciphertext), len(plaintext))
@given(just(b"hello world"), integers(min_value=0, max_value=len(b"hello world")))
def test_known_result(self, plaintext, split_at):
"""
``EncryptAnUploadable.read_encrypted`` returns a known-correct ciphertext
string for certain inputs. The ciphertext is independent of the read
sizes.
"""
convergence = b"\x42" * 16
uploadable = upload.FileHandle(BytesIO(plaintext), convergence)
uploadable.set_default_encoding_parameters({
# The convergence key is a function of k, n, and max_segment_size
# (among other things). The value for happy doesn't matter
# though.
"k": 3,
"happy": 5,
"n": 10,
"max_segment_size": 128 * 1024,
})
encrypter = upload.EncryptAnUploadable(uploadable)
def read(n):
return b"".join(self.successResultOf(encrypter.read_encrypted(n, False)))

# Read the string in one or two pieces to make sure underlying state
# is maintained properly.
first = read(split_at)
second = read(len(plaintext) - split_at)
third = read(1)
ciphertext = first + second + third

self.assertEqual(
b"Jd2LHCRXozwrEJc=",
b64encode(ciphertext),
)
def test_large_read(self):
"""
``EncryptAnUploadable.read_encrypted`` succeeds even when the requested
data length is much larger than the chunk size.
"""
convergence = b"\x42" * 16
# 4kB of plaintext
plaintext = b"\xde\xad\xbe\xef" * 1024
uploadable = upload.FileHandle(BytesIO(plaintext), convergence)
uploadable.set_default_encoding_parameters({
"k": 3,
"happy": 5,
"n": 10,
"max_segment_size": 128 * 1024,
})
# Make the chunk size very small so we don't have to operate on a huge
# amount of data to exercise the relevant codepath.
encrypter = upload.EncryptAnUploadable(uploadable, chunk_size=1)
d = encrypter.read_encrypted(len(plaintext), False)
ciphertext = self.successResultOf(d)
self.assertEqual(
list(map(len, ciphertext)),
# Chunk size was specified as 1 above so we will get the whole
# plaintext in one byte chunks.
[1] * len(plaintext),
)

# TODO:
# upload with exactly 75 servers (shares_of_happiness)
# have a download fail
@@ -1,54 +0,0 @@
import json

from twisted.trial import unittest
from twisted.internet.defer import inlineCallbacks

from eliot import log_call

from autobahn.twisted.testing import create_memory_agent, MemoryReactorClockResolver, create_pumper

from allmydata.web.logs import TokenAuthenticatedWebSocketServerProtocol

class TestStreamingLogs(unittest.TestCase):
"""
Test websocket streaming of logs
"""

def setUp(self):
self.reactor = MemoryReactorClockResolver()
self.pumper = create_pumper()
self.agent = create_memory_agent(self.reactor, self.pumper, TokenAuthenticatedWebSocketServerProtocol)
return self.pumper.start()

def tearDown(self):
return self.pumper.stop()

@inlineCallbacks
def test_one_log(self):
"""
write a single Eliot log and see it streamed via websocket
"""

proto = yield self.agent.open(
transport_config=u"ws://localhost:1234/ws",
options={},
)

messages = []
def got_message(msg, is_binary=False):
messages.append(json.loads(msg))
proto.on("message", got_message)

@log_call(action_type=u"test:cli:some-exciting-action")
def do_a_thing():
pass

do_a_thing()

proto.transport.loseConnection()
yield proto.is_closed

self.assertEqual(len(messages), 2)
self.assertEqual("started", messages[0]["action_status"])
self.assertEqual("succeeded", messages[1]["action_status"])
@@ -1,3 +1,15 @@
"""
Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from future.utils import PY2
if PY2:
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401

import json
from os.path import join

@@ -213,7 +225,7 @@ class IntroducerRootTests(unittest.TestCase):
resource = IntroducerRoot(introducer_node)
response = json.loads(
self.successResultOf(
render(resource, {"t": [b"json"]}),
render(resource, {b"t": [b"json"]}),
),
)
self.assertEqual(
@@ -1,5 +1,7 @@
"""
Tests for ``allmydata.web.logs``.

Ported to Python 3.
"""

from __future__ import (
@@ -9,6 +11,19 @@ from __future__ import (
division,
)

from future.utils import PY2
if PY2:
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401

import json

from twisted.trial import unittest
from twisted.internet.defer import inlineCallbacks

from eliot import log_call

from autobahn.twisted.testing import create_memory_agent, MemoryReactorClockResolver, create_pumper

from testtools.matchers import (
Equals,
)
@@ -37,6 +52,7 @@ from ..common import (

from ...web.logs import (
create_log_resources,
TokenAuthenticatedWebSocketServerProtocol,
)

class StreamingEliotLogsTests(SyncTestCase):
@@ -57,3 +73,47 @@ class StreamingEliotLogsTests(SyncTestCase):
self.client.get(b"http:///v1"),
succeeded(has_response_code(Equals(OK))),
)

class TestStreamingLogs(unittest.TestCase):
"""
Test websocket streaming of logs
"""

def setUp(self):
self.reactor = MemoryReactorClockResolver()
self.pumper = create_pumper()
self.agent = create_memory_agent(self.reactor, self.pumper, TokenAuthenticatedWebSocketServerProtocol)
return self.pumper.start()

def tearDown(self):
return self.pumper.stop()

@inlineCallbacks
def test_one_log(self):
"""
write a single Eliot log and see it streamed via websocket
"""

proto = yield self.agent.open(
transport_config=u"ws://localhost:1234/ws",
options={},
)

messages = []
def got_message(msg, is_binary=False):
messages.append(json.loads(msg))
proto.on("message", got_message)

@log_call(action_type=u"test:cli:some-exciting-action")
def do_a_thing():
pass

do_a_thing()

proto.transport.loseConnection()
yield proto.is_closed

self.assertEqual(len(messages), 2)
self.assertEqual("started", messages[0]["action_status"])
self.assertEqual("succeeded", messages[1]["action_status"])
@@ -1,5 +1,7 @@
"""
Tests for ``allmydata.web.private``.

Ported to Python 3.
"""

from __future__ import (
@@ -9,6 +11,10 @@ from __future__ import (
division,
)

from future.utils import PY2
if PY2:
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401

from testtools.matchers import (
Equals,
)
@@ -56,6 +62,7 @@ class PrivacyTests(SyncTestCase):
return super(PrivacyTests, self).setUp()

def _authorization(self, scheme, value):
value = str(value, "utf-8")
return Headers({
u"authorization": [u"{} {}".format(scheme, value)],
})
@@ -90,7 +97,7 @@ class PrivacyTests(SyncTestCase):
self.assertThat(
self.client.head(
b"http:///foo/bar",
headers=self._authorization(SCHEME, u"foo bar"),
headers=self._authorization(str(SCHEME, "utf-8"), b"foo bar"),
),
succeeded(has_response_code(Equals(UNAUTHORIZED))),
)
@@ -103,7 +110,7 @@ class PrivacyTests(SyncTestCase):
self.assertThat(
self.client.head(
b"http:///foo/bar",
headers=self._authorization(SCHEME, self.token),
headers=self._authorization(str(SCHEME, "utf-8"), self.token),
),
# It's a made up URL so we don't get a 200, either, but a 404.
succeeded(has_response_code(Equals(NOT_FOUND))),
@@ -1,6 +1,18 @@
"""
Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from future.utils import PY2
if PY2:
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401

import time

from urllib import (
from urllib.parse import (
quote,
)

@@ -77,7 +89,7 @@ class RenderSlashUri(unittest.TestCase):
)
self.assertEqual(
response_body,
"Invalid capability",
b"Invalid capability",
)

@@ -92,7 +104,7 @@ class RenderServiceRow(unittest.TestCase):
ann = {"anonymous-storage-FURL": "pb://w2hqnbaa25yw4qgcvghl5psa3srpfgw3@tcp:127.0.0.1:51309/vucto2z4fxment3vfxbqecblbf6zyp6x",
"permutation-seed-base32": "w2hqnbaa25yw4qgcvghl5psa3srpfgw3",
}
srv = NativeStorageServer("server_id", ann, None, {}, EMPTY_CLIENT_CONFIG)
srv = NativeStorageServer(b"server_id", ann, None, {}, EMPTY_CLIENT_CONFIG)
srv.get_connection_status = lambda: ConnectionStatus(False, "summary", {}, 0, 0)

class FakeClient(_Client):
@@ -103,7 +115,7 @@ class RenderServiceRow(unittest.TestCase):
tub_maker=None,
node_config=EMPTY_CLIENT_CONFIG,
)
self.storage_broker.test_add_server("test-srv", srv)
self.storage_broker.test_add_server(b"test-srv", srv)

root = RootElement(FakeClient(), time.time)
req = DummyRequest(b"")
@@ -189,7 +189,7 @@ class FakeHistory(object):
def list_all_helper_statuses(self):
return []

class FakeDisplayableServer(StubServer):
class FakeDisplayableServer(StubServer): # type: ignore # tahoe-lafs/ticket/3573
def __init__(self, serverid, nickname, connected,
last_connect_time, last_loss_time, last_rx_time):
StubServer.__init__(self, serverid)
@@ -255,7 +255,7 @@ class FakeStorageServer(service.MultiService):
def on_status_changed(self, cb):
cb(self)

class FakeClient(_Client):
class FakeClient(_Client): # type: ignore # tahoe-lafs/ticket/3573
def __init__(self):
# don't upcall to Client.__init__, since we only want to initialize a
# minimal subset
@@ -4757,6 +4757,31 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
op_url = self.webish_url + "/operations/134?t=status&output=JSON"
yield self.assertHTTPError(op_url, 404, "unknown/expired handle '134'")

@inlineCallbacks
def test_uri_redirect(self):
"""URI redirects don't cause failure.

Unit test reproducer for https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3590
"""
def req(method, path, **kwargs):
return treq.request(method, self.webish_url + path, persistent=False,
**kwargs)

response = yield req("POST", "/uri?format=sdmf&t=mkdir")
dircap = yield response.content()
assert dircap.startswith('URI:DIR2:')
dircap_uri = "/uri/?uri={}&t=json".format(urllib.quote(dircap))

response = yield req(
"GET",
dircap_uri,
)
self.assertEqual(
response.request.absoluteURI,
self.webish_url + "/uri/{}?t=json".format(urllib.quote(dircap)))
if response.code >= 400:
raise Error(response.code, response=response.content())

def test_incident(self):
d = self.POST("/report_incident", details="eek")
def _done(res):
@@ -1,3 +1,13 @@
"""Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from future.utils import PY2
if PY2:
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401

from zope.interface import implementer
from twisted.internet import defer
@@ -22,6 +22,11 @@ from past.builtins import unicode, long

import re

try:
from typing import Type
except ImportError:
pass

from zope.interface import implementer
from twisted.python.components import registerAdapter

@@ -489,7 +494,7 @@ class MDMFVerifierURI(_BaseURI):
return self

@implementer(IURI, IDirnodeURI)
@implementer(IDirnodeURI)
class _DirectoryBaseURI(_BaseURI):
def __init__(self, filenode_uri=None):
self._filenode_uri = filenode_uri
@@ -536,7 +541,7 @@ class _DirectoryBaseURI(_BaseURI):
return self._filenode_uri.get_storage_index()

@implementer(IDirectoryURI)
@implementer(IURI, IDirectoryURI)
class DirectoryURI(_DirectoryBaseURI):

BASE_STRING=b'URI:DIR2:'
@@ -555,7 +560,7 @@ class DirectoryURI(_DirectoryBaseURI):
return ReadonlyDirectoryURI(self._filenode_uri.get_readonly())

@implementer(IReadonlyDirectoryURI)
@implementer(IURI, IReadonlyDirectoryURI)
class ReadonlyDirectoryURI(_DirectoryBaseURI):

BASE_STRING=b'URI:DIR2-RO:'
@@ -574,6 +579,7 @@ class ReadonlyDirectoryURI(_DirectoryBaseURI):
return self

@implementer(IURI, IDirnodeURI)
class _ImmutableDirectoryBaseURI(_DirectoryBaseURI):
def __init__(self, filenode_uri=None):
if filenode_uri:
@@ -611,7 +617,7 @@ class LiteralDirectoryURI(_ImmutableDirectoryBaseURI):
return None

@implementer(IDirectoryURI)
@implementer(IURI, IDirectoryURI)
class MDMFDirectoryURI(_DirectoryBaseURI):

BASE_STRING=b'URI:DIR2-MDMF:'
@@ -633,7 +639,7 @@ class MDMFDirectoryURI(_DirectoryBaseURI):
return MDMFDirectoryURIVerifier(self._filenode_uri.get_verify_cap())

@implementer(IReadonlyDirectoryURI)
@implementer(IURI, IReadonlyDirectoryURI)
class ReadonlyMDMFDirectoryURI(_DirectoryBaseURI):

BASE_STRING=b'URI:DIR2-MDMF-RO:'
@@ -671,7 +677,7 @@ def wrap_dirnode_cap(filecap):
raise AssertionError("cannot interpret as a directory cap: %s" % filecap.__class__)

@implementer(IVerifierURI)
@implementer(IURI, IVerifierURI)
class MDMFDirectoryURIVerifier(_DirectoryBaseURI):

BASE_STRING=b'URI:DIR2-MDMF-Verifier:'
@@ -696,12 +702,12 @@ class MDMFDirectoryURIVerifier(_DirectoryBaseURI):
return self

@implementer(IVerifierURI)
@implementer(IURI, IVerifierURI)
class DirectoryURIVerifier(_DirectoryBaseURI):

BASE_STRING=b'URI:DIR2-Verifier:'
BASE_STRING_RE=re.compile(b'^'+BASE_STRING)
INNER_URI_CLASS=SSKVerifierURI
INNER_URI_CLASS=SSKVerifierURI # type: Type[IVerifierURI]

def __init__(self, filenode_uri=None):
if filenode_uri:
@@ -84,6 +84,7 @@ PORTED_MODULES = [
"allmydata.storage.shares",
"allmydata.test.no_network",
"allmydata.test.mutable.util",
"allmydata.unknown",
"allmydata.uri",
"allmydata.util._python3",
"allmydata.util.abbreviate",
@@ -114,6 +115,7 @@ PORTED_MODULES = [
"allmydata.util.spans",
"allmydata.util.statistics",
"allmydata.util.time_format",
"allmydata.web.logs",
"allmydata.webish",
]

@@ -188,6 +190,10 @@ PORTED_TEST_MODULES = [
"allmydata.test.test_util",
"allmydata.test.web.test_common",
"allmydata.test.web.test_grid",
"allmydata.test.web.test_introducer",
"allmydata.test.web.test_logs",
"allmydata.test.web.test_private",
"allmydata.test.web.test_root",
"allmydata.test.web.test_status",
"allmydata.test.web.test_util",
"allmydata.test.web.test_webish",
@@ -15,7 +15,18 @@ if PY2:

import time

try:
from typing import (
Callable,
Any,
)
except ImportError:
pass

from foolscap.api import eventually
from eliot.twisted import (
inline_callbacks,
)
from twisted.internet import defer, reactor, error
from twisted.python.failure import Failure

@@ -201,3 +212,22 @@ class WaitForDelayedCallsMixin(PollMixin):
d.addErrback(log.err, "error while waiting for delayed calls")
d.addBoth(lambda ign: res)
return d

@inline_callbacks
def until(
action, # type: Callable[[], defer.Deferred[Any]]
condition, # type: Callable[[], bool]
):
# type: (...) -> defer.Deferred[None]
"""
Run a Deferred-returning function until a condition is true.

:param action: The action to run.
:param condition: The predicate signaling stop.

:return: A Deferred that fires after the condition signals stop.
"""
while True:
yield action()
if condition():
break
@@ -311,7 +311,7 @@ def precondition_abspath(path):

_getfullpathname = None
try:
from nt import _getfullpathname
from nt import _getfullpathname # type: ignore
except ImportError:
pass
@@ -14,6 +14,12 @@ if PY2:
from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401

import time

try:
from typing import List
except ImportError:
pass

from twisted.internet import task

class TimeoutError(Exception):
@@ -23,7 +29,7 @@ class PollComplete(Exception):
pass

class PollMixin(object):
_poll_should_ignore_these_errors = []
_poll_should_ignore_these_errors = [] # type: List[Exception]

def poll(self, check_f, pollinterval=0.01, timeout=1000):
# Return a Deferred, then call check_f periodically until it returns
@@ -6,6 +6,11 @@ Can eventually be merged back into allmydata.web.common.

from past.builtins import unicode

try:
from typing import Optional
except ImportError:
pass

from twisted.web import resource, http

from allmydata.util import abbreviate
@@ -55,7 +60,7 @@ class MultiFormatResource(resource.Resource, object):
format if nothing else is given as the ``formatDefault``.
"""
formatArgument = "t"
formatDefault = None
formatDefault = None # type: Optional[str]

def render(self, req):
"""
@@ -26,10 +26,10 @@ class IntroducerRoot(MultiFormatResource):
self.introducer_node = introducer_node
self.introducer_service = introducer_node.getServiceNamed("introducer")
# necessary as a root Resource
self.putChild("", self)
self.putChild(b"", self)
static_dir = resource_filename("allmydata.web", "static")
for filen in os.listdir(static_dir):
self.putChild(filen, static.File(os.path.join(static_dir, filen)))
self.putChild(filen.encode("utf-8"), static.File(os.path.join(static_dir, filen)))

def _create_element(self):
"""
@@ -66,7 +66,7 @@ class IntroducerRoot(MultiFormatResource):
announcement_summary[service_name] += 1
res[u"announcement_summary"] = announcement_summary

return json.dumps(res, indent=1) + b"\n"
return (json.dumps(res, indent=1) + "\n").encode("utf-8")

class IntroducerRootElement(Element):
@@ -1,3 +1,6 @@
"""
Ported to Python 3.
"""
from __future__ import (
print_function,
unicode_literals,
@@ -49,7 +52,11 @@ class TokenAuthenticatedWebSocketServerProtocol(WebSocketServerProtocol):
"""
# probably want a try/except around here? what do we do if
# transmission fails or anything else bad happens?
self.sendMessage(json.dumps(message))
encoded = json.dumps(message)
if isinstance(encoded, str):
# On Python 3 dumps() returns Unicode...
encoded = encoded.encode("utf-8")
self.sendMessage(encoded)

def onOpen(self):
"""
@@ -61,7 +61,16 @@ class IToken(ICredentials):
pass

@implementer(IToken)
# Workaround for Shoobx/mypy-zope#26, where without suitable
# stubs for twisted classes (ICredentials), IToken does not
# appear to be an Interface. The proper fix appears to be to
# create stubs for twisted
# (https://twistedmatrix.com/trac/ticket/9717). For now,
# bypassing the inline decorator syntax works around the issue.
_itoken_impl = implementer(IToken)

@_itoken_impl
@attr.s
class Token(object):
proposed_token = attr.ib(type=bytes)
@@ -1,6 +1,8 @@
from future.utils import PY3

import os
import time
import urllib
from urllib.parse import quote as urlquote

from hyperlink import DecodedURL, URL
from pkg_resources import resource_filename
@@ -9,7 +11,7 @@ from twisted.web import (
resource,
static,
)
from twisted.web.util import redirectTo
from twisted.web.util import redirectTo, Redirect
from twisted.python.filepath import FilePath
from twisted.web.template import (
Element,
@@ -81,7 +83,7 @@ class URIHandler(resource.Resource, object):
# it seems Nevow was creating absolute URLs including
# host/port whereas req.uri is absolute (but lacks host/port)
redir_uri = URL.from_text(req.prePathURL().decode('utf8'))
redir_uri = redir_uri.child(urllib.quote(uri_arg).decode('utf8'))
redir_uri = redir_uri.child(urlquote(uri_arg))
# add back all the query args that AREN'T "?uri="
for k, values in req.args.items():
if k != b"uri":
@@ -145,7 +147,7 @@ class URIHandler(resource.Resource, object):
and creates and appropriate handler (depending on the kind of
capability it was passed).
"""
# this is in case a URI like "/uri/?cap=<valid capability>" is
# this is in case a URI like "/uri/?uri=<valid capability>" is
# passed -- we re-direct to the non-trailing-slash version so
# that there is just one valid URI for "uri" resource.
if not name:
@@ -153,7 +155,7 @@ class URIHandler(resource.Resource, object):
u = u.replace(
path=(s for s in u.path if s), # remove empty segments
)
return redirectTo(u.to_uri().to_text().encode('utf8'), req)
return Redirect(u.to_uri().to_text().encode('utf8'))
try:
node = self.client.create_node_from_uri(name)
return directory.make_handler_for(node, self.client)
@@ -227,26 +229,26 @@ class Root(MultiFormatResource):
self._client = client
self._now_fn = now_fn

# Children need to be bytes; for now just doing these to make specific
# tests pass on Python 3, but eventually will do all them when this
# module is ported to Python 3 (if not earlier).
self.putChild(b"uri", URIHandler(client))
self.putChild("cap", URIHandler(client))
self.putChild(b"cap", URIHandler(client))

# Handler for everything beneath "/private", an area of the resource
# hierarchy which is only accessible with the private per-node API
# auth token.
self.putChild("private", create_private_tree(client.get_auth_token))
self.putChild(b"private", create_private_tree(client.get_auth_token))

self.putChild("file", FileHandler(client))
self.putChild("named", FileHandler(client))
self.putChild("status", status.Status(client.get_history()))
self.putChild("statistics", status.Statistics(client.stats_provider))
self.putChild(b"file", FileHandler(client))
self.putChild(b"named", FileHandler(client))
self.putChild(b"status", status.Status(client.get_history()))
self.putChild(b"statistics", status.Statistics(client.stats_provider))
static_dir = resource_filename("allmydata.web", "static")
for filen in os.listdir(static_dir):
self.putChild(filen, static.File(os.path.join(static_dir, filen)))
child_path = filen
if PY3:
child_path = filen.encode("utf-8")
self.putChild(child_path, static.File(os.path.join(static_dir, filen)))

self.putChild("report_incident", IncidentReporter())
self.putChild(b"report_incident", IncidentReporter())

@exception_to_child
def getChild(self, path, request):
@@ -217,7 +217,12 @@ def initialize():
# Instead it "mangles" or escapes them using \x7F as an escape character, which we
# unescape here.
def unmangle(s):
return re.sub(u'\\x7F[0-9a-fA-F]*\\;', lambda m: unichr(int(m.group(0)[1:-1], 16)), s)
return re.sub(
u'\\x7F[0-9a-fA-F]*\\;',
# type ignored for 'unichr' (Python 2 only)
lambda m: unichr(int(m.group(0)[1:-1], 16)), # type: ignore
s,
)

try:
argv = [unmangle(argv_unicode[i]).encode('utf-8') for i in xrange(0, argc.value)]
tox.ini
@@ -7,7 +7,7 @@
twisted = 1

[tox]
envlist = codechecks,py27,py36,pypy27
envlist = typechecks,codechecks,py27,py36,pypy27
minversion = 2.4

[testenv]
@@ -77,7 +77,7 @@ setenv =
COVERAGE_PROCESS_START=.coveragerc
commands =
# NOTE: 'run with "py.test --keep-tempdir -s -v integration/" to debug failures'
py.test --coverage -v {posargs:integration}
py.test --timeout=1800 --coverage -v {posargs:integration}
coverage combine
coverage report

@@ -112,6 +112,16 @@ commands =
# file. See pyproject.toml for legal <change type> values.
python -m towncrier.check --pyproject towncrier.pyproject.toml

[testenv:typechecks]
skip_install = True
deps =
mypy
git+https://github.com/Shoobx/mypy-zope
git+https://github.com/warner/foolscap
commands = mypy src

[testenv:draftnews]
passenv = TAHOE_LAFS_* PIP_* SUBUNITREPORTER_* USERPROFILE HOMEDRIVE HOMEPATH
# see comment in [testenv] about "certifi"
@@ -211,6 +221,7 @@ commands =
deps =
sphinx
docutils==0.12
recommonmark
# normal install is not needed for docs, and slows things down
skip_install = True
commands =