mirror of https://github.com/tahoe-lafs/tahoe-lafs.git
synced 2025-06-19 07:48:11 +00:00

Merge branch 'master' into 2916.grid-manager-proposal.5
.circleci/config.yml
@@ -14,44 +14,73 @@ version: 2.1
 workflows:
   ci:
     jobs:
-      # Platforms
-      - "debian-9"
+      # Start with jobs testing various platforms.
 
+      # Every job that pulls a Docker image from Docker Hub needs to provide
+      # credentials for that pull operation to avoid being subjected to
+      # unauthenticated pull limits shared across all of CircleCI.  Use this
+      # first job to define a yaml anchor that can be used to supply a
+      # CircleCI job context which makes Docker Hub credentials available in
+      # the environment.
+      #
+      # Contexts are managed in the CircleCI web interface:
+      #
+      #   https://app.circleci.com/settings/organization/github/tahoe-lafs/contexts
+      - "debian-9": &DOCKERHUB_CONTEXT
+          context: "dockerhub-auth"
+
       - "debian-8":
+          <<: *DOCKERHUB_CONTEXT
           requires:
             - "debian-9"
 
-      - "ubuntu-20-04"
+      - "ubuntu-20-04":
+          <<: *DOCKERHUB_CONTEXT
       - "ubuntu-18-04":
+          <<: *DOCKERHUB_CONTEXT
           requires:
             - "ubuntu-20-04"
       - "ubuntu-16-04":
+          <<: *DOCKERHUB_CONTEXT
           requires:
             - "ubuntu-20-04"
 
-      - "fedora-29"
+      - "fedora-29":
+          <<: *DOCKERHUB_CONTEXT
       - "fedora-28":
+          <<: *DOCKERHUB_CONTEXT
           requires:
             - "fedora-29"
 
-      - "centos-8"
+      - "centos-8":
+          <<: *DOCKERHUB_CONTEXT
 
-      - "nixos-19-09"
+      - "nixos-19-09":
+          <<: *DOCKERHUB_CONTEXT
 
       # Test against PyPy 2.7
-      - "pypy27-buster"
+      - "pypy27-buster":
+          <<: *DOCKERHUB_CONTEXT
 
       # Just one Python 3.6 configuration while the port is in-progress.
-      - "python36"
+      - "python36":
+          <<: *DOCKERHUB_CONTEXT
 
       # Other assorted tasks and configurations
-      - "lint"
-      - "pyinstaller"
-      - "deprecations"
-      - "c-locale"
+      - "lint":
+          <<: *DOCKERHUB_CONTEXT
+      - "pyinstaller":
+          <<: *DOCKERHUB_CONTEXT
+      - "deprecations":
+          <<: *DOCKERHUB_CONTEXT
+      - "c-locale":
+          <<: *DOCKERHUB_CONTEXT
       # Any locale other than C or UTF-8.
-      - "another-locale"
+      - "another-locale":
+          <<: *DOCKERHUB_CONTEXT
 
       - "integration":
+          <<: *DOCKERHUB_CONTEXT
           requires:
             # If the unit test suite doesn't pass, don't bother running the
             # integration tests.
@@ -59,7 +88,8 @@ workflows:
 
       # Generate the underlying data for a visualization to aid with Python 3
       # porting.
-      - "build-porting-depgraph"
+      - "build-porting-depgraph":
+          <<: *DOCKERHUB_CONTEXT
 
   images:
     # Build the Docker images used by the ci jobs.  This makes the ci jobs
@@ -74,22 +104,55 @@ workflows:
           - "master"
 
     jobs:
-      - "build-image-debian-8"
-      - "build-image-debian-9"
-      - "build-image-ubuntu-16-04"
-      - "build-image-ubuntu-18-04"
-      - "build-image-ubuntu-20-04"
-      - "build-image-fedora-28"
-      - "build-image-fedora-29"
-      - "build-image-centos-8"
-      - "build-image-pypy27-buster"
-      - "build-image-python36-ubuntu"
+      - "build-image-debian-8":
+          <<: *DOCKERHUB_CONTEXT
+      - "build-image-debian-9":
+          <<: *DOCKERHUB_CONTEXT
+      - "build-image-ubuntu-16-04":
+          <<: *DOCKERHUB_CONTEXT
+      - "build-image-ubuntu-18-04":
+          <<: *DOCKERHUB_CONTEXT
+      - "build-image-ubuntu-20-04":
+          <<: *DOCKERHUB_CONTEXT
+      - "build-image-fedora-28":
+          <<: *DOCKERHUB_CONTEXT
+      - "build-image-fedora-29":
+          <<: *DOCKERHUB_CONTEXT
+      - "build-image-centos-8":
+          <<: *DOCKERHUB_CONTEXT
+      - "build-image-pypy27-buster":
+          <<: *DOCKERHUB_CONTEXT
+      - "build-image-python36-ubuntu":
+          <<: *DOCKERHUB_CONTEXT
 
 
 jobs:
+  dockerhub-auth-template:
+    # This isn't a real job.  It doesn't get scheduled as part of any
+    # workflow.  Instead, it's just a place we can hang a yaml anchor to
+    # finish the Docker Hub authentication configuration.  Workflow jobs using
+    # the DOCKERHUB_CONTEXT anchor will have access to the environment
+    # variables used here.  These variables will allow the Docker Hub image
+    # pull to be authenticated and hopefully avoid hitting and rate limits.
+    docker: &DOCKERHUB_AUTH
+      - image: "null"
+        auth:
+          username: $DOCKERHUB_USERNAME
+          password: $DOCKERHUB_PASSWORD
+
+    steps:
+      - run:
+          name: "CircleCI YAML schema conformity"
+          command: |
+            # This isn't a real command.  We have to have something in this
+            # space, though, or the CircleCI yaml schema validator gets angry.
+            # Since this job is never scheduled this step is never run so the
+            # actual value here is irrelevant.
+
   lint:
     docker:
-      - image: "circleci/python:2"
+      - <<: *DOCKERHUB_AUTH
+        image: "circleci/python:2"
 
     steps:
       - "checkout"
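The DOCKERHUB_CONTEXT/DOCKERHUB_AUTH pattern above is ordinary YAML anchor and merge-key mechanics rather than anything CircleCI-specific. A minimal sketch of how the merge expands, assuming PyYAML is installed (not part of the commit):

import yaml

doc = """
jobs:
  - "debian-9": &DOCKERHUB_CONTEXT
      context: "dockerhub-auth"
  - "debian-8":
      <<: *DOCKERHUB_CONTEXT
      requires:
        - "debian-9"
"""

data = yaml.safe_load(doc)
merged = data["jobs"][1]["debian-8"]
# The merge key copied context: "dockerhub-auth" into the debian-8 mapping.
assert merged["context"] == "dockerhub-auth"
assert merged["requires"] == ["debian-9"]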
@@ -106,7 +169,8 @@ jobs:
 
   pyinstaller:
     docker:
-      - image: "circleci/python:2"
+      - <<: *DOCKERHUB_AUTH
+        image: "circleci/python:2"
 
     steps:
       - "checkout"
@@ -131,7 +195,8 @@ jobs:
 
   debian-9: &DEBIAN
     docker:
-      - image: "tahoelafsci/debian:9-py2.7"
+      - <<: *DOCKERHUB_AUTH
+        image: "tahoelafsci/debian:9-py2.7"
         user: "nobody"
 
     environment: &UTF_8_ENVIRONMENT
@@ -154,6 +219,8 @@ jobs:
       # we maintain.
       WHEELHOUSE_PATH: &WHEELHOUSE_PATH "/tmp/wheelhouse"
       PIP_FIND_LINKS: "file:///tmp/wheelhouse"
+      # Upload the coverage report.
+      UPLOAD_COVERAGE: "yes"
 
     # pip cannot install packages if the working directory is not readable.
     # We want to run a lot of steps as nobody instead of as root.
@@ -202,26 +269,32 @@ jobs:
       - run: &SUBMIT_COVERAGE
           name: "Submit coverage results"
           command: |
-            /tmp/venv/bin/codecov
+            if [ -n "${UPLOAD_COVERAGE}" ]; then
+              /tmp/venv/bin/codecov
+            fi
 
 
   debian-8:
     <<: *DEBIAN
     docker:
-      - image: "tahoelafsci/debian:8-py2.7"
+      - <<: *DOCKERHUB_AUTH
+        image: "tahoelafsci/debian:8-py2.7"
         user: "nobody"
 
 
   pypy27-buster:
     <<: *DEBIAN
     docker:
-      - image: "tahoelafsci/pypy:buster-py2"
+      - <<: *DOCKERHUB_AUTH
+        image: "tahoelafsci/pypy:buster-py2"
         user: "nobody"
 
     environment:
       <<: *UTF_8_ENVIRONMENT
       # We don't do coverage since it makes PyPy far too slow:
       TAHOE_LAFS_TOX_ENVIRONMENT: "pypy27"
+      # Since we didn't collect it, don't upload it.
+      UPLOAD_COVERAGE: ""
 
 
   c-locale:
@@ -250,6 +323,8 @@ jobs:
       TAHOE_LAFS_TOX_ENVIRONMENT: "deprecations,upcoming-deprecations"
       # Put the logs somewhere we can report them.
       TAHOE_LAFS_WARNINGS_LOG: "/tmp/artifacts/deprecation-warnings.log"
+      # The deprecations tox environments don't do coverage measurement.
+      UPLOAD_COVERAGE: ""
 
 
   integration:
@@ -272,21 +347,24 @@ jobs:
   ubuntu-16-04:
     <<: *DEBIAN
     docker:
-      - image: "tahoelafsci/ubuntu:16.04-py2.7"
+      - <<: *DOCKERHUB_AUTH
+        image: "tahoelafsci/ubuntu:16.04-py2.7"
         user: "nobody"
 
 
   ubuntu-18-04: &UBUNTU_18_04
     <<: *DEBIAN
     docker:
-      - image: "tahoelafsci/ubuntu:18.04-py2.7"
+      - <<: *DOCKERHUB_AUTH
+        image: "tahoelafsci/ubuntu:18.04-py2.7"
         user: "nobody"
 
 
   python36:
     <<: *UBUNTU_18_04
     docker:
-      - image: "tahoelafsci/ubuntu:18.04-py3"
+      - <<: *DOCKERHUB_AUTH
+        image: "tahoelafsci/ubuntu:18.04-py3"
         user: "nobody"
 
     environment:
@@ -301,13 +379,15 @@ jobs:
   ubuntu-20-04:
     <<: *DEBIAN
     docker:
-      - image: "tahoelafsci/ubuntu:20.04"
+      - <<: *DOCKERHUB_AUTH
+        image: "tahoelafsci/ubuntu:20.04"
         user: "nobody"
 
 
   centos-8: &RHEL_DERIV
     docker:
-      - image: "tahoelafsci/centos:8-py2"
+      - <<: *DOCKERHUB_AUTH
+        image: "tahoelafsci/centos:8-py2"
         user: "nobody"
 
     environment: *UTF_8_ENVIRONMENT
@@ -329,21 +409,24 @@ jobs:
   fedora-28:
     <<: *RHEL_DERIV
     docker:
-      - image: "tahoelafsci/fedora:28-py"
+      - <<: *DOCKERHUB_AUTH
+        image: "tahoelafsci/fedora:28-py"
         user: "nobody"
 
 
   fedora-29:
     <<: *RHEL_DERIV
     docker:
-      - image: "tahoelafsci/fedora:29-py"
+      - <<: *DOCKERHUB_AUTH
+        image: "tahoelafsci/fedora:29-py"
         user: "nobody"
 
 
   nixos-19-09:
     docker:
       # Run in a highly Nix-capable environment.
-      - image: "nixorg/nix:circleci"
+      - <<: *DOCKERHUB_AUTH
+        image: "nixorg/nix:circleci"
 
     environment:
       NIX_PATH: "nixpkgs=https://github.com/NixOS/nixpkgs-channels/archive/nixos-19.09-small.tar.gz"
@@ -400,7 +483,8 @@ jobs:
     #
     # https://circleci.com/blog/how-to-build-a-docker-image-on-circleci-2-0/
     docker:
-      - image: "docker:17.05.0-ce-git"
+      - <<: *DOCKERHUB_AUTH
+        image: "docker:17.05.0-ce-git"
 
     environment:
       DISTRO: "tahoelafsci/<DISTRO>:foo-py2"
@@ -410,47 +494,10 @@ jobs:
     steps:
       - "checkout"
       - "setup_remote_docker"
-      - run:
-          name: "Get openssl"
-          command: |
-            apk add --no-cache openssl
-      - run:
-          name: "Get Dockerhub secrets"
-          command: |
-            # If you create an encryption key like this:
-            #
-            #  openssl enc -aes-256-cbc -k secret -P -md sha256
-
-            # From the output that looks like:
-            #
-            #  salt=...
-            #  key=...
-            #  iv =...
-            #
-            # extract just the value for ``key``.
-
-            # then you can re-generate ``secret-env-cipher`` locally using the
-            # command:
-            #
-            #   openssl aes-256-cbc -e -md sha256 -in secret-env-plain -out .circleci/secret-env-cipher -pass env:KEY
-            #
-            # Make sure the key is set as the KEY environment variable in the
-            # CircleCI web interface.  You can do this by visiting
-            # <https://circleci.com/gh/tahoe-lafs/tahoe-lafs/edit#env-vars>
-            # after logging in to CircleCI with an account in the tahoe-lafs
-            # CircleCI team.
-            #
-            # Then you can recover the environment plaintext (for example, to
-            # change and re-encrypt it) like just like CircleCI recovers it
-            # here:
-            #
-            openssl aes-256-cbc -d -md sha256 -in .circleci/secret-env-cipher -pass env:KEY >> ~/.env
       - run:
           name: "Log in to Dockerhub"
           command: |
-            . ~/.env
-            # TAHOELAFSCI_PASSWORD come from the secret env.
-            docker login -u tahoelafsci -p ${TAHOELAFSCI_PASSWORD}
+            docker login -u ${DOCKERHUB_USERNAME} -p ${DOCKERHUB_PASSWORD}
       - run:
           name: "Build image"
           command: |
.circleci/secret-env-cipher (deleted)
@@ -1 +0,0 @@
-Salted__ [binary ciphertext: the openssl-encrypted Docker Hub secrets, superseded by the CircleCI context above]
11 .coveragerc
@@ -14,3 +14,14 @@ branch = True
 [report]
 show_missing = True
 skip_covered = True
+
+[paths]
+source =
+    # It looks like this in the checkout
+    src/
+    # It looks like this in the Windows build environment
+    D:/a/tahoe-lafs/tahoe-lafs/.tox/py*-coverage/Lib/site-packages/
+    # Although sometimes it looks like this instead.  Also it looks like this on macOS.
+    .tox/py*-coverage/lib/python*/site-packages/
+    # On some Linux CI jobs it looks like this
+    /tmp/tahoe-lafs.tox/py*-coverage/lib/python*/site-packages/
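The new [paths] section tells `coverage combine` to treat all of the listed locations as aliases for the first entry, so measurements from Windows, macOS, and Linux CI runs line up on src/. A rough illustration of that remapping (hypothetical helper; a simplification of coverage.py's real matching rules):

import re

CANONICAL = "src/"
ALIASES = [
    r"D:/a/tahoe-lafs/tahoe-lafs/\.tox/py.*-coverage/Lib/site-packages/",
    r"\.tox/py.*-coverage/lib/python.*/site-packages/",
    r"/tmp/tahoe-lafs\.tox/py.*-coverage/lib/python.*/site-packages/",
]

def remap(path):
    # Rewrite any aliased measurement path onto the canonical source tree.
    for alias in ALIASES:
        m = re.match(alias, path)
        if m:
            return CANONICAL + path[m.end():]
    return path

print(remap("/tmp/tahoe-lafs.tox/py36-coverage/lib/python3.6/site-packages/allmydata/node.py"))
# -> src/allmydata/node.py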
.pre-commit-config.yaml
@@ -3,13 +3,7 @@ repos:
   hooks:
   - id: codechecks
     name: codechecks
-    stages: ["commit"]
+    stages: ["push"]
     entry: "tox -e codechecks"
     language: system
     pass_filenames: false
-  - id: test
-    name: test
-    stages: ["push"]
-    entry: "make test"
-    language: system
-    pass_filenames: false
2 CREDITS
@@ -113,6 +113,8 @@ E: jacob@appelbaum.com
 W: http://www.appelbaum.net/
 P: 12E4 04FF D3C9 31F9 3405 2D06 B884 1A91 9D0F ACE4
 D: Debian packaging including init scripts
+D: Note that contributions from Jacob Appelbaum (ioerror) are no longer welcome
+D: due to behavior unacceptable to community standards in Tor and other projects
 
 N: Jeremy Visser
 D: Ubuntu packaging, usability testing
14 Makefile
@@ -19,8 +19,10 @@ PYTHON=python
 export PYTHON
 PYFLAKES=flake8
 export PYFLAKES
+VIRTUAL_ENV=./.tox/py27
 SOURCES=src/allmydata static misc setup.py
 APPNAME=tahoe-lafs
+TEST_SUITE=allmydata
 
 
 # Top-level, phony targets
@@ -45,6 +47,18 @@ test: .tox/create-venvs.log
	tox --develop -e codechecks
	# Run all the test environments in parallel to reduce run-time
	tox --develop -p auto -e 'py27,py36,pypy27'
+.PHONY: test-venv-coverage
+## Run all tests with coverage collection and reporting.
+test-venv-coverage:
+	# Special handling for reporting coverage even when the test run fails
+	rm -f ./.coverage.*
+	test_exit=
+	$(VIRTUAL_ENV)/bin/coverage run -m twisted.trial --rterrors --reporter=timing \
+		$(TEST_SUITE) || test_exit="$$?"
+	$(VIRTUAL_ENV)/bin/coverage combine
+	$(VIRTUAL_ENV)/bin/coverage xml || true
+	$(VIRTUAL_ENV)/bin/coverage report
+	if [ ! -z "$$test_exit" ]; then exit "$$test_exit"; fi
 .PHONY: test-py3-all
 ## Run all tests under Python 3
 test-py3-all: .tox/create-venvs.log
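The shape of the new test-venv-coverage target is: run the tests, remember their exit status, always combine and report coverage, and only then propagate the saved status. The same control flow sketched in Python for clarity (not part of the commit; assumes coverage and Twisted are installed):

import subprocess
import sys

# Run the suite under coverage; keep the exit status instead of aborting.
test_exit = subprocess.call([sys.executable, "-m", "coverage", "run",
                             "-m", "twisted.trial", "allmydata"])
# Reporting always happens, even after a failing run.
subprocess.call([sys.executable, "-m", "coverage", "combine"])
subprocess.call([sys.executable, "-m", "coverage", "report"])
sys.exit(test_exit)  # finally surface the original test result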
@@ -64,3 +64,9 @@ Peter Secor
 Shawn Willden
 
 Terrell Russell
+
+Jean-Paul Calderone
+
+meejah
+
+Sajith Sasidharan
integration/conftest.py
@@ -8,10 +8,13 @@ from os.path import join, exists
 from tempfile import mkdtemp, mktemp
 from functools import partial
 
+from foolscap.furl import (
+    decode_furl,
+)
+
 from eliot import (
     to_file,
     log_call,
-    start_action,
 )
 
 from twisted.python.procutils import which
@@ -30,7 +33,6 @@ from util import (
     _DumpOutputProtocol,
     _ProcessExitedProtocol,
     _create_node,
-    _run_node,
     _cleanup_tahoe_process,
     _tahoe_runner_optional_coverage,
     await_client_ready,
@@ -226,6 +228,16 @@ def introducer_furl(introducer, temp_dir):
         print("Don't see {} yet".format(furl_fname))
         sleep(.1)
     furl = open(furl_fname, 'r').read()
+    tubID, location_hints, name = decode_furl(furl)
+    if not location_hints:
+        # If there are no location hints then nothing can ever possibly
+        # connect to it and the only thing that can happen next is something
+        # will hang or time out.  So just give up right now.
+        raise ValueError(
+            "Introducer ({!r}) fURL has no location hints!".format(
+                introducer_furl,
+            ),
+        )
     return furl
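For context on the new check: foolscap's decode_furl splits a fURL into a (tub_id, location_hints, name) triple, and an empty hints list means no client could ever reach the Tub. A hedged sketch (the fURL below is made up):

from foolscap.furl import decode_furl

furl = "pb://ixqo54arl4zhkl74wvlcs47zcgiwwpvm@tcp:127.0.0.1:49153/introducer"
tub_id, location_hints, name = decode_furl(furl)
# An empty hint list means a caller could only hang or time out.
assert location_hints, "fURL has no location hints!"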
@@ -1,7 +1,6 @@
 import sys
 from os.path import join
 
-from twisted.internet import task
 from twisted.internet.error import ProcessTerminated
 
 import util
integration/test_streaming_logs.py
@@ -53,7 +53,12 @@ class _StreamingLogClientProtocol(WebSocketClientProtocol):
         self.factory.on_open.callback(self)
 
     def onMessage(self, payload, isBinary):
-        self.on_message.callback(payload)
+        if self.on_message is None:
+            # Already did our job, ignore it
+            return
+        on_message = self.on_message
+        self.on_message = None
+        on_message.callback(payload)
 
     def onClose(self, wasClean, code, reason):
         self.on_close.callback(reason)
@@ -131,10 +136,13 @@ def _test_streaming_logs(reactor, temp_dir, alice):
     client.on_close = Deferred()
     client.on_message = Deferred()
 
+    # Capture this now before on_message perhaps goes away.
+    racing = _race(client.on_close, client.on_message)
+
     # Provoke _some_ log event.
     yield treq.get(node_url)
 
-    result = yield _race(client.on_close, client.on_message)
+    result = yield racing
 
     assert isinstance(result, Right)
     json.loads(result.value)
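Both changes above follow the same fire-once discipline: detach the Deferred before firing it so a second WebSocket message cannot hit an already-called Deferred, and build the race before provoking the event it waits for. The detach-then-fire pattern in isolation (not from the commit; assumes Twisted is installed):

from twisted.internet.defer import Deferred

class Once(object):
    """Deliver only the first event; silently drop the rest."""
    def __init__(self):
        self.on_message = Deferred()

    def fire(self, payload):
        if self.on_message is None:
            return  # already did our job, ignore it
        d, self.on_message = self.on_message, None
        d.callback(payload)  # a second callback() would raise AlreadyCalledError

o = Once()
o.on_message.addCallback(print)
o.fire(b"first")   # delivered
o.fire(b"second")  # ignored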
@@ -1,15 +1,8 @@
 from __future__ import print_function
 
 import sys
-import time
-import shutil
-from os import mkdir, unlink, listdir
-from os.path import join, exists
-from six.moves import StringIO
-
-from twisted.internet.protocol import ProcessProtocol
-from twisted.internet.error import ProcessExitedAlready, ProcessDone
-from twisted.internet.defer import inlineCallbacks, Deferred
+from os import mkdir
+from os.path import join
 
 import pytest
 import pytest_twisted
integration/test_web.py
@@ -9,20 +9,15 @@ WebAPI *should* do in every situation. It's not clear the latter
 exists anywhere, however.
 """
 
-import sys
 import time
-import shutil
 import json
 import urllib2
-from os import mkdir, unlink, utime
-from os.path import join, exists, getmtime
 
 import allmydata.uri
 
 import util
 
 import requests
-import pytest_twisted
 import html5lib
 from bs4 import BeautifulSoup
 
@@ -265,7 +260,8 @@ def test_directory_deep_check(alice):
         dircap_url,
         params={u"t": u"json"},
     )
-    dir_meta = json.loads(resp.content)
+    # Just verify it is valid JSON.
+    json.loads(resp.content)
 
     # upload a file of pangrams into the directory
     FILE_CONTENTS = u"Sphinx of black quartz, judge my vow.\n" * (2048*10)
integration/util.py
@@ -1,7 +1,7 @@
 import sys
 import time
 import json
-from os import mkdir
+from os import mkdir, environ
 from os.path import exists, join
 from six.moves import StringIO
 from functools import partial
@@ -145,6 +145,7 @@ def _tahoe_runner_optional_coverage(proto, reactor, request, other_args):
         proto,
         sys.executable,
         args,
+        env=environ,
     )
misc/build_helpers/run-deprecations.py
@@ -20,7 +20,7 @@ class Options(usage.Options):
         self["args"] = list(args)
 
     description = """Run as:
-PYTHONWARNINGS=default::DeprecationWarning python run-deprecations.py [--warnings=STDERRFILE] [--package=PYTHONPACKAGE ] COMMAND ARGS..
+python run-deprecations.py [--warnings=STDERRFILE] [--package=PYTHONPACKAGE ] COMMAND ARGS..
 """
 
 class RunPP(protocol.ProcessProtocol):
@@ -79,12 +79,6 @@ def run_command(main):
             (command, os.environ.get("PATH")))
     exe = executables[0]
 
-    pw = os.environ.get("PYTHONWARNINGS")
-    DDW = "default::DeprecationWarning"
-    if pw != DDW:
-        print("note: $PYTHONWARNINGS is '%s', not the expected %s" % (pw, DDW))
-        sys.stdout.flush()
-
    pp = RunPP()
    pp.d = defer.Deferred()
    pp.stdout = io.BytesIO()
misc/build_helpers/show-tool-versions.py
@@ -143,7 +143,6 @@ print_py_pkg_ver('coverage')
 print_py_pkg_ver('cryptography')
 print_py_pkg_ver('foolscap')
 print_py_pkg_ver('mock')
-print_py_pkg_ver('Nevow', 'nevow')
 print_py_pkg_ver('pyasn1')
 print_py_pkg_ver('pycparser')
 print_py_pkg_ver('cryptography')
53 misc/python3/Makefile Normal file
@@ -0,0 +1,53 @@
+# Python 3 porting targets
+#
+# NOTE: this Makefile requires GNU make
+
+### Defensive settings for make:
+#     https://tech.davis-hansson.com/p/make/
+SHELL := bash
+.ONESHELL:
+.SHELLFLAGS := -xeu -o pipefail -c
+.SILENT:
+.DELETE_ON_ERROR:
+MAKEFLAGS += --warn-undefined-variables
+MAKEFLAGS += --no-builtin-rules
+
+
+# Top-level, phony targets
+
+.PHONY: default
+default:
+	@echo "no default target"
+
+.PHONY: test-py3-all-before
+## Log the output of running all tests under Python 3 before changes
+test-py3-all-before: ../../.tox/make-test-py3-all-old.log
+.PHONY: test-py3-all-diff
+## Compare the output of running all tests under Python 3 after changes
+test-py3-all-diff: ../../.tox/make-test-py3-all.diff
+
+
+# Real targets
+
+# Gauge the impact of changes on Python 3 compatibility
+# Compare the output from running all tests under Python 3 before and after changes.
+# Before changes:
+#   `$ rm -f .tox/make-test-py3-all-*.log && make .tox/make-test-py3-all-old.log`
+# After changes:
+#   `$ make .tox/make-test-py3-all.diff`
+$(foreach side,old new,../../.tox/make-test-py3-all-$(side).log):
+	cd "../../"
+	tox --develop --notest -e py36-coverage
+	(make VIRTUAL_ENV=./.tox/py36-coverage TEST_SUITE=allmydata \
+	    test-venv-coverage || true) | \
+	    sed -E 's/\([0-9]+\.[0-9]{3} secs\)/(#.### secs)/' | \
+	    tee "./misc/python3/$(@)"
+
+../../.tox/make-test-py3-all.diff: ../../.tox/make-test-py3-all-new.log
+	(diff -u "$(<:%-new.log=%-old.log)" "$(<)" || true) | tee "$(@)"
+
+# Locate modules that are candidates for naively converting `unicode` -> `str`.
+# List all Python source files that reference `unicode` but don't reference `str`
+../../.tox/py3-unicode-no-str.ls:
+	cd "../../"
+	find src -type f -iname '*.py' -exec grep -l -E '\Wunicode\W' '{}' ';' | \
+	    xargs grep -L '\Wstr\W' | xargs ls -ld | tee "./misc/python3/$(@)"
0 newsfragments/2928.minor Normal file
0 newsfragments/3283.minor Normal file
0 newsfragments/3314.minor Normal file
0 newsfragments/3428.minor Normal file
0 newsfragments/3432.minor Normal file
1 newsfragments/3433.installation Normal file
@@ -0,0 +1 @@
+Tahoe-LAFS no longer depends on Nevow.
0 newsfragments/3434.minor Normal file
0 newsfragments/3435.minor Normal file
1 newsfragments/3448.minor Normal file
@@ -0,0 +1 @@
+Convert modules that only reference `unicode` to use `str`.
0 newsfragments/3453.minor Normal file
0 newsfragments/3454.minor Normal file
1 newsfragments/3455.minor Normal file
@@ -0,0 +1 @@
+Begin porting the `node` module to Python 3.
0 newsfragments/3456.minor Normal file
0 newsfragments/3458.minor Normal file
0 newsfragments/3459.minor Normal file
0 newsfragments/3460.minor Normal file
0 newsfragments/3462.minor Normal file
0 newsfragments/3463.minor Normal file
1 newsfragments/3464.minor Normal file
@@ -0,0 +1 @@
+Cleanup comments that don't match the project convention.
0 newsfragments/3465.minor Normal file
0 newsfragments/3466.minor Normal file
0 newsfragments/3467.minor Normal file
0 newsfragments/3468.minor Normal file
0 newsfragments/3470.minor Normal file
0 newsfragments/3471.minor Normal file
0 newsfragments/3472.minor Normal file
0 newsfragments/3473.minor Normal file
0 newsfragments/3474.minor Normal file
0 newsfragments/3475.minor Normal file
0 newsfragments/3479.minor Normal file
0 newsfragments/3481.minor Normal file
0 newsfragments/3482.minor Normal file
0 newsfragments/3483.minor Normal file
0 newsfragments/3485.minor Normal file
1 newsfragments/3486.installation Normal file
@@ -0,0 +1 @@
+Tahoe-LAFS now requires the `netifaces` Python package and no longer requires the external `ip`, `ifconfig`, or `route.exe` executables.
0 newsfragments/3488.minor Normal file
0 newsfragments/3490.minor Normal file
0 newsfragments/3491.minor Normal file
0 newsfragments/3492.minor Normal file
0 newsfragments/3493.minor Normal file
0 newsfragments/3496.minor Normal file
0 newsfragments/3499.minor Normal file
0 newsfragments/3500.minor Normal file
0 newsfragments/3501.minor Normal file
nix/nevow.nix (deleted)
@@ -1,45 +0,0 @@
-{ stdenv, buildPythonPackage, fetchPypi, isPy3k, twisted }:
-
-buildPythonPackage rec {
-  pname = "Nevow";
-  version = "0.14.5";
-  name = "${pname}-${version}";
-  disabled = isPy3k;
-
-  src = fetchPypi {
-    inherit pname;
-    inherit version;
-    sha256 = "1wr3fai01h1bcp4qpia6indg4qmxvywwv3q1iibm669mln2vmdmg";
-  };
-
-  propagatedBuildInputs = [ twisted ];
-
-  checkInputs = [ twisted ];
-
-  checkPhase = ''
-    trial formless nevow
-  '';
-
-  meta = with stdenv.lib; {
-    description = "Nevow, a web application construction kit for Python";
-    longDescription = ''
-      Nevow - Pronounced as the French "nouveau", or "noo-voh", Nevow
-      is a web application construction kit written in Python.  It is
-      designed to allow the programmer to express as much of the view
-      logic as desired in Python, and includes a pure Python XML
-      expression syntax named stan to facilitate this.  However it
-      also provides rich support for designer-edited templates, using
-      a very small XML attribute language to provide bi-directional
-      template manipulation capability.
-
-      Nevow also includes formless, a declarative syntax for
-      specifying the types of method parameters and exposing these
-      methods to the web.  Forms can be rendered automatically, and
-      form posts will be validated and input coerced, rendering error
-      pages if appropriate.  Once a form post has validated
-      successfully, the method will be called with the coerced values.
-    '';
-    homepage = https://github.com/twisted/nevow;
-    license = licenses.mit;
-  };
-}
nix/overlays.nix
@@ -3,10 +3,7 @@ self: super: {
   packageOverrides = python-self: python-super: {
     # eliot is not part of nixpkgs at all at this time.
     eliot = python-self.callPackage ./eliot.nix { };
-    # The packaged version of Nevow is very slightly out of date but also
-    # conflicts with the packaged version of Twisted.  Supply our own
-    # slightly newer version.
-    nevow = python-super.callPackage ./nevow.nix { };
     # NixOS autobahn package has trollius as a dependency, although
     # it is optional.  Trollius is unmaintained and fails on CI.
     autobahn = python-super.callPackage ./autobahn.nix { };
nix/tahoe-lafs.nix
@@ -1,10 +1,10 @@
 { fetchFromGitHub, lib
-, nettools, python
-, twisted, foolscap, nevow, zfec
+, python
+, twisted, foolscap, zfec
 , setuptools, setuptoolsTrial, pyasn1, zope_interface
 , service-identity, pyyaml, magic-wormhole, treq, appdirs
-, beautifulsoup4, eliot, autobahn, cryptography
-, html5lib, pyutil, distro
+, beautifulsoup4, eliot, autobahn, cryptography, netifaces
+, html5lib, pyutil, distro, configparser
 }:
 python.pkgs.buildPythonPackage rec {
   version = "1.14.0.dev";
@@ -41,16 +41,12 @@ python.pkgs.buildPythonPackage rec {
   '';
 
-  propagatedNativeBuildInputs = [
-    nettools
-  ];
-
   propagatedBuildInputs = with python.pkgs; [
-    twisted foolscap nevow zfec appdirs
+    twisted foolscap zfec appdirs
     setuptoolsTrial pyasn1 zope_interface
     service-identity pyyaml magic-wormhole treq
-    eliot autobahn cryptography setuptools
-    future pyutil distro
+    eliot autobahn cryptography netifaces setuptools
+    future pyutil distro configparser
   ];
 
   checkInputs = with python.pkgs; [
13 setup.py
@@ -38,8 +38,7 @@ install_requires = [
     "zfec >= 1.1.0",
 
     # zope.interface >= 3.6.0 is required for Twisted >= 12.1.0.
-    # zope.interface 3.6.3 and 3.6.4 are incompatible with Nevow (#1435).
-    "zope.interface >= 3.6.0, != 3.6.3, != 3.6.4",
+    "zope.interface >= 3.6.0",
 
     # * foolscap < 0.5.1 had a performance bug which spent O(N**2) CPU for
     #   transferring large mutable files of size N.
@@ -70,7 +69,6 @@ install_requires = [
     #   rekeying bug <https://twistedmatrix.com/trac/ticket/4395>
     # * The FTP frontend depends on Twisted >= 11.1.0 for
     #   filepath.Permissions
-    # * Nevow 0.11.1 depends on Twisted >= 13.0.0.
     # * The SFTP frontend and manhole depend on the conch extra. However, we
     #   can't explicitly declare that without an undesirable dependency on gmpy,
     #   as explained in ticket #2740.
@@ -102,9 +100,6 @@ install_requires = [
     #   an sftp extra in Tahoe-LAFS, there is no point in having one.
     "Twisted[tls,conch] >= 18.4.0",
 
-    # We need Nevow >= 0.11.1 which can be installed using pip.
-    "Nevow >= 0.11.1",
-
     "PyYAML >= 3.11",
 
     "six >= 1.10.0",
@@ -131,11 +126,17 @@ install_requires = [
     # Support for Python 3 transition
     "future >= 0.18.2",
 
+    # Discover local network configuration
+    "netifaces",
+
     # Utility code:
     "pyutil >= 3.3.0",
 
     # Linux distribution detection:
     "distro >= 1.4.0",
+
+    # Backported configparser for Python 2:
+    "configparser ; python_version < '3.0'",
 ]
 
 setup_requires = [
src/allmydata/__init__.py
@@ -42,3 +42,9 @@ __full_version__ = __appname__ + '/' + str(__version__)
 # Install Python 3 module locations in Python 2:
 from future import standard_library
 standard_library.install_aliases()
+
+
+# Monkey-patch 3rd party libraries:
+from ._monkeypatch import patch
+patch()
+del patch
src/allmydata/__main__.py
@@ -1,3 +1,16 @@
+"""
+Ported to Python 3.
+"""
+
+from __future__ import unicode_literals
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
 import sys
 
 from allmydata.scripts.runner import run
@@ -1,3 +1,16 @@
+"""
+Ported to Python 3.
+"""
+
+from __future__ import unicode_literals
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
 # Note: please minimize imports in this file. In particular, do not import
 # any module from Tahoe-LAFS or its dependencies, and do not import any
 # modules at all at global level. That includes setuptools and pkg_resources.
@@ -11,7 +24,6 @@ package_imports = [
     ('foolscap', 'foolscap'),
     ('zfec', 'zfec'),
     ('Twisted', 'twisted'),
-    ('Nevow', 'nevow'),
     ('zope.interface', 'zope.interface'),
     ('python', None),
     ('platform', None),
@@ -72,7 +84,6 @@ runtime_warning_messages = [
 ]
 
 warning_imports = [
-    'nevow',
     'twisted.persisted.sob',
     'twisted.python.filepath',
 ]
48 src/allmydata/_monkeypatch.py Normal file
@@ -0,0 +1,48 @@
+"""
+Monkey-patching of third party libraries.
+
+Ported to Python 3.
+"""
+
+from __future__ import unicode_literals
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
+from warnings import catch_warnings
+
+
+def patch():
+    """Path third-party libraries to make Tahoe-LAFS work."""
+    # Make sure Foolscap always get native strings passed to method names in callRemote.
+    # This can be removed when any one of the following happens:
+    #
+    # 1. Tahoe-LAFS on Python 2 switches to version of Foolscap that fixes
+    #    https://github.com/warner/foolscap/issues/72
+    # 2. Foolscap is dropped as a dependency.
+    # 3. Tahoe-LAFS drops Python 2 support.
+
+    if not PY2:
+        # Python 3 doesn't need to monkey patch Foolscap
+        return
+
+    # We need to suppress warnings so as to prevent unexpected output from
+    # breaking some integration tests.
+    with catch_warnings(record=True):
+        # Only tested with this version; ensure correctness with new releases,
+        # and then either update the assert or hopefully drop the monkeypatch.
+        from foolscap import __version__
+        assert __version__ == "0.13.1", "Wrong version %s of Foolscap" % (__version__,)
+
+        from foolscap.referenceable import RemoteReference
+        original_getMethodInfo = RemoteReference._getMethodInfo
+
+        def _getMethodInfo(self, name):
+            if isinstance(name, str):
+                name = name.encode("utf-8")
+            return original_getMethodInfo(self, name)
+        RemoteReference._getMethodInfo = _getMethodInfo
src/allmydata/check_results.py
@@ -1,3 +1,4 @@
+from past.builtins import unicode
+
 from zope.interface import implementer
 from allmydata.interfaces import ICheckResults, ICheckAndRepairResults, \
@@ -56,7 +57,11 @@ class CheckResults(object):
         self._list_incompatible_shares = list_incompatible_shares
         self._count_incompatible_shares = count_incompatible_shares
 
-        assert isinstance(summary, str) # should be a single string
+        # On Python 2, we can mix bytes and Unicode.  On Python 3, we want
+        # unicode.
+        if isinstance(summary, bytes):
+            summary = unicode(summary, "utf-8")
+        assert isinstance(summary, unicode) # should be a single string
         self._summary = summary
         assert not isinstance(report, str) # should be list of strings
         self._report = report
@ -7,10 +7,9 @@ from allmydata import node
|
|||||||
from base64 import urlsafe_b64encode
|
from base64 import urlsafe_b64encode
|
||||||
from functools import partial
|
from functools import partial
|
||||||
from errno import ENOENT, EPERM
|
from errno import ENOENT, EPERM
|
||||||
try:
|
|
||||||
from ConfigParser import NoSectionError
|
# On Python 2 this will be the backported package:
|
||||||
except ImportError:
|
from configparser import NoSectionError
|
||||||
from configparser import NoSectionError
|
|
||||||
|
|
||||||
from foolscap.furl import (
|
from foolscap.furl import (
|
||||||
decode_furl,
|
decode_furl,
|
||||||
@ -40,8 +39,7 @@ from allmydata.util import (
|
|||||||
hashutil, base32, pollmixin, log, idlib,
|
hashutil, base32, pollmixin, log, idlib,
|
||||||
yamlutil, configutil,
|
yamlutil, configutil,
|
||||||
)
|
)
|
||||||
from allmydata.util.encodingutil import (get_filesystem_encoding,
|
from allmydata.util.encodingutil import get_filesystem_encoding
|
||||||
from_utf8_or_none)
|
|
||||||
from allmydata.util.abbreviate import parse_abbreviated_size
|
from allmydata.util.abbreviate import parse_abbreviated_size
|
||||||
from allmydata.util.time_format import parse_duration, parse_date
|
from allmydata.util.time_format import parse_duration, parse_date
|
||||||
from allmydata.util.i2p_provider import create as create_i2p_provider
|
from allmydata.util.i2p_provider import create as create_i2p_provider
|
||||||
@ -73,8 +71,8 @@ def _is_valid_section(section_name):
|
|||||||
Currently considers all possible storage server plugin sections valid.
|
Currently considers all possible storage server plugin sections valid.
|
||||||
"""
|
"""
|
||||||
return (
|
return (
|
||||||
section_name.startswith(b"storageserver.plugins.") or
|
section_name.startswith("storageserver.plugins.") or
|
||||||
section_name.startswith(b"storageclient.plugins.")
|
section_name.startswith("storageclient.plugins.")
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@ -143,7 +141,7 @@ def _valid_config():
|
|||||||
return cfg.update(_client_config)
|
return cfg.update(_client_config)
|
||||||
|
|
||||||
# this is put into README in new node-directories
|
# this is put into README in new node-directories
|
||||||
CLIENT_README = """
|
CLIENT_README = u"""
|
||||||
This directory contains files which contain private data for the Tahoe node,
|
This directory contains files which contain private data for the Tahoe node,
|
||||||
such as private keys. On Unix-like systems, the permissions on this directory
|
such as private keys. On Unix-like systems, the permissions on this directory
|
||||||
are set to disallow users other than its owner from reading the contents of
|
are set to disallow users other than its owner from reading the contents of
|
||||||
@ -524,7 +522,7 @@ def create_introducer_clients(config, main_tub, _introducer_factory=None):
|
|||||||
config.nickname,
|
config.nickname,
|
||||||
str(allmydata.__full_version__),
|
str(allmydata.__full_version__),
|
||||||
str(_Client.OLDEST_SUPPORTED_VERSION),
|
str(_Client.OLDEST_SUPPORTED_VERSION),
|
||||||
node.get_app_versions(),
|
list(node.get_app_versions()),
|
||||||
partial(_sequencer, config),
|
partial(_sequencer, config),
|
||||||
introducer_cache_filepath,
|
introducer_cache_filepath,
|
||||||
)
|
)
|
||||||
@ -683,7 +681,7 @@ def storage_enabled(config):
|
|||||||
|
|
||||||
:return bool: ``True`` if storage is enabled, ``False`` otherwise.
|
:return bool: ``True`` if storage is enabled, ``False`` otherwise.
|
||||||
"""
|
"""
|
||||||
return config.get_config(b"storage", b"enabled", True, boolean=True)
|
return config.get_config("storage", "enabled", True, boolean=True)
|
||||||
|
|
||||||
|
|
||||||
def anonymous_storage_enabled(config):
|
def anonymous_storage_enabled(config):
|
||||||
@ -697,7 +695,7 @@ def anonymous_storage_enabled(config):
|
|||||||
"""
|
"""
|
||||||
return (
|
return (
|
||||||
storage_enabled(config) and
|
storage_enabled(config) and
|
||||||
config.get_config(b"storage", b"anonymous", True, boolean=True)
|
config.get_config("storage", "anonymous", True, boolean=True)
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@ -774,6 +772,9 @@ class _Client(node.Node, pollmixin.PollMixin):
|
|||||||
|
|
||||||
def init_stats_provider(self):
|
def init_stats_provider(self):
|
||||||
gatherer_furl = self.config.get_config("client", "stats_gatherer.furl", None)
|
gatherer_furl = self.config.get_config("client", "stats_gatherer.furl", None)
|
||||||
|
if gatherer_furl:
|
||||||
|
# FURLs should be bytes:
|
||||||
|
gatherer_furl = gatherer_furl.encode("utf-8")
|
||||||
self.stats_provider = StatsProvider(self, gatherer_furl)
|
self.stats_provider = StatsProvider(self, gatherer_furl)
|
||||||
self.stats_provider.setServiceParent(self)
|
self.stats_provider.setServiceParent(self)
|
||||||
self.stats_provider.register_producer(self)
|
self.stats_provider.register_producer(self)
|
||||||
@ -836,7 +837,7 @@ class _Client(node.Node, pollmixin.PollMixin):
|
|||||||
vk_string = ed25519.string_from_verifying_key(self._node_public_key)
|
vk_string = ed25519.string_from_verifying_key(self._node_public_key)
|
||||||
vk_bytes = remove_prefix(vk_string, ed25519.PUBLIC_KEY_PREFIX)
|
vk_bytes = remove_prefix(vk_string, ed25519.PUBLIC_KEY_PREFIX)
|
||||||
seed = base32.b2a(vk_bytes)
|
seed = base32.b2a(vk_bytes)
|
||||||
self.config.write_config_file("permutation-seed", seed+"\n")
|
self.config.write_config_file("permutation-seed", seed+b"\n", mode="wb")
|
||||||
return seed.strip()
|
return seed.strip()
|
||||||
|
|
||||||
def get_anonymous_storage_server(self):
|
def get_anonymous_storage_server(self):
|
||||||
@ -861,7 +862,7 @@ class _Client(node.Node, pollmixin.PollMixin):
|
|||||||
|
|
||||||
config_storedir = self.get_config(
|
config_storedir = self.get_config(
|
||||||
"storage", "storage_dir", self.STOREDIR,
|
"storage", "storage_dir", self.STOREDIR,
|
||||||
).decode('utf-8')
|
)
|
||||||
storedir = self.config.get_config_path(config_storedir)
|
storedir = self.config.get_config_path(config_storedir)
|
||||||
|
|
||||||
data = self.config.get_config("storage", "reserved_space", None)
|
data = self.config.get_config("storage", "reserved_space", None)
|
||||||
@ -1002,6 +1003,10 @@ class _Client(node.Node, pollmixin.PollMixin):
|
|||||||
if helper_furl in ("None", ""):
|
if helper_furl in ("None", ""):
|
||||||
helper_furl = None
|
helper_furl = None
|
||||||
|
|
||||||
|
# FURLs need to be bytes:
|
||||||
|
if helper_furl is not None:
|
||||||
|
helper_furl = helper_furl.encode("utf-8")
|
||||||
|
|
||||||
DEP = self.encoding_params
|
DEP = self.encoding_params
|
||||||
DEP["k"] = int(self.config.get_config("client", "shares.needed", DEP["k"]))
|
DEP["k"] = int(self.config.get_config("client", "shares.needed", DEP["k"]))
|
||||||
DEP["n"] = int(self.config.get_config("client", "shares.total", DEP["n"]))
|
DEP["n"] = int(self.config.get_config("client", "shares.total", DEP["n"]))
@@ -1088,7 +1093,7 @@ class _Client(node.Node, pollmixin.PollMixin):
         c = ControlServer()
         c.setServiceParent(self)
         control_url = self.control_tub.registerReference(c)
-        self.config.write_private_config("control.furl", control_url + b"\n")
+        self.config.write_private_config("control.furl", control_url + "\n")

     def init_helper(self):
         self.helper = Helper(self.config.get_config_path("helper"),

@@ -1110,15 +1115,14 @@ class _Client(node.Node, pollmixin.PollMixin):

         from allmydata.webish import WebishServer
         nodeurl_path = self.config.get_config_path("node.url")
-        staticdir_config = self.config.get_config("node", "web.static", "public_html").decode("utf-8")
+        staticdir_config = self.config.get_config("node", "web.static", "public_html")
         staticdir = self.config.get_config_path(staticdir_config)
         ws = WebishServer(self, webport, nodeurl_path, staticdir)
         ws.setServiceParent(self)

     def init_ftp_server(self):
         if self.config.get_config("ftpd", "enabled", False, boolean=True):
-            accountfile = from_utf8_or_none(
-                self.config.get_config("ftpd", "accounts.file", None))
+            accountfile = self.config.get_config("ftpd", "accounts.file", None)
             if accountfile:
                 accountfile = self.config.get_config_path(accountfile)
             accounturl = self.config.get_config("ftpd", "accounts.url", None)

@@ -1130,14 +1134,13 @@ class _Client(node.Node, pollmixin.PollMixin):

     def init_sftp_server(self):
         if self.config.get_config("sftpd", "enabled", False, boolean=True):
-            accountfile = from_utf8_or_none(
-                self.config.get_config("sftpd", "accounts.file", None))
+            accountfile = self.config.get_config("sftpd", "accounts.file", None)
             if accountfile:
                 accountfile = self.config.get_config_path(accountfile)
             accounturl = self.config.get_config("sftpd", "accounts.url", None)
             sftp_portstr = self.config.get_config("sftpd", "port", "8022")
-            pubkey_file = from_utf8_or_none(self.config.get_config("sftpd", "host_pubkey_file"))
-            privkey_file = from_utf8_or_none(self.config.get_config("sftpd", "host_privkey_file"))
+            pubkey_file = self.config.get_config("sftpd", "host_pubkey_file")
+            privkey_file = self.config.get_config("sftpd", "host_privkey_file")

             from allmydata.frontends import sftpd
             s = sftpd.SFTPServer(self, accountfile, accounturl,
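These hunks drop the `from_utf8_or_none()` wrappers because, with Python 3's configparser, config values already arrive as text. A hedged sketch of what such a wrapper would have done on Python 2, where values could be UTF-8 bytes (reconstructed behavior, not the original source):

    # Hypothetical reconstruction of the removed wrapper's contract:
    # decode bytes, pass text and None through unchanged.
    def from_utf8_or_none(s):
        if isinstance(s, bytes):
            return s.decode("utf-8")
        return s

    assert from_utf8_or_none(None) is None
    assert from_utf8_or_none(b"public_html") == "public_html"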
@@ -1,3 +1,4 @@
+from six import ensure_str

 from types import NoneType

@@ -333,5 +334,7 @@ class FTPServer(service.MultiService):
             raise NeedRootcapLookupScheme("must provide some translation")

         f = ftp.FTPFactory(p)
+        # strports requires a native string.
+        ftp_portstr = ensure_str(ftp_portstr)
         s = strports.service(ftp_portstr, f)
         s.setServiceParent(self)
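`six.ensure_str()` returns the platform's native `str` type on both Pythons, which is what Twisted's strports parser expects. A small illustration:

    from six import ensure_str

    # On Python 3 this decodes bytes; on Python 2 it would encode unicode.
    # Either way the result is the native str type strports can parse.
    assert ensure_str(b"tcp:8021") == "tcp:8021"
    assert ensure_str(u"tcp:8021") == "tcp:8021"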
@@ -616,7 +616,7 @@ class Checker(log.PrefixingLogMixin):
         d.addCallback(_got_ueb)

         def _discard_result(r):
-            assert isinstance(r, str), r
+            assert isinstance(r, bytes), r
             # to free up the RAM
             return None

@@ -1,3 +1,14 @@
+"""
+Ported to Python 3.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
 import struct
 import time

@@ -1,5 +1,18 @@
 # -*- test-case-name: allmydata.test.test_encode -*-

+"""
+Ported to Python 3.
+"""
+
+from __future__ import division
+from __future__ import absolute_import
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
 import time
 from zope.interface import implementer
 from twisted.internet import defer

@@ -468,7 +481,7 @@ class Encoder(object):
                  (self,
                   self.segment_size*(segnum+1),
                   self.segment_size*self.num_segments,
-                  100 * (segnum+1) / self.num_segments,
+                  100 * (segnum+1) // self.num_segments,
                   ),
                  level=log.OPERATIONAL)
         elapsed = time.time() - start
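With `from __future__ import division` in effect, `/` becomes true division even between integers, so the progress figure above is switched to `//` to keep the old integer result. A quick demonstration:

    from __future__ import division

    segnum, num_segments = 2, 7
    # True division yields a float; floor division keeps the Python 2
    # integer-percentage behaviour the log line relied on.
    assert 100 * (segnum + 1) / num_segments != 100 * (segnum + 1) // num_segments
    assert 100 * (segnum + 1) // num_segments == 42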
@@ -1,4 +1,17 @@
+"""
+Ported to Python 3.
+"""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
+from functools import reduce
 import binascii
 from time import time as now

@@ -139,7 +152,6 @@ class CiphertextFileNode(object):
             for server in servers:
                 sm.add(shnum, server)
                 servers_responding.add(server)
-        servers_responding = sorted(servers_responding)

         good_hosts = len(reduce(set.union, sm.values(), set()))
         is_healthy = bool(len(sm) >= verifycap.total_shares)

@@ -1,3 +1,15 @@
+"""
+Ported to Python 3.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
 import struct
 from zope.interface import implementer
 from twisted.internet import defer

@@ -1,3 +1,14 @@
+"""
+Ported to Python 3.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
 import os, stat, time, weakref
 from zope.interface import implementer

@@ -25,10 +36,11 @@ class CHKCheckerAndUEBFetcher(object):
     less than 'N' shares present.

     If the file is completely healthy, I return a tuple of (sharemap,
-    UEB_data, UEB_hash).
+    UEB_data, UEB_hash). A sharemap is a dict with share numbers as keys and
+    sets of server ids (which hold that share) as values.
     """

-    def __init__(self, peer_getter, storage_index, logparent=None):
+    def __init__(self, peer_getter, storage_index, logparent):
         self._peer_getter = peer_getter
         self._found_shares = set()
         self._storage_index = storage_index
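The expanded docstring pins down the sharemap's shape. An illustrative value built to that description (the server ids are made up):

    # Share numbers map to the set of server ids holding that share.
    sharemap = {
        0: {b"serverid-a", b"serverid-b"},
        1: {b"serverid-b"},
    }
    # The surrounding code treats the file as completely healthy only when
    # at least N distinct shares are present (N == verifycap.total_shares).
    N = 2
    assert len(sharemap) >= N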
@@ -46,6 +58,12 @@ class CHKCheckerAndUEBFetcher(object):
         return log.msg(*args, **kwargs)

     def check(self):
+        """
+        :return Deferred[bool|(DictOfSets, dict, bytes)]: If no share can be found
+            with a usable UEB block or fewer than N shares can be found then the
+            Deferred fires with ``False``. Otherwise, it fires with a tuple of
+            the sharemap, the UEB data, and the UEB hash.
+        """
         d = self._get_all_shareholders(self._storage_index)
         d.addCallback(self._get_uri_extension)
         d.addCallback(self._done)

@@ -128,9 +146,9 @@ class CHKUploadHelper(Referenceable, upload.CHKUploader):
     peer selection, encoding, and share pushing. I read ciphertext from the
     remote AssistedUploader.
     """
-    VERSION = { "http://allmydata.org/tahoe/protocols/helper/chk-upload/v1" :
+    VERSION = { b"http://allmydata.org/tahoe/protocols/helper/chk-upload/v1" :
                  { },
-                "application-version": str(allmydata.__full_version__),
+                b"application-version": allmydata.__full_version__.encode("utf-8"),
                 }

     def __init__(self, storage_index,

@@ -485,6 +503,19 @@ class LocalCiphertextReader(AskUntilSuccessMixin):

 @implementer(interfaces.RIHelper, interfaces.IStatsProducer)
 class Helper(Referenceable):
+    """
+    :ivar dict[bytes, CHKUploadHelper] _active_uploads: For any uploads which
+        have been started but not finished, a mapping from storage index to the
+        upload helper.
+
+    :ivar chk_checker: A callable which returns an object like a
+        CHKCheckerAndUEBFetcher instance which can check CHK shares.
+        Primarily for the convenience of tests to override.
+
+    :ivar chk_upload: A callable which returns an object like a
+        CHKUploadHelper instance which can upload CHK shares. Primarily for
+        the convenience of tests to override.
+    """
     # this is the non-distributed version. When we need to have multiple
     # helpers, this object will become the HelperCoordinator, and will query
     # the farm of Helpers to see if anyone has the storage_index of interest,

@@ -498,6 +529,9 @@ class Helper(Referenceable):
                  }
     MAX_UPLOAD_STATUSES = 10

+    chk_checker = CHKCheckerAndUEBFetcher
+    chk_upload = CHKUploadHelper
+
     def __init__(self, basedir, storage_broker, secret_holder,
                  stats_provider, history):
         self._basedir = basedir
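Hoisting the checker and uploader classes into `chk_checker` / `chk_upload` class attributes lets a test substitute a double without monkey-patching the module. A hedged sketch of that pattern (both classes here are hypothetical stand-ins, not Tahoe code):

    # A fake checker a test might install in place of CHKCheckerAndUEBFetcher.
    class MemoryChecker(object):
        def __init__(self, peer_getter, storage_index, logparent):
            self.storage_index = storage_index
        def check(self):
            return False  # pretend no shares were found

    class StubHelper(object):
        chk_checker = MemoryChecker  # the attribute tests would override

    helper = StubHelper()
    checker = helper.chk_checker(None, b"storage-index", None)
    assert checker.check() is False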
@@ -569,6 +603,9 @@ class Helper(Referenceable):
         return self.VERSION

     def remote_upload_chk(self, storage_index):
+        """
+        See ``RIHelper.upload_chk``
+        """
         self.count("chk_upload_helper.upload_requests")
         lp = self.log(format="helper: upload_chk query for SI %(si)s",
                       si=si_b2a(storage_index))

@@ -591,7 +628,7 @@ class Helper(Referenceable):
             lp2 = self.log("doing a quick check+UEBfetch",
                            parent=lp, level=log.NOISY)
             sb = self._storage_broker
-            c = CHKCheckerAndUEBFetcher(sb.get_servers_for_psi, storage_index, lp2)
+            c = self.chk_checker(sb.get_servers_for_psi, storage_index, lp2)
             d = c.check()
             def _checked(res):
                 if res:

@@ -633,14 +670,18 @@ class Helper(Referenceable):
         return (None, uh)

     def _make_chk_upload_helper(self, storage_index, lp):
-        si_s = si_b2a(storage_index)
+        si_s = si_b2a(storage_index).decode('ascii')
         incoming_file = os.path.join(self._chk_incoming, si_s)
         encoding_file = os.path.join(self._chk_encoding, si_s)
-        uh = CHKUploadHelper(storage_index, self,
-                             self._storage_broker,
-                             self._secret_holder,
-                             incoming_file, encoding_file,
-                             lp)
+        uh = self.chk_upload(
+            storage_index,
+            self,
+            self._storage_broker,
+            self._secret_holder,
+            incoming_file,
+            encoding_file,
+            lp,
+        )
         return uh

     def _add_upload(self, uh):

@@ -1,3 +1,15 @@
+"""
+Ported to Python 3.
+"""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2, native_str
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
 from past.builtins import long, unicode

 import os, time, weakref, itertools
@@ -66,7 +78,7 @@ def _serialize_existing_shares(existing_shares):
     return {
         server: list(shares)
         for (server, shares)
-        in existing_shares.iteritems()
+        in existing_shares.items()
     }

 _EXISTING_SHARES = Field(

@@ -79,7 +91,7 @@ def _serialize_happiness_mappings(happiness_mappings):
     return {
         sharenum: base32.b2a(serverid)
        for (sharenum, serverid)
-        in happiness_mappings.iteritems()
+        in happiness_mappings.items()
     }

 _HAPPINESS_MAPPINGS = Field(
@@ -150,7 +162,9 @@ class HelperUploadResults(Copyable, RemoteCopy):
     # note: don't change this string, it needs to match the value used on the
     # helper, and it does *not* need to match the fully-qualified
     # package/module/class name
-    typeToCopy = "allmydata.upload.UploadResults.tahoe.allmydata.com"
+    #
+    # Needs to be native string to make Foolscap happy.
+    typeToCopy = native_str("allmydata.upload.UploadResults.tahoe.allmydata.com")
     copytype = typeToCopy

     # also, think twice about changing the shape of any existing attribute,
@@ -283,7 +297,7 @@ class ServerTracker(object):
         #log.msg("%s._got_reply(%s)" % (self, (alreadygot, buckets)))
         (alreadygot, buckets) = alreadygot_and_buckets
         b = {}
-        for sharenum, rref in buckets.items():
+        for sharenum, rref in list(buckets.items()):
             bp = self.wbp_class(rref, self._server, self.sharesize,
                                 self.blocksize,
                                 self.num_segments,

@@ -780,7 +794,7 @@ class Tahoe2ServerSelector(log.PrefixingLogMixin):

         shares_to_ask = set()
         servermap = self._share_placements
-        for shnum, tracker_id in servermap.items():
+        for shnum, tracker_id in list(servermap.items()):
             if tracker_id == None:
                 continue
             if tracker.get_serverid() == tracker_id:
@@ -1574,7 +1588,7 @@ class AssistedUploader(object):
         # abbreviated), so if we detect old results, just clobber them.

         sharemap = upload_results.sharemap
-        if str in [type(v) for v in sharemap.values()]:
+        if any(isinstance(v, (bytes, unicode)) for v in sharemap.values()):
             upload_results.sharemap = None

     def _build_verifycap(self, helper_upload_results):

@@ -1,3 +1,5 @@
+from past.builtins import unicode
+
 import time
 from zope.interface import implementer
 from twisted.application import service

@@ -178,7 +180,7 @@ class IntroducerClient(service.Service, Referenceable):
         self._subscriptions.add(service_name)
         self._debug_outstanding += 1
         d = self._publisher.callRemote("subscribe_v2",
-                                       self, service_name,
+                                       self, service_name.encode("utf-8"),
                                        self._my_subscriber_info)
         d.addBoth(self._debug_retired)
         d.addErrback(log.err, facility="tahoe.introducer.client",

@@ -1,3 +1,14 @@
+"""
+Ported to Python 3.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
 from allmydata.uri import from_string
 from allmydata.util import base32, log, dictutil

@@ -187,7 +198,7 @@ class MutableChecker(object):
         if self.bad_shares:
             report.append("Corrupt Shares:")
             summary.append("Corrupt Shares:")
-        for (server, shnum, f) in sorted(self.bad_shares):
+        for (server, shnum, f) in sorted(self.bad_shares, key=id):
             serverid = server.get_serverid()
             locator = (server, self._storage_index, shnum)
             corrupt_share_locators.append(locator)

@@ -1,3 +1,14 @@
+"""
+Ported to Python 3.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
 MODE_CHECK = "MODE_CHECK" # query all peers
 MODE_ANYTHING = "MODE_ANYTHING" # one recoverable version

@@ -1,3 +1,15 @@
+"""
+Ported to Python 3.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
 import random

 from zope.interface import implementer

@@ -147,9 +159,9 @@ class MutableFileNode(object):

     def _get_initial_contents(self, contents):
         if contents is None:
-            return MutableData("")
+            return MutableData(b"")

-        if isinstance(contents, str):
+        if isinstance(contents, bytes):
             return MutableData(contents)

         if IMutableUploadable.providedBy(contents):

@@ -884,9 +896,9 @@ class MutableFileVersion(object):
         d = self._try_to_download_data()
         def _apply(old_contents):
             new_contents = modifier(old_contents, self._servermap, first_time)
-            precondition((isinstance(new_contents, str) or
+            precondition((isinstance(new_contents, bytes) or
                           new_contents is None),
-                         "Modifier function must return a string "
+                         "Modifier function must return bytes "
                          "or None")

             if new_contents is None or new_contents == old_contents:

@@ -960,7 +972,7 @@ class MutableFileVersion(object):
         c = consumer.MemoryConsumer()
         # modify will almost certainly write, so we need the privkey.
         d = self._read(c, fetch_privkey=True)
-        d.addCallback(lambda mc: "".join(mc.chunks))
+        d.addCallback(lambda mc: b"".join(mc.chunks))
         return d


@@ -1076,7 +1088,7 @@ class MutableFileVersion(object):
             start = offset
             rest = offset + data.get_size()
             new = old[:start]
-            new += "".join(data.read(data.get_size()))
+            new += b"".join(data.read(data.get_size()))
             new += old[rest:]
             return new
         return self._modify(m, None)

@@ -1141,7 +1153,7 @@ class MutableFileVersion(object):
         start_segments = {} # shnum -> start segment
         end_segments = {} # shnum -> end segment
         blockhashes = {} # shnum -> blockhash tree
-        for (shnum, original_data) in update_data.iteritems():
+        for (shnum, original_data) in list(update_data.items()):
             data = [d[1] for d in original_data if d[0] == self._version]
             # data is [(blockhashes,start,end)..]

@@ -1,5 +1,17 @@
+"""
+Ported to Python 3.
+"""
+from __future__ import division
+from __future__ import absolute_import
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
 import os, time
-from six.moves import cStringIO as StringIO
+from io import BytesIO
 from itertools import count
 from zope.interface import implementer
 from twisted.internet import defer

@@ -49,7 +61,7 @@ class PublishStatus(object):
         self.size = None
         self.status = "Not started"
         self.progress = 0.0
-        self.counter = self.statusid_counter.next()
+        self.counter = next(self.statusid_counter)
         self.started = time.time()

     def add_per_server_time(self, server, elapsed):
@@ -308,7 +320,7 @@ class Publish(object):
         # Our update process fetched these for us. We need to update
         # them in place as publishing happens.
         self.blockhashes = {} # (shnum, [blochashes])
-        for (i, bht) in blockhashes.iteritems():
+        for (i, bht) in list(blockhashes.items()):
             # We need to extract the leaves from our old hash tree.
             old_segcount = mathutil.div_ceil(version[4],
                                              version[3])

@@ -316,7 +328,7 @@ class Publish(object):
             bht = dict(enumerate(bht))
             h.set_hashes(bht)
             leaves = h[h.get_leaf_index(0):]
-            for j in xrange(self.num_segments - len(leaves)):
+            for j in range(self.num_segments - len(leaves)):
                 leaves.append(None)

             assert len(leaves) >= self.num_segments

@@ -449,7 +461,7 @@ class Publish(object):

         # then we add in all the shares that were bad (corrupted, bad
         # signatures, etc). We want to replace these.
-        for key, old_checkstring in self._servermap.get_bad_shares().items():
+        for key, old_checkstring in list(self._servermap.get_bad_shares().items()):
             (server, shnum) = key
             self.goal.add( (server,shnum) )
             self.bad_share_checkstrings[(server,shnum)] = old_checkstring

@@ -512,10 +524,10 @@ class Publish(object):
         # This will eventually hold the block hash chain for each share
         # that we publish. We define it this way so that empty publishes
         # will still have something to write to the remote slot.
-        self.blockhashes = dict([(i, []) for i in xrange(self.total_shares)])
-        for i in xrange(self.total_shares):
+        self.blockhashes = dict([(i, []) for i in range(self.total_shares)])
+        for i in range(self.total_shares):
             blocks = self.blockhashes[i]
-            for j in xrange(self.num_segments):
+            for j in range(self.num_segments):
                 blocks.append(None)
         self.sharehash_leaves = None # eventually [sharehashes]
         self.sharehashes = {} # shnum -> [sharehash leaves necessary to

@@ -529,7 +541,7 @@ class Publish(object):
         return self.done_deferred

     def _get_some_writer(self):
-        return list(self.writers.values()[0])[0]
+        return list(list(self.writers.values())[0])[0]

     def _update_status(self):
         self._status.set_status("Sending Shares: %d placed out of %d, "

@@ -687,7 +699,7 @@ class Publish(object):
         salt = os.urandom(16)
         assert self._version == SDMF_VERSION

-        for shnum, writers in self.writers.iteritems():
+        for shnum, writers in self.writers.items():
             for writer in writers:
                 writer.put_salt(salt)

@@ -706,8 +718,9 @@ class Publish(object):

         self.log("Pushing segment %d of %d" % (segnum + 1, self.num_segments))
         data = self.data.read(segsize)
-        # XXX: This is dumb. Why return a list?
-        data = "".join(data)
+        if not isinstance(data, bytes):
+            # XXX: Why does this return a list?
+            data = b"".join(data)

         assert len(data) == segsize, len(data)

@@ -735,7 +748,7 @@ class Publish(object):
         for i in range(len(crypttext_pieces)):
             offset = i * piece_size
             piece = crypttext[offset:offset+piece_size]
-            piece = piece + "\x00"*(piece_size - len(piece)) # padding
+            piece = piece + b"\x00"*(piece_size - len(piece)) # padding
             crypttext_pieces[i] = piece
             assert len(piece) == piece_size
         d = fec.encode(crypttext_pieces)

@@ -754,7 +767,7 @@ class Publish(object):
         results, salt = encoded_and_salt
         shares, shareids = results
         self._status.set_status("Pushing segment")
-        for i in xrange(len(shares)):
+        for i in range(len(shares)):
             sharedata = shares[i]
             shareid = shareids[i]
             if self._version == MDMF_VERSION:

@@ -789,7 +802,7 @@ class Publish(object):
     def push_encprivkey(self):
         encprivkey = self._encprivkey
         self._status.set_status("Pushing encrypted private key")
-        for shnum, writers in self.writers.iteritems():
+        for shnum, writers in self.writers.items():
             for writer in writers:
                 writer.put_encprivkey(encprivkey)

@@ -797,7 +810,7 @@ class Publish(object):
     def push_blockhashes(self):
         self.sharehash_leaves = [None] * len(self.blockhashes)
         self._status.set_status("Building and pushing block hash tree")
-        for shnum, blockhashes in self.blockhashes.iteritems():
+        for shnum, blockhashes in list(self.blockhashes.items()):
             t = hashtree.HashTree(blockhashes)
             self.blockhashes[shnum] = list(t)
             # set the leaf for future use.

@@ -811,7 +824,7 @@ class Publish(object):
     def push_sharehashes(self):
         self._status.set_status("Building and pushing share hash chain")
         share_hash_tree = hashtree.HashTree(self.sharehash_leaves)
-        for shnum in xrange(len(self.sharehash_leaves)):
+        for shnum in range(len(self.sharehash_leaves)):
             needed_indices = share_hash_tree.needed_hashes(shnum)
             self.sharehashes[shnum] = dict( [ (i, share_hash_tree[i])
                                               for i in needed_indices] )

@@ -827,7 +840,7 @@ class Publish(object):
         # - Get the checkstring of the resulting layout; sign that.
         # - Push the signature
         self._status.set_status("Pushing root hashes and signature")
-        for shnum in xrange(self.total_shares):
+        for shnum in range(self.total_shares):
             writers = self.writers[shnum]
             for writer in writers:
                 writer.put_root_hash(self.root_hash)

@@ -855,7 +868,7 @@ class Publish(object):
         signable = self._get_some_writer().get_signable()
         self.signature = rsa.sign_data(self._privkey, signable)

-        for (shnum, writers) in self.writers.iteritems():
+        for (shnum, writers) in self.writers.items():
             for writer in writers:
                 writer.put_signature(self.signature)
         self._status.timings['sign'] = time.time() - started

@@ -870,7 +883,7 @@ class Publish(object):
         ds = []
         verification_key = rsa.der_string_from_verifying_key(self._pubkey)

-        for (shnum, writers) in self.writers.copy().iteritems():
+        for (shnum, writers) in list(self.writers.copy().items()):
             for writer in writers:
                 writer.put_verification_key(verification_key)
                 self.num_outstanding += 1

@@ -1025,7 +1038,7 @@ class Publish(object):

         # TODO: Precompute this.
         shares = []
-        for shnum, writers in self.writers.iteritems():
+        for shnum, writers in self.writers.items():
             shares.extend([x.shnum for x in writers if x.server == server])
         known_shnums = set(shares)
         surprise_shares -= known_shnums

@@ -1126,7 +1139,7 @@ class Publish(object):
             self.bad_servers.add(server) # don't ask them again
             # use the checkstring to add information to the log message
             unknown_format = False
-            for (shnum,readv) in read_data.items():
+            for (shnum,readv) in list(read_data.items()):
                 checkstring = readv[0]
                 version = get_version_from_checkstring(checkstring)
                 if version == MDMF_VERSION:

@@ -1220,7 +1233,7 @@ class Publish(object):
 class MutableFileHandle(object):
     """
     I am a mutable uploadable built around a filehandle-like object,
-    usually either a StringIO instance or a handle to an actual file.
+    usually either a BytesIO instance or a handle to an actual file.
     """

     def __init__(self, filehandle):

@@ -1290,14 +1303,14 @@ class MutableFileHandle(object):
 class MutableData(MutableFileHandle):
     """
     I am a mutable uploadable built around a string, which I then cast
-    into a StringIO and treat as a filehandle.
+    into a BytesIO and treat as a filehandle.
     """

     def __init__(self, s):
         # Take a string and return a file-like uploadable.
-        assert isinstance(s, str)
+        assert isinstance(s, bytes)

-        MutableFileHandle.__init__(self, StringIO(s))
+        MutableFileHandle.__init__(self, BytesIO(s))


 @implementer(IMutableUploadable)
@@ -1349,7 +1362,7 @@ class TransformingUploadable(object):
         # are we in state 0?
         self.log("reading %d bytes" % length)

-        old_start_data = ""
+        old_start_data = b""
         old_data_length = self._first_segment_offset - self._read_marker
         if old_data_length > 0:
             if old_data_length > length:

@@ -1367,7 +1380,7 @@ class TransformingUploadable(object):
         # to pad the end of the data with data from our last segment.
         old_end_length = length - \
             (self._newdata.get_size() - self._newdata.pos())
-        old_end_data = ""
+        old_end_data = b""
         if old_end_length > 0:
             self.log("reading %d bytes of old end data" % old_end_length)

@@ -1383,7 +1396,7 @@ class TransformingUploadable(object):

         self.log("reading %d bytes of new data" % length)
         new_data = self._newdata.read(length)
-        new_data = "".join(new_data)
+        new_data = b"".join(new_data)

         self._read_marker += len(old_start_data + new_data + old_end_data)

@@ -1,3 +1,5 @@
+from past.builtins import unicode
+
 import time

 from itertools import count

@@ -906,9 +908,11 @@ class Retrieve(object):


     def notify_server_corruption(self, server, shnum, reason):
+        if isinstance(reason, unicode):
+            reason = reason.encode("utf-8")
         storage_server = server.get_storage_server()
         storage_server.advise_corrupt_share(
-            "mutable",
+            b"mutable",
             self._storage_index,
             shnum,
             reason,

@@ -1,5 +1,7 @@
 from __future__ import print_function

+from past.builtins import unicode
+
 import sys, time, copy
 from zope.interface import implementer
 from itertools import count

@@ -156,6 +158,7 @@ class ServerMap(object):
         corrupted or badly signed) so that a repair operation can do the
         test-and-set using it as a reference.
         """
+        assert isinstance(checkstring, bytes)
         key = (server, shnum) # record checkstring
         self._bad_shares[key] = checkstring
         self._known_shares.pop(key, None)

@@ -800,9 +803,11 @@ class ServermapUpdater(object):


     def notify_server_corruption(self, server, shnum, reason):
+        if isinstance(reason, unicode):
+            reason = reason.encode("utf-8")
         ss = server.get_storage_server()
         ss.advise_corrupt_share(
-            "mutable",
+            b"mutable",
             self._storage_index,
             shnum,
             reason,

@@ -1,19 +1,30 @@
 """
 This module contains classes and functions to implement and manage
 a node for Tahoe-LAFS.
+
+Ported to Python 3.
 """
-from past.builtins import unicode
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+from six import ensure_str, ensure_text
+
 import datetime
 import os.path
 import re
 import types
 import errno
-from six.moves import configparser
 import tempfile
-from io import BytesIO
 from base64 import b32decode, b32encode

+# On Python 2 this will be the backported package.
+import configparser
+
 from twisted.python import log as twlog
 from twisted.application import service
 from twisted.python.failure import Failure

@@ -69,8 +80,8 @@ def _common_valid_config():

 # Add our application versions to the data that Foolscap's LogPublisher
 # reports.
-for thing, things_version in get_package_versions().items():
-    app_versions.add_version(thing, str(things_version))
+for thing, things_version in list(get_package_versions().items()):
+    app_versions.add_version(thing, things_version)

 # group 1 will be addr (dotted quad string), group 3 if any will be portnum (string)
 ADDR_RE = re.compile("^([1-9][0-9]*\.[1-9][0-9]*\.[1-9][0-9]*\.[1-9][0-9]*)(:([1-9][0-9]*))?$")

@@ -95,8 +106,8 @@ def formatTimeTahoeStyle(self, when):
     """
     d = datetime.datetime.utcfromtimestamp(when)
     if d.microsecond:
-        return d.isoformat(" ")[:-3]+"Z"
-    return d.isoformat(" ") + ".000Z"
+        return d.isoformat(ensure_str(" "))[:-3]+"Z"
+    return d.isoformat(ensure_str(" ")) + ".000Z"

 PRIV_README = """
 This directory contains files which contain private data for the Tahoe node,

@@ -150,6 +161,7 @@ def create_node_dir(basedir, readme_text):
     privdir = os.path.join(basedir, "private")
     if not os.path.exists(privdir):
         fileutil.make_dirs(privdir, 0o700)
+        readme_text = ensure_text(readme_text)
         with open(os.path.join(privdir, 'README'), 'w') as f:
             f.write(readme_text)

@@ -170,7 +182,7 @@ def read_config(basedir, portnumfile, generated_files=[], _valid_config=None):

     :returns: :class:`allmydata.node._Config` instance
     """
-    basedir = abspath_expanduser_unicode(unicode(basedir))
+    basedir = abspath_expanduser_unicode(ensure_text(basedir))
     if _valid_config is None:
         _valid_config = _common_valid_config()

@@ -182,12 +194,13 @@ def read_config(basedir, portnumfile, generated_files=[], _valid_config=None):

     # (try to) read the main config file
     config_fname = os.path.join(basedir, "tahoe.cfg")
-    parser = configparser.SafeConfigParser()
     try:
         parser = configutil.get_config(config_fname)
     except EnvironmentError as e:
         if e.errno != errno.ENOENT:
             raise
+        # The file is missing, just create empty ConfigParser.
+        parser = configutil.get_config_from_string(u"")

     configutil.validate_config(config_fname, parser, _valid_config)

@@ -204,9 +217,12 @@ def config_from_string(basedir, portnumfile, config_str, _valid_config=None):
     if _valid_config is None:
         _valid_config = _common_valid_config()

+    if isinstance(config_str, bytes):
+        config_str = config_str.decode("utf-8")
+
     # load configuration from in-memory string
-    parser = configparser.SafeConfigParser()
-    parser.readfp(BytesIO(config_str))
+    parser = configutil.get_config_from_string(config_str)
     fname = "<in-memory>"
     configutil.validate_config(fname, parser, _valid_config)
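The reworked `config_from_string` tolerates callers that still hand it UTF-8 bytes: it decodes before parsing, because Python 3's configparser only accepts text. A sketch of just that normalization step (the function name here is illustrative):

    # Hypothetical helper mirroring the decode-then-parse step above.
    def normalize_config_str(config_str):
        if isinstance(config_str, bytes):
            config_str = config_str.decode("utf-8")
        return config_str

    assert normalize_config_str(b"[node]\nnickname = n\n") == "[node]\nnickname = n\n"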
@@ -277,7 +293,7 @@ class _Config(object):
         is a ConfigParser instance
         """
         self.portnum_fname = portnum_fname
-        self._basedir = abspath_expanduser_unicode(unicode(basedir))
+        self._basedir = abspath_expanduser_unicode(ensure_text(basedir))
         self._config_fname = config_fname
         self.config = configparser

@@ -285,12 +301,8 @@ class _Config(object):
         write_new_tahoecfg = self._default_write_new_tahoecfg
         self._write_config = write_new_tahoecfg

-        nickname_utf8 = self.get_config("node", "nickname", "<unspecified>")
-        if isinstance(nickname_utf8, bytes): # Python 2
-            self.nickname = nickname_utf8.decode("utf-8")
-        else:
-            self.nickname = nickname_utf8
-        assert type(self.nickname) is unicode
+        self.nickname = self.get_config("node", "nickname", u"<unspecified>")
+        assert isinstance(self.nickname, str)

     def _default_write_new_tahoecfg(self, config):
         """

@@ -344,7 +356,7 @@ class _Config(object):
             return self.config.getboolean(section, option)

         item = self.config.get(section, option)
-        if option.endswith(".furl") and self._contains_unescaped_hash(item):
+        if option.endswith(".furl") and '#' in item:
             raise UnescapedHashError(section, option, item)

         return item
@@ -402,14 +414,16 @@ class _Config(object):
         """
         privname = os.path.join(self._basedir, "private", name)
         try:
-            value = fileutil.read(privname)
+            value = fileutil.read(privname, mode="r")
         except EnvironmentError as e:
             if e.errno != errno.ENOENT:
                 raise # we only care about "file doesn't exist"
             if default is _None:
                 raise MissingConfigEntry("The required configuration file %s is missing."
                                          % (quote_output(privname),))
-            if isinstance(default, (bytes, unicode)):
+            if isinstance(default, bytes):
+                default = str(default, "utf-8")
+            if isinstance(default, str):
                 value = default
             else:
                 value = default()

@@ -421,19 +435,21 @@ class _Config(object):
         config file that resides within the subdirectory named 'private'), and
         return it.
         """
+        if isinstance(value, str):
+            value = value.encode("utf-8")
         privname = os.path.join(self._basedir, "private", name)
         with open(privname, "wb") as f:
             f.write(value)

     def get_private_config(self, name, default=_None):
-        """Read the (string) contents of a private config file (which is a
+        """Read the (native string) contents of a private config file (a
         config file that resides within the subdirectory named 'private'),
         and return it. Return a default, or raise an error if one was not
         given.
         """
         privname = os.path.join(self._basedir, "private", name)
         try:
-            return fileutil.read(privname).strip()
+            return fileutil.read(privname, mode="r").strip()
         except EnvironmentError as e:
             if e.errno != errno.ENOENT:
                 raise # we only care about "file doesn't exist"

@@ -461,17 +477,6 @@ class _Config(object):
             os.path.join(self._basedir, *args)
         )

-    @staticmethod
-    def _contains_unescaped_hash(item):
-        characters = iter(item)
-        for c in characters:
-            if c == '\\':
-                characters.next()
-            elif c == '#':
-                return True
-
-        return False
-

 def create_tub_options(config):
     """

@@ -574,12 +579,12 @@ def create_tub(tub_options, default_connection_handlers, foolscap_connection_han
         the new Tub via `Tub.setOption`
     """
     tub = Tub(**kwargs)
-    for (name, value) in tub_options.items():
+    for (name, value) in list(tub_options.items()):
         tub.setOption(name, value)
     handlers = default_connection_handlers.copy()
     handlers.update(handler_overrides)
     tub.removeAllConnectionHintHandlers()
-    for hint_type, handler_name in handlers.items():
+    for hint_type, handler_name in list(handlers.items()):
         handler = foolscap_connection_handlers.get(handler_name)
         if handler:
             tub.addConnectionHintHandler(hint_type, handler)

@@ -591,9 +596,12 @@ def _convert_tub_port(s):
     :returns: a proper Twisted endpoint string like (`tcp:X`) is `s`
         is a bare number, or returns `s` as-is
     """
-    if re.search(r'^\d+$', s):
-        return "tcp:{}".format(int(s))
-    return s
+    us = s
+    if isinstance(s, bytes):
+        us = s.decode("utf-8")
+    if re.search(r'^\d+$', us):
+        return "tcp:{}".format(int(us))
+    return us
|
||||||
@ -681,6 +689,10 @@ def _tub_portlocation(config):
|
|||||||
new_locations.append(loc)
|
new_locations.append(loc)
|
||||||
location = ",".join(new_locations)
|
location = ",".join(new_locations)
|
||||||
|
|
||||||
|
# Lacking this, Python 2 blows up in Foolscap when it is confused by a
|
||||||
|
# Unicode FURL.
|
||||||
|
location = location.encode("utf-8")
|
||||||
|
|
||||||
return tubport, location
|
return tubport, location
|
||||||
|
|
||||||
|
|
||||||
@ -728,6 +740,9 @@ def create_main_tub(config, tub_options,
|
|||||||
port_or_endpoint = tor_provider.get_listener()
|
port_or_endpoint = tor_provider.get_listener()
|
||||||
else:
|
else:
|
||||||
port_or_endpoint = port
|
port_or_endpoint = port
|
||||||
|
# Foolscap requires native strings:
|
||||||
|
if isinstance(port_or_endpoint, (bytes, str)):
|
||||||
|
port_or_endpoint = ensure_str(port_or_endpoint)
|
||||||
tub.listenOn(port_or_endpoint)
|
tub.listenOn(port_or_endpoint)
|
||||||
tub.setLocation(location)
|
tub.setLocation(location)
|
||||||
log.msg("Tub location set to %s" % (location,))
|
log.msg("Tub location set to %s" % (location,))
|
||||||
@ -784,7 +799,7 @@ class Node(service.MultiService):
|
|||||||
if self.tub is not None:
|
if self.tub is not None:
|
||||||
self.nodeid = b32decode(self.tub.tubID.upper()) # binary format
|
self.nodeid = b32decode(self.tub.tubID.upper()) # binary format
|
||||||
self.short_nodeid = b32encode(self.nodeid).lower()[:8] # for printing
|
self.short_nodeid = b32encode(self.nodeid).lower()[:8] # for printing
|
||||||
self.config.write_config_file("my_nodeid", b32encode(self.nodeid).lower() + "\n")
|
self.config.write_config_file("my_nodeid", b32encode(self.nodeid).lower() + b"\n", mode="wb")
|
||||||
self.tub.setServiceParent(self)
|
self.tub.setServiceParent(self)
|
||||||
else:
|
else:
|
||||||
self.nodeid = self.short_nodeid = None
|
self.nodeid = self.short_nodeid = None
|
||||||
@@ -868,9 +883,9 @@ class Node(service.MultiService):
         for o in twlog.theLogPublisher.observers:
             # o might be a FileLogObserver's .emit method
             if type(o) is type(self.setup_logging):  # bound method
-                ob = o.im_self
+                ob = o.__self__
                 if isinstance(ob, twlog.FileLogObserver):
-                    newmeth = types.UnboundMethodType(formatTimeTahoeStyle, ob, ob.__class__)
+                    newmeth = types.MethodType(formatTimeTahoeStyle, ob)
                     ob.formatTime = newmeth
         # TODO: twisted >2.5.0 offers maxRotatedFiles=50
 
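`im_self` and `types.UnboundMethodType` are Python-2-only spellings; `__self__` and `types.MethodType(func, instance)` work on both, and `MethodType` is how a replacement method is bound onto a single instance, as done for `formatTime` above. A self-contained demonstration:

    import types

    class Observer(object):
        def formatTime(self, when):
            return "default:%s" % (when,)

    def tahoe_style(self, when):
        return "tahoe:%s" % (when,)

    ob = Observer()
    ob.formatTime = types.MethodType(tahoe_style, ob)  # bind to this instance only
    assert ob.formatTime(123) == "tahoe:123"
    assert Observer().formatTime(123) == "default:123"  # class unchanged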
@@ -881,12 +896,13 @@ class Node(service.MultiService):
         lgfurl = self.config.get_config("node", "log_gatherer.furl", "")
         if lgfurl:
             # this is in addition to the contents of log-gatherer-furlfile
+            lgfurl = lgfurl.encode("utf-8")
             self.log_tub.setOption("log-gatherer-furl", lgfurl)
         self.log_tub.setOption("log-gatherer-furlfile",
                                self.config.get_config_path("log_gatherer.furl"))
 
         incident_dir = self.config.get_config_path("logs", "incidents")
-        foolscap.logging.log.setLogDir(incident_dir.encode(get_filesystem_encoding()))
+        foolscap.logging.log.setLogDir(incident_dir)
         twlog.msg("Foolscap logging initialized")
         twlog.msg("Note to developers: twistd.log does not receive very much.")
         twlog.msg("Use 'flogtool tail -c NODEDIR/private/logport.furl' instead")
@@ -2,9 +2,18 @@ from __future__ import print_function
 
 import os, sys, urllib, textwrap
 import codecs
-from six.moves.configparser import NoSectionError
 from os.path import join
 
+# Python 2 compatibility
+from future.utils import PY2
+if PY2:
+    from future.builtins import str  # noqa: F401
+
+# On Python 2 this will be the backported package:
+from configparser import NoSectionError
+
 from twisted.python import usage
 
 from allmydata.util.assertutil import precondition
 from allmydata.util.encodingutil import unicode_to_url, quote_output, \
     quote_local_unicode_path, argv_to_abspath
@@ -188,7 +197,7 @@ def get_alias(aliases, path_unicode, default):
     and default is not found in aliases, an UnknownAliasError is
     raised.
     """
-    precondition(isinstance(path_unicode, unicode), path_unicode)
+    precondition(isinstance(path_unicode, str), path_unicode)
 
     from allmydata import uri
     path = path_unicode.encode('utf-8').strip(" ")
@@ -180,10 +180,10 @@ def dump_mutable_share(options):
     share_type = "unknown"
     f.seek(m.DATA_OFFSET)
     version = f.read(1)
-    if version == "\x00":
+    if version == b"\x00":
         # this slot contains an SMDF share
         share_type = "SDMF"
-    elif version == "\x01":
+    elif version == b"\x01":
         share_type = "MDMF"
     f.close()
 
@@ -714,10 +714,10 @@ def describe_share(abs_sharefile, si_s, shnum_s, now, out):
         share_type = "unknown"
         f.seek(m.DATA_OFFSET)
         version = f.read(1)
-        if version == "\x00":
+        if version == b"\x00":
             # this slot contains an SMDF share
             share_type = "SDMF"
-        elif version == "\x01":
+        elif version == b"\x01":
             share_type = "MDMF"
 
         if share_type == "SDMF":
@@ -920,7 +920,7 @@ def corrupt_share(options):
         f.seek(m.DATA_OFFSET)
         data = f.read(2000)
         # make sure this slot contains an SMDF share
-        assert data[0] == "\x00", "non-SDMF mutable shares not supported"
+        assert data[0] == b"\x00", "non-SDMF mutable shares not supported"
         f.close()
 
         (version, ig_seqnum, ig_roothash, ig_IV, ig_k, ig_N, ig_segsize,
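One wrinkle worth noting about the comparison above: on Python 3, indexing bytes yields an int, so `data[0] == b"\x00"` can never be true there, while a one-byte slice still yields bytes. Comparing a slice is the portable spelling:

    data = b"\x00rest-of-share"

    assert data[0] == 0          # indexing bytes gives an int on Python 3
    assert data[0:1] == b"\x00"  # slicing gives bytes on both Pythons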
@@ -1,7 +1,14 @@
 from __future__ import print_function
 
 import os
 
+# Python 2 compatibility
+from future.utils import PY2
+if PY2:
+    from future.builtins import str  # noqa: F401
+
 from twisted.python import usage
 
 from allmydata.scripts.common import NoDefaultBasedirOptions
 from allmydata.scripts.create_node import write_tac
 from allmydata.util.assertutil import precondition
@@ -62,7 +69,7 @@ def create_stats_gatherer(config):
     err = config.stderr
     basedir = config['basedir']
     # This should always be called with an absolute Unicode basedir.
-    precondition(isinstance(basedir, unicode), basedir)
+    precondition(isinstance(basedir, str), basedir)
 
     if os.path.exists(basedir):
         if listdir_unicode(basedir):
@@ -2,7 +2,14 @@ from __future__ import print_function
 
 import urllib
 import json
 
+# Python 2 compatibility
+from future.utils import PY2
+if PY2:
+    from future.builtins import str  # noqa: F401
+
 from twisted.protocols.basic import LineOnlyReceiver
 
 from allmydata.scripts.common import get_alias, DEFAULT_ALIAS, escape_path, \
     UnknownAliasError
 from allmydata.scripts.common_http import do_http, format_http_error
@@ -101,7 +108,7 @@ def check_location(options, where):
 
 def check(options):
     if len(options.locations) == 0:
-        errno = check_location(options, unicode())
+        errno = check_location(options, str())
         if errno != 0:
             return errno
         return 0
@@ -325,7 +332,7 @@ class DeepCheckStreamer(LineOnlyReceiver, object):
 
     def run(self, options):
         if len(options.locations) == 0:
-            errno = self.deepcheck_location(options, unicode())
+            errno = self.deepcheck_location(options, str())
             if errno != 0:
                 return errno
             return 0
@@ -1,13 +1,16 @@
 from __future__ import print_function
 
-from past.builtins import unicode
-
 import json
 import os
 import pprint
 import time
 from collections import deque
 
+# Python 2 compatibility
+from future.utils import PY2
+if PY2:
+    from future.builtins import str  # noqa: F401
+
 from twisted.internet import reactor
 from twisted.application import service
 from twisted.application.internet import TimerService
@@ -157,7 +160,7 @@ class StatsProvider(Referenceable, service.MultiService):
         service.MultiService.startService(self)
 
     def count(self, name, delta=1):
-        if isinstance(name, unicode):
+        if isinstance(name, str):
             name = name.encode("utf-8")
         val = self.counters.setdefault(name, 0)
         self.counters[name] = val + delta
@@ -178,7 +181,7 @@ class StatsProvider(Referenceable, service.MultiService):
     def to_bytes(d):
         result = {}
         for (k, v) in d.items():
-            if isinstance(k, unicode):
+            if isinstance(k, str):
                 k = k.encode("utf-8")
             result[k] = v
         return result
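The same encode-the-text idiom appears throughout this commit wherever a dict must cross the wire with bytes keys. In isolation (Python 3 semantics):

    def to_bytes_keys(d):
        # Copy the dict, encoding any text keys to UTF-8 so downstream
        # wire-format code only ever sees bytes keys.
        result = {}
        for (k, v) in d.items():
            if isinstance(k, str):
                k = k.encode("utf-8")
            result[k] = v
        return result

    assert to_bytes_keys({u"load": 0.1, b"cpu": 0.2}) == {b"load": 0.1, b"cpu": 0.2}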
@@ -2,7 +2,13 @@
 """
 I contain the client-side code which speaks to storage servers, in particular
 the foolscap-based server implemented in src/allmydata/storage/*.py .
+
+Ported to Python 3.
 """
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
 
 # roadmap:
 #
@@ -28,19 +34,20 @@ the foolscap-based server implemented in src/allmydata/storage/*.py .
 #
 # 6: implement other sorts of IStorageClient classes: S3, etc
 
-from past.builtins import unicode
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
 
 import re
 import time
 import json
 import hashlib
 from datetime import datetime
-try:
-    from ConfigParser import (
-        NoSectionError,
-    )
-except ImportError:
-    from configparser import NoSectionError
+# On Python 2 this will be the backport.
+from configparser import NoSectionError
+
 import attr
 from zope.interface import (
     Attribute,
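The try/except import dance is gone because the `configparser` backport gives Python 2 the same module name that Python 3 has in its standard library, so a single import spelling serves both. In isolation:

    # On Python 2 the PyPI "configparser" backport provides this module;
    # on Python 3 it is in the standard library.
    from configparser import ConfigParser, NoSectionError

    parser = ConfigParser()
    parser.read_string(u"[client]\nstorage.plugins = foo\n")
    try:
        plugin_config = parser.items("storageclient.plugins.foo")
    except NoSectionError:
        plugin_config = []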
@@ -74,7 +81,8 @@ from allmydata.util.assertutil import precondition
 from allmydata.util.observer import ObserverList
 from allmydata.util.rrefutil import add_version_to_remote_reference
 from allmydata.util.hashutil import permute_server_hash
-from allmydata.crypto import ed25519
+from allmydata.util.dictutil import BytesKeyDict, UnicodeKeyDict
+
 
 # who is responsible for de-duplication?
 # both?
@@ -102,7 +110,7 @@ class StorageClientConfig(object):
         decreasing preference.  See the *[client]peers.preferred*
         documentation for details.
 
-    :ivar dict[unicode, dict[bytes, bytes]] storage_plugins: A mapping from
+    :ivar dict[unicode, dict[unicode, unicode]] storage_plugins: A mapping from
         names of ``IFoolscapStoragePlugin`` configured in *tahoe.cfg* to the
         respective configuration.
     """
@@ -117,24 +125,24 @@ class StorageClientConfig(object):
 
         :param _Config config: The loaded Tahoe-LAFS node configuration.
         """
-        ps = config.get_config("client", "peers.preferred", b"").split(b",")
-        preferred_peers = tuple([p.strip() for p in ps if p != b""])
+        ps = config.get_config("client", "peers.preferred", "").split(",")
+        preferred_peers = tuple([p.strip() for p in ps if p != ""])
 
         enabled_storage_plugins = (
             name.strip()
             for name
             in config.get_config(
-                b"client",
-                b"storage.plugins",
-                b"",
-            ).decode("utf-8").split(u",")
+                "client",
+                "storage.plugins",
+                "",
+            ).split(u",")
             if name.strip()
         )
 
         storage_plugins = {}
         for plugin_name in enabled_storage_plugins:
             try:
-                plugin_config = config.items(b"storageclient.plugins." + plugin_name)
+                plugin_config = config.items("storageclient.plugins." + plugin_name)
             except NoSectionError:
                 plugin_config = []
             storage_plugins[plugin_name] = dict(plugin_config)
@@ -185,7 +193,7 @@ class StorageFarmBroker(service.MultiService):
         # storage servers that we've heard about. Each descriptor manages its
         # own Reconnector, and will give us a RemoteReference when we ask
         # them for it.
-        self.servers = {}
+        self.servers = BytesKeyDict()
         self._static_server_ids = set() # ignore announcements for these
         self.introducer_client = None
         self._threshold_listeners = [] # tuples of (threshold, Deferred)
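`BytesKeyDict` and its `UnicodeKeyDict` sibling come from `allmydata.util.dictutil`; the point is to fail fast if a key of the wrong type sneaks in during the bytes/text migration. A plausible minimal sketch of the idea (a guess at the general shape, not the project's exact implementation):

    class BytesKeyDict(dict):
        """A dict that rejects non-bytes keys."""

        def __setitem__(self, key, value):
            if not isinstance(key, bytes):
                raise TypeError("key must be bytes, got %r" % (type(key),))
            dict.__setitem__(self, key, value)

    servers = BytesKeyDict()
    servers[b"v0-abc"] = "server"
    try:
        servers[u"v0-abc"] = "server"
    except TypeError:
        pass  # text keys are rejected, catching porting mistakes early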
@@ -199,7 +207,10 @@ class StorageFarmBroker(service.MultiService):
         # this sorted order).
         for (server_id, server) in sorted(servers.items()):
             try:
-                storage_server = self._make_storage_server(server_id, server)
+                storage_server = self._make_storage_server(
+                    server_id.encode("utf-8"),
+                    server,
+                )
             except Exception:
                 # TODO: The _make_storage_server failure is logged but maybe
                 # we should write a traceback here.  Notably, tests don't
@@ -209,6 +220,8 @@ class StorageFarmBroker(service.MultiService):
                 # information.
                 pass
             else:
+                if isinstance(server_id, str):
+                    server_id = server_id.encode("utf-8")
                 self._static_server_ids.add(server_id)
                 self.servers[server_id] = storage_server
                 storage_server.setServiceParent(self)
@@ -242,8 +255,19 @@ class StorageFarmBroker(service.MultiService):
         include_result=False,
     )
     def _make_storage_server(self, server_id, server):
-        assert isinstance(server_id, unicode) # from YAML
-        server_id = server_id.encode("ascii")
+        """
+        Create a new ``IServer`` for the given storage server announcement.
+
+        :param bytes server_id: The unique identifier for the server.
+
+        :param dict server: The server announcement.  See ``Static Server
+            Definitions`` in the configuration documentation for details about
+            the structure and contents.
+
+        :return IServer: The object-y representation of the server described
+            by the given announcement.
+        """
+        assert isinstance(server_id, bytes)
         handler_overrides = server.get("connections", {})
         gm_verifier = create_grid_manager_verifier(
             self._grid_manager_keys,
@@ -276,7 +300,7 @@ class StorageFarmBroker(service.MultiService):
     # these two are used in unit tests
     def test_add_rref(self, serverid, rref, ann):
         s = self._make_storage_server(
-            serverid.decode("ascii"),
+            serverid,
             {"ann": ann.copy()},
         )
         s._rref = rref
@@ -308,28 +332,71 @@ class StorageFarmBroker(service.MultiService):
                 remaining.append( (threshold, d) )
         self._threshold_listeners = remaining
 
-    def _got_announcement(self, key_s, ann):
-        precondition(isinstance(key_s, str), key_s)
-        precondition(key_s.startswith("v0-"), key_s)
-        precondition(ann["service-name"] == "storage", ann["service-name"])
-        server_id = key_s
+    def _should_ignore_announcement(self, server_id, ann):
+        """
+        Determine whether a new storage announcement should be discarded or used
+        to update our collection of storage servers.
+
+        :param bytes server_id: The unique identifier for the storage server
+            which made the announcement.
+
+        :param dict ann: The announcement.
+
+        :return bool: ``True`` if the announcement should be ignored,
+            ``False`` if it should be used to update our local storage server
+            state.
+        """
+        # Let local static configuration always override any announcement for
+        # a particular server.
         if server_id in self._static_server_ids:
             log.msg(format="ignoring announcement for static server '%(id)s'",
                     id=server_id,
                     facility="tahoe.storage_broker", umid="AlxzqA",
                     level=log.UNUSUAL)
+            return True
+
+        try:
+            old = self.servers[server_id]
+        except KeyError:
+            # We don't know anything about this server.  Let's use the
+            # announcement to change that.
+            return False
+        else:
+            # Determine if this announcement is at all difference from the
+            # announcement we already have for the server.  If it is the same,
+            # we don't need to change anything.
+            return old.get_announcement() == ann
+
+    def _got_announcement(self, key_s, ann):
+        """
+        This callback is given to the introducer and called any time an
+        announcement is received which has a valid signature and does not have
+        a sequence number less than or equal to a previous sequence number
+        seen for that server by that introducer.
+
+        Note sequence numbers are not considered between different introducers
+        so if we use more than one introducer it is possible for them to
+        deliver us stale announcements in some cases.
+        """
+        precondition(isinstance(key_s, bytes), key_s)
+        precondition(key_s.startswith(b"v0-"), key_s)
+        precondition(ann["service-name"] == "storage", ann["service-name"])
+        server_id = key_s
+
+        if self._should_ignore_announcement(server_id, ann):
             return
+
         s = self._make_storage_server(
-            server_id.decode("utf-8"),
+            server_id,
             {u"ann": ann},
         )
-        server_id = s.get_serverid()
-        old = self.servers.get(server_id)
-        if old:
-            if old.get_announcement() == ann:
-                return # duplicate
-            # replacement
-            del self.servers[server_id]
+
+        try:
+            old = self.servers.pop(server_id)
+        except KeyError:
+            pass
+        else:
+            # It's a replacement, get rid of the old one.
             old.stop_connecting()
             old.disownServiceParent()
             # NOTE: this disownServiceParent() returns a Deferred that
@@ -344,6 +411,7 @@ class StorageFarmBroker(service.MultiService):
             # until they have fired (but hopefully don't keep reference
             # cycles around when they fire earlier than that, which will
             # almost always be the case for normal runtime).
+
         # now we forget about them and start using the new one
         s.setServiceParent(self)
         self.servers[server_id] = s
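The replacement bookkeeping now leans on `dict.pop` inside try/except/else, which looks up and removes the old entry in one step. The shape of the idiom:

    servers = {b"v0-abc": "old-server"}

    server_id = b"v0-abc"
    try:
        old = servers.pop(server_id)  # remove and return in one lookup
    except KeyError:
        pass                          # first sighting of this server
    else:
        # tear down `old` here (stop_connecting, disownServiceParent, ...)
        assert old == "old-server"
    servers[server_id] = "new-server"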
@@ -359,7 +427,7 @@ class StorageFarmBroker(service.MultiService):
         # connections to only a subset of the servers, which would increase
         # the chances that we'll put shares in weird places (and not update
         # existing shares of mutable files). See #374 for more details.
-        for dsc in self.servers.values():
+        for dsc in list(self.servers.values()):
             dsc.try_to_connect()
 
     def get_servers_for_psi(self, peer_selection_index, for_upload=False):
@@ -414,7 +482,7 @@ class StorageFarmBroker(service.MultiService):
         # Upload Results web page). If the Helper is running 1.12 or newer,
         # it will send pubkeys, but if it's still running 1.11, it will send
         # tubids. This clause maps the old tubids to our existing servers.
-        for s in self.servers.values():
+        for s in list(self.servers.values()):
             if isinstance(s, NativeStorageServer):
                 if serverid == s.get_tubid():
                     return s
@@ -528,10 +596,10 @@ class _FoolscapStorage(object):
         tubid = base32.a2b(tubid_s)
         if "permutation-seed-base32" in ann:
             seed = ann["permutation-seed-base32"]
-            if isinstance(seed, unicode):
+            if isinstance(seed, str):
                 seed = seed.encode("utf-8")
             ps = base32.a2b(seed)
-        elif re.search(r'^v0-[0-9a-zA-Z]{52}$', server_id):
+        elif re.search(br'^v0-[0-9a-zA-Z]{52}$', server_id):
             ps = base32.a2b(server_id[3:])
         else:
             log.msg("unable to parse serverid '%(server_id)s as pubkey, "
@@ -627,7 +695,7 @@ def _storage_from_foolscap_plugin(node_config, config, announcement, get_rref):
         in getPlugins(IFoolscapStoragePlugin)
     }
     storage_options = announcement.get(u"storage-options", [])
-    for plugin_name, plugin_config in config.storage_plugins.items():
+    for plugin_name, plugin_config in list(config.storage_plugins.items()):
         try:
             plugin = plugins[plugin_name]
         except KeyError:
@@ -660,16 +728,16 @@ class NativeStorageServer(service.MultiService):
     @ivar remote_host: the IAddress, if connected, otherwise None
     """
 
-    VERSION_DEFAULTS = {
-        b"http://allmydata.org/tahoe/protocols/storage/v1" :
-        { b"maximum-immutable-share-size": 2**32 - 1,
-          b"maximum-mutable-share-size": 2*1000*1000*1000, # maximum prior to v1.9.2
-          b"tolerates-immutable-read-overrun": False,
-          b"delete-mutable-shares-with-zero-length-writev": False,
-          b"available-space": None,
-          },
-        b"application-version": "unknown: no get_version()",
-        }
+    VERSION_DEFAULTS = UnicodeKeyDict({
+        "http://allmydata.org/tahoe/protocols/storage/v1" :
+        UnicodeKeyDict({ "maximum-immutable-share-size": 2**32 - 1,
+          "maximum-mutable-share-size": 2*1000*1000*1000, # maximum prior to v1.9.2
+          "tolerates-immutable-read-overrun": False,
+          "delete-mutable-shares-with-zero-length-writev": False,
+          "available-space": None,
+          }),
+        "application-version": "unknown: no get_version()",
+        })
 
     def __init__(self, server_id, ann, tub_maker, handler_overrides, node_config, config=None,
                  grid_manager_verifier=None):
@@ -755,7 +823,7 @@ class NativeStorageServer(service.MultiService):
             # Nope
             pass
         else:
-            if isinstance(furl, unicode):
+            if isinstance(furl, str):
                 furl = furl.encode("utf-8")
             # See comment above for the _storage_from_foolscap_plugin case
             # about passing in get_rref.
@@ -830,7 +898,7 @@ class NativeStorageServer(service.MultiService):
         version = self.get_version()
         if version is None:
             return None
-        protocol_v1_version = version.get(b'http://allmydata.org/tahoe/protocols/storage/v1', {})
+        protocol_v1_version = version.get('http://allmydata.org/tahoe/protocols/storage/v1', UnicodeKeyDict())
         available_space = protocol_v1_version.get('available-space')
         if available_space is None:
             available_space = protocol_v1_version.get('maximum-immutable-share-size', None)
@@ -37,6 +37,11 @@ a mean of 10kB and a max of 100MB, so filesize=min(int(1.0/random(.0002)),1e8)
 import os, sys, httplib, binascii
 import urllib, json, random, time, urlparse
 
+# Python 2 compatibility
+from future.utils import PY2
+if PY2:
+    from future.builtins import str  # noqa: F401
+
 if sys.argv[1] == "--stats":
     statsfiles = sys.argv[2:]
     # gather stats every 10 seconds, do a moving-window average of the last
@@ -116,7 +121,7 @@ def listdir(nodeurl, root, remote_pathname):
     assert nodetype == "dirnode"
     global directories_read
     directories_read += 1
-    children = dict( [(unicode(name),value)
+    children = dict( [(str(name),value)
                       for (name,value)
                       in d["children"].iteritems()] )
     return children
@@ -1,18 +1,25 @@
 from __future__ import print_function
 
 import os, shutil, sys, urllib, time, stat, urlparse
+
+# Python 2 compatibility
+from future.utils import PY2
+if PY2:
+    from future.builtins import str  # noqa: F401
 from six.moves import cStringIO as StringIO
 
 from twisted.internet import defer, reactor, protocol, error
 from twisted.application import service, internet
 from twisted.web import client as tw_client
+from twisted.python import log, procutils
+from foolscap.api import Tub, fireEventually, flushEventualQueue
+
 from allmydata import client, introducer
 from allmydata.immutable import upload
 from allmydata.scripts import create_node
 from allmydata.util import fileutil, pollmixin
 from allmydata.util.fileutil import abspath_expanduser_unicode
 from allmydata.util.encodingutil import get_filesystem_encoding
-from foolscap.api import Tub, fireEventually, flushEventualQueue
-
-from twisted.python import log, procutils
 
 class StallableHTTPGetterDiscarder(tw_client.HTTPPageGetter, object):
     full_speed_ahead = False
@@ -69,7 +76,7 @@ class SystemFramework(pollmixin.PollMixin):
     numnodes = 7
 
     def __init__(self, basedir, mode):
-        self.basedir = basedir = abspath_expanduser_unicode(unicode(basedir))
+        self.basedir = basedir = abspath_expanduser_unicode(str(basedir))
         if not (basedir + os.path.sep).startswith(abspath_expanduser_unicode(u".") + os.path.sep):
             raise AssertionError("safety issue: basedir must be a subdir")
         self.testdir = testdir = os.path.join(basedir, "test")
@@ -46,6 +46,7 @@ from testtools.twistedsupport import (
     flush_logged_errors,
 )
 
+from twisted.application import service
 from twisted.plugin import IPlugin
 from twisted.internet import defer
 from twisted.internet.defer import inlineCallbacks, returnValue
@@ -87,16 +88,15 @@ from ..crypto import (
 from .eliotutil import (
     EliotLoggedRunTest,
 )
-# Backwards compatibility imports:
-from .common_py3 import LoggingServiceParent, ShouldFailMixin  # noqa: F401
+from .common_util import ShouldFailMixin  # noqa: F401
 
 
 TEST_RSA_KEY_SIZE = 522
 
 EMPTY_CLIENT_CONFIG = config_from_string(
-    b"/dev/null",
-    b"tub.port",
-    b""
+    "/dev/null",
+    "tub.port",
+    ""
 )
 
 
@@ -230,16 +230,16 @@ class UseNode(object):
 
     def setUp(self):
         def format_config_items(config):
-            return b"\n".join(
-                b" = ".join((key, value))
+            return "\n".join(
+                " = ".join((key, value))
                 for (key, value)
                 in config.items()
             )
 
         if self.plugin_config is None:
-            plugin_config_section = b""
+            plugin_config_section = ""
         else:
-            plugin_config_section = b"""
+            plugin_config_section = """
 [storageclient.plugins.{storage_plugin}]
 {config}
 """.format(
@@ -249,8 +249,8 @@ class UseNode(object):
 
         self.config = config_from_string(
             self.basedir.asTextMode().path,
-            u"tub.port",
-            b"""
+            "tub.port",
+            """
 [node]
 {node_config}
 
@@ -781,6 +781,11 @@ def create_mutable_filenode(contents, mdmf=False, all_contents=None):
     return filenode
 
 
+class LoggingServiceParent(service.MultiService):
+    def log(self, *args, **kwargs):
+        return log.msg(*args, **kwargs)
+
+
 TEST_DATA=b"\x02"*(Uploader.URI_LIT_SIZE_THRESHOLD+1)
 
 
@@ -1,170 +0,0 @@
-"""
-Common utilities that have been ported to Python 3.
-
-Ported to Python 3.
-"""
-
-from __future__ import unicode_literals
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-
-from future.utils import PY2
-if PY2:
-    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
-from past.builtins import unicode
-
-import os
-import time
-import signal
-
-from twisted.internet import defer, reactor
-from twisted.application import service
-from twisted.python import failure
-from twisted.trial import unittest
-
-from ..util.assertutil import precondition
-from ..util.encodingutil import unicode_platform, get_filesystem_encoding
-from ..util import log
-
-
-class TimezoneMixin(object):
-
-    def setTimezone(self, timezone):
-        def tzset_if_possible():
-            # Windows doesn't have time.tzset().
-            if hasattr(time, 'tzset'):
-                time.tzset()
-
-        unset = object()
-        originalTimezone = os.environ.get('TZ', unset)
-        def restoreTimezone():
-            if originalTimezone is unset:
-                del os.environ['TZ']
-            else:
-                os.environ['TZ'] = originalTimezone
-            tzset_if_possible()
-
-        os.environ['TZ'] = timezone
-        self.addCleanup(restoreTimezone)
-        tzset_if_possible()
-
-    def have_working_tzset(self):
-        return hasattr(time, 'tzset')
-
-
-class SignalMixin(object):
-    # This class is necessary for any code which wants to use Processes
-    # outside the usual reactor.run() environment. It is copied from
-    # Twisted's twisted.test.test_process . Note that Twisted-8.2.0 uses
-    # something rather different.
-    sigchldHandler = None
-
-    def setUp(self):
-        # make sure SIGCHLD handler is installed, as it should be on
-        # reactor.run(). problem is reactor may not have been run when this
-        # test runs.
-        if hasattr(reactor, "_handleSigchld") and hasattr(signal, "SIGCHLD"):
-            self.sigchldHandler = signal.signal(signal.SIGCHLD,
-                                                reactor._handleSigchld)
-        return super(SignalMixin, self).setUp()
-
-    def tearDown(self):
-        if self.sigchldHandler:
-            signal.signal(signal.SIGCHLD, self.sigchldHandler)
-        return super(SignalMixin, self).tearDown()
-
-
-class ShouldFailMixin(object):
-
-    def shouldFail(self, expected_failure, which, substring,
-                   callable, *args, **kwargs):
-        """Assert that a function call raises some exception. This is a
-        Deferred-friendly version of TestCase.assertRaises() .
-
-        Suppose you want to verify the following function:
-
-         def broken(a, b, c):
-            if a < 0:
-                raise TypeError('a must not be negative')
-            return defer.succeed(b+c)
-
-        You can use:
-            d = self.shouldFail(TypeError, 'test name',
-                                'a must not be negative',
-                                broken, -4, 5, c=12)
-        in your test method. The 'test name' string will be included in the
-        error message, if any, because Deferred chains frequently make it
-        difficult to tell which assertion was tripped.
-
-        The substring= argument, if not None, must appear in the 'repr'
-        of the message wrapped by this Failure, or the test will fail.
-        """
-
-        assert substring is None or isinstance(substring, (bytes, unicode))
-        d = defer.maybeDeferred(callable, *args, **kwargs)
-        def done(res):
-            if isinstance(res, failure.Failure):
-                res.trap(expected_failure)
-                if substring:
-                    self.failUnless(substring in str(res),
-                                    "%s: substring '%s' not in '%s'"
-                                    % (which, substring, str(res)))
-                # return the Failure for further analysis, but in a form that
-                # doesn't make the Deferred chain think that we failed.
-                return [res]
-            else:
-                self.fail("%s was supposed to raise %s, not get '%s'" %
-                          (which, expected_failure, res))
-        d.addBoth(done)
-        return d
-
-
-class ReallyEqualMixin(object):
-    def failUnlessReallyEqual(self, a, b, msg=None):
-        self.assertEqual(a, b, msg)
-        self.assertEqual(type(a), type(b), "a :: %r (%s), b :: %r (%s), %r" % (a, type(a), b, type(b), msg))
-
-
-def skip_if_cannot_represent_filename(u):
-    precondition(isinstance(u, unicode))
-
-    enc = get_filesystem_encoding()
-    if not unicode_platform():
-        try:
-            u.encode(enc)
-        except UnicodeEncodeError:
-            raise unittest.SkipTest("A non-ASCII filename could not be encoded on this platform.")
-
-
-class Marker(object):
-    pass
-
-class FakeCanary(object):
-    """For use in storage tests.
-
-    Can be moved back to test_storage.py once enough Python 3 porting has been
-    done.
-    """
-    def __init__(self, ignore_disconnectors=False):
-        self.ignore = ignore_disconnectors
-        self.disconnectors = {}
-    def notifyOnDisconnect(self, f, *args, **kwargs):
-        if self.ignore:
-            return
-        m = Marker()
-        self.disconnectors[m] = (f, args, kwargs)
-        return m
-    def dontNotifyOnDisconnect(self, marker):
-        if self.ignore:
-            return
-        del self.disconnectors[marker]
-    def getRemoteTubID(self):
-        return None
-    def getPeer(self):
-        return "<fake>"
-
-
-class LoggingServiceParent(service.MultiService):
-    def log(self, *args, **kwargs):
-        return log.msg(*args, **kwargs)
@@ -1,22 +1,33 @@
 from __future__ import print_function
 
 import os
+import time
+import signal
 from random import randrange
 from six.moves import StringIO
 
 from twisted.internet import reactor, defer
+from twisted.python import failure
 from twisted.trial import unittest
 
 from ..util.assertutil import precondition
 from ..scripts import runner
-from allmydata.util.encodingutil import get_io_encoding
+from allmydata.util.encodingutil import unicode_platform, get_filesystem_encoding, get_io_encoding
 # Imported for backwards compatibility:
 from future.utils import bord, bchr, binary_type
-from .common_py3 import (
-    SignalMixin, skip_if_cannot_represent_filename, ReallyEqualMixin, ShouldFailMixin
-)
+from past.builtins import unicode
+
+
+def skip_if_cannot_represent_filename(u):
+    precondition(isinstance(u, unicode))
+
+    enc = get_filesystem_encoding()
+    if not unicode_platform():
+        try:
+            u.encode(enc)
+        except UnicodeEncodeError:
+            raise unittest.SkipTest("A non-ASCII filename could not be encoded on this platform.")
+
 
 def skip_if_cannot_represent_argv(u):
     precondition(isinstance(u, unicode))
     try:
|
|||||||
return result
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
class ReallyEqualMixin(object):
|
||||||
|
def failUnlessReallyEqual(self, a, b, msg=None):
|
||||||
|
self.assertEqual(a, b, msg)
|
||||||
|
self.assertEqual(type(a), type(b), "a :: %r (%s), b :: %r (%s), %r" % (a, type(a), b, type(b), msg))
|
||||||
|
|
||||||
|
|
||||||
|
class SignalMixin(object):
|
||||||
|
# This class is necessary for any code which wants to use Processes
|
||||||
|
# outside the usual reactor.run() environment. It is copied from
|
||||||
|
# Twisted's twisted.test.test_process . Note that Twisted-8.2.0 uses
|
||||||
|
# something rather different.
|
||||||
|
sigchldHandler = None
|
||||||
|
|
||||||
|
def setUp(self):
|
||||||
|
# make sure SIGCHLD handler is installed, as it should be on
|
||||||
|
# reactor.run(). problem is reactor may not have been run when this
|
||||||
|
# test runs.
|
||||||
|
if hasattr(reactor, "_handleSigchld") and hasattr(signal, "SIGCHLD"):
|
||||||
|
self.sigchldHandler = signal.signal(signal.SIGCHLD,
|
||||||
|
reactor._handleSigchld)
|
||||||
|
return super(SignalMixin, self).setUp()
|
||||||
|
|
||||||
|
def tearDown(self):
|
||||||
|
if self.sigchldHandler:
|
||||||
|
signal.signal(signal.SIGCHLD, self.sigchldHandler)
|
||||||
|
return super(SignalMixin, self).tearDown()
|
||||||
|
|
||||||
|
|
||||||
class StallMixin(object):
|
class StallMixin(object):
|
||||||
def stall(self, res=None, delay=1):
|
def stall(self, res=None, delay=1):
|
||||||
d = defer.Deferred()
|
d = defer.Deferred()
|
||||||
@ -85,6 +124,76 @@ class StallMixin(object):
|
|||||||
return d
|
return d
|
||||||
|
|
||||||
|
|
||||||
|
class Marker(object):
|
||||||
|
pass
|
||||||
|
|
||||||
|
class FakeCanary(object):
|
||||||
|
"""For use in storage tests.
|
||||||
|
"""
|
||||||
|
def __init__(self, ignore_disconnectors=False):
|
||||||
|
self.ignore = ignore_disconnectors
|
||||||
|
self.disconnectors = {}
|
||||||
|
def notifyOnDisconnect(self, f, *args, **kwargs):
|
||||||
|
if self.ignore:
|
||||||
|
return
|
||||||
|
m = Marker()
|
||||||
|
self.disconnectors[m] = (f, args, kwargs)
|
||||||
|
return m
|
||||||
|
def dontNotifyOnDisconnect(self, marker):
|
||||||
|
if self.ignore:
|
||||||
|
return
|
||||||
|
del self.disconnectors[marker]
|
||||||
|
def getRemoteTubID(self):
|
||||||
|
return None
|
||||||
|
def getPeer(self):
|
||||||
|
return "<fake>"
|
||||||
|
|
||||||
|
|
||||||
|
class ShouldFailMixin(object):
|
||||||
|
|
||||||
|
def shouldFail(self, expected_failure, which, substring,
|
||||||
|
callable, *args, **kwargs):
|
||||||
|
"""Assert that a function call raises some exception. This is a
|
||||||
|
Deferred-friendly version of TestCase.assertRaises() .
|
||||||
|
|
||||||
|
Suppose you want to verify the following function:
|
||||||
|
|
||||||
|
def broken(a, b, c):
|
||||||
|
if a < 0:
|
||||||
|
raise TypeError('a must not be negative')
|
||||||
|
return defer.succeed(b+c)
|
||||||
|
|
||||||
|
You can use:
|
||||||
|
d = self.shouldFail(TypeError, 'test name',
|
||||||
|
'a must not be negative',
|
||||||
|
broken, -4, 5, c=12)
|
||||||
|
in your test method. The 'test name' string will be included in the
|
||||||
|
error message, if any, because Deferred chains frequently make it
|
||||||
|
difficult to tell which assertion was tripped.
|
||||||
|
|
||||||
|
The substring= argument, if not None, must appear in the 'repr'
|
||||||
|
of the message wrapped by this Failure, or the test will fail.
|
||||||
|
"""
|
||||||
|
|
||||||
|
assert substring is None or isinstance(substring, (bytes, unicode))
|
||||||
|
d = defer.maybeDeferred(callable, *args, **kwargs)
|
||||||
|
def done(res):
|
||||||
|
if isinstance(res, failure.Failure):
|
||||||
|
res.trap(expected_failure)
|
||||||
|
if substring:
|
||||||
|
self.failUnless(substring in str(res),
|
||||||
|
"%s: substring '%s' not in '%s'"
|
||||||
|
% (which, substring, str(res)))
|
||||||
|
# return the Failure for further analysis, but in a form that
|
||||||
|
# doesn't make the Deferred chain think that we failed.
|
||||||
|
return [res]
|
||||||
|
else:
|
||||||
|
self.fail("%s was supposed to raise %s, not get '%s'" %
|
||||||
|
(which, expected_failure, res))
|
||||||
|
d.addBoth(done)
|
||||||
|
return d
|
||||||
|
|
||||||
|
|
||||||
class TestMixin(SignalMixin):
|
class TestMixin(SignalMixin):
|
||||||
def setUp(self):
|
def setUp(self):
|
||||||
return super(TestMixin, self).setUp()
|
return super(TestMixin, self).setUp()
|
||||||
@ -132,6 +241,31 @@ class TestMixin(SignalMixin):
|
|||||||
self.fail("Reactor was still active when it was required to be quiescent.")
|
self.fail("Reactor was still active when it was required to be quiescent.")
|
||||||
|
|
||||||
|
|
||||||
|
class TimezoneMixin(object):
|
||||||
|
|
||||||
|
def setTimezone(self, timezone):
|
||||||
|
def tzset_if_possible():
|
||||||
|
# Windows doesn't have time.tzset().
|
||||||
|
if hasattr(time, 'tzset'):
|
||||||
|
time.tzset()
|
||||||
|
|
||||||
|
unset = object()
|
||||||
|
originalTimezone = os.environ.get('TZ', unset)
|
||||||
|
def restoreTimezone():
|
||||||
|
if originalTimezone is unset:
|
||||||
|
del os.environ['TZ']
|
||||||
|
else:
|
||||||
|
os.environ['TZ'] = originalTimezone
|
||||||
|
tzset_if_possible()
|
||||||
|
|
||||||
|
os.environ['TZ'] = timezone
|
||||||
|
self.addCleanup(restoreTimezone)
|
||||||
|
tzset_if_possible()
|
||||||
|
|
||||||
|
def have_working_tzset(self):
|
||||||
|
return hasattr(time, 'tzset')
|
||||||
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
import win32file
|
import win32file
|
||||||
import win32con
|
import win32con
|
||||||
|
@ -1,25 +1,50 @@
|
|||||||
|
from six import ensure_str
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"do_http",
|
||||||
|
"render",
|
||||||
|
]
|
||||||
|
|
||||||
import treq
|
|
||||||
from twisted.internet.defer import (
|
from twisted.internet.defer import (
|
||||||
maybeDeferred,
|
|
||||||
inlineCallbacks,
|
inlineCallbacks,
|
||||||
returnValue,
|
returnValue,
|
||||||
)
|
)
|
||||||
from twisted.web.error import Error
|
from twisted.web.error import (
|
||||||
|
Error,
|
||||||
|
)
|
||||||
|
from twisted.python.reflect import (
|
||||||
|
fullyQualifiedName,
|
||||||
|
)
|
||||||
|
from twisted.internet.defer import (
|
||||||
|
succeed,
|
||||||
|
)
|
||||||
|
from twisted.web.test.requesthelper import (
|
||||||
|
DummyChannel,
|
||||||
|
)
|
||||||
|
from twisted.web.error import (
|
||||||
|
UnsupportedMethod,
|
||||||
|
)
|
||||||
|
from twisted.web.http import (
|
||||||
|
NOT_ALLOWED,
|
||||||
|
)
|
||||||
|
from twisted.web.server import (
|
||||||
|
NOT_DONE_YET,
|
||||||
|
)
|
||||||
|
|
||||||
from nevow.context import WebContext
|
import treq
|
||||||
from nevow.testutil import FakeRequest
|
|
||||||
from nevow.appserver import (
|
from ..webish import (
|
||||||
processingFailed,
|
TahoeLAFSRequest,
|
||||||
DefaultExceptionHandler,
|
|
||||||
)
|
|
||||||
from nevow.inevow import (
|
|
||||||
ICanHandleException,
|
|
||||||
IRequest,
|
|
||||||
IResource as INevowResource,
|
|
||||||
IData,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class VerboseError(Error):
|
||||||
|
"""Include the HTTP body response too."""
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
return Error.__str__(self) + " " + ensure_str(self.response)
|
||||||
|
|
||||||
|
|
||||||
@inlineCallbacks
|
@inlineCallbacks
|
||||||
def do_http(method, url, **kwargs):
|
def do_http(method, url, **kwargs):
|
||||||
response = yield treq.request(method, url, persistent=False, **kwargs)
|
response = yield treq.request(method, url, persistent=False, **kwargs)
|
||||||
@ -27,14 +52,14 @@ def do_http(method, url, **kwargs):
|
|||||||
# TODO: replace this with response.fail_for_status when
|
# TODO: replace this with response.fail_for_status when
|
||||||
# https://github.com/twisted/treq/pull/159 has landed
|
# https://github.com/twisted/treq/pull/159 has landed
|
||||||
if 400 <= response.code < 600:
|
if 400 <= response.code < 600:
|
||||||
raise Error(response.code, response=body)
|
raise VerboseError(response.code, response=body)
|
||||||
returnValue(body)
|
returnValue(body)
|
||||||
|
|
||||||
|
|
||||||
def render(resource, query_args):
|
def render(resource, query_args):
|
||||||
"""
|
"""
|
||||||
Render (in the manner of the Nevow appserver) a Nevow ``Page`` or a
|
Render (in the manner of the Twisted Web Site) a Twisted ``Resource``
|
||||||
Twisted ``Resource`` against a request with the given query arguments .
|
against a request with the given query arguments .
|
||||||
|
|
||||||
:param resource: The page or resource to render.
|
:param resource: The page or resource to render.
|
||||||
|
|
||||||
@ -44,19 +69,36 @@ def render(resource, query_args):
|
|||||||
:return Deferred: A Deferred that fires with the rendered response body as
|
:return Deferred: A Deferred that fires with the rendered response body as
|
||||||
``bytes``.
|
``bytes``.
|
||||||
"""
|
"""
|
||||||
ctx = WebContext(tag=resource)
|
channel = DummyChannel()
|
||||||
req = FakeRequest(args=query_args)
|
request = TahoeLAFSRequest(channel)
|
||||||
ctx.remember(DefaultExceptionHandler(), ICanHandleException)
|
request.method = b"GET"
|
||||||
ctx.remember(req, IRequest)
|
request.args = query_args
|
||||||
ctx.remember(None, IData)
|
request.prepath = [b""]
|
||||||
|
request.postpath = []
|
||||||
|
try:
|
||||||
|
result = resource.render(request)
|
||||||
|
except UnsupportedMethod:
|
||||||
|
request.setResponseCode(NOT_ALLOWED)
|
||||||
|
result = b""
|
||||||
|
|
||||||
def maybe_concat(res):
|
if isinstance(result, bytes):
|
||||||
if isinstance(res, bytes):
|
request.write(result)
|
||||||
return req.v + res
|
done = succeed(None)
|
||||||
return req.v
|
elif result == NOT_DONE_YET:
|
||||||
|
if request.finished:
|
||||||
resource = INevowResource(resource)
|
done = succeed(None)
|
||||||
d = maybeDeferred(resource.renderHTTP, ctx)
|
else:
|
||||||
d.addErrback(processingFailed, req, ctx)
|
done = request.notifyFinish()
|
||||||
d.addCallback(maybe_concat)
|
else:
|
||||||
return d
|
raise ValueError(
|
||||||
|
"{!r} returned {!r}, required bytes or NOT_DONE_YET.".format(
|
||||||
|
fullyQualifiedName(resource.render),
|
||||||
|
result,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
def get_body(ignored):
|
||||||
|
complete_response = channel.transport.written.getvalue()
|
||||||
|
header, body = complete_response.split(b"\r\n\r\n", 1)
|
||||||
|
return body
|
||||||
|
done.addCallback(get_body)
|
||||||
|
return done
|
||||||
|
@ -2,7 +2,11 @@
|
|||||||
Tools aimed at the interaction between tests and Eliot.
|
Tools aimed at the interaction between tests and Eliot.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
from past.builtins import unicode
|
# Python 2 compatibility
|
||||||
|
# Can't use `builtins.str` because it's not JSON encodable:
|
||||||
|
# `exceptions.TypeError: <class 'future.types.newstr.newstr'> is not JSON-encodeable`
|
||||||
|
from past.builtins import unicode as str
|
||||||
|
from future.utils import PY3
|
||||||
|
|
||||||
__all__ = [
|
__all__ = [
|
||||||
"RUN_TEST",
|
"RUN_TEST",
|
||||||
@ -29,7 +33,7 @@ from twisted.internet.defer import (
|
|||||||
|
|
||||||
_NAME = Field.for_types(
|
_NAME = Field.for_types(
|
||||||
u"name",
|
u"name",
|
||||||
[unicode],
|
[str],
|
||||||
u"The name of the test.",
|
u"The name of the test.",
|
||||||
)
|
)
|
||||||
|
|
||||||
@ -103,7 +107,7 @@ def eliot_logged_test(f):
|
|||||||
|
|
||||||
# Begin an action that should comprise all messages from the decorated
|
# Begin an action that should comprise all messages from the decorated
|
||||||
# test method.
|
# test method.
|
||||||
with RUN_TEST(name=self.id().decode("utf-8")).context() as action:
|
with RUN_TEST(name=self.id()).context() as action:
|
||||||
# When the test method Deferred fires, the RUN_TEST action is
|
# When the test method Deferred fires, the RUN_TEST action is
|
||||||
# done. However, we won't have re-published the MemoryLogger
|
# done. However, we won't have re-published the MemoryLogger
|
||||||
# messages into the global/default logger when this Deferred
|
# messages into the global/default logger when this Deferred
|
||||||
@ -161,6 +165,9 @@ class EliotLoggedRunTest(object):
|
|||||||
|
|
||||||
@eliot_logged_test
|
@eliot_logged_test
|
||||||
def run(self, result=None):
|
def run(self, result=None):
|
||||||
|
# Workaround for https://github.com/itamarst/eliot/issues/456
|
||||||
|
if PY3:
|
||||||
|
self.case.eliot_logger._validate_message = lambda *args, **kwargs: None
|
||||||
return self._run_tests_with_factory(
|
return self._run_tests_with_factory(
|
||||||
self.case,
|
self.case,
|
||||||
self.handlers,
|
self.handlers,
|
||||||
|
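The added lines disable per-message validation on the test case's MemoryLogger, since eliot issue #456 makes it fail spuriously under Python 3. The same pattern in isolation, assuming eliot.testing.MemoryLogger keeps the private _validate_message hook that the hunk above relies on:

    from eliot.testing import MemoryLogger

    logger = MemoryLogger()
    # Short-circuit validation so messages are recorded without being checked.
    logger._validate_message = lambda *args, **kwargs: None
    logger.write({"message_type": "example", "value": 1})
    print(logger.messages)  # the raw dictionary is captured as-is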
@@ -1,3 +1,15 @@
+"""
+Ported to Python 3.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
 from twisted.trial import unittest
 from foolscap.api import flushEventualQueue
 from allmydata.monitor import Monitor
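This header is the project's standard Python 3 porting boilerplate: the __future__ imports give the module Python 3 division, printing, and string-literal semantics, and the guarded future.builtins import rebinds the builtins to their Python 3 behaviour when running on Python 2. A small illustration of the effect, assuming the future package is installed:

    from __future__ import division, unicode_literals
    from future.utils import PY2
    if PY2:
        from future.builtins import range, str  # noqa: F401

    print(1 / 2)                        # 0.5 on both interpreters, not 0
    print(isinstance("text", str))      # True on both: bare literals are text
    print(isinstance(range(3), list))   # False on both: range is lazy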
@@ -22,7 +34,7 @@ class Checker(unittest.TestCase, CheckerMixin, PublishMixin):
         return d
 
     def test_check_no_shares(self):
-        for shares in self._storage._peers.values():
+        for shares in list(self._storage._peers.values()):
             shares.clear()
         d = self._fn.check(Monitor())
         d.addCallback(self.check_bad, "test_check_no_shares")
@@ -31,7 +43,7 @@ class Checker(unittest.TestCase, CheckerMixin, PublishMixin):
     def test_check_mdmf_no_shares(self):
         d = self.publish_mdmf()
         def _then(ignored):
-            for share in self._storage._peers.values():
+            for share in list(self._storage._peers.values()):
                 share.clear()
         d.addCallback(_then)
         d.addCallback(lambda ignored:
@@ -40,8 +52,8 @@ class Checker(unittest.TestCase, CheckerMixin, PublishMixin):
         return d
 
     def test_check_not_enough_shares(self):
-        for shares in self._storage._peers.values():
-            for shnum in shares.keys():
+        for shares in list(self._storage._peers.values()):
+            for shnum in list(shares.keys()):
                 if shnum > 0:
                     del shares[shnum]
         d = self._fn.check(Monitor())
@@ -51,8 +63,8 @@ class Checker(unittest.TestCase, CheckerMixin, PublishMixin):
     def test_check_mdmf_not_enough_shares(self):
         d = self.publish_mdmf()
         def _then(ignored):
-            for shares in self._storage._peers.values():
-                for shnum in shares.keys():
+            for shares in list(self._storage._peers.values()):
+                for shnum in list(shares.keys()):
                     if shnum > 0:
                         del shares[shnum]
         d.addCallback(_then)
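All four hunks above make the same fix: on Python 3, dict.keys() and dict.values() return live views, so deleting entries while iterating one raises RuntimeError; wrapping the view in list() snapshots it first. A standalone demonstration:

    shares = {0: "a", 1: "b", 2: "c"}
    try:
        for shnum in shares.keys():      # live view on Python 3
            if shnum > 0:
                del shares[shnum]
    except RuntimeError as e:
        print(e)                         # dictionary changed size during iteration

    shares = {0: "a", 1: "b", 2: "c"}
    for shnum in list(shares.keys()):    # snapshot first, then mutate safely
        if shnum > 0:
            del shares[shnum]
    print(shares)                        # {0: 'a'}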
@@ -83,7 +95,7 @@ class Checker(unittest.TestCase, CheckerMixin, PublishMixin):
         # On 8 of the shares, corrupt the beginning of the share data.
         # The signature check during the servermap update won't catch this.
         d.addCallback(lambda ignored:
-            corrupt(None, self._storage, "share_data", range(8)))
+            corrupt(None, self._storage, "share_data", list(range(8))))
         # On 2 of the shares, corrupt the end of the share data.
         # The signature check during the servermap update won't catch
         # this either, and the retrieval process will have to process
@@ -242,14 +254,14 @@ class Checker(unittest.TestCase, CheckerMixin, PublishMixin):
         return d
 
     def test_verify_sdmf_empty(self):
-        d = self.publish_sdmf("")
+        d = self.publish_sdmf(b"")
         d.addCallback(lambda ignored: self._fn.check(Monitor(), verify=True))
         d.addCallback(self.check_good, "test_verify_sdmf")
         d.addCallback(flushEventualQueue)
         return d
 
     def test_verify_mdmf_empty(self):
-        d = self.publish_mdmf("")
+        d = self.publish_mdmf(b"")
         d.addCallback(lambda ignored: self._fn.check(Monitor(), verify=True))
         d.addCallback(self.check_good, "test_verify_mdmf")
         d.addCallback(flushEventualQueue)
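The empty strings become b"" because, with unicode_literals in force at the top of this file, a bare "" is text on both interpreters, while the mutable-file publish helpers operate on bytes, which is why the literals switch. Illustration:

    from __future__ import unicode_literals

    print(isinstance("", bytes))   # False: a bare literal is text here
    print(isinstance(b"", bytes))  # True: what the publish helpers expect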
Some files were not shown because too many files have changed in this diff.