diff --git a/.circleci/config.yml b/.circleci/config.yml index f662a3702..29b55ad5f 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -14,44 +14,73 @@ version: 2.1 workflows: ci: jobs: - # Platforms - - "debian-9" + # Start with jobs testing various platforms. + + # Every job that pulls a Docker image from Docker Hub needs to provide + # credentials for that pull operation to avoid being subjected to + # unauthenticated pull limits shared across all of CircleCI. Use this + # first job to define a yaml anchor that can be used to supply a + # CircleCI job context which makes Docker Hub credentials available in + # the environment. + # + # Contexts are managed in the CircleCI web interface: + # + # https://app.circleci.com/settings/organization/github/tahoe-lafs/contexts + - "debian-9": &DOCKERHUB_CONTEXT + context: "dockerhub-auth" + - "debian-8": + <<: *DOCKERHUB_CONTEXT requires: - "debian-9" - - "ubuntu-20-04" + - "ubuntu-20-04": + <<: *DOCKERHUB_CONTEXT - "ubuntu-18-04": + <<: *DOCKERHUB_CONTEXT requires: - "ubuntu-20-04" - "ubuntu-16-04": + <<: *DOCKERHUB_CONTEXT requires: - "ubuntu-20-04" - - "fedora-29" + - "fedora-29": + <<: *DOCKERHUB_CONTEXT - "fedora-28": + <<: *DOCKERHUB_CONTEXT requires: - "fedora-29" - - "centos-8" + - "centos-8": + <<: *DOCKERHUB_CONTEXT - - "nixos-19-09" + - "nixos-19-09": + <<: *DOCKERHUB_CONTEXT # Test against PyPy 2.7 - - "pypy27-buster" + - "pypy27-buster": + <<: *DOCKERHUB_CONTEXT # Just one Python 3.6 configuration while the port is in-progress. - - "python36" + - "python36": + <<: *DOCKERHUB_CONTEXT # Other assorted tasks and configurations - - "lint" - - "pyinstaller" - - "deprecations" - - "c-locale" + - "lint": + <<: *DOCKERHUB_CONTEXT + - "pyinstaller": + <<: *DOCKERHUB_CONTEXT + - "deprecations": + <<: *DOCKERHUB_CONTEXT + - "c-locale": + <<: *DOCKERHUB_CONTEXT # Any locale other than C or UTF-8. - - "another-locale" + - "another-locale": + <<: *DOCKERHUB_CONTEXT - "integration": + <<: *DOCKERHUB_CONTEXT requires: # If the unit test suite doesn't pass, don't bother running the # integration tests. @@ -59,7 +88,11 @@ workflows: # Generate the underlying data for a visualization to aid with Python 3 # porting. - - "build-porting-depgraph" + - "build-porting-depgraph": + <<: *DOCKERHUB_CONTEXT + + - "typechecks": + <<: *DOCKERHUB_CONTEXT images: # Build the Docker images used by the ci jobs. This makes the ci jobs @@ -74,22 +107,55 @@ workflows: - "master" jobs: - - "build-image-debian-8" - - "build-image-debian-9" - - "build-image-ubuntu-16-04" - - "build-image-ubuntu-18-04" - - "build-image-ubuntu-20-04" - - "build-image-fedora-28" - - "build-image-fedora-29" - - "build-image-centos-8" - - "build-image-pypy27-buster" - - "build-image-python36-ubuntu" + - "build-image-debian-8": + <<: *DOCKERHUB_CONTEXT + - "build-image-debian-9": + <<: *DOCKERHUB_CONTEXT + - "build-image-ubuntu-16-04": + <<: *DOCKERHUB_CONTEXT + - "build-image-ubuntu-18-04": + <<: *DOCKERHUB_CONTEXT + - "build-image-ubuntu-20-04": + <<: *DOCKERHUB_CONTEXT + - "build-image-fedora-28": + <<: *DOCKERHUB_CONTEXT + - "build-image-fedora-29": + <<: *DOCKERHUB_CONTEXT + - "build-image-centos-8": + <<: *DOCKERHUB_CONTEXT + - "build-image-pypy27-buster": + <<: *DOCKERHUB_CONTEXT + - "build-image-python36-ubuntu": + <<: *DOCKERHUB_CONTEXT jobs: + dockerhub-auth-template: + # This isn't a real job. It doesn't get scheduled as part of any + # workflow. Instead, it's just a place we can hang a yaml anchor to + # finish the Docker Hub authentication configuration. 
Workflow jobs using + # the DOCKERHUB_CONTEXT anchor will have access to the environment + # variables used here. These variables will allow the Docker Hub image + # pull to be authenticated and hopefully avoid hitting any rate limits. + docker: &DOCKERHUB_AUTH + - image: "null" + auth: + username: $DOCKERHUB_USERNAME + password: $DOCKERHUB_PASSWORD + + steps: + - run: + name: "CircleCI YAML schema conformity" + command: | + # This isn't a real command. We have to have something in this + # space, though, or the CircleCI yaml schema validator gets angry. + # Since this job is never scheduled, this step is never run, so the + # actual value here is irrelevant. + lint: docker: - - image: "circleci/python:2" + - <<: *DOCKERHUB_AUTH + image: "circleci/python:2" steps: - "checkout" @@ -106,7 +172,8 @@ jobs: pyinstaller: docker: - - image: "circleci/python:2" + - <<: *DOCKERHUB_AUTH + image: "circleci/python:2" steps: - "checkout" @@ -131,7 +198,8 @@ jobs: debian-9: &DEBIAN docker: - - image: "tahoelafsci/debian:9-py2.7" + - <<: *DOCKERHUB_AUTH + image: "tahoelafsci/debian:9-py2.7" user: "nobody" environment: &UTF_8_ENVIRONMENT @@ -154,6 +222,8 @@ jobs: # we maintain. WHEELHOUSE_PATH: &WHEELHOUSE_PATH "/tmp/wheelhouse" PIP_FIND_LINKS: "file:///tmp/wheelhouse" + # Upload the coverage report. + UPLOAD_COVERAGE: "yes" # pip cannot install packages if the working directory is not readable. # We want to run a lot of steps as nobody instead of as root. @@ -202,26 +272,32 @@ jobs: - run: &SUBMIT_COVERAGE name: "Submit coverage results" command: | - /tmp/venv/bin/codecov + if [ -n "${UPLOAD_COVERAGE}" ]; then + /tmp/venv/bin/codecov + fi debian-8: <<: *DEBIAN docker: - - image: "tahoelafsci/debian:8-py2.7" + - <<: *DOCKERHUB_AUTH + image: "tahoelafsci/debian:8-py2.7" user: "nobody" pypy27-buster: <<: *DEBIAN docker: - - image: "tahoelafsci/pypy:buster-py2" + - <<: *DOCKERHUB_AUTH + image: "tahoelafsci/pypy:buster-py2" user: "nobody" environment: <<: *UTF_8_ENVIRONMENT # We don't do coverage since it makes PyPy far too slow: TAHOE_LAFS_TOX_ENVIRONMENT: "pypy27" + # Since we didn't collect it, don't upload it. + UPLOAD_COVERAGE: "" c-locale: @@ -250,6 +326,8 @@ jobs: TAHOE_LAFS_TOX_ENVIRONMENT: "deprecations,upcoming-deprecations" # Put the logs somewhere we can report them. TAHOE_LAFS_WARNINGS_LOG: "/tmp/artifacts/deprecation-warnings.log" + # The deprecations tox environments don't do coverage measurement.
+ UPLOAD_COVERAGE: "" integration: @@ -272,21 +350,24 @@ jobs: ubuntu-16-04: <<: *DEBIAN docker: - - image: "tahoelafsci/ubuntu:16.04-py2.7" + - <<: *DOCKERHUB_AUTH + image: "tahoelafsci/ubuntu:16.04-py2.7" user: "nobody" ubuntu-18-04: &UBUNTU_18_04 <<: *DEBIAN docker: - - image: "tahoelafsci/ubuntu:18.04-py2.7" + - <<: *DOCKERHUB_AUTH + image: "tahoelafsci/ubuntu:18.04-py2.7" user: "nobody" python36: <<: *UBUNTU_18_04 docker: - - image: "tahoelafsci/ubuntu:18.04-py3" + - <<: *DOCKERHUB_AUTH + image: "tahoelafsci/ubuntu:18.04-py3" user: "nobody" environment: @@ -301,13 +382,15 @@ jobs: ubuntu-20-04: <<: *DEBIAN docker: - - image: "tahoelafsci/ubuntu:20.04" + - <<: *DOCKERHUB_AUTH + image: "tahoelafsci/ubuntu:20.04" user: "nobody" centos-8: &RHEL_DERIV docker: - - image: "tahoelafsci/centos:8-py2" + - <<: *DOCKERHUB_AUTH + image: "tahoelafsci/centos:8-py2" user: "nobody" environment: *UTF_8_ENVIRONMENT @@ -329,21 +412,24 @@ jobs: fedora-28: <<: *RHEL_DERIV docker: - - image: "tahoelafsci/fedora:28-py" + - <<: *DOCKERHUB_AUTH + image: "tahoelafsci/fedora:28-py" user: "nobody" fedora-29: <<: *RHEL_DERIV docker: - - image: "tahoelafsci/fedora:29-py" + - <<: *DOCKERHUB_AUTH + image: "tahoelafsci/fedora:29-py" user: "nobody" nixos-19-09: docker: # Run in a highly Nix-capable environment. - - image: "nixorg/nix:circleci" + - <<: *DOCKERHUB_AUTH + image: "nixorg/nix:circleci" environment: NIX_PATH: "nixpkgs=https://github.com/NixOS/nixpkgs-channels/archive/nixos-19.09-small.tar.gz" @@ -392,6 +478,18 @@ jobs: . /tmp/venv/bin/activate ./misc/python3/depgraph.sh + typechecks: + docker: + - <<: *DOCKERHUB_AUTH + image: "tahoelafsci/ubuntu:18.04-py3" + + steps: + - "checkout" + - run: + name: "Validate Types" + command: | + /tmp/venv/bin/tox -e typechecks + build-image: &BUILD_IMAGE # This is a template for a job to build a Docker image that has as much of # the setup as we can manage already done and baked in. This cuts down on @@ -400,7 +498,8 @@ jobs: # # https://circleci.com/blog/how-to-build-a-docker-image-on-circleci-2-0/ docker: - - image: "docker:17.05.0-ce-git" + - <<: *DOCKERHUB_AUTH + image: "docker:17.05.0-ce-git" environment: DISTRO: "tahoelafsci/:foo-py2" @@ -410,47 +509,10 @@ jobs: steps: - "checkout" - "setup_remote_docker" - - run: - name: "Get openssl" - command: | - apk add --no-cache openssl - - run: - name: "Get Dockerhub secrets" - command: | - # If you create an encryption key like this: - # - # openssl enc -aes-256-cbc -k secret -P -md sha256 - - # From the output that looks like: - # - # salt=... - # key=... - # iv =... - # - # extract just the value for ``key``. - - # then you can re-generate ``secret-env-cipher`` locally using the - # command: - # - # openssl aes-256-cbc -e -md sha256 -in secret-env-plain -out .circleci/secret-env-cipher -pass env:KEY - # - # Make sure the key is set as the KEY environment variable in the - # CircleCI web interface. You can do this by visiting - # - # after logging in to CircleCI with an account in the tahoe-lafs - # CircleCI team. - # - # Then you can recover the environment plaintext (for example, to - # change and re-encrypt it) like just like CircleCI recovers it - # here: - # - openssl aes-256-cbc -d -md sha256 -in .circleci/secret-env-cipher -pass env:KEY >> ~/.env - run: name: "Log in to Dockerhub" command: | - . ~/.env - # TAHOELAFSCI_PASSWORD come from the secret env. 
- docker login -u tahoelafsci -p ${TAHOELAFSCI_PASSWORD} + docker login -u ${DOCKERHUB_USERNAME} -p ${DOCKERHUB_PASSWORD} - run: name: "Build image" command: | diff --git a/.circleci/secret-env-cipher b/.circleci/secret-env-cipher deleted file mode 100644 index 2facc470c..000000000 --- a/.circleci/secret-env-cipher +++ /dev/null @@ -1 +0,0 @@ -Salted__GP)|![U[vS,Fm:~Y[U_Fxג%4lֻ81/l`n^Z]q&݂%Tn \ No newline at end of file diff --git a/.codecov.yml b/.codecov.yml index 57abf7c0a..166190c5e 100644 --- a/.codecov.yml +++ b/.codecov.yml @@ -32,3 +32,17 @@ coverage: patch: default: threshold: 1% + + +codecov: + # This is a public repository so supposedly we don't "need" to use an upload + # token. However, using one makes sure that CI jobs running against forked + # repositories have coverage uploaded to the right place in codecov so + # their reports aren't incomplete. + token: "abf679b6-e2e6-4b33-b7b5-6cfbd41ee691" + + notify: + # The reference documentation suggests that this is the default setting: + # https://docs.codecov.io/docs/codecovyml-reference#codecovnotifywait_for_ci + # However observation suggests otherwise. + wait_for_ci: true diff --git a/.coveragerc b/.coveragerc index 636258717..d09554cad 100644 --- a/.coveragerc +++ b/.coveragerc @@ -14,3 +14,14 @@ branch = True [report] show_missing = True skip_covered = True + +[paths] +source = +# It looks like this in the checkout + src/ +# It looks like this in the Windows build environment + D:/a/tahoe-lafs/tahoe-lafs/.tox/py*-coverage/Lib/site-packages/ +# Although sometimes it looks like this instead. Also it looks like this on macOS. + .tox/py*-coverage/lib/python*/site-packages/ +# On some Linux CI jobs it looks like this + /tmp/tahoe-lafs.tox/py*-coverage/lib/python*/site-packages/ diff --git a/.github/CONTRIBUTING.rst b/.github/CONTRIBUTING.rst index c8f5093f1..b59385aa4 100644 --- a/.github/CONTRIBUTING.rst +++ b/.github/CONTRIBUTING.rst @@ -17,4 +17,4 @@ Examples of contributions include: * `Patch reviews `_ Before authoring or reviewing a patch, -please familiarize yourself with the `coding standard `_. +please familiarize yourself with the `Coding Standards `_ and the `Contributor Code of Conduct <../docs/CODE_OF_CONDUCT.md>`_. diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 34a4e0875..ee36833ca 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -21,7 +21,7 @@ jobs: steps: - # Get vcpython27 on Windows + Python 2.7, to build zfec + # Get vcpython27 on Windows + Python 2.7, to build netifaces # extension. See https://chocolatey.org/packages/vcpython27 and # https://github.com/crazy-max/ghaction-chocolatey - name: Install MSVC 9.0 for Python 2.7 [Windows] if: matrix.os == 'windows-latest' uses: crazy-max/ghaction-chocolatey@v1 with: args: install vcpython27 + # See https://github.com/actions/checkout. A fetch-depth of 0 + # fetches all tags and branches. - name: Check out Tahoe-LAFS sources uses: actions/checkout@v2 - - - name: Fetch all history for all tags and branches - run: git fetch --prune --unshallow + with: + fetch-depth: 0 - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v1 with: python-version: ${{ matrix.python-version }} + # To use pip caching with GitHub Actions in an OS-independent + # manner, we need the `pip cache dir` command, which became + # available in pip v20.1. At the time of writing this, + # GitHub Actions offers pip v20.3.3 for both ubuntu-latest and + # windows-latest, and pip v20.3.1 for macos-latest.
+ - name: Get pip cache directory + id: pip-cache + run: | + echo "::set-output name=dir::$(pip cache dir)" + + # See https://github.com/actions/cache + - name: Use pip cache + uses: actions/cache@v2 + with: + path: ${{ steps.pip-cache.outputs.dir }} + key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }} + restore-keys: | + ${{ runner.os }}-pip- + - name: Install Python packages run: | pip install --upgrade codecov tox setuptools @@ -78,6 +98,15 @@ jobs: steps: + # Get vcpython27 for Windows + Python 2.7, to build netifaces + # extension. See https://chocolatey.org/packages/vcpython27 and + # https://github.com/crazy-max/ghaction-chocolatey + - name: Install MSVC 9.0 for Python 2.7 [Windows] + if: matrix.os == 'windows-latest' && matrix.python-version == '2.7' + uses: crazy-max/ghaction-chocolatey@v1 + with: + args: install vcpython27 + - name: Install Tor [Ubuntu] if: matrix.os == 'ubuntu-latest' run: sudo apt install tor @@ -92,23 +121,29 @@ jobs: with: args: install tor - - name: Install MSVC 9.0 for Python 2.7 [Windows] - if: matrix.os == 'windows-latest' && matrix.python-version == '2.7' - uses: crazy-max/ghaction-chocolatey@v1 - with: - args: install vcpython27 - - name: Check out Tahoe-LAFS sources uses: actions/checkout@v2 - - - name: Fetch all history for all tags and branches - run: git fetch --prune --unshallow + with: + fetch-depth: 0 - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v1 with: python-version: ${{ matrix.python-version }} + - name: Get pip cache directory + id: pip-cache + run: | + echo "::set-output name=dir::$(pip cache dir)" + + - name: Use pip cache + uses: actions/cache@v2 + with: + path: ${{ steps.pip-cache.outputs.dir }} + key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }} + restore-keys: | + ${{ runner.os }}-pip- + - name: Install Python packages run: | pip install --upgrade tox @@ -141,7 +176,7 @@ jobs: steps: - # Get vcpython27 on Windows + Python 2.7, to build zfec + # Get vcpython27 for Windows + Python 2.7, to build netifaces # extension. 
See https://chocolatey.org/packages/vcpython27 and # https://github.com/crazy-max/ghaction-chocolatey - name: Install MSVC 9.0 for Python 2.7 [Windows] @@ -152,15 +187,27 @@ jobs: - name: Check out Tahoe-LAFS sources uses: actions/checkout@v2 - - - name: Fetch all history for all tags and branches - run: git fetch --prune --unshallow + with: + fetch-depth: 0 - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v1 with: python-version: ${{ matrix.python-version }} + - name: Get pip cache directory + id: pip-cache + run: | + echo "::set-output name=dir::$(pip cache dir)" + + - name: Use pip cache + uses: actions/cache@v2 + with: + path: ${{ steps.pip-cache.outputs.dir }} + key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }} + restore-keys: | + ${{ runner.os }}-pip- + - name: Install Python packages run: | pip install --upgrade tox diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 604614eb0..916b331e4 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,15 +1,10 @@ repos: - - repo: local + - repo: "local" hooks: - - id: codechecks - name: codechecks - stages: ["commit"] - entry: "tox -e codechecks" - language: system - pass_filenames: false - - id: test - name: test + - id: "codechecks" + name: "codechecks" stages: ["push"] - entry: "make test" - language: system - pass_filenames: false + language: "system" + files: ".py$" + entry: "tox -e codechecks" + pass_filenames: true diff --git a/CREDITS b/CREDITS index dd247757e..b0923fc35 100644 --- a/CREDITS +++ b/CREDITS @@ -113,6 +113,8 @@ E: jacob@appelbaum.com W: http://www.appelbaum.net/ P: 12E4 04FF D3C9 31F9 3405 2D06 B884 1A91 9D0F ACE4 D: Debian packaging including init scripts +D: Note that contributions from Jacob Appelbaum (ioerror) are no longer welcome +D: due to behavior unacceptable to community standards in Tor and other projects N: Jeremy Visser D: Ubuntu packaging, usability testing @@ -229,3 +231,12 @@ D: Python3 porting N: Pete Fein E: pete@snake.dev D: Python3 porting + +N: Viktoriia Savchuk +W: https://twitter.com/viktoriiasvchk +D: Developer community focused improvements on the README file. 
+ +N: Lukas Pirl +E: tahoe@lukas-pirl.de +W: http://lukas-pirl.de +D: Buildslaves (Debian, Fedora, CentOS; 2016-2021) diff --git a/Makefile b/Makefile index b48e74b0e..f7a357588 100644 --- a/Makefile +++ b/Makefile @@ -13,8 +13,6 @@ MAKEFLAGS += --warn-undefined-variables MAKEFLAGS += --no-builtin-rules # Local target variables -VCS_HOOK_SAMPLES=$(wildcard .git/hooks/*.sample) -VCS_HOOKS=$(VCS_HOOK_SAMPLES:%.sample=%) PYTHON=python export PYTHON PYFLAKES=flake8 @@ -31,15 +29,6 @@ TEST_SUITE=allmydata default: @echo "no default target" -.PHONY: install-vcs-hooks -## Install the VCS hooks to run linters on commit and all tests on push -install-vcs-hooks: .git/hooks/pre-commit .git/hooks/pre-push -.PHONY: uninstall-vcs-hooks -## Remove the VCS hooks -uninstall-vcs-hooks: .tox/create-venvs.log - "./$(dir $(<))py36/bin/pre-commit" uninstall || true - "./$(dir $(<))py36/bin/pre-commit" uninstall -t pre-push || true - .PHONY: test ## Run all tests and code reports test: .tox/create-venvs.log @@ -215,7 +204,7 @@ clean: rm -f *.pkg .PHONY: distclean -distclean: clean uninstall-vcs-hooks +distclean: clean rm -rf src/*.egg-info rm -f src/allmydata/_version.py rm -f src/allmydata/_appname.py @@ -261,6 +250,3 @@ src/allmydata/_version.py: .tox/create-venvs.log: tox.ini setup.py tox --notest -p all | tee -a "$(@)" - -$(VCS_HOOKS): .tox/create-venvs.log .pre-commit-config.yaml - "./$(dir $(<))py36/bin/pre-commit" install --hook-type $(@:.git/hooks/%=%) diff --git a/README.rst b/README.rst index d3b089186..98150ed27 100644 --- a/README.rst +++ b/README.rst @@ -1,97 +1,119 @@ -========== -Tahoe-LAFS -========== +====================================== +Free and Open decentralized data store +====================================== -Tahoe-LAFS is a Free and Open decentralized cloud storage system. It -distributes your data across multiple servers. Even if some of the servers -fail or are taken over by an attacker, the entire file store continues to -function correctly, preserving your privacy and security. +|image0| -For full documentation, please see -http://tahoe-lafs.readthedocs.io/en/latest/ . +`Tahoe-LAFS `__ (Tahoe Least-Authority File Store) is the first free software / open-source storage technology that distributes your data across multiple servers. Even if some servers fail or are taken over by an attacker, the entire file store continues to function correctly, preserving your privacy and security. |Contributor Covenant| |readthedocs| |travis| |circleci| |codecov| -INSTALLING -========== +Table of contents -There are three ways to install Tahoe-LAFS. +- `About Tahoe-LAFS <#about-tahoe-lafs>`__ -using OS packages -^^^^^^^^^^^^^^^^^ +- `Installation <#installation>`__ -Pre-packaged versions are available for several operating systems: +- `Issues <#issues>`__ -* Debian and Ubuntu users can ``apt-get install tahoe-lafs`` -* NixOS, NetBSD (pkgsrc), ArchLinux, Slackware, and Gentoo have packages - available, see `OSPackages`_ for details -* `Mac`_ and Windows installers are in development. +- `Documentation <#documentation>`__ -via pip -^^^^^^^ +- `Community <#community>`__ -If you don't use an OS package, you'll need Python 2.7 and `pip`_. You may -also need a C compiler, and the development headers for python, libffi, and -OpenSSL. On a Debian-like system, use ``apt-get install build-essential -python-dev libffi-dev libssl-dev python-virtualenv``. On Windows, see -``_. 
+- `Contributing <#contributing>`__ -Then, to install the most recent release, just run: +- `FAQ <#faq>`__ -* ``pip install tahoe-lafs`` +- `License <#license>`__ -from source -^^^^^^^^^^^ -To install from source (either so you can hack on it, or just to run -pre-release code), you should create a virtualenv and install into that: +💡 About Tahoe-LAFS +------------------- -* ``git clone https://github.com/tahoe-lafs/tahoe-lafs.git`` -* ``cd tahoe-lafs`` -* ``virtualenv --python=python2.7 venv`` -* ``venv/bin/pip install --upgrade setuptools`` -* ``venv/bin/pip install --editable .`` -* ``venv/bin/tahoe --version`` +Tahoe-LAFS helps you to store files while granting confidentiality, integrity, and availability of your data. -To run the unit test suite: +How does it work? You run a client program on your computer, which talks to one or more storage servers on other computers. When you tell your client to store a file, it will encrypt that file, encode it into multiple pieces, then spread those pieces out among various servers. The pieces are all encrypted and protected against modifications. Later, when you ask your client to retrieve the file, it will find the necessary pieces, make sure they haven’t been corrupted, reassemble them, and decrypt the result. -* ``tox`` +| |image2| +| *The image is taken from meejah's* \ `blog `__ \ *post at Torproject.org.* -You can pass arguments to ``trial`` with an environment variable. For -example, you can run the test suite on multiple cores to speed it up: +| -* ``TAHOE_LAFS_TRIAL_ARGS="-j4" tox`` +The client creates pieces (“shares”) that have a configurable amount of redundancy, so even if some servers fail, you can still get your data back. Corrupt shares are detected and ignored so that the system can tolerate server-side hard-drive errors. All files are encrypted (with a unique key) before uploading, so even a malicious server operator cannot read your data. The only thing you ask of the servers is that they can (usually) provide the shares when you ask for them: you aren’t relying upon them for confidentiality, integrity, or absolute availability. -For more detailed instructions, read ``_ . +Tahoe-LAFS was first designed in 2007, following the "principle of least authority", a security best practice requiring system components to only have the privilege necessary to complete their intended function and not more. -Once ``tahoe --version`` works, see ``_ to learn how to set -up your first Tahoe-LAFS node. +Please read more about Tahoe-LAFS architecture `here `__. -LICENCE -======= +✅ Installation +--------------- -Copyright 2006-2018 The Tahoe-LAFS Software Foundation +For more detailed instructions, read `docs/INSTALL.rst `__ . -You may use this package under the GNU General Public License, version 2 or, -at your option, any later version. You may use this package under the -Transitive Grace Period Public Licence, version 1.0, or at your option, any -later version. (You may choose to use this package under the terms of either -licence, at your option.) See the file `COPYING.GPL`_ for the terms of the -GNU General Public License, version 2. See the file `COPYING.TGPPL`_ for -the terms of the Transitive Grace Period Public Licence, version 1.0. +- `Building Tahoe-LAFS on Windows `__ -See `TGPPL.PDF`_ for why the TGPPL exists, graphically illustrated on three -slides. +- `OS-X Packaging `__ -.. _OSPackages: https://tahoe-lafs.org/trac/tahoe-lafs/wiki/OSPackages -.. _Mac: docs/OS-X.rst -.. _pip: https://pip.pypa.io/en/stable/installing/ -.. 
_COPYING.GPL: https://github.com/tahoe-lafs/tahoe-lafs/blob/master/COPYING.GPL -.. _COPYING.TGPPL: https://github.com/tahoe-lafs/tahoe-lafs/blob/master/COPYING.TGPPL.rst -.. _TGPPL.PDF: https://tahoe-lafs.org/~zooko/tgppl.pdf +Once tahoe --version works, see `docs/running.rst `__ to learn how to set up your first Tahoe-LAFS node. ----- +🤖 Issues +--------- + +Tahoe-LAFS uses the Trac instance to track `issues `__. Please email jean-paul plus tahoe-lafs at leastauthority dot com for an account. + +📑 Documentation +---------------- + +You can find the full Tahoe-LAFS documentation at our `documentation site `__. + +💬 Community +------------ + +Get involved with the Tahoe-LAFS community: + +- Chat with Tahoe-LAFS developers at #tahoe-lafs chat on irc.freenode.net or `Slack `__. + +- Join our `weekly conference calls `__ with core developers and interested community members. + +- Subscribe to `the tahoe-dev mailing list `__, the community forum for discussion of Tahoe-LAFS design, implementation, and usage. + +🤗 Contributing +--------------- + +As a community-driven open source project, Tahoe-LAFS welcomes contributions of any form: + +- `Code patches `__ + +- `Documentation improvements `__ + +- `Bug reports `__ + +- `Patch reviews `__ + +Before authoring or reviewing a patch, please familiarize yourself with the `Coding Standard `__ and the `Contributor Code of Conduct `__. + + +❓ FAQ +------ + +Need more information? Please check our `FAQ page `__. + +📄 License +---------- + +Copyright 2006-2020 The Tahoe-LAFS Software Foundation + +You may use this package under the GNU General Public License, version 2 or, at your option, any later version. You may use this package under the Transitive Grace Period Public Licence, version 1.0, or at your choice, any later version. (You may choose to use this package under the terms of either license, at your option.) See the file `COPYING.GPL `__ for the terms of the GNU General Public License, version 2. See the file `COPYING.TGPPL `__ for the terms of the Transitive Grace Period Public Licence, version 1.0. + +See `TGPPL.PDF `__ for why the TGPPL exists, graphically illustrated on three slides. + +.. |image0| image:: docs/_static/media/image2.png + :width: 3in + :height: 0.91667in +.. |image2| image:: docs/_static/media/image1.png + :width: 6.9252in + :height: 2.73611in .. |readthedocs| image:: http://readthedocs.org/projects/tahoe-lafs/badge/?version=latest :alt: documentation status :target: http://tahoe-lafs.readthedocs.io/en/latest/?badge=latest diff --git a/docs/INSTALL.rst b/docs/INSTALL.rst index 1f021d576..29a3b5b6b 100644 --- a/docs/INSTALL.rst +++ b/docs/INSTALL.rst @@ -39,9 +39,7 @@ If you are on Windows, please see :doc:`windows` for platform-specific instructions. If you are on a Mac, you can either follow these instructions, or use the -pre-packaged bundle described in :doc:`OS-X`. The Tahoe project hosts -pre-compiled "wheels" for all dependencies, so use the ``--find-links=`` -option described below to avoid needing a compiler. +pre-packaged bundle described in :doc:`OS-X`. Many Linux distributions include Tahoe-LAFS packages. Debian and Ubuntu users can ``apt-get install tahoe-lafs``. See `OSPackages`_ for other @@ -54,9 +52,14 @@ Preliminaries ============= If you don't use a pre-packaged copy of Tahoe, you can build it yourself. -You'll need Python2.7, pip, and virtualenv. On unix-like platforms, you will -need a C compiler, the Python development headers, and some libraries -(libffi-dev and libssl-dev). 
+You'll need Python2.7, pip, and virtualenv. +Tahoe-LAFS depends on some libraries which require a C compiler to build. +However, for many platforms, PyPI hosts already-built packages of libraries. + +If there is no already-built package for your platform, +you will need a C compiler, +the Python development headers, +and some libraries (libffi-dev and libssl-dev). On a modern Debian/Ubuntu-derived distribution, this command will get you everything you need:: @@ -64,8 +67,7 @@ everything you need:: apt-get install build-essential python-dev libffi-dev libssl-dev libyaml-dev python-virtualenv On OS-X, install pip and virtualenv as described below. If you want to -compile the dependencies yourself (instead of using ``--find-links`` to take -advantage of the pre-compiled ones we host), you'll also need to install +compile the dependencies yourself, you'll also need to install Xcode and its command-line tools. **Note** that Tahoe-LAFS depends on `openssl 1.1.1c` or greater. @@ -150,30 +152,24 @@ from PyPI with ``venv/bin/pip install tahoe-lafs``. After installation, run % virtualenv venv New python executable in ~/venv/bin/python2.7 Installing setuptools, pip, wheel...done. - + % venv/bin/pip install -U pip setuptools Downloading/unpacking pip from https://pypi.python.org/... ... Successfully installed pip setuptools - + % venv/bin/pip install tahoe-lafs Collecting tahoe-lafs ... Installing collected packages: ... Successfully installed ... - + % venv/bin/tahoe --version tahoe-lafs: 1.15.0 foolscap: ... - + % -On OS-X, instead of ``pip install tahoe-lafs``, use this command to take -advantage of the hosted pre-compiled wheels:: - - venv/bin/pip install --find-links=https://tahoe-lafs.org/deps tahoe-lafs - - Install From a Source Tarball ----------------------------- @@ -182,13 +178,13 @@ You can also install directly from the source tarball URL:: % virtualenv venv New python executable in ~/venv/bin/python2.7 Installing setuptools, pip, wheel...done. - + % venv/bin/pip install https://tahoe-lafs.org/downloads/tahoe-lafs-1.15.0.tar.bz2 Collecting https://tahoe-lafs.org/downloads/tahoe-lafs-1.15.0.tar.bz2 ... Installing collected packages: ... Successfully installed ... - + % venv/bin/tahoe --version tahoe-lafs: 1.15.0 ... @@ -213,16 +209,16 @@ with the ``--editable`` flag. You should also use the ``[test]`` extra to get the additional libraries needed to run the unit tests:: % git clone https://github.com/tahoe-lafs/tahoe-lafs.git - + % cd tahoe-lafs - + % virtualenv venv - + % venv/bin/pip install --editable .[test] Obtaining file::~/tahoe-lafs ... Successfully installed ... - + % venv/bin/tahoe --version tahoe-lafs: 1.15.0 ... @@ -282,7 +278,7 @@ result in a "all tests passed" mesage:: test_missing_signature ... [OK] ... 
Ran 1186 tests in 423.179s - + PASSED (skips=7, expectedFailures=3, successes=1176) __________________________ summary ___________________________________ py27: commands succeeded diff --git a/docs/README.md b/docs/README.txt similarity index 100% rename from docs/README.md rename to docs/README.txt diff --git a/docs/_static/media/image1.png b/docs/_static/media/image1.png new file mode 100644 index 000000000..e25576f47 Binary files /dev/null and b/docs/_static/media/image1.png differ diff --git a/docs/_static/media/image2.png b/docs/_static/media/image2.png new file mode 100644 index 000000000..d8704f359 Binary files /dev/null and b/docs/_static/media/image2.png differ diff --git a/docs/about.rst b/docs/about.rst index 626792d6b..120abb079 100644 --- a/docs/about.rst +++ b/docs/about.rst @@ -67,12 +67,12 @@ Here's how it works: A "storage grid" is made up of a number of storage servers. A storage server has direct attached storage (typically one or more hard disks). A "gateway" communicates with storage nodes, and uses them to provide access to the -grid over protocols such as HTTP(S), SFTP or FTP. +grid over protocols such as HTTP(S) and SFTP. Note that you can find "client" used to refer to gateway nodes (which act as a client to storage servers), and also to processes or programs connecting to a gateway node and performing operations on the grid -- for example, a CLI -command, Web browser, SFTP client, or FTP client. +command, Web browser, or SFTP client. Users do not rely on storage servers to provide *confidentiality* nor *integrity* for their data -- instead all of the data is encrypted and diff --git a/docs/anonymity-configuration.rst b/docs/anonymity-configuration.rst index 5ad9ae740..d25f8ad41 100644 --- a/docs/anonymity-configuration.rst +++ b/docs/anonymity-configuration.rst @@ -273,7 +273,7 @@ Then, do the following: [connections] tcp = tor -* Launch the Tahoe server with ``tahoe start $NODEDIR`` +* Launch the Tahoe server with ``tahoe run $NODEDIR`` The ``tub.port`` section will cause the Tahoe server to listen on PORT, but bind the listening socket to the loopback interface, which is not reachable @@ -435,4 +435,3 @@ It is therefore important that your I2P router is sharing bandwidth with other routers, so that you can give back as you use I2P. This will never impair the performance of your Tahoe-LAFS node, because your I2P router will always prioritize your own traffic. - diff --git a/docs/backdoors.rst b/docs/backdoors.rst index 97716fcad..0fec9efbc 100644 --- a/docs/backdoors.rst +++ b/docs/backdoors.rst @@ -64,3 +64,9 @@ Peter Secor Shawn Willden Terrell Russell + +Jean-Paul Calderone + +meejah + +Sajith Sasidharan diff --git a/docs/conf.py b/docs/conf.py index 34ddd1bd4..612c324a3 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -28,7 +28,7 @@ import os # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. -extensions = [] +extensions = ['recommonmark'] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] @@ -36,7 +36,7 @@ templates_path = ['_templates'] # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # source_suffix = ['.rst', '.md'] -source_suffix = '.rst' +source_suffix = ['.rst', '.md'] # The encoding of source files. 
#source_encoding = 'utf-8-sig' diff --git a/docs/configuration.rst b/docs/configuration.rst index ab4751a04..93c9aa0f1 100644 --- a/docs/configuration.rst +++ b/docs/configuration.rst @@ -75,13 +75,12 @@ The item descriptions below use the following types: Node Types ========== -A node can be a client/server, an introducer, or a statistics gatherer. +A node can be a client/server or an introducer. Client/server nodes provide one or more of the following services: * web-API service * SFTP service -* FTP service * helper service * storage service. @@ -365,7 +364,7 @@ set the ``tub.location`` option described below. also generally reduced when operating in private mode. When False, any of the following configuration problems will cause - ``tahoe start`` to throw a PrivacyError instead of starting the node: + ``tahoe run`` to throw a PrivacyError instead of starting the node: * ``[node] tub.location`` contains any ``tcp:`` hints @@ -398,13 +397,13 @@ This section controls *when* Tor and I2P are used. The ``[tor]`` and ``[i2p]`` sections (described later) control *how* Tor/I2P connections are managed. -All Tahoe nodes need to make a connection to the Introducer; the ``[client] -introducer.furl`` setting (described below) indicates where the Introducer -lives. Tahoe client nodes must also make connections to storage servers: -these targets are specified in announcements that come from the Introducer. -Both are expressed as FURLs (a Foolscap URL), which include a list of -"connection hints". Each connection hint describes one (of perhaps many) -network endpoints where the service might live. +All Tahoe nodes need to make a connection to the Introducer; the +``private/introducers.yaml`` file (described below) configures where one or more +Introducers live. Tahoe client nodes must also make connections to storage +servers: these targets are specified in announcements that come from the +Introducer. Both are expressed as FURLs (a Foolscap URL), which include a +list of "connection hints". Each connection hint describes one (of perhaps +many) network endpoints where the service might live. Connection hints include a type, and look like: @@ -580,6 +579,8 @@ Client Configuration ``introducer.furl = (FURL string, mandatory)`` + DEPRECATED. See :ref:`introducer-definitions`. + This FURL tells the client how to connect to the introducer. Each Tahoe-LAFS grid is defined by an introducer. The introducer's FURL is created by the introducer node and written into its private base @@ -591,11 +592,6 @@ Client Configuration If provided, the node will attempt to connect to and use the given helper for uploads. See :doc:`helper` for details. -``stats_gatherer.furl = (FURL string, optional)`` - - If provided, the node will connect to the given stats gatherer and - provide it with operational statistics. - ``shares.needed = (int, optional) aka "k", default 3`` ``shares.total = (int, optional) aka "N", N >= k, default 10`` @@ -711,12 +707,12 @@ CLI file store, uploading/downloading files, and creating/running Tahoe nodes. See :doc:`frontends/CLI` for details. -SFTP, FTP +SFTP - Tahoe can also run both SFTP and FTP servers, and map a username/password + Tahoe can also run SFTP servers, and map a username/password pair to a top-level Tahoe directory. See :doc:`frontends/FTP-and-SFTP` - for instructions on configuring these services, and the ``[sftpd]`` and - ``[ftpd]`` sections of ``tahoe.cfg``. + for instructions on configuring this service, and the ``[sftpd]`` + section of ``tahoe.cfg``. 
Storage Server Configuration @@ -909,11 +905,6 @@ This section describes these other files. This file is used to construct an introducer, and is created by the "``tahoe create-introducer``" command. -``tahoe-stats-gatherer.tac`` - - This file is used to construct a statistics gatherer, and is created by the - "``tahoe create-stats-gatherer``" command. - ``private/control.furl`` This file contains a FURL that provides access to a control port on the @@ -965,29 +956,28 @@ This section describes these other files. with as many people as possible, put the empty string (so that ``private/convergence`` is a zero-length file). -Additional Introducer Definitions -================================= +.. _introducer-definitions: -The ``private/introducers.yaml`` file defines additional Introducers. The -first introducer is defined in ``tahoe.cfg``, in ``[client] -introducer.furl``. To use two or more Introducers, choose a locally-unique -"petname" for each one, then define their FURLs in -``private/introducers.yaml`` like this:: +Introducer Definitions +====================== + +The ``private/introducers.yaml`` file defines Introducers. +Choose a locally-unique "petname" for each one then define their FURLs in ``private/introducers.yaml`` like this:: introducers: petname2: - furl: FURL2 + furl: "FURL2" petname3: - furl: FURL3 + furl: "FURL3" Servers will announce themselves to all configured introducers. Clients will merge the announcements they receive from all introducers. Nothing will re-broadcast an announcement (i.e. telling introducer 2 about something you heard from introducer 1). -If you omit the introducer definitions from both ``tahoe.cfg`` and -``introducers.yaml``, the node will not use an Introducer at all. Such -"introducerless" clients must be configured with static servers (described +If you omit the introducer definitions from ``introducers.yaml``, +the node will not use an Introducer at all. +Such "introducerless" clients must be configured with static servers (described below), or they will not be able to upload and download files. Static Server Definitions @@ -1152,7 +1142,6 @@ a legal one. timeout.disconnect = 1800 [client] - introducer.furl = pb://ok45ssoklj4y7eok5c3xkmj@tcp:tahoe.example:44801/ii3uumo helper.furl = pb://ggti5ssoklj4y7eok5c3xkmj@tcp:helper.tahoe.example:7054/kk8lhr [storage] @@ -1163,6 +1152,11 @@ a legal one. [helper] enabled = True +To be introduced to storage servers, here is a sample ``private/introducers.yaml`` which can be used in conjunction:: + + introducers: + examplegrid: + furl: "pb://ok45ssoklj4y7eok5c3xkmj@tcp:tahoe.example:44801/ii3uumo" Old Configuration Files ======================= diff --git a/docs/contributing.rst b/docs/contributing.rst new file mode 100644 index 000000000..15e1b6432 --- /dev/null +++ b/docs/contributing.rst @@ -0,0 +1 @@ +.. include:: ../.github/CONTRIBUTING.rst diff --git a/docs/developer-guide.rst b/docs/developer-guide.rst index 2d26e68a4..a44414f8f 100644 --- a/docs/developer-guide.rst +++ b/docs/developer-guide.rst @@ -5,23 +5,17 @@ Developer Guide Pre-commit Checks ----------------- -This project is configured for use with `pre-commit`_ to install `VCS/git hooks`_ which -perform some static code analysis checks and other code checks to catch common errors -before each commit and to run the full self-test suite to find less obvious regressions -before each push to a remote. 
+This project is configured for use with `pre-commit`_ to install `VCS/git hooks`_ which perform some static code analysis checks and other code checks to catch common errors. +These hooks can be configured to run before commits or pushes. For example:: - tahoe-lafs $ make install-vcs-hooks - ... - + ./.tox//py36/bin/pre-commit install --hook-type pre-commit - pre-commit installed at .git/hooks/pre-commit - + ./.tox//py36/bin/pre-commit install --hook-type pre-push + tahoe-lafs $ pre-commit install --hook-type pre-push pre-commit installed at .git/hooks/pre-push - tahoe-lafs $ python -c "import pathlib; pathlib.Path('src/allmydata/tabbed.py').write_text('def foo():\\n\\tpass\\n')" - tahoe-lafs $ git add src/allmydata/tabbed.py + tahoe-lafs $ echo "undefined" > src/allmydata/undefined_name.py + tahoe-lafs $ git add src/allmydata/undefined_name.py tahoe-lafs $ git commit -a -m "Add a file that violates flake8" - ... + tahoe-lafs $ git push codechecks...............................................................Failed - hook id: codechecks - exit code: 1 @@ -30,58 +24,17 @@ For example:: codechecks inst-nodeps: ... codechecks installed: ... codechecks run-test-pre: PYTHONHASHSEED='...' - codechecks run-test: commands[0] | flake8 src static misc setup.py - src/allmydata/tabbed.py:2:1: W191 indentation contains tabs - ERROR: InvocationError for command ./tahoe-lafs/.tox/codechecks/bin/flake8 src static misc setup.py (exited with code 1) + codechecks run-test: commands[0] | flake8 src/allmydata/undefined_name.py + src/allmydata/undefined_name.py:1:1: F821 undefined name 'undefined' + ERROR: InvocationError for command ./tahoe-lafs/.tox/codechecks/bin/flake8 src/allmydata/undefined_name.py (exited with code 1) ___________________________________ summary ____________________________________ ERROR: codechecks: commands failed - ... To uninstall:: - tahoe-lafs $ make uninstall-vcs-hooks - ... - + ./.tox/py36/bin/pre-commit uninstall - pre-commit uninstalled - + ./.tox/py36/bin/pre-commit uninstall -t pre-push + tahoe-lafs $ pre-commit uninstall --hook-type pre-push pre-push uninstalled -Note that running the full self-test suite takes several minutes so expect pushing to -take some time. If you can't or don't want to wait for the hooks in some cases, use the -``--no-verify`` option to ``$ git commit ...`` or ``$ git push ...``. Alternatively, -see the `pre-commit`_ documentation and CLI help output and use the committed -`pre-commit configuration`_ as a starting point to write a local, uncommitted -``../.pre-commit-config.local.yaml`` configuration to use instead. For example:: - - tahoe-lafs $ ./.tox/py36/bin/pre-commit --help - tahoe-lafs $ ./.tox/py36/bin/pre-commit instll --help - tahoe-lafs $ cp "./.pre-commit-config.yaml" "./.pre-commit-config.local.yaml" - tahoe-lafs $ editor "./.pre-commit-config.local.yaml" - ... - tahoe-lafs $ ./.tox/py36/bin/pre-commit install -c "./.pre-commit-config.local.yaml" -t pre-push - pre-commit installed at .git/hooks/pre-push - tahoe-lafs $ git commit -a -m "Add a file that violates flake8" - [3398.pre-commit 29f8f43d2] Add a file that violates flake8 - 1 file changed, 2 insertions(+) - create mode 100644 src/allmydata/tabbed.py - tahoe-lafs $ git push - ... - codechecks...............................................................Failed - - hook id: codechecks - - exit code: 1 - - GLOB sdist-make: ./tahoe-lafs/setup.py - codechecks inst-nodeps: ... - codechecks installed: ... - codechecks run-test-pre: PYTHONHASHSEED='...'
- codechecks run-test: commands[0] | flake8 src static misc setup.py - src/allmydata/tabbed.py:2:1: W191 indentation contains tabs - ERROR: InvocationError for command ./tahoe-lafs/.tox/codechecks/bin/flake8 src static misc setup.py (exited with code 1) - ___________________________________ summary ____________________________________ - ERROR: codechecks: commands failed - ... - - error: failed to push some refs to 'github.com:jaraco/tahoe-lafs.git' .. _`pre-commit`: https://pre-commit.com diff --git a/docs/frontends/CLI.rst b/docs/frontends/CLI.rst index e46936bad..0badede98 100644 --- a/docs/frontends/CLI.rst +++ b/docs/frontends/CLI.rst @@ -85,7 +85,7 @@ Node Management "``tahoe create-node [NODEDIR]``" is the basic make-a-new-node command. It creates a new directory and populates it with files that -will allow the "``tahoe start``" and related commands to use it later +will allow the "``tahoe run``" and related commands to use it later on. ``tahoe create-node`` creates nodes that have client functionality (upload/download files), web API services (controlled by the '[node]web.port' configuration), and storage services (unless @@ -94,8 +94,7 @@ on. ``tahoe create-node`` creates nodes that have client functionality NODEDIR defaults to ``~/.tahoe/`` , and newly-created nodes default to publishing a web server on port 3456 (limited to the loopback interface, at 127.0.0.1, to restrict access to other programs on the same host). All of the -other "``tahoe``" subcommands use corresponding defaults (with the exception -that "``tahoe run``" defaults to running a node in the current directory). +other "``tahoe``" subcommands use corresponding defaults. "``tahoe create-client [NODEDIR]``" creates a node with no storage service. That is, it behaves like "``tahoe create-node --no-storage [NODEDIR]``". @@ -117,25 +116,6 @@ the same way on all platforms and logs to stdout. If you want to run the process as a daemon, it is recommended that you use your favourite daemonization tool. -The now-deprecated "``tahoe start [NODEDIR]``" command will launch a -previously-created node. It will launch the node into the background -using ``tahoe daemonize`` (and internal-only command, not for user -use). On some platforms (including Windows) this command is unable to -run a daemon in the background; in that case it behaves in the same -way as "``tahoe run``". ``tahoe start`` also monitors the logs for up -to 5 seconds looking for either a succesful startup message or for -early failure messages and produces an appropriate exit code. You are -encouraged to use ``tahoe run`` along with your favourite -daemonization tool instead of this. ``tahoe start`` is maintained for -backwards compatibility of users already using it; new scripts should -depend on ``tahoe run``. - -"``tahoe stop [NODEDIR]``" will shut down a running node. "``tahoe -restart [NODEDIR]``" will stop and then restart a running -node. Similar to above, you should use ``tahoe run`` instead alongside -your favourite daemonization tool. - - File Store Manipulation ======================= diff --git a/docs/frontends/FTP-and-SFTP.rst b/docs/frontends/FTP-and-SFTP.rst index dc348af34..ee6371812 100644 --- a/docs/frontends/FTP-and-SFTP.rst +++ b/docs/frontends/FTP-and-SFTP.rst @@ -1,22 +1,21 @@ .. -*- coding: utf-8-with-signature -*- -================================= -Tahoe-LAFS SFTP and FTP Frontends -================================= +======================== +Tahoe-LAFS SFTP Frontend +======================== -1. `SFTP/FTP Background`_ +1. 
`SFTP Background`_ 2. `Tahoe-LAFS Support`_ 3. `Creating an Account File`_ 4. `Running An Account Server (accounts.url)`_ 5. `Configuring SFTP Access`_ -6. `Configuring FTP Access`_ -7. `Dependencies`_ -8. `Immutable and Mutable Files`_ -9. `Known Issues`_ +6. `Dependencies`_ +7. `Immutable and Mutable Files`_ +8. `Known Issues`_ -SFTP/FTP Background -=================== +SFTP Background +=============== FTP is the venerable internet file-transfer protocol, first developed in 1971. The FTP server usually listens on port 21. A separate connection is @@ -33,20 +32,18 @@ Both FTP and SFTP were developed assuming a UNIX-like server, with accounts and passwords, octal file modes (user/group/other, read/write/execute), and ctime/mtime timestamps. -We recommend SFTP over FTP, because the protocol is better, and the server -implementation in Tahoe-LAFS is more complete. See `Known Issues`_, below, -for details. +Previous versions of Tahoe-LAFS supported FTP, but now only the superior SFTP +frontend is supported. See `Known Issues`_, below, for details on the +limitations of SFTP. Tahoe-LAFS Support ================== All Tahoe-LAFS client nodes can run a frontend SFTP server, allowing regular SFTP clients (like ``/usr/bin/sftp``, the ``sshfs`` FUSE plugin, and many -others) to access the file store. They can also run an FTP server, so FTP -clients (like ``/usr/bin/ftp``, ``ncftp``, and others) can too. These -frontends sit at the same level as the web-API interface. +others) to access the file store. -Since Tahoe-LAFS does not use user accounts or passwords, the SFTP/FTP +Since Tahoe-LAFS does not use user accounts or passwords, the SFTP servers must be configured with a way to first authenticate a user (confirm that a prospective client has a legitimate claim to whatever authorities we might grant a particular user), and second to decide what directory cap @@ -173,39 +170,6 @@ clients and with the sshfs filesystem, see wiki:SftpFrontend_ .. _wiki:SftpFrontend: https://tahoe-lafs.org/trac/tahoe-lafs/wiki/SftpFrontend -Configuring FTP Access -====================== - -To enable the FTP server with an accounts file, add the following lines to -the BASEDIR/tahoe.cfg file:: - - [ftpd] - enabled = true - port = tcp:8021:interface=127.0.0.1 - accounts.file = private/accounts - -The FTP server will listen on the given port number and on the loopback -interface only. The "accounts.file" pathname will be interpreted relative to -the node's BASEDIR. - -To enable the FTP server with an account server instead, provide the URL of -that server in an "accounts.url" directive:: - - [ftpd] - enabled = true - port = tcp:8021:interface=127.0.0.1 - accounts.url = https://example.com/login - -You can provide both accounts.file and accounts.url, although it probably -isn't very useful except for testing. - -FTP provides no security, and so your password or caps could be eavesdropped -if you connect to the FTP server remotely. The examples above include -":interface=127.0.0.1" in the "port" option, which causes the server to only -accept connections from localhost. - -Public key authentication is not supported for FTP. - Dependencies ============ @@ -216,7 +180,7 @@ separately: debian puts it in the "python-twisted-conch" package. Immutable and Mutable Files =========================== -All files created via SFTP (and FTP) are immutable files. However, files can +All files created via SFTP are immutable files. 
However, files can only be created in writeable directories, which allows the directory entry to be relinked to a different file. Normally, when the path of an immutable file is opened for writing by SFTP, the directory entry is relinked to another @@ -256,18 +220,3 @@ See also wiki:SftpFrontend_. .. _ticket #1059: https://tahoe-lafs.org/trac/tahoe-lafs/ticket/1059 .. _ticket #1089: https://tahoe-lafs.org/trac/tahoe-lafs/ticket/1089 - -Known Issues in the FTP Frontend --------------------------------- - -Mutable files are not supported by the FTP frontend (`ticket #680`_). - -Non-ASCII filenames are not supported by FTP (`ticket #682`_). - -The FTP frontend sometimes fails to report errors, for example if an upload -fails because it does meet the "servers of happiness" threshold (`ticket -#1081`_). - -.. _ticket #680: https://tahoe-lafs.org/trac/tahoe-lafs/ticket/680 -.. _ticket #682: https://tahoe-lafs.org/trac/tahoe-lafs/ticket/682 -.. _ticket #1081: https://tahoe-lafs.org/trac/tahoe-lafs/ticket/1081 diff --git a/docs/frontends/webapi.rst b/docs/frontends/webapi.rst index 2ee348080..77ce11974 100644 --- a/docs/frontends/webapi.rst +++ b/docs/frontends/webapi.rst @@ -2032,10 +2032,11 @@ potential for surprises when the file store structure is changed. Tahoe-LAFS provides a mutable file store, but the ways that the store can change are limited. The only things that can change are: - * the mapping from child names to child objects inside mutable directories - (by adding a new child, removing an existing child, or changing an - existing child to point to a different object) - * the contents of mutable files + +* the mapping from child names to child objects inside mutable directories + (by adding a new child, removing an existing child, or changing an + existing child to point to a different object) +* the contents of mutable files Obviously if you query for information about the file store and then act to change it (such as by getting a listing of the contents of a mutable @@ -2145,7 +2146,7 @@ you could do the following:: tahoe debug dump-cap URI:CHK:n7r3m6wmomelk4sep3kw5cvduq:os7ijw5c3maek7pg65e5254k2fzjflavtpejjyhshpsxuqzhcwwq:3:20:14861 -> storage index: whpepioyrnff7orecjolvbudeu echo "whpepioyrnff7orecjolvbudeu my puppy told me to" >>$NODEDIR/access.blacklist - tahoe restart $NODEDIR + # ... restart the node to re-read configuration ... tahoe get URI:CHK:n7r3m6wmomelk4sep3kw5cvduq:os7ijw5c3maek7pg65e5254k2fzjflavtpejjyhshpsxuqzhcwwq:3:20:14861 -> error, 403 Access Prohibited: my puppy told me to @@ -2157,7 +2158,7 @@ When modifying the file, be careful to update it atomically, otherwise a request may arrive while the file is only halfway written, and the partial file may be incorrectly parsed. -The blacklist is applied to all access paths (including SFTP, FTP, and CLI +The blacklist is applied to all access paths (including SFTP and CLI operations), not just the web-API. The blacklist also applies to directories. If a directory is blacklisted, the gateway will refuse access to both that directory and any child files/directories underneath it, when accessed via diff --git a/docs/helper.rst b/docs/helper.rst index 0fcdf4601..55d302cac 100644 --- a/docs/helper.rst +++ b/docs/helper.rst @@ -122,7 +122,7 @@ Who should consider using a Helper? * clients who experience problems with TCP connection fairness: if other programs or machines in the same home are getting less than their fair share of upload bandwidth. 
If the connection is being shared fairly, then - a Tahoe upload that is happening at the same time as a single FTP upload + a Tahoe upload that is happening at the same time as a single SFTP upload should get half the bandwidth. * clients who have been given the helper.furl by someone who is running a Helper and is willing to let them use it diff --git a/docs/historical/configuration.rst b/docs/historical/configuration.rst index 660bc8489..867ed0de0 100644 --- a/docs/historical/configuration.rst +++ b/docs/historical/configuration.rst @@ -20,10 +20,10 @@ Config setting File Comment ``[node]log_gatherer.furl`` ``BASEDIR/log_gatherer.furl`` (one per line) ``[node]timeout.keepalive`` ``BASEDIR/keepalive_timeout`` ``[node]timeout.disconnect`` ``BASEDIR/disconnect_timeout`` -``[client]introducer.furl`` ``BASEDIR/introducer.furl`` + ``BASEDIR/introducer.furl`` ``BASEDIR/private/introducers.yaml`` ``[client]helper.furl`` ``BASEDIR/helper.furl`` ``[client]key_generator.furl`` ``BASEDIR/key_generator.furl`` -``[client]stats_gatherer.furl`` ``BASEDIR/stats_gatherer.furl`` + ``BASEDIR/stats_gatherer.furl`` Stats gatherer has been removed. ``[storage]enabled`` ``BASEDIR/no_storage`` (``False`` if ``no_storage`` exists) ``[storage]readonly`` ``BASEDIR/readonly_storage`` (``True`` if ``readonly_storage`` exists) ``[storage]sizelimit`` ``BASEDIR/sizelimit`` @@ -47,3 +47,10 @@ the now (since Tahoe-LAFS v1.3.0) unsupported addresses specified in ``advertised_ip_addresses`` were used in addition to any that were automatically discovered), whereas the new ``tahoe.cfg`` directive is not (``tub.location`` is used verbatim). + +The stats gatherer has been broken at least since Tahoe-LAFS v1.13.0. +The (broken) functionality of ``[client]stats_gatherer.furl`` (which +was previously in ``BASEDIR/stats_gatherer.furl``), is scheduled to be +completely removed after Tahoe-LAFS v1.15.0. After that point, if +your configuration contains a ``[client]stats_gatherer.furl``, your +node will refuse to start. diff --git a/docs/how_to_make_a_tahoe-lafs_release.org b/docs/how_to_make_a_tahoe-lafs_release.org deleted file mode 100644 index b3f2a84d7..000000000 --- a/docs/how_to_make_a_tahoe-lafs_release.org +++ /dev/null @@ -1,110 +0,0 @@ -How to Make a Tahoe-LAFS Release - -Any developer with push priveleges can do most of these steps, but a -"Release Maintainer" is required for some signing operations -- these -steps are marked with (Release Maintainer). Currently, the following -people are Release Maintainers: - - - Brian Warner (https://github.com/warner) - - -* select features/PRs for new release [0/2] - - [ ] made sure they are tagged/labeled - - [ ] merged all release PRs - -* basic quality checks [0/3] - - [ ] all travis CI checks pass - - [ ] all appveyor checks pass - - [ ] all buildbot workers pass their checks - -* freeze master branch [0/1] - - [ ] announced the freeze of the master branch on IRC (i.e. non-release PRs won't be merged until after release) - -* sync documentation [0/7] - - - [ ] NEWS.rst: (run "tox -e news") - - [ ] added final release name and date to top-most item in NEWS.rst - - [ ] updated relnotes.txt (change next, last versions; summarize NEWS) - - [ ] updated CREDITS - - [ ] updated docs/known_issues.rst - - [ ] docs/INSTALL.rst only points to current tahoe-lafs-X.Y.Z.tar.gz source code file - - [ ] updated https://tahoe-lafs.org/hacktahoelafs/ - -* sign + build the tag [0/8] - - - [ ] code passes all checks / tests (i.e. 
all CI is green) - - [ ] documentation is ready (see above) - - [ ] (Release Maintainer): git tag -s -u 0xE34E62D06D0E69CFCA4179FFBDE0D31D68666A7A -m "release Tahoe-LAFS-X.Y.Z" tahoe-lafs-X.Y.Z - - [ ] build code locally: - tox -e py27,codechecks,deprecations,docs,integration,upcoming-deprecations - - [ ] created tarballs (they'll be in dist/ for later comparison) - tox -e tarballs - - [ ] release version is reporting itself as intended version - ls dist/ - - [ ] 'git pull' doesn't pull anything - - [ ] pushed tag to trigger buildslaves - git push official master TAGNAME - - [ ] confirmed Dockerhub built successfully: - https://hub.docker.com/r/tahoelafs/base/builds/ - -* sign the release artifacts [0/8] - - - [ ] (Release Maintainer): pushed signed tag (should trigger Buildbot builders) - - [ ] Buildbot workers built all artifacts successfully - - [ ] downloaded upstream tarballs+wheels - - [ ] announce on IRC that master is unlocked - - [ ] compared upstream tarballs+wheels against local copies - - [ ] (Release Maintainer): signed each upstream artifacts with "gpg -ba -u 0xE34E62D06D0E69CFCA4179FFBDE0D31D68666A7A FILE" - - [ ] added to relnotes.txt: [0/3] - - [ ] prefix with SHA256 of tarballs - - [ ] release pubkey - - [ ] git revision hash - - [ ] GPG-signed the release email with release key (write to - relnotes.txt.asc) Ideally this is a Release Maintainer, but could - be any developer - -* publish release artifacts [0/9] - - - [ ] uploaded to PyPI via: twine upload dist/* - - [ ] uploaded *.asc to org ~source/downloads/ - - [ ] test install works properly: pip install tahoe-lafs - - [ ] copied the release tarballs and signatures to tahoe-lafs.org: ~source/downloads/ - - [ ] moved old release out of ~source/downloads (to downloads/old/?) - - [ ] ensured readthedocs.org updated - - [ ] uploaded wheels to https://tahoe-lafs.org/deps/ - - [ ] uploaded release to https://github.com/tahoe-lafs/tahoe-lafs/releases - -* check release downloads [0/] - - - [ ] test PyPI via: pip install tahoe-lafs - - [ ] https://github.com/tahoe-lafs/tahoe-lafs/releases - - [ ] https://tahoe-lafs.org/downloads/ - - [ ] https://tahoe-lafs.org/deps/ - -* document release in trac [0/] - - - [ ] closed the Milestone on the trac Roadmap - -* unfreeze master branch [0/] - - - [ ] announced on IRC that new PRs will be looked at/merged - -* announce new release [0/] - - - [ ] sent release email and relnotes.txt.asc to tahoe-announce@tahoe-lafs.org - - [ ] sent release email and relnotes.txt.asc to tahoe-dev@tahoe-lafs.org - - [ ] updated Wiki front page: version on download link, News column - - [ ] updated Wiki "Doc": parade of release notes (with rev of NEWS.rst) - - [ ] make an "announcement of new release" on freshmeat (XXX still a thing?) - - [ ] make an "announcement of new release" on launchpad - - [ ] tweeted as @tahoelafs - - [ ] emailed relnotes.txt.asc to below listed mailing-lists/organizations - - [ ] also announce release to (trimmed from previous version of this doc): - - twisted-python@twistedmatrix.com - - liberationtech@lists.stanford.edu - - lwn@lwn.net - - p2p-hackers@lists.zooko.com - - python-list@python.org - - http://listcultures.org/pipermail/p2presearch_listcultures.org/ - - cryptopp-users@googlegroups.com - - (others?) 
diff --git a/docs/index.rst b/docs/index.rst index 3d0a41302..60a3aa5d4 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -23,8 +23,9 @@ Contents: frontends/download-status known_issues - ../.github/CONTRIBUTING + contributing CODE_OF_CONDUCT + release-checklist servers helper diff --git a/docs/known_issues.rst b/docs/known_issues.rst index e040ffaf6..98bd1b35d 100644 --- a/docs/known_issues.rst +++ b/docs/known_issues.rst @@ -23,7 +23,7 @@ Known Issues in Tahoe-LAFS v1.10.3, released 30-Mar-2016 * `Disclosure of file through embedded hyperlinks or JavaScript in that file`_ * `Command-line arguments are leaked to other local users`_ * `Capabilities may be leaked to web browser phishing filter / "safe browsing" servers`_ - * `Known issues in the FTP and SFTP frontends`_ + * `Known issues in the SFTP frontend`_ * `Traffic analysis based on sizes of files/directories, storage indices, and timing`_ * `Privacy leak via Google Chart API link in map-update timing web page`_ @@ -213,8 +213,8 @@ To disable the filter in Chrome: ---- -Known issues in the FTP and SFTP frontends ------------------------------------------- +Known issues in the SFTP frontend +--------------------------------- These are documented in :doc:`frontends/FTP-and-SFTP` and on `the SftpFrontend page`_ on the wiki. diff --git a/docs/logging.rst b/docs/logging.rst index 88cdebc00..11835b283 100644 --- a/docs/logging.rst +++ b/docs/logging.rst @@ -128,10 +128,9 @@ provided in ``misc/incident-gatherer/support_classifiers.py`` . There is roughly one category for each ``log.WEIRD``-or-higher level event in the Tahoe source code. -The incident gatherer is created with the "``flogtool -create-incident-gatherer WORKDIR``" command, and started with "``tahoe -start``". The generated "``gatherer.tac``" file should be modified to add -classifier functions. +The incident gatherer is created with the "``flogtool create-incident-gatherer +WORKDIR``" command, and started with "``tahoe run``". The generated +"``gatherer.tac``" file should be modified to add classifier functions. The incident gatherer writes incident names (which are simply the relative pathname of the ``incident-\*.flog.bz2`` file) into ``classified/CATEGORY``. @@ -175,7 +174,7 @@ things that happened on multiple machines (such as comparing a client node making a request with the storage servers that respond to that request). Create the Log Gatherer with the "``flogtool create-gatherer WORKDIR``" -command, and start it with "``tahoe start``". Then copy the contents of the +command, and start it with "``twistd -ny gatherer.tac``". Then copy the contents of the ``log_gatherer.furl`` file it creates into the ``BASEDIR/tahoe.cfg`` file (under the key ``log_gatherer.furl`` of the section ``[node]``) of all nodes that should be sending it log events. (See :doc:`configuration`) diff --git a/docs/man/man1/tahoe.1 b/docs/man/man1/tahoe.1 index 23162af63..113f6a311 100644 --- a/docs/man/man1/tahoe.1 +++ b/docs/man/man1/tahoe.1 @@ -45,9 +45,6 @@ Create a client node (with storage initially disabled). .TP .B \f[B]create-introducer\f[] Create an introducer node. -.TP -.B \f[B]create-stats-gatherer\f[] -Create a stats-gatherer service. .SS OPTIONS .TP .B \f[B]-C,\ --basedir=\f[] diff --git a/docs/release-checklist.rst b/docs/release-checklist.rst new file mode 100644 index 000000000..52d7d9344 --- /dev/null +++ b/docs/release-checklist.rst @@ -0,0 +1,218 @@ + +================= +Release Checklist +================= + +These instructions were produced while making the 1.15.0 release. 
They
+are based on the original instructions (in old revisions in the file
+`docs/how_to_make_a_tahoe-lafs_release.org`).
+
+Any contributor can do the first part of the release preparation. Only
+certain contributors can perform other parts. These are the two main
+sections of this checklist (and could be done by different people).
+
+A final section describes how to announce the release.
+
+
+Any Contributor
+---------------
+
+Anyone who can create normal PRs should be able to complete this
+portion of the release process.
+
+
+Prepare for the Release
+```````````````````````
+
+The `master` branch should always be releasable.
+
+It may be worth asking (on IRC or mailing-list) if anything will be
+merged imminently (for example, "I will prepare a release this coming
+Tuesday if you want to get anything in").
+
+- Create a ticket for the release in Trac
+- Ticket number needed in next section
+
+
+Create Branch and Apply Updates
+```````````````````````````````
+
+- Create a branch for release-candidates (e.g. `XXXX.release-1.15.0.rc0`)
+- run `tox -e news` to produce a new NEWS.txt file (this does a commit)
+- create the news for the release
+
+  - newsfragments/<ticket number>.minor
+  - commit it
+
+- manually fix NEWS.txt
+
+  - proper title for latest release ("Release 1.15.0" instead of "Release ...post1432")
+  - double-check date (maybe release will be in the future)
+  - spot-check the release notes (these come from the newsfragments
+    files though so don't do heavy editing)
+  - commit these changes
+
+- update "relnotes.txt"
+
+  - update all mentions of 1.14.0 -> 1.15.0
+  - update "previous release" statement and date
+  - summarize major changes
+  - commit it
+
+- update "CREDITS"
+
+  - are there any new contributors in this release?
+  - one way: git log release-1.14.0.. | grep Author | sort | uniq
+  - commit it
+
+- update "docs/known_issues.rst" if appropriate
+- update "docs/INSTALL.rst" references to the new release
+- Push the branch to github
+- Create a (draft) PR; this should trigger CI (note that github
+  doesn't let you create a PR without some changes on the branch so
+  running + committing the NEWS.txt file achieves that without changing
+  any code)
+- Confirm CI runs successfully on all platforms
+
+
+Create Release Candidate
+````````````````````````
+
+Before "officially" tagging any release, we will make a
+release-candidate available. So there will be at least 1.15.0rc0 (for
+example). If there are any problems, an rc1 or rc2 etc may also be
+released. Anyone can sign these releases (ideally they'd be signed
+"officially" as well, but it's better to get them out than to wait for
+that).
+
+Typically expert users will be the ones testing release candidates and
+they will need to evaluate which contributors' signatures they trust.
+
+- (all steps above are completed)
+- sign the release
+
+  - git tag -s -u 0xE34E62D06D0E69CFCA4179FFBDE0D31D68666A7A -m "release Tahoe-LAFS-1.15.0rc0" tahoe-lafs-1.15.0rc0
+  - (replace the key-id above with your own)
+
+- build all code locally
+  - these should all pass:
+
+    - tox -e py27,codechecks,docs,integration
+
+  - these can fail (ideally they should not of course):
+
+    - tox -e deprecations,upcoming-deprecations
+
+- build tarballs
+
+  - tox -e tarballs
+  - confirm it at least exists:
+    - ls dist/ | grep 1.15.0rc0
+
+- inspect and test the tarballs
+
+  - install each in a fresh virtualenv
+  - run `tahoe` command
+
+- when satisfied, sign the tarballs:
+
+  - gpg --pinentry=loopback --armor --detach-sign dist/tahoe_lafs-1.15.0rc0-py2-none-any.whl
+  - gpg --pinentry=loopback --armor --detach-sign dist/tahoe_lafs-1.15.0rc0.tar.bz2
+  - gpg --pinentry=loopback --armor --detach-sign dist/tahoe_lafs-1.15.0rc0.tar.gz
+  - gpg --pinentry=loopback --armor --detach-sign dist/tahoe_lafs-1.15.0rc0.zip
+
+
+Privileged Contributor
+-----------------------
+
+Steps in this portion require special access to keys or
+infrastructure. For example, **access to tahoe-lafs.org** to upload
+binaries or edit HTML.
+
+
+Hack Tahoe-LAFS
+```````````````
+
+Did anyone contribute a hack since the last release? If so, then
+https://tahoe-lafs.org/hacktahoelafs/ needs to be updated.
+
+
+Upload Artifacts
+````````````````
+
+Any release-candidate or actual release plus its signature (.asc file)
+needs to be uploaded to https://tahoe-lafs.org in `~source/downloads`.
+
+- secure-copy all release artifacts to the download area on the
+  tahoe-lafs.org host machine. `~source/downloads` on there maps to
+  https://tahoe-lafs.org/downloads/ on the Web.
+- scp dist/*1.15.0* username@tahoe-lafs.org:/home/source/downloads
+- the following developers have access to do this:
+
+  - exarkun
+  - meejah
+  - warner
+
+For the actual release, the tarball and signature files need to be
+uploaded to PyPI as well.
+
+- how to do this?
+- (original guide says only `twine upload dist/*`)
+- the following developers have access to do this:
+
+  - warner
+  - exarkun (partial?)
+  - meejah (partial?)
+
+Announcing the Release Candidate
+````````````````````````````````
+
+The release-candidate should be announced by posting to the
+mailing-list (tahoe-dev@tahoe-lafs.org). For example:
+https://tahoe-lafs.org/pipermail/tahoe-dev/2020-October/009995.html
+
+
+Is The Release Done Yet?
+````````````````````````
+
+If anyone reports a problem with a release-candidate then a new
+release-candidate should be made once a fix has been merged to
+master. Repeat the above instructions with `rc1` or `rc2` or whatever
+is appropriate.
+
+Once a release-candidate has marinated for some time then it can be
+made into the actual release.
+
+XXX Write this section when doing 1.15.0 actual release
+
+(In general, this means dropping the "rcX" part of the release and the
+tag, uploading those artifacts, uploading to PyPI, ... )
+
+
+
+Announcing the Release
+----------------------
+
+
+mailing-lists
+`````````````
+
+A new Tahoe release is traditionally announced on our mailing-list
+(tahoe-dev@tahoe-lafs.org).
The former version of these instructions +also announced the release on the following other lists: + +- tahoe-announce@tahoe-lafs.org +- twisted-python@twistedmatrix.com +- liberationtech@lists.stanford.edu +- lwn@lwn.net +- p2p-hackers@lists.zooko.com +- python-list@python.org +- http://listcultures.org/pipermail/p2presearch_listcultures.org/ +- cryptopp-users@googlegroups.com + + +wiki +```` + +Edit the "News" section of the front page of https://tahoe-lafs.org +with a link to the mailing-list archive of the announcement message. diff --git a/docs/running.rst b/docs/running.rst index 2b43adf75..82b0443f9 100644 --- a/docs/running.rst +++ b/docs/running.rst @@ -65,9 +65,9 @@ Running a Client To construct a client node, run “``tahoe create-client``”, which will create ``~/.tahoe`` to be the node's base directory. Acquire the ``introducer.furl`` (see below if you are running your own introducer, or use the one from the -`TestGrid page`_), and paste it after ``introducer.furl =`` in the -``[client]`` section of ``~/.tahoe/tahoe.cfg``. Then use “``tahoe run -~/.tahoe``”. After that, the node should be off and running. The first thing +`TestGrid page`_), and write it to ``~/.tahoe/private/introducers.yaml`` +(see :ref:`introducer-definitions`). Then use “``tahoe run ~/.tahoe``”. +After that, the node should be off and running. The first thing it will do is connect to the introducer and get itself connected to all other nodes on the grid. @@ -81,9 +81,7 @@ does not offer its disk space to other nodes. To configure other behavior, use “``tahoe create-node``” or see :doc:`configuration`. The “``tahoe run``” command above will run the node in the foreground. -On Unix, you can run it in the background instead by using the -“``tahoe start``” command. To stop a node started in this way, use -“``tahoe stop``”. ``tahoe --help`` gives a summary of all commands. +``tahoe --help`` gives a summary of all commands. Running a Server or Introducer @@ -99,12 +97,10 @@ and ``--location`` arguments. To construct an introducer, create a new base directory for it (the name of the directory is up to you), ``cd`` into it, and run “``tahoe create-introducer --hostname=example.net .``” (but using the hostname of -your VPS). Now run the introducer using “``tahoe start .``”. After it +your VPS). Now run the introducer using “``tahoe run .``”. After it starts, it will write a file named ``introducer.furl`` into the ``private/`` subdirectory of that base directory. This file contains the URL the other nodes must use in order to connect to this introducer. -(Note that “``tahoe run .``” doesn't work for introducers, this is a -known issue: `#937`_.) You can distribute your Introducer fURL securely to new clients by using the ``tahoe invite`` command. This will prepare some JSON to send to the @@ -211,10 +207,10 @@ create a new directory and lose the capability to it, then you cannot access that directory ever again. -The SFTP and FTP frontends --------------------------- +The SFTP frontend +----------------- -You can access your Tahoe-LAFS grid via any SFTP_ or FTP_ client. See +You can access your Tahoe-LAFS grid via any SFTP_ client. See :doc:`frontends/FTP-and-SFTP` for how to set this up. On most Unix platforms, you can also use SFTP to plug Tahoe-LAFS into your computer's local filesystem via ``sshfs``, but see the `FAQ about performance @@ -224,7 +220,6 @@ The SftpFrontend_ page on the wiki has more information about using SFTP with Tahoe-LAFS. .. _SFTP: https://en.wikipedia.org/wiki/SSH_file_transfer_protocol -.. 
_FTP: https://en.wikipedia.org/wiki/File_Transfer_Protocol .. _FAQ about performance problems: https://tahoe-lafs.org/trac/tahoe-lafs/wiki/FAQ#Q23_FUSE .. _SftpFrontend: https://tahoe-lafs.org/trac/tahoe-lafs/wiki/SftpFrontend diff --git a/docs/specifications/index.rst b/docs/specifications/index.rst index 7d99934f6..2029c9e5a 100644 --- a/docs/specifications/index.rst +++ b/docs/specifications/index.rst @@ -8,6 +8,7 @@ the data formats used by Tahoe. :maxdepth: 2 outline + url uri file-encoding URI-extension diff --git a/docs/specifications/url.rst b/docs/specifications/url.rst new file mode 100644 index 000000000..31fb05fad --- /dev/null +++ b/docs/specifications/url.rst @@ -0,0 +1,165 @@ +URLs +==== + +The goal of this document is to completely specify the construction and use of the URLs by Tahoe-LAFS for service location. +This includes, but is not limited to, the original Foolscap-based URLs. +These are not to be confused with the URI-like capabilities Tahoe-LAFS uses to refer to stored data. +An attempt is also made to outline the rationale for certain choices about these URLs. +The intended audience for this document is Tahoe-LAFS maintainers and other developers interested in interoperating with Tahoe-LAFS or these URLs. + +Background +---------- + +Tahoe-LAFS first used Foolscap_ for network communication. +Foolscap connection setup takes as an input a Foolscap URL or a *fURL*. +A fURL includes three components: + +* the base32-encoded SHA1 hash of the DER form of an x509v3 certificate +* zero or more network addresses [1]_ +* an object identifier + +A Foolscap client tries to connect to each network address in turn. +If a connection is established then TLS is negotiated. +The server is authenticated by matching its certificate against the hash in the fURL. +A matching certificate serves as proof that the handshaking peer is the correct server. +This serves as the process by which the client authenticates the server. + +The client can then exercise further Foolscap functionality using the fURL's object identifier. +If the object identifier is an unguessable, secret string then it serves as a capability. +This unguessable identifier is sometimes called a `swiss number`_ (or swissnum). +The client's use of the swissnum is what allows the server to authorize the client. + +.. _`swiss number`: http://wiki.erights.org/wiki/Swiss_number + +NURLs +----- + +The authentication and authorization properties of fURLs are a good fit for Tahoe-LAFS' requirements. +These are not inherently tied to the Foolscap protocol itself. +In particular they are beneficial to :doc:`../proposed/http-storage-node-protocol` which uses HTTP instead of Foolscap. +It is conceivable they will also be used with WebSockets at some point as well. + +Continuing to refer to these URLs as fURLs when they are being used for other protocols may cause confusion. +Therefore, +this document coins the name **NURL** for these URLs. +This can be considered to expand to "**N**\ ew URLs" or "Authe\ **N**\ ticating URLs" or "Authorizi\ **N**\ g URLs" as the reader prefers. + +The anticipated use for a **NURL** will still be to establish a TLS connection to a peer. +The protocol run over that TLS connection could be Foolscap though it is more likely to be an HTTP-based protocol (such as GBS). 
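+
+As a concrete illustration,
+a NURL of the original kind (a fURL) can be parsed with ``foolscap.furl.decode_furl``,
+which is taken below as the canonical definition of fURL syntax.
+The following minimal sketch uses one of the version 0 examples from this document::
+
+    from foolscap.furl import decode_furl
+
+    furl = "pb://sisi4zenj7cxncgvdog7szg3yxbrnamy@tcp:127.1:34399/xphmwz6lx24rh2nxlinni"
+
+    # decode_furl returns the base32-encoded certificate hash (the "tub id"),
+    # the list of location hint strings, and the swissnum.
+    tub_id, location_hints, swissnum = decode_furl(furl)
+    # tub_id         == "sisi4zenj7cxncgvdog7szg3yxbrnamy"
+    # location_hints == ["tcp:127.1:34399"]
+    # swissnum       == "xphmwz6lx24rh2nxlinni"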
+ +Syntax +------ + +The EBNF for a NURL is as follows:: + + nurl = scheme, hash, "@", net-loc-list, "/", swiss-number, [ version1 ] + + scheme = "pb://" + + hash = unreserved + + net-loc-list = net-loc, [ { ",", net-loc } ] + net-loc = tcp-loc | tor-loc | i2p-loc + + tcp-loc = [ "tcp:" ], hostname, [ ":" port ] + tor-loc = "tor:", hostname, [ ":" port ] + i2p-loc = "i2p:", i2p-addr, [ ":" port ] + + i2p-addr = { unreserved }, ".i2p" + hostname = domain | IPv4address | IPv6address + + swiss-number = segment + + version1 = "#v=1" + +See https://tools.ietf.org/html/rfc3986#section-3.3 for the definition of ``segment``. +See https://tools.ietf.org/html/rfc2396#appendix-A for the definition of ``unreserved``. +See https://tools.ietf.org/html/draft-main-ipaddr-text-rep-02#section-3.1 for the definition of ``IPv4address``. +See https://tools.ietf.org/html/draft-main-ipaddr-text-rep-02#section-3.2 for the definition of ``IPv6address``. +See https://tools.ietf.org/html/rfc1035#section-2.3.1 for the definition of ``domain``. + +Versions +-------- + +Though all NURLs are syntactically compatible some semantic differences are allowed. +These differences are separated into distinct versions. + +Version 0 +--------- + +A Foolscap fURL is considered the canonical definition of a version 0 NURL. +Notably, +the hash component is defined as the base32-encoded SHA1 hash of the DER form of an x509v3 certificate. +A version 0 NURL is identified by the absence of the ``v=1`` fragment. + +Examples +~~~~~~~~ + +* ``pb://sisi4zenj7cxncgvdog7szg3yxbrnamy@tcp:127.1:34399/xphmwz6lx24rh2nxlinni`` +* ``pb://2uxmzoqqimpdwowxr24q6w5ekmxcymby@localhost:47877/riqhpojvzwxujhna5szkn`` + +Version 1 +--------- + +The hash component of a version 1 NURL differs in three ways from the prior version. + +1. The hash function used is SHA3-224 instead of SHA1. + The security of SHA1 `continues to be eroded`_. + Contrariwise SHA3 is currently the most recent addition to the SHA family by NIST. + The 224 bit instance is chosen to keep the output short and because it offers greater collision resistance than SHA1 was thought to offer even at its inception + (prior to security research showing actual collision resistance is lower). +2. The hash is computed over the certificate's SPKI instead of the whole certificate. + This allows certificate re-generation so long as the public key remains the same. + This is useful to allow contact information to be updated or extension of validity period. + Use of an SPKI hash has also been `explored by the web community`_ during its flirtation with using it for HTTPS certificate pinning + (though this is now largely abandoned). + +.. note:: + *Only* the certificate's keypair is pinned by the SPKI hash. + The freedom to change every other part of the certificate is coupled with the fact that all other parts of the certificate contain arbitrary information set by the private key holder. + It is neither guaranteed nor expected that a certificate-issuing authority has validated this information. + Therefore, + *all* certificate fields should be considered within the context of the relationship identified by the SPKI hash. + +3. The hash is encoded using urlsafe-base64 (without padding) instead of base32. + This provides a more compact representation and minimizes the usability impacts of switching from a 160 bit hash to a 224 bit hash. + +A version 1 NURL is identified by the presence of the ``v=1`` fragment. 
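+
+As an illustrative sketch
+(not the project's implementation, and assuming the third-party ``cryptography`` package),
+the hash described above could be computed from a PEM certificate like this::
+
+    from base64 import urlsafe_b64encode
+
+    from cryptography import x509
+    from cryptography.hazmat.primitives import hashes, serialization
+
+    def nurl_v1_hash(cert_pem: bytes) -> str:
+        cert = x509.load_pem_x509_certificate(cert_pem)
+        # (2) Hash only the DER-encoded SubjectPublicKeyInfo (SPKI).
+        spki = cert.public_key().public_bytes(
+            serialization.Encoding.DER,
+            serialization.PublicFormat.SubjectPublicKeyInfo,
+        )
+        # (1) SHA3-224 instead of SHA1.
+        digest = hashes.Hash(hashes.SHA3_224())
+        digest.update(spki)
+        # (3) urlsafe-base64 without the trailing "=" padding; a 28 byte
+        # digest encodes to a 38 character string.
+        return urlsafe_b64encode(digest.finalize()).rstrip(b"=").decode("ascii")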
+Though the length of the hash string (38 bytes) could also be used to differentiate it from a version 0 NURL, +there is no guarantee that this will be effective in differentiating it from future versions so this approach should not be used. + +It is possible for a client to unilaterally upgrade a version 0 NURL to a version 1 NURL. +After establishing and authenticating a connection the client will have received a copy of the server's certificate. +This is sufficient to compute the new hash and rewrite the NURL to upgrade it to version 1. +This provides stronger authentication assurances for future uses but it is not required. + +Examples +~~~~~~~~ + +* ``pb://1WUX44xKjKdpGLohmFcBNuIRN-8rlv1Iij_7rQ@tcp:127.1:34399/jhjbc3bjbhk#v=1`` +* ``pb://azEu8vlRpnEeYm0DySQDeNY3Z2iJXHC_bsbaAw@localhost:47877/64i4aokv4ej#v=1`` + +.. _`continues to be eroded`: https://en.wikipedia.org/wiki/SHA-1#Cryptanalysis_and_validation +.. _`explored by the web community`: https://www.imperialviolet.org/2011/05/04/pinning.html +.. _Foolscap: https://github.com/warner/foolscap + +.. [1] ``foolscap.furl.decode_furl`` is taken as the canonical definition of the syntax of a fURL. + The **location hints** part of the fURL, + as it is referred to in Foolscap, + is matched by the regular expression fragment ``([^/]*)``. + Since this matches the empty string, + no network addresses are required to form a fURL. + The supporting code around the regular expression also takes extra steps to allow an empty string to match here. + +Open Questions +-------------- + +1. Should we make a hard recommendation that all certificate fields are ignored? + The system makes no guarantees about validation of these fields. + Is it just an unnecessary risk to let a user see them? + +2. Should the version specifier be a query-arg-alike or a fragment-alike? + The value is only necessary on the client side which makes it similar to an HTTP URL fragment. + The current Tahoe-LAFS configuration parsing code has special handling of the fragment character (``#``) which makes it unusable. + However, + the configuration parsing code is easily changed. diff --git a/docs/stats.rst b/docs/stats.rst index 8fbc8647a..50642d816 100644 --- a/docs/stats.rst +++ b/docs/stats.rst @@ -6,8 +6,7 @@ Tahoe Statistics 1. `Overview`_ 2. `Statistics Categories`_ -3. `Running a Tahoe Stats-Gatherer Service`_ -4. `Using Munin To Graph Stats Values`_ +3. `Using Munin To Graph Stats Values`_ Overview ======== @@ -243,92 +242,6 @@ The currently available stats (as of release 1.6.0 or so) are described here: the process was started. Ticket #472 indicates that .total may sometimes be negative due to wraparound of the kernel's counter. -**stats.load_monitor.\*** - - When enabled, the "load monitor" continually schedules a one-second - callback, and measures how late the response is. This estimates system load - (if the system is idle, the response should be on time). This is only - enabled if a stats-gatherer is configured. - - avg_load - average "load" value (seconds late) over the last minute - - max_load - maximum "load" value over the last minute - - -Running a Tahoe Stats-Gatherer Service -====================================== - -The "stats-gatherer" is a simple daemon that periodically collects stats from -several tahoe nodes. It could be useful, e.g., in a production environment, -where you want to monitor dozens of storage servers from a central management -host. It merely gatherers statistics from many nodes into a single place: it -does not do any actual analysis. 
- -The stats gatherer listens on a network port using the same Foolscap_ -connection library that Tahoe clients use to connect to storage servers. -Tahoe nodes can be configured to connect to the stats gatherer and publish -their stats on a periodic basis. (In fact, what happens is that nodes connect -to the gatherer and offer it a second FURL which points back to the node's -"stats port", which the gatherer then uses to pull stats on a periodic basis. -The initial connection is flipped to allow the nodes to live behind NAT -boxes, as long as the stats-gatherer has a reachable IP address.) - -.. _Foolscap: https://foolscap.lothar.com/trac - -The stats-gatherer is created in the same fashion as regular tahoe client -nodes and introducer nodes. Choose a base directory for the gatherer to live -in (but do not create the directory). Choose the hostname that should be -advertised in the gatherer's FURL. Then run: - -:: - - tahoe create-stats-gatherer --hostname=HOSTNAME $BASEDIR - -and start it with "tahoe start $BASEDIR". Once running, the gatherer will -write a FURL into $BASEDIR/stats_gatherer.furl . - -To configure a Tahoe client/server node to contact the stats gatherer, copy -this FURL into the node's tahoe.cfg file, in a section named "[client]", -under a key named "stats_gatherer.furl", like so: - -:: - - [client] - stats_gatherer.furl = pb://qbo4ktl667zmtiuou6lwbjryli2brv6t@HOSTNAME:PORTNUM/wxycb4kaexzskubjnauxeoptympyf45y - -or simply copy the stats_gatherer.furl file into the node's base directory -(next to the tahoe.cfg file): it will be interpreted in the same way. - -When the gatherer is created, it will allocate a random unused TCP port, so -it should not conflict with anything else that you have running on that host -at that time. To explicitly control which port it uses, run the creation -command with ``--location=`` and ``--port=`` instead of ``--hostname=``. If -you use a hostname of ``example.org`` and a port number of ``1234``, then -run:: - - tahoe create-stats-gatherer --location=tcp:example.org:1234 --port=tcp:1234 - -``--location=`` is a Foolscap FURL hints string (so it can be a -comma-separated list of connection hints), and ``--port=`` is a Twisted -"server endpoint specification string", as described in :doc:`configuration`. - -Once running, the stats gatherer will create a standard JSON file in -``$BASEDIR/stats.json``. Once a minute, the gatherer will pull stats -information from every connected node and write them into the file. The file -will contain a dictionary, in which node identifiers (known as "tubid" -strings) are the keys, and the values are a dict with 'timestamp', -'nickname', and 'stats' keys. d[tubid][stats] will contain the stats -dictionary as made available at http://localhost:3456/statistics?t=json . The -file will only contain the most recent update from each node. - -Other tools can be built to examine these stats and render them into -something useful. For example, a tool could sum the -"storage_server.disk_avail' values from all servers to compute a -total-disk-available number for the entire grid (however, the "disk watcher" -daemon, in misc/operations_helpers/spacetime/, is better suited for this -specific task). 
Using Munin To Graph Stats Values ================================= diff --git a/docs/windows.rst b/docs/windows.rst index 568e502bc..1f69ac743 100644 --- a/docs/windows.rst +++ b/docs/windows.rst @@ -33,7 +33,7 @@ You can use whatever name you like for the virtualenv, but example uses 3: Use the virtualenv's ``pip`` to install the latest release of Tahoe-LAFS into this virtualenv:: - PS C:\Users\me> venv\Scripts\pip install --find-links=https://tahoe-lafs.org/deps/ tahoe-lafs + PS C:\Users\me> venv\Scripts\pip install tahoe-lafs Collecting tahoe-lafs ... Installing collected packages: ... @@ -69,7 +69,7 @@ The ``pip install tahoe-lafs`` command above will install the latest release the following command (using pip from the virtualenv, from the root of your git checkout):: - $ venv\Scripts\pip install --find-links=https://tahoe-lafs.org/deps/ . + $ venv\Scripts\pip install . If you're planning to hack on the source code, you might want to add ``--editable`` so you won't have to re-install each time you make a change. @@ -77,12 +77,7 @@ If you're planning to hack on the source code, you might want to add Dependencies ------------ -Tahoe-LAFS depends upon several packages that use compiled C code -(such as zfec). This code must be built separately for each platform -(Windows, OS-X, and different flavors of Linux). - -Pre-compiled "wheels" of all Tahoe's dependencies are hosted on the -tahoe-lafs.org website in the ``deps/`` directory. The ``--find-links=`` -argument (used in the examples above) instructs ``pip`` to look at that URL -for dependencies. This should avoid the need for anything to be compiled -during the install. +Tahoe-LAFS depends upon several packages that use compiled C code (such as zfec). +This code must be built separately for each platform (Windows, OS-X, and different flavors of Linux). +Fortunately, this is now done by upstream packages for most platforms. +The result is that a C compiler is usually not required to install Tahoe-LAFS. diff --git a/integration/conftest.py b/integration/conftest.py index 04e3dcb52..533cbdb67 100644 --- a/integration/conftest.py +++ b/integration/conftest.py @@ -7,11 +7,15 @@ from os import mkdir, listdir, environ from os.path import join, exists from tempfile import mkdtemp, mktemp from functools import partial +from json import loads + +from foolscap.furl import ( + decode_furl, +) from eliot import ( to_file, log_call, - start_action, ) from twisted.python.procutils import which @@ -30,11 +34,14 @@ from util import ( _DumpOutputProtocol, _ProcessExitedProtocol, _create_node, - _run_node, _cleanup_tahoe_process, _tahoe_runner_optional_coverage, await_client_ready, TahoeProcess, + cli, + _run_node, + generate_ssh_key, + block_with_timeout, ) @@ -150,7 +157,7 @@ def flog_gatherer(reactor, temp_dir, flog_binary, request): ) print("Waiting for flogtool to complete") try: - pytest_twisted.blockon(flog_protocol.done) + block_with_timeout(flog_protocol.done, reactor) except ProcessTerminated as e: print("flogtool exited unexpectedly: {}".format(str(e))) print("Flogtool completed") @@ -199,9 +206,8 @@ log_gatherer.furl = {log_furl} with open(join(intro_dir, 'tahoe.cfg'), 'w') as f: f.write(config) - # on windows, "tahoe start" means: run forever in the foreground, - # but on linux it means daemonize. "tahoe run" is consistent - # between platforms. + # "tahoe run" is consistent across Linux/macOS/Windows, unlike the old + # "start" command. 
protocol = _MagicTextProtocol('introducer running')
     transport = _tahoe_runner_optional_coverage(
         protocol,
@@ -226,6 +232,16 @@ def introducer_furl(introducer, temp_dir):
             print("Don't see {} yet".format(furl_fname))
             sleep(.1)
     furl = open(furl_fname, 'r').read()
+    tubID, location_hints, name = decode_furl(furl)
+    if not location_hints:
+        # If there are no location hints then nothing can ever possibly
+        # connect to it and the only thing that can happen next is something
+        # will hang or time out. So just give up right now.
+        raise ValueError(
+            "Introducer ({!r}) fURL has no location hints!".format(
+                introducer_furl,
+            ),
+        )
     return furl
@@ -266,9 +282,8 @@ log_gatherer.furl = {log_furl}
     with open(join(intro_dir, 'tahoe.cfg'), 'w') as f:
         f.write(config)
-    # on windows, "tahoe start" means: run forever in the foreground,
-    # but on linux it means daemonize. "tahoe run" is consistent
-    # between platforms.
+    # "tahoe run" is consistent across Linux/macOS/Windows, unlike the old
+    # "start" command.
     protocol = _MagicTextProtocol('introducer running')
     transport = _tahoe_runner_optional_coverage(
         protocol,
@@ -283,7 +298,7 @@ log_gatherer.furl = {log_furl}
     def cleanup():
         try:
             transport.signalProcess('TERM')
-            pytest_twisted.blockon(protocol.exited)
+            block_with_timeout(protocol.exited, reactor)
         except ProcessExitedAlready:
             pass
     request.addfinalizer(cleanup)
@@ -337,8 +352,50 @@ def alice(reactor, temp_dir, introducer_furl, flog_gatherer, storage_nodes, requ
             reactor, request, temp_dir, introducer_furl, flog_gatherer, "alice",
             web_port="tcp:9980:interface=localhost",
             storage=False,
+            # We're going to kill this ourselves, so no need for finalizer to
+            # do it:
+            finalize=False,
         )
     )
+    await_client_ready(process)
+
+    # 1. Create a new RW directory cap:
+    cli(process, "create-alias", "test")
+    rwcap = loads(cli(process, "list-aliases", "--json"))["test"]["readwrite"]
+
+    # 2. Enable SFTP on the node:
+    host_ssh_key_path = join(process.node_dir, "private", "ssh_host_rsa_key")
+    accounts_path = join(process.node_dir, "private", "accounts")
+    with open(join(process.node_dir, "tahoe.cfg"), "a") as f:
+        f.write("""\
+[sftpd]
+enabled = true
+port = tcp:8022:interface=127.0.0.1
+host_pubkey_file = {ssh_key_path}.pub
+host_privkey_file = {ssh_key_path}
+accounts.file = {accounts_path}
+""".format(ssh_key_path=host_ssh_key_path, accounts_path=accounts_path))
+    generate_ssh_key(host_ssh_key_path)
+
+    # 3. Add a SFTP access file with username/password and SSH key auth.
+
+    # The client SSH key path is typically going to be somewhere else (~/.ssh,
+    # typically), but for convenience's sake for testing we'll put it inside the node.
+    client_ssh_key_path = join(process.node_dir, "private", "ssh_client_rsa_key")
+    generate_ssh_key(client_ssh_key_path)
+    # Pub key format is "ssh-rsa <key> <comment>". We want the key.
+    ssh_public_key = open(client_ssh_key_path + ".pub").read().strip().split()[1]
+    with open(accounts_path, "w") as f:
+        f.write("""\
+alice password {rwcap}
+
+alice2 ssh-rsa {ssh_public_key} {rwcap}
+""".format(rwcap=rwcap, ssh_public_key=ssh_public_key))
+
+    # 4. Restart the node with new SFTP config.
+ process.kill() + pytest_twisted.blockon(_run_node(reactor, process.node_dir, request, None)) + await_client_ready(process) return process @@ -480,7 +537,13 @@ def tor_network(reactor, temp_dir, chutney, request): path=join(chutney_dir), env=env, ) - pytest_twisted.blockon(proto.done) + try: + block_with_timeout(proto.done, reactor) + except ProcessTerminated: + # If this doesn't exit cleanly, that's fine, that shouldn't fail + # the test suite. + pass + request.addfinalizer(cleanup) return chut diff --git a/integration/install-tor.sh b/integration/install-tor.sh index 2478389f8..66fa64cb1 100755 --- a/integration/install-tor.sh +++ b/integration/install-tor.sh @@ -25,694 +25,768 @@ tknIyk5Goa36GMBl84gQceRs/4Zx3kxqCV+JYXE9CmdkpkVrh2K3j5+ysDWfD/kO dTzwu3WHaAwL8d5MJAGQn2i6bTw4UHytrYemS1DdG/0EThCCyAnPmmb8iBkZlSW8 6MzVqTrN37yvYWTXk6MwKH50twaX5hzZAlSh9eqRjZLq51DDomO7EumXP90rS5mT QrS+wiYfGQttoZfbh3wl5ZjejgEjx+qrnOH7ABEBAAG0JmRlYi50b3Jwcm9qZWN0 -Lm9yZyBhcmNoaXZlIHNpZ25pbmcga2V5iEYEEBECAAYFAkqqojIACgkQ61qJaiiY -i/WmOgCfTyf3NJ7wHTBckwAeE4MSt5ZtXVsAn0XDq8PWWnk4nK6TlevqK/VoWItF -iEYEEBECAAYFAkqsYDUACgkQO50JPzGwl0voJwCcCSokiJSNY+yIr3nBPN/LJldb -xekAmwfU60GeaWFwz7hqwVFL23xeTpyniEYEEBECAAYFAkt9ndgACgkQYhWWT1sX -KrI5TACfcBPbsaPA1AUVVXXPv0KeWFYgVaIAoMr3jwd1NYVD6Te3D+yJhGzzCD6P -iEYEEBECAAYFAkt+li8ACgkQTlMAGaGhvAU4FwCfX3H4Ggm/x0yIAvmt4CW8AP9F -5D8AoKapuwbjsGncT3UdNFiHminAaq1tiEYEEBECAAYFAky6mjsACgkQhfcmMSeh -yJpL+gCggxs4C5o+Oznk7WmFrPQ3lbnfDKIAni4p20aRuwx6QWGH8holjzTSmm5F -iEYEEBECAAYFAlMI0FEACgkQhEMxewZV94DLagCcDG5SR00+00VHzBVE6fDg027e -N2sAnjNLOYbRSBxBnELUDKC7Vjaz/sAMiEYEExECAAYFAlJStIQACgkQKQwSSb3Y -cAuCRgCgv0d7P2Yu1R6Jiy/btNP18viYT5EAoIY1Lc47SYFUMA7FwyFFX6WSAb5Y -iEwEExECAAwFAkqg7nQFgwll/3cACgkQ3nqvbpTAnH+GJACgxPkSbEp+WQCLZTLB -P30+5AandyQAniMm5s8k2ccV4I1nr9O0qYejOJTiiF4EEBEIAAYFAkzBD8YACgkQ -azeBLFtU1oxDCAD+KUQ7nSRJqZOY0CI6nAD7tak9K7Jlk0ORJcT3i6ZDyD8A/33a -BXzMw0knTTdJ6DufeQYBTMK+CNXM+hkrHfBggPDXiF4EEBEIAAYFAk4Mhd4ACgkQ -g6I5C/2iihoNrwEAzOrMMTbCho8OsG/tDxgnlwY9x/kBIqCfCdKLrZCMk9UA/i+Y -GBQCHg1MaZzZrfbSeoE7/qyZOYDYzq78+0E16WLZiF4EEBEIAAYFAlPeZ9MACgkQ -TqUU5bQa5qhFZwEAoWTXMOMQSx784WcMHXt8OEeQdOGEOSHksOJuWhyJ9CABAKBk -eGV4TxerY2YPqeI6V/SBfzHqzMegt26ADIph2dG7iF4EEBEIAAYFAlViC18ACgkQ -fX0Rv2KdWmd6sQEAnTAi5ZGUqq0S0Io5URugswOr/RwEFh8bO7nJOUWOcNkA/is3 -LmGIvmYS7kYmoYRjSj3Bc0vMndvD6Q2KYjp3L1cDiF4EEBEKAAYFAlFVUVkACgkQ -h1gyehCfJZHbYgEAg6q8LKukKxNabqo2ovHBryFHWOVFogVY+iI605rwHZQA/1hK -q3rEa8EHaDyeseFSiciQckDwrib5X5ep86ZwYNi8iGEEMBEIAAkFAlPeaoYCHQAA -CgkQTqUU5bQa5qiGngD/ds3IJS3BbXy5dzS7vCZTYZGFq+wzVqMCVo4VXBZDZK0B -AKWDu8MCktTdWUqd2H2lnS3w4xMDHdpxB5aEVg2kjK/piJwEEAECAAYFAkzUfOUA -CgkQ47Feim8Q/EJp2gP/dFeyE02Rn3W723u/7rLss69unufYLR5rEXUsSZ+8xt75 -4PrTI4w02qcGOL05P+bOwbIZRhU9lcNZJetVYQtL3/sBVAIBoZVe3B+w0MiTWgRX -cSdJ89FyfoGyowzdoAO7SuVWwA/I/DP7CRupvHC5hZpeffr/nmKOFQP135eakWCJ -ARwEEAECAAYFAkyRaqYACgkQY5Cb4ntdZmsmWggAxgz83X4rA51TyuvIZye78dbg -oHZDCsgCZjV3GtLcCImJdaCpmfetYdWOalCTo9NgI7cSoHiPm9YUcBgMUOLkvGx7 -WI+j5/5lytENxtZcNEOjPquJg3Y98ywHh0f1qMgkExVl9oJoHeOgtF0JKqX2PZpn -z2caSqIpTMZYV+M+k8cWEYsG8WTgf48IWTAjTKF8eUmAwtwHKEal1nd8AsMMuZbL -/Fwt93EHf3Pl2ySAuIc7uJU4953Q5abaSafUjzUlIjXvGA9LMEiE1/kdbszuJeiy -2r8NNo/zAIX1Yt3RKX/JbeGSmkVVBwf1z07FJsWMe4zrQ8q/sP5T52RTIQBAg4kB -HAQQAQIABgUCToOsZAAKCRD9hPy49bQwR2LNB/4tEamTJhxWcReIVRS4mIxmVZKh -N4WwWVMt0FWPECVxNqdbk9RnU75/PGFJOO0CARmbVQlS/dFonEaUx45VX7WjoXvH -OxpM4VqOMAoPCt8/1Z29HKILkiu91+4kHpMcKSC7mXTKgzEA3IFeL2UQ8cU+WU6T -qxON8ST0uUlOfVC7Ldzmpv0YmCJJsD7uxLoA7vCgTnZPF0AmPEH48zV238VkYbiG -N4fdaaNS19qGbVSUG1YsRWV47PgQVfBNASs2kd8FpF4l5w58ln/fQ4YQk1aQ2Sau 
-D553W4uwT4rYPEQdMUJl3zc49AYemL6phy/1IMMxjHPN2XKeQ6fkOhHTPzs3iQEc -BBABAgAGBQJQSx6AAAoJEH+pHtoamZ2Ehb0IAJzD7va1uonOpQiUuIRmUpoyYQ0E -XOa+jlWpO8DQ/RPORPM1IEGIsDZ3kTx6UJ+Zha1TAisQJzuLqAeNRaRUo0Tt3elI -UgI+oDNKRWGEpc4Z8/Rv4s6zBnPBkDwCEslAeFj3fnbLSR+9fHF0eD/u1Pj7uPyM -23kiwWSnG4KQCyZhHPKRjhmBg1UhEA25fOr8p9yHuMqTjadMbp3+S8lBI3MZBXOK -l2JUPRIZFe6rXqx+SVJjRW6cXMGHhe6QQGISzQBeBobqQnSim08sr18jvhleKqeg -GZVs1YhadZQzmQBNJXNT/YmVX9cyrpktkHAPGRQ8NyjRSPwkRZAqaBnB71CJARwE -EAECAAYFAlBbsukACgkQLJrFl69P+H9BSQf/Sv1aGS7wJKz7/Yi54t7hVmwxQuVE -pvAy6/m6e/ikLRFInWe1kNiLlOcs5sjUgqQtoAlkpvw35klIwmNtR8jRVZDsvwu0 -E1U5XIJ0icQEsf4n0N81rYOlwrQuzDNOY0p4a7jpLFAwMhNwrBreF4ebz3ZF9yqu -xmWuCoJHE3iA+J/FaMzmGdNVxMpQXUPOjdX1hNH2e1BBGwbUqpSlqI8qfjEVuYjZ -Ts0u7xaHN9e6DaqwRoI9zcv143yY1FrRJuWFBLCsdogFxDDUKk2VwLSFw45dmZRT -ABD8ew0Y7kkwHTmsEcVg8PM6XAVcVOT04+kVZQJ0so2Cd2sL041JreDaDokBHAQQ -AQIABgUCUS5/vwAKCRB3FndEyejkKMDxB/4szydmGO8nIZ2eBqfTkQqrBzkcCmmL -fily02lKt4m83FIFdDi/J1VyS90Ak0i20Z5aNUOvpnrXDr6H2syhTBmQowtTnCKL -momS/Aa0/DkllV7p5wQomuv+n22QyMiNMd6d5iub7MYkDH8Xx4LL4LNbAZpwvDXD -rwgccfrOwllGHI2VIFz1kkA1HNdE9ZzS3Md9Pse2I3Z1ArY6UUtGv7i30osVp7Qy -w1GvgzqcG05f4IE/g50pNt4BLJecrwZumekSOfRviKyvp6gxwls3BUFfhecjlEb9 -SC6vh6z2S05CRQXHLxmmnz++T/6HJYe6evUbZ6ZrZ1qTzMchrsbZPwFviQEcBBAB -AgAGBQJS2YorAAoJEEjriy1mvrzjXL4H/3Z17wsMqPhSN9XTmjp7QufqUhGDGl7F -uCrJDsD+h1n0rwH831w01oVklHi0AC34TxdqFzJ3eqfSuym8jtx3CXimyU74Mix2 -h6O4vyDtIENYKMT2xAsQMvEbaGpSQKtzmaHox4BdysrXYsoKrW4w9DNzY/k+vPYL -iPRchMHNIbG6TG2gL++P3f6H+AZxBTNAircvhATWrxcXpupMWm3qL60lQtJl2sU2 -RyDfPHQGQsBx4YJ34EO/74zgFGla7ORcf+0Wler+t6G6HdlKy2S+mpmi36rfgYwK -AZS+wz9TXxqRgkVimiDbmt1hMtOzd5hKpHV5/oFDiWEZYPHh6/jVgiuJARwEEAEC -AAYFAlMGdm0ACgkQ2C/gPMVbz+M9cAf9E5tc5jNpTRHyW50ISElxaXHciJthEJBl -RxBRRN2I8cSIRWra8+u66O8v0qYrZzmW8rdMa4+bzTgX0ykIFYDoZIzy3GYid08h -S4Aqhk/90Ssyj4Dr30FsF6xMZjS/WkXp7Io8DlyCHpw5pRccII6Xks+JY3rrgS7C -T4hQzxuLdDHvw+ilb4TQQl6F3c8uQLlfIEgh7pgj2i9d7wrQHQMwxYPJ2B1p9OMY -IH+dI78LWqlru1XC8YsV2H2qEqd1vWRsVgEe/3ntmFdCgsWj0PUgA8TNcSver0Ww -2BpW8k2UmPvemN6w7oM18ERccevohsaX8iuYf5aCjtmbhEhhbwN9OIkBHAQQAQIA -BgUCVcQyrgAKCRDHXurc0X7YRErCB/4uDl6B5/rymPi/3AK3LMyJbLqZZzErK917 -s491J+zelFywOoUEWdH+xvUzEOonioTvKkGrQ5Tooy3+7cHojW2qSauLh+rG+b+7 -3TZJyRSYDD4nwWz3/Wlg21BLinQioaNTgj0pb5Hm70NwQwUcFtvyJNw/LJ9mfQax -t//OFSF2TRpBMr5MMhs5vd85G5hGHydZw9v0sLRglk5IzkcxNdkuWEG+MpCNBTJs -3rkSzCmYSczS1aand3l/3KAwtkau/ru9wtBftrqsbLJZ8Nmv6Ud44nKTF0dsj5hZ -aLrGbL5oeMfkEuYEZYSXl0CMtIg0wA9OCvk3ZjutMy0+8calRF87iQEcBBABAgAG -BQJWc8vRAAoJELPrw2Cr+RQDqw4H/2Oj/o3ApVt42KmxOXC5McuaaINf3BrTwK0H -DzIP+PSmRd3APVVku0Xv89obY/7l4YakI2UTzyvx5hvjRTM5jEOqm4bd0E1atLo5 -UIzGtSdgTnPeAbH07beW4UHSG1FCWw35CwYtdyXm9qri9ppWlPKmHc91PIwGJTfS -oIfWUT6MnCSaPjCed3peTXj4FpW1JeOjDtE3yR8gvmIdIfrI4a8Y6CGYAUIdVWaw -NifLahEZjYS2rFcGCssjBSaWR25McL7m8lb/ChpoqpvQry3MaJXoeOFE7X1zInPd -a9vDdWR4QFrLDN8JjxzBzwsQcfaA+ypv95SlD3qL6vFpHGHZ4/6JARwEEAECAAYF -AlZ1TPMACgkQGMawXRQNVOhCaQf/aQZ0xEVW+iBuqXzd65axP3yWS9dM//h9psP/ -UKhFzfxCdn3XzmJ92J0sv22DjR8AbbGLP/H9CeZY8nCQnYOHp+GQikGJNjzyd1Zn -i+Ph67EYfEV2eqRO55GGmiRtUrZaur2pfnbNsvTQtA2rGXen5tLSsCh4qDNHrM1T -lP9MSV0clzoVWRrRNvkODrSDaCdEEDrOqfy0AEFlLmBTqSsduo4cO46j0ruC0Svf -lYx+2HN3rVtZzt1wrhaPBPnV6gP7dhKp9XM4erWV40dP14YyDExZoKNys7Kq7pnR -QMbE3HL6UGa8VPvu9eiELs7kw01pYBtYl1my9ekminj8cygpdYkBHAQQAQgABgUC -VolllwAKCRAjRRsQeqA5QYnjB/9oDZYh20qEpGIZRSmur8M/cGFKJ6IMxBHFIz73 -PM+hHB3v28aYRW0lXGu8BNGZVxkTuTjd1HlSFMCNpcNfbMmRhEGtEp3qGq+cq7zu -72lVEiY8tJliq9zyOm+guFzUQ00pvaXuTUFlshvwlRS+GIGn8U2P/SVRGqSOqCki -dp4f06yElt5QifwzvHT8KvxjPgFA5NfQAXE5i/IoepV53XDhECqOvsORbc0JT8n8 
-/4hT8qHTno8UNbYK5BQjHlby92v7ZFVgI86Li2zb0HgQSmvpU/qRibSzg0gEUrWw -UR4knTkoKYQwjry2bQ653oNgv0OsnSGEroYOyQ1Q96jOMFKViQEcBBABCAAGBQJW -xLxwAAoJENnYUJL2kkSzPbcH/jl1mYhR4f25pRe1InyR7BJF83YDhJYIhbBCGqGV -enFEy29hco832HkhMUukaos34KZjsWGDFX1IWe6cxOJvBZsDYHuaLCueh5I8/Tmt -q+HuebuF0RJtJh7ItJoCrEv7ZyUQmbJ+aHLx2pXSqYUIiWlPvIlG2/esQlUo7pOu -b7eEb8U3oKWYgs9HkytMeHSTKiuFJ7mzEyh2fLcgsc2q1XT4VxuqksWxYv8MstTO -xrltQ7LyP2QH/BzfqI5yE3UfSSg1sZE2Nh2cIFNWTYVxdx1fBJWGtTT7l2o99mYw -ufSLz1UTbGF5PcXeK3sYxN5IJta2FUByaJAWPJonRnojinyJARwEEAEKAAYFAlaU -NeYACgkQhKVEYnRGm/7r8AgAkY8sPCR4JKQEgbCSDky2uVzc82QaxfaFvYY/oJSI -54X9QBhT0dzEu/racr/apjyj3pdjkP8IM5Mya9+v9LZKLKne7pJUNsSiPUpfudPM -i19Z2TW3+7F8LT53XNALS3Ink78MdAENpuxn1ERkOoqOFOKaKUUhaW4ai/cd1prz -GQSKP1/TlERqs4E2+JphTGjL2LlV+jpSHyMD1dpfD6ZLlEiuyxr8qUV+HTbBcfnG -UTEd56mjiDv2cUP6WacTlP0+F+NGcG2iAJXdkF6EClLyEnN70l8ud7HuXUMZI+nr -J0jKqhYduTxViI8w98cxKVelp66mt+rzF0GGoxPZroWn4okBHAQRAQIABgUCU76j -IgAKCRCPqWBGRct+91LXCACsE0vF2X24OnXg3uSlhhYMaLQUyA2HJAIvObrAVlWF -n0vVvCF6XyPX32vOlM4v3qKMBl/hX8+uhSO+z8snQabR6ostipGiWgGRKWmbLB/5 -PfAAONPmJPsB2ACM4R2ojfiauMNcT3+Rszkr1rwnZZYu2Xg/hJpOqJaZAEkFs0GV -ovm4i62pf17Zyb7+O59x+ki8AWL8AsK7QAELZed8Aql7Oi3PLKTZPGalXB6Yl4Gx -wmBsX6Y3gmyxBCr9ZyAaJAe0jG1l4qOlJUL5P32/j+g+xw7I+3ntw/n9AeC/c3zu -KTMIfC186lZdYmYfNr2oiYM/PGWTdd4xnJt+97i07ptJiQEcBBIBAgAGBQJUmpGD -AAoJEJQEaQws42QMeBQH/Rj976yL2XWYrA6nDqEZipfIlpGOV95ZsXuQWkOKo0Om -lL8bKKaizeBEy026fKB/XN7E8rIUANATLXSRbLf2Y4QxPaVPdCnpfQjGxIKtCORR -OQPt+PDr5qG3tE1D59lgpcMkRjwnuF9FjBIpBB7NwW9Qhc2H22yHtDdTPw6o1L3y -r9JXN9uT+4tg5Ww7lKC5jTIBx8evoxarMZTQqG5KviK4Si8b9u/yt9d1DsxAoj0S -GzIR5ix0InPTHiadG8yYk6NYnEFUZ4puWaL3KV1tqYiUGlqsf01FxmkXQo2KySSW -TmyIuqp0ge0o3ccCVKuhrkxG/wSRJUBZ47Mckr07tuGJARwEEwECAAYFAkzhRMsA -CgkQTsYHIylgbnfbuggAwM65VhsyIv1qfHT6xG4QRBltjWi0KhMIh/ysMQEDDREE -9i5c59wyQdY0/N+iiFbqoCN4QrzfUBI9WDdy1rkK2af+YzZ6E7dj5cIS16dNkk/x -m0eDelkS3g+1Bo4G2tbGpfWHrfcoQhrRrt0BJpTgo5mD9LIqgKFxKvalj6O3MNpy -xnyr9637PPaCS129wNKQm6uQ+OU5HH0JxYWE53s8U/hlafQDQCS58ylsteGVUkKZ -LKTLIbQOifcL2LuwbTjnfTco3LoID6WO9yb8QF54xa8sx2OvnVeaQYWNoCzgvLDQ -J8qP241l2uI61JW0faRwyY1K9xSWfYEVlMGjY15EoYkBPAQTAQIAJgIbAwYLCQgH -AwIEFQIIAwQWAgMBAh4BAheABQJQPjNuBQkNIhUAAAoJEO6MvJ6Ibd2JGbAH/2fj -tebQ7xsC8zUTnjIk8jmeH8kNZcp1KTkt31CZd6jN9KFj5dbSuaXQGYMJXi9AqPHd -ux79eM6QjsMCN4bYJe3bA/CEueuL9bBxsfl9any8yJ8BcSJVcc61W4VDXi0iogSe -qsHGagCHqXkti7/pd5RCzr42x0OG8eQ6qFWZ9LlKpLIdz5MjfQ7uJWdlhok5taSF -g8WPJCSIMaQxRC93uYv3CEMusLH3hNjcNk9KqMZ/rFkr8AVIo7X6tCuNcOI6RLJ5 -o4mUNJflU8HKBpRf6ELhJAFfhV0Ai8Numtmj1F4s7bZTyDSfCYjc5evI/BWjJ6pG -hQMyX32zPA9VDmVXZp2JATwEEwECACYCGwMGCwkIBwMCBBUCCAMEFgIDAQIeAQIX -gAUCVANGvAUJFKmPRwAKCRDujLyeiG3dic/HCACn17vJF6vW/UajLCut9qbdp9Y3 -LsbRsjmB/3xY+uGSBzdxqlVUOk57zs5p57N4gI/p25MTxLli6vDH6VtwI9mnsKji -7KMB0iNUcCHAYaGnMbfhjxnp5sJ3jPQASU0akNCky0qmkucJFHCSe6+koEifizEO -Cyh8iDq+jiLycvmasjOEOl9tb00r2VXgq+a8HhP5flnN1ORDTJrVoe0Z0xpO0qY4 -xWT/hx4ZEPDrmmWUYSjhlASAZVndOY0eJ2K5KEaqJURtIMZEURU5SIX3K9H67qH/ -2KpQvE6gA59gy4a1q2Otkzjp3wNHY7WjTSdsYdZhpBWqKYZ3nZasImCEAgzFiQE8 -BBMBAgAmBQJKoOxrAhsDBQkJZgGABgsJCAcDAgQVAggDBBYCAwECHgECF4AACgkQ -7oy8noht3YmVUAgApMyyFaBxvie1/jAMoQ3uZLjnrP/SWK9Sv9TIiiJxig4PLSNn -+dlu1EZicFoZaGx+wLMhOOuCoLKAVfo3RSF2WgvBePkxqN03hILPAVuT2kus+7f7 -y926lkRy2mF+eWVd5CZDoHERABFtgX0Zf24TBz90Cza1tu+1OWiYgD7zi24AIlFw -cU4Up9+ejZWGSG4J3yOZj5xkEAxg5RDKfkbsRVV+ZnqaxcDqe+Gpu4BFEiNv1r/O -yZIA8FbWEjn0rnXDA4ynOsown9paQE0NrMIHrh6fR9+CUyeFzn+xFhPaNho7k8GA -zC02WctTGX5lZRBaLt7MDC1i6eajVcC1eXgtPYkBXAQQAQIABgUCU3uwcQAKCRCK -cvkT9Qxk2uuTCf4xTAn7tQPaq5wu6MIjizqrUuYnh/1B4bFW85HUrJ45BxqLZ3a1 
-mk5Kl2hiV6bLoCXH+sOrCrDmdsYBuheth9lzDTcTljTEZR9v5vYyjDlxkuRvCiZ2 -/KLmjX9m5sg6NUPOgeQxc3R0JQ6D+IgevkgTrgN1F+eEHjS+rh4nsJzuRUiUvZnO -IH1Vc92IejeOWafg7rAY/AvCYWJL20YbJ2cxDXa7wGc9SBn8h+7Nvp0+Q4Q95BdW -2ux2aRfmBEG2JuC4KPYswZJI9MWKlzeQEW6aegXpynTtVieG8Ixa+IViqqREk2ia -XtfoxVuvilBUcu5w9gNCJF+fHHZjUor5qHvZz91/6T0NBlCqZrcjwlONsReSh1St -ez8SLEZk1NyYmG56nvCaYSb1FvOv+nCBjz5JaoyERfgv4LnI+A1hbXqn3YkBnAQQ -AQIABgUCU3+zcQAKCRBPo46CLk+kj1MWC/44XL3oiuhfZ/lv+VGFXxLRI7bkN3rZ -rn1Ed+6MONU5qz9pT9aF4C5H/IgAmIHWxDaA30zSXAEAGXY3ztXYOcm4/pnox/Wr -6sXG83rG5M/L4fqD0PMv7mCbVt6bsINX5FTrCVUYU7ErsdpCgMRyJ8gKRh/tGsOt -byMZ/3q9E+hyq/cGu8DjhfEjtQZDhP1Gpq4cyZrTRevl+Q2+5juA4bCyUl00DQLH -dCuEEjryq4XWl0Q2CENDhkVV+WkvfuIOIVgW11j7+MmMXLzMMyk4MZtzgedJW8aU -2/q0mPn313357E9DwMZj9XvB3JCx4dRjBR67zwYySVvnK8KMWVNPWcleVrY+oj1l -9psq+d4pkjtAa/cd1mBfh7h6uKzkekj/zWuJV0+HEbKRmmBpc8SWc4QRNUrCBk7v -VfGsBLCmiCK9Rij1zgrwihrw/T77BcvOcxhZNd3Y9Vs9vavExF0/5IqclwcuJqQO -5fRKmMCFi1rwT5ZcWANmJXdaN8H/7D1WNXuJAZwEEAEKAAYFAlN4AagACgkQRCkH -tYjfxFfaSQwAjmRJHNBnTYQ2Sluy9KzmgtiVlxl6Maxr2zBQvXv4/mH2Sl2BeFWa -M8kiyQzl6XZV5/q8TCkmskW0N8YOl+l6AhFGuh4PS8UWe050fcxJCB6Z6XUFdvVQ -1F1dI3bNcmm5libcMSNFNS7pQF1qaz4fmVniwPx1ezBdAvd4n4l4dipg2bW93iPM -iy1JDRc1Um6U/ouW2KnD7l5/PkQKWLzSx96xvfimDD6DXbW+/7nFhle7foTLSlFO -cyeuXCOQCa04XQOJGKZtiVp1Ax3Mv8t1A0t2EzYlTTKZCCCCa9EDReI1m7EJZ7+S -JueaW6u6/TuM887l4FFuM+6Bow0IEC8FJyPdZg/BqnZ3tK4xSm3tF6oxc8IkaQJi -p9R76hPSWRfzc7ooTbxQrzYVzTZa/pb6RfL5bTi3Q9D1xCRjPtkZIceMWfPtnyml -TIDwdefzTT0wxj1vTSluqMih0LODRDrmysDSx9MBfH+zhigweooCCj0wLmOkmT0P -jgJvL9TBG5HViQGcBBABCgAGBQJTeNsQAAoJEPLvL0cGnouP5ewL+wVOickmGd+D -out44YAmPXSzdP1KervaRAWIQLFda7XFb2krwGwIpkw7hR9qhAG/CWbF/WRQqWB9 -M2qQEaHP7LXjPuCQVf9w5UJXzKUBft//PRF6IzBOm8g+yHY1MJo3x3PDd2Bym2hn -r4iV4teVnoHiutAcKPndpu6idaTkhguNuKOc1hXqILi3x9WRVi1d2UL8MakyamVz -2k2sRktKQEZ4goEYq+8kFeT/T0DH/bB5N3PEKwpK/v03T4fD8ihMFYwblN7Y+Rx0 -mrYthCIQYpfAVA6eXjyABv4kRj/l1G1ir8ar1PnrHiNp2Hv1aipDvfDZnNpicwyS -OrdyQgpjGao75Ipw1RNcCuS9DWUUPOYYQQfknCeUMgtQDqoJBYiE3wp24QZw3Pss -zyMk86bQWqGuhdrmA97zwX9f1me2BdhwyLPkBJVt/6t2Tp+vx00VmhbQKLbpPIAC -zqAGw8RtUx1G5bmSjRgAuo6xWOC2u9Ncxt33u/zQ7UvC/wQ2FwHHD4kBnAQQAQoA -BgUCU4DA6QAKCRAq0+1D59sVj5pDDAC+MneOmun1zAq7WSSZmf+AI3BzYGoYN67l -J8QXTcgDgbqXAtGQvp71G2It9ugdPEeyQ4T3DxNIYA2uC344hdsVCAnQHO6NMvR5 -A1qBUldxp1w7GfgV39p1ANzxDNwGjwwfUQfqk9VEOp4+puut4o2fhyMmkC9RaGzW -V5taPyWL1N9+JqfNfsjWFC5qeS9JOLTvhmk2lLVKnw7uKluiQVzr7yj/gqcsyA2s -Pfs938cIr96CveTdd3d1IWcRErB72e3zb0PKKvrtXjfAMoZG0vrsA4So0D2Z3Y71 -0bGgLQ1WYDlRw7YM7/XKN2WWIBWxLNfEjVIuVnpHLCTNdmntLp5oaBsC9TrDwUMD -Z5DEro1XHijX3h7x5Ni+XU89ZodSeQy9uvLwkgjiZIxD4DfCXQNc7I2a7h+M3rvu -3LeBIQe3v/KNMDpgL20AyLxUs7/eqe0zWm3F4sfYu7ywA/mkH1Az3xTWj/I76Wlm -KPSeJpNEi/fol0PCsTJ3vWdpu1Hkt4KJAZwEEQEKAAYFAk6poj8ACgkQoPIT8Ubr -WB+JSgv/WGMIx4wAa6IHQdrG+PSSIjNg6nvhvvhos1U2bJldujyV1kCyq9symQzC -5N5f2/WC1ZLhXhtitN+RzxQraViJcZMaW2qyOYvRdDYzJoGiMqr4deMTYQ0ujR4I -qA3TSr3TSOS2LVlxkRI2CVQgSMHcVmR4uSbEIFdhNL3yGRgylhXzCBdsa7esdLr4 -VmZw/eHnFNA3fLBc+0yiaAnc33WKaI+UTnpyxieznAzFC5J0gODRFSBGNAVckAm9 -0wo326AkcUXV7Puss+GBKkwszzb0KiCijujAP198rZlKJEWSFZlPHrOImcWFF6xy -njM672FaW8MB8Kc4hsAa/kjpK0uYr+XNVdTW/qUmo7cwJuLUJzaMo7wdmLBDr0y1 -DYGknGOTu17vxRoSOhPWPCmw+529rfV3bRBYB4qtUk9ebYnsOmDQmrVQPxlt6/e3 -nYgY6gF9+YahUmdOcmjQ5QoHysa0Yr9kh7vobrQF+3FS0wHrZtpqyQW9Iqe4UCq5 -lDqc4k+4iQHwBBABAgAGBQJSn809AAoJEP21uMBn8lOH5OwOn2sjIQLjowLD+bOx -nm2Fc0SPAI+UJqeMOw8iUWKcE88iFT18y06TpqUEaeLAoZSAPWDPx6PI4WnL6QSg -z7ia96dg4VU1FS/gUUVqExScq5WwttH7a95wjjBFuMhwNrvGyrx8DyFM2rKqj7cm -ydsHY18e1eVuk6cFp6SRR5ek8qhRLG9G/zXutU8e5gfJv9FFIysruNztJOaePXRQ 
-OxVFJZmmZoxq33cUjdx28EV22jplmJ+Ku00S+egZi1mZJccFwmNZ8BF3ZAv0dFNL -OjTVOKNO4WoW3NtNksy1/T2hwhAERFZU89p1XSF3D1HaN4TiCYRP9pm3yk6gfFkY -BOY3183aICQYT+w+KVdg8rxPkiryF0NV6f9kjfYS1sAYPqUMIzKYhV/h1UcPuqe0 -KK0f5NpK8obXO2lJzLZE7gqtbc88G3Rg+3vDukD4boSHdOhm6ii6ZamD2qdrFQDj -ROjRy85lmmNJxXc14iAl0pNKE1EvTcJgJ9FnQngtaSYuk+YsqKcQzTMiBRUwUwxY -8Vm3jKOh0SrG454RBGsleG8CfCGj44bxFE6Q4MldvJAkkYCfM7zSww/p5rOpN/Su -3NMY4gYCHpOO8zKem4EzwJ9ENt4cAjH/hYkB8AQQAQIABgUCUtLMDAAKCRCkIrnY -5Sfb27Z0Dp9OELHk0T6eraFt6k8z7NrxU6Bq2VroUQxcRFBLkdRhjN1BVr5P2u1J -2h7Gly6maqiiHalpQm9RMHXRSSomVPhav7EZvOlQHiujkJDcDLWyLoFtlgvTD63v -A/YFbnceWY2ATY6gp1/sp/t9zO/ywGuk6+xlVKld2jNJbQpkBwAUadWnEFpFixty -EIgOU8uuIV6wj6/3VywshbG1Ml3HL4E4qpSqOTgoesQvLyyNjlI3JL8KB+cljNxJ -xBOE5sqRgB5PhD9mlZDX5WFjL3EWwQHi1SpPPmuviciSZy0Shw0yjevvpMnHnkze -H29Qb9gDfkvlmS5Hk2rYm3qvu/I59xEtmJfSXYpOyhe3EsffmOxHEqLmtQ12cx1T -uKz0gXFnh+Mm/txE+sVHsiuPomanf9Ou6k8RA+mQ3+715P/PhoqG0Qu6G+GNCIoB -+21ln8Yr+gwbsKXEYqVQITEXqDkeNlCHy9SFjpPXf5XJh45k4mLxul0THAwLt1R4 -1ChP42/+r3KuYWfXXAUsz0Cf5kfQLYqpwN2OkXytVK26UN6yVLuFBetTU4uJWtJB -QBE3HyUtq00YASfrWy9ITz1Qv/NWc4xdpoJ789An1cV6UO7p9oi50Sine6uJAfAE -EAEKAAYFAlKGBO0ACgkQN4Uj/AufSZbFOQ6fbHEEerx0zf6FtLG2/EyK00q95yQY -363WfM6fXvEbEHe8RThPoZswxLAn96yfTNWXLhDS64muDntsPPpenk86siNzp9Br -8qN1fKkZY2tBjyUtvGz9i+paQWowXPfFeV5WutjqRY3cn6xY4SXWNWyffr3XTYqu -blnWs4s+yJuHQeb3XiWX4o8p9csmTuC5sJgmZpkvppRgzRpHAd8VCzzC/cMEVeV2 -+cbFon4sHw5NJVAXbaRoZ/P4SoA6S2Tz0SB1FWNa1v9TEu57/f7l8XYdI6nL4y6i -mnJ/RZqgpG7gJUqJSwS/iu80JJqnZJ030hWrRZHHp2k+ZWr/kZgKGCxHbRCcQNpJ -CmPmSuJccVABWIkoKjgVR4jXDbh+saGYLn2eUUzxkZmd7xaDSNUBhP2qdtKlGFc8 -ESL0qZkwixLhmpgUgFsf7D/bGGJyVkhOji4rJDZx9I0K5s0JrDrEqO0nzYod08s7 -aaOcQrgMYcQA7x/Z3BlSuRRo6KK61dOO42SzSbFSEW5Z8IEfSoUYHoyN81kbfC+j -/q1dpwg+Bhw9PTqSWfLiXI6H15X7H/Ig6NDK0U9v9s+gqmqG0AtQhEnCEqKNZFV1 -K8rnY+B+lNXMA0PIgxA0iQHwBBABCgAGBQJSjUjjAAoJEMQJSn+pq5SBKV4On0Gz -b3r2SAx4CM9zAhGoQw81yM34WUHrkDESj2TrKw0sLYLMzM3wriEzFT+88buowSBT -8h3ONNDijbj8NdjYQCfY90bqgAROZ+W9/dmV2C9dJxmv5kWJQ/5D2ksuVpu1LUyK -6AWXEkV1KpIcRHCP+Kb8EWaMEjPPQbNJ1KrFzAFfIUeFTbBL5kMmJK5aYVUiHWnL -Zq0SK5OlWGqBihuRLI7OIoBOjlcoXvFoEgSkgUKpapE6C9VkErW60WCK91sMhaa8 -CY9pVDPaanMG2o73BfS3jGPylm4H2+8jlJ1+l5ietvoyiqOST1iIfOsbi30mxuVJ -4JBvKtmapqpBwT6eNvCiPKsMyjB5oWI5IVbK8MDIaYQM9TL+nyMGhl19GzcUMP8t -ZRlCifM9b/zmMMt1sgVY0koF8AZfh3Ho9KLyXqNMUtXAFSQrAcTbN5SmzjlJtl+h -z6uhiHH9kAeSX4MFRXX6JDfZxyAw72JqJkZaPEAKQCpodkNwNG9b2dedIBsTaD9I -oEkryDtR17qV2ePwlCeymuwNnGVVaJ8hLbI7ZATbIaSn7XNvMGM8hX0N/ram5nTv -rR2laG1o1ss5oxtg7PfTrhMyCTrzTcxc8VskAgtbJjoyi4kCGwQQAQIABgUCUVSN -VAAKCRB+fTNcWi1ewX4xD/d0R2OHFLo42KJPsIc9Wz3AMO7mfpbCmSXcxoM+Cyd9 -/GT2qgAt9hgItv3iqg9dj+AbjPNUKfpGG4Q4D/x/tb018C3F4U1PLC/PQ2lYX0cs -vuv3Gp5MuNpCuHS5bW4kLyOpRZh1JrqniL8K1Mp8cdBhMf6H+ZckQuXShGHwOhGy -BMu3X7biXikSvdgQmbDQMtaDbxuYZ+JGXF0uacPVnlAUwW1F55IIhmUHIV7t+poY -o/8M0HJ/lB9y5auamrJT4acsPWS+fYHAjfGfpSE7T7QWuiIKJ2EmpVa5hpGhzII9 -ahF0wtHTKkF7d7RYV1p1UUA5nu8QFTope8fyERJDZg88ICt+TpXJ7+PJ9THcXgNI -+papKy2wKHPfly6B+071BA4n0UX0tV7zqWk9axoN+nyUL97/k572kLTbxahrBEYX -phdNeqqXHa/udWpTYaKwSGYmIohTSIqBZh7Xa/rhLsx2UfgR5B0WW34E8cTzuiZz -iYalIC/9694vjOtPaSTpiPyK2Bn/gOF6zXEqtUYPTdVfYADyhD00uNAxAsmgmju+ -KkoYl6j4oG3a71LZWcdQ+hx3n+TgpNx51hXlqdv8g1HmkGM5KJW31ZgxfPmqgO6J -fUiWucRaGHNjA2AdinU+pFq9rlIaHWaxG+xw+tFNtdTDxmmzaj2pCsYUz/qTAN31 -iQIcBBABAgAGBQJLaRPhAAoJEMXpfCtjn2pmYaYP/j/TT5PPK6kZxLg1Qx6HZZAO -YRtHdGIub5Ffa8NO8o2LreO+GlHdxYyRajRKIlvunRWzcumKqmD4a1y7Z3yZeSwF -CVMzANmki7W7l/nKtfAwr+WZlOA1upGTloub1+0JEAk0yz9N1ZXA9xruh8qH7HgT -IBOM6BF3ZmUmZj5zsoGpBS8wvcPg9V3ytoHGkyowCSXVvNGmOenlHsxQyi4TsPmM 
-yCtf2Xnjk0uC3iE7U6uSev4Z8B6yXYwKV/NL9lic1VaMu5UG8QD7JSR2XWFRQgct -k8pO5GHXXVcWAnHWK9HvAPhnxv7UCRsb2dzuJzq3s0r9F5pYS2ea4wp/DOn4PzSl -F7D7V4mnPg0CW6+UcEOUnO25z1bAssKnrTngPsb9y9sIveK4OLve0IsKoQ1tEhPc -2bkC+b2l5fxhaWkV7PplRgE0vYftJQwUD4ttaD5HTfwSis6//9hgpeVRW/q5DmOu -R7YQroiK0/IxRgKySBeJ15Lv+AT6Ta4GpwvPYk7HeflFDRSJbWvlmJBDUPbQtpsI -/egWitCskUGT/QAM06OcBvGqLnM6bacEh9GhAiTcvJHf1EfCAJGZMY2OPs8n0A5W -+GjQ7FRr3pqYIxXDaNK3Iiqz0JeRskS0I9ms7r+OoGhnGM6rKG3o0v9o6iSzJ5E3 -hMWgq8q1rl6P62lgVkCziQIcBBABAgAGBQJMm4KuAAoJENh0cn4zmn+obikP/2H3 -suQSV6maiLfYurcsNlaszLWdYAKhXRCnrkps99MbcvYOipJyI6XmaPjzm960BVCh -mf6uAI1inQ/QuVlLy3F3dEQlngxu3Zg+/Id+TlsKoXPvBVztb1NJxshXRMfPXDYj -uNjP8/nmHqMrIFS94sWwoyZazeDB64parIcR+TLxuyXyH6D8LnEMrTXMEmvE/ZE5 -Zgvbkpda8BJZSpQzWm8TKbH/vU9JGbSPikK7zAYPAOEUSYaT9dwbesvePRW0eM72 -u5KlduIfuXP2yrIGOD11zPgJyLl8vg6tWkVYES4VsqSanO91J6Q/zAwzjyl/J5Bd -xdJo3HxLKOirbzbJ4jwq+RinJ76Brt/KpUOyC5tj79LYwRzSGEDRvcT59kzB++A+ -n/PDWoR499x2uzxvCZ/3WTLioO6hHh4re/pSQ59fHE0/MSDDFKZfQKZoy7lsKOXk -18rGouz0EFP3sxGzoGKs5wShBSvglx+iiDZxh9d3f6/S+9QGY/ymbCPnOxNIpi8F -ErbyRGa9jPZ0fsmwOEjev5MHBeZ9pMfpQSY6gZ+9oW9MMml17U2BRnXq7mCBrMRM -fIpmyAQ7V+q1jjCK2QB5TwUuTU4+B8nteF3AoUfwKHZl64CQ/8/vPrAxhmaRwHNv -dcJJxzvvo9trxeO0NlUrfE/ljOk8fL6tPlrJ7ov4iQIcBBABAgAGBQJNGJ3wAAoJ -EIO1uBYaG9UOMXcP/0kA1SRdYd24ORdRdkVyhI8QqBE49+seV3iElKsk6e54auaQ -DhpSFXfCLbSY2tmEnxD2AWDVwUDHtBPuKXREr8ytB44MKVm5Ar7M1o/ner+RJsMd -YR1bxLxF4j5MuPgTLaZKEszxmI5C+eo8wvf5heFwtIq23HxO+7DtYO2XKWLj/k7Q -3K760YvLtO72awqfMXr+MxX57/L6qyWdiMNfNiT1uGv9BpixRGB6xbDN18unpVKk -3sLPcE3oc44UdkSuxVrqHXVMzUIxpQGqOf+KYk9s5Z0KijllK09uoZI3WyKOR2I5 -iGJDuBBzbuMGP23Gr3IMRTmVNAEWmjpxgLC2j1t80ocaAkguejTAKTjjXH1MWJHo -ESsBXKdbk2xuAvnvqQqZ7weZfLCBS4XoSGdg3teeGa/ZQOHDknrLurqaa2ahFGxc -G4lOrf0OBZWMaI9Kj3HnrcThmEOwIozL4SDmUvvQxyK5s3uZjphFAyxRhQx1fCKh -nyA+D8oVtnTZ9uxtUWstIKK5RlOCxWJH3obvEGmGi+6E+zgDsK+ivqM8gFjj3XmM -pO6dh3/yZ6B8b8kanj4cYlCHhpeJ7v16G+FvGh/aMBlCopXAvoTprxQgXa12MgYz -YGRyuviOV+PWo+RTTPRyYmJ9RLADKSdHwA8VUvHp+nxZucES1M9PxVq92hhWiQIc -BBABAgAGBQJQezFyAAoJEFOcQ2uC5Av326UQALBzrx914us/lT+hEnfz5aRDE7Tw -Ohrt2ymPVzLvreRcaXOnbvG9eVz3FYwSQtl4UbprP6wjdi9bourU9ljNBEuyOAwo -M0MwMwHnFHeDrmVFbgop3SkKzn8JHGzaEM+Tq6WKHYTXY3/KrCBdOy1sQPNeZoF7 -/rq4Z20CcrQaKdd0T7nAEy7TLQIXEnKCQKa2j+E55i584dIshxVWvNuwsfeZ649f -2FTGM3hEg527BZ4eLQhZQLHkjIY+0w0EB9f4AhViZfutakQf5uqV9oRlgmHmQsN5 -vMKryC1G15HO9HPSMJf9mvtJm7U+ySNE354wt2Q2CwX1NdDLa8UUzlpGgR6cd4Pm -AyVrykEWdtk/4ADic+tu4pTJVx92ssgiBAQoi/GMp61KPcxXU9O4flg0HDYjerGu -Cau/5iUKWaLL9VBe3YdznoQBCzwquTs3TT1toXHjiujGFo5arl5elPv4eNfU/S0Y -f3aguYbwj2vVrDbp3JxYjJouxklxQ2J4jOXD1cehjZ+xFRfdnyUDV2o9FzvWCc3N -04var7Wx8+0mtok0N0xTkJunN8rkxvVUuh32zJlFlvZX4u61ZY4wI3hPz072AFBd -qv+B645Hrk04Hbu93iZ5ZgcICNZppyd6xZeBvqaEZXS+Zv92HCbxIBS9P7zB3sXm -QT57jusVSUdQtfJwiQIcBBABAgAGBQJQezFyAAoJEFOcQ2uC5Av326UQALBzrx91 -4us/lT+hEnfz5aRDE7TwOhrt2ymPVzLvreRcaXOnbvG9eVz3FYwSQtl4UbprP6wj -di9bourU9ljNBEuyOAwoM0MwMwHnFHeDrmVFbgop3SkKzn8JHGzaEM+Tq6WKHYTX -Y3/KrCBdOy1sQPNeZoF7/rq4Z20CcrQaKdd0T7nAEy7TLQIXEnKCQKa2j+E55i58 -4dIshxVWvNuwsfeZ649f2FTGM3hEg527BZ4eLQhZQLHkjIY+0w0EB9f4AhViZfut -akQf5uqV9oRlgmHmQsN5vMKryC1G15HO9HPSMJf9mvtJm7U+ySNE354wt2Q2CwX1 -NdDLa8UUzlpGgR6cd4PmAyVrykEWdtk/4ADic+tu4pTJVx92ssgiBAQoi/GMp61K -PcxXU9O4flg0HDYjerGuCau/5iUKWaLL9VBe3f////////////////////////// +Lm9yZyBhcmNoaXZlIHNpZ25pbmcga2V5iQE8BBMBAgAmBQJKoOxrAhsDBQkJZgGA +BgsJCAcDAgQVAggDBBYCAwECHgECF4AACgkQ7oy8noht3YmVUAgApMyyFaBxvie1 +/jAMoQ3uZLjnrP/SWK9Sv9TIiiJxig4PLSNn+dlu1EZicFoZaGx+wLMhOOuCoLKA +Vfo3RSF2WgvBePkxqN03hILPAVuT2kus+7f7y926lkRy2mF+eWVd5CZDoHERABFt 
+gX0Zf24TBz90Cza1tu+1OWiYgD7zi24AIlFwcU4Up9+ejZWGSG4J3yOZj5xkEAxg +5RDKfkbsRVV+ZnqaxcDqe+Gpu4BFEiNv1r/OyZIA8FbWEjn0rnXDA4ynOsown9pa +QE0NrMIHrh6fR9+CUyeFzn+xFhPaNho7k8GAzC02WctTGX5lZRBaLt7MDC1i6eaj +VcC1eXgtPYhMBBMRAgAMBQJKoO50BYMJZf93AAoJEN56r26UwJx/hiQAoMT5EmxK +flkAi2UywT99PuQGp3ckAJ4jJubPJNnHFeCNZ6/TtKmHoziU4okBPAQTAQIAJgIb +AwYLCQgHAwIEFQIIAwQWAgMBAh4BAheABQJQPjNuBQkNIhUAAAoJEO6MvJ6Ibd2J +GbAH/2fjtebQ7xsC8zUTnjIk8jmeH8kNZcp1KTkt31CZd6jN9KFj5dbSuaXQGYMJ +Xi9AqPHdux79eM6QjsMCN4bYJe3bA/CEueuL9bBxsfl9any8yJ8BcSJVcc61W4VD +Xi0iogSeqsHGagCHqXkti7/pd5RCzr42x0OG8eQ6qFWZ9LlKpLIdz5MjfQ7uJWdl +hok5taSFg8WPJCSIMaQxRC93uYv3CEMusLH3hNjcNk9KqMZ/rFkr8AVIo7X6tCuN +cOI6RLJ5o4mUNJflU8HKBpRf6ELhJAFfhV0Ai8Numtmj1F4s7bZTyDSfCYjc5evI +/BWjJ6pGhQMyX32zPA9VDmVXZp2IRgQQEQIABgUCSqqiMgAKCRDrWolqKJiL9aY6 +AJ9PJ/c0nvAdMFyTAB4TgxK3lm1dWwCfRcOrw9ZaeTicrpOV6+or9WhYi0WIRgQQ +EQIABgUCSqxgNQAKCRA7nQk/MbCXS+gnAJwJKiSIlI1j7IivecE838smV1vF6QCb +B9TrQZ5pYXDPuGrBUUvbfF5OnKeIRgQQEQIABgUCS32d2AAKCRBiFZZPWxcqsjlM +AJ9wE9uxo8DUBRVVdc+/Qp5YViBVogCgyvePB3U1hUPpN7cP7ImEbPMIPo+IRgQQ +EQIABgUCS36WLwAKCRBOUwAZoaG8BTgXAJ9fcfgaCb/HTIgC+a3gJbwA/0XkPwCg +pqm7BuOwadxPdR00WIeaKcBqrW2IRgQQEQIABgUCTLqaOwAKCRCF9yYxJ6HImkv6 +AKCDGzgLmj47OeTtaYWs9DeVud8MogCeLinbRpG7DHpBYYfyGiWPNNKabkWIXgQQ +EQgABgUCTMEPxgAKCRBrN4EsW1TWjEMIAP4pRDudJEmpk5jQIjqcAPu1qT0rsmWT +Q5ElxPeLpkPIPwD/fdoFfMzDSSdNN0noO595BgFMwr4I1cz6GSsd8GCA8NeIXgQQ +EQgABgUCTgyF3gAKCRCDojkL/aKKGg2vAQDM6swxNsKGjw6wb+0PGCeXBj3H+QEi +oJ8J0outkIyT1QD+L5gYFAIeDUxpnNmt9tJ6gTv+rJk5gNjOrvz7QTXpYtmInAQQ +AQIABgUCTNR85QAKCRDjsV6KbxD8QmnaA/90V7ITTZGfdbvbe7/usuyzr26e59gt +HmsRdSxJn7zG3vng+tMjjDTapwY4vTk/5s7BshlGFT2Vw1kl61VhC0vf+wFUAgGh +lV7cH7DQyJNaBFdxJ0nz0XJ+gbKjDN2gA7tK5VbAD8j8M/sJG6m8cLmFml59+v+e +Yo4VA/Xfl5qRYIkBHAQQAQIABgUCTJFqpgAKCRBjkJvie11mayZaCADGDPzdfisD +nVPK68hnJ7vx1uCgdkMKyAJmNXca0twIiYl1oKmZ961h1Y5qUJOj02AjtxKgeI+b +1hRwGAxQ4uS8bHtYj6Pn/mXK0Q3G1lw0Q6M+q4mDdj3zLAeHR/WoyCQTFWX2gmgd +46C0XQkqpfY9mmfPZxpKoilMxlhX4z6TxxYRiwbxZOB/jwhZMCNMoXx5SYDC3Aco +RqXWd3wCwwy5lsv8XC33cQd/c+XbJIC4hzu4lTj3ndDlptpJp9SPNSUiNe8YD0sw +SITX+R1uzO4l6LLavw02j/MAhfVi3dEpf8lt4ZKaRVUHB/XPTsUmxYx7jOtDyr+w +/lPnZFMhAECDiQEcBBMBAgAGBQJM4UTLAAoJEE7GByMpYG5327oIAMDOuVYbMiL9 +anx0+sRuEEQZbY1otCoTCIf8rDEBAw0RBPYuXOfcMkHWNPzfoohW6qAjeEK831AS +PVg3cta5Ctmn/mM2ehO3Y+XCEtenTZJP8ZtHg3pZEt4PtQaOBtrWxqX1h633KEIa +0a7dASaU4KOZg/SyKoChcSr2pY+jtzDacsZ8q/et+zz2gktdvcDSkJurkPjlORx9 +CcWFhOd7PFP4ZWn0A0AkufMpbLXhlVJCmSykyyG0Don3C9i7sG045303KNy6CA+l +jvcm/EBeeMWvLMdjr51XmkGFjaAs4Lyw0CfKj9uNZdriOtSVtH2kcMmNSvcUln2B +FZTBo2NeRKGJAhwEEAECAAYFAktpE+EACgkQxel8K2OfamZhpg/+P9NPk88rqRnE +uDVDHodlkA5hG0d0Yi5vkV9rw07yjYut474aUd3FjJFqNEoiW+6dFbNy6YqqYPhr +XLtnfJl5LAUJUzMA2aSLtbuX+cq18DCv5ZmU4DW6kZOWi5vX7QkQCTTLP03VlcD3 +Gu6HyofseBMgE4zoEXdmZSZmPnOygakFLzC9w+D1XfK2gcaTKjAJJdW80aY56eUe +zFDKLhOw+YzIK1/ZeeOTS4LeITtTq5J6/hnwHrJdjApX80v2WJzVVoy7lQbxAPsl +JHZdYVFCBy2Tyk7kYdddVxYCcdYr0e8A+GfG/tQJGxvZ3O4nOrezSv0XmlhLZ5rj +Cn8M6fg/NKUXsPtXiac+DQJbr5RwQ5Sc7bnPVsCywqetOeA+xv3L2wi94rg4u97Q +iwqhDW0SE9zZuQL5vaXl/GFpaRXs+mVGATS9h+0lDBQPi21oPkdN/BKKzr//2GCl +5VFb+rkOY65HthCuiIrT8jFGArJIF4nXku/4BPpNrganC89iTsd5+UUNFIlta+WY +kENQ9tC2mwj96BaK0KyRQZP9AAzTo5wG8aouczptpwSH0aECJNy8kd/UR8IAkZkx +jY4+zyfQDlb4aNDsVGvempgjFcNo0rciKrPQl5GyRLQj2azuv46gaGcYzqsobejS +/2jqJLMnkTeExaCryrWuXo/raWBWQLOJAhwEEAECAAYFAkybgq4ACgkQ2HRyfjOa +f6huKQ//Yfey5BJXqZqIt9i6tyw2VqzMtZ1gAqFdEKeuSmz30xty9g6KknIjpeZo ++POb3rQFUKGZ/q4AjWKdD9C5WUvLcXd0RCWeDG7dmD78h35OWwqhc+8FXO1vU0nG +yFdEx89cNiO42M/z+eYeoysgVL3ixbCjJlrN4MHrilqshxH5MvG7JfIfoPwucQyt 
+NcwSa8T9kTlmC9uSl1rwEllKlDNabxMpsf+9T0kZtI+KQrvMBg8A4RRJhpP13Bt6 +y949FbR4zva7kqV24h+5c/bKsgY4PXXM+AnIuXy+Dq1aRVgRLhWypJqc73UnpD/M +DDOPKX8nkF3F0mjcfEso6KtvNsniPCr5GKcnvoGu38qlQ7ILm2Pv0tjBHNIYQNG9 +xPn2TMH74D6f88NahHj33Ha7PG8Jn/dZMuKg7qEeHit7+lJDn18cTT8xIMMUpl9A +pmjLuWwo5eTXysai7PQQU/ezEbOgYqznBKEFK+CXH6KINnGH13d/r9L71AZj/KZs +I+c7E0imLwUStvJEZr2M9nR+ybA4SN6/kwcF5n2kx+lBJjqBn72hb0wyaXXtTYFG +deruYIGsxEx8imbIBDtX6rWOMIrZAHlPBS5NTj4Hye14XcChR/AodmXrgJD/z+8+ +sDGGZpHAc291wknHO++j22vF47Q2VSt8T+WM6Tx8vq0+Wsnui/iJARwEEAECAAYF +Ak6DrGQACgkQ/YT8uPW0MEdizQf+LRGpkyYcVnEXiFUUuJiMZlWSoTeFsFlTLdBV +jxAlcTanW5PUZ1O+fzxhSTjtAgEZm1UJUv3RaJxGlMeOVV+1o6F7xzsaTOFajjAK +DwrfP9WdvRyiC5IrvdfuJB6THCkgu5l0yoMxANyBXi9lEPHFPllOk6sTjfEk9LlJ +Tn1Quy3c5qb9GJgiSbA+7sS6AO7woE52TxdAJjxB+PM1dt/FZGG4hjeH3WmjUtfa +hm1UlBtWLEVleOz4EFXwTQErNpHfBaReJecOfJZ/30OGEJNWkNkmrg+ed1uLsE+K +2DxEHTFCZd83OPQGHpi+qYcv9SDDMYxzzdlynkOn5DoR0z87N4kBnAQRAQoABgUC +TqmiPwAKCRCg8hPxRutYH4lKC/9YYwjHjABrogdB2sb49JIiM2Dqe+G++GizVTZs +mV26PJXWQLKr2zKZDMLk3l/b9YLVkuFeG2K035HPFCtpWIlxkxpbarI5i9F0NjMm +gaIyqvh14xNhDS6NHgioDdNKvdNI5LYtWXGREjYJVCBIwdxWZHi5JsQgV2E0vfIZ +GDKWFfMIF2xrt6x0uvhWZnD94ecU0Dd8sFz7TKJoCdzfdYpoj5ROenLGJ7OcDMUL +knSA4NEVIEY0BVyQCb3TCjfboCRxRdXs+6yz4YEqTCzPNvQqIKKO6MA/X3ytmUok +RZIVmU8es4iZxYUXrHKeMzrvYVpbwwHwpziGwBr+SOkrS5iv5c1V1Nb+pSajtzAm +4tQnNoyjvB2YsEOvTLUNgaScY5O7Xu/FGhI6E9Y8KbD7nb2t9XdtEFgHiq1ST15t +iew6YNCatVA/GW3r97ediBjqAX35hqFSZ05yaNDlCgfKxrRiv2SHu+hutAX7cVLT +Aetm2mrJBb0ip7hQKrmUOpziT7iIXgQQEQoABgUCUVVRWQAKCRCHWDJ6EJ8lkdti +AQCDqrwsq6QrE1puqjai8cGvIUdY5UWiBVj6IjrTmvAdlAD/WEqresRrwQdoPJ6x +4VKJyJByQPCuJvlfl6nzpnBg2LyJARwEEAECAAYFAlEuf78ACgkQdxZ3RMno5CjA +8Qf+LM8nZhjvJyGdngan05EKqwc5HAppi34pctNpSreJvNxSBXQ4vydVckvdAJNI +ttGeWjVDr6Z61w6+h9rMoUwZkKMLU5wii5qJkvwGtPw5JZVe6ecEKJrr/p9tkMjI +jTHeneYrm+zGJAx/F8eCy+CzWwGacLw1w68IHHH6zsJZRhyNlSBc9ZJANRzXRPWc +0tzHfT7HtiN2dQK2OlFLRr+4t9KLFae0MsNRr4M6nBtOX+CBP4OdKTbeASyXnK8G +bpnpEjn0b4isr6eoMcJbNwVBX4XnI5RG/Ugur4es9ktOQkUFxy8Zpp8/vk/+hyWH +unr1G2ema2dak8zHIa7G2T8Bb4kCGwQQAQIABgUCUVSNVAAKCRB+fTNcWi1ewX4x +D/d0R2OHFLo42KJPsIc9Wz3AMO7mfpbCmSXcxoM+Cyd9/GT2qgAt9hgItv3iqg9d +j+AbjPNUKfpGG4Q4D/x/tb018C3F4U1PLC/PQ2lYX0csvuv3Gp5MuNpCuHS5bW4k +LyOpRZh1JrqniL8K1Mp8cdBhMf6H+ZckQuXShGHwOhGyBMu3X7biXikSvdgQmbDQ +MtaDbxuYZ+JGXF0uacPVnlAUwW1F55IIhmUHIV7t+poYo/8M0HJ/lB9y5auamrJT +4acsPWS+fYHAjfGfpSE7T7QWuiIKJ2EmpVa5hpGhzII9ahF0wtHTKkF7d7RYV1p1 +UUA5nu8QFTope8fyERJDZg88ICt+TpXJ7+PJ9THcXgNI+papKy2wKHPfly6B+071 +BA4n0UX0tV7zqWk9axoN+nyUL97/k572kLTbxahrBEYXphdNeqqXHa/udWpTYaKw +SGYmIohTSIqBZh7Xa/rhLsx2UfgR5B0WW34E8cTzuiZziYalIC/9694vjOtPaSTp +iPyK2Bn/gOF6zXEqtUYPTdVfYADyhD00uNAxAsmgmju+KkoYl6j4oG3a71LZWcdQ ++hx3n+TgpNx51hXlqdv8g1HmkGM5KJW31ZgxfPmqgO6JfUiWucRaGHNjA2AdinU+ +pFq9rlIaHWaxG+xw+tFNtdTDxmmzaj2pCsYUz/qTAN31iQIcBBABAgAGBQJNGJ3w +AAoJEIO1uBYaG9UOMXcP/0kA1SRdYd24ORdRdkVyhI8QqBE49+seV3iElKsk6e54 +auaQDhpSFXfCLbSY2tmEnxD2AWDVwUDHtBPuKXREr8ytB44MKVm5Ar7M1o/ner+R +JsMdYR1bxLxF4j5MuPgTLaZKEszxmI5C+eo8wvf5heFwtIq23HxO+7DtYO2XKWLj +/k7Q3K760YvLtO72awqfMXr+MxX57/L6qyWdiMNfNiT1uGv9BpixRGB6xbDN18un +pVKk3sLPcE3oc44UdkSuxVrqHXVMzUIxpQGqOf+KYk9s5Z0KijllK09uoZI3WyKO +R2I5iGJDuBBzbuMGP23Gr3IMRTmVNAEWmjpxgLC2j1t80ocaAkguejTAKTjjXH1M +WJHoESsBXKdbk2xuAvnvqQqZ7weZfLCBS4XoSGdg3teeGa/ZQOHDknrLurqaa2ah +FGxcG4lOrf0OBZWMaI9Kj3HnrcThmEOwIozL4SDmUvvQxyK5s3uZjphFAyxRhQx1 +fCKhnyA+D8oVtnTZ9uxtUWstIKK5RlOCxWJH3obvEGmGi+6E+zgDsK+ivqM8gFjj +3XmMpO6dh3/yZ6B8b8kanj4cYlCHhpeJ7v16G+FvGh/aMBlCopXAvoTprxQgXa12 +MgYzYGRyuviOV+PWo+RTTPRyYmJ9RLADKSdHwA8VUvHp+nxZucES1M9PxVq92hhW 
+iQIcBBABAgAGBQJQezFyAAoJEFOcQ2uC5Av326UQALBzrx914us/lT+hEnfz5aRD +E7TwOhrt2ymPVzLvreRcaXOnbvG9eVz3FYwSQtl4UbprP6wjdi9bourU9ljNBEuy +OAwoM0MwMwHnFHeDrmVFbgop3SkKzn8JHGzaEM+Tq6WKHYTXY3/KrCBdOy1sQPNe +ZoF7/rq4Z20CcrQaKdd0T7nAEy7TLQIXEnKCQKa2j+E55i584dIshxVWvNuwsfeZ +649f2FTGM3hEg527BZ4eLQhZQLHkjIY+0w0EB9f4AhViZfutakQf5uqV9oRlgmHm +QsN5vMKryC1G15HO9HPSMJf9mvtJm7U+ySNE354wt2Q2CwX1NdDLa8UUzlpGgR6c +d4PmAyVrykEWdtk/4ADic+tu4pTJVx92ssgiBAQoi/GMp61KPcxXU9O4flg0HDYj +erGuCau/5iUKWaLL9VBe3YdznoQBCzwquTs3TT1toXHjiujGFo5arl5elPv4eNfU +/S0Yf3aguYbwj2vVrDbp3JxYjJouxklxQ2J4jOXD1cehjZ+xFRfdnyUDV2o9FzvW +Cc3N04var7Wx8+0mtok0N0xTkJunN8rkxvVUuh32zJlFlvZX4u61ZY4wI3hPz072 +AFBdqv+B645Hrk04Hbu93iZ5ZgcICNZppyd6xZeBvqaEZXS+Zv92HCbxIBS9P7zB +3sXmQT57jusVSUdQtfJwiQIcBBABAgAGBQJRcGlBAAoJELlvIwCtEcvuoWwP/ReL +zhFKWlc/F35MvNyO1usz+qvs+SrlAtwaNcv3Dd9ih0mw+bH+U+PVVgXlk1g0NY9h +NNRLxt2mUc+mg9ttN+ha0RkqUYsYjg1Wj9bDuR0a+3DhtuS9hhEjWrBBT3UbTcWT +5lxKkUgy4Sj+Dh0N78spHo2orUN3qRw3VkHY4hWcxAvlXreuEv6J7Ik4uZ+8MMgJ +Fld4oVhMmnWOrMwt10D58URvZsGypI+dK0p2JSue5yfBWkSMpFsJ8z2cCOBMAPQq +9S63mhXZiORrxJS4pzJ87wcYG/H3R1pqF6I/49tWBlyZwiwOYs0fFEJc9idF/hSz +en/qDDQpvy4gNF48if7SGEtOBu1vEGqWKvNsataNcjYgj4BZhDlMHgAxWn0G7VNR +Vsx1D6nzOzEAlFa/PQgQfCXScJXRV72uKoMk2uuOk8yb2+toOW5LoS/0UbsnUi77 +VvknpZPbQPQ5svsGBCU1BQpDeFsQk4IMW5Flv1VVSEtxnfLi89An4HPMN92+qNUD +RM3E/eLkFnrPdiB3yMkjAgDbao5Gh+CTszQ118xkhmRC+pNCI75AS/X4V1WrcAJU +niTbFgBRZr4t2tWfLMgx44XMtVrKraROj7QH4rEODSInBBEWT2hiJeWm4QS1g5Rf +oym4ur02xxqhwXAsCXFGFKZirXDoTMHDds6dI0QXiQEcBBABAgAGBQJQSx6AAAoJ +EH+pHtoamZ2Ehb0IAJzD7va1uonOpQiUuIRmUpoyYQ0EXOa+jlWpO8DQ/RPORPM1 +IEGIsDZ3kTx6UJ+Zha1TAisQJzuLqAeNRaRUo0Tt3elIUgI+oDNKRWGEpc4Z8/Rv +4s6zBnPBkDwCEslAeFj3fnbLSR+9fHF0eD/u1Pj7uPyM23kiwWSnG4KQCyZhHPKR +jhmBg1UhEA25fOr8p9yHuMqTjadMbp3+S8lBI3MZBXOKl2JUPRIZFe6rXqx+SVJj +RW6cXMGHhe6QQGISzQBeBobqQnSim08sr18jvhleKqegGZVs1YhadZQzmQBNJXNT +/YmVX9cyrpktkHAPGRQ8NyjRSPwkRZAqaBnB71CJAfAEEAEKAAYFAlKGBO0ACgkQ +N4Uj/AufSZbFOQ6fbHEEerx0zf6FtLG2/EyK00q95yQY363WfM6fXvEbEHe8RThP +oZswxLAn96yfTNWXLhDS64muDntsPPpenk86siNzp9Br8qN1fKkZY2tBjyUtvGz9 +i+paQWowXPfFeV5WutjqRY3cn6xY4SXWNWyffr3XTYqublnWs4s+yJuHQeb3XiWX +4o8p9csmTuC5sJgmZpkvppRgzRpHAd8VCzzC/cMEVeV2+cbFon4sHw5NJVAXbaRo +Z/P4SoA6S2Tz0SB1FWNa1v9TEu57/f7l8XYdI6nL4y6imnJ/RZqgpG7gJUqJSwS/ +iu80JJqnZJ030hWrRZHHp2k+ZWr/kZgKGCxHbRCcQNpJCmPmSuJccVABWIkoKjgV +R4jXDbh+saGYLn2eUUzxkZmd7xaDSNUBhP2qdtKlGFc8ESL0qZkwixLhmpgUgFsf +7D/bGGJyVkhOji4rJDZx9I0K5s0JrDrEqO0nzYod08s7aaOcQrgMYcQA7x/Z3BlS +uRRo6KK61dOO42SzSbFSEW5Z8IEfSoUYHoyN81kbfC+j/q1dpwg+Bhw9PTqSWfLi +XI6H15X7H/Ig6NDK0U9v9s+gqmqG0AtQhEnCEqKNZFV1K8rnY+B+lNXMA0PIgxA0 +iQHwBBABCgAGBQJSjUjjAAoJEMQJSn+pq5SBKV4On0Gzb3r2SAx4CM9zAhGoQw81 +yM34WUHrkDESj2TrKw0sLYLMzM3wriEzFT+88buowSBT8h3ONNDijbj8NdjYQCfY +90bqgAROZ+W9/dmV2C9dJxmv5kWJQ/5D2ksuVpu1LUyK6AWXEkV1KpIcRHCP+Kb8 +EWaMEjPPQbNJ1KrFzAFfIUeFTbBL5kMmJK5aYVUiHWnLZq0SK5OlWGqBihuRLI7O +IoBOjlcoXvFoEgSkgUKpapE6C9VkErW60WCK91sMhaa8CY9pVDPaanMG2o73BfS3 +jGPylm4H2+8jlJ1+l5ietvoyiqOST1iIfOsbi30mxuVJ4JBvKtmapqpBwT6eNvCi +PKsMyjB5oWI5IVbK8MDIaYQM9TL+nyMGhl19GzcUMP8tZRlCifM9b/zmMMt1sgVY +0koF8AZfh3Ho9KLyXqNMUtXAFSQrAcTbN5SmzjlJtl+hz6uhiHH9kAeSX4MFRXX6 +JDfZxyAw72JqJkZaPEAKQCpodkNwNG9b2dedIBsTaD9IoEkryDtR17qV2ePwlCey +muwNnGVVaJ8hLbI7ZATbIaSn7XNvMGM8hX0N/ram5nTvrR2laG1o1ss5oxtg7PfT +rhMyCTrzTcxc8VskAgtbJjoyi4kCHAQQAQIABgUCUHsxcgAKCRBTnENrguQL99ul +EACwc68fdeLrP5U/oRJ38+WkQxO08Doa7dspj1cy763kXGlzp27xvXlc9xWMEkLZ +eFG6az+sI3YvW6Lq1PZYzQRLsjgMKDNDMDMB5xR3g65lRW4KKd0pCs5/CRxs2hDP +k6ulih2E12N/yqwgXTstbEDzXmaBe/66uGdtAnK0GinXdE+5wBMu0y0CFxJygkCm 
+to/hOeYufOHSLIcVVrzbsLH3meuPX9hUxjN4RIOduwWeHi0IWUCx5IyGPtMNBAfX ++AIVYmX7rWpEH+bqlfaEZYJh5kLDebzCq8gtRteRzvRz0jCX/Zr7SZu1PskjRN+e +MLdkNgsF9TXQy2vFFM5aRoEenHeD5gMla8pBFnbZP+AA4nPrbuKUyVcfdrLIIgQE +KIvxjKetSj3MV1PTuH5YNBw2I3qxrgmrv+YlClmiy/VQXt3///////////////// //////////////////////////////////////////////////////////////// //////////////////////////////////////////////////////////////// //////////////////////////////////////////////////////////////// -////////////////////////////////////iQIcBBABAgAGBQJRcGlBAAoJELlv -IwCtEcvuoWwP/ReLzhFKWlc/F35MvNyO1usz+qvs+SrlAtwaNcv3Dd9ih0mw+bH+ -U+PVVgXlk1g0NY9hNNRLxt2mUc+mg9ttN+ha0RkqUYsYjg1Wj9bDuR0a+3DhtuS9 -hhEjWrBBT3UbTcWT5lxKkUgy4Sj+Dh0N78spHo2orUN3qRw3VkHY4hWcxAvlXreu -Ev6J7Ik4uZ+8MMgJFld4oVhMmnWOrMwt10D58URvZsGypI+dK0p2JSue5yfBWkSM -pFsJ8z2cCOBMAPQq9S63mhXZiORrxJS4pzJ87wcYG/H3R1pqF6I/49tWBlyZwiwO -Ys0fFEJc9idF/hSzen/qDDQpvy4gNF48if7SGEtOBu1vEGqWKvNsataNcjYgj4BZ -hDlMHgAxWn0G7VNRVsx1D6nzOzEAlFa/PQgQfCXScJXRV72uKoMk2uuOk8yb2+to -OW5LoS/0UbsnUi77VvknpZPbQPQ5svsGBCU1BQpDeFsQk4IMW5Flv1VVSEtxnfLi -89An4HPMN92+qNUDRM3E/eLkFnrPdiB3yMkjAgDbao5Gh+CTszQ118xkhmRC+pNC -I75AS/X4V1WrcAJUniTbFgBRZr4t2tWfLMgx44XMtVrKraROj7QH4rEODSInBBEW -T2hiJeWm4QS1g5Rfoym4ur02xxqhwXAsCXFGFKZirXDoTMHDds6dI0QXiQIcBBAB -AgAGBQJR+DzBAAoJECIs6MQ2RAKIjU4QAIi24KlFH1hL0d45GsQswFJ3YiokF62j -pXRU2x7/+D+cJUqA4omjaGkSn0Go+J2MG8/bQST/Eioev8/PtHpPVRWyOq1ACUue -DFpvzXAmxEBA25OkdDRWiy2y2CUSwu2n/OJBg6+C3TIRyoqzs2YiXIDr9TDi7NcX -UP2Gd+xDWyEh5zd3xilAZl/SNkW73gen2GnG0WRMjzvJ9SSqYVFGw2L0oaSyX+HI -3ulAybWuYaHtwREcgcKJpRK7VMeICERRzmGQxaUzbBtsWf1lhVUaCjINbKEOOfuq -EqcRGsXl3AJw/qYUaj3CE7hTiUpQ0kcDw7G0NvuYOFqBjTAZVOpr45vbOqCqKp4u -pNh2KLsGcGqzBy+RubsEsbOmIuDImyjFLpGHOZv54mJNLQ+SDbbLcj96EPZ5+gg7 -ip7+e6gGqGhJEOQLWeXejTk2rAX5zgkHutmjqY7qZIe63iXnlq88B66tZct2dYwv -3M9t2X2Mkx3UR1UxQZ8wJjmSYSS49HDfIZh5NIz58QH8AePltBk32yMxSFq0lndG -KEyhE2omMyNYzSt0EcXcsaaiqrphQ9iPJ8fCY29MOkKRQz9S3P/NOZFQrqL3zavf -JX2+npx0umP97xPIowMPn0QZEWkTf2rvcG3s8s5jfUNOsi+ZcPazhjwqV6tX2Ovb -fK49CG3vTdcAiQIcBBABAgAGBQJTChVdAAoJEA7aqUXopP+Xnn8QAITUo4Kkapoc -g1wurgpYjetGyz9pI6PwtV27Q9xWWjLWRnZhlsHhSo1JhvNY9QBIKb2QQU+WGoBR -tWPIxm0AtbhmGBlKscRRMYfKB0U2pFE6HGDh7tWPzSzPWHKb3oobyB2bmgtBNsWG -BxgcoQESC18uZnYJ2ffk6N5BhU7JnN5PD6TeKFokengr+BVkxwB0sVP5Zahc7lXN -nj7mDTeths0ZyxDTzog9AnImKlJR7Qu/uhhhz2mYnobS9tgzvyqRtibmxd7RLwlG -Owf6/jUA3wmYgvN1B17reB1GwylK9eRIem1OPG0t2UV/i6ik9BFMrwruoeTDd1PO -nA7+SDLXC2tUbGwBK4PEKbD/IMe43dKQypaAYXQgWqwl3Lf3t9eCAfW0X/PRUdcS -xA8WWpY6pFqT6Eg1GicSnwarhvSWcs9I3FNo7foBcu3S6+wO6jK6/izOOymznuzG -putQnVCZuVIL3FF+QC95popTUjnTBRF6O1p2o5OfEOAJ2f76c3a/tZuPB5Z0Wfiv -uQXzDDsEC7O7SRkYMvCI8Vs/H6fijsOnKYtSvHRbm52R0zFWHPFCK36/tFaoUVrc -PNbGyFYT2klL177G+e6mJubz8nzPMLlWzP5utUYY8gM4UIaGr/mPudKmT2jFp/25 -oFKpxaiBrROsXVv9+NQ01QYtxT5BueyoiQIcBBABAgAGBQJTgEpqAAoJEPEtmFdD -7iYgp/AP/jMbTr4b6Vqyrbaq/SBAfN2SpcmqOWmQn5tiBRvG/FgV68v55dugKMia -B1Opw3zGgF3l8xzdjFduMFH1iZBgv7kaooguZ7ttV84g6xGVE/9XjOughX4KchIR -rpS8Qqba7Sr46vYrNmGhP6YMh9CmOG5ydg0H6MwXmiC9osKY/G71OyP8/K07ziE+ -iImS+oOsCrnnwkxBEDLkwo3engbJMoCNLK+qpSuatwydtI6Gy7LaZOIBjEuf8I3m -D4wkKU1SE6f6w83pEMzDq6xC2Y6hLCz1kooamJA08WJ01u1npIrje873yY6Qw0Wf -E5Jsp0WR4gdhbScL4S4m2Q9ZLW/2jWFbwz6tNHAfo3//nWZn2II6GDJfgfX7dU9W -6pid+p52bAAughh6hDjbH/eVF9BaWommIVtjjHAkpHWJ8V6vOKpojG+lyrrzi3Ye -Tw44s14BsfxRx/8TcSKzM+1jXNpeT27pIHohjvlRVdJtrw5MYUOqTGpVtH6GhoJS -m62sM196CbB7RkpEH0TojOenzQhsV+e+W4FVrb2QEQQqWB3TkmGLpiRt98FPltZO -7fHJmMSaqe+WOxTxrciP0FoyZJxYQM+NujQUTlMxmAdSw5AHAwhVIHmtreTEZKHp -lx984hSZiiKzdsTUr7/AcndDcdY9KN2/p3Zf11y6nPlYsv4ToDXTiQIcBBABAgAG 
-BQJTwUz5AAoJEHhUBU7O/hnkcfcP/3+Vv2CLClRCfpgPTLjoG31P425RxTLmx0Hg -H+ULaQI8D1Ymrx6j+UUW5mkFNtx1HkF8ebezH+wi4PrROJ5UNdJ0pW4cgMZCHlPU -2uh1d/THKQnVOaBlqsDIyOAZem1sUtJfkYsOZTnUbbyS4CSYkf9HTfPfQ3TWePS0 -gEhj2zV4r4APMPKrfAfc761CWu33IY0SqYLwDWPQezd50pGJDJYnBWJixArgJQK7 -PCkD6hRslnXPW/Vj8VqBptiNO5yZGAKn8UPUg5LeMeXTU21Qh9KQbQHAGOWxbpUF -Vnon3pVPms7JbrBA5I4U9Q3emBoiFgV+ARLQFcBehTBDOB3NHN+1nKaA0ZG7Q7Hk -pTH1ophpbC9q67NvQvDgihlMXHwZRF+EwKBafo1IsgXwx8k/0Ju+6N+i8PlbnB98 -yTpxgo2rnj+9V5HkfqGGV33s9Aio959mNM6gOrTwVZvL/DzfyyOXvdUl9nIWM6su -azxO6wWRItj9vUqDgq/byJ6X5R9rXyCioKhwe6ztU283hgpNAY7KkvTDyPpK+W8r -u2sfY0IENUtT6w3pHGVVkoqGox5dkcO9fv+Ok0eGOLJnperFcvGdd4V/mGLKH+TS -Ksn3fJPs+9MH8XMKcp6NDbki3aff5vPDfj2+nhwLXG6WT9l9IPmAxeaw5QxmkNDW -v/2zU4RFiQIcBBABAgAGBQJT6qc3AAoJEDov2JR5p8TBfZEP/Rgw5Imwcis+iIVV -IW7r3DyW+A9+9JAI8muShU+Z1zliImIwanwNWn5RbcmY4sohQ9SDmnb8L7wMuCNY -tXR/neys4J2qn2pcHH/TIo629E0aVMRjLBU8Dok543ONx2BSAGuRPyXDciPzn54k -NVXpAW5NI+Z5SniEQRJ40o9YpPQHcjGXnNtUc7pB9r02JsIqk6WX1iSzAl3Ke14U -ySlWb4urmamvomlufVYOtGdxeniN1lgbrJY/BCb2b/ZRr0gupv3EEEp/uU2TLBVu -S7yGbNlf+9gZk8ZVl117HVALCnpQ11QxodQaGH8HuR/QEex4Y802DyzEsT1Fnzm8 -eZcqZB9QO8pjcrOTU3yCiB/7vwpeHymLkogrMirQaxQK6OUnEhYNOuj9Det7cJPw -zeinerHaCNlumxKbB6gm0w1R8tjqHHkzcjp9rEoH1UDf91ugHCxevhR11Cz4ZSwF -bkx6+sUr1HgK4fCktKlkFcP8adDSncz8N2btbLmWYfBSKZK1z6GStIquwTLyLFrm -xXSGrsiLaETXF0S0VI4IuCggggDOn43oxqoISSaMJTPr//vhbL387wwXbK271wC7 -WVAQmslrZhImTzLbxjTvL2L1/NCSRBJXKalRV9HCzZgGaq+7LFwz0PQeciSK+ijq -RrmQKIyxhDVjcXm4OU6LrIFzoAzniQIcBBABAgAGBQJUOeCYAAoJENFZiZ/T2fiy -0AcP/i7qIAAFw6wqYgojDsqA7/YifGh9RGvrxmC4dWdrgLxW7dorUh0uw/JLn0JR -rzKoS6EF3hHrPQasmCPyz9ckZeRZjIhR5mKYtqrWsF3vpaL2VALXsb54KqAR8l4/ -iT083JTm1mvEbMJ4JFVGNrGNVIWYdDgfQOKzD6lZtwRZTEjY5u+sJHS4VRvjAju8 -2vlmEx8hmrcDV2f+9St40pThNR9o1Rcna562NFldsccL7fFL9uM7kmFMGid5JwaR -U4b/iXiSZ6YctNQyfitkOWoHG0aKXvJM39WsJulHKCekSi4z4nNd5hZgMRFG2L4f -zgcm6wNEh081yhsVN4xHxURT1DrMg3Xtd3Zj4wBL5XFHySludRCd/PYPRpvfcCwe -JJ/OTroepfr3DGw/Qo2VnZKe+Hu+4KpZnB5NrYIz5mMcysJMDCXiA9YdwRlF7EsP -/ma8FdYpxyrR61+GRY+ANUP1KMqautJj9qW7HtIbqaZUFAuhmD6uetcCraWD7EF7 -meaFJuozenO9fBzBgcpJiJWKjNElJxpaPXiWPC+dQVvK0jpy12U1UNp38PBs18M9 -w2eOsC70tVhko53rCr1clL5Tdb133jNWo+jyWmKcYFKARziGQ3Q3GTE8ycRwebZe -SgIHYLzm9zHGZQc9crpC0Mfoa09vcbBNyt5NRT6s/nOE4tjTiQIcBBABAgAGBQJU -OiR+AAoJEJo0q5orsokP9bYQALApojYAycnlIEF8GVnt0fbzSYLwGBxWuMMmzdiH -3HHvTxsUBQ2KvcBRrvSC1C8gOhpYdouI+RSXPXb6pkBHWJPFmGaPp0RKqgLMDi+w -K7zZiPESMK8vaYJS9RmLS2KzJMn30QYQ0VZfrJiw+K5ejSgdoFz3pOpcJCNlBmNj -MocA6M+u6O1PDb/OOqSPRqSlzZzu1S5HyDaOK32XXZj20G7ltwn5abtdk9KO8KWR -2b3ZT6GMzxx3L83lBL6hcg7a6NrYQKsXdUP/HEvt6pnVBBKTk6LzjRNAPp79a2w4 -muT0rMAfHdGzRhe77828KTlTllQXFBEKH6m23daQAHEw1ydB/9H+rG+S0ulP1V2I -gza9agB9XASIgRKLjwkDdMzOehf0oKt0U6P6kpytS3M025t9yVA2qUuG6A5DWwwf -uRrY+dxUdbF5ZoQYEJuXDLk7vVuh2ggJcnfGZ3fIHjtCwvdlMRQzDX2adpzoNy7u -xZHz0QBbaSOeFhGs2xE1/lLfpRaWw+ISXdp7b9HjB6dI6bSfYUP940Mi72d62HUH -wbWNkaWe4afeCAWbTuHWCe4jnPvTBF0t6mU4k+lwWo0uYYjJM2W4V0OGSflIdB8s -gOxgmFzlB72TIwwVBeLrCUEvymHqbTUedY4jUSWrL15sfDjxAhGJxZiHe4ydqxz6 -p29jiQIcBBABAgAGBQJUZxhJAAoJEM2XTJo5TWM/ICcQAI5555kQLg4N5+fmq1rN -6AwjrI4lW13IjX3H462PMvVWCHgJV86o+5/ab747czA0xbszW8vr/K0iayA6hfwR -VtuEXfQl0Uh/Taj4+fhI8cfW2+5EX4+lpGrOclVCsHHVfVZ/k8LL7mbjboJhG9xj -SFf6JBtqr+/AFsEIA+MYGFBaiLgL68j3CJuDPvjkwpC/Ofov5FRdPEOWQ9odC0z2 -yvDjAqxOkmDjjZ04FJ5PBnNbo67AOUk25shqHuBzVH94MAP7Hrg7UaFeRQiMgNEl -f8qsTzukyzCFPxJ4yyFpb56dDil8wxsvGcJlOEfT8sAi3YT2J2QoT7KAcES+aYgP -1Q2Uj3gv0MkaLNcDtPsQGksbxXoipq/Wygj2UwOQh9ZVtycjuv0D2LyfPMVTPVG6 -6DYUpaverd5QB2nT2LZLxXhOH0tIqmFUaoIFrvrLn25A0Z5QFy/HbPfWAw7PJvgj 
-l6AoxT3nKvozt0tUdZ2DfE3h4zjBXDiv3Y014FmhgqwEOgnCn89SR/7SHMJsMKp8 -oyn1mfzsncs+gqcpOhvj2XfPEpnObDLqg98J6eyFGDfhEv1bNEOB4IcFF8YrUNEu -6/rS+l7rNH7vlw7hVWE0D1EnpZ8KYk8qPlOuHDHLMgemECUbR7Ogt63H3jqFfDAh -lKBUY3hG5S7lpWiLzcQccYZciQIcBBABAgAGBQJUa/DbAAoJEFyzYeVS+w0Q16QP -/10IdfE8aurLIfVMURxzr0CWHBwuAGV6mCKAriYRaEEjMWFThYsRtCS/CGtdc9Bx -XU5GwuHFcHFuBCP425I9kxmxh/Rc+w8A/ZZAVU5A4gaSB0hkM5oZdB2QwYmXrECE -Sdt0iHxcz9/zyB1R4q2KryzbbkJNJJzbOrGpxG6vh6Dk4B9rFJeRYc7lVfH3TqiO -HCljlHBdEw9iQDGl6IFuQxUqOJNJK75p+4/f0eK64W1jXI2bGekTAQ3V1mA9xv6P -+SR+NjPg4WQlx6sTyksaxbkzOcchyx8zzm1DNH9wm4NsoZKME4n0sCIB7CdY7oBS -FxJfyRp1JSPrUwdNIX8kSsdgJpM7ORgZkojfWWCqt6unlgRsZmurFYigzZFWBAGR -eHIeHJ54eULpg2QPKnwwWuwYHdEPp/bbuaLcPQcklPOGnnQynBpUvu3Ud/Fr7+4T -MHmOI/e5EUUyKbmK0pJLP36Lp3i28bHUTALF2mrDlx3+oMRjF5iSySC41KikBSBi -pRx0WO3jFzdS6NLVdjNlxG9lpiHCkc7bHz9edMvuAnahK/EbS6hFUEkWQOJtJKc8 -B8hXJmChM2YxtEDVv0GngAAwcHZAvphFeuy9vYf2S5IbIqKMNrKgq4VQ+jTqHHXI -57LkGHDCY2igDHQGo/StbI4s8Ow5btQMdXPnAO4rZ61FiQIcBBABAgAGBQJUcelG -AAoJEJjdu04iyiyDqF0QAJUdUtSUzHV3Vo36pbamTnCtyOqEp2X5L5wCjh+UAw9K -GeZu7Jiiz7ueQqxKQtz0miLnb2i3NeK9EWdoaKrM1+PIym7H40ATaurleKD9sq49 -b859tz5iy6DLh1YPeeeuQV/NbjJyh06SzNkMjke6S34CcpDa1OoczVsI1RufWVMu -q1C94+PZD0yCCVLjMUD53c0AldgsFXdd2oEU/JPd7P9wCYSKV/+9F+wRa7/U77HR -KNHd2FCshmJ1mbhk3BFHTALFn9ld1/mqtjUTArt14wxs5GxsPkr1YsWQ1A8uVUtn -W3rsn0UnP9bFcAfn3/d382HhuyW+HOV4g5JhKVlG/hBbvdL+HV17Y+YksGeQW1sK -Iqmjvr7ArFhCIUYo4+emyDEjQmTfuv8RRO1u4yR0iAZqlkk8/8z53ewE3HEfepwx -uo6el/uuRuXfQOmWfdNENjd9xn5gzIDbwqwvtZExjN2PolbiaSLP/3pI8prtrOYu -W+Mk5o6iucceazwdPyevOhoMuW5gZFffKo9w6TU5SRGcYIhrTJY8C7h2Yumsmir/ -XXpLaadcBp2R4uDEoHb6eGlXqvSYMED/mu1fw2VOuKosddCpf/JkwXHwwB39z+dX -o3HYSofyec1mb3kcAsOUbTkAh86IWN1ymqcNTyytK8AEwOLHQ1f4o0ml7n9Xzf1H -iQIcBBABAgAGBQJUsRPJAAoJEBe/lIwEdhN9Z5MP/3Oo8Oc767lRFi1Oj5FVoHvR -xfZvX3oKrG3jphPlCBgKWK8xR7c5YECNIwnlQ8uCqUgxpFf8/iPV3xVuO1HFwDna -fokTqyNtKz2XgpmyfteV/02e32hsDNGfaDCkqbUC2hkuDfWWZa/g0tWfSCryZaI6 -OkoD8UHSiYeDwVzLQXgGsR08iFP9xiHyQHNtCpy0HHeOutrjiWibADwEMZ6n9/1D -SqTQkxnxBwIHpGqK1M06QQT6ty2Bbm16gru0N6ulMr3Dc516PdOzQzqo0T7c2BzS -4wOydYE7UGEeRzuzA7Q57dVK+P0DLtqhiblJuyxBgMLxKICgEeR6ScjWQpHW19bC -wfmbHIqHeeNCZCirF17KEtPqFCv5k5uzsqPvRv9yVwjo1/LF+k1iFgRez41AvGlN -B+VrzziRK0YvdfS5wtQ1I/a9m2g+oyWPj6c3p57CrqxaSiGa+FOHOxUx+rQk2AdB -8l4xtG3HNuiwjEy75CbKsHwIBRd/9kRrGcilb16/osU/c/jr4QopKU9HKhb0DIcl -pY8B/ZMdYV3uG+oy0aLlld10GJ4SHW0x1uB/rZU5zireTudOb+12qMfF6AyVV/ts -Aq4pELEVFD4INWxgh4EuzDAkJCvt6r7XfmojXTFR3vv9fHCc8vAVwRdbxK1NKn4B -mMUVlSwZwLyy1roeLveCiQIcBBABAgAGBQJW5/QxAAoJEPvqMRCoU3iU3SkP+wRd -T8z3EczONAcvJsu7ZHgh1ggzsmozTciSuaAZRfvFmUyB9h63cKNTS86CIrqHmMZr -tHRu9llkNNiE4Nj8JAAsMPSR4YaKHfHxc3bOH0iWtcPxtIiQEwYs/7oP0/YzFAxc -UmZBDeLvy7aKpFqdPUcEhMTWmscVajjJXv+6G8IZwYGFAFvSkYSimZP102gmgKQh -cfPDqmlqy78Ft+T5MfIha1Q950iZyAM3j46lVWMkBaKPQKq1G3kKaL7Sy3o75y4N -7lgzY5WfYnBYVAU8eUjv408FoFKAYFTsA3RG7P2VROoNefPaLRSgEgZPR6efVux9 -Z3R4zOUQuljvq8r00zMS0t5RVcDp1gCNZQ9xv2QeN/ZDld0U0IbDQRrlT15+l3St -hkXapMMvbSVKEILMgaL+ysl7raMW/Zqv1KN2ByVJsPjWnwWCPnn0fMFWr15ExzfZ -BUNh2rZlQ56jBsJanHF69Th0vI7JNm7/Gd5FRWL8RcXzAL/UbVDuyGaO2JPztQ2d -L1lnHVL5mgOMjs90YpADenNR5XkQxuazTRiQIOXfoZhgPwe99S9vEdYM6UPYZjt8 -uo1bmFEkV0CGjWngJc2ySSurftXPFJ7gzFhDbx70Ga/1lw/4H2RPs9ZiZKKTtiGc -DLhDxSuX5z3MgzzD3CNp7uKJQlTIg4aFeX9JWQvUiQIcBBABCgAGBQJTgEwEAAoJ -EBYg3FrGoH2curQQAKKAZDUbPFSAyRMFlr3TFAYjzPgHz4+tdSgwFGaXjHb1b0Z9 -MJKBkqjoiTOo6ysTOzFeOVuql5tFv5lUR1ocHJHtIX7kARvLrlaAMAVPsG+f9Ft7 -jNg2B0E3uokZHUOCXdvX8O5KNMFjiT8arYbiw1hugAJrQ1KMKIv3EsT5Zf6AnwXI -UN8eI4hUjZrJqmx1jjhKLam3SLuF8YMpAIAFwFb/OutQoRUU7CQzVb6/1B5FCIYd 
-SWEHv5tT6dguFyUC2pjxIf7Oxz4qntPk4HDJtr4sOBj46cNUsW7Xrr23wpvabCQW -YcGQc4gK12bB9uyleIo52UoDqjqLddbhDDv26GuyJVu1mlJR6oW6EYtRLZLb8cp+ -9p+9vWtLbp4AeyX3NGtY5iyZkGZCj7aks1DvxpNdcdU2u3Qp4IBZyneVvVYaj+UM -y/jrVX6uKYvKUEW6xHsR6g9DIGUFK8dexYdkRHQ52ueT7W9cA6V8jzME8CE8YCtT -Jxw/IQM1mHbcHkrx1iNXz33Of2qBouqMf2vDXyAvd/ilzca+dwOyoSGum2tnpD2M -nCROrfo4eCeAOb4bZ46hEayzr6RNtmtUgnrTmV0iIxDkxSzGXfjWWt11H2W9H5bg -aPG8dEqKcxFLYPOnCLJfvmYn4hhy72MKqdI/4/DlHHa13gBuL+2cm1pTLgltiQIc -BBABCgAGBQJTgLe0AAoJELdhiDtEKL3AEToP/3kV24dJyYCcqzWg2NWLHUACkeXC -GOLmKSoVVV3oFzu1OnZ9KSdhpwU0M+b199GAUM4Q4o6cIeTnqLd/plfWdNDmEtqw -8T7hyGJWAHkf0n4c1nNgE3QFW4ri8zPeWPaJ3+nDms7GpIbYcLjLLNCzSActo68p -vaKrn6EQ5UOub97g500VjWlcS7qfXWlgMcKvLVLUNHBgVSxTyghQDkQwhRl1IZB+ -LSM1p1qHgWYZdeMu7DXzK2m5htscHjcv+BlVxRXCPFf6zh7ZIKnaZoWKiWAjp2zX -y9VntYJ7DpbOmYukH7PWys9b26agMUa+iHylBPlyijC2dvEEu5+myqPBZk60T+On -trTp4PPXpX50TgylbabM0glxoHJBvPtgyOW5QM4UMdn1WAX3ohW+9y55WyMWWPXW -nrQl9sZ79QyKLmoPJE8u7pcOXBpBJ5NvLghR/wRb04DPXjLrRvqE5V+mPpIYFFrG -XD9wXhjWsgMIVC6oxGH55LIS2ZgLto1MJ7HfMEPWG6zkx/NIGss1Xxbd7ZOMvUFi -eY2l7zWWVDs1aAA3ydc6/tA2ekjvbRWjOkIbA2ctmdGqo6CfqiqZsDhoqDs+xY6t -J0IkOek5TRAMGbN3GpO92n3IO5BLpZ8mzoi49uoDNiVlZlDLViWclETtmr9Cfvav -YXui4CtPbIsik4utiQIcBBABCgAGBQJTgSAwAAoJEF1w0uvK0snmuUcP/1kWyfoA -qIt1DFY+Od+vL5HY1IMKG62t9c3TTff7le+QtOG7fvu0IHFZHpsiiYumOvhSDBBo -0Bfy3aDHF13ul+hcTfDzuGdvbDNoma+GO6ccW0ZrFjD3eSVrUnO9nT12sTqrWl5+ -/GywcuH8htfA6pL60GgktympcMbi/lvTtFNW1Dcfo423f9bYdEkN71+P1UfT594b -bGUQclIugeCLHsGK/GIN9tAoBOpa6b98U28cHxs6eoWaTRu1fhAW9MCP4Juj7d4O -vfPA7o9XIRrQzcKFicpmRi0VRe7zB4btbIMie8jhMrUm1mez13PSVB8LB3/bivxt -DgqYBy+B9V2dNQjYE7+aT0g8JVmoXr8WdyfP18wD9orWUowpBBj4R+RgpR/S8QfM -lZJMfHIkAhSAYIwaAcJ4dboaNGAEtKsS7aeH/6LUUGIUuUeTJmFSn0o7v0hD0PUG -d6Z33/v5JR4f5esaZwZd38SSjj7lObtzdgkQL5sCd73gTLhZm511DjNasnlJpROq -KeB9LQCUON463vX6QWLXHtD72gaG4G8SRIUUHjt7vd9UoVUwqoV2N5ZRhoDmg+La -UpnHz1zdbmVZrE6WHBDqxB1J3C09HQbV822EJAW/CRDrN9Y0fhucWN3TFQ6ZG+UX -USWcJg7zxyUYB0tOMNULNvC+XrkiahmtxspWiQIcBBABCgAGBQJTpLA5AAoJEHQ3 -f59qR5Gf128P/iBTk6pvJaqe+17zV3z1G3WVyUtQOdMkVptBuMtHIykZZuBUQmTY -XptQH+t+4da6pMFrxcsqu7JZAvelkz49y0zKt0cYpKivG/87qCAER/x3E24FoMkV -WlrsN5J3STT30SXSZyL+lVEKU5zuqgtK2wjstn2xT4TuhQOZ3CDSWxjWBjbqcl4P -JOnhzSlRJL28kq0Zx8SukxVwTpJarIKSL2dPivy7TZrlSdPO7sdIKnaOPHnekVaF -35/SfTCm/sfnaZcCobQZd4sJij+xgc/HDJJcfsROhRUK9BvlBzcJDCohlz3FnOyK -Xjafmk7nVfcwqRMlhX2rsO0abQQKxxnVzoUGSBf6SpRq3q1g2SRy7ABe2YnnCl+c -q9acwmH7S0tzGGNLwdjEAHUA/1HdVq984kqx2eUiSCJ1vxIuHR36cNQYdyplnxr0 -+bn9Yb/wghF6E++z8xkX6WxKT/oWV/GTqL+jcH2efOOksR8MfjmTkncRsESbi1X/ -xAt6Fn9hv+qJUas7MSkKCkiOhAz7ZRxZMu2Qd9Vh//i6hP7qs8aMNo+/pXlYwJYJ -YGuPo0oU/NhWm7yTM+MDrdBZDZ0EvP+t11R/yMrHk+aFXqEqTaB+Uw/LaaHMWv+Y -DB8mfRUE0jbFipuWoSt55ElemSa18nnRcgBTbFL+U8Nm2IGbDGeqToGniQIcBBAB -CgAGBQJVfZS1AAoJEFuCGoE7lKfEYBsP+gOUOmmHg0c09v/iPkel7JJGcNnipk4z -8xl5nTxXay4nTY6TKtelOhQUBqDHBqdOe8PNWVutXqSDQKyzRPvXJRYgF2i3IUHq -/GtCK2yPaGV7XnYfEvddXmjAlYS9LkHcYH7zp7vLMW/8HgZ0JjeHAfmNF5+Q62rk -DUMVBnSRVlA+1mc3/o1O5p/Kn1Tt47kCkLJUMNyBxXl9BnbqJtFWKzoqgMovr2QE -IZeUQzlJKygexnU4tCP5q5VefVqaVnEHkluXJq9knYK/G3c2Pet/GEDe5Fkukzou -QvcqGaujjvc/pmT7VISkeO4YXvmfctOpggJ9J/ohxg4RgvqaRYdGoFgnNQMEnFLI -xd5+8Sb48mskS59rVwwOllWsbR+6T/ZDW8FYmpNzzuK7Af/JoOcWy7/j0fwOhJa4 -qX5aKgph5S/rE9pvhmhbkgZta5m8GQ9bHInQnbefud5axRtSyx4cG1ZB/mRLFD7+ -kkVfW/KrtdP/7PuuYtIP/nEhs9HnwOmcoRI1WpDGERC6eUc+Dgc5sFD16tvp+2PW -8/EBAWQK55b9jZ4Uws0D/3Tn8BE0CP1lJCZzIzKqbO4+VhWNq0eJgwZWTUNoXQuF -P1gOhJT+yqtxBRBP9YAOg+bO5kdjqS9IinbbYoaMkY8rUmqrF5r5XNob9mJzgF52 -2npjWOx4P+7KiQIcBBIBAgAGBQJUyWhmAAoJEIHFzE+IMpocFMoP/RJWptx2l2qa 
-aJW1r5p1F1wSYHFgkUPWgS2mNwcgkFgGm0+QhPXiNAw7evt6aTMLMatewzq3i34W -9rIaNj1UNs7VFYEVzYzWrAGlBiMgkmvHpmMmNIoH5sOc6D8pzxagOalvHjHXXabR -Ch6r8C6FX2jpQmwYVT/lF10ARGoQMW59MGFhUcEPfGVTFWgSEj5hgKvLhvDYj3Lq -LreSsiKuVU7yU+K5kMY7q7wT+8jGt5zdoV/99OjbJOo/a7gmIDHGeuJnSuNRRV3D -ltaRyk0N2FQcoB96q53++BdNXwDNTVA3eKVcrjpTXJcxMlpcmDvaF/KlIpctEDIA -50aTNlkLvRLMnPTlFMeoNyURSc38HO5c35chioH8zd+2Cs/QHGyI+JBlTZOOodUB -4alKB6SKHwMrWpy4+JfSxF+DUEW0VQwj/wXEpi+B3HKGYI0QNuzpEGZ1qvaq0Vi7 -SqlcyKbZuvUGBz/RdKeAFiSjmOOQUbm2cebmFQzYNr8KWPt42knV+PQMet92aaNV -WhgPp7Z/OcvpUABQZBPchJvBRr+Qso+uqQvLRvlXGD+rRni1/NZxgnVh1cHN7CiF -IJOlE+bBozJ+xtDx5ZOAlH5qWJ/bm19zQDnufWxocqNv3ek8DuM2iyOmvpbi1REi -4ASbhDjMQDFmRNYx+3bIi80KJEnC2kZViQIcBBMBAgAGBQJWOIXXAAoJEE8/UHhs -QB3OlqIP/3lofZqqiV+uoiTdV91Tjmij9Rioz0kohpQsm/tau6JKXItjG7DaG3XP -L6NPckNGI+twD393Hdb/VkqatbpxLeJUQLoCjV3M02p6zDJHQ5wPiXgC/8HZVdcP -2jlvnrkg4N5dpLJJK4wpZ/KXMsw/SrBj047ZnySIl5qw9ytXrQm58R7FBB/ANjEN -vo9C3LEsaDAKv0TL4vyMpz52TjUfgoz68g31Sl6KKOw1HG+dUB69M7MARSVEgaWU -Om33eM12QQtCTndJQDg+LeYjfvfHbcnMZnniCZR7rHGxAhBzgKQqJU/JizfZ4FDc -BkABhsUQgkSeg3llFVzSU1iofT37A5cbQr0xUShPQwKgkESryuyL059neVsAhDY/ -hFeyWCKtVQ12i3H7cvzRlfYxD8c/mN5TDiC70Cft1pcLU++u/6Ga1kuzA7rkfoUo -crCSjqb9FwLBokWcwbi7SyA8YD5m7W8sPINx7reokK7mvDsbOxpBp/y/yT5ZpTjK -3/MNgESrq2N+Qg9EFC4Srlg8wzovn0zamzb2xDJpLfrV/t2DsFrVf2SWFd/YMjkl -jOLQhbsEpQIdrfS8/hNGgfoUIiko8lqNi50sGQ7kO9kirmjCZaAuOaOi8U0K1C9R -vVGTN3oGrxzRRXeqt2Z3bBqs5Lz5lrCNkerWZYXcItIyZ415i/FsiF4EEBYIAAYF -AlpeZjsACgkQG7icBgI2dEl5UQD+LepkokCazIBkNFnZraHcCESgXDW5f8f+dpOx -ZVo5Z0sA/1FkP70D6Mw5HbRuebIZJ6Ma56I7+Hjg2pVSs+vJ050HiQEcBBABAgAG -BQJPdxJcAAoJEMP2qyU7W7Bccy8IAJSvbu6RkwVtTznNXGtGFXqVsCP/yJMAgU2l -hLMAl6yvUMk9IrRyKZloxxFeBObqQ3urdLQqXeDIJmhrIoxix5Mv2VuSUJ7vj9Gx -Ts+w6vldvPHc4BzJWR8YALTngfyUURMuJXV6BxseXvdq2WhOedSptLgGKFgZAQxG -/LcUzlLES32H1IsEHnhUhbmi8yrrR7sTi2KD5XBUJf6cDeEbwBQQd0MDrr0oPOe7 -wLJSNtIbYj0hXAQug5AezJrh0dWvBqJIZxm9HGTsMc+dnpgWamVvcBMXdXxtKau+ -XxBfr35zVFDylNuULr8hj4ZmtOKKILCT7BCNQ5HpkVTXHru2kwaJARwEEAEIAAYF -Alf7Qx8ACgkQo/9aebCRiCSTowf+Jm7U7n83AR4MriM1ehGg+QfX9kB3jsG1OXgK -RpGPIORqxLAniMFGQKP/pqeg2X530HctqjpV+ALG4Ass/kNn4exu5se2KuThQMKL -K7h7kfqCnrC8ObeCM7X70ny80b2h+749xWZtahpTuQwVrhcAikgPfS2nXSKdubOy -eBH3y0kT2zAoml0MOQsUb6yGycjdnbFrKvfINKfuZvF+z16YOu3eYZ3NO6dErWQ5 -iTecuNe0nnn30D8+nWA5JfCxNDPfc0e85dm6xK6GTPdaQd5hpF14TdYZu5eT34BX -JcmL5hJ6MzM+OFn5CIn2Xa6r6h9AOp5C0o15Qb6SXpUdZrV/34kBHAQQAQgABgUC -WCj2AQAKCRABFQplW72BAiXGCACSHG54fSeKZysDiX7yUnaUeDf2szdvegD+OPSV -JQhcDdhyC/YnipEN4XFpeIkpxUrBXWYyy5B/ymzDQl95O8vI6TnDpUa+bvpkWEAl -BK2DuElRojXfPo35ABu0IetQ9xyR+3IzaepHL7Ekf0n0H9vFTmeyYUc3B1m7RDwn -UJuAlWRt1qQHmOejkzTDBZALeg+BJ5PtnWqCr29+JZB8cwUJ3Ca8YpbiCrXWYHu3 -jlXDDyEhQ73t5OlruOMiYp+opmRySu4rF2d9yJIXnq6uf0WNb6G6JzlVMOqHKvtm -rnwXb9zlFTSXb/NkxNmbYPrTvKmSr09YDC/p9iRkuDSeI/OEiQEzBBABCAAdFiEE -IFnjmbk0Pj2JY1NS8U5YASgDCxkFAlqf+YgACgkQ8U5YASgDCxkWRwf9FHB9FN2G -FXNhPGRrgtSzffos5ccxXGFKuzmNoJzceQNpecWbsWuzCG5gNOKlROgTzRsIV98h -Nq8JWhlViEHq+fOUwt7m7pnPRSmDGIW+yAo4wHGsqgO0Y69viw66Rx4rG+g2ADdj -qFfo5KuS1rQOyeF5MMJKPj8SvPLxWcfjnpdDg7OOnzJtG5FPviSektDc6kMac77I -nF2WstLBykhxpdhtoYQk8uYdKoxDQWMqNDEh2pJkAKELMnHl898uiNTgLqgOQoNA -C6UWVITDvUHqoq+uI12ZW6x2mwVDFWIQnTUsnhEnPIlM/zUHg0BuTmUv5/9x6XvW -fJJkHis8YEBXXYkBUwQTAQIAPQIbAwYLCQgHAwIEFQIIAwQWAgMBAh4BAheAFiEE -o8Tw+XnKoizbqPUS7oy8noht3YkFAltn6jwFCRhLy9EACgkQ7oy8noht3YkhfAf+ -L/XXwlc/4k/sWL3A4Kxe2LejqrrfSGdzo6A9JQTkwuGzb5t2UbynACNpbYxFlbdl -g2zOH2rBx72Yjg4EYSyzPEOmCMvwAO3ekBmreO8UyPV38b3c6mss9JxTenkKokFt -BqsAnUhryykaGlQ8fZs87oXbOtpHZL48DG2TlSiQ2k4j3YjiXnsHlPZpDPfVHrU1 
-wlcxciI3SEPQNUxcRwHXkGtAcXK2P4fmRcDSXcgISh43Dg9ikV3yPLlJuxa887/u -Qe2ytHNOCgC9GhGyCOfQV09lr7mKpfJmz2YR0xZ+NGd6n5Tvs5GpKwoc30zo9eOQ -f6TAnQAX6w0NWHhKQEJCFYkCGwQQAQIABgUCUVSNVAAKCRB+fTNcWi1ewX4xD/d0 -R2OHFLo42KJPsIc9Wz3AMO7mfpbCmSXcxoM+Cyd9/GT2qgAt9hgItv3iqg9dj+Ab -jPNUKfpGG4Q4D/x/tb018C3F4U1PLC/PQ2lYX0csvuv3Gp5MuNpCuHS5bW4kLyOp -RZh1JrqniL8K1Mp8cdBhMf6H+ZckQuXShGHwOhGyBMu3X7biXikSvdgQmbDQMtaD -bxuYZ+JGXF0uacPVnlAUwW1F55IIhmUHIV7t+poYo/8M0HJ/lB9y5auamrJT4acs -PWS+fYHAjfGfpSE7T7QWuiIKJ2EmpVa5hpGhzII9ahF0wtHTKkF7d7RYV1p1UUA5 -nu8QFTope8fyERJDZg88ICt+TpXJ7+PJ9THcXgNI+papKy2wKHPfly6B+071BA4n -0UX0tV7zqWk9axoN+nyUL97/k572kLTbxahrBEYXphdNeqqXHa/udWpTYaKwSGYm -IohTSIqBZh7Xa/rhLsx2UfgR5B0WW34E8cTzuiZz//////////////////////// +/////////////////////////////////////////////4kCHAQQAQIABgUCUfg8 +wQAKCRAiLOjENkQCiI1OEACItuCpRR9YS9HeORrELMBSd2IqJBeto6V0VNse//g/ +nCVKgOKJo2hpEp9BqPidjBvP20Ek/xIqHr/Pz7R6T1UVsjqtQAlLngxab81wJsRA +QNuTpHQ0VoststglEsLtp/ziQYOvgt0yEcqKs7NmIlyA6/Uw4uzXF1D9hnfsQ1sh +Iec3d8YpQGZf0jZFu94Hp9hpxtFkTI87yfUkqmFRRsNi9KGksl/hyN7pQMm1rmGh +7cERHIHCiaUSu1THiAhEUc5hkMWlM2wbbFn9ZYVVGgoyDWyhDjn7qhKnERrF5dwC +cP6mFGo9whO4U4lKUNJHA8OxtDb7mDhagY0wGVTqa+Ob2zqgqiqeLqTYdii7BnBq +swcvkbm7BLGzpiLgyJsoxS6Rhzmb+eJiTS0Pkg22y3I/ehD2efoIO4qe/nuoBqho +SRDkC1nl3o05NqwF+c4JB7rZo6mO6mSHut4l55avPAeurWXLdnWML9zPbdl9jJMd +1EdVMUGfMCY5kmEkuPRw3yGYeTSM+fEB/AHj5bQZN9sjMUhatJZ3RihMoRNqJjMj +WM0rdBHF3LGmoqq6YUPYjyfHwmNvTDpCkUM/Utz/zTmRUK6i982r3yV9vp6cdLpj +/e8TyKMDD59EGRFpE39q73Bt7PLOY31DTrIvmXD2s4Y8KlerV9jr23yuPQht703X +AIkBPAQTAQIAJgIbAwYLCQgHAwIEFQIIAwQWAgMBAh4BAheABQJUA0a8BQkUqY9H +AAoJEO6MvJ6Ibd2Jz8cIAKfXu8kXq9b9RqMsK632pt2n1jcuxtGyOYH/fFj64ZIH +N3GqVVQ6TnvOzmnns3iAj+nbkxPEuWLq8MfpW3Aj2aewqOLsowHSI1RwIcBhoacx +t+GPGenmwneM9ABJTRqQ0KTLSqaS5wkUcJJ7r6SgSJ+LMQ4LKHyIOr6OIvJy+Zqy +M4Q6X21vTSvZVeCr5rweE/l+Wc3U5ENMmtWh7RnTGk7SpjjFZP+HHhkQ8OuaZZRh +KOGUBIBlWd05jR4nYrkoRqolRG0gxkRRFTlIhfcr0fruof/YqlC8TqADn2DLhrWr +Y62TOOnfA0djtaNNJ2xh1mGkFaophnedlqwiYIQCDMWIRgQQEQIABgUCUwjQUQAK +CRCEQzF7BlX3gMtqAJwMblJHTT7TRUfMFUTp8ODTbt43awCeM0s5htFIHEGcQtQM +oLtWNrP+wAyIXgQQEQgABgUCU95n0wAKCRBOpRTltBrmqEVnAQChZNcw4xBLHvzh +Zwwde3w4R5B04YQ5IeSw4m5aHIn0IAEAoGR4ZXhPF6tjZg+p4jpX9IF/MerMx6C3 +boAMimHZ0buIYQQwEQgACQUCU95qhgIdAAAKCRBOpRTltBrmqIaeAP92zcglLcFt +fLl3NLu8JlNhkYWr7DNWowJWjhVcFkNkrQEApYO7wwKS1N1ZSp3YfaWdLfDjEwMd +2nEHloRWDaSMr+mJARwEEAECAAYFAlBbsukACgkQLJrFl69P+H9BSQf/Sv1aGS7w +JKz7/Yi54t7hVmwxQuVEpvAy6/m6e/ikLRFInWe1kNiLlOcs5sjUgqQtoAlkpvw3 +5klIwmNtR8jRVZDsvwu0E1U5XIJ0icQEsf4n0N81rYOlwrQuzDNOY0p4a7jpLFAw +MhNwrBreF4ebz3ZF9yquxmWuCoJHE3iA+J/FaMzmGdNVxMpQXUPOjdX1hNH2e1BB +GwbUqpSlqI8qfjEVuYjZTs0u7xaHN9e6DaqwRoI9zcv143yY1FrRJuWFBLCsdogF +xDDUKk2VwLSFw45dmZRTABD8ew0Y7kkwHTmsEcVg8PM6XAVcVOT04+kVZQJ0so2C +d2sL041JreDaDokBHAQQAQIABgUCUtmKKwAKCRBI64stZr6841y+B/92de8LDKj4 +UjfV05o6e0Ln6lIRgxpexbgqyQ7A/odZ9K8B/N9cNNaFZJR4tAAt+E8Xahcyd3qn +0rspvI7cdwl4pslO+DIsdoejuL8g7SBDWCjE9sQLEDLxG2hqUkCrc5mh6MeAXcrK +12LKCq1uMPQzc2P5Prz2C4j0XITBzSGxukxtoC/vj93+h/gGcQUzQIq3L4QE1q8X +F6bqTFpt6i+tJULSZdrFNkcg3zx0BkLAceGCd+BDv++M4BRpWuzkXH/tFpXq/reh +uh3ZSstkvpqZot+q34GMCgGUvsM/U18akYJFYpog25rdYTLTs3eYSqR1ef6BQ4lh +GWDx4ev41YIriQEcBBABAgAGBQJTBnZtAAoJENgv4DzFW8/jPXAH/RObXOYzaU0R +8ludCEhJcWlx3IibYRCQZUcQUUTdiPHEiEVq2vPruujvL9KmK2c5lvK3TGuPm804 +F9MpCBWA6GSM8txmIndPIUuAKoZP/dErMo+A699BbBesTGY0v1pF6eyKPA5cgh6c +OaUXHCCOl5LPiWN664Euwk+IUM8bi3Qx78PopW+E0EJehd3PLkC5XyBIIe6YI9ov +Xe8K0B0DMMWDydgdafTjGCB/nSO/C1qpa7tVwvGLFdh9qhKndb1kbFYBHv957ZhX +QoLFo9D1IAPEzXEr3q9FsNgaVvJNlJj73pjesO6DNfBEXHHr6IbGl/IrmH+Wgo7Z 
+m4RIYW8DfTiJARwEEQECAAYFAlO+oyIACgkQj6lgRkXLfvdS1wgArBNLxdl9uDp1 +4N7kpYYWDGi0FMgNhyQCLzm6wFZVhZ9L1bwhel8j199rzpTOL96ijAZf4V/ProUj +vs/LJ0Gm0eqLLYqRoloBkSlpmywf+T3wADjT5iT7AdgAjOEdqI34mrjDXE9/kbM5 +K9a8J2WWLtl4P4SaTqiWmQBJBbNBlaL5uIutqX9e2cm+/jufcfpIvAFi/ALCu0AB +C2XnfAKpezotzyyk2TxmpVwemJeBscJgbF+mN4JssQQq/WcgGiQHtIxtZeKjpSVC ++T99v4/oPscOyPt57cP5/QHgv3N87ikzCHwtfOpWXWJmHza9qImDPzxlk3XeMZyb +fve4tO6bSYkBXAQQAQIABgUCU3uwcQAKCRCKcvkT9Qxk2uuTCf4xTAn7tQPaq5wu +6MIjizqrUuYnh/1B4bFW85HUrJ45BxqLZ3a1mk5Kl2hiV6bLoCXH+sOrCrDmdsYB +uheth9lzDTcTljTEZR9v5vYyjDlxkuRvCiZ2/KLmjX9m5sg6NUPOgeQxc3R0JQ6D ++IgevkgTrgN1F+eEHjS+rh4nsJzuRUiUvZnOIH1Vc92IejeOWafg7rAY/AvCYWJL +20YbJ2cxDXa7wGc9SBn8h+7Nvp0+Q4Q95BdW2ux2aRfmBEG2JuC4KPYswZJI9MWK +lzeQEW6aegXpynTtVieG8Ixa+IViqqREk2iaXtfoxVuvilBUcu5w9gNCJF+fHHZj +Uor5qHvZz91/6T0NBlCqZrcjwlONsReSh1Stez8SLEZk1NyYmG56nvCaYSb1FvOv ++nCBjz5JaoyERfgv4LnI+A1hbXqn3YkBnAQQAQIABgUCU3+zcQAKCRBPo46CLk+k +j1MWC/44XL3oiuhfZ/lv+VGFXxLRI7bkN3rZrn1Ed+6MONU5qz9pT9aF4C5H/IgA +mIHWxDaA30zSXAEAGXY3ztXYOcm4/pnox/Wr6sXG83rG5M/L4fqD0PMv7mCbVt6b +sINX5FTrCVUYU7ErsdpCgMRyJ8gKRh/tGsOtbyMZ/3q9E+hyq/cGu8DjhfEjtQZD +hP1Gpq4cyZrTRevl+Q2+5juA4bCyUl00DQLHdCuEEjryq4XWl0Q2CENDhkVV+Wkv +fuIOIVgW11j7+MmMXLzMMyk4MZtzgedJW8aU2/q0mPn313357E9DwMZj9XvB3JCx +4dRjBR67zwYySVvnK8KMWVNPWcleVrY+oj1l9psq+d4pkjtAa/cd1mBfh7h6uKzk +ekj/zWuJV0+HEbKRmmBpc8SWc4QRNUrCBk7vVfGsBLCmiCK9Rij1zgrwihrw/T77 +BcvOcxhZNd3Y9Vs9vavExF0/5IqclwcuJqQO5fRKmMCFi1rwT5ZcWANmJXdaN8H/ +7D1WNXuJAZwEEAEKAAYFAlN4AagACgkQRCkHtYjfxFfaSQwAjmRJHNBnTYQ2Sluy +9KzmgtiVlxl6Maxr2zBQvXv4/mH2Sl2BeFWaM8kiyQzl6XZV5/q8TCkmskW0N8YO +l+l6AhFGuh4PS8UWe050fcxJCB6Z6XUFdvVQ1F1dI3bNcmm5libcMSNFNS7pQF1q +az4fmVniwPx1ezBdAvd4n4l4dipg2bW93iPMiy1JDRc1Um6U/ouW2KnD7l5/PkQK +WLzSx96xvfimDD6DXbW+/7nFhle7foTLSlFOcyeuXCOQCa04XQOJGKZtiVp1Ax3M +v8t1A0t2EzYlTTKZCCCCa9EDReI1m7EJZ7+SJueaW6u6/TuM887l4FFuM+6Bow0I +EC8FJyPdZg/BqnZ3tK4xSm3tF6oxc8IkaQJip9R76hPSWRfzc7ooTbxQrzYVzTZa +/pb6RfL5bTi3Q9D1xCRjPtkZIceMWfPtnymlTIDwdefzTT0wxj1vTSluqMih0LOD +RDrmysDSx9MBfH+zhigweooCCj0wLmOkmT0PjgJvL9TBG5HViQGcBBABCgAGBQJT +eNsQAAoJEPLvL0cGnouP5ewL+wVOickmGd+Dout44YAmPXSzdP1KervaRAWIQLFd +a7XFb2krwGwIpkw7hR9qhAG/CWbF/WRQqWB9M2qQEaHP7LXjPuCQVf9w5UJXzKUB +ft//PRF6IzBOm8g+yHY1MJo3x3PDd2Bym2hnr4iV4teVnoHiutAcKPndpu6idaTk +hguNuKOc1hXqILi3x9WRVi1d2UL8MakyamVz2k2sRktKQEZ4goEYq+8kFeT/T0DH +/bB5N3PEKwpK/v03T4fD8ihMFYwblN7Y+Rx0mrYthCIQYpfAVA6eXjyABv4kRj/l +1G1ir8ar1PnrHiNp2Hv1aipDvfDZnNpicwySOrdyQgpjGao75Ipw1RNcCuS9DWUU +POYYQQfknCeUMgtQDqoJBYiE3wp24QZw3PsszyMk86bQWqGuhdrmA97zwX9f1me2 +BdhwyLPkBJVt/6t2Tp+vx00VmhbQKLbpPIACzqAGw8RtUx1G5bmSjRgAuo6xWOC2 +u9Ncxt33u/zQ7UvC/wQ2FwHHD4kBnAQQAQoABgUCU4DA6QAKCRAq0+1D59sVj5pD +DAC+MneOmun1zAq7WSSZmf+AI3BzYGoYN67lJ8QXTcgDgbqXAtGQvp71G2It9ugd +PEeyQ4T3DxNIYA2uC344hdsVCAnQHO6NMvR5A1qBUldxp1w7GfgV39p1ANzxDNwG +jwwfUQfqk9VEOp4+puut4o2fhyMmkC9RaGzWV5taPyWL1N9+JqfNfsjWFC5qeS9J +OLTvhmk2lLVKnw7uKluiQVzr7yj/gqcsyA2sPfs938cIr96CveTdd3d1IWcRErB7 +2e3zb0PKKvrtXjfAMoZG0vrsA4So0D2Z3Y710bGgLQ1WYDlRw7YM7/XKN2WWIBWx +LNfEjVIuVnpHLCTNdmntLp5oaBsC9TrDwUMDZ5DEro1XHijX3h7x5Ni+XU89ZodS +eQy9uvLwkgjiZIxD4DfCXQNc7I2a7h+M3rvu3LeBIQe3v/KNMDpgL20AyLxUs7/e +qe0zWm3F4sfYu7ywA/mkH1Az3xTWj/I76WlmKPSeJpNEi/fol0PCsTJ3vWdpu1Hk +t4KJAfAEEAECAAYFAlKfzT0ACgkQ/bW4wGfyU4fk7A6fayMhAuOjAsP5s7GebYVz +RI8Aj5Qmp4w7DyJRYpwTzyIVPXzLTpOmpQRp4sChlIA9YM/Ho8jhacvpBKDPuJr3 +p2DhVTUVL+BRRWoTFJyrlbC20ftr3nCOMEW4yHA2u8bKvHwPIUzasqqPtybJ2wdj +Xx7V5W6TpwWnpJFHl6TyqFEsb0b/Ne61Tx7mB8m/0UUjKyu43O0k5p49dFA7FUUl +maZmjGrfdxSN3HbwRXbaOmWYn4q7TRL56BmLWZklxwXCY1nwEXdkC/R0U0s6NNU4 
+o07hahbc202SzLX9PaHCEAREVlTz2nVdIXcPUdo3hOIJhE/2mbfKTqB8WRgE5jfX +zdogJBhP7D4pV2DyvE+SKvIXQ1Xp/2SN9hLWwBg+pQwjMpiFX+HVRw+6p7QorR/k +2kryhtc7aUnMtkTuCq1tzzwbdGD7e8O6QPhuhId06GbqKLplqYPap2sVAONE6NHL +zmWaY0nFdzXiICXSk0oTUS9NwmAn0WdCeC1pJi6T5iyopxDNMyIFFTBTDFjxWbeM +o6HRKsbjnhEEayV4bwJ8IaPjhvEUTpDgyV28kCSRgJ8zvNLDD+nms6k39K7c0xji +BgIek47zMp6bgTPAn0Q23hwCMf+FiQHwBBABAgAGBQJS0swMAAoJEKQiudjlJ9vb +tnQOn04QseTRPp6toW3qTzPs2vFToGrZWuhRDFxEUEuR1GGM3UFWvk/a7UnaHsaX +LqZqqKIdqWlCb1EwddFJKiZU+Fq/sRm86VAeK6OQkNwMtbIugW2WC9MPre8D9gVu +dx5ZjYBNjqCnX+yn+33M7/LAa6Tr7GVUqV3aM0ltCmQHABRp1acQWkWLG3IQiA5T +y64hXrCPr/dXLCyFsbUyXccvgTiqlKo5OCh6xC8vLI2OUjckvwoH5yWM3EnEE4Tm +ypGAHk+EP2aVkNflYWMvcRbBAeLVKk8+a6+JyJJnLRKHDTKN6++kyceeTN4fb1Bv +2AN+S+WZLkeTatibeq+78jn3ES2Yl9Jdik7KF7cSx9+Y7EcSoua1DXZzHVO4rPSB +cWeH4yb+3ET6xUeyK4+iZqd/067qTxED6ZDf7vXk/8+GiobRC7ob4Y0IigH7bWWf +xiv6DBuwpcRipVAhMReoOR42UIfL1IWOk9d/lcmHjmTiYvG6XRMcDAu3VHjUKE/j +b/6vcq5hZ9dcBSzPQJ/mR9AtiqnA3Y6RfK1UrbpQ3rJUu4UF61NTi4la0kFAETcf +JS2rTRgBJ+tbL0hPPVC/81ZzjF2mgnvz0CfVxXpQ7un2iLnRKKd7q4kCHAQQAQIA +BgUCUwoVXQAKCRAO2qlF6KT/l55/EACE1KOCpGqaHINcLq4KWI3rRss/aSOj8LVd +u0PcVloy1kZ2YZbB4UqNSYbzWPUASCm9kEFPlhqAUbVjyMZtALW4ZhgZSrHEUTGH +ygdFNqRROhxg4e7Vj80sz1hym96KG8gdm5oLQTbFhgcYHKEBEgtfLmZ2Cdn35Oje +QYVOyZzeTw+k3ihaJHp4K/gVZMcAdLFT+WWoXO5VzZ4+5g03rYbNGcsQ086IPQJy +JipSUe0Lv7oYYc9pmJ6G0vbYM78qkbYm5sXe0S8JRjsH+v41AN8JmILzdQde63gd +RsMpSvXkSHptTjxtLdlFf4uopPQRTK8K7qHkw3dTzpwO/kgy1wtrVGxsASuDxCmw +/yDHuN3SkMqWgGF0IFqsJdy397fXggH1tF/z0VHXEsQPFlqWOqRak+hINRonEp8G +q4b0lnLPSNxTaO36AXLt0uvsDuoyuv4szjsps57sxqbrUJ1QmblSC9xRfkAveaaK +U1I50wURejtadqOTnxDgCdn++nN2v7WbjweWdFn4r7kF8ww7BAuzu0kZGDLwiPFb +Px+n4o7DpymLUrx0W5udkdMxVhzxQit+v7RWqFFa3DzWxshWE9pJS9e+xvnupibm +8/J8zzC5Vsz+brVGGPIDOFCGhq/5j7nSpk9oxaf9uaBSqcWoga0TrF1b/fjUNNUG +LcU+QbnsqIkCHAQQAQIABgUCU4BKagAKCRDxLZhXQ+4mIKfwD/4zG06+G+lasq22 +qv0gQHzdkqXJqjlpkJ+bYgUbxvxYFevL+eXboCjImgdTqcN8xoBd5fMc3YxXbjBR +9YmQYL+5GqKILme7bVfOIOsRlRP/V4zroIV+CnISEa6UvEKm2u0q+Or2KzZhoT+m +DIfQpjhucnYNB+jMF5ogvaLCmPxu9Tsj/PytO84hPoiJkvqDrAq558JMQRAy5MKN +3p4GyTKAjSyvqqUrmrcMnbSOhsuy2mTiAYxLn/CN5g+MJClNUhOn+sPN6RDMw6us +QtmOoSws9ZKKGpiQNPFidNbtZ6SK43vO98mOkMNFnxOSbKdFkeIHYW0nC+EuJtkP +WS1v9o1hW8M+rTRwH6N//51mZ9iCOhgyX4H1+3VPVuqYnfqedmwALoIYeoQ42x/3 +lRfQWlqJpiFbY4xwJKR1ifFerziqaIxvpcq684t2Hk8OOLNeAbH8Ucf/E3EiszPt +Y1zaXk9u6SB6IY75UVXSba8OTGFDqkxqVbR+hoaCUputrDNfegmwe0ZKRB9E6Izn +p80IbFfnvluBVa29kBEEKlgd05Jhi6YkbffBT5bWTu3xyZjEmqnvljsU8a3Ij9Ba +MmScWEDPjbo0FE5TMZgHUsOQBwMIVSB5ra3kxGSh6ZcffOIUmYois3bE1K+/wHJ3 +Q3HWPSjdv6d2X9dcupz5WLL+E6A104kCHAQQAQIABgUCU8FM+QAKCRB4VAVOzv4Z +5HH3D/9/lb9giwpUQn6YD0y46Bt9T+NuUcUy5sdB4B/lC2kCPA9WJq8eo/lFFuZp +BTbcdR5BfHm3sx/sIuD60TieVDXSdKVuHIDGQh5T1NrodXf0xykJ1TmgZarAyMjg +GXptbFLSX5GLDmU51G28kuAkmJH/R03z30N01nj0tIBIY9s1eK+ADzDyq3wH3O+t +Qlrt9yGNEqmC8A1j0Hs3edKRiQyWJwViYsQK4CUCuzwpA+oUbJZ1z1v1Y/FagabY +jTucmRgCp/FD1IOS3jHl01NtUIfSkG0BwBjlsW6VBVZ6J96VT5rOyW6wQOSOFPUN +3pgaIhYFfgES0BXAXoUwQzgdzRzftZymgNGRu0Ox5KUx9aKYaWwvauuzb0Lw4IoZ +TFx8GURfhMCgWn6NSLIF8MfJP9CbvujfovD5W5wffMk6cYKNq54/vVeR5H6hhld9 +7PQIqPefZjTOoDq08FWby/w838sjl73VJfZyFjOrLms8TusFkSLY/b1Kg4Kv28ie +l+Ufa18goqCocHus7VNvN4YKTQGOypL0w8j6SvlvK7trH2NCBDVLU+sN6RxlVZKK +hqMeXZHDvX7/jpNHhjiyZ6XqxXLxnXeFf5hiyh/k0irJ93yT7PvTB/FzCnKejQ25 +It2n3+bzw349vp4cC1xulk/ZfSD5gMXmsOUMZpDQ1r/9s1OERYkCHAQQAQIABgUC +U+qnNwAKCRA6L9iUeafEwX2RD/0YMOSJsHIrPoiFVSFu69w8lvgPfvSQCPJrkoVP +mdc5YiJiMGp8DVp+UW3JmOLKIUPUg5p2/C+8DLgjWLV0f53srOCdqp9qXBx/0yKO +tvRNGlTEYywVPA6JOeNzjcdgUgBrkT8lw3Ij85+eJDVV6QFuTSPmeUp4hEESeNKP 
+WKT0B3Ixl5zbVHO6Qfa9NibCKpOll9YkswJdynteFMkpVm+Lq5mpr6Jpbn1WDrRn +cXp4jdZYG6yWPwQm9m/2Ua9ILqb9xBBKf7lNkywVbku8hmzZX/vYGZPGVZddex1Q +Cwp6UNdUMaHUGhh/B7kf0BHseGPNNg8sxLE9RZ85vHmXKmQfUDvKY3Kzk1N8gogf ++78KXh8pi5KIKzIq0GsUCujlJxIWDTro/Q3re3CT8M3op3qx2gjZbpsSmweoJtMN +UfLY6hx5M3I6faxKB9VA3/dboBwsXr4UddQs+GUsBW5MevrFK9R4CuHwpLSpZBXD +/GnQ0p3M/Ddm7Wy5lmHwUimStc+hkrSKrsEy8ixa5sV0hq7Ii2hE1xdEtFSOCLgo +IIIAzp+N6MaqCEkmjCUz6//74Wy9/O8MF2ytu9cAu1lQEJrJa2YSJk8y28Y07y9i +9fzQkkQSVympUVfRws2YBmqvuyxcM9D0HnIkivoo6ka5kCiMsYQ1Y3F5uDlOi6yB +c6AM54kCHAQQAQIABgUCVDngmAAKCRDRWYmf09n4stAHD/4u6iAABcOsKmIKIw7K +gO/2InxofURr68ZguHVna4C8Vu3aK1IdLsPyS59CUa8yqEuhBd4R6z0GrJgj8s/X +JGXkWYyIUeZimLaq1rBd76Wi9lQC17G+eCqgEfJeP4k9PNyU5tZrxGzCeCRVRjax +jVSFmHQ4H0Disw+pWbcEWUxI2ObvrCR0uFUb4wI7vNr5ZhMfIZq3A1dn/vUreNKU +4TUfaNUXJ2uetjRZXbHHC+3xS/bjO5JhTBoneScGkVOG/4l4kmemHLTUMn4rZDlq +BxtGil7yTN/VrCbpRygnpEouM+JzXeYWYDERRti+H84HJusDRIdPNcobFTeMR8VE +U9Q6zIN17Xd2Y+MAS+VxR8kpbnUQnfz2D0ab33AsHiSfzk66HqX69wxsP0KNlZ2S +nvh7vuCqWZweTa2CM+ZjHMrCTAwl4gPWHcEZRexLD/5mvBXWKccq0etfhkWPgDVD +9SjKmrrSY/alux7SG6mmVBQLoZg+rnrXAq2lg+xBe5nmhSbqM3pzvXwcwYHKSYiV +iozRJScaWj14ljwvnUFbytI6ctdlNVDad/DwbNfDPcNnjrAu9LVYZKOd6wq9XJS+ +U3W9d94zVqPo8lpinGBSgEc4hkN0NxkxPMnEcHm2XkoCB2C85vcxxmUHPXK6QtDH +6GtPb3GwTcreTUU+rP5zhOLY04kCHAQQAQIABgUCVDokfgAKCRCaNKuaK7KJD/W2 +EACwKaI2AMnJ5SBBfBlZ7dH280mC8BgcVrjDJs3Yh9xx708bFAUNir3AUa70gtQv +IDoaWHaLiPkUlz12+qZAR1iTxZhmj6dESqoCzA4vsCu82YjxEjCvL2mCUvUZi0ti +syTJ99EGENFWX6yYsPiuXo0oHaBc96TqXCQjZQZjYzKHAOjPrujtTw2/zjqkj0ak +pc2c7tUuR8g2jit9l12Y9tBu5bcJ+Wm7XZPSjvClkdm92U+hjM8cdy/N5QS+oXIO +2uja2ECrF3VD/xxL7eqZ1QQSk5Oi840TQD6e/WtsOJrk9KzAHx3Rs0YXu+/NvCk5 +U5ZUFxQRCh+ptt3WkABxMNcnQf/R/qxvktLpT9VdiIM2vWoAfVwEiIESi48JA3TM +znoX9KCrdFOj+pKcrUtzNNubfclQNqlLhugOQ1sMH7ka2PncVHWxeWaEGBCblwy5 +O71bodoICXJ3xmd3yB47QsL3ZTEUMw19mnac6Dcu7sWR89EAW2kjnhYRrNsRNf5S +36UWlsPiEl3ae2/R4wenSOm0n2FD/eNDIu9neth1B8G1jZGlnuGn3ggFm07h1gnu +I5z70wRdLeplOJPpcFqNLmGIyTNluFdDhkn5SHQfLIDsYJhc5Qe9kyMMFQXi6wlB +L8ph6m01HnWOI1Elqy9ebHw48QIRicWYh3uMnasc+qdvY4kCHAQQAQIABgUCVGcY +SQAKCRDNl0yaOU1jPyAnEACOeeeZEC4ODefn5qtazegMI6yOJVtdyI19x+OtjzL1 +Vgh4CVfOqPuf2m++O3MwNMW7M1vL6/ytImsgOoX8EVbbhF30JdFIf02o+Pn4SPHH +1tvuRF+PpaRqznJVQrBx1X1Wf5PCy+5m426CYRvcY0hX+iQbaq/vwBbBCAPjGBhQ +Woi4C+vI9wibgz745MKQvzn6L+RUXTxDlkPaHQtM9srw4wKsTpJg442dOBSeTwZz +W6OuwDlJNubIah7gc1R/eDAD+x64O1GhXkUIjIDRJX/KrE87pMswhT8SeMshaW+e +nQ4pfMMbLxnCZThH0/LAIt2E9idkKE+ygHBEvmmID9UNlI94L9DJGizXA7T7EBpL +G8V6Iqav1soI9lMDkIfWVbcnI7r9A9i8nzzFUz1Ruug2FKWr3q3eUAdp09i2S8V4 +Th9LSKphVGqCBa76y59uQNGeUBcvx2z31gMOzyb4I5egKMU95yr6M7dLVHWdg3xN +4eM4wVw4r92NNeBZoYKsBDoJwp/PUkf+0hzCbDCqfKMp9Zn87J3LPoKnKTob49l3 +zxKZzmwy6oPfCenshRg34RL9WzRDgeCHBRfGK1DRLuv60vpe6zR+75cO4VVhNA9R +J6WfCmJPKj5TrhwxyzIHphAlG0ezoLetx946hXwwIZSgVGN4RuUu5aVoi83EHHGG +XIkCHAQQAQIABgUCVGvw2wAKCRBcs2HlUvsNENekD/9dCHXxPGrqyyH1TFEcc69A +lhwcLgBlepgigK4mEWhBIzFhU4WLEbQkvwhrXXPQcV1ORsLhxXBxbgQj+NuSPZMZ +sYf0XPsPAP2WQFVOQOIGkgdIZDOaGXQdkMGJl6xAhEnbdIh8XM/f88gdUeKtiq8s +225CTSSc2zqxqcRur4eg5OAfaxSXkWHO5VXx906ojhwpY5RwXRMPYkAxpeiBbkMV +KjiTSSu+afuP39HiuuFtY1yNmxnpEwEN1dZgPcb+j/kkfjYz4OFkJcerE8pLGsW5 +MznHIcsfM85tQzR/cJuDbKGSjBOJ9LAiAewnWO6AUhcSX8kadSUj61MHTSF/JErH +YCaTOzkYGZKI31lgqrerp5YEbGZrqxWIoM2RVgQBkXhyHhyeeHlC6YNkDyp8MFrs +GB3RD6f227mi3D0HJJTzhp50MpwaVL7t1Hfxa+/uEzB5jiP3uRFFMim5itKSSz9+ +i6d4tvGx1EwCxdpqw5cd/qDEYxeYkskguNSopAUgYqUcdFjt4xc3UujS1XYzZcRv +ZaYhwpHO2x8/XnTL7gJ2oSvxG0uoRVBJFkDibSSnPAfIVyZgoTNmMbRA1b9Bp4AA +MHB2QL6YRXrsvb2H9kuSGyKijDayoKuFUPo06hx1yOey5BhwwmNooAx0BqP0rWyO 
+LPDsOW7UDHVz5wDuK2etRYkCHAQQAQIABgUCVHHpRgAKCRCY3btOIsosg6hdEACV +HVLUlMx1d1aN+qW2pk5wrcjqhKdl+S+cAo4flAMPShnmbuyYos+7nkKsSkLc9Joi +529otzXivRFnaGiqzNfjyMpux+NAE2rq5Xig/bKuPW/Ofbc+Ysugy4dWD3nnrkFf +zW4ycodOkszZDI5Hukt+AnKQ2tTqHM1bCNUbn1lTLqtQvePj2Q9MgglS4zFA+d3N +AJXYLBV3XdqBFPyT3ez/cAmEilf/vRfsEWu/1O+x0SjR3dhQrIZidZm4ZNwRR0wC +xZ/ZXdf5qrY1EwK7deMMbORsbD5K9WLFkNQPLlVLZ1t67J9FJz/WxXAH59/3d/Nh +4bslvhzleIOSYSlZRv4QW73S/h1de2PmJLBnkFtbCiKpo76+wKxYQiFGKOPnpsgx +I0Jk37r/EUTtbuMkdIgGapZJPP/M+d3sBNxxH3qcMbqOnpf7rkbl30Dpln3TRDY3 +fcZ+YMyA28KsL7WRMYzdj6JW4mkiz/96SPKa7azmLlvjJOaOornHHms8HT8nrzoa +DLluYGRX3yqPcOk1OUkRnGCIa0yWPAu4dmLprJoq/116S2mnXAadkeLgxKB2+nhp +V6r0mDBA/5rtX8NlTriqLHXQqX/yZMFx8MAd/c/nV6Nx2EqH8nnNZm95HALDlG05 +AIfOiFjdcpqnDU8srSvABMDix0NX+KNJpe5/V839R4kCHAQQAQIABgUCVLETyQAK +CRAXv5SMBHYTfWeTD/9zqPDnO+u5URYtTo+RVaB70cX2b196Cqxt46YT5QgYCliv +MUe3OWBAjSMJ5UPLgqlIMaRX/P4j1d8VbjtRxcA52n6JE6sjbSs9l4KZsn7Xlf9N +nt9obAzRn2gwpKm1AtoZLg31lmWv4NLVn0gq8mWiOjpKA/FB0omHg8Fcy0F4BrEd +PIhT/cYh8kBzbQqctBx3jrra44lomwA8BDGep/f9Q0qk0JMZ8QcCB6RqitTNOkEE ++rctgW5teoK7tDerpTK9w3Odej3Ts0M6qNE+3Ngc0uMDsnWBO1BhHkc7swO0Oe3V +Svj9Ay7aoYm5SbssQYDC8SiAoBHkeknI1kKR1tfWwsH5mxyKh3njQmQoqxdeyhLT +6hQr+ZObs7Kj70b/clcI6NfyxfpNYhYEXs+NQLxpTQfla884kStGL3X0ucLUNSP2 +vZtoPqMlj4+nN6eewq6sWkohmvhThzsVMfq0JNgHQfJeMbRtxzbosIxMu+QmyrB8 +CAUXf/ZEaxnIpW9ev6LFP3P46+EKKSlPRyoW9AyHJaWPAf2THWFd7hvqMtGi5ZXd +dBieEh1tMdbgf62VOc4q3k7nTm/tdqjHxegMlVf7bAKuKRCxFRQ+CDVsYIeBLsww +JCQr7eq+135qI10xUd77/XxwnPLwFcEXW8StTSp+AZjFFZUsGcC8sta6Hi73gokC +HAQQAQoABgUCU4BMBAAKCRAWINxaxqB9nLq0EACigGQ1GzxUgMkTBZa90xQGI8z4 +B8+PrXUoMBRml4x29W9GfTCSgZKo6IkzqOsrEzsxXjlbqpebRb+ZVEdaHByR7SF+ +5AEby65WgDAFT7Bvn/Rbe4zYNgdBN7qJGR1Dgl3b1/DuSjTBY4k/Gq2G4sNYboAC +a0NSjCiL9xLE+WX+gJ8FyFDfHiOIVI2ayapsdY44Si2pt0i7hfGDKQCABcBW/zrr +UKEVFOwkM1W+v9QeRQiGHUlhB7+bU+nYLhclAtqY8SH+zsc+Kp7T5OBwyba+LDgY ++OnDVLFu1669t8Kb2mwkFmHBkHOICtdmwfbspXiKOdlKA6o6i3XW4Qw79uhrsiVb +tZpSUeqFuhGLUS2S2/HKfvafvb1rS26eAHsl9zRrWOYsmZBmQo+2pLNQ78aTXXHV +Nrt0KeCAWcp3lb1WGo/lDMv461V+rimLylBFusR7EeoPQyBlBSvHXsWHZER0Odrn +k+1vXAOlfI8zBPAhPGArUyccPyEDNZh23B5K8dYjV899zn9qgaLqjH9rw18gL3f4 +pc3GvncDsqEhrptrZ6Q9jJwkTq36OHgngDm+G2eOoRGss6+kTbZrVIJ605ldIiMQ +5MUsxl341lrddR9lvR+W4GjxvHRKinMRS2DzpwiyX75mJ+IYcu9jCqnSP+Pw5Rx2 +td4Abi/tnJtaUy4JbYkCHAQQAQoABgUCU4C3tAAKCRC3YYg7RCi9wBE6D/95FduH +ScmAnKs1oNjVix1AApHlwhji5ikqFVVd6Bc7tTp2fSknYacFNDPm9ffRgFDOEOKO +nCHk56i3f6ZX1nTQ5hLasPE+4chiVgB5H9J+HNZzYBN0BVuK4vMz3lj2id/pw5rO +xqSG2HC4yyzQs0gHLaOvKb2iq5+hEOVDrm/e4OdNFY1pXEu6n11pYDHCry1S1DRw +YFUsU8oIUA5EMIUZdSGQfi0jNadah4FmGXXjLuw18ytpuYbbHB43L/gZVcUVwjxX ++s4e2SCp2maFiolgI6ds18vVZ7WCew6WzpmLpB+z1srPW9umoDFGvoh8pQT5coow +tnbxBLufpsqjwWZOtE/jp7a06eDz16V+dE4MpW2mzNIJcaByQbz7YMjluUDOFDHZ +9VgF96IVvvcueVsjFlj11p60JfbGe/UMii5qDyRPLu6XDlwaQSeTby4IUf8EW9OA +z14y60b6hOVfpj6SGBRaxlw/cF4Y1rIDCFQuqMRh+eSyEtmYC7aNTCex3zBD1hus +5MfzSBrLNV8W3e2TjL1BYnmNpe81llQ7NWgAN8nXOv7QNnpI720VozpCGwNnLZnR +qqOgn6oqmbA4aKg7PsWOrSdCJDnpOU0QDBmzdxqTvdp9yDuQS6WfJs6IuPbqAzYl +ZWZQy1YlnJRE7Zq/Qn72r2F7ouArT2yLIpOLrYkCHAQQAQoABgUCU4EgMAAKCRBd +cNLrytLJ5rlHD/9ZFsn6AKiLdQxWPjnfry+R2NSDChutrfXN0033+5XvkLThu377 +tCBxWR6bIomLpjr4UgwQaNAX8t2gxxdd7pfoXE3w87hnb2wzaJmvhjunHFtGaxYw +93kla1JzvZ09drE6q1pefvxssHLh/IbXwOqS+tBoJLcpqXDG4v5b07RTVtQ3H6ON +t3/W2HRJDe9fj9VH0+feG2xlEHJSLoHgix7BivxiDfbQKATqWum/fFNvHB8bOnqF +mk0btX4QFvTAj+Cbo+3eDr3zwO6PVyEa0M3ChYnKZkYtFUXu8weG7WyDInvI4TK1 +JtZns9dz0lQfCwd/24r8bQ4KmAcvgfVdnTUI2BO/mk9IPCVZqF6/Fncnz9fMA/aK +1lKMKQQY+EfkYKUf0vEHzJWSTHxyJAIUgGCMGgHCeHW6GjRgBLSrEu2nh/+i1FBi 
+FLlHkyZhUp9KO79IQ9D1Bnemd9/7+SUeH+XrGmcGXd/Eko4+5Tm7c3YJEC+bAne9 +4Ey4WZuddQ4zWrJ5SaUTqingfS0AlDjeOt71+kFi1x7Q+9oGhuBvEkSFFB47e73f +VKFVMKqFdjeWUYaA5oPi2lKZx89c3W5lWaxOlhwQ6sQdSdwtPR0G1fNthCQFvwkQ +6zfWNH4bnFjd0xUOmRvlF1ElnCYO88clGAdLTjDVCzbwvl65ImoZrcbKVokCHAQQ +AQoABgUCU6SwOQAKCRB0N3+fakeRn9dvD/4gU5OqbyWqnvte81d89Rt1lclLUDnT +JFabQbjLRyMpGWbgVEJk2F6bUB/rfuHWuqTBa8XLKruyWQL3pZM+PctMyrdHGKSo +rxv/O6ggBEf8dxNuBaDJFVpa7DeSd0k099El0mci/pVRClOc7qoLStsI7LZ9sU+E +7oUDmdwg0lsY1gY26nJeDyTp4c0pUSS9vJKtGcfErpMVcE6SWqyCki9nT4r8u02a +5UnTzu7HSCp2jjx53pFWhd+f0n0wpv7H52mXAqG0GXeLCYo/sYHPxwySXH7EToUV +CvQb5Qc3CQwqIZc9xZzsil42n5pO51X3MKkTJYV9q7DtGm0ECscZ1c6FBkgX+kqU +at6tYNkkcuwAXtmJ5wpfnKvWnMJh+0tLcxhjS8HYxAB1AP9R3VavfOJKsdnlIkgi +db8SLh0d+nDUGHcqZZ8a9Pm5/WG/8IIRehPvs/MZF+lsSk/6Flfxk6i/o3B9nnzj +pLEfDH45k5J3EbBEm4tV/8QLehZ/Yb/qiVGrOzEpCgpIjoQM+2UcWTLtkHfVYf/4 +uoT+6rPGjDaPv6V5WMCWCWBrj6NKFPzYVpu8kzPjA63QWQ2dBLz/rddUf8jKx5Pm +hV6hKk2gflMPy2mhzFr/mAwfJn0VBNI2xYqblqEreeRJXpkmtfJ50XIAU2xS/lPD +ZtiBmwxnqk6Bp4kCHAQSAQIABgUCVMloZgAKCRCBxcxPiDKaHBTKD/0SVqbcdpdq +mmiVta+adRdcEmBxYJFD1oEtpjcHIJBYBptPkIT14jQMO3r7emkzCzGrXsM6t4t+ +FvayGjY9VDbO1RWBFc2M1qwBpQYjIJJrx6ZjJjSKB+bDnOg/Kc8WoDmpbx4x112m +0Qoeq/AuhV9o6UJsGFU/5RddAERqEDFufTBhYVHBD3xlUxVoEhI+YYCry4bw2I9y +6i63krIirlVO8lPiuZDGO6u8E/vIxrec3aFf/fTo2yTqP2u4JiAxxnriZ0rjUUVd +w5bWkcpNDdhUHKAfequd/vgXTV8AzU1QN3ilXK46U1yXMTJaXJg72hfypSKXLRAy +AOdGkzZZC70SzJz05RTHqDclEUnN/BzuXN+XIYqB/M3ftgrP0BxsiPiQZU2TjqHV +AeGpSgekih8DK1qcuPiX0sRfg1BFtFUMI/8FxKYvgdxyhmCNEDbs6RBmdar2qtFY +u0qpXMim2br1Bgc/0XSngBYko5jjkFG5tnHm5hUM2Da/Clj7eNpJ1fj0DHrfdmmj +VVoYD6e2fznL6VAAUGQT3ISbwUa/kLKPrqkLy0b5Vxg/q0Z4tfzWcYJ1YdXBzewo +hSCTpRPmwaMyfsbQ8eWTgJR+alif25tfc0A57n1saHKjb93pPA7jNosjpr6W4tUR +IuAEm4Q4zEAxZkTWMft2yIvNCiRJwtpGVYhGBBMRAgAGBQJSUrSEAAoJECkMEkm9 +2HALgkYAoL9Hez9mLtUeiYsv27TT9fL4mE+RAKCGNS3OO0mBVDAOxcMhRV+lkgG+ +WIheBBARCAAGBQJVYgtfAAoJEH19Eb9inVpnerEBAJ0wIuWRlKqtEtCKOVEboLMD +q/0cBBYfGzu5yTlFjnDZAP4rNy5hiL5mEu5GJqGEY0o9wXNLzJ3bw+kNimI6dy9X +A4kBHAQQAQIABgUCVcQyrgAKCRDHXurc0X7YRErCB/4uDl6B5/rymPi/3AK3LMyJ +bLqZZzErK917s491J+zelFywOoUEWdH+xvUzEOonioTvKkGrQ5Tooy3+7cHojW2q +SauLh+rG+b+73TZJyRSYDD4nwWz3/Wlg21BLinQioaNTgj0pb5Hm70NwQwUcFtvy +JNw/LJ9mfQaxt//OFSF2TRpBMr5MMhs5vd85G5hGHydZw9v0sLRglk5IzkcxNdku +WEG+MpCNBTJs3rkSzCmYSczS1aand3l/3KAwtkau/ru9wtBftrqsbLJZ8Nmv6Ud4 +4nKTF0dsj5hZaLrGbL5oeMfkEuYEZYSXl0CMtIg0wA9OCvk3ZjutMy0+8calRF87 +iQEcBBABAgAGBQJWc8vRAAoJELPrw2Cr+RQDqw4H/2Oj/o3ApVt42KmxOXC5Mcua +aINf3BrTwK0HDzIP+PSmRd3APVVku0Xv89obY/7l4YakI2UTzyvx5hvjRTM5jEOq +m4bd0E1atLo5UIzGtSdgTnPeAbH07beW4UHSG1FCWw35CwYtdyXm9qri9ppWlPKm +Hc91PIwGJTfSoIfWUT6MnCSaPjCed3peTXj4FpW1JeOjDtE3yR8gvmIdIfrI4a8Y +6CGYAUIdVWawNifLahEZjYS2rFcGCssjBSaWR25McL7m8lb/ChpoqpvQry3MaJXo +eOFE7X1zInPda9vDdWR4QFrLDN8JjxzBzwsQcfaA+ypv95SlD3qL6vFpHGHZ4/6J +ARwEEAECAAYFAlZ1TPMACgkQGMawXRQNVOhCaQf/aQZ0xEVW+iBuqXzd65axP3yW +S9dM//h9psP/UKhFzfxCdn3XzmJ92J0sv22DjR8AbbGLP/H9CeZY8nCQnYOHp+GQ +ikGJNjzyd1Zni+Ph67EYfEV2eqRO55GGmiRtUrZaur2pfnbNsvTQtA2rGXen5tLS +sCh4qDNHrM1TlP9MSV0clzoVWRrRNvkODrSDaCdEEDrOqfy0AEFlLmBTqSsduo4c +O46j0ruC0SvflYx+2HN3rVtZzt1wrhaPBPnV6gP7dhKp9XM4erWV40dP14YyDExZ +oKNys7Kq7pnRQMbE3HL6UGa8VPvu9eiELs7kw01pYBtYl1my9ekminj8cygpdYkB +HAQQAQgABgUCVolllwAKCRAjRRsQeqA5QYnjB/9oDZYh20qEpGIZRSmur8M/cGFK +J6IMxBHFIz73PM+hHB3v28aYRW0lXGu8BNGZVxkTuTjd1HlSFMCNpcNfbMmRhEGt +Ep3qGq+cq7zu72lVEiY8tJliq9zyOm+guFzUQ00pvaXuTUFlshvwlRS+GIGn8U2P +/SVRGqSOqCkidp4f06yElt5QifwzvHT8KvxjPgFA5NfQAXE5i/IoepV53XDhECqO +vsORbc0JT8n8/4hT8qHTno8UNbYK5BQjHlby92v7ZFVgI86Li2zb0HgQSmvpU/qR 
+ibSzg0gEUrWwUR4knTkoKYQwjry2bQ653oNgv0OsnSGEroYOyQ1Q96jOMFKViQEc +BBABCAAGBQJWxLxwAAoJENnYUJL2kkSzPbcH/jl1mYhR4f25pRe1InyR7BJF83YD +hJYIhbBCGqGVenFEy29hco832HkhMUukaos34KZjsWGDFX1IWe6cxOJvBZsDYHua +LCueh5I8/Tmtq+HuebuF0RJtJh7ItJoCrEv7ZyUQmbJ+aHLx2pXSqYUIiWlPvIlG +2/esQlUo7pOub7eEb8U3oKWYgs9HkytMeHSTKiuFJ7mzEyh2fLcgsc2q1XT4Vxuq +ksWxYv8MstTOxrltQ7LyP2QH/BzfqI5yE3UfSSg1sZE2Nh2cIFNWTYVxdx1fBJWG +tTT7l2o99mYwufSLz1UTbGF5PcXeK3sYxN5IJta2FUByaJAWPJonRnojinyJARwE +EAEIAAYFAlf7Qx8ACgkQo/9aebCRiCSTowf+Jm7U7n83AR4MriM1ehGg+QfX9kB3 +jsG1OXgKRpGPIORqxLAniMFGQKP/pqeg2X530HctqjpV+ALG4Ass/kNn4exu5se2 +KuThQMKLK7h7kfqCnrC8ObeCM7X70ny80b2h+749xWZtahpTuQwVrhcAikgPfS2n +XSKdubOyeBH3y0kT2zAoml0MOQsUb6yGycjdnbFrKvfINKfuZvF+z16YOu3eYZ3N +O6dErWQ5iTecuNe0nnn30D8+nWA5JfCxNDPfc0e85dm6xK6GTPdaQd5hpF14TdYZ +u5eT34BXJcmL5hJ6MzM+OFn5CIn2Xa6r6h9AOp5C0o15Qb6SXpUdZrV/34kBHAQQ +AQgABgUCWCj2AQAKCRABFQplW72BAiXGCACSHG54fSeKZysDiX7yUnaUeDf2szdv +egD+OPSVJQhcDdhyC/YnipEN4XFpeIkpxUrBXWYyy5B/ymzDQl95O8vI6TnDpUa+ +bvpkWEAlBK2DuElRojXfPo35ABu0IetQ9xyR+3IzaepHL7Ekf0n0H9vFTmeyYUc3 +B1m7RDwnUJuAlWRt1qQHmOejkzTDBZALeg+BJ5PtnWqCr29+JZB8cwUJ3Ca8Ypbi +CrXWYHu3jlXDDyEhQ73t5OlruOMiYp+opmRySu4rF2d9yJIXnq6uf0WNb6G6JzlV +MOqHKvtmrnwXb9zlFTSXb/NkxNmbYPrTvKmSr09YDC/p9iRkuDSeI/OEiQEcBBAB +CgAGBQJWlDXmAAoJEISlRGJ0Rpv+6/AIAJGPLDwkeCSkBIGwkg5Mtrlc3PNkGsX2 +hb2GP6CUiOeF/UAYU9HcxLv62nK/2qY8o96XY5D/CDOTMmvfr/S2Siyp3u6SVDbE +oj1KX7nTzItfWdk1t/uxfC0+d1zQC0tyJ5O/DHQBDabsZ9REZDqKjhTimilFIWlu +Gov3Hdaa8xkEij9f05REarOBNviaYUxoy9i5Vfo6Uh8jA9XaXw+mS5RIrssa/KlF +fh02wXH5xlExHeepo4g79nFD+lmnE5T9PhfjRnBtogCV3ZBehApS8hJze9JfLnex +7l1DGSPp6ydIyqoWHbk8VYiPMPfHMSlXpaeuprfq8xdBhqMT2a6Fp+KJARwEEgEC +AAYFAlSakYMACgkQlARpDCzjZAx4FAf9GP3vrIvZdZisDqcOoRmKl8iWkY5X3lmx +e5BaQ4qjQ6aUvxsopqLN4ETLTbp8oH9c3sTyshQA0BMtdJFst/ZjhDE9pU90Kel9 +CMbEgq0I5FE5A+348Ovmobe0TUPn2WClwyRGPCe4X0WMEikEHs3Bb1CFzYfbbIe0 +N1M/DqjUvfKv0lc325P7i2DlbDuUoLmNMgHHx6+jFqsxlNCobkq+IrhKLxv27/K3 +13UOzECiPRIbMhHmLHQic9MeJp0bzJiTo1icQVRnim5ZovcpXW2piJQaWqx/TUXG +aRdCjYrJJJZObIi6qnSB7SjdxwJUq6GuTEb/BJElQFnjsxySvTu24YkCGwQQAQIA +BgUCUVSNVAAKCRB+fTNcWi1ewX4xD/d0R2OHFLo42KJPsIc9Wz3AMO7mfpbCmSXc +xoM+Cyd9/GT2qgAt9hgItv3iqg9dj+AbjPNUKfpGG4Q4D/x/tb018C3F4U1PLC/P +Q2lYX0csvuv3Gp5MuNpCuHS5bW4kLyOpRZh1JrqniL8K1Mp8cdBhMf6H+ZckQuXS +hGHwOhGyBMu3X7biXikSvdgQmbDQMtaDbxuYZ+JGXF0uacPVnlAUwW1F55IIhmUH +IV7t+poYo/8M0HJ/lB9y5auamrJT4acsPWS+fYHAjfGfpSE7T7QWuiIKJ2EmpVa5 +hpGhzII9ahF0wtHTKkF7d7RYV1p1UUA5nu8QFTope8fyERJDZg88ICt+TpXJ7+PJ +9THcXgNI+papKy2wKHPfly6B+071BA4n0UX0tV7zqWk9axoN+nyUL97/k572kLTb +xahrBEYXphdNeqqXHa/udWpTYaKwSGYmIohTSIqBZh7Xa/rhLsx2UfgR5B0WW34E +8cTzuiZz//////////////////////////////////////////////////////// //////////////////////////////////////////////////////////////// //////////////////////////////////////////////////////////////// -////////////////////////////////////////iQIcBBABCAAGBQJX+0LWAAoJ -EAJ4If97HP7GahAQAMxf3Nyab2t+xJlFR+/ZCvqMq5rM8iq67ZK5fLG000RjLiBN -5bd6BglAq03l2DuE3b9hdnosKfU3FCeysivn0af0kxjMaH+W+9JSQJ9E5EjO+RgI -JDkn3n6X/lQjVl3N7R6FeaWY6Ug9paSCtAlVlwCfg/rn2jFIiHQb++44nQFpaX4W -uNzZWoy1SOGg32e624fjsgqB0aH2cmY3oGdMFt8FGuzOfa89JGW8P7mUeZsiQQRx -R4y+L7omQ60rlveKZeEo/ZVfSZUVtzM9wplXpUMbF6/XtUC9dmsVrSZePrsAHnjj -bbk0GBKit2UswC8fKdHVz9YiWKuM4QLEWiucYLkcWcHUFyp1Tk9ZeS3R3yPASC4e -WV72IVGS0mjjolcFwatMfYghQ42+sR+G6duEcJSN7sqrdzYxRny7aYz7GFXv1GCE -iz/CzhepHDROpu9KZv6xetyP4xmaunanzzrd7kM23530jFRK53GJ/4p6XlwYA3jN -sxaGoAADOTIwqolgxtvdrNwEeX0pNpFI85BXSJrvBxKseL4o2NlxxvkyrLPIuuU6 -EfnOgMtu5v1jgLkA3ON3eERxl7DM1I2bqFT2+Fpvsme6KFm1o4DepsO4wL9ZKmqU 
-MZs6AxfmUopia93EtsZs801vNNUBmSsh3pvIyXGc/v3v2LJY236rsf0DmticiQIc -BBABCAAGBQJbHUVXAAoJEMIYUlgZ94RRdLEP/jpetLMM956YJJkBbzALzmXFux3K -l3z9+YA/kXgZC6NDNRItBnsxUlOFkBaTSvvq+18RGIDr5St3+cLjZnnOGoR0YY+K -cAEzlOM0GMr118O6Bd+p61CqpA3oV0BErV3jmUe283OBw6q1+2HlDMAv1W1xpRhG -b0UpBS2OvXQnzAHS5hqkSwnB9os5Uitaud/k1kxvS/IklAidb3nsx6CQDvlx3Bvx -Z8WUWDz4uqrAoOyr5xw2G9zVZgbaV8t7i5mHegoie1mQWjo1kVesZ16buKoeire0 -8eIcbFYNBcQReTw6TzunHOJoSg4fSXPhj3g8PjJIuw3uSPbR1cV8DyRwtLXEogul -nC2L+JHh2g7PDenKmNtP2pZByQA7pM7CPDZAsS0IIeaCs2n2kF0I4m61Lntx17XX -T7k5P5Jl9+L7GwGWz7vxxtvsckpJV6LT8YphBWgPA/TYI1vBsHDVJdfaBWKVE+Us -pRJIAhDPIsJHNw4+Y+rbDzsaxXGc7QTspkWSRLfKAkBLcS/xJ7HPDWCu7NMV6p8p -whbzFQ54GyUPbIkGhsMru6G6cRMOiB1pB5XNyMVWmSqHKSxaTfEEdoyS4giu1h2/ -WLZxLsJOKu1ns1BsVmxNEOwX6OuBCq8JMuaIq7EOk/+Xs9TJbXDTQ0GvfnNMvTZo -rYYrySR3xCJ9ju7riQIcBBABCgAGBQJZtcGvAAoJEGKrbC2pNmtMIVgP/0eNCkI5 -HX643HQs3G9xGg8OmyO0Kk5wv0T1BIAwPjA2tzz3iNEmVMDac8/3qeKCfOyEhdJp -qvZxRZ8BKoOkmnIvbwdxPBow8ixdWGLN3ZIeRJL/c9/oxElQ35qyVmCVEkvSKFvp -QAG5mvxq4usMRBeol/f7VSsKR7kqU40GamW1q8ExoLkAmnQAHfHx8dZmMBBG4tgV -vSGwP0gpKBydEI6xtJXGexL6JumvHmmAAnImGQOL+cfv8oaVp9vXRFwrUZsx5ObG -XtV4xeGTr3nd+ZvCoocK6AHXcZiLF3XsnkoAUh7IkTsFPMjQ9w3lb/E8MPjfLrIb -w0WJYyNk4VoMePFYfWjGMU6zVRKwdurV1ndiSC4rZlapqfro78+u8pDoijNpzFsv -my4Y89w80N5l5qyMZ6PMOoZo+iH5hvxITXCtCJHs0QaNzvu8PZSG5Gb4hVn+NcjH -UfqulNxTIsyfISyvbdgQxEmFxSXeHPoMOhvaZn0niWL9JRAAXyM1urOhPG3mo5sq -GPpQu1/DbbkA2oo02Uw/Ngh7MP7ujRhwsnC0BQOEgshkeEzACJ3FwB/HbZ1bd0eM -jhhcMPwT4lbFQFadcFEhBSd96g93xpeLIIVw9+O447MtA8GHHmng+TE7QWFXL/CU -u+n8l7IQtlBSt1KMktSgWEqs6LSvsySDMIETiQIcBBABCgAGBQJZ6mC5AAoJEKhb -Oua8Odf3rvIP/iiehjNNyKMkzELw7xLRXbQ7AXesG+BKkVXBFZ4ertW6B1ovIkfD -mM63Xv3xTQDCWjf/AewDSEF06k3TpV8P1a/Weu5ESnigHah801dk3GoSNs0CWRSL -mZEMwRnyCK968PlZUdIdEr80SCy0pijFtuI2h81GbLZl5ic09jSXu2up+IxMb5w/ -cF7EeHNbyFtdn6WNnYCCWPM442eTpm1241+DCw17MvuOyyUSH23bBc9VePe3VsBX -S0aNAJhZVrAuY3UWFEdnVcwmN0QIO4qTqxApT1jaMjvaP5O7TQ0O1X6nReJ4217D -lb/Vj3FzVZl2f/BLjlQae0kBD/2p8waX8R7KSIvzaWJxtUWroOOgzlZgkzj1coD0 -PK0yysgM0KzoHEJFZcFz2Khde5SbbTz3iWE0KQgLiBuT0MVxRWrJcWq1b4cFeCr6 -C10ppmiTWqMlkWFczhXWZu+83b1uMeV1iXZGC0ldJTdscO8O4o9IXdhjr8BiLm7q -sGuGJCtWZID8+5GlY+A09rDmwh2Kr5R/aBzQ+JPmzbNYvVmqAvMbYnl1IDowxWv0 -w6kduvMfTbUB6UkM/zfsbl4PccxlPXO1yPsiFe+f/HIJMcM0aFGqjxY3SmVtKcDX -qy7w7Q3uTiy0u9MCqXCdpJRlDoMauM65Vcc/i3fR/MZdqPWcHcL8zKjSiQIzBBAB -CgAdFiEExB/CGya6nZqq0a63ajVKIh777qgFAlrMzKYACgkQajVKIh777qjRPA/+ -NV/GvceePkKjxHKsUsFP5r9acmMBWtgyDddv3me3rN2wTR1inUji/ezPxrXOBlKx -UC+6CK1Au3wuQsENRy2vqYrtWS/yc31chzuA4YolpFjy8BlRluobZJOoT9TYeVnE -cZYhBMKV0HpoEXSgb+uca+dnIaFSgMXi/qXYfM0g1IOLcR+wAW+ptBzY0KSpxkqn -qcmrwJPiMbtwExDcY0cAjHdl35MMSFe12KZdST4ZGScaXpzvB95JPeiC6kqPXaa6 -1bgUJteG2n85CZ0O9eSZXt2QSyaQapl8PLkI2cm7C7m12q7OqE2vrOIADnS2KTZh -I7Jh6pJZbvuYvxoc0u1aofmV0IeYcWmE5fT0Hjf8Aw+K3l7DEBAQs/EXyxZ6JUom -TJEQRM8lS7iYwPtuF0Q6c6H5HsmpJ8+zInyeqf4iwdmtu1YWohT8sIjYNHzWSraQ -SXevJ0B+SvERjsZU3RonFbodQBtEJNS/LZ9JM6ROR/XCXFwrXF/X72SN6twZjsMh -uKEv+KwJNyhsOU5uM4TVf+1aFmUeMSBIFfFOjtCeyJ6bmeqpBhme6gFoxgS326pS -JvLf8H76l4CeZACzxStXnoDb/RFucIH/8GLtH/dCzlbv9Atd813+o4Sr9WLD2O/O -agNXDgMiQu3j++RXB1VfMXVnrGy4BwFdvkueR5d85Q+JBBwEEAEIAAYFAlgGbPAA -CgkQemOAneUSdiLOth/+LNI/VXkol7A+9Z3qdIdyqMA3zYqAq1RoV1Szxk5uqvVw -uW7NziOBXr7hgx3JI3m+UlaLovFLCwWfZj4E0eRGmGs4ji82V6+1nczLBXjoETFf -WsNKPOi9VHvi4M5/CBenei8JrVwhlVO6IlQobO4ik09EnB9EzujqqoVpMMARQtgn -3Mo3YxhsgTUCC/A7iO8bQC02wFTyrIbjmhpmICJDdr+kd+18qDgJPZh31m9rYVwF -gaEQU8bQtKgf/5uKX1CohbqF6HJNIsNkbIFWl0A0EK8B/mPPaBYLV5bbSCwhINWo -3NC2pZMhltTQP6ubI1a97nRj9u+stg/WD/VlICgxIUhx3iawGvjIV49fPM1b9xwx 
-caxosg21OrVpjCcYFoMQUgsDzwsMZz1L+F/Ut2R/KD3ShXE+yFu+h9ZVIFx+tzd9 -Tt6f8ApHbw9McAL2jldouJgPqfZoK+yl3PzdDgJSvF4QsINBGZmicNwzEvBgaxj5 -PubBby6FBhMrsd5oHn5S7yAaA8wGlZklehyLhN4C7/sZmisIGatfVJJYPP0h0Nfb -tfZ90o28aapZqwCA5R2vXg/oBre5pF9+D95KpdRXHZlITfeIgN4bT5uhfucw2CRy -jWDUfLRkh+n4gpRiub5Wq8lqcrFP98v4tmyNlgufPe9QZNA1wSI2+/WlN4VNZXjf -54O0AWdStM6EbZrakSBB/riY6mv4Mzch1aEVF0wNJSmSw1pWr1TEDGvUd1qDp4KB -qaX53S0eONpykGHnpY5qfm51QowLlqmNQP4EhtmDq2tiFTvIR85MJyUaE+BDIDOr -mpi7w9xODXL74Tx4FGcL3SqPwB7jdUEb5ZqACZVwTsb0pERTuXyN1S1vWxjz8wk5 -k3YUE/eLaXxIvgSbUkxB3/kd9CYhn259HivpfuT2r6SieQe9wUOQdQ9LybKjKLfb -5H77I58eq9yR3KbqhQcdfxV99P/1x89nWkv9Z/hES23rlsGK1oMDRiJyD/Tk8otA -8Wffa2nkNwzLVPRm+TJd9JplA2u/RPO/79Cfqa2RU3Qwf0GSU4qARLR8REJ2KS6N -sg80j3s2Nj0OHp6k+QBPMo2Fi8Dde29SJNB99x7Gf+/QdtO/QtU8jV5a+jVO9ZnY -5usRpYg9h6UOUlHwxtWL7Aw48oMy2nVMvDGFGcx72EWoTXp9NX+i0Pz235Gxf3C4 -b498vQMR/COA2c0JcwiYK1FFKSDFOV4aFp9UXWeP1pyZh27iDDCO+ZX0Arrbt7y9 -rhwXEEd2O2FtIyGINa7QdHDJLcv75KX/obtffzijp8DGS78uIVt+EnlyEdcrDn0d -+XSTM8HMJW/yjNaeV9n4/jIHrMMpWefft1tue5TFDrkBDQRKoO2QAQgA2uKxSRSK -pd2JO1ODUDuxppYacY1JkemxDUEHG31cqCVTuFz4alNyl4I+8pmtX2i+YH7W9ew7 -uGgjRzPEjTOm8/Zz2ue+eQeroveuo0hyFa9Y3CxhNMCE3EH4AufdofuCmnUf/W7T -zyIvzecrwFPlyZhqWnmxEqu8FaR+jXK9Jsx2Zby/EihNoCwQOWtdv3I4Oi5KBbgl -xfxE7PmYgo9DYqTmHxmsnPiUE4FYZG263Ll1ZqkbwW77nwDEl1uh+tjbOu+Y1cKw -ecWbyVIuY1eKOnzVC88ldVSKxzKOGu37My4z65GTByMQfMBnoZ+FZFGYiCiThj+c -8i93DIRzYeOsjQARAQABiQJEBBgBAgAPAhsCBQJUA0bBBQkQ5ycvASnAXSAEGQEC -AAYFAkqg7ZAACgkQdKlBuiGeyBC0EQf5Af/G0/2xz0QwH58N6Cx/ZoMctPbxim+F -+MtZWtiZdGJ7G1wFGILAtPqSG6WEDa+ThOeHbZ1uGvzuFS24IlkZHljgTZlL30p8 -DFdy73pajoqLRfrrkb9DJTGgVhP2axhnOW/Q6Zu4hoQPSn2VGVOVmuwMb3r1r93f -Qbw0bQy/oIf9J+q2rbp4/chOodd7XMW95VMwiWIEdpYaD0moeK7+abYzBTG5ADMu -ZoK2ZrkteQZNQexSu4h0emWerLsMdvcMLyYiOdWP128+s1e/nibHGFPAeRPkQ+MV -PMZlrqgVq9i34XPA9HrtxVBd/PuOHoaS1yrGuADspSZTC5on4PMaQgkQ7oy8noht -3YmJqQgAqq0NouBzv3pytxnS/BAaV/n4fc4GP+xiTI0AHIN03Zmy47szUVPg5lwI -EeopJxt5J8lCupJCxxIBRFT59MbE0msQOT1L3vlgBeIidGTvVdrBQ1aESoRHm+yH -Is7H16zkUmj+vDu/bne36/MoSU0bc2EOcB7hQ5AzvdbZh9tYjpyKTPCJbEe207Sg -cHJ3+erExQ/aiddAwjx9FGdFCZAoTNdmrjpNUROno3dbIG7fSCO7PVPCrdCxL0Zr -tyuuEeTgTfcWxTQurYYNOxPv6sXF1VNPIJVBTfdAR2ZlhTpIjFMOWXJgXWiip8lY -y3C/AU1bpgSV26gIIlk1AnnNHVBH+YheBBAWCAAGBQJaXmY7AAoJEBu4nAYCNnRJ -eVEA/i3qZKJAmsyAZDRZ2a2h3AhEoFw1uX/H/naTsWVaOWdLAP9RZD+9A+jMOR20 -bnmyGSejGueiO/h44NqVUrPrydOdB4kCWwQYAQIAJgIbAhYhBKPE8Pl5yqIs26j1 -Eu6MvJ6Ibd2JBQJbZ+pFBQkVGmi1ASnAXSAEGQECAAYFAkqg7ZAACgkQdKlBuiGe -yBC0EQf5Af/G0/2xz0QwH58N6Cx/ZoMctPbxim+F+MtZWtiZdGJ7G1wFGILAtPqS -G6WEDa+ThOeHbZ1uGvzuFS24IlkZHljgTZlL30p8DFdy73pajoqLRfrrkb9DJTGg -VhP2axhnOW/Q6Zu4hoQPSn2VGVOVmuwMb3r1r93fQbw0bQy/oIf9J+q2rbp4/chO -odd7XMW95VMwiWIEdpYaD0moeK7+abYzBTG5ADMuZoK2ZrkteQZNQexSu4h0emWe -rLsMdvcMLyYiOdWP128+s1e/nibHGFPAeRPkQ+MVPMZlrqgVq9i34XPA9HrtxVBd -/PuOHoaS1yrGuADspSZTC5on4PMaQgkQ7oy8noht3YnJVwf/f6KY5ikoA9js2MMu -zBuOuoopwPxIvm2s937zXVJPTdT389GOhGxhmoZD14yDgo3pHHSUOKlOV0Lth+p0 -E/hiJ192wn/owMQ5W7NQd7BbAetoFWgwjrxgbt0PdEwDT/ECqflCwMTJkeV0sRmO -r+pcIkCSqoba2H2GdgWWay+jjq9bvz6MjQ/oxb+oDGInl4C81/S9PWk/gxqA49Pw -1nrNhMk15A8TeSJI33AUwRhygnlLDJ84dCpGtnL3pcMEIXcXsF+uBw3SH4hDjP0F -JrzIHFxZ8MmK6GA78qzYkays8ECE6RJRt2nGxvb8zMBKBuI3TTCwawR6NUfG9fv+ -I4Tesw== -=i53l +////////iQIcBBABAgAGBQJW5/QxAAoJEPvqMRCoU3iU3SkP+wRdT8z3EczONAcv +Jsu7ZHgh1ggzsmozTciSuaAZRfvFmUyB9h63cKNTS86CIrqHmMZrtHRu9llkNNiE +4Nj8JAAsMPSR4YaKHfHxc3bOH0iWtcPxtIiQEwYs/7oP0/YzFAxcUmZBDeLvy7aK +pFqdPUcEhMTWmscVajjJXv+6G8IZwYGFAFvSkYSimZP102gmgKQhcfPDqmlqy78F +t+T5MfIha1Q950iZyAM3j46lVWMkBaKPQKq1G3kKaL7Sy3o75y4N7lgzY5WfYnBY 
+VAU8eUjv408FoFKAYFTsA3RG7P2VROoNefPaLRSgEgZPR6efVux9Z3R4zOUQuljv +q8r00zMS0t5RVcDp1gCNZQ9xv2QeN/ZDld0U0IbDQRrlT15+l3SthkXapMMvbSVK +EILMgaL+ysl7raMW/Zqv1KN2ByVJsPjWnwWCPnn0fMFWr15ExzfZBUNh2rZlQ56j +BsJanHF69Th0vI7JNm7/Gd5FRWL8RcXzAL/UbVDuyGaO2JPztQ2dL1lnHVL5mgOM +js90YpADenNR5XkQxuazTRiQIOXfoZhgPwe99S9vEdYM6UPYZjt8uo1bmFEkV0CG +jWngJc2ySSurftXPFJ7gzFhDbx70Ga/1lw/4H2RPs9ZiZKKTtiGcDLhDxSuX5z3M +gzzD3CNp7uKJQlTIg4aFeX9JWQvUiQIcBBABCAAGBQJX+0LWAAoJEAJ4If97HP7G +ahAQAMxf3Nyab2t+xJlFR+/ZCvqMq5rM8iq67ZK5fLG000RjLiBN5bd6BglAq03l +2DuE3b9hdnosKfU3FCeysivn0af0kxjMaH+W+9JSQJ9E5EjO+RgIJDkn3n6X/lQj +Vl3N7R6FeaWY6Ug9paSCtAlVlwCfg/rn2jFIiHQb++44nQFpaX4WuNzZWoy1SOGg +32e624fjsgqB0aH2cmY3oGdMFt8FGuzOfa89JGW8P7mUeZsiQQRxR4y+L7omQ60r +lveKZeEo/ZVfSZUVtzM9wplXpUMbF6/XtUC9dmsVrSZePrsAHnjjbbk0GBKit2Us +wC8fKdHVz9YiWKuM4QLEWiucYLkcWcHUFyp1Tk9ZeS3R3yPASC4eWV72IVGS0mjj +olcFwatMfYghQ42+sR+G6duEcJSN7sqrdzYxRny7aYz7GFXv1GCEiz/CzhepHDRO +pu9KZv6xetyP4xmaunanzzrd7kM23530jFRK53GJ/4p6XlwYA3jNsxaGoAADOTIw +qolgxtvdrNwEeX0pNpFI85BXSJrvBxKseL4o2NlxxvkyrLPIuuU6EfnOgMtu5v1j +gLkA3ON3eERxl7DM1I2bqFT2+Fpvsme6KFm1o4DepsO4wL9ZKmqUMZs6AxfmUopi +a93EtsZs801vNNUBmSsh3pvIyXGc/v3v2LJY236rsf0DmticiQIcBBABCgAGBQJV +fZS1AAoJEFuCGoE7lKfEYBsP+gOUOmmHg0c09v/iPkel7JJGcNnipk4z8xl5nTxX +ay4nTY6TKtelOhQUBqDHBqdOe8PNWVutXqSDQKyzRPvXJRYgF2i3IUHq/GtCK2yP +aGV7XnYfEvddXmjAlYS9LkHcYH7zp7vLMW/8HgZ0JjeHAfmNF5+Q62rkDUMVBnSR +VlA+1mc3/o1O5p/Kn1Tt47kCkLJUMNyBxXl9BnbqJtFWKzoqgMovr2QEIZeUQzlJ +KygexnU4tCP5q5VefVqaVnEHkluXJq9knYK/G3c2Pet/GEDe5FkukzouQvcqGauj +jvc/pmT7VISkeO4YXvmfctOpggJ9J/ohxg4RgvqaRYdGoFgnNQMEnFLIxd5+8Sb4 +8mskS59rVwwOllWsbR+6T/ZDW8FYmpNzzuK7Af/JoOcWy7/j0fwOhJa4qX5aKgph +5S/rE9pvhmhbkgZta5m8GQ9bHInQnbefud5axRtSyx4cG1ZB/mRLFD7+kkVfW/Kr +tdP/7PuuYtIP/nEhs9HnwOmcoRI1WpDGERC6eUc+Dgc5sFD16tvp+2PW8/EBAWQK +55b9jZ4Uws0D/3Tn8BE0CP1lJCZzIzKqbO4+VhWNq0eJgwZWTUNoXQuFP1gOhJT+ +yqtxBRBP9YAOg+bO5kdjqS9IinbbYoaMkY8rUmqrF5r5XNob9mJzgF522npjWOx4 +P+7KiQIcBBABCgAGBQJZtcGvAAoJEGKrbC2pNmtMIVgP/0eNCkI5HX643HQs3G9x +Gg8OmyO0Kk5wv0T1BIAwPjA2tzz3iNEmVMDac8/3qeKCfOyEhdJpqvZxRZ8BKoOk +mnIvbwdxPBow8ixdWGLN3ZIeRJL/c9/oxElQ35qyVmCVEkvSKFvpQAG5mvxq4usM +RBeol/f7VSsKR7kqU40GamW1q8ExoLkAmnQAHfHx8dZmMBBG4tgVvSGwP0gpKByd +EI6xtJXGexL6JumvHmmAAnImGQOL+cfv8oaVp9vXRFwrUZsx5ObGXtV4xeGTr3nd ++ZvCoocK6AHXcZiLF3XsnkoAUh7IkTsFPMjQ9w3lb/E8MPjfLrIbw0WJYyNk4VoM +ePFYfWjGMU6zVRKwdurV1ndiSC4rZlapqfro78+u8pDoijNpzFsvmy4Y89w80N5l +5qyMZ6PMOoZo+iH5hvxITXCtCJHs0QaNzvu8PZSG5Gb4hVn+NcjHUfqulNxTIsyf +ISyvbdgQxEmFxSXeHPoMOhvaZn0niWL9JRAAXyM1urOhPG3mo5sqGPpQu1/DbbkA +2oo02Uw/Ngh7MP7ujRhwsnC0BQOEgshkeEzACJ3FwB/HbZ1bd0eMjhhcMPwT4lbF +QFadcFEhBSd96g93xpeLIIVw9+O447MtA8GHHmng+TE7QWFXL/CUu+n8l7IQtlBS +t1KMktSgWEqs6LSvsySDMIETiQIcBBABCgAGBQJZ6mC5AAoJEKhbOua8Odf3rvIP +/iiehjNNyKMkzELw7xLRXbQ7AXesG+BKkVXBFZ4ertW6B1ovIkfDmM63Xv3xTQDC +Wjf/AewDSEF06k3TpV8P1a/Weu5ESnigHah801dk3GoSNs0CWRSLmZEMwRnyCK96 +8PlZUdIdEr80SCy0pijFtuI2h81GbLZl5ic09jSXu2up+IxMb5w/cF7EeHNbyFtd +n6WNnYCCWPM442eTpm1241+DCw17MvuOyyUSH23bBc9VePe3VsBXS0aNAJhZVrAu +Y3UWFEdnVcwmN0QIO4qTqxApT1jaMjvaP5O7TQ0O1X6nReJ4217Dlb/Vj3FzVZl2 +f/BLjlQae0kBD/2p8waX8R7KSIvzaWJxtUWroOOgzlZgkzj1coD0PK0yysgM0Kzo +HEJFZcFz2Khde5SbbTz3iWE0KQgLiBuT0MVxRWrJcWq1b4cFeCr6C10ppmiTWqMl +kWFczhXWZu+83b1uMeV1iXZGC0ldJTdscO8O4o9IXdhjr8BiLm7qsGuGJCtWZID8 ++5GlY+A09rDmwh2Kr5R/aBzQ+JPmzbNYvVmqAvMbYnl1IDowxWv0w6kduvMfTbUB +6UkM/zfsbl4PccxlPXO1yPsiFe+f/HIJMcM0aFGqjxY3SmVtKcDXqy7w7Q3uTiy0 +u9MCqXCdpJRlDoMauM65Vcc/i3fR/MZdqPWcHcL8zKjSiQIcBBMBAgAGBQJWOIXX +AAoJEE8/UHhsQB3OlqIP/3lofZqqiV+uoiTdV91Tjmij9Rioz0kohpQsm/tau6JK 
+XItjG7DaG3XPL6NPckNGI+twD393Hdb/VkqatbpxLeJUQLoCjV3M02p6zDJHQ5wP +iXgC/8HZVdcP2jlvnrkg4N5dpLJJK4wpZ/KXMsw/SrBj047ZnySIl5qw9ytXrQm5 +8R7FBB/ANjENvo9C3LEsaDAKv0TL4vyMpz52TjUfgoz68g31Sl6KKOw1HG+dUB69 +M7MARSVEgaWUOm33eM12QQtCTndJQDg+LeYjfvfHbcnMZnniCZR7rHGxAhBzgKQq +JU/JizfZ4FDcBkABhsUQgkSeg3llFVzSU1iofT37A5cbQr0xUShPQwKgkESryuyL +059neVsAhDY/hFeyWCKtVQ12i3H7cvzRlfYxD8c/mN5TDiC70Cft1pcLU++u/6Ga +1kuzA7rkfoUocrCSjqb9FwLBokWcwbi7SyA8YD5m7W8sPINx7reokK7mvDsbOxpB +p/y/yT5ZpTjK3/MNgESrq2N+Qg9EFC4Srlg8wzovn0zamzb2xDJpLfrV/t2DsFrV +f2SWFd/YMjkljOLQhbsEpQIdrfS8/hNGgfoUIiko8lqNi50sGQ7kO9kirmjCZaAu +OaOi8U0K1C9RvVGTN3oGrxzRRXeqt2Z3bBqs5Lz5lrCNkerWZYXcItIyZ415i/Fs +iQQcBBABCAAGBQJYBmzwAAoJEHpjgJ3lEnYizrYf/izSP1V5KJewPvWd6nSHcqjA +N82KgKtUaFdUs8ZObqr1cLluzc4jgV6+4YMdySN5vlJWi6LxSwsFn2Y+BNHkRphr +OI4vNlevtZ3MywV46BExX1rDSjzovVR74uDOfwgXp3ovCa1cIZVTuiJUKGzuIpNP +RJwfRM7o6qqFaTDAEULYJ9zKN2MYbIE1AgvwO4jvG0AtNsBU8qyG45oaZiAiQ3a/ +pHftfKg4CT2Yd9Zva2FcBYGhEFPG0LSoH/+bil9QqIW6hehyTSLDZGyBVpdANBCv +Af5jz2gWC1eW20gsISDVqNzQtqWTIZbU0D+rmyNWve50Y/bvrLYP1g/1ZSAoMSFI +cd4msBr4yFePXzzNW/ccMXGsaLINtTq1aYwnGBaDEFILA88LDGc9S/hf1Ldkfyg9 +0oVxPshbvofWVSBcfrc3fU7en/AKR28PTHAC9o5XaLiYD6n2aCvspdz83Q4CUrxe +ELCDQRmZonDcMxLwYGsY+T7mwW8uhQYTK7HeaB5+Uu8gGgPMBpWZJXoci4TeAu/7 +GZorCBmrX1SSWDz9IdDX27X2fdKNvGmqWasAgOUdr14P6Aa3uaRffg/eSqXUVx2Z +SE33iIDeG0+boX7nMNgkco1g1Hy0ZIfp+IKUYrm+VqvJanKxT/fL+LZsjZYLnz3v +UGTQNcEiNvv1pTeFTWV43+eDtAFnUrTOhG2a2pEgQf64mOpr+DM3IdWhFRdMDSUp +ksNaVq9UxAxr1Hdag6eCgaml+d0tHjjacpBh56WOan5udUKMC5apjUD+BIbZg6tr +YhU7yEfOTCclGhPgQyAzq5qYu8PcTg1y++E8eBRnC90qj8Ae43VBG+WagAmVcE7G +9KREU7l8jdUtb1sY8/MJOZN2FBP3i2l8SL4Em1JMQd/5HfQmIZ9ufR4r6X7k9q+k +onkHvcFDkHUPS8myoyi32+R++yOfHqvckdym6oUHHX8VffT/9cfPZ1pL/Wf4REtt +65bBitaDA0Yicg/05PKLQPFn32tp5DcMy1T0ZvkyXfSaZQNrv0Tzv+/Qn6mtkVN0 +MH9BklOKgES0fERCdikujbIPNI97NjY9Dh6epPkATzKNhYvA3XtvUiTQffcexn/v +0HbTv0LVPI1eWvo1TvWZ2ObrEaWIPYelDlJR8MbVi+wMOPKDMtp1TLwxhRnMe9hF +qE16fTV/otD89t+RsX9wuG+PfL0DEfwjgNnNCXMImCtRRSkgxTleGhafVF1nj9ac +mYdu4gwwjvmV9AK627e8va4cFxBHdjthbSMhiDWu0HRwyS3L++Sl/6G7X384o6fA +xku/LiFbfhJ5chHXKw59Hfl0kzPBzCVv8ozWnlfZ+P4yB6zDKVnn37dbbnuUxQ6I +XgQQFggABgUCWl5mOwAKCRAbuJwGAjZ0SXlRAP4t6mSiQJrMgGQ0WdmtodwIRKBc +Nbl/x/52k7FlWjlnSwD/UWQ/vQPozDkdtG55shknoxrnojv4eODalVKz68nTnQeJ +ARwEEAECAAYFAk93ElwACgkQw/arJTtbsFxzLwgAlK9u7pGTBW1POc1ca0YVepWw +I//IkwCBTaWEswCXrK9QyT0itHIpmWjHEV4E5upDe6t0tCpd4MgmaGsijGLHky/Z +W5JQnu+P0bFOz7Dq+V288dzgHMlZHxgAtOeB/JRREy4ldXoHGx5e92rZaE551Km0 +uAYoWBkBDEb8txTOUsRLfYfUiwQeeFSFuaLzKutHuxOLYoPlcFQl/pwN4RvAFBB3 +QwOuvSg857vAslI20htiPSFcBC6DkB7MmuHR1a8GokhnGb0cZOwxz52emBZqZW9w +Exd1fG0pq75fEF+vfnNUUPKU25QuvyGPhma04oogsJPsEI1DkemRVNceu7aTBokB +MwQQAQgAHRYhBCBZ45m5ND49iWNTUvFOWAEoAwsZBQJan/mIAAoJEPFOWAEoAwsZ +FkcH/RRwfRTdhhVzYTxka4LUs336LOXHMVxhSrs5jaCc3HkDaXnFm7FrswhuYDTi +pUToE80bCFffITavCVoZVYhB6vnzlMLe5u6Zz0UpgxiFvsgKOMBxrKoDtGOvb4sO +ukceKxvoNgA3Y6hX6OSrkta0DsnheTDCSj4/Erzy8VnH456XQ4Ozjp8ybRuRT74k +npLQ3OpDGnO+yJxdlrLSwcpIcaXYbaGEJPLmHSqMQ0FjKjQxIdqSZAChCzJx5fPf +LojU4C6oDkKDQAulFlSEw71B6qKvriNdmVusdpsFQxViEJ01LJ4RJzyJTP81B4NA +bk5lL+f/cel71nySZB4rPGBAV12JAhwEEAEIAAYFAlsdRVcACgkQwhhSWBn3hFF0 +sQ/+Ol60swz3npgkmQFvMAvOZcW7HcqXfP35gD+ReBkLo0M1Ei0GezFSU4WQFpNK +++r7XxEYgOvlK3f5wuNmec4ahHRhj4pwATOU4zQYyvXXw7oF36nrUKqkDehXQESt +XeOZR7bzc4HDqrX7YeUMwC/VbXGlGEZvRSkFLY69dCfMAdLmGqRLCcH2izlSK1q5 +3+TWTG9L8iSUCJ1veezHoJAO+XHcG/FnxZRYPPi6qsCg7KvnHDYb3NVmBtpXy3uL +mYd6CiJ7WZBaOjWRV6xnXpu4qh6Kt7Tx4hxsVg0FxBF5PDpPO6cc4mhKDh9Jc+GP +eDw+Mki7De5I9tHVxXwPJHC0tcSiC6WcLYv4keHaDs8N6cqY20/alkHJADukzsI8 
+NkCxLQgh5oKzafaQXQjibrUue3HXtddPuTk/kmX34vsbAZbPu/HG2+xySklXotPx +imEFaA8D9NgjW8GwcNUl19oFYpUT5SylEkgCEM8iwkc3Dj5j6tsPOxrFcZztBOym +RZJEt8oCQEtxL/Ensc8NYK7s0xXqnynCFvMVDngbJQ9siQaGwyu7obpxEw6IHWkH +lc3IxVaZKocpLFpN8QR2jJLiCK7WHb9YtnEuwk4q7WezUGxWbE0Q7Bfo64EKrwky +5oirsQ6T/5ez1MltcNNDQa9+c0y9NmithivJJHfEIn2O7uuJAjMEEAEKAB0WIQTE +H8IbJrqdmqrRrrdqNUoiHvvuqAUCWszMpgAKCRBqNUoiHvvuqNE8D/41X8a9x54+ +QqPEcqxSwU/mv1pyYwFa2DIN12/eZ7es3bBNHWKdSOL97M/Gtc4GUrFQL7oIrUC7 +fC5CwQ1HLa+piu1ZL/JzfVyHO4DhiiWkWPLwGVGW6htkk6hP1Nh5WcRxliEEwpXQ +emgRdKBv65xr52choVKAxeL+pdh8zSDUg4txH7ABb6m0HNjQpKnGSqepyavAk+Ix +u3ATENxjRwCMd2XfkwxIV7XYpl1JPhkZJxpenO8H3kk96ILqSo9dprrVuBQm14ba +fzkJnQ715Jle3ZBLJpBqmXw8uQjZybsLubXars6oTa+s4gAOdLYpNmEjsmHqkllu ++5i/GhzS7Vqh+ZXQh5hxaYTl9PQeN/wDD4reXsMQEBCz8RfLFnolSiZMkRBEzyVL +uJjA+24XRDpzofkeyaknz7MifJ6p/iLB2a27VhaiFPywiNg0fNZKtpBJd68nQH5K +8RGOxlTdGicVuh1AG0Qk1L8tn0kzpE5H9cJcXCtcX9fvZI3q3BmOwyG4oS/4rAk3 +KGw5Tm4zhNV/7VoWZR4xIEgV8U6O0J7InpuZ6qkGGZ7qAWjGBLfbqlIm8t/wfvqX +gJ5kALPFK1eegNv9EW5wgf/wYu0f90LOVu/0C13zXf6jhKv1YsPY785qA1cOAyJC +7eP75FcHVV8xdWesbLgHAV2+S55Hl3zlD4kBUwQTAQIAPQIbAwYLCQgHAwIEFQII +AwQWAgMBAh4BAheAFiEEo8Tw+XnKoizbqPUS7oy8noht3YkFAltn6jwFCRhLy9EA +CgkQ7oy8noht3YkhfAf+L/XXwlc/4k/sWL3A4Kxe2LejqrrfSGdzo6A9JQTkwuGz +b5t2UbynACNpbYxFlbdlg2zOH2rBx72Yjg4EYSyzPEOmCMvwAO3ekBmreO8UyPV3 +8b3c6mss9JxTenkKokFtBqsAnUhryykaGlQ8fZs87oXbOtpHZL48DG2TlSiQ2k4j +3YjiXnsHlPZpDPfVHrU1wlcxciI3SEPQNUxcRwHXkGtAcXK2P4fmRcDSXcgISh43 +Dg9ikV3yPLlJuxa887/uQe2ytHNOCgC9GhGyCOfQV09lr7mKpfJmz2YR0xZ+NGd6 +n5Tvs5GpKwoc30zo9eOQf6TAnQAX6w0NWHhKQEJCFYkBMwQQAQoAHRYhBIOZbqYq +gaZcXFp0j2nPQzY7zTQkBQJcP+D4AAoJEGnPQzY7zTQk0TAIAI41zJkJuXpBfASU +sr6n2BcXWPvodKDg1mQ+qJNPiLYWPCLqau1eYSR5OFXjoBFL8KiIPY3AGjI5jrn0 +aOityLm4p0PDgLYZ7VnPX2YPrMgIMIbQ471K8OFf9H2mRJp2bCXEIFQXRA75xrB0 +T/1TLTL+mz/2YF1oCPHU8ElT1nfFqAx0Nd3XpkhNCxn2K5687+6lG2YWjIXDSY5H +Hnl4JFtv4DBz4lyvmSz55r2WYcBSEVvhoTLOILvVbC0eAh1JOPAIls6ARuaOSkRP +gx+354QnXsNPIXEP1i11MfIufFsJLIN+5lyLOaMpM/BEB5jSEw7DX2N5t5SkONC/ +VtTkwIeJAjMEEAEIAB0WIQRHvH3oPUYui+0YqoYSJNvSmaT18wUCXDmNnQAKCRAS +JNvSmaT18/i3D/0ThbZLyrhhCCkxeS1AwYsTLKz6tzh26z1wNYM1RGhD0OnyRgI4 +FZDpwyAtMMS+R3wMC/M16Erx1xa5P2uvvUq8azki/rwVzyixtsZBzsTnnGrUOO72 +RFIz8HNEhbKvPMfmXkWgR1vVQihMIfU3ca4gMLldxbC6+I6vMY8nEgU5MGy39KbZ +z87C8fhtdxQqvKvwqebxMgvuLwf0UX6tR2Jn+gTzX6MCOGNJbIChuresPz1MJ1DB +MYsIpSUvOE0pt9wCNmUWHEUMGLSXs5N27kYmrNeR/WM7J/Az510kfhTDgteRZHea +lnPHeVqgfaD806Zkhb82Q7MNfu+FYo9tGY0KagEn7zQkrkMeVAJzF0+zXXG25FBZ +yS5jRBMICEa1XC5r2EORDwSyP8HZvJaMz2/NeclVaGLNNqIpq02/6O9zvyr1Xoo/ +ZwkF/n6sMP4zAmRO2NJ/t0aaI0g4ytgJ7dcZqGlVXeYSzYmMKPgtvqYwKRMJ+WmQ +GBuLOKEQp+lQLCbx/TRU62T46S4vzQSjITk/Huu010xagbrPhw3o4otMGLiJmIZe +YxDosDKpimVagPEHQzmZGkDWnBqTFUyTy5rJp9pO+43ZKkCknB4rOirjxu/idjbW +XAWb/7cQDTaSvHlFrEw41F0KrrGwTpLJthE81zgXskBNDMsUPSSArH2Hm4kCOQQS +AQoAIxYhBCkQSkbFYVv5eKCD8gwgfwey8ytnBQJbrjRTBYMHPoPpAAoJEAwgfwey +8ytnerYQAKVWdjbCDxVgzDiahizkfZFaMPL4c3FCQ1ty4OgppDFMqDMMzlYOV3MW +4bflgZddfSzvzAPMGDxeoQ0neBt8nRguKxuw2GiZRsMNfyxE9Bu7sBPwKhur/AIH +f7ZPkmntXVgWVJJJM7G5l7r+9VwMpaQCH1sNCkccuOHHPGZrk+rGxRKJN/2g39bt +ba0z2Sm3N1lkdQaZTmda1lYZ0XODySrKsisW+9iLDaPddZn2FtjM9/pMCm+ASmeU +FboDcre48PKD6BC7gLzX+jDU3afQVJjHRBLMjO0fdJAbgFtlD5fZ8xAoKyKHob5M +5uhXiFc/XLpwu4FmZ86/ugDY0hbNb9xwf7g3EczVYeRg5Xqce8stMF0upXf081rm +ru6RmsTGuIZu0zhEntRK/f0mDejn+D3xlCqBd4gn8UVzQC3X1IK2S41yOgX9lwO0 +AMUuNcnA4tlcOVfzTXVM3QZ7Ifr2FSVenrbTwXwPgcF5lKGURhX2wnTi/rdA8HG+ +cprIZ1Iingn0nacKyJMzIZ0x367Ifm5rPOWHeCZJdtC4B3wIn7da4w62AqopD/T1 +7F82IbkTdDkonwGhRMEJSCRvIWi08+2Dz0F0Gm5WIV0YZIb3Ca8cXdPy+114ru0q 
+GmqyXjmuTiSU9W/u2KqsRSfgvDWqMRMdSavvI0QTqLI45H3CBRO9iQFTBBMBCgA9 +AhsDBgsJCAcDAgQVAggDBBYCAwECHgECF4AWIQSjxPD5ecqiLNuo9RLujLyeiG3d +iQUCX7TTxQUJHJi1WgAKCRDujLyeiG3diVtBB/9+uQeOjXy5EFZrZXXnX2HsdMJX +ekP4FHiUMqZ3GA6KM4ypPmnpPfZ9bO+8vg56kVjpt8EzUKme3cs/oqPknoDZXnrA +4xlOCOd/oyLSatyAZXlQ5GV5Xr5TAQW2M/Wj2m7vRxO8tHoocmD3sI8/97cpbShg +bkyyjJlv0rs695Hws/gsyyxRTPZCtd0HeLBvy4L2ikTubebg9FTIfqq6AIpk/rIl +Xh5zio3PapclnrbaWXAHt1dCBiXqAIrDXNlaq6XnMJjXG9CAXtAmK2dbgy57TGgR +3JDCH2boYVNp4451ZY6TrGuOG72Dt0KHUhVluEWbm3aYHS4v7L6e2mADRnQYuQEN +BEqg7ZABCADa4rFJFIql3Yk7U4NQO7GmlhpxjUmR6bENQQcbfVyoJVO4XPhqU3KX +gj7yma1faL5gftb17Du4aCNHM8SNM6bz9nPa5755B6ui966jSHIVr1jcLGE0wITc +QfgC592h+4KadR/9btPPIi/N5yvAU+XJmGpaebESq7wVpH6Ncr0mzHZlvL8SKE2g +LBA5a12/cjg6LkoFuCXF/ETs+ZiCj0NipOYfGayc+JQTgVhkbbrcuXVmqRvBbvuf +AMSXW6H62Ns675jVwrB5xZvJUi5jV4o6fNULzyV1VIrHMo4a7fszLjPrkZMHIxB8 +wGehn4VkUZiIKJOGP5zyL3cMhHNh46yNABEBAAGJAkQEGAECAA8FAkqg7ZACGwIF +CQWjmoABKQkQ7oy8noht3YnAXSAEGQECAAYFAkqg7ZAACgkQdKlBuiGeyBC0EQf5 +Af/G0/2xz0QwH58N6Cx/ZoMctPbxim+F+MtZWtiZdGJ7G1wFGILAtPqSG6WEDa+T +hOeHbZ1uGvzuFS24IlkZHljgTZlL30p8DFdy73pajoqLRfrrkb9DJTGgVhP2axhn +OW/Q6Zu4hoQPSn2VGVOVmuwMb3r1r93fQbw0bQy/oIf9J+q2rbp4/chOodd7XMW9 +5VMwiWIEdpYaD0moeK7+abYzBTG5ADMuZoK2ZrkteQZNQexSu4h0emWerLsMdvcM +LyYiOdWP128+s1e/nibHGFPAeRPkQ+MVPMZlrqgVq9i34XPA9HrtxVBd/PuOHoaS +1yrGuADspSZTC5on4PMaQqpkCACiHhL07FWUg+W3uRQLnt+jMOqauaPWfJfPrK+V +mZZ3Q5KRXgQ1ciwIq9D/GKcnfqVqLeSFGGF3xrt24q9lETQYKdcCQGqkPdmBpYgF +eg71c4zviaADtQDtr93/RaGV3gC37r0WV6BRPU7NlZHHlDz/XaUz+NZIEslo/tmZ +yV8/yZlaItJI9qefzoA2aBJFHKYdtgLWo7IIAthchxVK8fbpc6Sopp/9K0GvXM/6 +Ijpu7H0NMVp7PGwuFbtmbwLR3GkyePmQeoMs6T1wn/l06JSIJVbZGcQC72d0KQrX +Y5rB2h/PKvrIgmmcvpOwDm4WpSizPas48p54M62u5Kjj3Q9MiQJEBBgBAgAPAhsC +BQJQPjNzBQkJX6zhASnAXSAEGQECAAYFAkqg7ZAACgkQdKlBuiGeyBC0EQf5Af/G +0/2xz0QwH58N6Cx/ZoMctPbxim+F+MtZWtiZdGJ7G1wFGILAtPqSG6WEDa+ThOeH +bZ1uGvzuFS24IlkZHljgTZlL30p8DFdy73pajoqLRfrrkb9DJTGgVhP2axhnOW/Q +6Zu4hoQPSn2VGVOVmuwMb3r1r93fQbw0bQy/oIf9J+q2rbp4/chOodd7XMW95VMw +iWIEdpYaD0moeK7+abYzBTG5ADMuZoK2ZrkteQZNQexSu4h0emWerLsMdvcMLyYi +OdWP128+s1e/nibHGFPAeRPkQ+MVPMZlrqgVq9i34XPA9HrtxVBd/PuOHoaS1yrG +uADspSZTC5on4PMaQgkQ7oy8noht3Yn+Nwf/bLfZW9RUqCQAmw1L5QLfMYb3GAIF +qx/h34y3MBToEzXqnfSEkZGM1iZtIgO1i3oVOGVlaGaE+wQKhg6zJZ6oTOZ+/ufR +O/xdmfGHZdlAfUEau/YiLknElEUNAQdUNuMB9TUtmBvh00aYoOjzRoAentTS+/3p +3+iQXK8NPJjQWBNToUVUQiYD9bBCIK/aHhBhmdEc0YfcWyQgd6IL7547BRJbPDju +OyAfRWLJ17uJMGYqOFTkputmpG8n0dG0yUcUI4MoA8U79iG83EAd5vTS1eJiTmc+ +PLBneknviBEBiSRO4Yu5q4QxksOqYhFYBzOj6HXwgJCczVEZUCnuW7kHw4kCRAQY +AQIADwIbAgUCVANGwQUJEOcnLwEpwF0gBBkBAgAGBQJKoO2QAAoJEHSpQbohnsgQ +tBEH+QH/xtP9sc9EMB+fDegsf2aDHLT28YpvhfjLWVrYmXRiextcBRiCwLT6khul +hA2vk4Tnh22dbhr87hUtuCJZGR5Y4E2ZS99KfAxXcu96Wo6Ki0X665G/QyUxoFYT +9msYZzlv0OmbuIaED0p9lRlTlZrsDG969a/d30G8NG0Mv6CH/Sfqtq26eP3ITqHX +e1zFveVTMIliBHaWGg9JqHiu/mm2MwUxuQAzLmaCtma5LXkGTUHsUruIdHplnqy7 +DHb3DC8mIjnVj9dvPrNXv54mxxhTwHkT5EPjFTzGZa6oFavYt+FzwPR67cVQXfz7 +jh6GktcqxrgA7KUmUwuaJ+DzGkIJEO6MvJ6Ibd2JiakIAKqtDaLgc796crcZ0vwQ +Glf5+H3OBj/sYkyNAByDdN2ZsuO7M1FT4OZcCBHqKScbeSfJQrqSQscSAURU+fTG +xNJrEDk9S975YAXiInRk71XawUNWhEqER5vshyLOx9es5FJo/rw7v253t+vzKElN +G3NhDnAe4UOQM73W2YfbWI6cikzwiWxHttO0oHByd/nqxMUP2onXQMI8fRRnRQmQ +KEzXZq46TVETp6N3WyBu30gjuz1Twq3QsS9Ga7crrhHk4E33FsU0Lq2GDTsT7+rF +xdVTTyCVQU33QEdmZYU6SIxTDllyYF1ooqfJWMtwvwFNW6YElduoCCJZNQJ5zR1Q +R/mIXgQQFggABgUCWl5mOwAKCRAbuJwGAjZ0SXlRAP4t6mSiQJrMgGQ0WdmtodwI +RKBcNbl/x/52k7FlWjlnSwD/UWQ/vQPozDkdtG55shknoxrnojv4eODalVKz68nT +nQeJAlsEGAECACYCGwIWIQSjxPD5ecqiLNuo9RLujLyeiG3diQUCW2fqRQUJFRpo 
+tQEpwF0gBBkBAgAGBQJKoO2QAAoJEHSpQbohnsgQtBEH+QH/xtP9sc9EMB+fDegs +f2aDHLT28YpvhfjLWVrYmXRiextcBRiCwLT6khulhA2vk4Tnh22dbhr87hUtuCJZ +GR5Y4E2ZS99KfAxXcu96Wo6Ki0X665G/QyUxoFYT9msYZzlv0OmbuIaED0p9lRlT +lZrsDG969a/d30G8NG0Mv6CH/Sfqtq26eP3ITqHXe1zFveVTMIliBHaWGg9JqHiu +/mm2MwUxuQAzLmaCtma5LXkGTUHsUruIdHplnqy7DHb3DC8mIjnVj9dvPrNXv54m +xxhTwHkT5EPjFTzGZa6oFavYt+FzwPR67cVQXfz7jh6GktcqxrgA7KUmUwuaJ+Dz +GkIJEO6MvJ6Ibd2JyVcH/3+imOYpKAPY7NjDLswbjrqKKcD8SL5trPd+811ST03U +9/PRjoRsYZqGQ9eMg4KN6Rx0lDipTldC7YfqdBP4YidfdsJ/6MDEOVuzUHewWwHr +aBVoMI68YG7dD3RMA0/xAqn5QsDEyZHldLEZjq/qXCJAkqqG2th9hnYFlmsvo46v +W78+jI0P6MW/qAxiJ5eAvNf0vT1pP4MagOPT8NZ6zYTJNeQPE3kiSN9wFMEYcoJ5 +SwyfOHQqRrZy96XDBCF3F7BfrgcN0h+IQ4z9BSa8yBxcWfDJiuhgO/Ks2JGsrPBA +hOkSUbdpxsb2/MzASgbiN00wsGsEejVHxvX7/iOE3rOJAlsEGAEKACYCGwIWIQSj +xPD5ecqiLNuo9RLujLyeiG3diQUCX7TT0gUJGANdQgEpwF0gBBkBAgAGBQJKoO2Q +AAoJEHSpQbohnsgQtBEH+QH/xtP9sc9EMB+fDegsf2aDHLT28YpvhfjLWVrYmXRi +extcBRiCwLT6khulhA2vk4Tnh22dbhr87hUtuCJZGR5Y4E2ZS99KfAxXcu96Wo6K +i0X665G/QyUxoFYT9msYZzlv0OmbuIaED0p9lRlTlZrsDG969a/d30G8NG0Mv6CH +/Sfqtq26eP3ITqHXe1zFveVTMIliBHaWGg9JqHiu/mm2MwUxuQAzLmaCtma5LXkG +TUHsUruIdHplnqy7DHb3DC8mIjnVj9dvPrNXv54mxxhTwHkT5EPjFTzGZa6oFavY +t+FzwPR67cVQXfz7jh6GktcqxrgA7KUmUwuaJ+DzGkIJEO6MvJ6Ibd2J7EMH/2sh +bVx9NRS36XNfQl6A1AXLCZ0+o4P+7zD1XsimSv2XsEMGzUxBk1FGao61QkXKuTEz +Y16bBE8tu7F0EbV6AyGoBdAqNauDZpJxq5OAHx7Od06R8KKil6T+OGGqPdPeEpgG ++i9d4hyDtESPeX+a8HDiIEC0czybPVzqvgtw8zTIpfQdaAMzv0ZPwYoU5mBG7SyP +ej5JjJj8Lfy/4LHHMRtwvqEqtNuukzePflnn0BR8UTQTQ9WlisRwUJzBdBJA23zh +GsFQ52ZUrxmcd65lC/CqYZEFwK0B8OwSzUxRbgFrCVzsizySv+QWXmi7EHd3bow4 +keSPmmDrjl8cySCNsMo= +=R0uO -----END PGP PUBLIC KEY BLOCK----- EOF diff --git a/integration/test_servers_of_happiness.py b/integration/test_servers_of_happiness.py index e5e4eb565..97392bf00 100644 --- a/integration/test_servers_of_happiness.py +++ b/integration/test_servers_of_happiness.py @@ -1,7 +1,6 @@ import sys from os.path import join -from twisted.internet import task from twisted.internet.error import ProcessTerminated import util diff --git a/integration/test_sftp.py b/integration/test_sftp.py new file mode 100644 index 000000000..6171c7413 --- /dev/null +++ b/integration/test_sftp.py @@ -0,0 +1,162 @@ +""" +It's possible to create/rename/delete files and directories in Tahoe-LAFS using +SFTP. + +These tests use Paramiko, rather than Twisted's Conch, because: + + 1. It's a different implementation, so we're not testing Conch against + itself. + + 2. Its API is much simpler to use. 
+""" + +from __future__ import unicode_literals +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 + +from posixpath import join +from stat import S_ISDIR + +from paramiko import SSHClient +from paramiko.client import AutoAddPolicy +from paramiko.sftp_client import SFTPClient +from paramiko.ssh_exception import AuthenticationException +from paramiko.rsakey import RSAKey + +import pytest + +from .util import generate_ssh_key, run_in_thread + + +def connect_sftp(connect_args={"username": "alice", "password": "password"}): + """Create an SFTP client.""" + client = SSHClient() + client.set_missing_host_key_policy(AutoAddPolicy) + client.connect("localhost", port=8022, look_for_keys=False, + allow_agent=False, **connect_args) + sftp = SFTPClient.from_transport(client.get_transport()) + + def rmdir(path, delete_root=True): + for f in sftp.listdir_attr(path=path): + childpath = join(path, f.filename) + if S_ISDIR(f.st_mode): + rmdir(childpath) + else: + sftp.remove(childpath) + if delete_root: + sftp.rmdir(path) + + # Delete any files left over from previous tests :( + rmdir("/", delete_root=False) + + return sftp + + +@run_in_thread +def test_bad_account_password_ssh_key(alice, tmpdir): + """ + Can't login with unknown username, wrong password, or wrong SSH pub key. + """ + # Wrong password, wrong username: + for u, p in [("alice", "wrong"), ("someuser", "password")]: + with pytest.raises(AuthenticationException): + connect_sftp(connect_args={ + "username": u, "password": p, + }) + + another_key = join(str(tmpdir), "ssh_key") + generate_ssh_key(another_key) + good_key = RSAKey(filename=join(alice.node_dir, "private", "ssh_client_rsa_key")) + bad_key = RSAKey(filename=another_key) + + # Wrong key: + with pytest.raises(AuthenticationException): + connect_sftp(connect_args={ + "username": "alice2", "pkey": bad_key, + }) + + # Wrong username: + with pytest.raises(AuthenticationException): + connect_sftp(connect_args={ + "username": "someoneelse", "pkey": good_key, + }) + + +@run_in_thread +def test_ssh_key_auth(alice): + """It's possible to login authenticating with SSH public key.""" + key = RSAKey(filename=join(alice.node_dir, "private", "ssh_client_rsa_key")) + sftp = connect_sftp(connect_args={ + "username": "alice2", "pkey": key + }) + assert sftp.listdir() == [] + + +@run_in_thread +def test_read_write_files(alice): + """It's possible to upload and download files.""" + sftp = connect_sftp() + with sftp.file("myfile", "wb") as f: + f.write(b"abc") + f.write(b"def") + + with sftp.file("myfile", "rb") as f: + assert f.read(4) == b"abcd" + assert f.read(2) == b"ef" + assert f.read(1) == b"" + + +@run_in_thread +def test_directories(alice): + """ + It's possible to create, list directories, and create and remove files in + them. 
+ """ + sftp = connect_sftp() + assert sftp.listdir() == [] + + sftp.mkdir("childdir") + assert sftp.listdir() == ["childdir"] + + with sftp.file("myfile", "wb") as f: + f.write(b"abc") + assert sorted(sftp.listdir()) == ["childdir", "myfile"] + + sftp.chdir("childdir") + assert sftp.listdir() == [] + + with sftp.file("myfile2", "wb") as f: + f.write(b"def") + assert sftp.listdir() == ["myfile2"] + + sftp.chdir(None) # root + with sftp.file("childdir/myfile2", "rb") as f: + assert f.read() == b"def" + + sftp.remove("myfile") + assert sftp.listdir() == ["childdir"] + + sftp.rmdir("childdir") + assert sftp.listdir() == [] + + +@run_in_thread +def test_rename(alice): + """Directories and files can be renamed.""" + sftp = connect_sftp() + sftp.mkdir("dir") + + filepath = join("dir", "file") + with sftp.file(filepath, "wb") as f: + f.write(b"abc") + + sftp.rename(filepath, join("dir", "file2")) + sftp.rename("dir", "dir2") + + with sftp.file(join("dir2", "file2"), "rb") as f: + assert f.read() == b"abc" diff --git a/integration/test_streaming_logs.py b/integration/test_streaming_logs.py index 32b97644d..52c813f9b 100644 --- a/integration/test_streaming_logs.py +++ b/integration/test_streaming_logs.py @@ -53,7 +53,12 @@ class _StreamingLogClientProtocol(WebSocketClientProtocol): self.factory.on_open.callback(self) def onMessage(self, payload, isBinary): - self.on_message.callback(payload) + if self.on_message is None: + # Already did our job, ignore it + return + on_message = self.on_message + self.on_message = None + on_message.callback(payload) def onClose(self, wasClean, code, reason): self.on_close.callback(reason) @@ -131,10 +136,13 @@ def _test_streaming_logs(reactor, temp_dir, alice): client.on_close = Deferred() client.on_message = Deferred() + # Capture this now before on_message perhaps goes away. + racing = _race(client.on_close, client.on_message) + # Provoke _some_ log event. yield treq.get(node_url) - result = yield _race(client.on_close, client.on_message) + result = yield racing assert isinstance(result, Right) json.loads(result.value) diff --git a/integration/test_tor.py b/integration/test_tor.py index 28360207a..dcbfb1151 100644 --- a/integration/test_tor.py +++ b/integration/test_tor.py @@ -1,21 +1,21 @@ from __future__ import print_function import sys -import time -import shutil -from os import mkdir, unlink, listdir -from os.path import join, exists -from six.moves import StringIO - -from twisted.internet.protocol import ProcessProtocol -from twisted.internet.error import ProcessExitedAlready, ProcessDone -from twisted.internet.defer import inlineCallbacks, Deferred +from os.path import join import pytest import pytest_twisted import util +from twisted.python.filepath import ( + FilePath, +) + +from allmydata.test.common import ( + write_introducer, +) + # see "conftest.py" for the fixtures (e.g. 
"tor_network") # XXX: Integration tests that involve Tor do not run reliably on @@ -73,12 +73,12 @@ def test_onion_service_storage(reactor, request, temp_dir, flog_gatherer, tor_ne @pytest_twisted.inlineCallbacks def _create_anonymous_node(reactor, name, control_port, request, temp_dir, flog_gatherer, tor_network, introducer_furl): - node_dir = join(temp_dir, name) + node_dir = FilePath(temp_dir).child(name) web_port = "tcp:{}:interface=localhost".format(control_port + 2000) if True: - print("creating", node_dir) - mkdir(node_dir) + print("creating", node_dir.path) + node_dir.makedirs() proto = util._DumpOutputProtocol(None) reactor.spawnProcess( proto, @@ -91,12 +91,15 @@ def _create_anonymous_node(reactor, name, control_port, request, temp_dir, flog_ '--hide-ip', '--tor-control-port', 'tcp:localhost:{}'.format(control_port), '--listen', 'tor', - node_dir, + node_dir.path, ) ) yield proto.done - with open(join(node_dir, 'tahoe.cfg'), 'w') as f: + + # Which services should this client connect to? + write_introducer(node_dir, "default", introducer_furl) + with node_dir.child('tahoe.cfg').open('w') as f: f.write(''' [node] nickname = %(name)s @@ -112,15 +115,12 @@ onion = true onion.private_key_file = private/tor_onion.privkey [client] -# Which services should this client connect to? -introducer.furl = %(furl)s shares.needed = 1 shares.happy = 1 shares.total = 2 ''' % { 'name': name, - 'furl': introducer_furl, 'web_port': web_port, 'log_furl': flog_gatherer, 'control_port': control_port, @@ -128,5 +128,5 @@ shares.total = 2 }) print("running") - yield util._run_node(reactor, node_dir, request, None) + yield util._run_node(reactor, node_dir.path, request, None) print("okay, launched") diff --git a/integration/test_web.py b/integration/test_web.py index 575e4fc1a..aab11412f 100644 --- a/integration/test_web.py +++ b/integration/test_web.py @@ -9,20 +9,15 @@ WebAPI *should* do in every situation. It's not clear the latter exists anywhere, however. """ -import sys import time -import shutil import json import urllib2 -from os import mkdir, unlink, utime -from os.path import join, exists, getmtime import allmydata.uri import util import requests -import pytest_twisted import html5lib from bs4 import BeautifulSoup @@ -132,12 +127,12 @@ def test_deep_stats(alice): dircap_uri, data={ u"t": u"upload", - u"when_done": u".", }, files={ u"file": FILE_CONTENTS, }, ) + resp.raise_for_status() # confirm the file is in the directory resp = requests.get( @@ -180,6 +175,7 @@ def test_deep_stats(alice): time.sleep(.5) +@util.run_in_thread def test_status(alice): """ confirm we get something sensible from /status and the various sub-types @@ -265,7 +261,8 @@ def test_directory_deep_check(alice): dircap_url, params={u"t": u"json"}, ) - dir_meta = json.loads(resp.content) + # Just verify it is valid JSON. 
+    json.loads(resp.content)
 
     # upload a file of pangrams into the directory
     FILE_CONTENTS = u"Sphinx of black quartz, judge my vow.\n" * (2048*10)
diff --git a/integration/util.py b/integration/util.py
index bbcf5efc6..256fd68c1 100644
--- a/integration/util.py
+++ b/integration/util.py
@@ -1,17 +1,25 @@
 import sys
 import time
 import json
-from os import mkdir
+from os import mkdir, environ
 from os.path import exists, join
 from six.moves import StringIO
 from functools import partial
+from subprocess import check_output
 
+from twisted.python.filepath import (
+    FilePath,
+)
 from twisted.internet.defer import Deferred, succeed
 from twisted.internet.protocol import ProcessProtocol
 from twisted.internet.error import ProcessExitedAlready, ProcessDone
+from twisted.internet.threads import deferToThread
 
 import requests
 
+from paramiko.rsakey import RSAKey
+from boltons.funcutils import wraps
+
 from allmydata.util.configutil import (
     get_config,
     set_config,
@@ -22,6 +30,12 @@ from allmydata import client
 
 import pytest_twisted
 
 
+def block_with_timeout(deferred, reactor, timeout=120):
+    """Block until Deferred has result, but timeout instead of waiting forever."""
+    deferred.addTimeout(timeout, reactor)
+    return pytest_twisted.blockon(deferred)
+
+
 class _ProcessExitedProtocol(ProcessProtocol):
     """
     Internal helper that .callback()s on self.done when the process
@@ -120,11 +134,12 @@ def _cleanup_tahoe_process(tahoe_transport, exited):
 
     :return: After the process has exited.
     """
+    from twisted.internet import reactor
     try:
         print("signaling {} with TERM".format(tahoe_transport.pid))
         tahoe_transport.signalProcess('TERM')
         print("signaled, blocking on exit")
-        pytest_twisted.blockon(exited)
+        block_with_timeout(exited, reactor)
         print("exited, goodbye")
     except ProcessExitedAlready:
         pass
@@ -145,6 +160,7 @@ def _tahoe_runner_optional_coverage(proto, reactor, request, other_args):
         proto,
         sys.executable,
         args,
+        env=environ,
     )
 
 
@@ -171,11 +187,15 @@ class TahoeProcess(object):
             u"portnum",
         )
 
+    def kill(self):
+        """Kill the process, block until it's done."""
+        _cleanup_tahoe_process(self.transport, self.transport.exited)
+
     def __str__(self):
         return "<TahoeProcess in '{}'>".format(self._node_dir)
 
 
-def _run_node(reactor, node_dir, request, magic_text):
+def _run_node(reactor, node_dir, request, magic_text, finalize=True):
     """
     Run a tahoe process from its node_dir.
 
@@ -185,10 +205,8 @@
         magic_text = "client running"
     protocol = _MagicTextProtocol(magic_text)
 
-    # on windows, "tahoe start" means: run forever in the foreground,
-    # but on linux it means daemonize. "tahoe run" is consistent
-    # between platforms.
-
+    # "tahoe run" is consistent across Linux/macOS/Windows, unlike the old
+    # "start" command.
     transport = _tahoe_runner_optional_coverage(
         protocol,
         reactor,
@@ -201,7 +219,8 @@
     )
     transport.exited = protocol.exited
 
-    request.addfinalizer(partial(_cleanup_tahoe_process, transport, protocol.exited))
+    if finalize:
+        request.addfinalizer(partial(_cleanup_tahoe_process, transport, protocol.exited))
 
     # XXX abusing the Deferred; should use .when_magic_seen() pattern
 
@@ -220,7 +239,8 @@ def _create_node(reactor, request, temp_dir, introducer_furl, flog_gatherer, nam
                  magic_text=None,
                  needed=2,
                  happy=3,
-                 total=4):
+                 total=4,
+                 finalize=True):
     """
     Helper to create a single node, run it and return the instance
     spawnProcess returned (ITransport)
@@ -256,14 +276,19 @@
     def created(_):
         config_path = join(node_dir, 'tahoe.cfg')
         config = get_config(config_path)
-        set_config(config, 'node', 'log_gatherer.furl', flog_gatherer)
-        write_config(config_path, config)
+        set_config(
+            config,
+            u'node',
+            u'log_gatherer.furl',
+            flog_gatherer.decode("utf-8"),
+        )
+        write_config(FilePath(config_path), config)
     created_d.addCallback(created)
 
     d = Deferred()
     d.callback(None)
     d.addCallback(lambda _: created_d)
-    d.addCallback(lambda _: _run_node(reactor, node_dir, request, magic_text))
+    d.addCallback(lambda _: _run_node(reactor, node_dir, request, magic_text, finalize=finalize))
     return d
 
 
@@ -383,17 +408,13 @@ def await_file_vanishes(path, timeout=10):
     raise FileShouldVanishException(path, timeout)
 
 
-def cli(request, reactor, node_dir, *argv):
+def cli(node, *argv):
    """
-    Run a tahoe CLI subcommand for a given node, optionally running
-    under coverage if '--coverage' was supplied.
+    Run a tahoe CLI subcommand for a given node in a blocking manner, returning
+    the output.
    """
-    proto = _CollectOutputProtocol()
-    _tahoe_runner_optional_coverage(
-        proto, reactor, request,
-        ['--node-directory', node_dir] + list(argv),
-    )
-    return proto.done
+    arguments = ["tahoe", '--node-directory', node.node_dir]
+    return check_output(arguments + list(argv))
 
 
 def node_url(node_dir, uri_fragment):
@@ -498,3 +519,36 @@ def await_client_ready(tahoe, timeout=10, liveness=60*2):
             tahoe,
         )
     )
+
+
+def generate_ssh_key(path):
+    """Create a new SSH private/public key pair."""
+    key = RSAKey.generate(2048)
+    key.write_private_key_file(path)
+    with open(path + ".pub", "wb") as f:
+        f.write(b"%s %s" % (key.get_name(), key.get_base64()))
+
+
+def run_in_thread(f):
+    """Decorator for integration tests that runs code in a thread.
+
+    Because we're using pytest_twisted, tests that rely on the reactor are
+    expected to return a Deferred and use async APIs so the reactor can run.
+
+    In the case of the integration test suite, it launches nodes in the
+    background using Twisted APIs. The nodes' stdout and stderr are read via
+    Twisted code. If the reactor doesn't run, reads don't happen, and
+    eventually the buffers fill up, and the nodes block when they try to flush
+    logs.
+
+    We can switch to Twisted APIs (treq instead of requests etc.), but
+    sometimes it's easier or more expedient to just have a blocking test. So
+    this decorator allows you to run the test in a thread, and the reactor can
+    keep running in the main thread.
+
+    See https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3597 for the tracking bug.
+ """ + @wraps(f) + def test(*args, **kwargs): + return deferToThread(lambda: f(*args, **kwargs)) + return test diff --git a/misc/build_helpers/run-deprecations.py b/misc/build_helpers/run-deprecations.py index 3972d5c89..f99cf90aa 100644 --- a/misc/build_helpers/run-deprecations.py +++ b/misc/build_helpers/run-deprecations.py @@ -20,7 +20,7 @@ class Options(usage.Options): self["args"] = list(args) description = """Run as: -PYTHONWARNINGS=default::DeprecationWarning python run-deprecations.py [--warnings=STDERRFILE] [--package=PYTHONPACKAGE ] COMMAND ARGS.. +python run-deprecations.py [--warnings=STDERRFILE] [--package=PYTHONPACKAGE ] COMMAND ARGS.. """ class RunPP(protocol.ProcessProtocol): @@ -79,12 +79,6 @@ def run_command(main): (command, os.environ.get("PATH"))) exe = executables[0] - pw = os.environ.get("PYTHONWARNINGS") - DDW = "default::DeprecationWarning" - if pw != DDW: - print("note: $PYTHONWARNINGS is '%s', not the expected %s" % (pw, DDW)) - sys.stdout.flush() - pp = RunPP() pp.d = defer.Deferred() pp.stdout = io.BytesIO() diff --git a/misc/build_helpers/show-tool-versions.py b/misc/build_helpers/show-tool-versions.py index c4fb79eff..f70183ae1 100644 --- a/misc/build_helpers/show-tool-versions.py +++ b/misc/build_helpers/show-tool-versions.py @@ -143,7 +143,6 @@ print_py_pkg_ver('coverage') print_py_pkg_ver('cryptography') print_py_pkg_ver('foolscap') print_py_pkg_ver('mock') -print_py_pkg_ver('Nevow', 'nevow') print_py_pkg_ver('pyasn1') print_py_pkg_ver('pycparser') print_py_pkg_ver('cryptography') diff --git a/misc/coding_tools/check-debugging.py b/misc/coding_tools/check-debugging.py index 17eeb30b7..f2ba6528e 100755 --- a/misc/coding_tools/check-debugging.py +++ b/misc/coding_tools/check-debugging.py @@ -11,8 +11,12 @@ umids = {} for starting_point in sys.argv[1:]: for root, dirs, files in os.walk(starting_point): - for fn in [f for f in files if f.endswith(".py")]: - fn = os.path.join(root, fn) + for f in files: + if not f.endswith(".py"): + continue + if f == "check-debugging.py": + continue + fn = os.path.join(root, f) for lineno,line in enumerate(open(fn, "r").readlines()): lineno = lineno+1 mo = re.search(r"\.setDebugging\(True\)", line) diff --git a/mypy.ini b/mypy.ini new file mode 100644 index 000000000..01cbb57a8 --- /dev/null +++ b/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +ignore_missing_imports = True +plugins=mypy_zope:plugin diff --git a/newsfragments/1549.installation b/newsfragments/1549.installation new file mode 100644 index 000000000..cbb91cea5 --- /dev/null +++ b/newsfragments/1549.installation @@ -0,0 +1 @@ +Tahoe-LAFS now requires Twisted 19.10.0 or newer. As a result, it now has a transitive dependency on bcrypt. diff --git a/newsfragments/2920.minor b/newsfragments/2920.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/2928.minor b/newsfragments/2928.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3283.minor b/newsfragments/3283.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3314.minor b/newsfragments/3314.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3384.minor b/newsfragments/3384.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3399.feature b/newsfragments/3399.feature new file mode 100644 index 000000000..d30a91679 --- /dev/null +++ b/newsfragments/3399.feature @@ -0,0 +1 @@ +Added 'typechecks' environment for tox running mypy and performing static typechecks. 
diff --git a/newsfragments/3428.minor b/newsfragments/3428.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3432.minor b/newsfragments/3432.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3433.installation b/newsfragments/3433.installation new file mode 100644 index 000000000..3c06e53d3 --- /dev/null +++ b/newsfragments/3433.installation @@ -0,0 +1 @@ +Tahoe-LAFS no longer depends on Nevow. \ No newline at end of file diff --git a/newsfragments/3434.minor b/newsfragments/3434.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3435.minor b/newsfragments/3435.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3454.minor b/newsfragments/3454.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3459.minor b/newsfragments/3459.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3460.minor b/newsfragments/3460.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3465.minor b/newsfragments/3465.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3466.minor b/newsfragments/3466.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3467.minor b/newsfragments/3467.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3468.minor b/newsfragments/3468.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3470.minor b/newsfragments/3470.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3471.minor b/newsfragments/3471.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3472.minor b/newsfragments/3472.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3473.minor b/newsfragments/3473.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3474.minor b/newsfragments/3474.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3475.minor b/newsfragments/3475.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3477.minor b/newsfragments/3477.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3478.minor b/newsfragments/3478.minor new file mode 100644 index 000000000..8b1378917 --- /dev/null +++ b/newsfragments/3478.minor @@ -0,0 +1 @@ + diff --git a/newsfragments/3479.minor b/newsfragments/3479.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3481.minor b/newsfragments/3481.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3482.minor b/newsfragments/3482.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3483.minor b/newsfragments/3483.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3485.minor b/newsfragments/3485.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3486.installation b/newsfragments/3486.installation new file mode 100644 index 000000000..7b24956b2 --- /dev/null +++ b/newsfragments/3486.installation @@ -0,0 +1 @@ +Tahoe-LAFS now requires the `netifaces` Python package and no longer requires the external `ip`, `ifconfig`, or `route.exe` executables. 
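
For context on the ``netifaces`` requirement above: local network discovery
becomes plain library calls instead of parsing the output of ``ip``,
``ifconfig``, or ``route.exe``. An illustrative sketch, not taken from this
patch:

    # Illustrative only: enumerate interfaces and their IPv4 addresses.
    # ifaddresses() maps address-family constants to lists of address dicts.
    import netifaces

    for name in netifaces.interfaces():
        for addr in netifaces.ifaddresses(name).get(netifaces.AF_INET, []):
            print(name, addr["addr"])
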
diff --git a/newsfragments/3488.minor b/newsfragments/3488.minor
new file mode 100644
index 000000000..e69de29bb
diff --git a/newsfragments/3490.minor b/newsfragments/3490.minor
new file mode 100644
index 000000000..e69de29bb
diff --git a/newsfragments/3491.minor b/newsfragments/3491.minor
new file mode 100644
index 000000000..e69de29bb
diff --git a/newsfragments/3492.minor b/newsfragments/3492.minor
new file mode 100644
index 000000000..e69de29bb
diff --git a/newsfragments/3493.minor b/newsfragments/3493.minor
new file mode 100644
index 000000000..e69de29bb
diff --git a/newsfragments/3496.minor b/newsfragments/3496.minor
new file mode 100644
index 000000000..e69de29bb
diff --git a/newsfragments/3497.installation b/newsfragments/3497.installation
new file mode 100644
index 000000000..4a50be97e
--- /dev/null
+++ b/newsfragments/3497.installation
@@ -0,0 +1 @@
+The Tahoe-LAFS project no longer commits to maintaining binary packages for all dependencies at <https://tahoe-lafs.org/deps>. Please use PyPI instead.
diff --git a/newsfragments/3499.minor b/newsfragments/3499.minor
new file mode 100644
index 000000000..e69de29bb
diff --git a/newsfragments/3500.minor b/newsfragments/3500.minor
new file mode 100644
index 000000000..e69de29bb
diff --git a/newsfragments/3501.minor b/newsfragments/3501.minor
new file mode 100644
index 000000000..e69de29bb
diff --git a/newsfragments/3502.minor b/newsfragments/3502.minor
new file mode 100644
index 000000000..e69de29bb
diff --git a/newsfragments/3503.other b/newsfragments/3503.other
new file mode 100644
index 000000000..5d0c681b6
--- /dev/null
+++ b/newsfragments/3503.other
@@ -0,0 +1 @@
+The specification section of the Tahoe-LAFS documentation now includes explicit discussion of the security properties of Foolscap "fURLs" on which it depends.
diff --git a/newsfragments/3504.configuration b/newsfragments/3504.configuration
new file mode 100644
index 000000000..9ff74482c
--- /dev/null
+++ b/newsfragments/3504.configuration
@@ -0,0 +1 @@
+The ``[client]introducer.furl`` configuration item is now deprecated in favor of the ``private/introducers.yaml`` file.
\ No newline at end of file
diff --git a/newsfragments/3509.bugfix b/newsfragments/3509.bugfix
new file mode 100644
index 000000000..4d633feab
--- /dev/null
+++ b/newsfragments/3509.bugfix
@@ -0,0 +1 @@
+Fix regression that broke flogtool results on Python 2.
\ No newline at end of file
diff --git a/newsfragments/3510.bugfix b/newsfragments/3510.bugfix
new file mode 100644
index 000000000..d4a2bd5dc
--- /dev/null
+++ b/newsfragments/3510.bugfix
@@ -0,0 +1 @@
+Fix a logging regression on Python 2 involving unicode strings.
\ No newline at end of file diff --git a/newsfragments/3511.minor b/newsfragments/3511.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3513.minor b/newsfragments/3513.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3514.minor b/newsfragments/3514.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3515.minor b/newsfragments/3515.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3517.minor b/newsfragments/3517.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3518.removed b/newsfragments/3518.removed new file mode 100644 index 000000000..460af5142 --- /dev/null +++ b/newsfragments/3518.removed @@ -0,0 +1 @@ +Announcements delivered through the introducer system are no longer automatically annotated with copious information about the Tahoe-LAFS software version nor the versions of its dependencies. diff --git a/newsfragments/3520.minor b/newsfragments/3520.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3521.minor b/newsfragments/3521.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3522.minor b/newsfragments/3522.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3523.minor b/newsfragments/3523.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3524.minor b/newsfragments/3524.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3529.minor b/newsfragments/3529.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3532.minor b/newsfragments/3532.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3533.minor b/newsfragments/3533.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3534.minor b/newsfragments/3534.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3536.minor b/newsfragments/3536.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3537.minor b/newsfragments/3537.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3539.bugfix b/newsfragments/3539.bugfix new file mode 100644 index 000000000..ed4aeb9af --- /dev/null +++ b/newsfragments/3539.bugfix @@ -0,0 +1 @@ +Certain implementation-internal weakref KeyErrors are now handled and should no longer cause user-initiated operations to fail. diff --git a/newsfragments/3542.minor b/newsfragments/3542.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3544.minor b/newsfragments/3544.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3545.other b/newsfragments/3545.other new file mode 100644 index 000000000..fd8adc37b --- /dev/null +++ b/newsfragments/3545.other @@ -0,0 +1 @@ +The README, revised by Viktoriia with feedback from the team, is now more focused on the developer community and provides more information about Tahoe-LAFS, why it's important, and how someone can use it or start contributing to it. 
\ No newline at end of file diff --git a/newsfragments/3546.minor b/newsfragments/3546.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3547.minor b/newsfragments/3547.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3549.removed b/newsfragments/3549.removed new file mode 100644 index 000000000..53c7a7de1 --- /dev/null +++ b/newsfragments/3549.removed @@ -0,0 +1 @@ +The stats gatherer, broken since at least Tahoe-LAFS 1.13.0, has been removed. The ``[client]stats_gatherer.furl`` configuration item in ``tahoe.cfg`` is no longer allowed. The Tahoe-LAFS project recommends using a third-party metrics aggregation tool instead. diff --git a/newsfragments/3550.removed b/newsfragments/3550.removed new file mode 100644 index 000000000..2074bf676 --- /dev/null +++ b/newsfragments/3550.removed @@ -0,0 +1 @@ +The deprecated ``tahoe`` start, restart, stop, and daemonize sub-commands have been removed. \ No newline at end of file diff --git a/newsfragments/3551.minor b/newsfragments/3551.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3552.minor b/newsfragments/3552.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3553.minor b/newsfragments/3553.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3555.minor b/newsfragments/3555.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3557.minor b/newsfragments/3557.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3558.minor b/newsfragments/3558.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3560.minor b/newsfragments/3560.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3564.minor b/newsfragments/3564.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3565.minor b/newsfragments/3565.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3566.minor b/newsfragments/3566.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3567.minor b/newsfragments/3567.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3568.minor b/newsfragments/3568.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3572.minor b/newsfragments/3572.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3574.minor b/newsfragments/3574.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3575.minor b/newsfragments/3575.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3576.minor b/newsfragments/3576.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3577.minor b/newsfragments/3577.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3578.minor b/newsfragments/3578.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3579.minor b/newsfragments/3579.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3582.minor b/newsfragments/3582.minor new file mode 100644 index 000000000..e69de29bb diff --git a/newsfragments/3583.removed b/newsfragments/3583.removed new file mode 100644 index 000000000..a3fce48be --- /dev/null +++ b/newsfragments/3583.removed @@ -0,0 +1 @@ +FTP is no longer supported by Tahoe-LAFS. Please use the SFTP support instead. 
\ No newline at end of file
diff --git a/newsfragments/3584.bugfix b/newsfragments/3584.bugfix
new file mode 100644
index 000000000..faf57713b
--- /dev/null
+++ b/newsfragments/3584.bugfix
@@ -0,0 +1 @@
+SFTP public key auth should now work more consistently; SFTP in general was previously broken and now works.
\ No newline at end of file
diff --git a/newsfragments/3587.minor b/newsfragments/3587.minor
new file mode 100644
index 000000000..8b1378917
--- /dev/null
+++ b/newsfragments/3587.minor
@@ -0,0 +1 @@
+
diff --git a/newsfragments/3589.minor b/newsfragments/3589.minor
new file mode 100644
index 000000000..e69de29bb
diff --git a/newsfragments/3590.bugfix b/newsfragments/3590.bugfix
new file mode 100644
index 000000000..aa504a5e3
--- /dev/null
+++ b/newsfragments/3590.bugfix
@@ -0,0 +1 @@
+Fixed issue where redirecting old-style URIs (/uri/?uri=...) didn't work.
\ No newline at end of file
diff --git a/newsfragments/3591.minor b/newsfragments/3591.minor
new file mode 100644
index 000000000..e69de29bb
diff --git a/newsfragments/3594.minor b/newsfragments/3594.minor
new file mode 100644
index 000000000..e69de29bb
diff --git a/newsfragments/3595.minor b/newsfragments/3595.minor
new file mode 100644
index 000000000..e69de29bb
diff --git a/newsfragments/3599.minor b/newsfragments/3599.minor
new file mode 100644
index 000000000..e69de29bb
diff --git a/nix/nevow.nix b/nix/nevow.nix
deleted file mode 100644
index 202a59722..000000000
--- a/nix/nevow.nix
+++ /dev/null
@@ -1,45 +0,0 @@
-{ stdenv, buildPythonPackage, fetchPypi, isPy3k, twisted }:
-
-buildPythonPackage rec {
-  pname = "Nevow";
-  version = "0.14.5";
-  name = "${pname}-${version}";
-  disabled = isPy3k;
-
-  src = fetchPypi {
-    inherit pname;
-    inherit version;
-    sha256 = "1wr3fai01h1bcp4qpia6indg4qmxvywwv3q1iibm669mln2vmdmg";
-  };
-
-  propagatedBuildInputs = [ twisted ];
-
-  checkInputs = [ twisted ];
-
-  checkPhase = ''
-    trial formless nevow
-  '';
-
-  meta = with stdenv.lib; {
-    description = "Nevow, a web application construction kit for Python";
-    longDescription = ''
-      Nevow - Pronounced as the French "nouveau", or "noo-voh", Nevow
-      is a web application construction kit written in Python. It is
-      designed to allow the programmer to express as much of the view
-      logic as desired in Python, and includes a pure Python XML
-      expression syntax named stan to facilitate this. However it
-      also provides rich support for designer-edited templates, using
-      a very small XML attribute language to provide bi-directional
-      template manipulation capability.
-
-      Nevow also includes formless, a declarative syntax for
-      specifying the types of method parameters and exposing these
-      methods to the web. Forms can be rendered automatically, and
-      form posts will be validated and input coerced, rendering error
-      pages if appropriate. Once a form post has validated
-      successfully, the method will be called with the coerced values.
-    '';
-    homepage = https://github.com/twisted/nevow;
-    license = licenses.mit;
-  };
-}
diff --git a/nix/overlays.nix b/nix/overlays.nix
index ba3c9c885..2bf58575e 100644
--- a/nix/overlays.nix
+++ b/nix/overlays.nix
@@ -3,10 +3,7 @@ self: super: {
   packageOverrides = python-self: python-super: {
     # eliot is not part of nixpkgs at all at this time.
     eliot = python-self.callPackage ./eliot.nix { };
-    # The packaged version of Nevow is very slightly out of date but also
-    # conflicts with the packaged version of Twisted. Supply our own
-    # slightly newer version.
- nevow = python-super.callPackage ./nevow.nix { }; + # NixOS autobahn package has trollius as a dependency, although # it is optional. Trollius is unmaintained and fails on CI. autobahn = python-super.callPackage ./autobahn.nix { }; @@ -18,6 +15,9 @@ self: super: { # Need version of pyutil that supports Python 3. The version in 19.09 # is too old. pyutil = python-super.callPackage ./pyutil.nix { }; + + # Need a newer version of Twisted, too. + twisted = python-super.callPackage ./twisted.nix { }; }; }; } diff --git a/nix/tahoe-lafs.nix b/nix/tahoe-lafs.nix index f2e61d6c2..f3ccf950d 100644 --- a/nix/tahoe-lafs.nix +++ b/nix/tahoe-lafs.nix @@ -1,10 +1,10 @@ { fetchFromGitHub, lib -, nettools, python -, twisted, foolscap, nevow, zfec +, python +, twisted, foolscap, zfec , setuptools, setuptoolsTrial, pyasn1, zope_interface , service-identity, pyyaml, magic-wormhole, treq, appdirs -, beautifulsoup4, eliot, autobahn, cryptography -, html5lib, pyutil, distro +, beautifulsoup4, eliot, autobahn, cryptography, netifaces +, html5lib, pyutil, distro, configparser }: python.pkgs.buildPythonPackage rec { version = "1.14.0.dev"; @@ -23,34 +23,21 @@ python.pkgs.buildPythonPackage rec { # This list is over-zealous because it's more work to disable individual # tests with in a module. - # test_system is a lot of integration-style tests that do a lot of real - # networking between many processes. They sometimes fail spuriously. - rm src/allmydata/test/test_system.py - # Many of these tests don't properly skip when i2p or tor dependencies are # not supplied (and we are not supplying them). rm src/allmydata/test/test_i2p_provider.py rm src/allmydata/test/test_connections.py rm src/allmydata/test/cli/test_create.py rm src/allmydata/test/test_client.py - rm src/allmydata/test/test_runner.py - - # Some eliot code changes behavior based on whether stdout is a tty or not - # and fails when it is not. - rm src/allmydata/test/test_eliotutil.py ''; - propagatedNativeBuildInputs = [ - nettools - ]; - propagatedBuildInputs = with python.pkgs; [ - twisted foolscap nevow zfec appdirs + twisted foolscap zfec appdirs setuptoolsTrial pyasn1 zope_interface service-identity pyyaml magic-wormhole treq - eliot autobahn cryptography setuptools - future pyutil distro + eliot autobahn cryptography netifaces setuptools + future pyutil distro configparser ]; checkInputs = with python.pkgs; [ diff --git a/nix/twisted.nix b/nix/twisted.nix new file mode 100644 index 000000000..3c11e3c71 --- /dev/null +++ b/nix/twisted.nix @@ -0,0 +1,63 @@ +{ stdenv +, buildPythonPackage +, fetchPypi +, python +, zope_interface +, incremental +, automat +, constantly +, hyperlink +, pyhamcrest +, attrs +, pyopenssl +, service-identity +, setuptools +, idna +, bcrypt +}: +buildPythonPackage rec { + pname = "Twisted"; + version = "19.10.0"; + + src = fetchPypi { + inherit pname version; + extension = "tar.bz2"; + sha256 = "7394ba7f272ae722a74f3d969dcf599bc4ef093bc392038748a490f1724a515d"; + }; + + propagatedBuildInputs = [ zope_interface incremental automat constantly hyperlink pyhamcrest attrs setuptools bcrypt ]; + + passthru.extras.tls = [ pyopenssl service-identity idna ]; + + # Patch t.p._inotify to point to libc. Without this, + # twisted.python.runtime.platform.supportsINotify() == False + patchPhase = stdenv.lib.optionalString stdenv.isLinux '' + substituteInPlace src/twisted/python/_inotify.py --replace \ + "ctypes.util.find_library('c')" "'${stdenv.glibc.out}/lib/libc.so.6'" + ''; + + # Generate Twisted's plug-in cache. 
Twisted users must do it as well. See + # http://twistedmatrix.com/documents/current/core/howto/plugin.html#auto3 + # and http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=477103 for + # details. + postFixup = '' + $out/bin/twistd --help > /dev/null + ''; + + checkPhase = '' + ${python.interpreter} -m unittest discover -s twisted/test + ''; + # Tests require network + doCheck = false; + + meta = with stdenv.lib; { + homepage = https://twistedmatrix.com/; + description = "Twisted, an event-driven networking engine written in Python"; + longDescription = '' + Twisted is an event-driven networking engine written in Python + and licensed under the MIT license. + ''; + license = licenses.mit; + maintainers = [ ]; + }; +} diff --git a/setup.py b/setup.py index 4151545f7..5dc68d367 100644 --- a/setup.py +++ b/setup.py @@ -38,8 +38,7 @@ install_requires = [ "zfec >= 1.1.0", # zope.interface >= 3.6.0 is required for Twisted >= 12.1.0. - # zope.interface 3.6.3 and 3.6.4 are incompatible with Nevow (#1435). - "zope.interface >= 3.6.0, != 3.6.3, != 3.6.4", + "zope.interface >= 3.6.0", # * foolscap < 0.5.1 had a performance bug which spent O(N**2) CPU for # transferring large mutable files of size N. @@ -64,13 +63,8 @@ install_requires = [ # version of cryptography will *really* be installed. "cryptography >= 2.6", - # * We need Twisted 10.1.0 for the FTP frontend in order for - # Twisted's FTP server to support asynchronous close. # * The SFTP frontend depends on Twisted 11.0.0 to fix the SSH server # rekeying bug - # * The FTP frontend depends on Twisted >= 11.1.0 for - # filepath.Permissions - # * Nevow 0.11.1 depends on Twisted >= 13.0.0. # * The SFTP frontend and manhole depend on the conch extra. However, we # can't explicitly declare that without an undesirable dependency on gmpy, # as explained in ticket #2740. @@ -100,10 +94,9 @@ install_requires = [ # `pip install tahoe-lafs[sftp]` would not install requirements # specified by Twisted[conch]. Since this would be the *whole point* of # an sftp extra in Tahoe-LAFS, there is no point in having one. - "Twisted[tls,conch] >= 18.4.0", - - # We need Nevow >= 0.11.1 which can be installed using pip. - "Nevow >= 0.11.1", + # * Twisted 19.10 introduces Site.getContentFile which we use to get + # temporary upload files placed into a per-node temporary directory. + "Twisted[tls,conch] >= 19.10.0", "PyYAML >= 3.11", @@ -114,7 +107,9 @@ install_requires = [ # Eliot is contemplating dropping Python 2 support. Stick to a version we # know works on Python 2.7. - "eliot ~= 1.7", + "eliot ~= 1.7 ; python_version < '3.0'", + # On Python 3, we want a new enough version to support custom JSON encoders. + "eliot >= 1.13.0 ; python_version > '3.0'", # Pyrsistent 0.17.0 (which we use by way of Eliot) has dropped # Python 2 entirely; stick to the version known to work for us. @@ -131,11 +126,17 @@ install_requires = [ # Support for Python 3 transition "future >= 0.18.2", + # Discover local network configuration + "netifaces", + # Utility code: "pyutil >= 3.3.0", # Linux distribution detection: "distro >= 1.4.0", + + # Backported configparser for Python 2: + "configparser ; python_version < '3.0'", ] setup_requires = [ @@ -380,10 +381,7 @@ setup(name="tahoe-lafs", # also set in __init__.py # this version from time to time, but we will do it # intentionally. "pyflakes == 2.2.0", - # coverage 5.0 breaks the integration tests in some opaque way. - # This probably needs to be addressed in a more permanent way - # eventually... 
- "coverage ~= 4.5", + "coverage ~= 5.0", "mock", "tox", "pytest", @@ -397,6 +395,8 @@ setup(name="tahoe-lafs", # also set in __init__.py "html5lib", "junitxml", "tenacity", + "paramiko", + "pytest-timeout", ] + tor_requires + i2p_requires, "tor": tor_requires, "i2p": i2p_requires, diff --git a/src/allmydata/__init__.py b/src/allmydata/__init__.py index 15d5fb240..3157c8c80 100644 --- a/src/allmydata/__init__.py +++ b/src/allmydata/__init__.py @@ -14,7 +14,9 @@ __all__ = [ __version__ = "unknown" try: - from allmydata._version import __version__ + # type ignored as it fails in CI + # (https://app.circleci.com/pipelines/github/tahoe-lafs/tahoe-lafs/1647/workflows/60ae95d4-abe8-492c-8a03-1ad3b9e42ed3/jobs/40972) + from allmydata._version import __version__ # type: ignore except ImportError: # We're running in a tree that hasn't run update_version, and didn't # come with a _version.py, so we don't know what our version is. @@ -24,7 +26,9 @@ except ImportError: full_version = "unknown" branch = "unknown" try: - from allmydata._version import full_version, branch + # type ignored as it fails in CI + # (https://app.circleci.com/pipelines/github/tahoe-lafs/tahoe-lafs/1647/workflows/60ae95d4-abe8-492c-8a03-1ad3b9e42ed3/jobs/40972) + from allmydata._version import full_version, branch # type: ignore except ImportError: # We're running in a tree that hasn't run update_version, and didn't # come with a _version.py, so we don't know what our full version or diff --git a/src/allmydata/__main__.py b/src/allmydata/__main__.py index 25a67ea4e..87f1f6522 100644 --- a/src/allmydata/__main__.py +++ b/src/allmydata/__main__.py @@ -1,3 +1,16 @@ +""" +Ported to Python 3. +""" + +from __future__ import unicode_literals +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 + import sys from allmydata.scripts.runner import run diff --git a/src/allmydata/_auto_deps.py b/src/allmydata/_auto_deps.py index cf98aae96..521b17a45 100644 --- a/src/allmydata/_auto_deps.py +++ b/src/allmydata/_auto_deps.py @@ -1,3 +1,16 @@ +""" +Ported to Python 3. +""" + +from __future__ import unicode_literals +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 + # Note: please minimize imports in this file. In particular, do not import # any module from Tahoe-LAFS or its dependencies, and do not import any # modules at all at global level. That includes setuptools and pkg_resources. @@ -11,7 +24,6 @@ package_imports = [ ('foolscap', 'foolscap'), ('zfec', 'zfec'), ('Twisted', 'twisted'), - ('Nevow', 'nevow'), ('zope.interface', 'zope.interface'), ('python', None), ('platform', None), @@ -72,7 +84,6 @@ runtime_warning_messages = [ ] warning_imports = [ - 'nevow', 'twisted.persisted.sob', 'twisted.python.filepath', ] diff --git a/src/allmydata/blacklist.py b/src/allmydata/blacklist.py index 89ee81a96..b7e1d0956 100644 --- a/src/allmydata/blacklist.py +++ b/src/allmydata/blacklist.py @@ -1,3 +1,14 @@ +""" +Ported to Python 3. 
+""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 import os @@ -34,10 +45,10 @@ class Blacklist(object): try: if self.last_mtime is None or current_mtime > self.last_mtime: self.entries.clear() - with open(self.blacklist_fn, "r") as f: + with open(self.blacklist_fn, "rb") as f: for line in f: line = line.strip() - if not line or line.startswith("#"): + if not line or line.startswith(b"#"): continue si_s, reason = line.split(None, 1) si = base32.a2b(si_s) # must be valid base32 diff --git a/src/allmydata/check_results.py b/src/allmydata/check_results.py index 068f77a25..f33c3afc0 100644 --- a/src/allmydata/check_results.py +++ b/src/allmydata/check_results.py @@ -1,3 +1,4 @@ +from past.builtins import unicode from zope.interface import implementer from allmydata.interfaces import ICheckResults, ICheckAndRepairResults, \ @@ -56,7 +57,11 @@ class CheckResults(object): self._list_incompatible_shares = list_incompatible_shares self._count_incompatible_shares = count_incompatible_shares - assert isinstance(summary, str) # should be a single string + # On Python 2, we can mix bytes and Unicode. On Python 3, we want + # unicode. + if isinstance(summary, bytes): + summary = unicode(summary, "utf-8") + assert isinstance(summary, unicode) # should be a single string self._summary = summary assert not isinstance(report, str) # should be list of strings self._report = report diff --git a/src/allmydata/client.py b/src/allmydata/client.py index af3a17d48..f5e603490 100644 --- a/src/allmydata/client.py +++ b/src/allmydata/client.py @@ -1,11 +1,10 @@ +from past.builtins import unicode + import os, stat, time, weakref from base64 import urlsafe_b64encode from functools import partial -from errno import ENOENT, EPERM -try: - from ConfigParser import NoSectionError -except ImportError: - from configparser import NoSectionError +# On Python 2 this will be the backported package: +from configparser import NoSectionError from foolscap.furl import ( decode_furl, @@ -34,9 +33,9 @@ from allmydata.introducer.client import IntroducerClient from allmydata.util import ( hashutil, base32, pollmixin, log, idlib, yamlutil, configutil, + fileutil, ) -from allmydata.util.encodingutil import (get_filesystem_encoding, - from_utf8_or_none) +from allmydata.util.encodingutil import get_filesystem_encoding from allmydata.util.abbreviate import parse_abbreviated_size from allmydata.util.time_format import parse_duration, parse_date from allmydata.util.i2p_provider import create as create_i2p_provider @@ -85,15 +84,8 @@ _client_config = configutil.ValidConfiguration( "shares.happy", "shares.needed", "shares.total", - "stats_gatherer.furl", "storage.plugins", ), - "ftpd": ( - "accounts.file", - "accounts.url", - "enabled", - "port", - ), "storage": ( "debug_discard", "enabled", @@ -133,7 +125,7 @@ def _valid_config(): return cfg.update(_client_config) # this is put into README in new node-directories -CLIENT_README = """ +CLIENT_README = u""" This directory contains files which contain private data for the Tahoe node, such as private keys. 
On Unix-like systems, the permissions on this directory are set to disallow users other than its owner from reading the contents of @@ -272,7 +264,7 @@ def create_client_from_config(config, _client_factory=None, _introducer_factory= i2p_provider = create_i2p_provider(reactor, config) tor_provider = create_tor_provider(reactor, config) - handlers = node.create_connection_handlers(reactor, config, i2p_provider, tor_provider) + handlers = node.create_connection_handlers(config, i2p_provider, tor_provider) default_connection_handlers, foolscap_connection_handlers = handlers tub_options = node.create_tub_options(config) @@ -466,57 +458,17 @@ def create_introducer_clients(config, main_tub, _introducer_factory=None): # we return this list introducer_clients = [] - introducers_yaml_filename = config.get_private_path("introducers.yaml") - introducers_filepath = FilePath(introducers_yaml_filename) + introducers = config.get_introducer_configuration() - try: - with introducers_filepath.open() as f: - introducers_yaml = yamlutil.safe_load(f) - if introducers_yaml is None: - raise EnvironmentError( - EPERM, - "Can't read '{}'".format(introducers_yaml_filename), - introducers_yaml_filename, - ) - introducers = introducers_yaml.get("introducers", {}) - log.msg( - "found {} introducers in private/introducers.yaml".format( - len(introducers), - ) - ) - except EnvironmentError as e: - if e.errno != ENOENT: - raise - introducers = {} - - if "default" in introducers.keys(): - raise ValueError( - "'default' introducer furl cannot be specified in introducers.yaml;" - " please fix impossible configuration." - ) - - # read furl from tahoe.cfg - tahoe_cfg_introducer_furl = config.get_config("client", "introducer.furl", None) - if tahoe_cfg_introducer_furl == "None": - raise ValueError( - "tahoe.cfg has invalid 'introducer.furl = None':" - " to disable it, use 'introducer.furl ='" - " or omit the key entirely" - ) - if tahoe_cfg_introducer_furl: - introducers[u'default'] = {'furl':tahoe_cfg_introducer_furl} - - for petname, introducer in introducers.items(): - introducer_cache_filepath = FilePath(config.get_private_path("introducer_{}_cache.yaml".format(petname))) + for petname, (furl, cache_path) in introducers.items(): ic = _introducer_factory( main_tub, - introducer['furl'].encode("ascii"), + furl.encode("ascii"), config.nickname, str(allmydata.__full_version__), str(_Client.OLDEST_SUPPORTED_VERSION), - node.get_app_versions(), partial(_sequencer, config), - introducer_cache_filepath, + cache_path, ) introducer_clients.append(ic) return introducer_clients @@ -628,7 +580,7 @@ def storage_enabled(config): :return bool: ``True`` if storage is enabled, ``False`` otherwise. 
""" - return config.get_config(b"storage", b"enabled", True, boolean=True) + return config.get_config("storage", "enabled", True, boolean=True) def anonymous_storage_enabled(config): @@ -642,7 +594,7 @@ def anonymous_storage_enabled(config): """ return ( storage_enabled(config) and - config.get_config(b"storage", b"anonymous", True, boolean=True) + config.get_config("storage", "anonymous", True, boolean=True) ) @@ -698,7 +650,6 @@ class _Client(node.Node, pollmixin.PollMixin): raise ValueError("config error: helper is enabled, but tub " "is not listening ('tub.port=' is empty)") self.init_helper() - self.init_ftp_server() self.init_sftp_server() # If the node sees an exit_trigger file, it will poll every second to see @@ -718,8 +669,7 @@ class _Client(node.Node, pollmixin.PollMixin): self.init_web(webport) # strports string def init_stats_provider(self): - gatherer_furl = self.config.get_config("client", "stats_gatherer.furl", None) - self.stats_provider = StatsProvider(self, gatherer_furl) + self.stats_provider = StatsProvider(self) self.stats_provider.setServiceParent(self) self.stats_provider.register_producer(self) @@ -727,10 +677,14 @@ class _Client(node.Node, pollmixin.PollMixin): return { 'node.uptime': time.time() - self.started_timestamp } def init_secrets(self): - lease_s = self.config.get_or_create_private_config("secret", _make_secret) + # configs are always unicode + def _unicode_make_secret(): + return unicode(_make_secret(), "ascii") + lease_s = self.config.get_or_create_private_config( + "secret", _unicode_make_secret).encode("utf-8") lease_secret = base32.a2b(lease_s) - convergence_s = self.config.get_or_create_private_config('convergence', - _make_secret) + convergence_s = self.config.get_or_create_private_config( + 'convergence', _unicode_make_secret).encode("utf-8") self.convergence = base32.a2b(convergence_s) self._secret_holder = SecretHolder(lease_secret, self.convergence) @@ -739,9 +693,11 @@ class _Client(node.Node, pollmixin.PollMixin): # existing key def _make_key(): private_key, _ = ed25519.create_signing_keypair() - return ed25519.string_from_signing_key(private_key) + b"\n" + # Config values are always unicode: + return unicode(ed25519.string_from_signing_key(private_key) + b"\n", "utf-8") - private_key_str = self.config.get_or_create_private_config("node.privkey", _make_key) + private_key_str = self.config.get_or_create_private_config( + "node.privkey", _make_key).encode("utf-8") private_key, public_key = ed25519.signing_keypair_from_string(private_key_str) public_key_str = ed25519.string_from_verifying_key(public_key) self.config.write_config_file("node.pubkey", public_key_str + b"\n", "wb") @@ -751,7 +707,7 @@ class _Client(node.Node, pollmixin.PollMixin): def get_long_nodeid(self): # this matches what IServer.get_longname() says about us elsewhere vk_string = ed25519.string_from_verifying_key(self._node_public_key) - return remove_prefix(vk_string, "pub-") + return remove_prefix(vk_string, b"pub-") def get_long_tubid(self): return idlib.nodeid_b2a(self.nodeid) @@ -781,7 +737,7 @@ class _Client(node.Node, pollmixin.PollMixin): vk_string = ed25519.string_from_verifying_key(self._node_public_key) vk_bytes = remove_prefix(vk_string, ed25519.PUBLIC_KEY_PREFIX) seed = base32.b2a(vk_bytes) - self.config.write_config_file("permutation-seed", seed+"\n") + self.config.write_config_file("permutation-seed", seed+b"\n", mode="wb") return seed.strip() def get_anonymous_storage_server(self): @@ -806,7 +762,7 @@ class _Client(node.Node, pollmixin.PollMixin): 
config_storedir = self.get_config( "storage", "storage_dir", self.STOREDIR, - ).decode('utf-8') + ) storedir = self.config.get_config_path(config_storedir) data = self.config.get_config("storage", "reserved_space", None) @@ -1021,7 +977,7 @@ class _Client(node.Node, pollmixin.PollMixin): c = ControlServer() c.setServiceParent(self) control_url = self.control_tub.registerReference(c) - self.config.write_private_config("control.furl", control_url + b"\n") + self.config.write_private_config("control.furl", control_url + "\n") def init_helper(self): self.helper = Helper(self.config.get_config_path("helper"), @@ -1038,39 +994,46 @@ class _Client(node.Node, pollmixin.PollMixin): def set_default_mutable_keysize(self, keysize): self._key_generator.set_default_keysize(keysize) + def _get_tempdir(self): + """ + Determine the path to the directory where temporary files for this node + should be written. + + :return bytes: The path which will exist and be a directory. + """ + tempdir_config = self.config.get_config("node", "tempdir", "tmp") + if isinstance(tempdir_config, bytes): + tempdir_config = tempdir_config.decode('utf-8') + tempdir = self.config.get_config_path(tempdir_config) + if not os.path.exists(tempdir): + fileutil.make_dirs(tempdir) + return tempdir + def init_web(self, webport): self.log("init_web(webport=%s)", args=(webport,)) from allmydata.webish import WebishServer nodeurl_path = self.config.get_config_path("node.url") - staticdir_config = self.config.get_config("node", "web.static", "public_html").decode("utf-8") + staticdir_config = self.config.get_config("node", "web.static", "public_html") staticdir = self.config.get_config_path(staticdir_config) - ws = WebishServer(self, webport, nodeurl_path, staticdir) + ws = WebishServer( + self, + webport, + self._get_tempdir(), + nodeurl_path, + staticdir, + ) ws.setServiceParent(self) - def init_ftp_server(self): - if self.config.get_config("ftpd", "enabled", False, boolean=True): - accountfile = from_utf8_or_none( - self.config.get_config("ftpd", "accounts.file", None)) - if accountfile: - accountfile = self.config.get_config_path(accountfile) - accounturl = self.config.get_config("ftpd", "accounts.url", None) - ftp_portstr = self.config.get_config("ftpd", "port", "8021") - - from allmydata.frontends import ftpd - s = ftpd.FTPServer(self, accountfile, accounturl, ftp_portstr) - s.setServiceParent(self) - def init_sftp_server(self): if self.config.get_config("sftpd", "enabled", False, boolean=True): - accountfile = from_utf8_or_none( - self.config.get_config("sftpd", "accounts.file", None)) + accountfile = self.config.get_config("sftpd", "accounts.file", None) if accountfile: accountfile = self.config.get_config_path(accountfile) accounturl = self.config.get_config("sftpd", "accounts.url", None) - sftp_portstr = self.config.get_config("sftpd", "port", "8022") - pubkey_file = from_utf8_or_none(self.config.get_config("sftpd", "host_pubkey_file")) - privkey_file = from_utf8_or_none(self.config.get_config("sftpd", "host_privkey_file")) + sftp_portstr = self.config.get_config("sftpd", "port", "tcp:8022") + pubkey_file = self.config.get_config("sftpd", "host_pubkey_file") + privkey_file = self.config.get_config("sftpd", "host_privkey_file") from allmydata.frontends import sftpd s = sftpd.SFTPServer(self, accountfile, accounturl, diff --git a/src/allmydata/codec.py b/src/allmydata/codec.py index a4baab4b6..19345959e 100644 --- a/src/allmydata/codec.py +++ b/src/allmydata/codec.py @@ -57,6 +57,10 @@ class CRSEncoder(object): return 
defer.succeed((shares, desired_share_ids)) + def encode_proposal(self, data, desired_share_ids=None): + raise NotImplementedError() + + @implementer(ICodecDecoder) class CRSDecoder(object): diff --git a/src/allmydata/deep_stats.py b/src/allmydata/deep_stats.py index c18adb5be..bfb43ebae 100644 --- a/src/allmydata/deep_stats.py +++ b/src/allmydata/deep_stats.py @@ -1,4 +1,15 @@ -"""Implementation of the deep stats class.""" +"""Implementation of the deep stats class. + +Ported to Python 3. +""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 import math @@ -13,7 +24,7 @@ from allmydata.util import mathutil class DeepStats(object): """Deep stats object. - Holds results of the deep-stats opetation. + Holds results of the deep-stats operation. Used for json generation in the API.""" # Json API version. @@ -121,7 +132,7 @@ class DeepStats(object): h[bucket] += 1 def get_results(self): - """Returns deep-stats resutls.""" + """Returns deep-stats results.""" stats = self.stats.copy() for key in self.histograms: h = self.histograms[key] diff --git a/src/allmydata/dirnode.py b/src/allmydata/dirnode.py index 59ebd73ba..6871b94c7 100644 --- a/src/allmydata/dirnode.py +++ b/src/allmydata/dirnode.py @@ -1,4 +1,16 @@ -"""Directory Node implementation.""" +"""Directory Node implementation. + +Ported to Python 3. +""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from future.utils import PY2 +if PY2: + # Skip dict so it doesn't break things. + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, list, object, range, str, max, min # noqa: F401 from past.builtins import unicode import time @@ -6,7 +18,6 @@ import time from zope.interface import implementer from twisted.internet import defer from foolscap.api import fireEventually -import json from allmydata.crypto import aes from allmydata.deep_stats import DeepStats @@ -19,7 +30,7 @@ from allmydata.interfaces import IFilesystemNode, IDirectoryNode, IFileNode, \ from allmydata.check_results import DeepCheckResults, \ DeepCheckAndRepairResults from allmydata.monitor import Monitor -from allmydata.util import hashutil, base32, log +from allmydata.util import hashutil, base32, log, jsonbytes as json from allmydata.util.encodingutil import quote_output, normalize from allmydata.util.assertutil import precondition from allmydata.util.netstring import netstring, split_netstring @@ -37,6 +48,8 @@ from eliot.twisted import ( NAME = Field.for_types( u"name", + # Make sure this works on Python 2; with str, it gets Future str which + # breaks Eliot. 
[unicode], u"The name linking the parent to this node.", ) @@ -179,7 +192,7 @@ class Adder(object): def modify(self, old_contents, servermap, first_time): children = self.node._unpack_contents(old_contents) now = time.time() - for (namex, (child, new_metadata)) in self.entries.iteritems(): + for (namex, (child, new_metadata)) in list(self.entries.items()): name = normalize(namex) precondition(IFilesystemNode.providedBy(child), child) @@ -205,8 +218,8 @@ class Adder(object): return new_contents def _encrypt_rw_uri(writekey, rw_uri): - precondition(isinstance(rw_uri, str), rw_uri) - precondition(isinstance(writekey, str), writekey) + precondition(isinstance(rw_uri, bytes), rw_uri) + precondition(isinstance(writekey, bytes), writekey) salt = hashutil.mutable_rwcap_salt_hash(rw_uri) key = hashutil.mutable_rwcap_key_hash(salt, writekey) @@ -221,7 +234,7 @@ def _encrypt_rw_uri(writekey, rw_uri): def pack_children(childrenx, writekey, deep_immutable=False): # initial_children must have metadata (i.e. {} instead of None) children = {} - for (namex, (node, metadata)) in childrenx.iteritems(): + for (namex, (node, metadata)) in list(childrenx.items()): precondition(isinstance(metadata, dict), "directory creation requires metadata to be a dict, not None", metadata) children[normalize(namex)] = (node, metadata) @@ -245,18 +258,19 @@ def _pack_normalized_children(children, writekey, deep_immutable=False): If deep_immutable is True, I will require that all my children are deeply immutable, and will raise a MustBeDeepImmutableError if not. """ - precondition((writekey is None) or isinstance(writekey, str), writekey) + precondition((writekey is None) or isinstance(writekey, bytes), writekey) has_aux = isinstance(children, AuxValueDict) entries = [] for name in sorted(children.keys()): - assert isinstance(name, unicode) + assert isinstance(name, str) entry = None (child, metadata) = children[name] child.raise_error() if deep_immutable and not child.is_allowed_in_immutable_directory(): - raise MustBeDeepImmutableError("child %s is not allowed in an immutable directory" % - quote_output(name, encoding='utf-8'), name) + raise MustBeDeepImmutableError( + "child %r is not allowed in an immutable directory" % (name,), + name) if has_aux: entry = children.get_aux(name) if not entry: @@ -264,26 +278,26 @@ def _pack_normalized_children(children, writekey, deep_immutable=False): assert isinstance(metadata, dict) rw_uri = child.get_write_uri() if rw_uri is None: - rw_uri = "" - assert isinstance(rw_uri, str), rw_uri + rw_uri = b"" + assert isinstance(rw_uri, bytes), rw_uri # should be prevented by MustBeDeepImmutableError check above assert not (rw_uri and deep_immutable) ro_uri = child.get_readonly_uri() if ro_uri is None: - ro_uri = "" - assert isinstance(ro_uri, str), ro_uri + ro_uri = b"" + assert isinstance(ro_uri, bytes), ro_uri if writekey is not None: writecap = netstring(_encrypt_rw_uri(writekey, rw_uri)) else: writecap = ZERO_LEN_NETSTR - entry = "".join([netstring(name.encode("utf-8")), + entry = b"".join([netstring(name.encode("utf-8")), netstring(strip_prefix_for_ro(ro_uri, deep_immutable)), writecap, - netstring(json.dumps(metadata))]) + netstring(json.dumps(metadata).encode("utf-8"))]) entries.append(netstring(entry)) - return "".join(entries) + return b"".join(entries) @implementer(IDirectoryNode, ICheckable, IDeepCheckable) class DirectoryNode(object): @@ -352,9 +366,9 @@ class DirectoryNode(object): # cleartext. The 'name' is UTF-8 encoded, and should be normalized to NFC. 
# The rwcapdata is formatted as: # pack("16ss32s", iv, AES(H(writekey+iv), plaintext_rw_uri), mac) - assert isinstance(data, str), (repr(data), type(data)) + assert isinstance(data, bytes), (repr(data), type(data)) # an empty directory is serialized as an empty string - if data == "": + if data == b"": return AuxValueDict() writeable = not self.is_readonly() mutable = self.is_mutable() @@ -373,7 +387,7 @@ class DirectoryNode(object): # Therefore we normalize names going both in and out of directories. name = normalize(namex_utf8.decode("utf-8")) - rw_uri = "" + rw_uri = b"" if writeable: rw_uri = self._decrypt_rwcapdata(rwcapdata) @@ -384,8 +398,8 @@ class DirectoryNode(object): # ro_uri is treated in the same way for consistency. # rw_uri and ro_uri will be either None or a non-empty string. - rw_uri = rw_uri.rstrip(' ') or None - ro_uri = ro_uri.rstrip(' ') or None + rw_uri = rw_uri.rstrip(b' ') or None + ro_uri = ro_uri.rstrip(b' ') or None try: child = self._create_and_validate_node(rw_uri, ro_uri, name) @@ -468,7 +482,7 @@ class DirectoryNode(object): exists a child of the given name, False if not.""" name = normalize(namex) d = self._read() - d.addCallback(lambda children: children.has_key(name)) + d.addCallback(lambda children: name in children) return d def _get(self, children, name): @@ -543,7 +557,7 @@ class DirectoryNode(object): else: pathx = pathx.split("/") for p in pathx: - assert isinstance(p, unicode), p + assert isinstance(p, str), p childnamex = pathx[0] remaining_pathx = pathx[1:] if remaining_pathx: @@ -554,9 +568,9 @@ class DirectoryNode(object): d = self.get_child_and_metadata(childnamex) return d - def set_uri(self, namex, writecap, readcap, metadata=None, overwrite=True): - precondition(isinstance(writecap, (str,type(None))), writecap) - precondition(isinstance(readcap, (str,type(None))), readcap) + def set_uri(self, namex, writecap, readcap=None, metadata=None, overwrite=True): + precondition(isinstance(writecap, (bytes, type(None))), writecap) + precondition(isinstance(readcap, (bytes, type(None))), readcap) # We now allow packing unknown nodes, provided they are valid # for this type of directory. @@ -569,16 +583,16 @@ class DirectoryNode(object): # this takes URIs a = Adder(self, overwrite=overwrite, create_readonly_node=self._create_readonly_node) - for (namex, e) in entries.iteritems(): - assert isinstance(namex, unicode), namex + for (namex, e) in entries.items(): + assert isinstance(namex, str), namex if len(e) == 2: writecap, readcap = e metadata = None else: assert len(e) == 3 writecap, readcap, metadata = e - precondition(isinstance(writecap, (str,type(None))), writecap) - precondition(isinstance(readcap, (str,type(None))), readcap) + precondition(isinstance(writecap, (bytes,type(None))), writecap) + precondition(isinstance(readcap, (bytes,type(None))), readcap) # We now allow packing unknown nodes, provided they are valid # for this type of directory. @@ -779,7 +793,7 @@ class DirectoryNode(object): # in the nodecache) seem to consume about 2000 bytes. 
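An aside on the packing code ported above: every field of a serialized directory entry is framed with the netstring primitive from allmydata.util.netstring, and on Python 3 that framing has to stay bytes end to end. A minimal sketch of the primitive (an illustration mirroring the util module, not code from this diff):

    def netstring(s):
        # length-prefixed, comma-terminated frame; bytes in, bytes out
        assert isinstance(s, bytes)
        return b"%d:%s," % (len(s), s)

    assert netstring(b"name") == b"4:name,"

Each entry is then netstring(UTF-8 name) + netstring(ro_uri) + the already-netstringed encrypted writecap + netstring(JSON metadata), and _pack_normalized_children joins those frames with b"".join.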
dirkids = [] filekids = [] - for name, (child, metadata) in sorted(children.iteritems()): + for name, (child, metadata) in sorted(children.items()): childpath = path + [name] if isinstance(child, UnknownNode): walker.add_node(child, childpath) diff --git a/src/allmydata/frontends/auth.py b/src/allmydata/frontends/auth.py index 1bd481321..de406d604 100644 --- a/src/allmydata/frontends/auth.py +++ b/src/allmydata/frontends/auth.py @@ -4,8 +4,8 @@ from zope.interface import implementer from twisted.web.client import getPage from twisted.internet import defer from twisted.cred import error, checkers, credentials -from twisted.conch import error as conch_error from twisted.conch.ssh import keys +from twisted.conch.checkers import SSHPublicKeyChecker, InMemorySSHKeyDB from allmydata.util import base32 from allmydata.util.fileutil import abspath_expanduser_unicode @@ -29,7 +29,7 @@ class AccountFileChecker(object): def __init__(self, client, accountfile): self.client = client self.passwords = {} - self.pubkeys = {} + pubkeys = {} self.rootcaps = {} with open(abspath_expanduser_unicode(accountfile), "r") as f: for line in f: @@ -40,12 +40,14 @@ class AccountFileChecker(object): if passwd.startswith("ssh-"): bits = rest.split() keystring = " ".join([passwd] + bits[:-1]) + key = keys.Key.fromString(keystring) rootcap = bits[-1] - self.pubkeys[name] = keystring + pubkeys[name] = [key] else: self.passwords[name] = passwd rootcap = rest self.rootcaps[name] = rootcap + self._pubkeychecker = SSHPublicKeyChecker(InMemorySSHKeyDB(pubkeys)) def _avatarId(self, username): return FTPAvatarID(username, self.rootcaps[username]) @@ -57,11 +59,9 @@ class AccountFileChecker(object): def requestAvatarId(self, creds): if credentials.ISSHPrivateKey.providedBy(creds): - # Re-using twisted.conch.checkers.SSHPublicKeyChecker here, rather - # than re-implementing all of the ISSHPrivateKey checking logic, - # would be better. That would require Twisted 14.1.0 or newer, - # though. - return self._checkKey(creds) + d = defer.maybeDeferred(self._pubkeychecker.requestAvatarId, creds) + d.addCallback(self._avatarId) + return d elif credentials.IUsernameHashedPassword.providedBy(creds): return self._checkPassword(creds) elif credentials.IUsernamePassword.providedBy(creds): @@ -86,28 +86,6 @@ class AccountFileChecker(object): d.addCallback(self._cbPasswordMatch, str(creds.username)) return d - def _checkKey(self, creds): - """ - Determine whether some key-based credentials correctly authenticates a - user. - - Returns a Deferred that fires with the username if so or with an - UnauthorizedLogin failure otherwise. - """ - - # Is the public key indicated by the given credentials allowed to - # authenticate the username in those credentials? - if creds.blob == self.pubkeys.get(creds.username): - if creds.signature is None: - return defer.fail(conch_error.ValidPublicKey()) - - # Is the signature in the given credentials the correct - # signature for the data in those credentials? 
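Stepping back to the frontends/auth.py hunk above: the hand-rolled _checkKey path is gone in favour of twisted.conch.checkers.SSHPublicKeyChecker, which the old comment noted would require Twisted >= 14.1.0, a floor long since passed. Roughly how the new pieces fit together, as a sketch with a hypothetical helper (the account-line format matches the parser above):

    from twisted.conch.checkers import SSHPublicKeyChecker, InMemorySSHKeyDB
    from twisted.conch.ssh import keys

    def checker_from_account_lines(lines):
        # each line: "name ssh-rsa AAAA... rootcap"
        pubkeys = {}
        for line in lines:
            name, keytype, blob, rootcap = line.split(None, 3)
            pubkeys[name] = [keys.Key.fromString(keytype + " " + blob)]
        return SSHPublicKeyChecker(InMemorySSHKeyDB(pubkeys))

InMemorySSHKeyDB is just a mapping of usernames to lists of keys.Key objects; requestAvatarId verifies the client's signature and fires with the username, which AccountFileChecker._avatarId then turns into an FTPAvatarID carrying the rootcap.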
- key = keys.Key.fromString(creds.blob) - if key.verify(creds.signature, creds.sigData): - return defer.succeed(self._avatarId(creds.username)) - - return defer.fail(error.UnauthorizedLogin()) @implementer(checkers.ICredentialsChecker) class AccountURLChecker(object): diff --git a/src/allmydata/frontends/ftpd.py b/src/allmydata/frontends/ftpd.py deleted file mode 100644 index 3c1e91b90..000000000 --- a/src/allmydata/frontends/ftpd.py +++ /dev/null @@ -1,337 +0,0 @@ - -from types import NoneType - -from zope.interface import implementer -from twisted.application import service, strports -from twisted.internet import defer -from twisted.internet.interfaces import IConsumer -from twisted.cred import portal -from twisted.python import filepath -from twisted.protocols import ftp - -from allmydata.interfaces import IDirectoryNode, ExistingChildError, \ - NoSuchChildError -from allmydata.immutable.upload import FileHandle -from allmydata.util.fileutil import EncryptedTemporaryFile -from allmydata.util.assertutil import precondition - -@implementer(ftp.IReadFile) -class ReadFile(object): - def __init__(self, node): - self.node = node - def send(self, consumer): - d = self.node.read(consumer) - return d # when consumed - -@implementer(IConsumer) -class FileWriter(object): - - def registerProducer(self, producer, streaming): - if not streaming: - raise NotImplementedError("Non-streaming producer not supported.") - # we write the data to a temporary file, since Tahoe can't do - # streaming upload yet. - self.f = EncryptedTemporaryFile() - return None - - def unregisterProducer(self): - # the upload actually happens in WriteFile.close() - pass - - def write(self, data): - self.f.write(data) - -@implementer(ftp.IWriteFile) -class WriteFile(object): - - def __init__(self, parent, childname, convergence): - self.parent = parent - self.childname = childname - self.convergence = convergence - - def receive(self): - self.c = FileWriter() - return defer.succeed(self.c) - - def close(self): - u = FileHandle(self.c.f, self.convergence) - d = self.parent.add_file(self.childname, u) - return d - - -class NoParentError(Exception): - pass - -# filepath.Permissions was added in Twisted-11.1.0, which we require. Twisted -# <15.0.0 expected an int, and only does '&' on it. Twisted >=15.0.0 expects -# a filepath.Permissions. This satisfies both. - -class IntishPermissions(filepath.Permissions): - def __init__(self, statModeInt): - self._tahoe_statModeInt = statModeInt - filepath.Permissions.__init__(self, statModeInt) - def __and__(self, other): - return self._tahoe_statModeInt & other - -@implementer(ftp.IFTPShell) -class Handler(object): - def __init__(self, client, rootnode, username, convergence): - self.client = client - self.root = rootnode - self.username = username - self.convergence = convergence - - def makeDirectory(self, path): - d = self._get_root(path) - d.addCallback(lambda root_and_path: - self._get_or_create_directories(root_and_path[0], root_and_path[1])) - return d - - def _get_or_create_directories(self, node, path): - if not IDirectoryNode.providedBy(node): - # unfortunately it is too late to provide the name of the - # blocking directory in the error message. 
- raise ftp.FileExistsError("cannot create directory because there " - "is a file in the way") - if not path: - return defer.succeed(node) - d = node.get(path[0]) - def _maybe_create(f): - f.trap(NoSuchChildError) - return node.create_subdirectory(path[0]) - d.addErrback(_maybe_create) - d.addCallback(self._get_or_create_directories, path[1:]) - return d - - def _get_parent(self, path): - # fire with (parentnode, childname) - path = [unicode(p) for p in path] - if not path: - raise NoParentError - childname = path[-1] - d = self._get_root(path) - def _got_root(root_and_path): - (root, path) = root_and_path - if not path: - raise NoParentError - return root.get_child_at_path(path[:-1]) - d.addCallback(_got_root) - def _got_parent(parent): - return (parent, childname) - d.addCallback(_got_parent) - return d - - def _remove_thing(self, path, must_be_directory=False, must_be_file=False): - d = defer.maybeDeferred(self._get_parent, path) - def _convert_error(f): - f.trap(NoParentError) - raise ftp.PermissionDeniedError("cannot delete root directory") - d.addErrback(_convert_error) - def _got_parent(parent_and_childname): - (parent, childname) = parent_and_childname - d = parent.get(childname) - def _got_child(child): - if must_be_directory and not IDirectoryNode.providedBy(child): - raise ftp.IsNotADirectoryError("rmdir called on a file") - if must_be_file and IDirectoryNode.providedBy(child): - raise ftp.IsADirectoryError("rmfile called on a directory") - return parent.delete(childname) - d.addCallback(_got_child) - d.addErrback(self._convert_error) - return d - d.addCallback(_got_parent) - return d - - def removeDirectory(self, path): - return self._remove_thing(path, must_be_directory=True) - - def removeFile(self, path): - return self._remove_thing(path, must_be_file=True) - - def rename(self, fromPath, toPath): - # the target directory must already exist - d = self._get_parent(fromPath) - def _got_from_parent(fromparent_and_childname): - (fromparent, childname) = fromparent_and_childname - d = self._get_parent(toPath) - d.addCallback(lambda toparent_and_tochildname: - fromparent.move_child_to(childname, - toparent_and_tochildname[0], toparent_and_tochildname[1], - overwrite=False)) - return d - d.addCallback(_got_from_parent) - d.addErrback(self._convert_error) - return d - - def access(self, path): - # we allow access to everything that exists. We are required to raise - # an error for paths that don't exist: FTP clients (at least ncftp) - # uses this to decide whether to mkdir or not. 
- d = self._get_node_and_metadata_for_path(path) - d.addErrback(self._convert_error) - d.addCallback(lambda res: None) - return d - - def _convert_error(self, f): - if f.check(NoSuchChildError): - childname = f.value.args[0].encode("utf-8") - msg = "'%s' doesn't exist" % childname - raise ftp.FileNotFoundError(msg) - if f.check(ExistingChildError): - msg = f.value.args[0].encode("utf-8") - raise ftp.FileExistsError(msg) - return f - - def _get_root(self, path): - # return (root, remaining_path) - path = [unicode(p) for p in path] - if path and path[0] == "uri": - d = defer.maybeDeferred(self.client.create_node_from_uri, - str(path[1])) - d.addCallback(lambda root: (root, path[2:])) - else: - d = defer.succeed((self.root,path)) - return d - - def _get_node_and_metadata_for_path(self, path): - d = self._get_root(path) - def _got_root(root_and_path): - (root,path) = root_and_path - if path: - return root.get_child_and_metadata_at_path(path) - else: - return (root,{}) - d.addCallback(_got_root) - return d - - def _populate_row(self, keys, childnode_and_metadata): - (childnode, metadata) = childnode_and_metadata - values = [] - isdir = bool(IDirectoryNode.providedBy(childnode)) - for key in keys: - if key == "size": - if isdir: - value = 0 - else: - value = childnode.get_size() or 0 - elif key == "directory": - value = isdir - elif key == "permissions": - # Twisted-14.0.2 (and earlier) expected an int, and used it - # in a rendering function that did (mode & NUMBER). - # Twisted-15.0.0 expects a - # twisted.python.filepath.Permissions , and calls its - # .shorthand() method. This provides both. - value = IntishPermissions(0o600) - elif key == "hardlinks": - value = 1 - elif key == "modified": - # follow sftpd convention (i.e. linkmotime in preference to mtime) - if "linkmotime" in metadata.get("tahoe", {}): - value = metadata["tahoe"]["linkmotime"] - else: - value = metadata.get("mtime", 0) - elif key == "owner": - value = self.username - elif key == "group": - value = self.username - else: - value = "??" 
- values.append(value) - return values - - def stat(self, path, keys=()): - # for files only, I think - d = self._get_node_and_metadata_for_path(path) - def _render(node_and_metadata): - (node, metadata) = node_and_metadata - assert not IDirectoryNode.providedBy(node) - return self._populate_row(keys, (node,metadata)) - d.addCallback(_render) - d.addErrback(self._convert_error) - return d - - def list(self, path, keys=()): - # the interface claims that path is a list of unicodes, but in - # practice it is not - d = self._get_node_and_metadata_for_path(path) - def _list(node_and_metadata): - (node, metadata) = node_and_metadata - if IDirectoryNode.providedBy(node): - return node.list() - return { path[-1]: (node, metadata) } # need last-edge metadata - d.addCallback(_list) - def _render(children): - results = [] - for (name, childnode) in children.iteritems(): - # the interface claims that the result should have a unicode - # object as the name, but it fails unless you give it a - # bytestring - results.append( (name.encode("utf-8"), - self._populate_row(keys, childnode) ) ) - return results - d.addCallback(_render) - d.addErrback(self._convert_error) - return d - - def openForReading(self, path): - d = self._get_node_and_metadata_for_path(path) - d.addCallback(lambda node_and_metadata: ReadFile(node_and_metadata[0])) - d.addErrback(self._convert_error) - return d - - def openForWriting(self, path): - path = [unicode(p) for p in path] - if not path: - raise ftp.PermissionDeniedError("cannot STOR to root directory") - childname = path[-1] - d = self._get_root(path) - def _got_root(root_and_path): - (root, path) = root_and_path - if not path: - raise ftp.PermissionDeniedError("cannot STOR to root directory") - return root.get_child_at_path(path[:-1]) - d.addCallback(_got_root) - def _got_parent(parent): - return WriteFile(parent, childname, self.convergence) - d.addCallback(_got_parent) - return d - -from allmydata.frontends.auth import AccountURLChecker, AccountFileChecker, NeedRootcapLookupScheme - - -@implementer(portal.IRealm) -class Dispatcher(object): - def __init__(self, client): - self.client = client - - def requestAvatar(self, avatarID, mind, interface): - assert interface == ftp.IFTPShell - rootnode = self.client.create_node_from_uri(avatarID.rootcap) - convergence = self.client.convergence - s = Handler(self.client, rootnode, avatarID.username, convergence) - def logout(): pass - return (interface, s, None) - - -class FTPServer(service.MultiService): - def __init__(self, client, accountfile, accounturl, ftp_portstr): - precondition(isinstance(accountfile, (unicode, NoneType)), accountfile) - service.MultiService.__init__(self) - - r = Dispatcher(client) - p = portal.Portal(r) - - if accountfile: - c = AccountFileChecker(self, accountfile) - p.registerChecker(c) - if accounturl: - c = AccountURLChecker(self, accounturl) - p.registerChecker(c) - if not accountfile and not accounturl: - # we could leave this anonymous, with just the /uri/CAP form - raise NeedRootcapLookupScheme("must provide some translation") - - f = ftp.FTPFactory(p) - s = strports.service(ftp_portstr, f) - s.setServiceParent(self) diff --git a/src/allmydata/frontends/sftpd.py b/src/allmydata/frontends/sftpd.py index db914fa45..bc7196de6 100644 --- a/src/allmydata/frontends/sftpd.py +++ b/src/allmydata/frontends/sftpd.py @@ -1,6 +1,17 @@ +""" +Ported to Python 3. 
+""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 + import six -import heapq, traceback, array, stat, struct -from types import NoneType +import heapq, traceback, stat, struct from stat import S_IFREG, S_IFDIR from time import time, strftime, localtime @@ -45,6 +56,17 @@ from allmydata.util.log import NOISY, OPERATIONAL, WEIRD, \ if six.PY3: long = int + +def createSFTPError(errorCode, errorMessage): + """ + SFTPError that can accept both Unicode and bytes. + + Twisted expects _native_ strings for the SFTPError message, but we often do + Unicode by default even on Python 2. + """ + return SFTPError(errorCode, six.ensure_str(errorMessage)) + + def eventually_callback(d): return lambda res: eventually(d.callback, res) @@ -53,9 +75,9 @@ def eventually_errback(d): def _utf8(x): - if isinstance(x, unicode): - return x.encode('utf-8') if isinstance(x, str): + return x.encode('utf-8') + if isinstance(x, bytes): return x return repr(x) @@ -64,7 +86,7 @@ def _to_sftp_time(t): """SFTP times are unsigned 32-bit integers representing UTC seconds (ignoring leap seconds) since the Unix epoch, January 1 1970 00:00 UTC. A Tahoe time is the corresponding float.""" - return long(t) & long(0xFFFFFFFF) + return int(t) & int(0xFFFFFFFF) def _convert_error(res, request): @@ -73,7 +95,7 @@ def _convert_error(res, request): if not isinstance(res, Failure): logged_res = res - if isinstance(res, str): logged_res = "" % (len(res),) + if isinstance(res, (bytes, str)): logged_res = "" % (len(res),) logmsg("SUCCESS %r %r" % (request, logged_res,), level=OPERATIONAL) return res @@ -92,10 +114,10 @@ def _convert_error(res, request): raise err if err.check(NoSuchChildError): childname = _utf8(err.value.args[0]) - raise SFTPError(FX_NO_SUCH_FILE, childname) + raise createSFTPError(FX_NO_SUCH_FILE, childname) if err.check(NotWriteableError) or err.check(ChildOfWrongTypeError): msg = _utf8(err.value.args[0]) - raise SFTPError(FX_PERMISSION_DENIED, msg) + raise createSFTPError(FX_PERMISSION_DENIED, msg) if err.check(ExistingChildError): # Versions of SFTP after v3 (which is what twisted.conch implements) # define a specific error code for this case: FX_FILE_ALREADY_EXISTS. @@ -104,16 +126,16 @@ def _convert_error(res, request): # to translate the error to the equivalent of POSIX EEXIST, which is # necessary for some picky programs (such as gedit). msg = _utf8(err.value.args[0]) - raise SFTPError(FX_FAILURE, msg) + raise createSFTPError(FX_FAILURE, msg) if err.check(NotImplementedError): - raise SFTPError(FX_OP_UNSUPPORTED, _utf8(err.value)) + raise createSFTPError(FX_OP_UNSUPPORTED, _utf8(err.value)) if err.check(EOFError): - raise SFTPError(FX_EOF, "end of file reached") + raise createSFTPError(FX_EOF, "end of file reached") if err.check(defer.FirstError): _convert_error(err.value.subFailure, request) # We assume that the error message is not anonymity-sensitive. - raise SFTPError(FX_FAILURE, _utf8(err.value)) + raise createSFTPError(FX_FAILURE, _utf8(err.value)) def _repr_flags(flags): @@ -146,7 +168,7 @@ def _lsLine(name, attrs): # Since we now depend on Twisted v10.1, consider calling Twisted's version. 
@@ -146,7 +168,7 @@
     # Since we now depend on Twisted v10.1, consider calling Twisted's version.
     mode = st_mode
-    perms = array.array('c', '-'*10)
+    perms = ["-"] * 10
     ft = stat.S_IFMT(mode)
     if stat.S_ISDIR(ft): perms[0] = 'd'
     elif stat.S_ISREG(ft): perms[0] = '-'
@@ -165,7 +187,7 @@
     if mode&stat.S_IXOTH: perms[9] = 'x'
     # suid/sgid never set
 
-    l = perms.tostring()
+    l = "".join(perms)
     l += str(st_nlink).rjust(5) + ' '
     un = str(st_uid)
     l += un.ljust(9)
@@ -182,6 +204,7 @@
         l += strftime("%b %d %Y ", localtime(st_mtime))
     else:
         l += strftime("%b %d %H:%M ", localtime(st_mtime))
+    l = l.encode("utf-8")
     l += name
     return l
 
@@ -223,7 +246,7 @@
     if childnode and size is None:
         size = childnode.get_size()
     if size is not None:
-        _assert(isinstance(size, (int, long)) and not isinstance(size, bool), size=size)
+        _assert(isinstance(size, int) and not isinstance(size, bool), size=size)
         attrs['size'] = size
     perms = S_IFREG | 0o666
 
@@ -255,7 +278,7 @@
     for key in attrs:
         if key == "mtime" or key == "ctime" or key == "createtime":
-            metadata[key] = long(attrs[key])
+            metadata[key] = int(attrs[key])
         elif key.startswith("ext_"):
             metadata[key] = str(attrs[key])
 
@@ -267,7 +290,7 @@
 
 def _direntry_for(filenode_or_parent, childname, filenode=None):
-    precondition(isinstance(childname, (unicode, NoneType)), childname=childname)
+    precondition(isinstance(childname, (str, type(None))), childname=childname)
 
     if childname is None:
         filenode_or_parent = filenode
@@ -275,7 +298,7 @@
     if filenode_or_parent:
         rw_uri = filenode_or_parent.get_write_uri()
         if rw_uri and childname:
-            return rw_uri + "/" + childname.encode('utf-8')
+            return rw_uri + b"/" + childname.encode('utf-8')
         else:
             return rw_uri
 
@@ -327,7 +350,7 @@
         if size < self.current_size or size < self.downloaded:
             self.f.truncate(size)
         if size > self.current_size:
-            self.overwrite(self.current_size, "\x00" * (size - self.current_size))
+            self.overwrite(self.current_size, b"\x00" * (size - self.current_size))
         self.current_size = size
 
         # make the invariant self.download_size <= self.current_size be true again
@@ -335,7 +358,7 @@
             self.download_size = size
 
         if self.downloaded >= self.download_size:
-            self.download_done("size changed")
+            self.download_done(b"size changed")
 
     def registerProducer(self, p, streaming):
         if noisy: self.log(".registerProducer(%r, streaming=%r)" % (p, streaming), level=NOISY)
@@ -410,21 +433,21 @@
             milestone = end
 
         while len(self.milestones) > 0:
-            (next, d) = self.milestones[0]
-            if next > milestone:
+            (next_, d) = self.milestones[0]
+            if next_ > milestone:
                 return
-            if noisy: self.log("MILESTONE %r %r" % (next, d), level=NOISY)
+            if noisy: self.log("MILESTONE %r %r" % (next_, d), level=NOISY)
             heapq.heappop(self.milestones)
-            eventually_callback(d)("reached")
+            eventually_callback(d)(b"reached")
 
         if milestone >= self.download_size:
-            self.download_done("reached download size")
+            self.download_done(b"reached download size")
 
     def overwrite(self, offset, data):
         if noisy: self.log(".overwrite(%r, <data of length %r>)" % (offset, len(data)), level=NOISY)
         if self.is_closed:
             self.log("overwrite called on a closed OverwriteableFileConsumer", level=WEIRD)
-            raise SFTPError(FX_BAD_MESSAGE, "cannot write to a closed file handle")
+            raise createSFTPError(FX_BAD_MESSAGE, "cannot write to a closed file 
handle") if offset > self.current_size: # Normally writing at an offset beyond the current end-of-file @@ -435,7 +458,7 @@ class OverwriteableFileConsumer(PrefixingLogMixin): # the gap between the current EOF and the offset. self.f.seek(self.current_size) - self.f.write("\x00" * (offset - self.current_size)) + self.f.write(b"\x00" * (offset - self.current_size)) start = self.current_size else: self.f.seek(offset) @@ -455,7 +478,7 @@ class OverwriteableFileConsumer(PrefixingLogMixin): if noisy: self.log(".read(%r, %r), current_size = %r" % (offset, length, self.current_size), level=NOISY) if self.is_closed: self.log("read called on a closed OverwriteableFileConsumer", level=WEIRD) - raise SFTPError(FX_BAD_MESSAGE, "cannot read from a closed file handle") + raise createSFTPError(FX_BAD_MESSAGE, "cannot read from a closed file handle") # Note that the overwrite method is synchronous. When a write request is processed # (e.g. a writeChunk request on the async queue of GeneralSFTPFile), overwrite will @@ -509,7 +532,7 @@ class OverwriteableFileConsumer(PrefixingLogMixin): return d def download_done(self, res): - _assert(isinstance(res, (str, Failure)), res=res) + _assert(isinstance(res, (bytes, Failure)), res=res) # Only the first call to download_done counts, but we log subsequent calls # (multiple calls are normal). if self.done_status is not None: @@ -526,8 +549,8 @@ class OverwriteableFileConsumer(PrefixingLogMixin): eventually_callback(self.done)(None) while len(self.milestones) > 0: - (next, d) = self.milestones[0] - if noisy: self.log("MILESTONE FINISH %r %r %r" % (next, d, res), level=NOISY) + (next_, d) = self.milestones[0] + if noisy: self.log("MILESTONE FINISH %r %r %r" % (next_, d, res), level=NOISY) heapq.heappop(self.milestones) # The callback means that the milestone has been reached if # it is ever going to be. Note that the file may have been @@ -541,7 +564,7 @@ class OverwriteableFileConsumer(PrefixingLogMixin): self.f.close() except Exception as e: self.log("suppressed %r from close of temporary file %r" % (e, self.f), level=WEIRD) - self.download_done("closed") + self.download_done(b"closed") return self.done_status def unregisterProducer(self): @@ -565,7 +588,7 @@ class ShortReadOnlySFTPFile(PrefixingLogMixin): PrefixingLogMixin.__init__(self, facility="tahoe.sftp", prefix=userpath) if noisy: self.log(".__init__(%r, %r, %r)" % (userpath, filenode, metadata), level=NOISY) - precondition(isinstance(userpath, str) and IFileNode.providedBy(filenode), + precondition(isinstance(userpath, bytes) and IFileNode.providedBy(filenode), userpath=userpath, filenode=filenode) self.filenode = filenode self.metadata = metadata @@ -577,7 +600,7 @@ class ShortReadOnlySFTPFile(PrefixingLogMixin): self.log(request, level=OPERATIONAL) if self.closed: - def _closed(): raise SFTPError(FX_BAD_MESSAGE, "cannot read from a closed file handle") + def _closed(): raise createSFTPError(FX_BAD_MESSAGE, "cannot read from a closed file handle") return defer.execute(_closed) d = defer.Deferred() @@ -594,7 +617,7 @@ class ShortReadOnlySFTPFile(PrefixingLogMixin): # i.e. we respond with an EOF error iff offset is already at EOF. 
             if offset >= len(data):
-                eventually_errback(d)(Failure(SFTPError(FX_EOF, "read at or past end of file")))
+                eventually_errback(d)(Failure(createSFTPError(FX_EOF, "read at or past end of file")))
             else:
                 eventually_callback(d)(data[offset:offset+length])  # truncated if offset+length > len(data)
             return data
@@ -605,7 +628,7 @@
 
     def writeChunk(self, offset, data):
         self.log(".writeChunk(%r, <data of length %r>) denied" % (offset, len(data)), level=OPERATIONAL)
 
-        def _denied(): raise SFTPError(FX_PERMISSION_DENIED, "file handle was not opened for writing")
+        def _denied(): raise createSFTPError(FX_PERMISSION_DENIED, "file handle was not opened for writing")
         return defer.execute(_denied)
 
     def close(self):
@@ -619,7 +642,7 @@
         self.log(request, level=OPERATIONAL)
 
         if self.closed:
-            def _closed(): raise SFTPError(FX_BAD_MESSAGE, "cannot get attributes for a closed file handle")
+            def _closed(): raise createSFTPError(FX_BAD_MESSAGE, "cannot get attributes for a closed file handle")
             return defer.execute(_closed)
 
         d = defer.execute(_populate_attrs, self.filenode, self.metadata)
@@ -628,7 +651,7 @@
 
     def setAttrs(self, attrs):
         self.log(".setAttrs(%r) denied" % (attrs,), level=OPERATIONAL)
-        def _denied(): raise SFTPError(FX_PERMISSION_DENIED, "file handle was not opened for writing")
+        def _denied(): raise createSFTPError(FX_PERMISSION_DENIED, "file handle was not opened for writing")
         return defer.execute(_denied)
 
 
@@ -649,7 +672,7 @@
         if noisy: self.log(".__init__(%r, %r = %r, %r, <convergence censored>)" % (userpath, flags, _repr_flags(flags), close_notify), level=NOISY)
-        precondition(isinstance(userpath, str), userpath=userpath)
+        precondition(isinstance(userpath, bytes), userpath=userpath)
         self.userpath = userpath
         self.flags = flags
         self.close_notify = close_notify
@@ -668,11 +691,11 @@
         # not be set before then.
         self.consumer = None
 
-    def open(self, parent=None, childname=None, filenode=None, metadata=None):
+    def open(self, parent=None, childname=None, filenode=None, metadata=None):  # noqa: F811
         self.log(".open(parent=%r, childname=%r, filenode=%r, metadata=%r)" %
                  (parent, childname, filenode, metadata), level=OPERATIONAL)
 
-        precondition(isinstance(childname, (unicode, NoneType)), childname=childname)
+        precondition(isinstance(childname, (str, type(None))), childname=childname)
         precondition(filenode is None or IFileNode.providedBy(filenode), filenode=filenode)
         precondition(not self.closed, sftpfile=self)
 
@@ -689,7 +712,7 @@
         if (self.flags & FXF_TRUNC) or not filenode:
             # We're either truncating or creating the file, so we don't need the old contents.
             self.consumer = OverwriteableFileConsumer(0, tempfile_maker)
-            self.consumer.download_done("download not needed")
+            self.consumer.download_done(b"download not needed")
         else:
             self.async_.addCallback(lambda ignored: filenode.get_best_readable_version())
 
@@ -703,7 +726,7 @@
                 d = version.read(self.consumer, 0, None)
                 def _finished(res):
                     if not isinstance(res, Failure):
-                        res = "download finished"
+                        res = b"download finished"
                     self.consumer.download_done(res)
                 d.addBoth(_finished)
                 # It is correct to drop d here.
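The open() logic above turns on the SFTP open-flag bitmask: with FXF_TRUNC, or with no existing file node, the old contents are never downloaded into the temporary file at all. The decision in miniature, using the standard SFTP v3 flag values (the same ones twisted.conch.ssh.filetransfer defines; a sketch only, not code from the diff):

    FXF_READ, FXF_WRITE, FXF_APPEND, FXF_CREAT, FXF_TRUNC, FXF_EXCL = (
        0x01, 0x02, 0x04, 0x08, 0x10, 0x20)

    def must_download_existing(flags, filenode):
        # mirrors GeneralSFTPFile.open: truncating, or creating a file that
        # does not exist yet, makes the old contents irrelevant
        return not ((flags & FXF_TRUNC) or not filenode)

    assert must_download_existing(FXF_READ | FXF_WRITE, filenode=object())
    assert not must_download_existing(FXF_WRITE | FXF_TRUNC, filenode=object())

Either way, the sentinel handed to download_done is now bytes, which is why those milestone strings all grew b prefixes in this port.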
@@ -723,7 +746,7 @@
     def rename(self, new_userpath, new_parent, new_childname):
         self.log(".rename(%r, %r, %r)" % (new_userpath, new_parent, new_childname), level=OPERATIONAL)
 
-        precondition(isinstance(new_userpath, str) and isinstance(new_childname, unicode),
+        precondition(isinstance(new_userpath, bytes) and isinstance(new_childname, str),
                      new_userpath=new_userpath, new_childname=new_childname)
         self.userpath = new_userpath
         self.parent = new_parent
@@ -751,11 +774,11 @@
         self.log(request, level=OPERATIONAL)
 
         if not (self.flags & FXF_READ):
-            def _denied(): raise SFTPError(FX_PERMISSION_DENIED, "file handle was not opened for reading")
+            def _denied(): raise createSFTPError(FX_PERMISSION_DENIED, "file handle was not opened for reading")
             return defer.execute(_denied)
 
         if self.closed:
-            def _closed(): raise SFTPError(FX_BAD_MESSAGE, "cannot read from a closed file handle")
+            def _closed(): raise createSFTPError(FX_BAD_MESSAGE, "cannot read from a closed file handle")
             return defer.execute(_closed)
 
         d = defer.Deferred()
@@ -773,11 +796,11 @@
         self.log(".writeChunk(%r, <data of length %r>)" % (offset, len(data)), level=OPERATIONAL)
 
         if not (self.flags & FXF_WRITE):
-            def _denied(): raise SFTPError(FX_PERMISSION_DENIED, "file handle was not opened for writing")
+            def _denied(): raise createSFTPError(FX_PERMISSION_DENIED, "file handle was not opened for writing")
             return defer.execute(_denied)
 
         if self.closed:
-            def _closed(): raise SFTPError(FX_BAD_MESSAGE, "cannot write to a closed file handle")
+            def _closed(): raise createSFTPError(FX_BAD_MESSAGE, "cannot write to a closed file handle")
             return defer.execute(_closed)
 
         self.has_changed = True
@@ -893,7 +916,7 @@
         self.log(request, level=OPERATIONAL)
 
         if self.closed:
-            def _closed(): raise SFTPError(FX_BAD_MESSAGE, "cannot get attributes for a closed file handle")
+            def _closed(): raise createSFTPError(FX_BAD_MESSAGE, "cannot get attributes for a closed file handle")
             return defer.execute(_closed)
 
         # Optimization for read-only handles, when we already know the metadata.
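Across these hunks the port settles on a single convention: userpaths (and direntries) are UTF-8-encoded bytes, while childnames are text, which is why the preconditions flip from str/unicode to bytes/str. The shape of the round trip, as a hypothetical helper (illustration only, not in the codebase):

    def make_userpath(parent_path, childname):
        # parent_path: bytes; childname: text, already NFC-normalized
        assert isinstance(parent_path, bytes) and isinstance(childname, str)
        return parent_path + b"/" + childname.encode("utf-8")

    assert make_userpath(b"home", u"r\u00e9sum\u00e9") == b"home/r\xc3\xa9sum\xc3\xa9"

Keeping the two types segregated means every encode or decode happens at a named boundary rather than implicitly inside a string concatenation, which is where the Python 2 code could silently mix them.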
@@ -917,16 +940,16 @@ class GeneralSFTPFile(PrefixingLogMixin): self.log(request, level=OPERATIONAL) if not (self.flags & FXF_WRITE): - def _denied(): raise SFTPError(FX_PERMISSION_DENIED, "file handle was not opened for writing") + def _denied(): raise createSFTPError(FX_PERMISSION_DENIED, "file handle was not opened for writing") return defer.execute(_denied) if self.closed: - def _closed(): raise SFTPError(FX_BAD_MESSAGE, "cannot set attributes for a closed file handle") + def _closed(): raise createSFTPError(FX_BAD_MESSAGE, "cannot set attributes for a closed file handle") return defer.execute(_closed) size = attrs.get("size", None) - if size is not None and (not isinstance(size, (int, long)) or size < 0): - def _bad(): raise SFTPError(FX_BAD_MESSAGE, "new size is not a valid nonnegative integer") + if size is not None and (not isinstance(size, int) or size < 0): + def _bad(): raise createSFTPError(FX_BAD_MESSAGE, "new size is not a valid nonnegative integer") return defer.execute(_bad) d = defer.Deferred() @@ -1012,7 +1035,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): def logout(self): self.log(".logout()", level=OPERATIONAL) - for files in self._heisenfiles.itervalues(): + for files in self._heisenfiles.values(): for f in files: f.abandon() @@ -1039,7 +1062,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): request = "._abandon_any_heisenfiles(%r, %r)" % (userpath, direntry) self.log(request, level=OPERATIONAL) - precondition(isinstance(userpath, str), userpath=userpath) + precondition(isinstance(userpath, bytes), userpath=userpath) # First we synchronously mark all heisenfiles matching the userpath or direntry # as abandoned, and remove them from the two heisenfile dicts. Then we .sync() @@ -1088,8 +1111,8 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): (from_userpath, from_parent, from_childname, to_userpath, to_parent, to_childname, overwrite)) self.log(request, level=OPERATIONAL) - precondition((isinstance(from_userpath, str) and isinstance(from_childname, unicode) and - isinstance(to_userpath, str) and isinstance(to_childname, unicode)), + precondition((isinstance(from_userpath, bytes) and isinstance(from_childname, str) and + isinstance(to_userpath, bytes) and isinstance(to_childname, str)), from_userpath=from_userpath, from_childname=from_childname, to_userpath=to_userpath, to_childname=to_childname) if noisy: self.log("all_heisenfiles = %r\nself._heisenfiles = %r" % (all_heisenfiles, self._heisenfiles), level=NOISY) @@ -1118,7 +1141,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): # does not mean that they were not committed; it is used to determine whether # a NoSuchChildError from the rename attempt should be suppressed). If overwrite # is False and there were already heisenfiles at the destination userpath or - # direntry, we return a Deferred that fails with SFTPError(FX_PERMISSION_DENIED). + # direntry, we return a Deferred that fails with createSFTPError(FX_PERMISSION_DENIED). 
from_direntry = _direntry_for(from_parent, from_childname) to_direntry = _direntry_for(to_parent, to_childname) @@ -1127,7 +1150,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): (from_direntry, to_direntry, len(all_heisenfiles), len(self._heisenfiles), request), level=NOISY) if not overwrite and (to_userpath in self._heisenfiles or to_direntry in all_heisenfiles): - def _existing(): raise SFTPError(FX_PERMISSION_DENIED, "cannot rename to existing path " + to_userpath) + def _existing(): raise createSFTPError(FX_PERMISSION_DENIED, "cannot rename to existing path " + str(to_userpath, "utf-8")) if noisy: self.log("existing", level=NOISY) return defer.execute(_existing) @@ -1161,7 +1184,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): request = "._update_attrs_for_heisenfiles(%r, %r, %r)" % (userpath, direntry, attrs) self.log(request, level=OPERATIONAL) - _assert(isinstance(userpath, str) and isinstance(direntry, str), + _assert(isinstance(userpath, bytes) and isinstance(direntry, bytes), userpath=userpath, direntry=direntry) files = [] @@ -1194,7 +1217,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): request = "._sync_heisenfiles(%r, %r, ignore=%r)" % (userpath, direntry, ignore) self.log(request, level=OPERATIONAL) - _assert(isinstance(userpath, str) and isinstance(direntry, (str, NoneType)), + _assert(isinstance(userpath, bytes) and isinstance(direntry, (bytes, type(None))), userpath=userpath, direntry=direntry) files = [] @@ -1219,7 +1242,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): def _remove_heisenfile(self, userpath, parent, childname, file_to_remove): if noisy: self.log("._remove_heisenfile(%r, %r, %r, %r)" % (userpath, parent, childname, file_to_remove), level=NOISY) - _assert(isinstance(userpath, str) and isinstance(childname, (unicode, NoneType)), + _assert(isinstance(userpath, bytes) and isinstance(childname, (str, type(None))), userpath=userpath, childname=childname) direntry = _direntry_for(parent, childname) @@ -1246,8 +1269,8 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): (existing_file, userpath, flags, _repr_flags(flags), parent, childname, filenode, metadata), level=NOISY) - _assert((isinstance(userpath, str) and isinstance(childname, (unicode, NoneType)) and - (metadata is None or 'no-write' in metadata)), + _assert((isinstance(userpath, bytes) and isinstance(childname, (str, type(None))) and + (metadata is None or 'no-write' in metadata)), userpath=userpath, childname=childname, metadata=metadata) writing = (flags & (FXF_WRITE | FXF_CREAT)) != 0 @@ -1280,17 +1303,17 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): if not (flags & (FXF_READ | FXF_WRITE)): def _bad_readwrite(): - raise SFTPError(FX_BAD_MESSAGE, "invalid file open flags: at least one of FXF_READ and FXF_WRITE must be set") + raise createSFTPError(FX_BAD_MESSAGE, "invalid file open flags: at least one of FXF_READ and FXF_WRITE must be set") return defer.execute(_bad_readwrite) if (flags & FXF_EXCL) and not (flags & FXF_CREAT): def _bad_exclcreat(): - raise SFTPError(FX_BAD_MESSAGE, "invalid file open flags: FXF_EXCL cannot be set without FXF_CREAT") + raise createSFTPError(FX_BAD_MESSAGE, "invalid file open flags: FXF_EXCL cannot be set without FXF_CREAT") return defer.execute(_bad_exclcreat) path = self._path_from_string(pathstring) if not path: - def _emptypath(): raise SFTPError(FX_NO_SUCH_FILE, "path cannot be empty") + def _emptypath(): raise createSFTPError(FX_NO_SUCH_FILE, "path cannot be empty") return defer.execute(_emptypath) # The combination 
of flags is potentially valid. @@ -1349,20 +1372,20 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): def _got_root(root_and_path): (root, path) = root_and_path if root.is_unknown(): - raise SFTPError(FX_PERMISSION_DENIED, + raise createSFTPError(FX_PERMISSION_DENIED, "cannot open an unknown cap (or child of an unknown object). " "Upgrading the gateway to a later Tahoe-LAFS version may help") if not path: # case 1 if noisy: self.log("case 1: root = %r, path[:-1] = %r" % (root, path[:-1]), level=NOISY) if not IFileNode.providedBy(root): - raise SFTPError(FX_PERMISSION_DENIED, + raise createSFTPError(FX_PERMISSION_DENIED, "cannot open a directory cap") if (flags & FXF_WRITE) and root.is_readonly(): - raise SFTPError(FX_PERMISSION_DENIED, + raise createSFTPError(FX_PERMISSION_DENIED, "cannot write to a non-writeable filecap without a parent directory") if flags & FXF_EXCL: - raise SFTPError(FX_FAILURE, + raise createSFTPError(FX_FAILURE, "cannot create a file exclusively when it already exists") # The file does not need to be added to all_heisenfiles, because it is not @@ -1389,7 +1412,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): def _got_parent(parent): if noisy: self.log("_got_parent(%r)" % (parent,), level=NOISY) if parent.is_unknown(): - raise SFTPError(FX_PERMISSION_DENIED, + raise createSFTPError(FX_PERMISSION_DENIED, "cannot open a child of an unknown object. " "Upgrading the gateway to a later Tahoe-LAFS version may help") @@ -1404,13 +1427,13 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): # which is consistent with what might happen on a POSIX filesystem. if parent_readonly: - raise SFTPError(FX_FAILURE, + raise createSFTPError(FX_FAILURE, "cannot create a file exclusively when the parent directory is read-only") # 'overwrite=False' ensures failure if the link already exists. # FIXME: should use a single call to set_uri and return (child, metadata) (#1035) - zero_length_lit = "URI:LIT:" + zero_length_lit = b"URI:LIT:" if noisy: self.log("%r.set_uri(%r, None, readcap=%r, overwrite=False)" % (parent, zero_length_lit, childname), level=NOISY) d3.addCallback(lambda ign: parent.set_uri(childname, None, readcap=zero_length_lit, @@ -1436,14 +1459,14 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): metadata['no-write'] = _no_write(parent_readonly, filenode, current_metadata) if filenode.is_unknown(): - raise SFTPError(FX_PERMISSION_DENIED, + raise createSFTPError(FX_PERMISSION_DENIED, "cannot open an unknown cap. 
Upgrading the gateway " "to a later Tahoe-LAFS version may help") if not IFileNode.providedBy(filenode): - raise SFTPError(FX_PERMISSION_DENIED, + raise createSFTPError(FX_PERMISSION_DENIED, "cannot open a directory as if it were a file") if (flags & FXF_WRITE) and metadata['no-write']: - raise SFTPError(FX_PERMISSION_DENIED, + raise createSFTPError(FX_PERMISSION_DENIED, "cannot open a non-writeable file for writing") return self._make_file(file, userpath, flags, parent=parent, childname=childname, @@ -1453,10 +1476,10 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): f.trap(NoSuchChildError) if not (flags & FXF_CREAT): - raise SFTPError(FX_NO_SUCH_FILE, + raise createSFTPError(FX_NO_SUCH_FILE, "the file does not exist, and was not opened with the creation (CREAT) flag") if parent_readonly: - raise SFTPError(FX_PERMISSION_DENIED, + raise createSFTPError(FX_PERMISSION_DENIED, "cannot create a file when the parent directory is read-only") return self._make_file(file, userpath, flags, parent=parent, childname=childname) @@ -1495,9 +1518,9 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): (to_parent, to_childname) = to_pair if from_childname is None: - raise SFTPError(FX_NO_SUCH_FILE, "cannot rename a source object specified by URI") + raise createSFTPError(FX_NO_SUCH_FILE, "cannot rename a source object specified by URI") if to_childname is None: - raise SFTPError(FX_NO_SUCH_FILE, "cannot rename to a destination specified by URI") + raise createSFTPError(FX_NO_SUCH_FILE, "cannot rename to a destination specified by URI") # # "It is an error if there already exists a file with the name specified @@ -1512,7 +1535,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): d2.addCallback(lambda ign: to_parent.get(to_childname)) def _expect_fail(res): if not isinstance(res, Failure): - raise SFTPError(FX_PERMISSION_DENIED, "cannot rename to existing path " + to_userpath) + raise createSFTPError(FX_PERMISSION_DENIED, "cannot rename to existing path " + str(to_userpath, "utf-8")) # It is OK if we fail for errors other than NoSuchChildError, since that probably # indicates some problem accessing the destination directory. @@ -1537,7 +1560,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): if not isinstance(err, Failure) or (renamed and err.check(NoSuchChildError)): return None if not overwrite and err.check(ExistingChildError): - raise SFTPError(FX_PERMISSION_DENIED, "cannot rename to existing path " + to_userpath) + raise createSFTPError(FX_PERMISSION_DENIED, "cannot rename to existing path " + str(to_userpath, "utf-8")) return err d3.addBoth(_check) @@ -1555,7 +1578,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): path = self._path_from_string(pathstring) metadata = _attrs_to_metadata(attrs) if 'no-write' in metadata: - def _denied(): raise SFTPError(FX_PERMISSION_DENIED, "cannot create a directory that is initially read-only") + def _denied(): raise createSFTPError(FX_PERMISSION_DENIED, "cannot create a directory that is initially read-only") return defer.execute(_denied) d = self._get_root(path) @@ -1567,7 +1590,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): def _get_or_create_directories(self, node, path, metadata): if not IDirectoryNode.providedBy(node): # TODO: provide the name of the blocking file in the error message. 
- def _blocked(): raise SFTPError(FX_FAILURE, "cannot create directory because there " + def _blocked(): raise createSFTPError(FX_FAILURE, "cannot create directory because there " "is a file in the way") # close enough return defer.execute(_blocked) @@ -1605,7 +1628,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): def _got_parent(parent_and_childname): (parent, childname) = parent_and_childname if childname is None: - raise SFTPError(FX_NO_SUCH_FILE, "cannot remove an object specified by URI") + raise createSFTPError(FX_NO_SUCH_FILE, "cannot remove an object specified by URI") direntry = _direntry_for(parent, childname) d2 = defer.succeed(False) @@ -1636,18 +1659,18 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): d.addCallback(_got_parent_or_node) def _list(dirnode): if dirnode.is_unknown(): - raise SFTPError(FX_PERMISSION_DENIED, + raise createSFTPError(FX_PERMISSION_DENIED, "cannot list an unknown cap as a directory. Upgrading the gateway " "to a later Tahoe-LAFS version may help") if not IDirectoryNode.providedBy(dirnode): - raise SFTPError(FX_PERMISSION_DENIED, + raise createSFTPError(FX_PERMISSION_DENIED, "cannot list a file as if it were a directory") d2 = dirnode.list() def _render(children): parent_readonly = dirnode.is_readonly() results = [] - for filename, (child, metadata) in children.iteritems(): + for filename, (child, metadata) in list(children.items()): # The file size may be cached or absent. metadata['no-write'] = _no_write(parent_readonly, child, metadata) attrs = _populate_attrs(child, metadata) @@ -1727,7 +1750,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): if "size" in attrs: # this would require us to download and re-upload the truncated/extended # file contents - def _unsupported(): raise SFTPError(FX_OP_UNSUPPORTED, "setAttrs wth size attribute unsupported") + def _unsupported(): raise createSFTPError(FX_OP_UNSUPPORTED, "setAttrs with size attribute unsupported") return defer.execute(_unsupported) path = self._path_from_string(pathstring) @@ -1744,7 +1767,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): if childname is None: if updated_heisenfiles: return None - raise SFTPError(FX_NO_SUCH_FILE, userpath) + raise createSFTPError(FX_NO_SUCH_FILE, userpath) else: desired_metadata = _attrs_to_metadata(attrs) if noisy: self.log("desired_metadata = %r" % (desired_metadata,), level=NOISY) @@ -1767,7 +1790,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): def readLink(self, pathstring): self.log(".readLink(%r)" % (pathstring,), level=OPERATIONAL) - def _unsupported(): raise SFTPError(FX_OP_UNSUPPORTED, "readLink") + def _unsupported(): raise createSFTPError(FX_OP_UNSUPPORTED, "readLink") return defer.execute(_unsupported) def makeLink(self, linkPathstring, targetPathstring): @@ -1776,7 +1799,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): # If this is implemented, note the reversal of arguments described in point 7 of # .
- def _unsupported(): raise SFTPError(FX_OP_UNSUPPORTED, "makeLink") + def _unsupported(): raise createSFTPError(FX_OP_UNSUPPORTED, "makeLink") return defer.execute(_unsupported) def extendedRequest(self, extensionName, extensionData): @@ -1785,8 +1808,8 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): # We implement the three main OpenSSH SFTP extensions; see # - if extensionName == 'posix-rename@openssh.com': - def _bad(): raise SFTPError(FX_BAD_MESSAGE, "could not parse posix-rename@openssh.com request") + if extensionName == b'posix-rename@openssh.com': + def _bad(): raise createSFTPError(FX_BAD_MESSAGE, "could not parse posix-rename@openssh.com request") if 4 > len(extensionData): return defer.execute(_bad) (fromPathLen,) = struct.unpack('>L', extensionData[0:4]) @@ -1803,11 +1826,11 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): # an error, or an FXP_EXTENDED_REPLY. But it happens to do the right thing # (respond with an FXP_STATUS message) if we return a Failure with code FX_OK. def _succeeded(ign): - raise SFTPError(FX_OK, "request succeeded") + raise createSFTPError(FX_OK, "request succeeded") d.addCallback(_succeeded) return d - if extensionName == 'statvfs@openssh.com' or extensionName == 'fstatvfs@openssh.com': + if extensionName == b'statvfs@openssh.com' or extensionName == b'fstatvfs@openssh.com': # f_bsize and f_frsize should be the same to avoid a bug in 'df' return defer.succeed(struct.pack('>11Q', 1024, # uint64 f_bsize /* file system block size */ @@ -1823,7 +1846,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): 65535, # uint64 f_namemax /* maximum filename length */ )) - def _unsupported(): raise SFTPError(FX_OP_UNSUPPORTED, "unsupported %r request <data of length %r>" % + def _unsupported(): raise createSFTPError(FX_OP_UNSUPPORTED, "unsupported %r request <data of length %r>" % (extensionName, len(extensionData))) return defer.execute(_unsupported) @@ -1838,29 +1861,29 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): def _path_from_string(self, pathstring): if noisy: self.log("CONVERT %r" % (pathstring,), level=NOISY) - _assert(isinstance(pathstring, str), pathstring=pathstring) + _assert(isinstance(pathstring, bytes), pathstring=pathstring) # The home directory is the root directory. - pathstring = pathstring.strip("/") - if pathstring == "" or pathstring == ".": + pathstring = pathstring.strip(b"/") + if pathstring == b"" or pathstring == b".": path_utf8 = [] else: - path_utf8 = pathstring.split("/") + path_utf8 = pathstring.split(b"/") # # "Servers SHOULD interpret a path name component ".." as referring to # the parent directory, and "." as referring to the current directory." path = [] for p_utf8 in path_utf8: - if p_utf8 == "..": + if p_utf8 == b"..": # ignore excess ..
components at the root if len(path) > 0: path = path[:-1] - elif p_utf8 != ".": + elif p_utf8 != b".": try: p = p_utf8.decode('utf-8', 'strict') except UnicodeError: - raise SFTPError(FX_NO_SUCH_FILE, "path could not be decoded as UTF-8") + raise createSFTPError(FX_NO_SUCH_FILE, "path could not be decoded as UTF-8") path.append(p) if noisy: self.log(" PATH %r" % (path,), level=NOISY) @@ -1975,11 +1998,13 @@ class Dispatcher(object): class SFTPServer(service.MultiService): + name = "frontend:sftp" + def __init__(self, client, accountfile, accounturl, sftp_portstr, pubkey_file, privkey_file): - precondition(isinstance(accountfile, (unicode, NoneType)), accountfile) - precondition(isinstance(pubkey_file, unicode), pubkey_file) - precondition(isinstance(privkey_file, unicode), privkey_file) + precondition(isinstance(accountfile, (str, type(None))), accountfile) + precondition(isinstance(pubkey_file, str), pubkey_file) + precondition(isinstance(privkey_file, str), privkey_file) service.MultiService.__init__(self) r = Dispatcher(client) @@ -2010,5 +2035,5 @@ class SFTPServer(service.MultiService): f = SSHFactory() f.portal = p - s = strports.service(sftp_portstr, f) + s = strports.service(six.ensure_str(sftp_portstr), f) s.setServiceParent(self) diff --git a/src/allmydata/immutable/checker.py b/src/allmydata/immutable/checker.py index ce533b969..9636b9a2f 100644 --- a/src/allmydata/immutable/checker.py +++ b/src/allmydata/immutable/checker.py @@ -1,3 +1,15 @@ +""" +Ported to Python 3. +""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 + from zope.interface import implementer from twisted.internet import defer from foolscap.api import DeadReferenceError, RemoteException @@ -616,7 +628,7 @@ class Checker(log.PrefixingLogMixin): d.addCallback(_got_ueb) def _discard_result(r): - assert isinstance(r, str), r + assert isinstance(r, bytes), r # to free up the RAM return None diff --git a/src/allmydata/immutable/downloader/finder.py b/src/allmydata/immutable/downloader/finder.py index 6d222bc73..4f6d1aa14 100644 --- a/src/allmydata/immutable/downloader/finder.py +++ b/src/allmydata/immutable/downloader/finder.py @@ -9,6 +9,7 @@ from __future__ import unicode_literals from future.utils import PY2 if PY2: from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 +from six import ensure_str import time now = time.time @@ -98,7 +99,7 @@ class ShareFinder(object): # internal methods def loop(self): - pending_s = ",".join([rt.server.get_name() + pending_s = ",".join([ensure_str(rt.server.get_name()) for rt in self.pending_requests]) # sort? 
self.log(format="ShareFinder loop: running=%(running)s" " hungry=%(hungry)s, pending=%(pending)s", diff --git a/src/allmydata/immutable/encode.py b/src/allmydata/immutable/encode.py index 9351df501..a9835b6b8 100644 --- a/src/allmydata/immutable/encode.py +++ b/src/allmydata/immutable/encode.py @@ -255,11 +255,11 @@ class Encoder(object): # captures the slot, not the value #d.addCallback(lambda res: self.do_segment(i)) # use this form instead: - d.addCallback(lambda res, i=i: self._encode_segment(i)) + d.addCallback(lambda res, i=i: self._encode_segment(i, is_tail=False)) d.addCallback(self._send_segment, i) d.addCallback(self._turn_barrier) last_segnum = self.num_segments - 1 - d.addCallback(lambda res: self._encode_tail_segment(last_segnum)) + d.addCallback(lambda res: self._encode_segment(last_segnum, is_tail=True)) d.addCallback(self._send_segment, last_segnum) d.addCallback(self._turn_barrier) @@ -317,8 +317,24 @@ class Encoder(object): dl.append(d) return self._gather_responses(dl) - def _encode_segment(self, segnum): - codec = self._codec + def _encode_segment(self, segnum, is_tail): + """ + Encode one segment of input into the configured number of shares. + + :param segnum: Ostensibly, the number of the segment to encode. In + reality, this parameter is ignored and the *next* segment is + encoded and returned. + + :param bool is_tail: ``True`` if this is the last segment, ``False`` + otherwise. + + :return: A ``Deferred`` which fires with a two-tuple. The first + element is a list of string-y objects representing the encoded + segment data for one of the shares. The second element is a list + of integers giving the share numbers of the shares in the first + element. + """ + codec = self._tail_codec if is_tail else self._codec start = time.time() # the ICodecEncoder API wants to receive a total of self.segment_size @@ -350,9 +366,11 @@ class Encoder(object): # footprint to 430KiB at the expense of more hash-tree overhead. 
d = self._gather_data(self.required_shares, input_piece_size, - crypttext_segment_hasher) + crypttext_segment_hasher, allow_short=is_tail) def _done_gathering(chunks): for c in chunks: + # If is_tail then a short trailing chunk will have been padded + # by _gather_data assert len(c) == input_piece_size self._crypttext_hashes.append(crypttext_segment_hasher.digest()) # during this call, we hit 5*segsize memory @@ -365,31 +383,6 @@ class Encoder(object): d.addCallback(_done) return d - def _encode_tail_segment(self, segnum): - - start = time.time() - codec = self._tail_codec - input_piece_size = codec.get_block_size() - - crypttext_segment_hasher = hashutil.crypttext_segment_hasher() - - d = self._gather_data(self.required_shares, input_piece_size, - crypttext_segment_hasher, allow_short=True) - def _done_gathering(chunks): - for c in chunks: - # a short trailing chunk will have been padded by - # _gather_data - assert len(c) == input_piece_size - self._crypttext_hashes.append(crypttext_segment_hasher.digest()) - return codec.encode(chunks) - d.addCallback(_done_gathering) - def _done(res): - elapsed = time.time() - start - self._times["cumulative_encoding"] += elapsed - return res - d.addCallback(_done) - return d - def _gather_data(self, num_chunks, input_chunk_size, crypttext_segment_hasher, allow_short=False): diff --git a/src/allmydata/immutable/filenode.py b/src/allmydata/immutable/filenode.py index 105a8cde3..9e13e1337 100644 --- a/src/allmydata/immutable/filenode.py +++ b/src/allmydata/immutable/filenode.py @@ -152,7 +152,6 @@ class CiphertextFileNode(object): for server in servers: sm.add(shnum, server) servers_responding.add(server) - servers_responding = sorted(servers_responding) good_hosts = len(reduce(set.union, sm.values(), set())) is_healthy = bool(len(sm) >= verifycap.total_shares) diff --git a/src/allmydata/immutable/literal.py b/src/allmydata/immutable/literal.py index 68db478f3..6ed5571b9 100644 --- a/src/allmydata/immutable/literal.py +++ b/src/allmydata/immutable/literal.py @@ -19,7 +19,7 @@ from twisted.protocols import basic from allmydata.interfaces import IImmutableFileNode, ICheckable from allmydata.uri import LiteralFileURI -@implementer(IImmutableFileNode, ICheckable) + class _ImmutableFileNodeBase(object): def get_write_uri(self): @@ -56,6 +56,7 @@ class _ImmutableFileNodeBase(object): return not self == other +@implementer(IImmutableFileNode, ICheckable) class LiteralFileNode(_ImmutableFileNodeBase): def __init__(self, filecap): diff --git a/src/allmydata/immutable/offloaded.py b/src/allmydata/immutable/offloaded.py index fb8c706a3..2d2c5c1f5 100644 --- a/src/allmydata/immutable/offloaded.py +++ b/src/allmydata/immutable/offloaded.py @@ -1,3 +1,14 @@ +""" +Ported to Python 3. +""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 import os, stat, time, weakref from zope.interface import implementer @@ -25,10 +36,11 @@ class CHKCheckerAndUEBFetcher(object): less than 'N' shares present. If the file is completely healthy, I return a tuple of (sharemap, - UEB_data, UEB_hash). + UEB_data, UEB_hash). A sharemap is a dict with share numbers as keys and + sets of server ids (which hold that share) as values. 
""" - def __init__(self, peer_getter, storage_index, logparent=None): + def __init__(self, peer_getter, storage_index, logparent): self._peer_getter = peer_getter self._found_shares = set() self._storage_index = storage_index @@ -46,6 +58,12 @@ class CHKCheckerAndUEBFetcher(object): return log.msg(*args, **kwargs) def check(self): + """ + :return Deferred[bool|(DictOfSets, dict, bytes)]: If no share can be found + with a usable UEB block or fewer than N shares can be found then the + Deferred fires with ``False``. Otherwise, it fires with a tuple of + the sharemap, the UEB data, and the UEB hash. + """ d = self._get_all_shareholders(self._storage_index) d.addCallback(self._get_uri_extension) d.addCallback(self._done) @@ -123,14 +141,14 @@ class CHKCheckerAndUEBFetcher(object): @implementer(interfaces.RICHKUploadHelper) -class CHKUploadHelper(Referenceable, upload.CHKUploader): +class CHKUploadHelper(Referenceable, upload.CHKUploader): # type: ignore # warner/foolscap#78 """I am the helper-server -side counterpart to AssistedUploader. I handle peer selection, encoding, and share pushing. I read ciphertext from the remote AssistedUploader. """ - VERSION = { "http://allmydata.org/tahoe/protocols/helper/chk-upload/v1" : + VERSION = { b"http://allmydata.org/tahoe/protocols/helper/chk-upload/v1" : { }, - "application-version": str(allmydata.__full_version__), + b"application-version": allmydata.__full_version__.encode("utf-8"), } def __init__(self, storage_index, @@ -481,10 +499,26 @@ class LocalCiphertextReader(AskUntilSuccessMixin): # ??. I'm not sure if it makes sense to forward the close message. return self.call("close") + # https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3561 + def set_upload_status(self, upload_status): + raise NotImplementedError @implementer(interfaces.RIHelper, interfaces.IStatsProducer) -class Helper(Referenceable): +class Helper(Referenceable): # type: ignore # warner/foolscap#78 + """ + :ivar dict[bytes, CHKUploadHelper] _active_uploads: For any uploads which + have been started but not finished, a mapping from storage index to the + upload helper. + + :ivar chk_checker: A callable which returns an object like a + CHKCheckerAndUEBFetcher instance which can check CHK shares. + Primarily for the convenience of tests to override. + + :ivar chk_upload: A callable which returns an object like a + CHKUploadHelper instance which can upload CHK shares. Primarily for + the convenience of tests to override. + """ # this is the non-distributed version. 
When we need to have multiple # helpers, this object will become the HelperCoordinator, and will query # the farm of Helpers to see if anyone has the storage_index of interest, @@ -498,6 +532,9 @@ class Helper(Referenceable): } MAX_UPLOAD_STATUSES = 10 + chk_checker = CHKCheckerAndUEBFetcher + chk_upload = CHKUploadHelper + def __init__(self, basedir, storage_broker, secret_holder, stats_provider, history): self._basedir = basedir @@ -569,6 +606,9 @@ class Helper(Referenceable): return self.VERSION def remote_upload_chk(self, storage_index): + """ + See ``RIHelper.upload_chk`` + """ self.count("chk_upload_helper.upload_requests") lp = self.log(format="helper: upload_chk query for SI %(si)s", si=si_b2a(storage_index)) @@ -591,7 +631,7 @@ class Helper(Referenceable): lp2 = self.log("doing a quick check+UEBfetch", parent=lp, level=log.NOISY) sb = self._storage_broker - c = CHKCheckerAndUEBFetcher(sb.get_servers_for_psi, storage_index, lp2) + c = self.chk_checker(sb.get_servers_for_psi, storage_index, lp2) d = c.check() def _checked(res): if res: @@ -633,14 +673,18 @@ class Helper(Referenceable): return (None, uh) def _make_chk_upload_helper(self, storage_index, lp): - si_s = si_b2a(storage_index) + si_s = si_b2a(storage_index).decode('ascii') incoming_file = os.path.join(self._chk_incoming, si_s) encoding_file = os.path.join(self._chk_encoding, si_s) - uh = CHKUploadHelper(storage_index, self, - self._storage_broker, - self._secret_holder, - incoming_file, encoding_file, - lp) + uh = self.chk_upload( + storage_index, + self, + self._storage_broker, + self._secret_holder, + incoming_file, + encoding_file, + lp, + ) return uh def _add_upload(self, uh): diff --git a/src/allmydata/immutable/repairer.py b/src/allmydata/immutable/repairer.py index 1d3782d10..bccd8453d 100644 --- a/src/allmydata/immutable/repairer.py +++ b/src/allmydata/immutable/repairer.py @@ -1,3 +1,15 @@ +""" +Ported to Python 3. 
+""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 + from zope.interface import implementer from twisted.internet import defer from allmydata.storage.server import si_b2a diff --git a/src/allmydata/immutable/upload.py b/src/allmydata/immutable/upload.py index e77cbb30b..46e01184f 100644 --- a/src/allmydata/immutable/upload.py +++ b/src/allmydata/immutable/upload.py @@ -11,20 +11,32 @@ from future.utils import PY2, native_str if PY2: from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 from past.builtins import long, unicode +from six import ensure_str + +try: + from typing import List +except ImportError: + pass import os, time, weakref, itertools + +import attr + from zope.interface import implementer from twisted.python import failure from twisted.internet import defer from twisted.application import service -from foolscap.api import Referenceable, Copyable, RemoteCopy, fireEventually +from foolscap.api import Referenceable, Copyable, RemoteCopy from allmydata.crypto import aes from allmydata.util.hashutil import file_renewal_secret_hash, \ file_cancel_secret_hash, bucket_renewal_secret_hash, \ bucket_cancel_secret_hash, plaintext_hasher, \ storage_index_hash, plaintext_segment_hasher, convergence_hasher -from allmydata.util.deferredutil import timeout_call +from allmydata.util.deferredutil import ( + timeout_call, + until, +) from allmydata import hashtree, uri from allmydata.storage.server import si_b2a from allmydata.immutable import encode @@ -385,6 +397,9 @@ class PeerSelector(object): ) return self.happiness_mappings + def add_peers(self, peerids=None): + raise NotImplementedError + class _QueryStatistics(object): @@ -896,13 +911,45 @@ class Tahoe2ServerSelector(log.PrefixingLogMixin): raise UploadUnhappinessError(msg) +@attr.s +class _Accum(object): + """ + Accumulate up to some known amount of ciphertext. + + :ivar remaining: The number of bytes still expected. + :ivar ciphertext: The bytes accumulated so far. + """ + remaining = attr.ib(validator=attr.validators.instance_of(int)) # type: int + ciphertext = attr.ib(default=attr.Factory(list)) # type: List[bytes] + + def extend(self, + size, # type: int + ciphertext, # type: List[bytes] + ): + """ + Accumulate some more ciphertext. + + :param size: The amount of data the new ciphertext represents towards + the goal. This may be more than the actual size of the given + ciphertext if the source has run out of data. + + :param ciphertext: The new ciphertext to accumulate. + """ + self.remaining -= size + self.ciphertext.extend(ciphertext) + + @implementer(IEncryptedUploadable) class EncryptAnUploadable(object): """This is a wrapper that takes an IUploadable and provides IEncryptedUploadable.""" CHUNKSIZE = 50*1024 - def __init__(self, original, log_parent=None, progress=None): + def __init__(self, original, log_parent=None, progress=None, chunk_size=None): + """ + :param chunk_size: The number of bytes to read from the uploadable at a + time, or None for some default. 
+ """ precondition(original.default_params_set, "set_default_encoding_parameters not called on %r before wrapping with EncryptAnUploadable" % (original,)) self.original = IUploadable(original) @@ -916,6 +963,8 @@ class EncryptAnUploadable(object): self._ciphertext_bytes_read = 0 self._status = None self._progress = progress + if chunk_size is not None: + self.CHUNKSIZE = chunk_size def set_upload_status(self, upload_status): self._status = IUploadStatus(upload_status) @@ -1022,47 +1071,53 @@ class EncryptAnUploadable(object): # and size d.addCallback(lambda ignored: self.get_size()) d.addCallback(lambda ignored: self._get_encryptor()) - # then fetch and encrypt the plaintext. The unusual structure here - # (passing a Deferred *into* a function) is needed to avoid - # overflowing the stack: Deferreds don't optimize out tail recursion. - # We also pass in a list, to which _read_encrypted will append - # ciphertext. - ciphertext = [] - d2 = defer.Deferred() - d.addCallback(lambda ignored: - self._read_encrypted(length, ciphertext, hash_only, d2)) - d.addCallback(lambda ignored: d2) + + accum = _Accum(length) + + def action(): + """ + Read some bytes into the accumulator. + """ + return self._read_encrypted(accum, hash_only) + + def condition(): + """ + Check to see if the accumulator has all the data. + """ + return accum.remaining == 0 + + d.addCallback(lambda ignored: until(action, condition)) + d.addCallback(lambda ignored: accum.ciphertext) return d - def _read_encrypted(self, remaining, ciphertext, hash_only, fire_when_done): - if not remaining: - fire_when_done.callback(ciphertext) - return None + def _read_encrypted(self, + ciphertext_accum, # type: _Accum + hash_only, # type: bool + ): + # type: (...) -> defer.Deferred + """ + Read the next chunk of plaintext, encrypt it, and extend the accumulator + with the resulting ciphertext. + """ # tolerate large length= values without consuming a lot of RAM by # reading just a chunk (say 50kB) at a time. This only really matters # when hash_only==True (i.e. resuming an interrupted upload), since # that's the case where we will be skipping over a lot of data. - size = min(remaining, self.CHUNKSIZE) - remaining = remaining - size + size = min(ciphertext_accum.remaining, self.CHUNKSIZE) + # read a chunk of plaintext.. d = defer.maybeDeferred(self.original.read, size) - # N.B.: if read() is synchronous, then since everything else is - # actually synchronous too, we'd blow the stack unless we stall for a - # tick. Once you accept a Deferred from IUploadable.read(), you must - # be prepared to have it fire immediately too. - d.addCallback(fireEventually) def _good(plaintext): # and encrypt it.. # o/' over the fields we go, hashing all the way, sHA! sHA! sHA! o/' ct = self._hash_and_encrypt_plaintext(plaintext, hash_only) - ciphertext.extend(ct) - self._read_encrypted(remaining, ciphertext, hash_only, - fire_when_done) - def _err(why): - fire_when_done.errback(why) + # Intentionally tell the accumulator about the expected size, not + # the actual size. If we run out of data we still want remaining + # to drop otherwise it will never reach 0 and the loop will never + # end. 
+ ciphertext_accum.extend(size, ct) d.addCallback(_good) - d.addErrback(_err) - return None + return d def _hash_and_encrypt_plaintext(self, data, hash_only): assert isinstance(data, (tuple, list)), type(data) @@ -1423,7 +1478,7 @@ class LiteralUploader(object): return self._status @implementer(RIEncryptedUploadable) -class RemoteEncryptedUploadable(Referenceable): +class RemoteEncryptedUploadable(Referenceable): # type: ignore # warner/foolscap#78 def __init__(self, encrypted_uploadable, upload_status): self._eu = IEncryptedUploadable(encrypted_uploadable) @@ -1825,7 +1880,7 @@ class Uploader(service.MultiService, log.PrefixingLogMixin): def startService(self): service.MultiService.startService(self) if self._helper_furl: - self.parent.tub.connectTo(self._helper_furl, + self.parent.tub.connectTo(ensure_str(self._helper_furl), self._got_helper) def _got_helper(self, helper): diff --git a/src/allmydata/interfaces.py b/src/allmydata/interfaces.py index 49dcf7646..0dd5ddc83 100644 --- a/src/allmydata/interfaces.py +++ b/src/allmydata/interfaces.py @@ -521,7 +521,6 @@ class IStorageBroker(Interface): oldest_supported: the peer's oldest supported version, same rref: the RemoteReference, if connected, otherwise None - remote_host: the IAddress, if connected, otherwise None This method is intended for monitoring interfaces, such as a web page that describes connecting and connected peers. @@ -682,7 +681,7 @@ class IURI(Interface): passing into init_from_string.""" -class IVerifierURI(Interface, IURI): +class IVerifierURI(IURI): def init_from_string(uri): """Accept a string (as created by my to_string() method) and populate this instance with its data. I am not normally called directly, @@ -749,7 +748,7 @@ class IProgress(Interface): "Current amount of progress (in percentage)" ) - def set_progress(self, value): + def set_progress(value): """ Sets the current amount of progress. @@ -757,7 +756,7 @@ class IProgress(Interface): set_progress_total. """ - def set_progress_total(self, value): + def set_progress_total(value): """ Sets the total amount of expected progress @@ -860,12 +859,6 @@ class IPeerSelector(Interface): peer selection begins. """ - def confirm_share_allocation(peerid, shnum): - """ - Confirm that an allocated peer=>share pairing has been - successfully established. - """ - def add_peers(peerids=set): """ Update my internal state to include the peers in peerids as @@ -1825,11 +1818,6 @@ class IEncoder(Interface): willing to receive data. """ - def set_size(size): - """Specify the number of bytes that will be encoded. This must be - peformed before get_serialized_params() can be called. - """ - def set_encrypted_uploadable(u): """Provide a source of encrypted upload data. 'u' must implement IEncryptedUploadable. @@ -2932,38 +2920,6 @@ class RIHelper(RemoteInterface): return (UploadResults, ChoiceOf(RICHKUploadHelper, None)) -class RIStatsProvider(RemoteInterface): - __remote_name__ = native_str("RIStatsProvider.tahoe.allmydata.com") - """ - Provides access to statistics and monitoring information. - """ - - def get_stats(): - """ - returns a dictionary containing 'counters' and 'stats', each a - dictionary with string counter/stat name keys, and numeric or None values. 
- counters are monotonically increasing measures of work done, and - stats are instantaneous measures (potentially time averaged - internally) - """ - return DictOf(bytes, DictOf(bytes, ChoiceOf(float, int, long, None))) - - -class RIStatsGatherer(RemoteInterface): - __remote_name__ = native_str("RIStatsGatherer.tahoe.allmydata.com") - """ - Provides a monitoring service for centralised collection of stats - """ - - def provide(provider=RIStatsProvider, nickname=bytes): - """ - @param provider: a stats collector instance that should be polled - periodically by the gatherer to collect stats. - @param nickname: a name useful to identify the provided client - """ - return None - - class IStatsProducer(Interface): def get_stats(): """ @@ -3174,3 +3130,24 @@ class IAnnounceableStorageServer(Interface): :type: ``IReferenceable`` provider """ ) + + +class IAddressFamily(Interface): + """ + Support for one specific address family. + + This stretches the definition of address family to include things like Tor + and I2P. + """ + def get_listener(): + """ + Return a string endpoint description or an ``IStreamServerEndpoint``. + + This would be named ``get_server_endpoint`` if not for historical + reasons. + """ + + def get_client_endpoint(): + """ + Return an ``IStreamClientEndpoint``. + """ diff --git a/src/allmydata/introducer/client.py b/src/allmydata/introducer/client.py index 225ec1abc..07f8a5f7a 100644 --- a/src/allmydata/introducer/client.py +++ b/src/allmydata/introducer/client.py @@ -1,7 +1,22 @@ +""" +Ported to Python 3. +""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 +from past.builtins import long + +from six import ensure_text, ensure_str + import time from zope.interface import implementer from twisted.application import service -from foolscap.api import Referenceable, eventually +from foolscap.api import Referenceable from allmydata.interfaces import InsufficientVersionError from allmydata.introducer.interfaces import IIntroducerClient, \ RIIntroducerSubscriberClient_v2 @@ -9,36 +24,38 @@ from allmydata.introducer.common import sign_to_foolscap, unsign_from_foolscap,\ get_tubid_string_from_ann from allmydata.util import log, yamlutil, connection_status from allmydata.util.rrefutil import add_version_to_remote_reference +from allmydata.util.observer import ( + ObserverList, +) from allmydata.crypto.error import BadSignature from allmydata.util.assertutil import precondition class InvalidCacheError(Exception): pass -V2 = "http://allmydata.org/tahoe/protocols/introducer/v2" +V2 = b"http://allmydata.org/tahoe/protocols/introducer/v2" @implementer(RIIntroducerSubscriberClient_v2, IIntroducerClient) class IntroducerClient(service.Service, Referenceable): def __init__(self, tub, introducer_furl, nickname, my_version, oldest_supported, - app_versions, sequencer, cache_filepath): + sequencer, cache_filepath): self._tub = tub self.introducer_furl = introducer_furl - assert type(nickname) is unicode + assert isinstance(nickname, str) self._nickname = nickname self._my_version = my_version self._oldest_supported = oldest_supported - self._app_versions = app_versions self._sequencer = sequencer self._cache_filepath = cache_filepath - self._my_subscriber_info = { "version": 0, - 
"nickname": self._nickname, - "app-versions": self._app_versions, - "my-version": self._my_version, - "oldest-supported": self._oldest_supported, + self._my_subscriber_info = { b"version": 0, + b"nickname": self._nickname, + b"app-versions": [], + b"my-version": self._my_version, + b"oldest-supported": self._oldest_supported, } self._outbound_announcements = {} # not signed @@ -48,8 +65,7 @@ class IntroducerClient(service.Service, Referenceable): self._publisher = None self._since = None - self._local_subscribers = [] # (servicename,cb,args,kwargs) tuples - self._subscribed_service_names = set() + self._local_subscribers = {} # {servicename: ObserverList} self._subscriptions = set() # requests we've actually sent # _inbound_announcements remembers one announcement per @@ -80,7 +96,7 @@ class IntroducerClient(service.Service, Referenceable): def startService(self): service.Service.startService(self) self._introducer_error = None - rc = self._tub.connectTo(self.introducer_furl, self._got_introducer) + rc = self._tub.connectTo(ensure_str(self.introducer_furl), self._got_introducer) self._introducer_reconnector = rc def connect_failed(failure): self.log("Initial Introducer connection failed: perhaps it's down", @@ -110,21 +126,26 @@ class IntroducerClient(service.Service, Referenceable): def _save_announcements(self): announcements = [] - for _, value in self._inbound_announcements.items(): + for value in self._inbound_announcements.values(): ann, key_s, time_stamp = value + # On Python 2, bytes strings are encoded into YAML Unicode strings. + # On Python 3, bytes are encoded as YAML bytes. To minimize + # changes, Python 3 for now ensures the same is true. server_params = { "ann" : ann, - "key_s" : key_s, + "key_s" : ensure_text(key_s), } announcements.append(server_params) announcement_cache_yaml = yamlutil.safe_dump(announcements) + if isinstance(announcement_cache_yaml, str): + announcement_cache_yaml = announcement_cache_yaml.encode("utf-8") self._cache_filepath.setContent(announcement_cache_yaml) def _got_introducer(self, publisher): self.log("connected to introducer, getting versions") - default = { "http://allmydata.org/tahoe/protocols/introducer/v1": + default = { b"http://allmydata.org/tahoe/protocols/introducer/v1": { }, - "application-version": "unknown: no get_version()", + b"application-version": b"unknown: no get_version()", } d = add_version_to_remote_reference(publisher, default) d.addCallback(self._got_versioned_introducer) @@ -137,6 +158,7 @@ class IntroducerClient(service.Service, Referenceable): def _got_versioned_introducer(self, publisher): self.log("got introducer version: %s" % (publisher.version,)) # we require an introducer that speaks at least V2 + assert all(type(V2) == type(v) for v in publisher.version) if V2 not in publisher.version: raise InsufficientVersionError("V2", publisher.version) self._publisher = publisher @@ -156,28 +178,28 @@ class IntroducerClient(service.Service, Referenceable): kwargs["facility"] = "tahoe.introducer.client" return log.msg(*args, **kwargs) - def subscribe_to(self, service_name, cb, *args, **kwargs): - self._local_subscribers.append( (service_name,cb,args,kwargs) ) - self._subscribed_service_names.add(service_name) + def subscribe_to(self, service_name, callback, *args, **kwargs): + obs = self._local_subscribers.setdefault(service_name, ObserverList()) + obs.subscribe(lambda key_s, ann: callback(key_s, ann, *args, **kwargs)) self._maybe_subscribe() - for index,(ann,key_s,when) in self._inbound_announcements.items(): - 
precondition(isinstance(key_s, str), key_s) + for index,(ann,key_s,when) in list(self._inbound_announcements.items()): + precondition(isinstance(key_s, bytes), key_s) servicename = index[0] if servicename == service_name: - eventually(cb, key_s, ann, *args, **kwargs) + obs.notify(key_s, ann) def _maybe_subscribe(self): if not self._publisher: self.log("want to subscribe, but no introducer yet", level=log.NOISY) return - for service_name in self._subscribed_service_names: + for service_name in self._local_subscribers: if service_name in self._subscriptions: continue self._subscriptions.add(service_name) self._debug_outstanding += 1 d = self._publisher.callRemote("subscribe_v2", - self, service_name, + self, service_name.encode("utf-8"), self._my_subscriber_info) d.addBoth(self._debug_retired) d.addErrback(log.err, facility="tahoe.introducer.client", @@ -188,7 +210,7 @@ class IntroducerClient(service.Service, Referenceable): # "seqnum" and "nonce" will be populated with new values in # publish(), each time we make a change "nickname": self._nickname, - "app-versions": self._app_versions, + "app-versions": [], "my-version": self._my_version, "oldest-supported": self._oldest_supported, @@ -205,7 +227,7 @@ class IntroducerClient(service.Service, Referenceable): self._outbound_announcements[service_name] = ann_d # publish all announcements with the new seqnum and nonce - for service_name,ann_d in self._outbound_announcements.items(): + for service_name,ann_d in list(self._outbound_announcements.items()): ann_d["seqnum"] = current_seqnum ann_d["nonce"] = current_nonce ann_t = sign_to_foolscap(ann_d, signing_key) @@ -217,7 +239,7 @@ class IntroducerClient(service.Service, Referenceable): self.log("want to publish, but no introducer yet", level=log.NOISY) return # this re-publishes everything. 
The Introducer ignores duplicates - for ann_t in self._published_announcements.values(): + for ann_t in list(self._published_announcements.values()): self._debug_counts["outbound_message"] += 1 self._debug_outstanding += 1 d = self._publisher.callRemote("publish_v2", ann_t, self._canary) @@ -237,7 +259,7 @@ class IntroducerClient(service.Service, Referenceable): # this might raise UnknownKeyError or bad-sig error ann, key_s = unsign_from_foolscap(ann_t) # key is "v0-base32abc123" - precondition(isinstance(key_s, str), key_s) + precondition(isinstance(key_s, bytes), key_s) except BadSignature: self.log("bad signature on inbound announcement: %s" % (ann_t,), parent=lp, level=log.WEIRD, umid="ZAU15Q") @@ -247,17 +269,17 @@ class IntroducerClient(service.Service, Referenceable): self._process_announcement(ann, key_s) def _process_announcement(self, ann, key_s): - precondition(isinstance(key_s, str), key_s) + precondition(isinstance(key_s, bytes), key_s) self._debug_counts["inbound_announcement"] += 1 service_name = str(ann["service-name"]) - if service_name not in self._subscribed_service_names: + if service_name not in self._local_subscribers: self.log("announcement for a service we don't care about [%s]" % (service_name,), level=log.UNUSUAL, umid="dIpGNA") self._debug_counts["wrong_service"] += 1 return # for ASCII values, simplejson might give us unicode *or* bytes - if "nickname" in ann and isinstance(ann["nickname"], str): - ann["nickname"] = unicode(ann["nickname"]) + if "nickname" in ann and isinstance(ann["nickname"], bytes): + ann["nickname"] = str(ann["nickname"]) nick_s = ann.get("nickname",u"").encode("utf-8") lp2 = self.log(format="announcement for nickname '%(nick)s', service=%(svc)s: %(ann)s", nick=nick_s, svc=service_name, ann=ann, umid="BoKEag") @@ -265,11 +287,11 @@ class IntroducerClient(service.Service, Referenceable): # how do we describe this node in the logs? desc_bits = [] assert key_s - desc_bits.append("serverid=" + key_s[:20]) + desc_bits.append(b"serverid=" + key_s[:20]) if "anonymous-storage-FURL" in ann: tubid_s = get_tubid_string_from_ann(ann) - desc_bits.append("tubid=" + tubid_s[:8]) - description = "/".join(desc_bits) + desc_bits.append(b"tubid=" + tubid_s[:8]) + description = b"/".join(desc_bits) # the index is used to track duplicates index = (service_name, key_s) @@ -319,11 +341,11 @@ class IntroducerClient(service.Service, Referenceable): self._deliver_announcements(key_s, ann) def _deliver_announcements(self, key_s, ann): - precondition(isinstance(key_s, str), key_s) + precondition(isinstance(key_s, bytes), key_s) service_name = str(ann["service-name"]) - for (service_name2,cb,args,kwargs) in self._local_subscribers: - if service_name2 == service_name: - eventually(cb, key_s, ann, *args, **kwargs) + obs = self._local_subscribers.get(service_name) + if obs is not None: + obs.notify(key_s, ann) def connection_status(self): assert self.running # startService builds _introducer_reconnector diff --git a/src/allmydata/introducer/common.py b/src/allmydata/introducer/common.py index abc0811f0..f67aad203 100644 --- a/src/allmydata/introducer/common.py +++ b/src/allmydata/introducer/common.py @@ -1,18 +1,29 @@ +""" +Ported to Python 3. 
+""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 + import re -import json from allmydata.crypto.util import remove_prefix from allmydata.crypto import ed25519 -from allmydata.util import base32, rrefutil +from allmydata.util import base32, rrefutil, jsonbytes as json def get_tubid_string_from_ann(ann): - return get_tubid_string(str(ann.get("anonymous-storage-FURL") - or ann.get("FURL"))) + furl = ann.get("anonymous-storage-FURL") or ann.get("FURL") + return get_tubid_string(furl) def get_tubid_string(furl): m = re.match(r'pb://(\w+)@', furl) assert m - return m.group(1).lower() + return m.group(1).lower().encode("ascii") def sign_to_foolscap(announcement, signing_key): diff --git a/src/allmydata/introducer/interfaces.py b/src/allmydata/introducer/interfaces.py index 9f08f1943..24fd3945f 100644 --- a/src/allmydata/introducer/interfaces.py +++ b/src/allmydata/introducer/interfaces.py @@ -73,7 +73,7 @@ class IIntroducerClient(Interface): publish their services to the rest of the world, and I help them learn about services available on other nodes.""" - def publish(service_name, ann, signing_key=None): + def publish(service_name, ann, signing_key): """Publish the given announcement dictionary (which must be JSON-serializable), plus some additional keys, to the world. @@ -83,8 +83,7 @@ class IIntroducerClient(Interface): the signing_key, if present, otherwise it is derived from the 'anonymous-storage-FURL' key. - If signing_key= is set to an instance of SigningKey, it will be - used to sign the announcement.""" + signing_key (a SigningKey) will be used to sign the announcement.""" def subscribe_to(service_name, callback, *args, **kwargs): """Call this if you will eventually want to use services with the diff --git a/src/allmydata/introducer/server.py b/src/allmydata/introducer/server.py index 0a933bd01..339c5a0ac 100644 --- a/src/allmydata/introducer/server.py +++ b/src/allmydata/introducer/server.py @@ -1,5 +1,26 @@ +""" +Ported to Python 3. 
+""" + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + + +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 +from past.builtins import long +from six import ensure_text import time, os.path, textwrap + +try: + from typing import Any, Dict, Union +except ImportError: + pass + from zope.interface import implementer from twisted.application import service from twisted.internet import defer @@ -7,7 +28,7 @@ from twisted.python.failure import Failure from foolscap.api import Referenceable import allmydata from allmydata import node -from allmydata.util import log, rrefutil +from allmydata.util import log, rrefutil, dictutil from allmydata.util.i2p_provider import create as create_i2p_provider from allmydata.util.tor_provider import create as create_tor_provider from allmydata.introducer.interfaces import \ @@ -55,7 +76,7 @@ def create_introducer(basedir=u"."): i2p_provider = create_i2p_provider(reactor, config) tor_provider = create_tor_provider(reactor, config) - default_connection_handlers, foolscap_connection_handlers = create_connection_handlers(reactor, config, i2p_provider, tor_provider) + default_connection_handlers, foolscap_connection_handlers = create_connection_handlers(config, i2p_provider, tor_provider) tub_options = create_tub_options(config) # we don't remember these because the Introducer doesn't make @@ -122,7 +143,7 @@ class _IntroducerNode(node.Node): from allmydata.webish import IntroducerWebishServer nodeurl_path = self.config.get_config_path(u"node.url") - config_staticdir = self.get_config("node", "web.static", "public_html").decode('utf-8') + config_staticdir = self.get_config("node", "web.static", "public_html") staticdir = self.config.get_config_path(config_staticdir) ws = IntroducerWebishServer(self, webport, nodeurl_path, staticdir) ws.setServiceParent(self) @@ -132,10 +153,12 @@ class IntroducerService(service.MultiService, Referenceable): name = "introducer" # v1 is the original protocol, added in 1.0 (but only advertised starting # in 1.3), removed in 1.12. v2 is the new signed protocol, added in 1.10 - VERSION = { #"http://allmydata.org/tahoe/protocols/introducer/v1": { }, - "http://allmydata.org/tahoe/protocols/introducer/v2": { }, - "application-version": str(allmydata.__full_version__), - } + # TODO: reconcile bytes/str for keys + VERSION = { + #"http://allmydata.org/tahoe/protocols/introducer/v1": { }, + b"http://allmydata.org/tahoe/protocols/introducer/v2": { }, + b"application-version": allmydata.__full_version__.encode("utf-8"), + } # type: Dict[Union[bytes, str], Any] def __init__(self): service.MultiService.__init__(self) @@ -155,7 +178,7 @@ class IntroducerService(service.MultiService, Referenceable): # 'subscriber_info' is a dict, provided directly by v2 clients. 
The # expected keys are: version, nickname, app-versions, my-version, # oldest-supported - self._subscribers = {} + self._subscribers = dictutil.UnicodeKeyDict({}) self._debug_counts = {"inbound_message": 0, "inbound_duplicate": 0, @@ -179,7 +202,7 @@ class IntroducerService(service.MultiService, Referenceable): def get_announcements(self): """Return a list of AnnouncementDescriptor for all announcements""" announcements = [] - for (index, (_, canary, ann, when)) in self._announcements.items(): + for (index, (_, canary, ann, when)) in list(self._announcements.items()): ad = AnnouncementDescriptor(when, index, canary, ann) announcements.append(ad) return announcements @@ -187,8 +210,8 @@ class IntroducerService(service.MultiService, Referenceable): def get_subscribers(self): """Return a list of SubscriberDescriptor objects for all subscribers""" s = [] - for service_name, subscriptions in self._subscribers.items(): - for rref,(subscriber_info,when) in subscriptions.items(): + for service_name, subscriptions in list(self._subscribers.items()): + for rref,(subscriber_info,when) in list(subscriptions.items()): # note that if the subscriber didn't do Tub.setLocation, # tubid will be None. Also, subscribers do not tell us which # pubkey they use; only publishers do that. @@ -279,6 +302,10 @@ class IntroducerService(service.MultiService, Referenceable): def remote_subscribe_v2(self, subscriber, service_name, subscriber_info): self.log("introducer: subscription[%s] request at %s" % (service_name, subscriber), umid="U3uzLg") + service_name = ensure_text(service_name) + subscriber_info = dictutil.UnicodeKeyDict({ + ensure_text(k): v for (k, v) in subscriber_info.items() + }) return self.add_subscriber(subscriber, service_name, subscriber_info) def add_subscriber(self, subscriber, service_name, subscriber_info): @@ -301,6 +328,10 @@ class IntroducerService(service.MultiService, Referenceable): subscribers.pop(subscriber, None) subscriber.notifyOnDisconnect(_remove) + # Make sure types are correct: + for k in self._announcements: + assert isinstance(k[0], type(service_name)) + # now tell them about any announcements they're interested in announcements = set( [ ann_t for idx,(ann_t,canary,ann,when) diff --git a/src/allmydata/mutable/checker.py b/src/allmydata/mutable/checker.py index 6e083f8f5..e3e5951f0 100644 --- a/src/allmydata/mutable/checker.py +++ b/src/allmydata/mutable/checker.py @@ -1,3 +1,14 @@ +""" +Ported to Python 3. +""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 from allmydata.uri import from_string from allmydata.util import base32, log, dictutil @@ -187,7 +198,7 @@ class MutableChecker(object): if self.bad_shares: report.append("Corrupt Shares:") summary.append("Corrupt Shares:") - for (server, shnum, f) in sorted(self.bad_shares): + for (server, shnum, f) in sorted(self.bad_shares, key=id): serverid = server.get_serverid() locator = (server, self._storage_index, shnum) corrupt_share_locators.append(locator) diff --git a/src/allmydata/mutable/common.py b/src/allmydata/mutable/common.py index 16f39b302..802681ae7 100644 --- a/src/allmydata/mutable/common.py +++ b/src/allmydata/mutable/common.py @@ -1,3 +1,14 @@ +""" +Ported to Python 3. 
+""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 MODE_CHECK = "MODE_CHECK" # query all peers MODE_ANYTHING = "MODE_ANYTHING" # one recoverable version diff --git a/src/allmydata/mutable/filenode.py b/src/allmydata/mutable/filenode.py index 849dc4c88..39e8b76be 100644 --- a/src/allmydata/mutable/filenode.py +++ b/src/allmydata/mutable/filenode.py @@ -1,3 +1,15 @@ +""" +Ported to Python 3. +""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 + import random from zope.interface import implementer @@ -147,9 +159,9 @@ class MutableFileNode(object): def _get_initial_contents(self, contents): if contents is None: - return MutableData("") + return MutableData(b"") - if isinstance(contents, str): + if isinstance(contents, bytes): return MutableData(contents) if IMutableUploadable.providedBy(contents): @@ -552,7 +564,7 @@ class MutableFileNode(object): return d - def upload(self, new_contents, servermap): + def upload(self, new_contents, servermap, progress=None): """ I overwrite the contents of the best recoverable version of this mutable file with new_contents, using servermap instead of @@ -884,9 +896,9 @@ class MutableFileVersion(object): d = self._try_to_download_data() def _apply(old_contents): new_contents = modifier(old_contents, self._servermap, first_time) - precondition((isinstance(new_contents, str) or + precondition((isinstance(new_contents, bytes) or new_contents is None), - "Modifier function must return a string " + "Modifier function must return bytes " "or None") if new_contents is None or new_contents == old_contents: @@ -939,7 +951,7 @@ class MutableFileVersion(object): return self._servermap.size_of_version(self._version) - def download_to_data(self, fetch_privkey=False, progress=None): + def download_to_data(self, fetch_privkey=False, progress=None): # type: ignore # fixme """ I return a Deferred that fires with the contents of this readable object as a byte string. @@ -960,7 +972,7 @@ class MutableFileVersion(object): c = consumer.MemoryConsumer() # modify will almost certainly write, so we need the privkey. d = self._read(c, fetch_privkey=True) - d.addCallback(lambda mc: "".join(mc.chunks)) + d.addCallback(lambda mc: b"".join(mc.chunks)) return d @@ -1076,7 +1088,7 @@ class MutableFileVersion(object): start = offset rest = offset + data.get_size() new = old[:start] - new += "".join(data.read(data.get_size())) + new += b"".join(data.read(data.get_size())) new += old[rest:] return new return self._modify(m, None) @@ -1141,7 +1153,7 @@ class MutableFileVersion(object): start_segments = {} # shnum -> start segment end_segments = {} # shnum -> end segment blockhashes = {} # shnum -> blockhash tree - for (shnum, original_data) in update_data.iteritems(): + for (shnum, original_data) in list(update_data.items()): data = [d[1] for d in original_data if d[0] == self._version] # data is [(blockhashes,start,end)..] 
@@ -1193,3 +1205,7 @@ class MutableFileVersion(object): self._servermap, mode=mode) return u.update() + + # https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3562 + def get_servermap(self): + raise NotImplementedError diff --git a/src/allmydata/mutable/layout.py b/src/allmydata/mutable/layout.py index bf9a0483b..ce51a8833 100644 --- a/src/allmydata/mutable/layout.py +++ b/src/allmydata/mutable/layout.py @@ -1,3 +1,15 @@ +""" +Ported to Python 3. +""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from future.utils import PY2 +if PY2: + # Omit dict so Python 3 changes don't leak into API callers on Python 2. + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, list, object, range, str, max, min # noqa: F401 from past.utils import old_div import struct @@ -1744,7 +1756,7 @@ class MDMFSlotReadProxy(object): def _read(self, readvs, force_remote=False): - unsatisfiable = list(filter(lambda x: x[0] + x[1] > len(self._data), readvs)) + unsatisfiable = [x for x in readvs if x[0] + x[1] > len(self._data)] # TODO: It's entirely possible to tweak this so that it just # fulfills the requests that it can, and not demand that all # requests are satisfiable before running it. diff --git a/src/allmydata/mutable/publish.py b/src/allmydata/mutable/publish.py index 12ad3d992..8a760c5d3 100644 --- a/src/allmydata/mutable/publish.py +++ b/src/allmydata/mutable/publish.py @@ -1,5 +1,17 @@ +""" +Ported to Python 3. +""" +from __future__ import division +from __future__ import absolute_import +from __future__ import print_function +from __future__ import unicode_literals + +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 + import os, time -from six.moves import cStringIO as StringIO +from io import BytesIO from itertools import count from zope.interface import implementer from twisted.internet import defer @@ -46,7 +58,7 @@ class PublishStatus(object): self.size = None self.status = "Not started" self.progress = 0.0 - self.counter = self.statusid_counter.next() + self.counter = next(self.statusid_counter) self.started = time.time() def add_per_server_time(self, server, elapsed): @@ -305,7 +317,7 @@ class Publish(object): # Our update process fetched these for us. We need to update # them in place as publishing happens. self.blockhashes = {} # (shnum, [blochashes]) - for (i, bht) in blockhashes.iteritems(): + for (i, bht) in list(blockhashes.items()): # We need to extract the leaves from our old hash tree. old_segcount = mathutil.div_ceil(version[4], version[3]) @@ -313,7 +325,7 @@ class Publish(object): bht = dict(enumerate(bht)) h.set_hashes(bht) leaves = h[h.get_leaf_index(0):] - for j in xrange(self.num_segments - len(leaves)): + for j in range(self.num_segments - len(leaves)): leaves.append(None) assert len(leaves) >= self.num_segments @@ -446,7 +458,7 @@ class Publish(object): # then we add in all the shares that were bad (corrupted, bad # signatures, etc). We want to replace these. 
- for key, old_checkstring in self._servermap.get_bad_shares().items(): + for key, old_checkstring in list(self._servermap.get_bad_shares().items()): (server, shnum) = key self.goal.add( (server,shnum) ) self.bad_share_checkstrings[(server,shnum)] = old_checkstring @@ -509,10 +521,10 @@ class Publish(object): # This will eventually hold the block hash chain for each share # that we publish. We define it this way so that empty publishes # will still have something to write to the remote slot. - self.blockhashes = dict([(i, []) for i in xrange(self.total_shares)]) - for i in xrange(self.total_shares): + self.blockhashes = dict([(i, []) for i in range(self.total_shares)]) + for i in range(self.total_shares): blocks = self.blockhashes[i] - for j in xrange(self.num_segments): + for j in range(self.num_segments): blocks.append(None) self.sharehash_leaves = None # eventually [sharehashes] self.sharehashes = {} # shnum -> [sharehash leaves necessary to @@ -526,7 +538,7 @@ class Publish(object): return self.done_deferred def _get_some_writer(self): - return list(self.writers.values()[0])[0] + return list(list(self.writers.values())[0])[0] def _update_status(self): self._status.set_status("Sending Shares: %d placed out of %d, " @@ -684,7 +696,7 @@ class Publish(object): salt = os.urandom(16) assert self._version == SDMF_VERSION - for shnum, writers in self.writers.iteritems(): + for shnum, writers in self.writers.items(): for writer in writers: writer.put_salt(salt) @@ -703,8 +715,9 @@ class Publish(object): self.log("Pushing segment %d of %d" % (segnum + 1, self.num_segments)) data = self.data.read(segsize) - # XXX: This is dumb. Why return a list? - data = "".join(data) + if not isinstance(data, bytes): + # XXX: Why does this return a list? + data = b"".join(data) assert len(data) == segsize, len(data) @@ -732,7 +745,7 @@ class Publish(object): for i in range(len(crypttext_pieces)): offset = i * piece_size piece = crypttext[offset:offset+piece_size] - piece = piece + "\x00"*(piece_size - len(piece)) # padding + piece = piece + b"\x00"*(piece_size - len(piece)) # padding crypttext_pieces[i] = piece assert len(piece) == piece_size d = fec.encode(crypttext_pieces) @@ -751,7 +764,7 @@ class Publish(object): results, salt = encoded_and_salt shares, shareids = results self._status.set_status("Pushing segment") - for i in xrange(len(shares)): + for i in range(len(shares)): sharedata = shares[i] shareid = shareids[i] if self._version == MDMF_VERSION: @@ -786,7 +799,7 @@ class Publish(object): def push_encprivkey(self): encprivkey = self._encprivkey self._status.set_status("Pushing encrypted private key") - for shnum, writers in self.writers.iteritems(): + for shnum, writers in self.writers.items(): for writer in writers: writer.put_encprivkey(encprivkey) @@ -794,7 +807,7 @@ class Publish(object): def push_blockhashes(self): self.sharehash_leaves = [None] * len(self.blockhashes) self._status.set_status("Building and pushing block hash tree") - for shnum, blockhashes in self.blockhashes.iteritems(): + for shnum, blockhashes in list(self.blockhashes.items()): t = hashtree.HashTree(blockhashes) self.blockhashes[shnum] = list(t) # set the leaf for future use. 
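
The `b"\x00"` padding fix above is the other recurring hazard in this port: on Python 2 the literals `"\x00"` and `b"\x00"` were the same type, while on Python 3 concatenating `str` padding onto `bytes` crypttext raises `TypeError`. A short self-contained sketch (hypothetical helper, not code from this patch):

    def pad_piece(piece, piece_size):
        # FEC encoding needs equal-length pieces; the pad must be bytes,
        # since bytes + str raises TypeError on Python 3.
        assert isinstance(piece, bytes)
        return piece + b"\x00" * (piece_size - len(piece))

    assert pad_piece(b"abc", 5) == b"abc\x00\x00"
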
@@ -808,7 +821,7 @@ class Publish(object): def push_sharehashes(self): self._status.set_status("Building and pushing share hash chain") share_hash_tree = hashtree.HashTree(self.sharehash_leaves) - for shnum in xrange(len(self.sharehash_leaves)): + for shnum in range(len(self.sharehash_leaves)): needed_indices = share_hash_tree.needed_hashes(shnum) self.sharehashes[shnum] = dict( [ (i, share_hash_tree[i]) for i in needed_indices] ) @@ -824,7 +837,7 @@ class Publish(object): # - Get the checkstring of the resulting layout; sign that. # - Push the signature self._status.set_status("Pushing root hashes and signature") - for shnum in xrange(self.total_shares): + for shnum in range(self.total_shares): writers = self.writers[shnum] for writer in writers: writer.put_root_hash(self.root_hash) @@ -852,7 +865,7 @@ class Publish(object): signable = self._get_some_writer().get_signable() self.signature = rsa.sign_data(self._privkey, signable) - for (shnum, writers) in self.writers.iteritems(): + for (shnum, writers) in self.writers.items(): for writer in writers: writer.put_signature(self.signature) self._status.timings['sign'] = time.time() - started @@ -867,7 +880,7 @@ class Publish(object): ds = [] verification_key = rsa.der_string_from_verifying_key(self._pubkey) - for (shnum, writers) in self.writers.copy().iteritems(): + for (shnum, writers) in list(self.writers.copy().items()): for writer in writers: writer.put_verification_key(verification_key) self.num_outstanding += 1 @@ -901,7 +914,7 @@ class Publish(object): def log_goal(self, goal, message=""): logmsg = [message] - for (shnum, server) in sorted([(s,p) for (p,s) in goal]): + for (shnum, server) in sorted([(s,p) for (p,s) in goal], key=lambda t: (id(t[0]), id(t[1]))): logmsg.append("sh%d to [%s]" % (shnum, server.get_name())) self.log("current goal: %s" % (", ".join(logmsg)), level=log.NOISY) self.log("we are planning to push new seqnum=#%d" % self._new_seqnum, @@ -1003,7 +1016,7 @@ class Publish(object): # TODO: Precompute this. shares = [] - for shnum, writers in self.writers.iteritems(): + for shnum, writers in self.writers.items(): shares.extend([x.shnum for x in writers if x.server == server]) known_shnums = set(shares) surprise_shares -= known_shnums @@ -1104,7 +1117,7 @@ class Publish(object): self.bad_servers.add(server) # don't ask them again # use the checkstring to add information to the log message unknown_format = False - for (shnum,readv) in read_data.items(): + for (shnum,readv) in list(read_data.items()): checkstring = readv[0] version = get_version_from_checkstring(checkstring) if version == MDMF_VERSION: @@ -1198,7 +1211,7 @@ class Publish(object): class MutableFileHandle(object): """ I am a mutable uploadable built around a filehandle-like object, - usually either a StringIO instance or a handle to an actual file. + usually either a BytesIO instance or a handle to an actual file. """ def __init__(self, filehandle): @@ -1268,14 +1281,14 @@ class MutableFileHandle(object): class MutableData(MutableFileHandle): """ I am a mutable uploadable built around a string, which I then cast - into a StringIO and treat as a filehandle. + into a BytesIO and treat as a filehandle. """ def __init__(self, s): # Take a string and return a file-like uploadable. - assert isinstance(s, str) + assert isinstance(s, bytes) - MutableFileHandle.__init__(self, StringIO(s)) + MutableFileHandle.__init__(self, BytesIO(s)) @implementer(IMutableUploadable) @@ -1327,7 +1340,7 @@ class TransformingUploadable(object): # are we in state 0? 
self.log("reading %d bytes" % length) - old_start_data = "" + old_start_data = b"" old_data_length = self._first_segment_offset - self._read_marker if old_data_length > 0: if old_data_length > length: @@ -1345,7 +1358,7 @@ class TransformingUploadable(object): # to pad the end of the data with data from our last segment. old_end_length = length - \ (self._newdata.get_size() - self._newdata.pos()) - old_end_data = "" + old_end_data = b"" if old_end_length > 0: self.log("reading %d bytes of old end data" % old_end_length) @@ -1361,7 +1374,7 @@ class TransformingUploadable(object): self.log("reading %d bytes of new data" % length) new_data = self._newdata.read(length) - new_data = "".join(new_data) + new_data = b"".join(new_data) self._read_marker += len(old_start_data + new_data + old_end_data) diff --git a/src/allmydata/mutable/repairer.py b/src/allmydata/mutable/repairer.py index 261ca9633..23af02203 100644 --- a/src/allmydata/mutable/repairer.py +++ b/src/allmydata/mutable/repairer.py @@ -1,3 +1,14 @@ +""" +Ported to Python 3. +""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 from zope.interface import implementer from twisted.internet import defer diff --git a/src/allmydata/mutable/retrieve.py b/src/allmydata/mutable/retrieve.py index a7785e4c5..894fb9776 100644 --- a/src/allmydata/mutable/retrieve.py +++ b/src/allmydata/mutable/retrieve.py @@ -1,3 +1,16 @@ +""" +Ported to Python 3. +""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from future.utils import PY2 +if PY2: + # Don't import bytes and str, to prevent API leakage + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, dict, list, object, range, max, min # noqa: F401 + import time from itertools import count @@ -747,9 +760,9 @@ class Retrieve(object): blockhashes = dict(enumerate(blockhashes)) self.log("the reader gave me the following blockhashes: %s" % \ - blockhashes.keys()) + list(blockhashes.keys())) self.log("the reader gave me the following sharehashes: %s" % \ - sharehashes.keys()) + list(sharehashes.keys())) bht = self._block_hash_trees[reader.shnum] if bht.needed_hashes(segnum, include_leaf=True): @@ -906,9 +919,11 @@ class Retrieve(object): def notify_server_corruption(self, server, shnum, reason): + if isinstance(reason, str): + reason = reason.encode("utf-8") storage_server = server.get_storage_server() storage_server.advise_corrupt_share( - "mutable", + b"mutable", self._storage_index, shnum, reason, diff --git a/src/allmydata/mutable/servermap.py b/src/allmydata/mutable/servermap.py index 32962c495..6d8e7806a 100644 --- a/src/allmydata/mutable/servermap.py +++ b/src/allmydata/mutable/servermap.py @@ -1,4 +1,16 @@ +""" +Ported to Python 3. 
+""" from __future__ import print_function +from __future__ import absolute_import +from __future__ import division +from __future__ import unicode_literals + +from future.utils import PY2 +if PY2: + # Doesn't import str to prevent API leakage on Python 2 + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, max, min # noqa: F401 +from past.builtins import unicode import sys, time, copy from zope.interface import implementer @@ -156,6 +168,7 @@ class ServerMap(object): corrupted or badly signed) so that a repair operation can do the test-and-set using it as a reference. """ + assert isinstance(checkstring, bytes) key = (server, shnum) # record checkstring self._bad_shares[key] = checkstring self._known_shares.pop(key, None) @@ -185,7 +198,7 @@ class ServerMap(object): def dump(self, out=sys.stdout): print("servermap:", file=out) - for ( (server, shnum), (verinfo, timestamp) ) in self._known_shares.items(): + for ( (server, shnum), (verinfo, timestamp) ) in list(self._known_shares.items()): (seqnum, root_hash, IV, segsize, datalength, k, N, prefix, offsets_tuple) = verinfo print("[%s]: sh#%d seq%d-%s %d-of-%d len%d" % @@ -223,7 +236,7 @@ class ServerMap(object): """Return a dict that maps versionid to sets of (shnum, server, timestamp) tuples.""" versionmap = DictOfSets() - for ( (server, shnum), (verinfo, timestamp) ) in self._known_shares.items(): + for ( (server, shnum), (verinfo, timestamp) ) in list(self._known_shares.items()): versionmap.add(verinfo, (shnum, server, timestamp)) return versionmap @@ -242,7 +255,7 @@ class ServerMap(object): (num_distinct_shares, k, N) tuples.""" versionmap = self.make_versionmap() all_shares = {} - for verinfo, shares in versionmap.items(): + for verinfo, shares in list(versionmap.items()): s = set() for (shnum, server, timestamp) in shares: s.add(shnum) @@ -268,7 +281,7 @@ class ServerMap(object): """Return a string describing which versions we know about.""" versionmap = self.make_versionmap() bits = [] - for (verinfo, shares) in versionmap.items(): + for (verinfo, shares) in list(versionmap.items()): vstr = self.summarize_version(verinfo) shnums = set([shnum for (shnum, server, timestamp) in shares]) bits.append("%d*%s" % (len(shnums), vstr)) @@ -279,7 +292,7 @@ class ServerMap(object): recoverable.""" versionmap = self.make_versionmap() recoverable_versions = set() - for (verinfo, shares) in versionmap.items(): + for (verinfo, shares) in list(versionmap.items()): (seqnum, root_hash, IV, segsize, datalength, k, N, prefix, offsets_tuple) = verinfo shnums = set([shnum for (shnum, server, timestamp) in shares]) @@ -295,7 +308,7 @@ class ServerMap(object): versionmap = self.make_versionmap() unrecoverable_versions = set() - for (verinfo, shares) in versionmap.items(): + for (verinfo, shares) in list(versionmap.items()): (seqnum, root_hash, IV, segsize, datalength, k, N, prefix, offsets_tuple) = verinfo shnums = set([shnum for (shnum, server, timestamp) in shares]) @@ -329,7 +342,7 @@ class ServerMap(object): healths = {} # maps verinfo to (found,k) unrecoverable = set() highest_recoverable_seqnum = -1 - for (verinfo, shares) in versionmap.items(): + for (verinfo, shares) in list(versionmap.items()): (seqnum, root_hash, IV, segsize, datalength, k, N, prefix, offsets_tuple) = verinfo shnums = set([shnum for (shnum, server, timestamp) in shares]) @@ -664,7 +677,7 @@ class ServermapUpdater(object): ds = [] - for shnum,datav in datavs.items(): + for shnum,datav in 
list(datavs.items()): data = datav[0] reader = MDMFSlotReadProxy(ss, storage_index, @@ -800,9 +813,11 @@ class ServermapUpdater(object): def notify_server_corruption(self, server, shnum, reason): + if isinstance(reason, unicode): + reason = reason.encode("utf-8") ss = server.get_storage_server() ss.advise_corrupt_share( - "mutable", + b"mutable", self._storage_index, shnum, reason, diff --git a/src/allmydata/node.py b/src/allmydata/node.py index 26fc6fc97..2f340f860 100644 --- a/src/allmydata/node.py +++ b/src/allmydata/node.py @@ -1,36 +1,60 @@ """ This module contains classes and functions to implement and manage a node for Tahoe-LAFS. + +Ported to Python 3. """ -from past.builtins import unicode +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 +from six import ensure_str, ensure_text import datetime import os.path import re import types import errno -from io import StringIO -import tempfile from base64 import b32decode, b32encode +from errno import ENOENT, EPERM +from warnings import warn -# Python 2 compatibility -from six.moves import configparser -from future.utils import PY2 -if PY2: - from io import BytesIO as StringIO # noqa: F811 +try: + from typing import Union +except ImportError: + pass +import attr + +# On Python 2 this will be the backported package. +import configparser + +from twisted.python.filepath import ( + FilePath, +) from twisted.python import log as twlog from twisted.application import service from twisted.python.failure import Failure -from foolscap.api import Tub, app_versions +from foolscap.api import Tub + import foolscap.logging.log -from allmydata.version_checks import get_package_versions, get_package_versions_string + from allmydata.util import log from allmydata.util import fileutil, iputil -from allmydata.util.assertutil import _assert from allmydata.util.fileutil import abspath_expanduser_unicode from allmydata.util.encodingutil import get_filesystem_encoding, quote_output from allmydata.util import configutil +from allmydata.util.yamlutil import ( + safe_load, +) + +from . import ( + __full_version__, +) def _common_valid_config(): return configutil.ValidConfiguration({ @@ -72,11 +96,6 @@ def _common_valid_config(): ), }) -# Add our application versions to the data that Foolscap's LogPublisher -# reports. 
-for thing, things_version in get_package_versions().items(): - app_versions.add_version(thing, things_version) - # group 1 will be addr (dotted quad string), group 3 if any will be portnum (string) ADDR_RE = re.compile("^([1-9][0-9]*\.[1-9][0-9]*\.[1-9][0-9]*\.[1-9][0-9]*)(:([1-9][0-9]*))?$") @@ -100,8 +119,8 @@ def formatTimeTahoeStyle(self, when): """ d = datetime.datetime.utcfromtimestamp(when) if d.microsecond: - return d.isoformat(" ")[:-3]+"Z" - return d.isoformat(" ") + ".000Z" + return d.isoformat(ensure_str(" "))[:-3]+"Z" + return d.isoformat(ensure_str(" ")) + ".000Z" PRIV_README = """ This directory contains files which contain private data for the Tahoe node, @@ -155,6 +174,7 @@ def create_node_dir(basedir, readme_text): privdir = os.path.join(basedir, "private") if not os.path.exists(privdir): fileutil.make_dirs(privdir, 0o700) + readme_text = ensure_text(readme_text) with open(os.path.join(privdir, 'README'), 'w') as f: f.write(readme_text) @@ -175,7 +195,7 @@ def read_config(basedir, portnumfile, generated_files=[], _valid_config=None): :returns: :class:`allmydata.node._Config` instance """ - basedir = abspath_expanduser_unicode(unicode(basedir)) + basedir = abspath_expanduser_unicode(ensure_text(basedir)) if _valid_config is None: _valid_config = _common_valid_config() @@ -185,44 +205,52 @@ def read_config(basedir, portnumfile, generated_files=[], _valid_config=None): # canonicalize the portnum file portnumfile = os.path.join(basedir, portnumfile) - # (try to) read the main config file - config_fname = os.path.join(basedir, "tahoe.cfg") - parser = configparser.SafeConfigParser() + config_path = FilePath(basedir).child("tahoe.cfg") try: - parser = configutil.get_config(config_fname) + config_str = config_path.getContent() except EnvironmentError as e: if e.errno != errno.ENOENT: raise + # The file is missing, just create empty ConfigParser. 
+ config_str = u"" + else: + config_str = config_str.decode("utf-8-sig") - configutil.validate_config(config_fname, parser, _valid_config) - - # make sure we have a private configuration area - fileutil.make_dirs(os.path.join(basedir, "private"), 0o700) - - return _Config(parser, portnumfile, basedir, config_fname) + return config_from_string( + basedir, + portnumfile, + config_str, + _valid_config, + config_path, + ) -def config_from_string(basedir, portnumfile, config_str, _valid_config=None): +def config_from_string(basedir, portnumfile, config_str, _valid_config=None, fpath=None): """ load and validate configuration from in-memory string """ if _valid_config is None: _valid_config = _common_valid_config() + if isinstance(config_str, bytes): + config_str = config_str.decode("utf-8") + # load configuration from in-memory string - parser = configparser.SafeConfigParser() - parser.readfp(StringIO(config_str)) + parser = configutil.get_config_from_string(config_str) - fname = "" - configutil.validate_config(fname, parser, _valid_config) - return _Config(parser, portnumfile, basedir, fname) + configutil.validate_config( + "" if fpath is None else fpath.path, + parser, + _valid_config, + ) - -def get_app_versions(): - """ - :returns: dict of versions important to Foolscap - """ - return dict(app_versions.versions) + return _Config( + parser, + portnumfile, + basedir, + fpath, + _valid_config, + ) def _error_about_old_config_files(basedir, generated_files): @@ -250,6 +278,12 @@ def _error_about_old_config_files(basedir, generated_files): raise e +def ensure_text_and_abspath_expanduser_unicode(basedir): + # type: (Union[bytes, str]) -> str + return abspath_expanduser_unicode(ensure_text(basedir)) + + +@attr.s class _Config(object): """ Manages configuration of a Tahoe 'node directory'. @@ -258,35 +292,47 @@ class _Config(object): class; names and funtionality have been kept the same while moving the code. It probably makes sense for several of these APIs to have better names. + + :ivar ConfigParser config: The actual configuration values. + + :ivar str portnum_fname: filename to use for the port-number file (a + relative path inside basedir). + + :ivar str _basedir: path to our "node directory", inside which all + configuration is managed. + + :ivar (FilePath|NoneType) config_path: The path actually used to create + the configparser (might be ``None`` if using in-memory data). + + :ivar ValidConfiguration valid_config_sections: The validator for the + values in this configuration. 
""" + config = attr.ib(validator=attr.validators.instance_of(configparser.ConfigParser)) + portnum_fname = attr.ib() + _basedir = attr.ib( + converter=ensure_text_and_abspath_expanduser_unicode, + ) # type: str + config_path = attr.ib( + validator=attr.validators.optional( + attr.validators.instance_of(FilePath), + ), + ) + valid_config_sections = attr.ib( + default=configutil.ValidConfiguration.everything(), + validator=attr.validators.instance_of(configutil.ValidConfiguration), + ) - def __init__(self, configparser, portnum_fname, basedir, config_fname): - """ - :param configparser: a ConfigParser instance + @property + def nickname(self): + nickname = self.get_config("node", "nickname", u"") + assert isinstance(nickname, str) + return nickname - :param portnum_fname: filename to use for the port-number file - (a relative path inside basedir) - - :param basedir: path to our "node directory", inside which all - configuration is managed - - :param config_fname: the pathname actually used to create the - configparser (might be 'fake' if using in-memory data) - """ - self.portnum_fname = portnum_fname - self._basedir = abspath_expanduser_unicode(unicode(basedir)) - self._config_fname = config_fname - self.config = configparser - - nickname_utf8 = self.get_config("node", "nickname", "") - if isinstance(nickname_utf8, bytes): # Python 2 - self.nickname = nickname_utf8.decode("utf-8") - else: - self.nickname = nickname_utf8 - assert type(self.nickname) is unicode - - def validate(self, valid_config_sections): - configutil.validate_config(self._config_fname, self.config, valid_config_sections) + @property + def _config_fname(self): + if self.config_path is None: + return "" + return self.config_path.path def write_config_file(self, name, value, mode="w"): """ @@ -316,7 +362,7 @@ class _Config(object): return self.config.getboolean(section, option) item = self.config.get(section, option) - if option.endswith(".furl") and self._contains_unescaped_hash(item): + if option.endswith(".furl") and '#' in item: raise UnescapedHashError(section, option, item) return item @@ -331,6 +377,34 @@ class _Config(object): ) return default + def set_config(self, section, option, value): + """ + Set a config option in a section and re-write the tahoe.cfg file + + :param str section: The name of the section in which to set the + option. + + :param str option: The name of the option to set. + + :param str value: The value of the option. + + :raise UnescapedHashError: If the option holds a fURL and there is a + ``#`` in the value. + """ + if option.endswith(".furl") and "#" in value: + raise UnescapedHashError(section, option, value) + + copied_config = configutil.copy_config(self.config) + configutil.set_config(copied_config, section, option, value) + configutil.validate_config( + self._config_fname, + copied_config, + self.valid_config_sections, + ) + if self.config_path is not None: + configutil.write_config(self.config_path, copied_config) + self.config = copied_config + def get_config_from_file(self, name, required=False): """Get the (string) contents of a config file, or None if the file did not exist. 
If required=True, raise an exception rather than @@ -360,14 +434,16 @@ class _Config(object): """ privname = os.path.join(self._basedir, "private", name) try: - value = fileutil.read(privname) + value = fileutil.read(privname, mode="r") except EnvironmentError as e: if e.errno != errno.ENOENT: raise # we only care about "file doesn't exist" if default is _None: raise MissingConfigEntry("The required configuration file %s is missing." % (quote_output(privname),)) - if isinstance(default, (bytes, unicode)): + if isinstance(default, bytes): + default = str(default, "utf-8") + if isinstance(default, str): value = default else: value = default() @@ -379,19 +455,21 @@ class _Config(object): config file that resides within the subdirectory named 'private'), and return it. """ + if isinstance(value, str): + value = value.encode("utf-8") privname = os.path.join(self._basedir, "private", name) with open(privname, "wb") as f: f.write(value) def get_private_config(self, name, default=_None): - """Read the (string) contents of a private config file (which is a + """Read the (native string) contents of a private config file (a config file that resides within the subdirectory named 'private'), and return it. Return a default, or raise an error if one was not given. """ privname = os.path.join(self._basedir, "private", name) try: - return fileutil.read(privname).strip() + return fileutil.read(privname, mode="r").strip() except EnvironmentError as e: if e.errno != errno.ENOENT: raise # we only care about "file doesn't exist" @@ -419,16 +497,96 @@ class _Config(object): os.path.join(self._basedir, *args) ) - @staticmethod - def _contains_unescaped_hash(item): - characters = iter(item) - for c in characters: - if c == '\\': - characters.next() - elif c == '#': - return True + def get_introducer_configuration(self): + """ + Get configuration for introducers. - return False + :return {unicode: (unicode, FilePath)}: A mapping from introducer + petname to a tuple of the introducer's fURL and local cache path. 
+ """ + introducers_yaml_filename = self.get_private_path("introducers.yaml") + introducers_filepath = FilePath(introducers_yaml_filename) + + def get_cache_filepath(petname): + return FilePath( + self.get_private_path("introducer_{}_cache.yaml".format(petname)), + ) + + try: + with introducers_filepath.open() as f: + introducers_yaml = safe_load(f) + if introducers_yaml is None: + raise EnvironmentError( + EPERM, + "Can't read '{}'".format(introducers_yaml_filename), + introducers_yaml_filename, + ) + introducers = { + petname: config["furl"] + for petname, config + in introducers_yaml.get("introducers", {}).items() + } + non_strs = list( + k + for k + in introducers.keys() + if not isinstance(k, str) + ) + if non_strs: + raise TypeError( + "Introducer petnames {!r} should have been str".format( + non_strs, + ), + ) + non_strs = list( + v + for v + in introducers.values() + if not isinstance(v, str) + ) + if non_strs: + raise TypeError( + "Introducer fURLs {!r} should have been str".format( + non_strs, + ), + ) + log.msg( + "found {} introducers in {!r}".format( + len(introducers), + introducers_yaml_filename, + ) + ) + except EnvironmentError as e: + if e.errno != ENOENT: + raise + introducers = {} + + # supported the deprecated [client]introducer.furl item in tahoe.cfg + tahoe_cfg_introducer_furl = self.get_config("client", "introducer.furl", None) + if tahoe_cfg_introducer_furl == "None": + raise ValueError( + "tahoe.cfg has invalid 'introducer.furl = None':" + " to disable it omit the key entirely" + ) + if tahoe_cfg_introducer_furl: + warn( + "tahoe.cfg [client]introducer.furl is deprecated; " + "use private/introducers.yaml instead.", + category=DeprecationWarning, + stacklevel=-1, + ) + if "default" in introducers: + raise ValueError( + "'default' introducer furl cannot be specified in tahoe.cfg and introducers.yaml;" + " please fix impossible configuration." + ) + introducers['default'] = tahoe_cfg_introducer_furl + + return { + petname: (furl, get_cache_filepath(petname)) + for (petname, furl) + in introducers.items() + } def create_tub_options(config): @@ -468,28 +626,20 @@ def _make_tcp_handler(): return default() -def create_connection_handlers(reactor, config, i2p_provider, tor_provider): +def create_default_connection_handlers(config, handlers): """ - :returns: 2-tuple of default_connection_handlers, foolscap_connection_handlers + :return: A dictionary giving the default connection handlers. The keys + are strings like "tcp" and the values are strings like "tor" or + ``None``. """ reveal_ip = config.get_config("node", "reveal-IP-address", True, boolean=True) - # We store handlers for everything. None means we were unable to - # create that handler, so hints which want it will be ignored. 
- handlers = foolscap_connection_handlers = { - "tcp": _make_tcp_handler(), - "tor": tor_provider.get_tor_handler(), - "i2p": i2p_provider.get_i2p_handler(), - } - log.msg( - format="built Foolscap connection handlers for: %(known_handlers)s", - known_handlers=sorted([k for k,v in handlers.items() if v]), - facility="tahoe.node", - umid="PuLh8g", - ) - - # then we remember the default mappings from tahoe.cfg - default_connection_handlers = {"tor": "tor", "i2p": "i2p"} + # Remember the default mappings from tahoe.cfg + default_connection_handlers = { + name: name + for name + in handlers + } tcp_handler_name = config.get_config("connections", "tcp", "tcp").lower() if tcp_handler_name == "disabled": default_connection_handlers["tcp"] = None @@ -514,10 +664,35 @@ def create_connection_handlers(reactor, config, i2p_provider, tor_provider): if not reveal_ip: if default_connection_handlers.get("tcp") == "tcp": - raise PrivacyError("tcp = tcp, must be set to 'tor' or 'disabled'") - return default_connection_handlers, foolscap_connection_handlers + raise PrivacyError( + "Privacy requested with `reveal-IP-address = false` " + "but `tcp = tcp` conflicts with this.", + ) + return default_connection_handlers +def create_connection_handlers(config, i2p_provider, tor_provider): + """ + :returns: 2-tuple of default_connection_handlers, foolscap_connection_handlers + """ + # We store handlers for everything. None means we were unable to + # create that handler, so hints which want it will be ignored. + handlers = { + "tcp": _make_tcp_handler(), + "tor": tor_provider.get_client_endpoint(), + "i2p": i2p_provider.get_client_endpoint(), + } + log.msg( + format="built Foolscap connection handlers for: %(known_handlers)s", + known_handlers=sorted([k for k,v in handlers.items() if v]), + facility="tahoe.node", + umid="PuLh8g", + ) + return create_default_connection_handlers( + config, + handlers, + ), handlers + def create_tub(tub_options, default_connection_handlers, foolscap_connection_handlers, handler_overrides={}, **kwargs): @@ -532,12 +707,12 @@ def create_tub(tub_options, default_connection_handlers, foolscap_connection_han the new Tub via `Tub.setOption` """ tub = Tub(**kwargs) - for (name, value) in tub_options.items(): + for (name, value) in list(tub_options.items()): tub.setOption(name, value) handlers = default_connection_handlers.copy() handlers.update(handler_overrides) tub.removeAllConnectionHintHandlers() - for hint_type, handler_name in handlers.items(): + for hint_type, handler_name in list(handlers.items()): handler = foolscap_connection_handlers.get(handler_name) if handler: tub.addConnectionHintHandler(hint_type, handler) @@ -549,13 +724,29 @@ def _convert_tub_port(s): :returns: a proper Twisted endpoint string like (`tcp:X`) is `s` is a bare number, or returns `s` as-is """ - if re.search(r'^\d+$', s): - return "tcp:{}".format(int(s)) - return s + us = s + if isinstance(s, bytes): + us = s.decode("utf-8") + if re.search(r'^\d+$', us): + return "tcp:{}".format(int(us)) + return us -def _tub_portlocation(config): +class PortAssignmentRequired(Exception): """ + A Tub port number was configured to be 0 where this is not allowed. + """ + + +def _tub_portlocation(config, get_local_addresses_sync, allocate_tcp_port): + """ + Figure out the network location of the main tub for some configuration. + + :param get_local_addresses_sync: A function like + ``iputil.get_local_addresses_sync``. + + :param allocate_tcp_port: A function like ``iputil.allocate_tcp_port``. 
+
     :returns: None or tuple of (port, location) for the main tub based on
         the given configuration. May raise ValueError or PrivacyError if
         there are problems with the config
@@ -595,7 +786,7 @@
             file_tubport = fileutil.read(config.portnum_fname).strip()
             tubport = _convert_tub_port(file_tubport)
         else:
-            tubport = "tcp:%d" % iputil.allocate_tcp_port()
+            tubport = "tcp:%d" % (allocate_tcp_port(),)
             fileutil.write_atomically(config.portnum_fname, tubport + "\n", mode="")
     else:
@@ -603,7 +794,7 @@
         for port in tubport.split(","):
             if port in ("0", "tcp:0"):
-                raise ValueError("tub.port cannot be 0: you must choose")
+                raise PortAssignmentRequired()

     if cfg_location is None:
         cfg_location = "AUTO"
@@ -615,7 +806,7 @@
     if "AUTO" in split_location:
         if not reveal_ip:
             raise PrivacyError("tub.location uses AUTO")
-        local_addresses = iputil.get_local_addresses_sync()
+        local_addresses = get_local_addresses_sync()
         # tubport must be like "tcp:12345" or "tcp:12345:morestuff"
         local_portnum = int(tubport.split(":")[1])
     new_locations = []
@@ -639,9 +830,40 @@
             new_locations.append(loc)
     location = ",".join(new_locations)

+    # Lacking this, Python 2 blows up in Foolscap when it is confused by a
+    # Unicode FURL.
+    location = location.encode("utf-8")
+
     return tubport, location


+def tub_listen_on(i2p_provider, tor_provider, tub, tubport, location):
+    """
+    Assign a Tub its listener locations.
+
+    :param i2p_provider: See ``allmydata.util.i2p_provider.create``.
+    :param tor_provider: See ``allmydata.util.tor_provider.create``.
+    """
+    for port in tubport.split(","):
+        if port == "listen:i2p":
+            # the I2P provider will read its section of tahoe.cfg and
+            # return either a fully-formed Endpoint, or a descriptor
+            # that will create one, so we don't have to stuff all the
+            # options into the tub.port string (which would need a lot
+            # of escaping)
+            port_or_endpoint = i2p_provider.get_listener()
+        elif port == "listen:tor":
+            port_or_endpoint = tor_provider.get_listener()
+        else:
+            port_or_endpoint = port
+        # Foolscap requires native strings:
+        if isinstance(port_or_endpoint, (bytes, str)):
+            port_or_endpoint = ensure_str(port_or_endpoint)
+        tub.listenOn(port_or_endpoint)
+    # This last step makes the Tub ready for tub.registerReference()
+    tub.setLocation(location)
+
+
 def create_main_tub(config, tub_options,
                     default_connection_handlers, foolscap_connection_handlers,
                     i2p_provider, tor_provider,
@@ -666,33 +888,34 @@
     :param tor_provider: None, or a _Provider instance if txtorcon +
         Tor are installed.
     """
-    portlocation = _tub_portlocation(config)
+    portlocation = _tub_portlocation(
+        config,
+        iputil.get_local_addresses_sync,
+        iputil.allocate_tcp_port,
+    )

-    certfile = config.get_private_path("node.pem") # FIXME? "node.pem" was the CERTFILE option/thing
-    tub = create_tub(tub_options, default_connection_handlers, foolscap_connection_handlers,
-                     handler_overrides=handler_overrides, certFile=certfile)
+    # FIXME?
"node.pem" was the CERTFILE option/thing + certfile = config.get_private_path("node.pem") - if portlocation: - tubport, location = portlocation - for port in tubport.split(","): - if port == "listen:i2p": - # the I2P provider will read its section of tahoe.cfg and - # return either a fully-formed Endpoint, or a descriptor - # that will create one, so we don't have to stuff all the - # options into the tub.port string (which would need a lot - # of escaping) - port_or_endpoint = i2p_provider.get_listener() - elif port == "listen:tor": - port_or_endpoint = tor_provider.get_listener() - else: - port_or_endpoint = port - tub.listenOn(port_or_endpoint) - tub.setLocation(location) - log.msg("Tub location set to %s" % (location,)) - # the Tub is now ready for tub.registerReference() - else: + tub = create_tub( + tub_options, + default_connection_handlers, + foolscap_connection_handlers, + handler_overrides=handler_overrides, + certFile=certfile, + ) + if portlocation is None: log.msg("Tub is not listening") - + else: + tubport, location = portlocation + tub_listen_on( + i2p_provider, + tor_provider, + tub, + tubport, + location, + ) + log.msg("Tub location set to %s" % (location,)) return tub @@ -714,7 +937,6 @@ class Node(service.MultiService): """ NODETYPE = "unknown NODETYPE" CERTFILE = "node.pem" - GENERATED_FILES = [] def __init__(self, config, main_tub, control_tub, i2p_provider, tor_provider): """ @@ -732,8 +954,6 @@ class Node(service.MultiService): self._i2p_provider = i2p_provider self._tor_provider = tor_provider - self.init_tempdir() - self.create_log_tub() self.logSource = "Node" self.setup_logging() @@ -742,7 +962,7 @@ class Node(service.MultiService): if self.tub is not None: self.nodeid = b32decode(self.tub.tubID.upper()) # binary format self.short_nodeid = b32encode(self.nodeid).lower()[:8] # for printing - self.config.write_config_file("my_nodeid", b32encode(self.nodeid).lower() + "\n") + self.config.write_config_file("my_nodeid", b32encode(self.nodeid).lower() + b"\n", mode="wb") self.tub.setServiceParent(self) else: self.nodeid = self.short_nodeid = None @@ -751,7 +971,7 @@ class Node(service.MultiService): if self.control_tub is not None: self.control_tub.setServiceParent(self) - self.log("Node constructed. " + get_package_versions_string()) + self.log("Node constructed. " + __full_version__) iputil.increase_rlimits() def _is_tub_listening(self): @@ -760,25 +980,6 @@ class Node(service.MultiService): """ return len(self.tub.getListeners()) > 0 - def init_tempdir(self): - """ - Initialize/create a directory for temporary files. - """ - tempdir_config = self.config.get_config("node", "tempdir", "tmp") - if isinstance(tempdir_config, bytes): - tempdir_config = tempdir_config.decode('utf-8') - tempdir = self.config.get_config_path(tempdir_config) - if not os.path.exists(tempdir): - fileutil.make_dirs(tempdir) - tempfile.tempdir = tempdir - # this should cause twisted.web.http (which uses - # tempfile.TemporaryFile) to put large request bodies in the given - # directory. Without this, the default temp dir is usually /tmp/, - # which is frequently too small. 
- temp_fd, test_name = tempfile.mkstemp() - _assert(os.path.dirname(test_name) == tempdir, test_name, tempdir) - os.close(temp_fd) # avoid leak of unneeded fd - # pull this outside of Node's __init__ too, see: # https://tahoe-lafs.org/trac/tahoe-lafs/ticket/2948 def create_log_tub(self): @@ -839,12 +1040,13 @@ class Node(service.MultiService): lgfurl = self.config.get_config("node", "log_gatherer.furl", "") if lgfurl: # this is in addition to the contents of log-gatherer-furlfile + lgfurl = lgfurl.encode("utf-8") self.log_tub.setOption("log-gatherer-furl", lgfurl) self.log_tub.setOption("log-gatherer-furlfile", self.config.get_config_path("log_gatherer.furl")) incident_dir = self.config.get_config_path("logs", "incidents") - foolscap.logging.log.setLogDir(incident_dir.encode(get_filesystem_encoding())) + foolscap.logging.log.setLogDir(incident_dir) twlog.msg("Foolscap logging initialized") twlog.msg("Note to developers: twistd.log does not receive very much.") twlog.msg("Use 'flogtool tail -c NODEDIR/private/logport.furl' instead") diff --git a/src/allmydata/nodemaker.py b/src/allmydata/nodemaker.py index 8e68d92fe..6b0b77c5c 100644 --- a/src/allmydata/nodemaker.py +++ b/src/allmydata/nodemaker.py @@ -1,3 +1,15 @@ +""" +Ported to Python 3. +""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 + import weakref from zope.interface import implementer from allmydata.util.assertutil import precondition @@ -66,9 +78,9 @@ class NodeMaker(object): memokey = b"I" + bigcap else: memokey = b"M" + bigcap - if memokey in self._node_cache: + try: node = self._node_cache[memokey] - else: + except KeyError: cap = uri.from_string(bigcap, deep_immutable=deep_immutable, name=name) node = self._create_from_single_cap(cap) @@ -126,7 +138,7 @@ class NodeMaker(object): def create_new_mutable_directory(self, initial_children={}, version=None): # initial_children must have metadata (i.e. 
{} instead of None) - for (name, (node, metadata)) in initial_children.iteritems(): + for (name, (node, metadata)) in initial_children.items(): precondition(isinstance(metadata, dict), "create_new_mutable_directory requires metadata to be a dict, not None", metadata) node.raise_error() diff --git a/src/allmydata/scripts/admin.py b/src/allmydata/scripts/admin.py index e472ffd8c..50dde9e43 100644 --- a/src/allmydata/scripts/admin.py +++ b/src/allmydata/scripts/admin.py @@ -1,5 +1,10 @@ from __future__ import print_function +try: + from allmydata.scripts.types_ import SubCommands +except ImportError: + pass + from twisted.python import usage from allmydata.scripts.common import BaseOptions @@ -79,8 +84,8 @@ def do_admin(options): subCommands = [ - ["admin", None, AdminCommand, "admin subcommands: use 'tahoe admin' for a list"], - ] + ("admin", None, AdminCommand, "admin subcommands: use 'tahoe admin' for a list"), + ] # type: SubCommands dispatch = { "admin": do_admin, diff --git a/src/allmydata/scripts/cli.py b/src/allmydata/scripts/cli.py index 379e1d212..e4cd8aa22 100644 --- a/src/allmydata/scripts/cli.py +++ b/src/allmydata/scripts/cli.py @@ -1,6 +1,12 @@ from __future__ import print_function import os.path, re, fnmatch + +try: + from allmydata.scripts.types_ import SubCommands, Parameters +except ImportError: + pass + from twisted.python import usage from allmydata.scripts.common import get_aliases, get_default_nodedir, \ DEFAULT_ALIAS, BaseOptions @@ -19,7 +25,7 @@ class FileStoreOptions(BaseOptions): "This overrides the URL found in the --node-directory ."], ["dir-cap", None, None, "Specify which dirnode URI should be used as the 'tahoe' alias."] - ] + ] # type: Parameters def postOptions(self): self["quiet"] = self.parent["quiet"] @@ -455,25 +461,25 @@ class DeepCheckOptions(FileStoreOptions): Optionally repair any problems found.""" subCommands = [ - ["mkdir", None, MakeDirectoryOptions, "Create a new directory."], - ["add-alias", None, AddAliasOptions, "Add a new alias cap."], - ["create-alias", None, CreateAliasOptions, "Create a new alias cap."], - ["list-aliases", None, ListAliasesOptions, "List all alias caps."], - ["ls", None, ListOptions, "List a directory."], - ["get", None, GetOptions, "Retrieve a file from the grid."], - ["put", None, PutOptions, "Upload a file into the grid."], - ["cp", None, CpOptions, "Copy one or more files or directories."], - ["unlink", None, UnlinkOptions, "Unlink a file or directory on the grid."], - ["mv", None, MvOptions, "Move a file within the grid."], - ["ln", None, LnOptions, "Make an additional link to an existing file or directory."], - ["backup", None, BackupOptions, "Make target dir look like local dir."], - ["webopen", None, WebopenOptions, "Open a web browser to a grid file or directory."], - ["manifest", None, ManifestOptions, "List all files/directories in a subtree."], - ["stats", None, StatsOptions, "Print statistics about all files/directories in a subtree."], - ["check", None, CheckOptions, "Check a single file or directory."], - ["deep-check", None, DeepCheckOptions, "Check all files/directories reachable from a starting point."], - ["status", None, TahoeStatusCommand, "Various status information."], - ] + ("mkdir", None, MakeDirectoryOptions, "Create a new directory."), + ("add-alias", None, AddAliasOptions, "Add a new alias cap."), + ("create-alias", None, CreateAliasOptions, "Create a new alias cap."), + ("list-aliases", None, ListAliasesOptions, "List all alias caps."), + ("ls", None, ListOptions, "List a directory."), + 
("get", None, GetOptions, "Retrieve a file from the grid."), + ("put", None, PutOptions, "Upload a file into the grid."), + ("cp", None, CpOptions, "Copy one or more files or directories."), + ("unlink", None, UnlinkOptions, "Unlink a file or directory on the grid."), + ("mv", None, MvOptions, "Move a file within the grid."), + ("ln", None, LnOptions, "Make an additional link to an existing file or directory."), + ("backup", None, BackupOptions, "Make target dir look like local dir."), + ("webopen", None, WebopenOptions, "Open a web browser to a grid file or directory."), + ("manifest", None, ManifestOptions, "List all files/directories in a subtree."), + ("stats", None, StatsOptions, "Print statistics about all files/directories in a subtree."), + ("check", None, CheckOptions, "Check a single file or directory."), + ("deep-check", None, DeepCheckOptions, "Check all files/directories reachable from a starting point."), + ("status", None, TahoeStatusCommand, "Various status information."), + ] # type: SubCommands def mkdir(options): from allmydata.scripts import tahoe_mkdir diff --git a/src/allmydata/scripts/common.py b/src/allmydata/scripts/common.py index b2bb8a1e6..d73344274 100644 --- a/src/allmydata/scripts/common.py +++ b/src/allmydata/scripts/common.py @@ -4,11 +4,20 @@ import os, sys, urllib, textwrap import codecs from os.path import join +try: + from typing import Optional + from .types_ import Parameters +except ImportError: + pass + +from yaml import ( + safe_dump, +) + # Python 2 compatibility from future.utils import PY2 if PY2: from future.builtins import str # noqa: F401 -from six.moves.configparser import NoSectionError from twisted.python import usage @@ -34,12 +43,12 @@ class BaseOptions(usage.Options): super(BaseOptions, self).__init__() self.command_name = os.path.basename(sys.argv[0]) - # Only allow "tahoe --version", not e.g. "tahoe start --version" + # Only allow "tahoe --version", not e.g. "tahoe --version" def opt_version(self): raise usage.UsageError("--version not allowed on subcommands") - description = None - description_unwrapped = None + description = None # type: Optional[str] + description_unwrapped = None # type: Optional[str] def __str__(self): width = int(os.environ.get('COLUMNS', '80')) @@ -62,7 +71,7 @@ class BasedirOptions(BaseOptions): optParameters = [ ["basedir", "C", None, "Specify which Tahoe base directory should be used. [default: %s]" % quote_local_unicode_path(_default_nodedir)], - ] + ] # type: Parameters def parseArgs(self, basedir=None): # This finds the node-directory option correctly even if we are in a subcommand. @@ -99,7 +108,7 @@ class NoDefaultBasedirOptions(BasedirOptions): optParameters = [ ["basedir", "C", None, "Specify which Tahoe base directory should be used."], - ] + ] # type: Parameters # This is overridden in order to ensure we get a "Wrong number of arguments." # error when more than one argument is given. @@ -113,24 +122,42 @@ class NoDefaultBasedirOptions(BasedirOptions): DEFAULT_ALIAS = u"tahoe" +def write_introducer(basedir, petname, furl): + """ + Overwrite the node's ``introducers.yaml`` with a file containing the given + introducer information. 
+ """ + if isinstance(furl, bytes): + furl = furl.decode("utf-8") + basedir.child(b"private").child(b"introducers.yaml").setContent( + safe_dump({ + "introducers": { + petname: { + "furl": furl, + }, + }, + }).encode("ascii"), + ) + + def get_introducer_furl(nodedir, config): """ :return: the introducer FURL for the given node (no matter if it's a client-type node or an introducer itself) """ + for petname, (furl, cache) in config.get_introducer_configuration().items(): + return furl + + # We have no configured introducers. Maybe this is running *on* the + # introducer? Let's guess, sure why not. try: - introducer_furl = config.get('client', 'introducer.furl') - except NoSectionError: - # we're not a client; maybe this is running *on* the introducer? - try: - with open(join(nodedir, "private", "introducer.furl"), "r") as f: - introducer_furl = f.read().strip() - except IOError: - raise Exception( - "Can't find introducer FURL in tahoe.cfg nor " - "{}/private/introducer.furl".format(nodedir) - ) - return introducer_furl + with open(join(nodedir, "private", "introducer.furl"), "r") as f: + return f.read().strip() + except IOError: + raise Exception( + "Can't find introducer FURL in tahoe.cfg nor " + "{}/private/introducer.furl".format(nodedir) + ) def get_aliases(nodedir): diff --git a/src/allmydata/scripts/create_node.py b/src/allmydata/scripts/create_node.py index 2634e0915..0f507f518 100644 --- a/src/allmydata/scripts/create_node.py +++ b/src/allmydata/scripts/create_node.py @@ -3,13 +3,27 @@ from __future__ import print_function import os import json +try: + from allmydata.scripts.types_ import SubCommands +except ImportError: + pass + from twisted.internet import reactor, defer from twisted.python.usage import UsageError -from allmydata.scripts.common import BasedirOptions, NoDefaultBasedirOptions +from twisted.python.filepath import ( + FilePath, +) + +from allmydata.scripts.common import ( + BasedirOptions, + NoDefaultBasedirOptions, + write_introducer, +) from allmydata.scripts.default_nodedir import _default_nodedir from allmydata.util.assertutil import precondition from allmydata.util.encodingutil import listdir_unicode, argv_to_unicode, quote_local_unicode_path, get_io_encoding from allmydata.util import fileutil, i2p_provider, iputil, tor_provider + from wormhole import wormhole @@ -299,14 +313,16 @@ def write_node_config(c, config): def write_client_config(c, config): - # note, config can be a plain dict, it seems -- see - # test_configutil.py in test_create_client_config + introducer = config.get("introducer", None) + if introducer is not None: + write_introducer( + FilePath(config["basedir"]), + "default", + introducer, + ) + c.write("[client]\n") - c.write("# Which services should this client connect to?\n") - introducer = config.get("introducer", None) or "" - c.write("introducer.furl = %s\n" % introducer) c.write("helper.furl =\n") - c.write("#stats_gatherer.furl =\n") c.write("\n") c.write("# Encoding parameters this client will use for newly-uploaded files\n") c.write("# This can be changed at any time: the encoding is saved in\n") @@ -437,8 +453,11 @@ def create_node(config): print("Node created in %s" % quote_local_unicode_path(basedir), file=out) tahoe_cfg = quote_local_unicode_path(os.path.join(basedir, "tahoe.cfg")) + introducers_yaml = quote_local_unicode_path( + os.path.join(basedir, "private", "introducers.yaml"), + ) if not config.get("introducer", ""): - print(" Please set [client]introducer.furl= in %s!" 
% tahoe_cfg, file=out) + print(" Please add introducers to %s!" % (introducers_yaml,), file=out) print(" The node cannot connect to a grid without it.", file=out) if not config.get("nickname", ""): print(" Please set [node]nickname= in %s" % tahoe_cfg, file=out) @@ -478,10 +497,10 @@ def create_introducer(config): subCommands = [ - ["create-node", None, CreateNodeOptions, "Create a node that acts as a client, server or both."], - ["create-client", None, CreateClientOptions, "Create a client node (with storage initially disabled)."], - ["create-introducer", None, CreateIntroducerOptions, "Create an introducer node."], -] + ("create-node", None, CreateNodeOptions, "Create a node that acts as a client, server or both."), + ("create-client", None, CreateClientOptions, "Create a client node (with storage initially disabled)."), + ("create-introducer", None, CreateIntroducerOptions, "Create an introducer node."), +] # type: SubCommands dispatch = { "create-node": create_node, diff --git a/src/allmydata/scripts/debug.py b/src/allmydata/scripts/debug.py index e6d332444..550c37fde 100644 --- a/src/allmydata/scripts/debug.py +++ b/src/allmydata/scripts/debug.py @@ -1,5 +1,12 @@ from __future__ import print_function +try: + from allmydata.scripts.types_ import SubCommands +except ImportError: + pass + +from future.utils import bchr + # do not import any allmydata modules at this level. Do that from inside # individual functions instead. import struct, time, os, sys @@ -180,10 +187,10 @@ def dump_mutable_share(options): share_type = "unknown" f.seek(m.DATA_OFFSET) version = f.read(1) - if version == "\x00": + if version == b"\x00": # this slot contains an SMDF share share_type = "SDMF" - elif version == "\x01": + elif version == b"\x01": share_type = "MDMF" f.close() @@ -714,10 +721,10 @@ def describe_share(abs_sharefile, si_s, shnum_s, now, out): share_type = "unknown" f.seek(m.DATA_OFFSET) version = f.read(1) - if version == "\x00": + if version == b"\x00": # this slot contains an SMDF share share_type = "SDMF" - elif version == "\x01": + elif version == b"\x01": share_type = "MDMF" if share_type == "SDMF": @@ -905,7 +912,7 @@ def corrupt_share(options): f = open(fn, "rb+") f.seek(offset) d = f.read(1) - d = chr(ord(d) ^ 0x01) + d = bchr(ord(d) ^ 0x01) f.seek(offset) f.write(d) f.close() @@ -920,7 +927,7 @@ def corrupt_share(options): f.seek(m.DATA_OFFSET) data = f.read(2000) # make sure this slot contains an SMDF share - assert data[0] == "\x00", "non-SDMF mutable shares not supported" + assert data[0:1] == b"\x00", "non-SDMF mutable shares not supported" f.close() (version, ig_seqnum, ig_roothash, ig_IV, ig_k, ig_N, ig_segsize, @@ -1051,8 +1058,8 @@ def do_debug(options): subCommands = [ - ["debug", None, DebugCommand, "debug subcommands: use 'tahoe debug' for a list."], - ] + ("debug", None, DebugCommand, "debug subcommands: use 'tahoe debug' for a list."), + ] # type: SubCommands dispatch = { "debug": do_debug, diff --git a/src/allmydata/scripts/run_common.py b/src/allmydata/scripts/run_common.py deleted file mode 100644 index fa19c2076..000000000 --- a/src/allmydata/scripts/run_common.py +++ /dev/null @@ -1,263 +0,0 @@ -from __future__ import print_function - -import os, sys -from allmydata.scripts.common import BasedirOptions -from twisted.scripts import twistd -from twisted.python import usage -from twisted.python.reflect import namedAny -from twisted.internet.defer import maybeDeferred, fail -from twisted.application.service import Service - -from allmydata.scripts.default_nodedir import 
_default_nodedir -from allmydata.util import fileutil -from allmydata.node import read_config -from allmydata.util.encodingutil import listdir_unicode, quote_local_unicode_path -from allmydata.util.configutil import UnknownConfigError -from allmydata.util.deferredutil import HookMixin - - -def get_pidfile(basedir): - """ - Returns the path to the PID file. - :param basedir: the node's base directory - :returns: the path to the PID file - """ - return os.path.join(basedir, u"twistd.pid") - -def get_pid_from_pidfile(pidfile): - """ - Tries to read and return the PID stored in the node's PID file - (twistd.pid). - :param pidfile: try to read this PID file - :returns: A numeric PID on success, ``None`` if PID file absent or - inaccessible, ``-1`` if PID file invalid. - """ - try: - with open(pidfile, "r") as f: - pid = f.read() - except EnvironmentError: - return None - - try: - pid = int(pid) - except ValueError: - return -1 - - return pid - -def identify_node_type(basedir): - """ - :return unicode: None or one of: 'client', 'introducer', - 'key-generator' or 'stats-gatherer' - """ - tac = u'' - try: - for fn in listdir_unicode(basedir): - if fn.endswith(u".tac"): - tac = fn - break - except OSError: - return None - - for t in (u"client", u"introducer", u"key-generator", u"stats-gatherer"): - if t in tac: - return t - return None - - -class RunOptions(BasedirOptions): - optParameters = [ - ("basedir", "C", None, - "Specify which Tahoe base directory should be used." - " This has the same effect as the global --node-directory option." - " [default: %s]" % quote_local_unicode_path(_default_nodedir)), - ] - - def parseArgs(self, basedir=None, *twistd_args): - # This can't handle e.g. 'tahoe start --nodaemon', since '--nodaemon' - # looks like an option to the tahoe subcommand, not to twistd. So you - # can either use 'tahoe start' or 'tahoe start NODEDIR - # --TWISTD-OPTIONS'. Note that 'tahoe --node-directory=NODEDIR start - # --TWISTD-OPTIONS' also isn't allowed, unfortunately. - - BasedirOptions.parseArgs(self, basedir) - self.twistd_args = twistd_args - - def getSynopsis(self): - return ("Usage: %s [global-options] %s [options]" - " [NODEDIR [twistd-options]]" - % (self.command_name, self.subcommand_name)) - - def getUsage(self, width=None): - t = BasedirOptions.getUsage(self, width) + "\n" - twistd_options = str(MyTwistdConfig()).partition("\n")[2].partition("\n\n")[0] - t += twistd_options.replace("Options:", "twistd-options:", 1) - t += """ - -Note that if any twistd-options are used, NODEDIR must be specified explicitly -(not by default or using -C/--basedir or -d/--node-directory), and followed by -the twistd-options. -""" - return t - - -class MyTwistdConfig(twistd.ServerOptions): - subCommands = [("DaemonizeTahoeNode", None, usage.Options, "node")] - - stderr = sys.stderr - - -class DaemonizeTheRealService(Service, HookMixin): - """ - this HookMixin should really be a helper; our hooks: - - - 'running': triggered when startup has completed; it triggers - with None of successful or a Failure otherwise. 
- """ - stderr = sys.stderr - - def __init__(self, nodetype, basedir, options): - super(DaemonizeTheRealService, self).__init__() - self.nodetype = nodetype - self.basedir = basedir - # setup for HookMixin - self._hooks = { - "running": None, - } - self.stderr = options.parent.stderr - - def startService(self): - - def key_generator_removed(): - return fail(ValueError("key-generator support removed, see #2783")) - - def start(): - node_to_instance = { - u"client": lambda: maybeDeferred(namedAny("allmydata.client.create_client"), self.basedir), - u"introducer": lambda: maybeDeferred(namedAny("allmydata.introducer.server.create_introducer"), self.basedir), - u"stats-gatherer": lambda: maybeDeferred(namedAny("allmydata.stats.StatsGathererService"), read_config(self.basedir, None), self.basedir, verbose=True), - u"key-generator": key_generator_removed, - } - - try: - service_factory = node_to_instance[self.nodetype] - except KeyError: - raise ValueError("unknown nodetype %s" % self.nodetype) - - def handle_config_error(fail): - if fail.check(UnknownConfigError): - self.stderr.write("\nConfiguration error:\n{}\n\n".format(fail.value)) - else: - self.stderr.write("\nUnknown error\n") - fail.printTraceback(self.stderr) - reactor.stop() - - d = service_factory() - - def created(srv): - srv.setServiceParent(self.parent) - d.addCallback(created) - d.addErrback(handle_config_error) - d.addBoth(self._call_hook, 'running') - return d - - from twisted.internet import reactor - reactor.callWhenRunning(start) - - -class DaemonizeTahoeNodePlugin(object): - tapname = "tahoenode" - def __init__(self, nodetype, basedir): - self.nodetype = nodetype - self.basedir = basedir - - def makeService(self, so): - return DaemonizeTheRealService(self.nodetype, self.basedir, so) - - -def run(config): - """ - Runs a Tahoe-LAFS node in the foreground. - - Sets up the IService instance corresponding to the type of node - that's starting and uses Twisted's twistd runner to disconnect our - process from the terminal. - """ - out = config.stdout - err = config.stderr - basedir = config['basedir'] - quoted_basedir = quote_local_unicode_path(basedir) - print("'tahoe {}' in {}".format(config.subcommand_name, quoted_basedir), file=out) - if not os.path.isdir(basedir): - print("%s does not look like a directory at all" % quoted_basedir, file=err) - return 1 - nodetype = identify_node_type(basedir) - if not nodetype: - print("%s is not a recognizable node directory" % quoted_basedir, file=err) - return 1 - # Now prepare to turn into a twistd process. This os.chdir is the point - # of no return. 
- os.chdir(basedir) - twistd_args = [] - if (nodetype in (u"client", u"introducer") - and "--nodaemon" not in config.twistd_args - and "--syslog" not in config.twistd_args - and "--logfile" not in config.twistd_args): - fileutil.make_dirs(os.path.join(basedir, u"logs")) - twistd_args.extend(["--logfile", os.path.join("logs", "twistd.log")]) - twistd_args.extend(config.twistd_args) - twistd_args.append("DaemonizeTahoeNode") # point at our DaemonizeTahoeNodePlugin - - twistd_config = MyTwistdConfig() - twistd_config.stdout = out - twistd_config.stderr = err - try: - twistd_config.parseOptions(twistd_args) - except usage.error as ue: - # these arguments were unsuitable for 'twistd' - print(config, file=err) - print("tahoe %s: usage error from twistd: %s\n" % (config.subcommand_name, ue), file=err) - return 1 - twistd_config.loadedPlugins = {"DaemonizeTahoeNode": DaemonizeTahoeNodePlugin(nodetype, basedir)} - - # handle invalid PID file (twistd might not start otherwise) - pidfile = get_pidfile(basedir) - if get_pid_from_pidfile(pidfile) == -1: - print("found invalid PID file in %s - deleting it" % basedir, file=err) - os.remove(pidfile) - - # On Unix-like platforms: - # Unless --nodaemon was provided, the twistd.runApp() below spawns off a - # child process, and the parent calls os._exit(0), so there's no way for - # us to get control afterwards, even with 'except SystemExit'. If - # application setup fails (e.g. ImportError), runApp() will raise an - # exception. - # - # So if we wanted to do anything with the running child, we'd have two - # options: - # - # * fork first, and have our child wait for the runApp() child to get - # running. (note: just fork(). This is easier than fork+exec, since we - # don't have to get PATH and PYTHONPATH set up, since we're not - # starting a *different* process, just cloning a new instance of the - # current process) - # * or have the user run a separate command some time after this one - # exits. - # - # For Tahoe, we don't need to do anything with the child, so we can just - # let it exit. - # - # On Windows: - # twistd does not fork; it just runs in the current process whether or not - # --nodaemon is specified. (As on Unix, --nodaemon does have the side effect - # of causing us to log to stdout/stderr.) 
- - if "--nodaemon" in twistd_args or sys.platform == "win32": - verb = "running" - else: - verb = "starting" - - print("%s node in %s" % (verb, quoted_basedir), file=out) - twistd.runApp(twistd_config) - # we should only reach here if --nodaemon or equivalent was used - return 0 diff --git a/src/allmydata/scripts/runner.py b/src/allmydata/scripts/runner.py index cfd22694b..9a632a57d 100644 --- a/src/allmydata/scripts/runner.py +++ b/src/allmydata/scripts/runner.py @@ -4,14 +4,17 @@ import os, sys from six.moves import StringIO import six +try: + from allmydata.scripts.types_ import SubCommands +except ImportError: + pass + from twisted.python import usage from twisted.internet import defer, task, threads -from allmydata.version_checks import get_package_versions_string from allmydata.scripts.common import get_default_nodedir from allmydata.scripts import debug, create_node, cli, \ - stats_gatherer, admin, tahoe_daemonize, tahoe_start, \ - tahoe_stop, tahoe_restart, tahoe_run, tahoe_invite + admin, tahoe_run, tahoe_invite from allmydata.util.encodingutil import quote_output, quote_local_unicode_path, get_io_encoding from allmydata.util.eliotutil import ( opt_eliot_destination, @@ -19,6 +22,10 @@ from allmydata.util.eliotutil import ( eliot_logging_service, ) +from .. import ( + __full_version__, +) + _default_nodedir = get_default_nodedir() NODEDIR_HELP = ("Specify which Tahoe node directory should be used. The " @@ -34,20 +41,12 @@ if _default_nodedir: # XXX all this 'dispatch' stuff needs to be unified + fixed up _control_node_dispatch = { - "daemonize": tahoe_daemonize.daemonize, - "start": tahoe_start.start, "run": tahoe_run.run, - "stop": tahoe_stop.stop, - "restart": tahoe_restart.restart, } process_control_commands = [ - ["run", None, tahoe_run.RunOptions, "run a node without daemonizing"], - ["daemonize", None, tahoe_daemonize.DaemonizeOptions, "(deprecated) run a node in the background"], - ["start", None, tahoe_start.StartOptions, "(deprecated) start a node in the background and confirm it started"], - ["stop", None, tahoe_stop.StopOptions, "(deprecated) stop a node"], - ["restart", None, tahoe_restart.RestartOptions, "(deprecated) restart a node"], -] + ("run", None, tahoe_run.RunOptions, "run a node without daemonizing"), +] # type: SubCommands class Options(usage.Options): @@ -57,7 +56,6 @@ class Options(usage.Options): stderr = sys.stderr subCommands = ( create_node.subCommands - + stats_gatherer.subCommands + admin.subCommands + process_control_commands + debug.subCommands @@ -77,12 +75,10 @@ class Options(usage.Options): ] def opt_version(self): - print(get_package_versions_string(debug=True), file=self.stdout) + print(__full_version__, file=self.stdout) self.no_command_needed = True - def opt_version_and_path(self): - print(get_package_versions_string(show_paths=True, debug=True), file=self.stdout) - self.no_command_needed = True + opt_version_and_path = opt_version opt_eliot_destination = opt_eliot_destination opt_help_eliot_destinations = opt_help_eliot_destinations @@ -106,8 +102,8 @@ class Options(usage.Options): create_dispatch = {} -for module in (create_node, stats_gatherer): - create_dispatch.update(module.dispatch) +for module in (create_node,): + create_dispatch.update(module.dispatch) # type: ignore def parse_options(argv, config=None): if not config: diff --git a/src/allmydata/scripts/stats_gatherer.py b/src/allmydata/scripts/stats_gatherer.py deleted file mode 100644 index 26848a23c..000000000 --- a/src/allmydata/scripts/stats_gatherer.py +++ /dev/null @@ 
-1,103 +0,0 @@ -from __future__ import print_function - -import os - -# Python 2 compatibility -from future.utils import PY2 -if PY2: - from future.builtins import str # noqa: F401 - -from twisted.python import usage - -from allmydata.scripts.common import NoDefaultBasedirOptions -from allmydata.scripts.create_node import write_tac -from allmydata.util.assertutil import precondition -from allmydata.util.encodingutil import listdir_unicode, quote_output -from allmydata.util import fileutil, iputil - - -class CreateStatsGathererOptions(NoDefaultBasedirOptions): - subcommand_name = "create-stats-gatherer" - optParameters = [ - ("hostname", None, None, "Hostname of this machine, used to build location"), - ("location", None, None, "FURL connection hints, e.g. 'tcp:HOSTNAME:PORT'"), - ("port", None, None, "listening endpoint, e.g. 'tcp:PORT'"), - ] - def postOptions(self): - if self["hostname"] and (not self["location"]) and (not self["port"]): - pass - elif (not self["hostname"]) and self["location"] and self["port"]: - pass - else: - raise usage.UsageError("You must provide --hostname, or --location and --port.") - - description = """ - Create a "stats-gatherer" service, which is a standalone process that - collects and stores runtime statistics from many server nodes. This is a - tool for operations personnel to keep track of free disk space, server - load, and protocol activity, across a fleet of Tahoe storage servers. - - The "stats-gatherer" listens on a TCP port and publishes a Foolscap FURL - by writing it into a file named "stats_gatherer.furl". You must copy this - FURL into the servers' tahoe.cfg, as the [client] stats_gatherer.furl= - entry. Those servers will then establish a connection to the - stats-gatherer and publish their statistics on a periodic basis. The - gatherer writes a summary JSON file out to disk after each update. - - The stats-gatherer listens on a configurable port, and writes a - configurable hostname+port pair into the FURL that it publishes. There - are two configuration modes you can use. - - * In the first, you provide --hostname=, and the service chooses its own - TCP port number. If the host is named "example.org" and you provide - --hostname=example.org, the node will pick a port number (e.g. 12345) - and use location="tcp:example.org:12345" and port="tcp:12345". - - * In the second, you provide both --location= and --port=, and the - service will refrain from doing any allocation of its own. --location= - must be a Foolscap "FURL connection hint sequence", which is a - comma-separated list of "tcp:HOSTNAME:PORTNUM" strings. --port= must be - a Twisted server endpoint specification, which is generally - "tcp:PORTNUM". So, if your host is named "example.org" and you want to - use port 6789, you should provide --location=tcp:example.org:6789 and - --port=tcp:6789. You are responsible for making sure --location= and - --port= match each other. - """ - - -def create_stats_gatherer(config): - err = config.stderr - basedir = config['basedir'] - # This should always be called with an absolute Unicode basedir. - precondition(isinstance(basedir, str), basedir) - - if os.path.exists(basedir): - if listdir_unicode(basedir): - print("The base directory %s is not empty." 
% quote_output(basedir), file=err)
-            print("To avoid clobbering anything, I am going to quit now.", file=err)
-            print("Please use a different directory, or empty this one.", file=err)
-            return -1
-        # we're willing to use an empty directory
-    else:
-        os.mkdir(basedir)
-    write_tac(basedir, "stats-gatherer")
-    if config["hostname"]:
-        portnum = iputil.allocate_tcp_port()
-        location = "tcp:%s:%d" % (config["hostname"], portnum)
-        port = "tcp:%d" % portnum
-    else:
-        location = config["location"]
-        port = config["port"]
-    fileutil.write(os.path.join(basedir, "location"), location+"\n")
-    fileutil.write(os.path.join(basedir, "port"), port+"\n")
-    return 0
-
-subCommands = [
-    ["create-stats-gatherer", None, CreateStatsGathererOptions, "Create a stats-gatherer service."],
-]
-
-dispatch = {
-    "create-stats-gatherer": create_stats_gatherer,
-    }
-
-
diff --git a/src/allmydata/scripts/tahoe_add_alias.py b/src/allmydata/scripts/tahoe_add_alias.py
index ddef46db6..6f931556d 100644
--- a/src/allmydata/scripts/tahoe_add_alias.py
+++ b/src/allmydata/scripts/tahoe_add_alias.py
@@ -1,4 +1,5 @@
 from __future__ import print_function
+from __future__ import unicode_literals

 import os.path
 import codecs
@@ -10,7 +11,7 @@ from allmydata import uri
 from allmydata.scripts.common_http import do_http, check_http_error
 from allmydata.scripts.common import get_aliases
 from allmydata.util.fileutil import move_into_place
-from allmydata.util.encodingutil import unicode_to_output, quote_output
+from allmydata.util.encodingutil import quote_output, quote_output_u


 def add_line_to_aliasfile(aliasfile, alias, cap):
@@ -48,14 +49,13 @@ def add_alias(options):
     old_aliases = get_aliases(nodedir)
     if alias in old_aliases:
-        print("Alias %s already exists!" % quote_output(alias), file=stderr)
+        show_output(stderr, "Alias {alias} already exists!", alias=alias)
         return 1
     aliasfile = os.path.join(nodedir, "private", "aliases")
     cap = uri.from_string_dirnode(cap).to_string()

     add_line_to_aliasfile(aliasfile, alias, cap)
-
-    print("Alias %s added" % quote_output(alias), file=stdout)
+    show_output(stdout, "Alias {alias} added", alias=alias)
     return 0

 def create_alias(options):
@@ -75,7 +75,7 @@ def create_alias(options):
     old_aliases = get_aliases(nodedir)
     if alias in old_aliases:
-        print("Alias %s already exists!" % quote_output(alias), file=stderr)
+        show_output(stderr, "Alias {alias} already exists!", alias=alias)
         return 1

     aliasfile = os.path.join(nodedir, "private", "aliases")
@@ -93,11 +93,51 @@ def create_alias(options):
     # probably check for others..

     add_line_to_aliasfile(aliasfile, alias, new_uri)
-
-    print("Alias %s created" % (quote_output(alias),), file=stdout)
+    show_output(stdout, "Alias {alias} created", alias=alias)
     return 0


+def show_output(fp, template, **kwargs):
+    """
+    Print to just about anything.
+
+    :param fp: A file-like object to which to print.  This handles the case
+        where ``fp`` declares a supported encoding with the ``encoding``
+        attribute (eg sys.stdout on Python 3).  It handles the case where
+        ``fp`` declares no supported encoding via ``None`` for its
+        ``encoding`` attribute (eg sys.stdout on Python 2 when stdout is not a
+        tty).  It handles the case where ``fp`` declares an encoding that does
+        not support all of the characters in the output by forcing the
+        "namereplace" error handler.  It handles the case where there is no
+        ``encoding`` attribute at all (eg StringIO.StringIO) by writing
+        utf-8-encoded bytes.
+ """ + assert isinstance(template, unicode) + + # On Python 3 fp has an encoding attribute under all real usage. On + # Python 2, the encoding attribute is None if stdio is not a tty. The + # test suite often passes StringIO which has no such attribute. Make + # allowances for this until the test suite is fixed and Python 2 is no + # more. + try: + encoding = fp.encoding or "utf-8" + except AttributeError: + has_encoding = False + encoding = "utf-8" + else: + has_encoding = True + + output = template.format(**{ + k: quote_output_u(v, encoding=encoding) + for (k, v) + in kwargs.items() + }) + safe_output = output.encode(encoding, "namereplace") + if has_encoding: + safe_output = safe_output.decode(encoding) + print(safe_output, file=fp) + + def _get_alias_details(nodedir): aliases = get_aliases(nodedir) alias_names = sorted(aliases.keys()) @@ -111,34 +151,45 @@ def _get_alias_details(nodedir): return data +def _escape_format(t): + """ + _escape_format(t).format() == t + + :param unicode t: The text to escape. + """ + return t.replace("{", "{{").replace("}", "}}") + + def list_aliases(options): - nodedir = options['node-directory'] - stdout = options.stdout - stderr = options.stderr - - data = _get_alias_details(nodedir) - - max_width = max([len(quote_output(name)) for name in data.keys()] + [0]) - fmt = "%" + str(max_width) + "s: %s" - rc = 0 + """ + Show aliases that exist. + """ + data = _get_alias_details(options['node-directory']) if options['json']: - try: - # XXX why are we presuming utf-8 output? - print(json.dumps(data, indent=4).decode('utf-8'), file=stdout) - except (UnicodeEncodeError, UnicodeDecodeError): - print(json.dumps(data, indent=4), file=stderr) - rc = 1 + output = _escape_format(json.dumps(data, indent=4).decode("ascii")) else: - for name, details in data.items(): - dircap = details['readonly'] if options['readonly-uri'] else details['readwrite'] - try: - print(fmt % (unicode_to_output(name), unicode_to_output(dircap.decode('utf-8'))), file=stdout) - except (UnicodeEncodeError, UnicodeDecodeError): - print(fmt % (quote_output(name), quote_output(dircap)), file=stderr) - rc = 1 + def dircap(details): + return ( + details['readonly'] + if options['readonly-uri'] + else details['readwrite'] + ).decode("utf-8") - if rc == 1: - print("\nThis listing included aliases or caps that could not be converted to the terminal" \ - "\noutput encoding. These are shown using backslash escapes and in quotes.", file=stderr) - return rc + def format_dircap(name, details): + return fmt % (name, dircap(details)) + + max_width = max([len(quote_output(name)) for name in data.keys()] + [0]) + fmt = "%" + str(max_width) + "s: %s" + output = "\n".join(list( + format_dircap(name, details) + for name, details + in data.items() + )) + + if output: + # Show whatever we computed. Skip this if there is no output to avoid + # a spurious blank line. 
+ show_output(options.stdout, output) + + return 0 diff --git a/src/allmydata/scripts/tahoe_daemonize.py b/src/allmydata/scripts/tahoe_daemonize.py deleted file mode 100644 index ad2f92355..000000000 --- a/src/allmydata/scripts/tahoe_daemonize.py +++ /dev/null @@ -1,16 +0,0 @@ -from .run_common import ( - RunOptions as _RunOptions, - run, -) - -__all__ = [ - "DaemonizeOptions", - "daemonize", -] - -class DaemonizeOptions(_RunOptions): - subcommand_name = "daemonize" - -def daemonize(config): - print("'tahoe daemonize' is deprecated; see 'tahoe run'") - return run(config) diff --git a/src/allmydata/scripts/tahoe_invite.py b/src/allmydata/scripts/tahoe_invite.py index cca4216e3..884536ec2 100644 --- a/src/allmydata/scripts/tahoe_invite.py +++ b/src/allmydata/scripts/tahoe_invite.py @@ -1,16 +1,20 @@ from __future__ import print_function import json -from os.path import join + +try: + from allmydata.scripts.types_ import SubCommands +except ImportError: + pass from twisted.python import usage from twisted.internet import defer, reactor from wormhole import wormhole -from allmydata.util import configutil from allmydata.util.encodingutil import argv_to_abspath from allmydata.scripts.common import get_default_nodedir, get_introducer_furl +from allmydata.node import read_config class InviteOptions(usage.Options): @@ -77,7 +81,7 @@ def invite(options): basedir = argv_to_abspath(options.parent['node-directory']) else: basedir = get_default_nodedir() - config = configutil.get_config(join(basedir, 'tahoe.cfg')) + config = read_config(basedir, u"") out = options.stdout err = options.stderr @@ -104,7 +108,7 @@ def invite(options): subCommands = [ ("invite", None, InviteOptions, "Invite a new node to this grid"), -] +] # type: SubCommands dispatch = { "invite": invite, diff --git a/src/allmydata/scripts/tahoe_restart.py b/src/allmydata/scripts/tahoe_restart.py deleted file mode 100644 index 339db862f..000000000 --- a/src/allmydata/scripts/tahoe_restart.py +++ /dev/null @@ -1,21 +0,0 @@ -from __future__ import print_function - -from .tahoe_start import StartOptions, start -from .tahoe_stop import stop, COULD_NOT_STOP - - -class RestartOptions(StartOptions): - subcommand_name = "restart" - - -def restart(config): - print("'tahoe restart' is deprecated; see 'tahoe run'") - stderr = config.stderr - rc = stop(config) - if rc == COULD_NOT_STOP: - print("ignoring couldn't-stop", file=stderr) - rc = 0 - if rc: - print("not restarting", file=stderr) - return rc - return start(config) diff --git a/src/allmydata/scripts/tahoe_run.py b/src/allmydata/scripts/tahoe_run.py index 0a921cc71..bc4ba27d1 100644 --- a/src/allmydata/scripts/tahoe_run.py +++ b/src/allmydata/scripts/tahoe_run.py @@ -1,15 +1,233 @@ -from .run_common import ( - RunOptions as _RunOptions, - run, -) +from __future__ import print_function __all__ = [ "RunOptions", "run", ] -class RunOptions(_RunOptions): +import os, sys +from allmydata.scripts.common import BasedirOptions +from twisted.scripts import twistd +from twisted.python import usage +from twisted.python.reflect import namedAny +from twisted.internet.defer import maybeDeferred +from twisted.application.service import Service + +from allmydata.scripts.default_nodedir import _default_nodedir +from allmydata.util.encodingutil import listdir_unicode, quote_local_unicode_path +from allmydata.util.configutil import UnknownConfigError +from allmydata.util.deferredutil import HookMixin + +from allmydata.node import ( + PortAssignmentRequired, + PrivacyError, +) + +def get_pidfile(basedir): + """ + 
Returns the path to the PID file.
+    :param basedir: the node's base directory
+    :returns: the path to the PID file
+    """
+    return os.path.join(basedir, u"twistd.pid")
+
+def get_pid_from_pidfile(pidfile):
+    """
+    Tries to read and return the PID stored in the node's PID file
+    (twistd.pid).
+    :param pidfile: try to read this PID file
+    :returns: A numeric PID on success, ``None`` if PID file absent or
+        inaccessible, ``-1`` if PID file invalid.
+    """
+    try:
+        with open(pidfile, "r") as f:
+            pid = f.read()
+    except EnvironmentError:
+        return None
+
+    try:
+        pid = int(pid)
+    except ValueError:
+        return -1
+
+    return pid
+
+def identify_node_type(basedir):
+    """
+    :return unicode: None or one of: 'client' or 'introducer'.
+    """
+    tac = u''
+    try:
+        for fn in listdir_unicode(basedir):
+            if fn.endswith(u".tac"):
+                tac = fn
+                break
+    except OSError:
+        return None
+
+    for t in (u"client", u"introducer"):
+        if t in tac:
+            return t
+    return None
+
+
+class RunOptions(BasedirOptions):
     subcommand_name = "run"

-    def postOptions(self):
-        self.twistd_args += ("--nodaemon",)
+    optParameters = [
+        ("basedir", "C", None,
+         "Specify which Tahoe base directory should be used."
+         " This has the same effect as the global --node-directory option."
+         " [default: %s]" % quote_local_unicode_path(_default_nodedir)),
+    ]
+
+    def parseArgs(self, basedir=None, *twistd_args):
+        # This can't handle e.g. 'tahoe run --reactor=foo', since
+        # '--reactor=foo' looks like an option to the tahoe subcommand, not to
+        # twistd. So you can either use 'tahoe run' or 'tahoe run NODEDIR
+        # --TWISTD-OPTIONS'. Note that 'tahoe --node-directory=NODEDIR run
+        # --TWISTD-OPTIONS' also isn't allowed, unfortunately.
+
+        BasedirOptions.parseArgs(self, basedir)
+        self.twistd_args = twistd_args
+
+    def getSynopsis(self):
+        return ("Usage: %s [global-options] %s [options]"
+                " [NODEDIR [twistd-options]]"
+                % (self.command_name, self.subcommand_name))
+
+    def getUsage(self, width=None):
+        t = BasedirOptions.getUsage(self, width) + "\n"
+        twistd_options = str(MyTwistdConfig()).partition("\n")[2].partition("\n\n")[0]
+        t += twistd_options.replace("Options:", "twistd-options:", 1)
+        t += """
+
+Note that if any twistd-options are used, NODEDIR must be specified explicitly
+(not by default or using -C/--basedir or -d/--node-directory), and followed by
+the twistd-options.
+"""
+        return t
+
+
+class MyTwistdConfig(twistd.ServerOptions):
+    subCommands = [("DaemonizeTahoeNode", None, usage.Options, "node")]
+
+    stderr = sys.stderr
+
+
+class DaemonizeTheRealService(Service, HookMixin):
+    """
+    this HookMixin should really be a helper; our hooks:
+
+    - 'running': triggered when startup has completed; it triggers
+      with None if successful or a Failure otherwise.
+ """ + stderr = sys.stderr + + def __init__(self, nodetype, basedir, options): + super(DaemonizeTheRealService, self).__init__() + self.nodetype = nodetype + self.basedir = basedir + # setup for HookMixin + self._hooks = { + "running": None, + } + self.stderr = options.parent.stderr + + def startService(self): + + def start(): + node_to_instance = { + u"client": lambda: maybeDeferred(namedAny("allmydata.client.create_client"), self.basedir), + u"introducer": lambda: maybeDeferred(namedAny("allmydata.introducer.server.create_introducer"), self.basedir), + } + + try: + service_factory = node_to_instance[self.nodetype] + except KeyError: + raise ValueError("unknown nodetype %s" % self.nodetype) + + def handle_config_error(reason): + if reason.check(UnknownConfigError): + self.stderr.write("\nConfiguration error:\n{}\n\n".format(reason.value)) + elif reason.check(PortAssignmentRequired): + self.stderr.write("\ntub.port cannot be 0: you must choose.\n\n") + elif reason.check(PrivacyError): + self.stderr.write("\n{}\n\n".format(reason.value)) + else: + self.stderr.write("\nUnknown error\n") + reason.printTraceback(self.stderr) + reactor.stop() + + d = service_factory() + + def created(srv): + srv.setServiceParent(self.parent) + d.addCallback(created) + d.addErrback(handle_config_error) + d.addBoth(self._call_hook, 'running') + return d + + from twisted.internet import reactor + reactor.callWhenRunning(start) + + +class DaemonizeTahoeNodePlugin(object): + tapname = "tahoenode" + def __init__(self, nodetype, basedir): + self.nodetype = nodetype + self.basedir = basedir + + def makeService(self, so): + return DaemonizeTheRealService(self.nodetype, self.basedir, so) + + +def run(config): + """ + Runs a Tahoe-LAFS node in the foreground. + + Sets up the IService instance corresponding to the type of node + that's starting and uses Twisted's twistd runner to disconnect our + process from the terminal. + """ + out = config.stdout + err = config.stderr + basedir = config['basedir'] + quoted_basedir = quote_local_unicode_path(basedir) + print("'tahoe {}' in {}".format(config.subcommand_name, quoted_basedir), file=out) + if not os.path.isdir(basedir): + print("%s does not look like a directory at all" % quoted_basedir, file=err) + return 1 + nodetype = identify_node_type(basedir) + if not nodetype: + print("%s is not a recognizable node directory" % quoted_basedir, file=err) + return 1 + # Now prepare to turn into a twistd process. This os.chdir is the point + # of no return. + os.chdir(basedir) + twistd_args = ["--nodaemon"] + twistd_args.extend(config.twistd_args) + twistd_args.append("DaemonizeTahoeNode") # point at our DaemonizeTahoeNodePlugin + + twistd_config = MyTwistdConfig() + twistd_config.stdout = out + twistd_config.stderr = err + try: + twistd_config.parseOptions(twistd_args) + except usage.error as ue: + # these arguments were unsuitable for 'twistd' + print(config, file=err) + print("tahoe %s: usage error from twistd: %s\n" % (config.subcommand_name, ue), file=err) + return 1 + twistd_config.loadedPlugins = {"DaemonizeTahoeNode": DaemonizeTahoeNodePlugin(nodetype, basedir)} + + # handle invalid PID file (twistd might not start otherwise) + pidfile = get_pidfile(basedir) + if get_pid_from_pidfile(pidfile) == -1: + print("found invalid PID file in %s - deleting it" % basedir, file=err) + os.remove(pidfile) + + # We always pass --nodaemon so twistd.runApp does not daemonize. 
+ print("running node in %s" % (quoted_basedir,), file=out) + twistd.runApp(twistd_config) + return 0 diff --git a/src/allmydata/scripts/tahoe_start.py b/src/allmydata/scripts/tahoe_start.py deleted file mode 100644 index bc076d1b7..000000000 --- a/src/allmydata/scripts/tahoe_start.py +++ /dev/null @@ -1,152 +0,0 @@ -from __future__ import print_function - -import os -import io -import sys -import time -import subprocess -from os.path import join, exists - -from allmydata.scripts.common import BasedirOptions -from allmydata.scripts.default_nodedir import _default_nodedir -from allmydata.util.encodingutil import quote_local_unicode_path - -from .run_common import MyTwistdConfig, identify_node_type - - -class StartOptions(BasedirOptions): - subcommand_name = "start" - optParameters = [ - ("basedir", "C", None, - "Specify which Tahoe base directory should be used." - " This has the same effect as the global --node-directory option." - " [default: %s]" % quote_local_unicode_path(_default_nodedir)), - ] - - def parseArgs(self, basedir=None, *twistd_args): - # This can't handle e.g. 'tahoe start --nodaemon', since '--nodaemon' - # looks like an option to the tahoe subcommand, not to twistd. So you - # can either use 'tahoe start' or 'tahoe start NODEDIR - # --TWISTD-OPTIONS'. Note that 'tahoe --node-directory=NODEDIR start - # --TWISTD-OPTIONS' also isn't allowed, unfortunately. - - BasedirOptions.parseArgs(self, basedir) - self.twistd_args = twistd_args - - def getSynopsis(self): - return ("Usage: %s [global-options] %s [options]" - " [NODEDIR [twistd-options]]" - % (self.command_name, self.subcommand_name)) - - def getUsage(self, width=None): - t = BasedirOptions.getUsage(self, width) + "\n" - twistd_options = str(MyTwistdConfig()).partition("\n")[2].partition("\n\n")[0] - t += twistd_options.replace("Options:", "twistd-options:", 1) - t += """ - -Note that if any twistd-options are used, NODEDIR must be specified explicitly -(not by default or using -C/--basedir or -d/--node-directory), and followed by -the twistd-options. -""" - return t - - -def start(config): - """ - Start a tahoe node (daemonize it and confirm startup) - - We run 'tahoe daemonize' with all the options given to 'tahoe - start' and then watch the log files for the correct text to appear - (e.g. "introducer started"). If that doesn't happen within a few - seconds, an error is printed along with all collected logs. - """ - print("'tahoe start' is deprecated; see 'tahoe run'") - out = config.stdout - err = config.stderr - basedir = config['basedir'] - quoted_basedir = quote_local_unicode_path(basedir) - print("STARTING", quoted_basedir, file=out) - if not os.path.isdir(basedir): - print("%s does not look like a directory at all" % quoted_basedir, file=err) - return 1 - nodetype = identify_node_type(basedir) - if not nodetype: - print("%s is not a recognizable node directory" % quoted_basedir, file=err) - return 1 - - # "tahoe start" attempts to monitor the logs for successful - # startup -- but we can't always do that. 
- - can_monitor_logs = False - if (nodetype in (u"client", u"introducer") - and "--nodaemon" not in config.twistd_args - and "--syslog" not in config.twistd_args - and "--logfile" not in config.twistd_args): - can_monitor_logs = True - - if "--help" in config.twistd_args: - return 0 - - if not can_monitor_logs: - print("Custom logging options; can't monitor logs for proper startup messages", file=out) - return 1 - - # before we spawn tahoe, we check if "the log file" exists or not, - # and if so remember how big it is -- essentially, we're doing - # "tail -f" to see what "this" incarnation of "tahoe daemonize" - # spews forth. - starting_offset = 0 - log_fname = join(basedir, 'logs', 'twistd.log') - if exists(log_fname): - with open(log_fname, 'r') as f: - f.seek(0, 2) - starting_offset = f.tell() - - # spawn tahoe. Note that since this daemonizes, it should return - # "pretty fast" and with a zero return-code, or else something - # Very Bad has happened. - try: - args = [sys.executable] if not getattr(sys, 'frozen', False) else [] - for i, arg in enumerate(sys.argv): - if arg in ['start', 'restart']: - args.append('daemonize') - else: - args.append(arg) - subprocess.check_call(args) - except subprocess.CalledProcessError as e: - return e.returncode - - # now, we have to determine if tahoe has actually started up - # successfully or not. so, we start sucking up log files and - # looking for "the magic string", which depends on the node type. - - magic_string = u'{} running'.format(nodetype) - with io.open(log_fname, 'r') as f: - f.seek(starting_offset) - - collected = u'' - overall_start = time.time() - while time.time() - overall_start < 60: - this_start = time.time() - while time.time() - this_start < 5: - collected += f.read() - if magic_string in collected: - if not config.parent['quiet']: - print("Node has started successfully", file=out) - return 0 - if 'Traceback ' in collected: - print("Error starting node; see '{}' for more:\n\n{}".format( - log_fname, - collected, - ), file=err) - return 1 - time.sleep(0.1) - print("Still waiting up to {}s for node startup".format( - 60 - int(time.time() - overall_start) - ), file=out) - - print("Something has gone wrong starting the node.", file=out) - print("Logs are available in '{}'".format(log_fname), file=out) - print("Collected for this run:", file=out) - print(collected, file=out) - return 1 diff --git a/src/allmydata/scripts/tahoe_stop.py b/src/allmydata/scripts/tahoe_stop.py deleted file mode 100644 index 28c0f8131..000000000 --- a/src/allmydata/scripts/tahoe_stop.py +++ /dev/null @@ -1,85 +0,0 @@ -from __future__ import print_function - -import os -import time -import signal - -from allmydata.scripts.common import BasedirOptions -from allmydata.util.encodingutil import quote_local_unicode_path -from .run_common import get_pidfile, get_pid_from_pidfile - -COULD_NOT_STOP = 2 - - -class StopOptions(BasedirOptions): - def parseArgs(self, basedir=None): - BasedirOptions.parseArgs(self, basedir) - - def getSynopsis(self): - return ("Usage: %s [global-options] stop [options] [NODEDIR]" - % (self.command_name,)) - - -def stop(config): - print("'tahoe stop' is deprecated; see 'tahoe run'") - out = config.stdout - err = config.stderr - basedir = config['basedir'] - quoted_basedir = quote_local_unicode_path(basedir) - print("STOPPING", quoted_basedir, file=out) - pidfile = get_pidfile(basedir) - pid = get_pid_from_pidfile(pidfile) - if pid is None: - print("%s does not look like a running node directory (no twistd.pid)" % quoted_basedir, 
file=err)
-        # we define rc=2 to mean "nothing is running, but it wasn't me who
-        # stopped it"
-        return COULD_NOT_STOP
-    elif pid == -1:
-        print("%s contains an invalid PID file" % basedir, file=err)
-        # we define rc=2 to mean "nothing is running, but it wasn't me who
-        # stopped it"
-        return COULD_NOT_STOP
-
-    # kill it hard (SIGKILL), delete the twistd.pid file, then wait for the
-    # process itself to go away. If it hasn't gone away after 20 seconds, warn
-    # the user but keep waiting until they give up.
-    try:
-        os.kill(pid, signal.SIGKILL)
-    except OSError as oserr:
-        if oserr.errno == 3:
-            print(oserr.strerror)
-            # the process didn't exist, so wipe the pid file
-            os.remove(pidfile)
-            return COULD_NOT_STOP
-        else:
-            raise
-    try:
-        os.remove(pidfile)
-    except EnvironmentError:
-        pass
-    start = time.time()
-    time.sleep(0.1)
-    wait = 40
-    first_time = True
-    while True:
-        # poll once per second until we see the process is no longer running
-        try:
-            os.kill(pid, 0)
-        except OSError:
-            print("process %d is dead" % pid, file=out)
-            return
-        wait -= 1
-        if wait < 0:
-            if first_time:
-                print("It looks like pid %d is still running "
-                      "after %d seconds" % (pid,
-                                            (time.time() - start)), file=err)
-                print("I will keep watching it until you interrupt me.", file=err)
-                wait = 10
-                first_time = False
-            else:
-                print("pid %d still running after %d seconds" % \
-                      (pid, (time.time() - start)), file=err)
-                wait = 10
-        time.sleep(1)
-    # control never reaches here: no timeout
diff --git a/src/allmydata/scripts/types_.py b/src/allmydata/scripts/types_.py
new file mode 100644
index 000000000..3937cb803
--- /dev/null
+++ b/src/allmydata/scripts/types_.py
@@ -0,0 +1,12 @@
+from typing import List, Tuple, Type, Sequence, Any
+from allmydata.scripts.common import BaseOptions
+
+
+# Historically, subcommands were implemented as lists, but due to a
+# [design constraint in mypy](https://stackoverflow.com/a/52559625/70170),
+# a Tuple is required.
+SubCommand = Tuple[str, None, Type[BaseOptions], str]
+
+SubCommands = List[SubCommand]
+
+Parameters = List[Sequence[Any]]
diff --git a/src/allmydata/stats.py b/src/allmydata/stats.py
index f669b0861..91205a93c 100644
--- a/src/allmydata/stats.py
+++ b/src/allmydata/stats.py
@@ -1,84 +1,29 @@
+"""
+Ported to Python 3.
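A hedged sketch (separate from the patch) of why types_.py uses Tuple rather than List for SubCommand: mypy unifies the element types of a list, so a heterogeneous ["name", None, OptionsClass, "help"] entry loses the per-position types that Tuple[str, None, Type[...], str] preserves. DemoOptions is an illustrative stand-in for BaseOptions.

from typing import List, Tuple, Type

class DemoOptions(object):
    # Stand-in for BaseOptions; illustration only.
    pass

SubCommand = Tuple[str, None, Type[DemoOptions], str]
SubCommands = List[SubCommand]

subCommands = [
    ("demo", None, DemoOptions, "An illustrative subcommand."),
]  # type: SubCommands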
+""" +from __future__ import absolute_import +from __future__ import division from __future__ import print_function +from __future__ import unicode_literals -import json -import os -import pprint -import time -from collections import deque - -# Python 2 compatibility from future.utils import PY2 if PY2: - from future.builtins import str # noqa: F401 + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 + +import time -from twisted.internet import reactor from twisted.application import service from twisted.application.internet import TimerService from zope.interface import implementer -from foolscap.api import eventually, DeadReferenceError, Referenceable, Tub +from foolscap.api import eventually from allmydata.util import log -from allmydata.util.encodingutil import quote_local_unicode_path -from allmydata.interfaces import RIStatsProvider, RIStatsGatherer, IStatsProducer - -@implementer(IStatsProducer) -class LoadMonitor(service.MultiService): - - loop_interval = 1 - num_samples = 60 - - def __init__(self, provider, warn_if_delay_exceeds=1): - service.MultiService.__init__(self) - self.provider = provider - self.warn_if_delay_exceeds = warn_if_delay_exceeds - self.started = False - self.last = None - self.stats = deque() - self.timer = None - - def startService(self): - if not self.started: - self.started = True - self.timer = reactor.callLater(self.loop_interval, self.loop) - service.MultiService.startService(self) - - def stopService(self): - self.started = False - if self.timer: - self.timer.cancel() - self.timer = None - return service.MultiService.stopService(self) - - def loop(self): - self.timer = None - if not self.started: - return - now = time.time() - if self.last is not None: - delay = now - self.last - self.loop_interval - if delay > self.warn_if_delay_exceeds: - log.msg(format='excessive reactor delay (%ss)', args=(delay,), - level=log.UNUSUAL) - self.stats.append(delay) - while len(self.stats) > self.num_samples: - self.stats.popleft() - - self.last = now - self.timer = reactor.callLater(self.loop_interval, self.loop) - - def get_stats(self): - if self.stats: - avg = sum(self.stats) / len(self.stats) - m_x = max(self.stats) - else: - avg = m_x = 0 - return { 'load_monitor.avg_load': avg, - 'load_monitor.max_load': m_x, } +from allmydata.interfaces import IStatsProducer @implementer(IStatsProducer) class CPUUsageMonitor(service.MultiService): HISTORY_LENGTH = 15 - POLL_INTERVAL = 60 + POLL_INTERVAL = 60 # type: float def __init__(self): service.MultiService.__init__(self) @@ -128,37 +73,18 @@ class CPUUsageMonitor(service.MultiService): return s -@implementer(RIStatsProvider) -class StatsProvider(Referenceable, service.MultiService): +class StatsProvider(service.MultiService): - def __init__(self, node, gatherer_furl): + def __init__(self, node): service.MultiService.__init__(self) self.node = node - self.gatherer_furl = gatherer_furl # might be None self.counters = {} self.stats_producers = [] - - # only run the LoadMonitor (which submits a timer every second) if - # there is a gatherer who is going to be paying attention. 
Our stats - # are visible through HTTP even without a gatherer, so run the rest - # of the stats (including the once-per-minute CPUUsageMonitor) - if gatherer_furl: - self.load_monitor = LoadMonitor(self) - self.load_monitor.setServiceParent(self) - self.register_producer(self.load_monitor) - self.cpu_monitor = CPUUsageMonitor() self.cpu_monitor.setServiceParent(self) self.register_producer(self.cpu_monitor) - def startService(self): - if self.node and self.gatherer_furl: - nickname_utf8 = self.node.nickname.encode("utf-8") - self.node.tub.connectTo(self.gatherer_furl, - self._connected, nickname_utf8) - service.MultiService.startService(self) - def count(self, name, delta=1): if isinstance(name, str): name = name.encode("utf-8") @@ -175,155 +101,3 @@ class StatsProvider(Referenceable, service.MultiService): ret = { 'counters': self.counters, 'stats': stats } log.msg(format='get_stats() -> %(stats)s', stats=ret, level=log.NOISY) return ret - - def remote_get_stats(self): - # The remote API expects keys to be bytes: - def to_bytes(d): - result = {} - for (k, v) in d.items(): - if isinstance(k, str): - k = k.encode("utf-8") - result[k] = v - return result - - stats = self.get_stats() - return {b"counters": to_bytes(stats["counters"]), - b"stats": to_bytes(stats["stats"])} - - def _connected(self, gatherer, nickname): - gatherer.callRemoteOnly('provide', self, nickname or '') - - -@implementer(RIStatsGatherer) -class StatsGatherer(Referenceable, service.MultiService): - - poll_interval = 60 - - def __init__(self, basedir): - service.MultiService.__init__(self) - self.basedir = basedir - - self.clients = {} - self.nicknames = {} - - self.timer = TimerService(self.poll_interval, self.poll) - self.timer.setServiceParent(self) - - def get_tubid(self, rref): - return rref.getRemoteTubID() - - def remote_provide(self, provider, nickname): - tubid = self.get_tubid(provider) - if tubid == '': - print("WARNING: failed to get tubid for %s (%s)" % (provider, nickname)) - # don't add to clients to poll (polluting data) don't care about disconnect - return - self.clients[tubid] = provider - self.nicknames[tubid] = nickname - - def poll(self): - for tubid,client in self.clients.items(): - nickname = self.nicknames.get(tubid) - d = client.callRemote('get_stats') - d.addCallbacks(self.got_stats, self.lost_client, - callbackArgs=(tubid, nickname), - errbackArgs=(tubid,)) - d.addErrback(self.log_client_error, tubid) - - def lost_client(self, f, tubid): - # this is called lazily, when a get_stats request fails - del self.clients[tubid] - del self.nicknames[tubid] - f.trap(DeadReferenceError) - - def log_client_error(self, f, tubid): - log.msg("StatsGatherer: error in get_stats(), peerid=%s" % tubid, - level=log.UNUSUAL, failure=f) - - def got_stats(self, stats, tubid, nickname): - raise NotImplementedError() - -class StdOutStatsGatherer(StatsGatherer): - verbose = True - def remote_provide(self, provider, nickname): - tubid = self.get_tubid(provider) - if self.verbose: - print('connect "%s" [%s]' % (nickname, tubid)) - provider.notifyOnDisconnect(self.announce_lost_client, tubid) - StatsGatherer.remote_provide(self, provider, nickname) - - def announce_lost_client(self, tubid): - print('disconnect "%s" [%s]' % (self.nicknames[tubid], tubid)) - - def got_stats(self, stats, tubid, nickname): - print('"%s" [%s]:' % (nickname, tubid)) - pprint.pprint(stats) - -class JSONStatsGatherer(StdOutStatsGatherer): - # inherit from StdOutStatsGatherer for connect/disconnect notifications - - def __init__(self, basedir=u".", 
verbose=True): - self.verbose = verbose - StatsGatherer.__init__(self, basedir) - self.jsonfile = os.path.join(basedir, "stats.json") - - if os.path.exists(self.jsonfile): - try: - with open(self.jsonfile, 'rb') as f: - self.gathered_stats = json.load(f) - except Exception: - print("Error while attempting to load stats file %s.\n" - "You may need to restore this file from a backup," - " or delete it if no backup is available.\n" % - quote_local_unicode_path(self.jsonfile)) - raise - else: - self.gathered_stats = {} - - def got_stats(self, stats, tubid, nickname): - s = self.gathered_stats.setdefault(tubid, {}) - s['timestamp'] = time.time() - s['nickname'] = nickname - s['stats'] = stats - self.dump_json() - - def dump_json(self): - tmp = "%s.tmp" % (self.jsonfile,) - with open(tmp, 'wb') as f: - json.dump(self.gathered_stats, f) - if os.path.exists(self.jsonfile): - os.unlink(self.jsonfile) - os.rename(tmp, self.jsonfile) - -class StatsGathererService(service.MultiService): - furl_file = "stats_gatherer.furl" - - def __init__(self, basedir=".", verbose=False): - service.MultiService.__init__(self) - self.basedir = basedir - self.tub = Tub(certFile=os.path.join(self.basedir, - "stats_gatherer.pem")) - self.tub.setServiceParent(self) - self.tub.setOption("logLocalFailures", True) - self.tub.setOption("logRemoteFailures", True) - self.tub.setOption("expose-remote-exception-types", False) - - self.stats_gatherer = JSONStatsGatherer(self.basedir, verbose) - self.stats_gatherer.setServiceParent(self) - - try: - with open(os.path.join(self.basedir, "location")) as f: - location = f.read().strip() - except EnvironmentError: - raise ValueError("Unable to find 'location' in BASEDIR, please rebuild your stats-gatherer") - try: - with open(os.path.join(self.basedir, "port")) as f: - port = f.read().strip() - except EnvironmentError: - raise ValueError("Unable to find 'port' in BASEDIR, please rebuild your stats-gatherer") - - self.tub.listenOn(port) - self.tub.setLocation(location) - ff = os.path.join(self.basedir, self.furl_file) - self.gatherer_furl = self.tub.registerReference(self.stats_gatherer, - furlFile=ff) diff --git a/src/allmydata/storage/crawler.py b/src/allmydata/storage/crawler.py index 24042c38b..f13f7cb99 100644 --- a/src/allmydata/storage/crawler.py +++ b/src/allmydata/storage/crawler.py @@ -19,7 +19,7 @@ import os, time, struct try: import cPickle as pickle except ImportError: - import pickle + import pickle # type: ignore from twisted.internet import reactor from twisted.application import service from allmydata.storage.common import si_b2a diff --git a/src/allmydata/storage/immutable.py b/src/allmydata/storage/immutable.py index 778c0ddf8..4b60d79f1 100644 --- a/src/allmydata/storage/immutable.py +++ b/src/allmydata/storage/immutable.py @@ -202,7 +202,7 @@ class ShareFile(object): @implementer(RIBucketWriter) -class BucketWriter(Referenceable): +class BucketWriter(Referenceable): # type: ignore # warner/foolscap#78 def __init__(self, ss, incominghome, finalhome, max_size, lease_info, canary): self.ss = ss @@ -301,7 +301,7 @@ class BucketWriter(Referenceable): @implementer(RIBucketReader) -class BucketReader(Referenceable): +class BucketReader(Referenceable): # type: ignore # warner/foolscap#78 def __init__(self, ss, sharefname, storage_index=None, shnum=None): self.ss = ss diff --git a/src/allmydata/storage/server.py b/src/allmydata/storage/server.py index 8a8138f26..5f2ef3ac2 100644 --- a/src/allmydata/storage/server.py +++ b/src/allmydata/storage/server.py @@ -581,7 +581,7 @@ 
class StorageServer(service.MultiService, Referenceable): for share in six.viewvalues(shares): share.add_or_renew_lease(lease_info) - def slot_testv_and_readv_and_writev( + def slot_testv_and_readv_and_writev( # type: ignore # warner/foolscap#78 self, storage_index, secrets, diff --git a/src/allmydata/storage_client.py b/src/allmydata/storage_client.py index df1e4573e..eb1572dcb 100644 --- a/src/allmydata/storage_client.py +++ b/src/allmydata/storage_client.py @@ -2,7 +2,13 @@ """ I contain the client-side code which speaks to storage servers, in particular the foolscap-based server implemented in src/allmydata/storage/*.py . + +Ported to Python 3. """ +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals # roadmap: # @@ -28,15 +34,16 @@ the foolscap-based server implemented in src/allmydata/storage/*.py . # # 6: implement other sorts of IStorageClient classes: S3, etc -from past.builtins import unicode +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 + import re, time, hashlib -try: - from ConfigParser import ( - NoSectionError, - ) -except ImportError: - from configparser import NoSectionError + +# On Python 2 this will be the backport. +from configparser import NoSectionError + import attr from zope.interface import ( Attribute, @@ -67,6 +74,8 @@ from allmydata.util.assertutil import precondition from allmydata.util.observer import ObserverList from allmydata.util.rrefutil import add_version_to_remote_reference from allmydata.util.hashutil import permute_server_hash +from allmydata.util.dictutil import BytesKeyDict, UnicodeKeyDict + # who is responsible for de-duplication? # both? @@ -94,7 +103,7 @@ class StorageClientConfig(object): decreasing preference. See the *[client]peers.preferred* documentation for details. - :ivar dict[unicode, dict[bytes, bytes]] storage_plugins: A mapping from + :ivar dict[unicode, dict[unicode, unicode]] storage_plugins: A mapping from names of ``IFoolscapStoragePlugin`` configured in *tahoe.cfg* to the respective configuration. """ @@ -109,24 +118,24 @@ class StorageClientConfig(object): :param _Config config: The loaded Tahoe-LAFS node configuration. """ - ps = config.get_config("client", "peers.preferred", b"").split(b",") - preferred_peers = tuple([p.strip() for p in ps if p != b""]) + ps = config.get_config("client", "peers.preferred", "").split(",") + preferred_peers = tuple([p.strip() for p in ps if p != ""]) enabled_storage_plugins = ( name.strip() for name in config.get_config( - b"client", - b"storage.plugins", - b"", - ).decode("utf-8").split(u",") + "client", + "storage.plugins", + "", + ).split(u",") if name.strip() ) storage_plugins = {} for plugin_name in enabled_storage_plugins: try: - plugin_config = config.items(b"storageclient.plugins." + plugin_name) + plugin_config = config.items("storageclient.plugins." + plugin_name) except NoSectionError: plugin_config = [] storage_plugins[plugin_name] = dict(plugin_config) @@ -145,6 +154,9 @@ class StorageFarmBroker(service.MultiService): I'm also responsible for subscribing to the IntroducerClient to find out about new servers as they are announced by the Introducer. + :ivar _tub_maker: A one-argument callable which accepts a dictionary of + "handler overrides" and returns a ``foolscap.api.Tub``. 
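A hedged sketch (separate from the patch) of the [client] options that StorageClientConfig.from_node_config() reads above, using a plain ConfigParser in place of Tahoe's _Config wrapper; the section and option names come from the diff, the values are made up.

from configparser import ConfigParser, NoSectionError

cfg = ConfigParser()
cfg.read_string(u"""
[client]
peers.preferred = v0-aaa, v0-bbb
storage.plugins = demo-plugin

[storageclient.plugins.demo-plugin]
knob = 7
""")

preferred_peers = tuple(
    p.strip()
    for p in cfg.get("client", "peers.preferred", fallback="").split(",")
    if p.strip()
)
assert preferred_peers == ("v0-aaa", "v0-bbb")

try:
    plugin_config = dict(cfg.items("storageclient.plugins.demo-plugin"))
except NoSectionError:
    plugin_config = {}
assert plugin_config == {"knob": "7"}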
+ :ivar StorageClientConfig storage_client_config: Values from the node configuration file relating to storage behavior. """ @@ -175,7 +187,7 @@ class StorageFarmBroker(service.MultiService): # storage servers that we've heard about. Each descriptor manages its # own Reconnector, and will give us a RemoteReference when we ask # them for it. - self.servers = {} + self.servers = BytesKeyDict() self._static_server_ids = set() # ignore announcements for these self.introducer_client = None self._threshold_listeners = [] # tuples of (threshold, Deferred) @@ -189,7 +201,10 @@ class StorageFarmBroker(service.MultiService): # this sorted order). for (server_id, server) in sorted(servers.items()): try: - storage_server = self._make_storage_server(server_id, server) + storage_server = self._make_storage_server( + server_id.encode("utf-8"), + server, + ) except Exception: # TODO: The _make_storage_server failure is logged but maybe # we should write a traceback here. Notably, tests don't @@ -199,6 +214,8 @@ class StorageFarmBroker(service.MultiService): # information. pass else: + if isinstance(server_id, str): + server_id = server_id.encode("utf-8") self._static_server_ids.add(server_id) self.servers[server_id] = storage_server storage_server.setServiceParent(self) @@ -232,8 +249,19 @@ class StorageFarmBroker(service.MultiService): include_result=False, ) def _make_storage_server(self, server_id, server): - assert isinstance(server_id, unicode) # from YAML - server_id = server_id.encode("ascii") + """ + Create a new ``IServer`` for the given storage server announcement. + + :param bytes server_id: The unique identifier for the server. + + :param dict server: The server announcement. See ``Static Server + Definitions`` in the configuration documentation for details about + the structure and contents. + + :return IServer: The object-y representation of the server described + by the given announcement. + """ + assert isinstance(server_id, bytes) handler_overrides = server.get("connections", {}) s = NativeStorageServer( server_id, @@ -260,7 +288,7 @@ class StorageFarmBroker(service.MultiService): # these two are used in unit tests def test_add_rref(self, serverid, rref, ann): s = self._make_storage_server( - serverid.decode("ascii"), + serverid, {"ann": ann.copy()}, ) s._rref = rref @@ -292,28 +320,71 @@ class StorageFarmBroker(service.MultiService): remaining.append( (threshold, d) ) self._threshold_listeners = remaining - def _got_announcement(self, key_s, ann): - precondition(isinstance(key_s, str), key_s) - precondition(key_s.startswith("v0-"), key_s) - precondition(ann["service-name"] == "storage", ann["service-name"]) - server_id = key_s + def _should_ignore_announcement(self, server_id, ann): + """ + Determine whether a new storage announcement should be discarded or used + to update our collection of storage servers. + + :param bytes server_id: The unique identifier for the storage server + which made the announcement. + + :param dict ann: The announcement. + + :return bool: ``True`` if the announcement should be ignored, + ``False`` if it should be used to update our local storage server + state. + """ + # Let local static configuration always override any announcement for + # a particular server. 
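+        # For example (shape illustrative; see ``Static Server Definitions``
+        # in the configuration documentation for the real syntax), a static
+        # entry along the lines of:
+        #
+        #     storage:
+        #       v0-abcd...:
+        #         ann:
+        #           anonymous-storage-FURL: pb://...
+        #
+        # means announcements for server_id b"v0-abcd..." are ignored here.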
         if server_id in self._static_server_ids:
             log.msg(format="ignoring announcement for static server '%(id)s'",
                     id=server_id,
                     facility="tahoe.storage_broker", umid="AlxzqA",
                     level=log.UNUSUAL)
+            return True
+
+        try:
+            old = self.servers[server_id]
+        except KeyError:
+            # We don't know anything about this server.  Let's use the
+            # announcement to change that.
+            return False
+        else:
+            # Determine if this announcement is at all different from the
+            # announcement we already have for the server.  If it is the
+            # same, we don't need to change anything.
+            return old.get_announcement() == ann
+
+    def _got_announcement(self, key_s, ann):
+        """
+        This callback is given to the introducer and called any time an
+        announcement is received which has a valid signature and does not have
+        a sequence number less than or equal to a previous sequence number
+        seen for that server by that introducer.
+
+        Note that sequence numbers are not compared across different
+        introducers, so if we use more than one introducer it is possible
+        for them to deliver us stale announcements in some cases.
+        """
+        precondition(isinstance(key_s, bytes), key_s)
+        precondition(key_s.startswith(b"v0-"), key_s)
+        precondition(ann["service-name"] == "storage", ann["service-name"])
+        server_id = key_s
+
+        if self._should_ignore_announcement(server_id, ann):
             return
+
         s = self._make_storage_server(
-            server_id.decode("utf-8"),
+            server_id,
             {u"ann": ann},
         )
-        server_id = s.get_serverid()
-        old = self.servers.get(server_id)
-        if old:
-            if old.get_announcement() == ann:
-                return # duplicate
-            # replacement
-            del self.servers[server_id]
+
+        try:
+            old = self.servers.pop(server_id)
+        except KeyError:
+            pass
+        else:
+            # It's a replacement, get rid of the old one.
             old.stop_connecting()
             old.disownServiceParent()
             # NOTE: this disownServiceParent() returns a Deferred that
@@ -328,6 +399,7 @@ class StorageFarmBroker(service.MultiService):
             # until they have fired (but hopefully don't keep reference
             # cycles around when they fire earlier than that, which will
             # almost always be the case for normal runtime).
+
         # now we forget about them and start using the new one
         s.setServiceParent(self)
         self.servers[server_id] = s
@@ -343,7 +415,7 @@ class StorageFarmBroker(service.MultiService):
         # connections to only a subset of the servers, which would increase
         # the chances that we'll put shares in weird places (and not update
         # existing shares of mutable files). See #374 for more details.
-        for dsc in self.servers.values():
+        for dsc in list(self.servers.values()):
             dsc.try_to_connect()

     def get_servers_for_psi(self, peer_selection_index):
@@ -383,7 +455,7 @@ class StorageFarmBroker(service.MultiService):
         # Upload Results web page). If the Helper is running 1.12 or newer,
         # it will send pubkeys, but if it's still running 1.11, it will send
         # tubids. This clause maps the old tubids to our existing servers.
-        for s in self.servers.values():
+        for s in list(self.servers.values()):
             if isinstance(s, NativeStorageServer):
                 if serverid == s.get_tubid():
                     return s
@@ -392,6 +464,7 @@ class StorageFarmBroker(service.MultiService):
 @implementer(IDisplayableServer)
 class StubServer(object):
     def __init__(self, serverid):
+        assert isinstance(serverid, bytes)
         self.serverid = serverid # binary tubid
     def get_serverid(self):
         return self.serverid
@@ -489,17 +562,21 @@ class _FoolscapStorage(object):
         }

        *nickname* is optional.
+
+        The furl will be a Unicode string on Python 3; on Python 2 it will be
+        either a native (bytes) string or a Unicode string.
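+
+        Either way, it is encoded to UTF-8 bytes just below before the tubid
+        is parsed out of it.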
""" + furl = furl.encode("utf-8") m = re.match(br'pb://(\w+)@', furl) assert m, furl tubid_s = m.group(1).lower() tubid = base32.a2b(tubid_s) if "permutation-seed-base32" in ann: seed = ann["permutation-seed-base32"] - if isinstance(seed, unicode): + if isinstance(seed, str): seed = seed.encode("utf-8") ps = base32.a2b(seed) - elif re.search(r'^v0-[0-9a-zA-Z]{52}$', server_id): + elif re.search(br'^v0-[0-9a-zA-Z]{52}$', server_id): ps = base32.a2b(server_id[3:]) else: log.msg("unable to parse serverid '%(server_id)s as pubkey, " @@ -591,7 +668,7 @@ def _storage_from_foolscap_plugin(node_config, config, announcement, get_rref): in getPlugins(IFoolscapStoragePlugin) } storage_options = announcement.get(u"storage-options", []) - for plugin_name, plugin_config in config.storage_plugins.items(): + for plugin_name, plugin_config in list(config.storage_plugins.items()): try: plugin = plugins[plugin_name] except KeyError: @@ -621,19 +698,18 @@ class NativeStorageServer(service.MultiService): @ivar nickname: the server's self-reported nickname (unicode), same @ivar rref: the RemoteReference, if connected, otherwise None - @ivar remote_host: the IAddress, if connected, otherwise None """ - VERSION_DEFAULTS = { - b"http://allmydata.org/tahoe/protocols/storage/v1" : - { b"maximum-immutable-share-size": 2**32 - 1, - b"maximum-mutable-share-size": 2*1000*1000*1000, # maximum prior to v1.9.2 - b"tolerates-immutable-read-overrun": False, - b"delete-mutable-shares-with-zero-length-writev": False, - b"available-space": None, - }, - b"application-version": "unknown: no get_version()", - } + VERSION_DEFAULTS = UnicodeKeyDict({ + "http://allmydata.org/tahoe/protocols/storage/v1" : + UnicodeKeyDict({ "maximum-immutable-share-size": 2**32 - 1, + "maximum-mutable-share-size": 2*1000*1000*1000, # maximum prior to v1.9.2 + "tolerates-immutable-read-overrun": False, + "delete-mutable-shares-with-zero-length-writev": False, + "available-space": None, + }), + "application-version": "unknown: no get_version()", + }) def __init__(self, server_id, ann, tub_maker, handler_overrides, node_config, config=StorageClientConfig()): service.MultiService.__init__(self) @@ -647,7 +723,6 @@ class NativeStorageServer(service.MultiService): self.last_connect_time = None self.last_loss_time = None - self.remote_host = None self._rref = None self._is_connected = False self._reconnector = None @@ -686,7 +761,7 @@ class NativeStorageServer(service.MultiService): else: return _FoolscapStorage.from_announcement( self._server_id, - furl.encode("utf-8"), + furl, ann, storage_server, ) @@ -698,8 +773,6 @@ class NativeStorageServer(service.MultiService): # Nope pass else: - if isinstance(furl, unicode): - furl = furl.encode("utf-8") # See comment above for the _storage_from_foolscap_plugin case # about passing in get_rref. 
storage_server = _StorageServer(get_rref=self.get_rref) @@ -756,8 +829,6 @@ class NativeStorageServer(service.MultiService): return None def get_announcement(self): return self.announcement - def get_remote_host(self): - return self.remote_host def get_connection_status(self): last_received = None @@ -773,7 +844,7 @@ class NativeStorageServer(service.MultiService): version = self.get_version() if version is None: return None - protocol_v1_version = version.get(b'http://allmydata.org/tahoe/protocols/storage/v1', {}) + protocol_v1_version = version.get('http://allmydata.org/tahoe/protocols/storage/v1', UnicodeKeyDict()) available_space = protocol_v1_version.get('available-space') if available_space is None: available_space = protocol_v1_version.get('maximum-immutable-share-size', None) @@ -805,7 +876,6 @@ class NativeStorageServer(service.MultiService): level=log.NOISY, parent=lp) self.last_connect_time = time.time() - self.remote_host = rref.getLocationHints() self._rref = rref self._is_connected = True rref.notifyOnDisconnect(self._lost) @@ -831,7 +901,6 @@ class NativeStorageServer(service.MultiService): # get_connected_servers() or get_servers_for_psi()) can continue to # use s.get_rref().callRemote() and not worry about it being None. self._is_connected = False - self.remote_host = None def stop_connecting(self): # used when this descriptor has been superceded by another diff --git a/src/allmydata/test/__init__.py b/src/allmydata/test/__init__.py index abbde919f..19c046eca 100644 --- a/src/allmydata/test/__init__.py +++ b/src/allmydata/test/__init__.py @@ -113,4 +113,5 @@ if sys.platform == "win32": initialize() from eliot import to_file -to_file(open("eliot.log", "w")) +from allmydata.util.jsonbytes import BytesJSONEncoder +to_file(open("eliot.log", "w"), encoder=BytesJSONEncoder) diff --git a/src/allmydata/test/check_grid.py b/src/allmydata/test/check_grid.py index d3993ee5e..0a68ed899 100644 --- a/src/allmydata/test/check_grid.py +++ b/src/allmydata/test/check_grid.py @@ -16,22 +16,19 @@ that this script does not import anything from tahoe directly, so it doesn't matter what its PYTHONPATH is, as long as the bin/tahoe that it uses is functional. -This script expects that the client node will be not running when the script -starts, but it will forcibly shut down the node just to be sure. It will shut -down the node after the test finishes. +This script expects the client node to be running already. 
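+(For example, leave the "tahoe run DIR" from the setup steps below running
+in another shell.)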
To set up the client node, do the following: - tahoe create-client DIR - populate DIR/introducer.furl - tahoe start DIR - tahoe add-alias -d DIR testgrid `tahoe mkdir -d DIR` - pick a 10kB-ish test file, compute its md5sum - tahoe put -d DIR FILE testgrid:old.MD5SUM - tahoe put -d DIR FILE testgrid:recent.MD5SUM - tahoe put -d DIR FILE testgrid:recentdir/recent.MD5SUM - echo "" | tahoe put -d DIR --mutable testgrid:log - echo "" | tahoe put -d DIR --mutable testgrid:recentlog + tahoe create-client --introducer=INTRODUCER_FURL DIR + tahoe run DIR + tahoe -d DIR create-alias testgrid + # pick a 10kB-ish test file, compute its md5sum + tahoe -d DIR put FILE testgrid:old.MD5SUM + tahoe -d DIR put FILE testgrid:recent.MD5SUM + tahoe -d DIR put FILE testgrid:recentdir/recent.MD5SUM + echo "" | tahoe -d DIR put --mutable - testgrid:log + echo "" | tahoe -d DIR put --mutable - testgrid:recentlog This script will perform the following steps (the kind of compatibility that is being tested is in [brackets]): @@ -52,7 +49,6 @@ is being tested is in [brackets]): This script will also keep track of speeds and latencies and will write them in a machine-readable logfile. - """ import time, subprocess, md5, os.path, random @@ -104,26 +100,13 @@ class GridTester(object): def cli(self, cmd, *args, **kwargs): print("tahoe", cmd, " ".join(args)) - stdout, stderr = self.command(self.tahoe, cmd, "-d", self.nodedir, + stdout, stderr = self.command(self.tahoe, "-d", self.nodedir, cmd, *args, **kwargs) if not kwargs.get("ignore_stderr", False) and stderr != "": raise CommandFailed("command '%s' had stderr: %s" % (" ".join(args), stderr)) return stdout - def stop_old_node(self): - print("tahoe stop", self.nodedir, "(force)") - self.command(self.tahoe, "stop", self.nodedir, expected_rc=None) - - def start_node(self): - print("tahoe start", self.nodedir) - self.command(self.tahoe, "start", self.nodedir) - time.sleep(5) - - def stop_node(self): - print("tahoe stop", self.nodedir) - self.command(self.tahoe, "stop", self.nodedir) - def read_and_check(self, f): expected_md5_s = f[f.find(".")+1:] out = self.cli("get", "testgrid:" + f) @@ -204,19 +187,11 @@ class GridTester(object): fn = prefix + "." 
+ md5sum
         return fn, data

-    def run(self):
-        self.stop_old_node()
-        self.start_node()
-        try:
-            self.do_test()
-        finally:
-            self.stop_node()
-
 def main():
     config = GridTesterOptions()
     config.parseOptions()
     gt = GridTester(config)
-    gt.run()
+    gt.do_test()

 if __name__ == "__main__":
     main()
diff --git a/src/allmydata/test/check_load.py b/src/allmydata/test/check_load.py
index 4058ddf77..21576ea3a 100644
--- a/src/allmydata/test/check_load.py
+++ b/src/allmydata/test/check_load.py
@@ -37,6 +37,11 @@ a mean of 10kB and a max of 100MB, so filesize=min(int(1.0/random(.0002)),1e8)
 import os, sys, httplib, binascii
 import urllib, json, random, time, urlparse

+try:
+    from typing import Dict
+except ImportError:
+    pass
+
 # Python 2 compatibility
 from future.utils import PY2
 if PY2:
@@ -49,13 +54,13 @@ if sys.argv[1] == "--stats":
     DELAY = 10
     MAXSAMPLES = 6
     totals = []
-    last_stats = {}
+    last_stats = {}  # type: Dict[str, float]
     while True:
-        stats = {}
+        stats = {}  # type: Dict[str, float]
         for sf in statsfiles:
             for line in open(sf, "r").readlines():
-                name, value = line.split(":")
-                value = int(value.strip())
+                name, str_value = line.split(":")
+                value = int(str_value.strip())
                 if name not in stats:
                     stats[name] = 0
                 stats[name] += float(value)
diff --git a/src/allmydata/test/check_memory.py b/src/allmydata/test/check_memory.py
index 41cf6e1d7..268d77451 100644
--- a/src/allmydata/test/check_memory.py
+++ b/src/allmydata/test/check_memory.py
@@ -8,6 +8,9 @@ if PY2:
     from future.builtins import str  # noqa: F401
 from six.moves import cStringIO as StringIO
+from twisted.python.filepath import (
+    FilePath,
+)
 from twisted.internet import defer, reactor, protocol, error
 from twisted.application import service, internet
 from twisted.web import client as tw_client
@@ -21,6 +24,10 @@ from allmydata.util import fileutil, pollmixin
 from allmydata.util.fileutil import abspath_expanduser_unicode
 from allmydata.util.encodingutil import get_filesystem_encoding

+from allmydata.scripts.common import (
+    write_introducer,
+)
+
 class StallableHTTPGetterDiscarder(tw_client.HTTPPageGetter, object):
     full_speed_ahead = False
     _bytes_so_far = 0
@@ -180,16 +187,18 @@ class SystemFramework(pollmixin.PollMixin):
         self.introducer_furl = self.introducer.introducer_url

     def make_nodes(self):
+        root = FilePath(self.testdir)
         self.nodes = []
         for i in range(self.numnodes):
-            nodedir = os.path.join(self.testdir, "node%d" % i)
-            os.mkdir(nodedir)
-            f = open(os.path.join(nodedir, "tahoe.cfg"), "w")
-            f.write("[client]\n"
-                    "introducer.furl = %s\n"
-                    "shares.happy = 1\n"
-                    "[storage]\n"
-                    % (self.introducer_furl,))
+            nodedir = root.child("node%d" % (i,))
+            private = nodedir.child("private")
+            private.makedirs()
+            write_introducer(nodedir, "default", self.introducer_furl)
+            config = (
+                "[client]\n"
+                "shares.happy = 1\n"
+                "[storage]\n"
+            )
             # the only tests for which we want the internal nodes to actually
             # retain shares are the ones where somebody's going to download
             # them.
@@ -200,13 +209,13 @@ class SystemFramework(pollmixin.PollMixin):
             # for these tests, we tell the storage servers to pretend to
             # accept shares, but really just throw them out, since we're
             # only testing upload and not download.
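+            # (For instance, in "upload" mode the tahoe.cfg written below
+            # comes out roughly as:
+            #
+            #     [client]
+            #     shares.happy = 1
+            #     [storage]
+            #     debug_discard = true
+            # )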
- f.write("debug_discard = true\n") + config += "debug_discard = true\n" if self.mode in ("receive",): # for this mode, the client-under-test gets all the shares, # so our internal nodes can refuse requests - f.write("readonly = true\n") - f.close() - c = client.Client(basedir=nodedir) + config += "readonly = true\n" + nodedir.child("tahoe.cfg").setContent(config) + c = client.Client(basedir=nodedir.path) c.setServiceParent(self) self.nodes.append(c) # the peers will start running, eventually they will connect to each @@ -235,16 +244,16 @@ this file are ignored. quiet = StringIO() create_node.create_node({'basedir': clientdir}, out=quiet) log.msg("DONE MAKING CLIENT") + write_introducer(clientdir, "default", self.introducer_furl) # now replace tahoe.cfg # set webport=0 and then ask the node what port it picked. f = open(os.path.join(clientdir, "tahoe.cfg"), "w") f.write("[node]\n" "web.port = tcp:0:interface=127.0.0.1\n" "[client]\n" - "introducer.furl = %s\n" "shares.happy = 1\n" "[storage]\n" - % (self.introducer_furl,)) + ) if self.mode in ("upload-self", "receive"): # accept and store shares, to trigger the memory consumption bugs @@ -499,13 +508,13 @@ if __name__ == '__main__': mode = "upload" if len(sys.argv) > 1: mode = sys.argv[1] - if sys.maxint == 2147483647: + if sys.maxsize == 2147483647: bits = "32" - elif sys.maxint == 9223372036854775807: + elif sys.maxsize == 9223372036854775807: bits = "64" else: bits = "?" - print("%s-bit system (sys.maxint=%d)" % (bits, sys.maxint)) + print("%s-bit system (sys.maxsize=%d)" % (bits, sys.maxsize)) # put the logfile and stats.out in _test_memory/ . These stick around. # put the nodes and other files in _test_memory/test/ . These are # removed each time we run. diff --git a/src/allmydata/test/cli/common.py b/src/allmydata/test/cli/common.py index 852dce52c..bf175de44 100644 --- a/src/allmydata/test/cli/common.py +++ b/src/allmydata/test/cli/common.py @@ -1,6 +1,6 @@ from ...util.encodingutil import unicode_to_argv from ...scripts import runner -from ..common_util import ReallyEqualMixin, run_cli +from ..common_util import ReallyEqualMixin, run_cli, run_cli_unicode def parse_options(basedir, command, args): o = runner.Options() @@ -10,10 +10,41 @@ def parse_options(basedir, command, args): return o class CLITestMixin(ReallyEqualMixin): - def do_cli(self, verb, *args, **kwargs): + """ + A mixin for use with ``GridTestMixin`` to execute CLI commands against + nodes created by methods of that mixin. + """ + def do_cli_unicode(self, verb, argv, client_num=0, **kwargs): + """ + Run a Tahoe-LAFS CLI command. + + :param verb: See ``run_cli_unicode``. + + :param argv: See ``run_cli_unicode``. + + :param int client_num: The number of the ``GridTestMixin``-created + node against which to execute the command. + + :param kwargs: Additional keyword arguments to pass to + ``run_cli_unicode``. + """ # client_num is used to execute client CLI commands on a specific # client. - client_num = kwargs.get("client_num", 0) + client_dir = self.get_clientdir(i=client_num) + nodeargs = [ u"--node-directory", client_dir ] + return run_cli_unicode(verb, argv, nodeargs=nodeargs, **kwargs) + + + def do_cli(self, verb, *args, **kwargs): + """ + Like ``do_cli_unicode`` but work with ``bytes`` everywhere instead of + ``unicode``. + + Where possible, prefer ``do_cli_unicode``. + """ + # client_num is used to execute client CLI commands on a specific + # client. 
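+        # (For example, a hypothetical call such as
+        #     self.do_cli("ls", client_num=1)
+        # runs "ls" with --node-directory pointing at grid client 1.)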
+ client_num = kwargs.pop("client_num", 0) client_dir = unicode_to_argv(self.get_clientdir(i=client_num)) - nodeargs = [ "--node-directory", client_dir ] - return run_cli(verb, nodeargs=nodeargs, *args, **kwargs) + nodeargs = [ b"--node-directory", client_dir ] + return run_cli(verb, *args, nodeargs=nodeargs, **kwargs) diff --git a/src/allmydata/test/cli/test_alias.py b/src/allmydata/test/cli/test_alias.py index 6542d154f..72b634608 100644 --- a/src/allmydata/test/cli/test_alias.py +++ b/src/allmydata/test/cli/test_alias.py @@ -1,105 +1,126 @@ import json -from mock import patch from twisted.trial import unittest from twisted.internet.defer import inlineCallbacks -from allmydata.util.encodingutil import unicode_to_argv from allmydata.scripts.common import get_aliases from allmydata.test.no_network import GridTestMixin from .common import CLITestMixin -from ..common_util import skip_if_cannot_represent_argv +from allmydata.util import encodingutil # see also test_create_alias class ListAlias(GridTestMixin, CLITestMixin, unittest.TestCase): @inlineCallbacks - def test_list(self): - self.basedir = "cli/ListAlias/test_list" + def _check_create_alias(self, alias, encoding): + """ + Verify that ``tahoe create-alias`` can be used to create an alias named + ``alias`` when argv is encoded using ``encoding``. + + :param unicode alias: The alias to try to create. + + :param NoneType|str encoding: The name of an encoding to force the + ``create-alias`` implementation to use. This simulates the + effects of setting LANG and doing other locale-foolishness without + actually having to mess with this process's global locale state. + If this is ``None`` then the encoding used will be ascii but the + stdio objects given to the code under test will not declare any + encoding (this is like Python 2 when stdio is not a tty). + + :return Deferred: A Deferred that fires with success if the alias can + be created and that creation is reported on stdout appropriately + encoded or with failure if something goes wrong. + """ + self.basedir = self.mktemp() self.set_up_grid(oneshare=True) - rc, stdout, stderr = yield self.do_cli( - "create-alias", - unicode_to_argv(u"tahoe"), + # We can pass an encoding into the test utilities to invoke the code + # under test but we can't pass such a parameter directly to the code + # under test. Instead, that code looks at io_encoding. So, + # monkey-patch that value to our desired value here. This is the code + # that most directly takes the place of messing with LANG or the + # locale module. + self.patch(encodingutil, "io_encoding", encoding or "ascii") + + rc, stdout, stderr = yield self.do_cli_unicode( + u"create-alias", + [alias], + encoding=encoding, ) - self.failUnless(unicode_to_argv(u"Alias 'tahoe' created") in stdout) - self.failIf(stderr) - aliases = get_aliases(self.get_clientdir()) - self.failUnless(u"tahoe" in aliases) - self.failUnless(aliases[u"tahoe"].startswith("URI:DIR2:")) + # Make sure the result of the create-alias command is as we want it to + # be. + self.assertEqual(u"Alias '{}' created\n".format(alias), stdout) + self.assertEqual("", stderr) + self.assertEqual(0, rc) - rc, stdout, stderr = yield self.do_cli("list-aliases", "--json") + # Make sure it had the intended side-effect, too - an alias created in + # the node filesystem state. + aliases = get_aliases(self.get_clientdir()) + self.assertIn(alias, aliases) + self.assertTrue(aliases[alias].startswith(u"URI:DIR2:")) + + # And inspect the state via the user interface list-aliases command + # too. 
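+        # "list-aliases --json" prints a JSON object mapping each alias name
+        # to its caps, along the lines of (readonly prefix illustrative):
+        #     {"tahoe": {"readwrite": "URI:DIR2:...", "readonly": "URI:DIR2-RO:..."}}
+        # The assertions below unpack exactly that shape.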
+ rc, stdout, stderr = yield self.do_cli_unicode( + u"list-aliases", + [u"--json"], + encoding=encoding, + ) self.assertEqual(0, rc) data = json.loads(stdout) - self.assertIn(u"tahoe", data) - data = data[u"tahoe"] - self.assertIn("readwrite", data) - self.assertIn("readonly", data) + self.assertIn(alias, data) + data = data[alias] + self.assertIn(u"readwrite", data) + self.assertIn(u"readonly", data) - @inlineCallbacks - def test_list_unicode_mismatch_json(self): - """ - pretty hack-y test, but we want to cover the 'except' on Unicode - errors paths and I can't come up with a nicer way to trigger - this - """ - self.basedir = "cli/ListAlias/test_list_unicode_mismatch_json" - skip_if_cannot_represent_argv(u"tahoe\u263A") - self.set_up_grid(oneshare=True) - rc, stdout, stderr = yield self.do_cli( - "create-alias", - unicode_to_argv(u"tahoe\u263A"), + def test_list_none(self): + """ + An alias composed of all ASCII-encodeable code points can be created when + stdio aren't clearly marked with an encoding. + """ + return self._check_create_alias( + u"tahoe", + encoding=None, ) - self.failUnless(unicode_to_argv(u"Alias 'tahoe\u263A' created") in stdout) - self.failIf(stderr) - booms = [] - - def boom(out, indent=4): - if not len(booms): - booms.append(out) - raise UnicodeEncodeError("foo", u"foo", 3, 5, "foo") - return str(out) - - with patch("allmydata.scripts.tahoe_add_alias.json.dumps", boom): - aliases = get_aliases(self.get_clientdir()) - self.failUnless(u"tahoe\u263A" in aliases) - self.failUnless(aliases[u"tahoe\u263A"].startswith("URI:DIR2:")) - - rc, stdout, stderr = yield self.do_cli("list-aliases", "--json") - - self.assertEqual(1, rc) - self.assertIn("could not be converted", stderr) - - @inlineCallbacks - def test_list_unicode_mismatch(self): - self.basedir = "cli/ListAlias/test_list_unicode_mismatch" - skip_if_cannot_represent_argv(u"tahoe\u263A") - self.set_up_grid(oneshare=True) - - rc, stdout, stderr = yield self.do_cli( - "create-alias", - unicode_to_argv(u"tahoe\u263A"), + def test_list_ascii(self): + """ + An alias composed of all ASCII-encodeable code points can be created when + the active encoding is ASCII. + """ + return self._check_create_alias( + u"tahoe", + encoding="ascii", ) - def boom(out): - print("boom {}".format(out)) - return out - raise UnicodeEncodeError("foo", u"foo", 3, 5, "foo") - with patch("allmydata.scripts.tahoe_add_alias.unicode_to_output", boom): - self.failUnless(unicode_to_argv(u"Alias 'tahoe\u263A' created") in stdout) - self.failIf(stderr) - aliases = get_aliases(self.get_clientdir()) - self.failUnless(u"tahoe\u263A" in aliases) - self.failUnless(aliases[u"tahoe\u263A"].startswith("URI:DIR2:")) + def test_list_latin_1(self): + """ + An alias composed of all Latin-1-encodeable code points can be created + when the active encoding is Latin-1. - rc, stdout, stderr = yield self.do_cli("list-aliases") + This is very similar to ``test_list_utf_8`` but the assumption of + UTF-8 is nearly ubiquitous and explicitly exercising the codepaths + with a UTF-8-incompatible encoding helps flush out unintentional UTF-8 + assumptions. + """ + return self._check_create_alias( + u"taho\N{LATIN SMALL LETTER E WITH ACUTE}", + encoding="latin-1", + ) - self.assertEqual(1, rc) - self.assertIn("could not be converted", stderr) + + def test_list_utf_8(self): + """ + An alias composed of all UTF-8-encodeable code points can be created when + the active encoding is UTF-8. 
+ """ + return self._check_create_alias( + u"tahoe\N{SNOWMAN}", + encoding="utf-8", + ) diff --git a/src/allmydata/test/cli/test_cli.py b/src/allmydata/test/cli/test_cli.py index 72e4fe69d..2b1bc1c86 100644 --- a/src/allmydata/test/cli/test_cli.py +++ b/src/allmydata/test/cli/test_cli.py @@ -20,14 +20,14 @@ from allmydata.scripts.common_http import socket_error import allmydata.scripts.common_http # Test that the scripts can be imported. -from allmydata.scripts import create_node, debug, tahoe_start, tahoe_restart, \ +from allmydata.scripts import create_node, debug, \ tahoe_add_alias, tahoe_backup, tahoe_check, tahoe_cp, tahoe_get, tahoe_ls, \ tahoe_manifest, tahoe_mkdir, tahoe_mv, tahoe_put, tahoe_unlink, tahoe_webopen, \ - tahoe_stop, tahoe_daemonize, tahoe_run -_hush_pyflakes = [create_node, debug, tahoe_start, tahoe_restart, tahoe_stop, + tahoe_run +_hush_pyflakes = [create_node, debug, tahoe_add_alias, tahoe_backup, tahoe_check, tahoe_cp, tahoe_get, tahoe_ls, tahoe_manifest, tahoe_mkdir, tahoe_mv, tahoe_put, tahoe_unlink, tahoe_webopen, - tahoe_daemonize, tahoe_run] + tahoe_run] from allmydata.scripts import common from allmydata.scripts.common import DEFAULT_ALIAS, get_aliases, get_alias, \ @@ -626,18 +626,6 @@ class Help(unittest.TestCase): help = str(cli.ListAliasesOptions()) self.failUnlessIn("[options]", help) - def test_start(self): - help = str(tahoe_start.StartOptions()) - self.failUnlessIn("[options] [NODEDIR [twistd-options]]", help) - - def test_stop(self): - help = str(tahoe_stop.StopOptions()) - self.failUnlessIn("[options] [NODEDIR]", help) - - def test_restart(self): - help = str(tahoe_restart.RestartOptions()) - self.failUnlessIn("[options] [NODEDIR [twistd-options]]", help) - def test_run(self): help = str(tahoe_run.RunOptions()) self.failUnlessIn("[options] [NODEDIR [twistd-options]]", help) @@ -1266,85 +1254,72 @@ class Options(ReallyEqualMixin, unittest.TestCase): # "tahoe --version" dumps text to stdout and exits stdout = StringIO() self.failUnlessRaises(SystemExit, self.parse, ["--version"], stdout) - self.failUnlessIn(allmydata.__appname__ + ":", stdout.getvalue()) + self.failUnlessIn(allmydata.__full_version__, stdout.getvalue()) # but "tahoe SUBCOMMAND --version" should be rejected self.failUnlessRaises(usage.UsageError, self.parse, - ["start", "--version"]) + ["run", "--version"]) self.failUnlessRaises(usage.UsageError, self.parse, - ["start", "--version-and-path"]) + ["run", "--version-and-path"]) def test_quiet(self): # accepted as an overall option, but not on subcommands - o = self.parse(["--quiet", "start"]) + o = self.parse(["--quiet", "run"]) self.failUnless(o.parent["quiet"]) self.failUnlessRaises(usage.UsageError, self.parse, - ["start", "--quiet"]) + ["run", "--quiet"]) def test_basedir(self): # accept a --node-directory option before the verb, or a --basedir # option after, or a basedir argument after, but none in the wrong # place, and not more than one of the three. - o = self.parse(["start"]) + + # Here is some option twistd recognizes but we don't. Depending on + # where it appears, it should be passed through to twistd. It doesn't + # really matter which option it is (it doesn't even have to be a valid + # option). This test does not actually run any of the twistd argument + # parsing. 
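+        # (For instance, "tahoe run here --spew" should still resolve the
+        # basedir to "here" while leaving "--spew" alone for twistd; the
+        # cases below exercise exactly that shape.)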
+ some_twistd_option = "--spew" + + o = self.parse(["run"]) self.failUnlessReallyEqual(o["basedir"], os.path.join(fileutil.abspath_expanduser_unicode(u"~"), u".tahoe")) - o = self.parse(["start", "here"]) + o = self.parse(["run", "here"]) self.failUnlessReallyEqual(o["basedir"], fileutil.abspath_expanduser_unicode(u"here")) - o = self.parse(["start", "--basedir", "there"]) + o = self.parse(["run", "--basedir", "there"]) self.failUnlessReallyEqual(o["basedir"], fileutil.abspath_expanduser_unicode(u"there")) - o = self.parse(["--node-directory", "there", "start"]) + o = self.parse(["--node-directory", "there", "run"]) self.failUnlessReallyEqual(o["basedir"], fileutil.abspath_expanduser_unicode(u"there")) - o = self.parse(["start", "here", "--nodaemon"]) + o = self.parse(["run", "here", some_twistd_option]) self.failUnlessReallyEqual(o["basedir"], fileutil.abspath_expanduser_unicode(u"here")) self.failUnlessRaises(usage.UsageError, self.parse, - ["--basedir", "there", "start"]) + ["--basedir", "there", "run"]) self.failUnlessRaises(usage.UsageError, self.parse, - ["start", "--node-directory", "there"]) + ["run", "--node-directory", "there"]) self.failUnlessRaises(usage.UsageError, self.parse, ["--node-directory=there", - "start", "--basedir=here"]) + "run", "--basedir=here"]) self.failUnlessRaises(usage.UsageError, self.parse, - ["start", "--basedir=here", "anywhere"]) + ["run", "--basedir=here", "anywhere"]) self.failUnlessRaises(usage.UsageError, self.parse, ["--node-directory=there", - "start", "anywhere"]) + "run", "anywhere"]) self.failUnlessRaises(usage.UsageError, self.parse, ["--node-directory=there", - "start", "--basedir=here", "anywhere"]) + "run", "--basedir=here", "anywhere"]) self.failUnlessRaises(usage.UsageError, self.parse, - ["--node-directory=there", "start", "--nodaemon"]) + ["--node-directory=there", "run", some_twistd_option]) self.failUnlessRaises(usage.UsageError, self.parse, - ["start", "--basedir=here", "--nodaemon"]) + ["run", "--basedir=here", some_twistd_option]) -class Stop(unittest.TestCase): - def test_non_numeric_pid(self): - """ - If the pidfile exists but does not contain a numeric value, a complaint to - this effect is written to stderr and the non-success result is - returned. 
- """ - basedir = FilePath(self.mktemp().decode("ascii")) - basedir.makedirs() - basedir.child(u"twistd.pid").setContent(b"foo") +class Run(unittest.TestCase): - config = tahoe_stop.StopOptions() - config.stdout = StringIO() - config.stderr = StringIO() - config['basedir'] = basedir.path - - result_code = tahoe_stop.stop(config) - self.assertEqual(2, result_code) - self.assertIn("invalid PID file", config.stderr.getvalue()) - - -class Start(unittest.TestCase): - - @patch('allmydata.scripts.run_common.os.chdir') - @patch('allmydata.scripts.run_common.twistd') + @patch('allmydata.scripts.tahoe_run.os.chdir') + @patch('allmydata.scripts.tahoe_run.twistd') def test_non_numeric_pid(self, mock_twistd, chdir): """ If the pidfile exists but does not contain a numeric value, a complaint to @@ -1355,13 +1330,13 @@ class Start(unittest.TestCase): basedir.child(u"twistd.pid").setContent(b"foo") basedir.child(u"tahoe-client.tac").setContent(b"") - config = tahoe_daemonize.DaemonizeOptions() + config = tahoe_run.RunOptions() config.stdout = StringIO() config.stderr = StringIO() config['basedir'] = basedir.path config.twistd_args = [] - result_code = tahoe_daemonize.daemonize(config) + result_code = tahoe_run.run(config) self.assertIn("invalid PID file", config.stderr.getvalue()) self.assertTrue(len(mock_twistd.mock_calls), 1) self.assertEqual(mock_twistd.mock_calls[0][0], 'runApp') diff --git a/src/allmydata/test/cli/test_cp.py b/src/allmydata/test/cli/test_cp.py index ba1894f1c..6cebec4a5 100644 --- a/src/allmydata/test/cli/test_cp.py +++ b/src/allmydata/test/cli/test_cp.py @@ -661,7 +661,7 @@ starting copy, 2 files, 1 directories # This test ensures that tahoe will copy a file from the grid to # a local directory without a specified file name. # https://tahoe-lafs.org/trac/tahoe-lafs/ticket/2027 - self.basedir = "cli/Cp/cp_verbose" + self.basedir = "cli/Cp/ticket_2027" self.set_up_grid(oneshare=True) # Write a test file, which we'll copy to the grid. 
diff --git a/src/allmydata/test/cli/test_create.py b/src/allmydata/test/cli/test_create.py index f013c0205..aee07a671 100644 --- a/src/allmydata/test/cli/test_create.py +++ b/src/allmydata/test/cli/test_create.py @@ -52,13 +52,8 @@ class Config(unittest.TestCase): create_node.write_node_config(f, opts) create_node.write_client_config(f, opts) - config = configutil.get_config(fname) # should succeed, no exceptions - configutil.validate_config( - fname, - config, - client._valid_config(), - ) + client.read_config(d, "") @defer.inlineCallbacks def test_client(self): diff --git a/src/allmydata/test/cli/test_daemonize.py b/src/allmydata/test/cli/test_daemonize.py deleted file mode 100644 index b1365329a..000000000 --- a/src/allmydata/test/cli/test_daemonize.py +++ /dev/null @@ -1,202 +0,0 @@ -import os -from io import ( - BytesIO, -) -from os.path import dirname, join -from mock import patch, Mock -from six.moves import StringIO -from sys import getfilesystemencoding -from twisted.trial import unittest -from allmydata.scripts import runner -from allmydata.scripts.run_common import ( - identify_node_type, - DaemonizeTahoeNodePlugin, - MyTwistdConfig, -) -from allmydata.scripts.tahoe_daemonize import ( - DaemonizeOptions, -) - - -class Util(unittest.TestCase): - def setUp(self): - self.twistd_options = MyTwistdConfig() - self.twistd_options.parseOptions(["DaemonizeTahoeNode"]) - self.options = self.twistd_options.subOptions - - def test_node_type_nothing(self): - tmpdir = self.mktemp() - base = dirname(tmpdir).decode(getfilesystemencoding()) - - t = identify_node_type(base) - - self.assertIs(None, t) - - def test_node_type_introducer(self): - tmpdir = self.mktemp() - base = dirname(tmpdir).decode(getfilesystemencoding()) - with open(join(dirname(tmpdir), 'introducer.tac'), 'w') as f: - f.write("test placeholder") - - t = identify_node_type(base) - - self.assertEqual(u"introducer", t) - - def test_daemonize(self): - tmpdir = self.mktemp() - plug = DaemonizeTahoeNodePlugin('client', tmpdir) - - with patch('twisted.internet.reactor') as r: - def call(fn, *args, **kw): - fn() - r.stop = lambda: None - r.callWhenRunning = call - service = plug.makeService(self.options) - service.parent = Mock() - service.startService() - - self.assertTrue(service is not None) - - def test_daemonize_no_keygen(self): - tmpdir = self.mktemp() - stderr = BytesIO() - plug = DaemonizeTahoeNodePlugin('key-generator', tmpdir) - - with patch('twisted.internet.reactor') as r: - def call(fn, *args, **kw): - d = fn() - d.addErrback(lambda _: None) # ignore the error we'll trigger - r.callWhenRunning = call - service = plug.makeService(self.options) - service.stderr = stderr - service.parent = Mock() - # we'll raise ValueError because there's no key-generator - # .. BUT we do this in an async function called via - # "callWhenRunning" .. 
hence using a hook - d = service.set_hook('running') - service.startService() - def done(f): - self.assertIn( - "key-generator support removed", - stderr.getvalue(), - ) - return None - d.addBoth(done) - return d - - def test_daemonize_unknown_nodetype(self): - tmpdir = self.mktemp() - plug = DaemonizeTahoeNodePlugin('an-unknown-service', tmpdir) - - with patch('twisted.internet.reactor') as r: - def call(fn, *args, **kw): - fn() - r.stop = lambda: None - r.callWhenRunning = call - service = plug.makeService(self.options) - service.parent = Mock() - with self.assertRaises(ValueError) as ctx: - service.startService() - self.assertIn( - "unknown nodetype", - str(ctx.exception) - ) - - def test_daemonize_options(self): - parent = runner.Options() - opts = DaemonizeOptions() - opts.parent = parent - opts.parseArgs() - - # just gratuitous coverage, ensureing we don't blow up on - # these methods. - opts.getSynopsis() - opts.getUsage() - - -class RunDaemonizeTests(unittest.TestCase): - - def setUp(self): - # no test should change our working directory - self._working = os.path.abspath('.') - d = super(RunDaemonizeTests, self).setUp() - self._reactor = patch('twisted.internet.reactor') - self._reactor.stop = lambda: None - self._twistd = patch('allmydata.scripts.run_common.twistd') - self.node_dir = self.mktemp() - os.mkdir(self.node_dir) - for cm in [self._reactor, self._twistd]: - cm.__enter__() - return d - - def tearDown(self): - d = super(RunDaemonizeTests, self).tearDown() - for cm in [self._reactor, self._twistd]: - cm.__exit__(None, None, None) - # Note: if you raise an exception (e.g. via self.assertEqual - # or raise RuntimeError) it is apparently just ignored and the - # test passes anyway... - if self._working != os.path.abspath('.'): - print("WARNING: a test just changed the working dir; putting it back") - os.chdir(self._working) - return d - - def _placeholder_nodetype(self, nodetype): - fname = join(self.node_dir, '{}.tac'.format(nodetype)) - with open(fname, 'w') as f: - f.write("test placeholder") - - def test_daemonize_defaults(self): - self._placeholder_nodetype('introducer') - - config = runner.parse_or_exit_with_explanation([ - # have to do this so the tests don't much around in - # ~/.tahoe (the default) - '--node-directory', self.node_dir, - 'daemonize', - ]) - i, o, e = StringIO(), StringIO(), StringIO() - with patch('allmydata.scripts.runner.sys') as s: - exit_code = [None] - def _exit(code): - exit_code[0] = code - s.exit = _exit - runner.dispatch(config, i, o, e) - - self.assertEqual(0, exit_code[0]) - - def test_daemonize_wrong_nodetype(self): - self._placeholder_nodetype('invalid') - - config = runner.parse_or_exit_with_explanation([ - # have to do this so the tests don't much around in - # ~/.tahoe (the default) - '--node-directory', self.node_dir, - 'daemonize', - ]) - i, o, e = StringIO(), StringIO(), StringIO() - with patch('allmydata.scripts.runner.sys') as s: - exit_code = [None] - def _exit(code): - exit_code[0] = code - s.exit = _exit - runner.dispatch(config, i, o, e) - - self.assertEqual(0, exit_code[0]) - - def test_daemonize_run(self): - self._placeholder_nodetype('client') - - config = runner.parse_or_exit_with_explanation([ - # have to do this so the tests don't much around in - # ~/.tahoe (the default) - '--node-directory', self.node_dir, - 'daemonize', - ]) - with patch('allmydata.scripts.runner.sys') as s: - exit_code = [None] - def _exit(code): - exit_code[0] = code - s.exit = _exit - from allmydata.scripts.tahoe_daemonize import daemonize - 
daemonize(config) diff --git a/src/allmydata/test/cli/test_invite.py b/src/allmydata/test/cli/test_invite.py index 0daeb5840..f356e18de 100644 --- a/src/allmydata/test/cli/test_invite.py +++ b/src/allmydata/test/cli/test_invite.py @@ -8,7 +8,9 @@ from twisted.internet import defer from ..common_util import run_cli from ..no_network import GridTestMixin from .common import CLITestMixin - +from ...client import ( + read_config, +) class _FakeWormhole(object): @@ -81,9 +83,19 @@ class Join(GridTestMixin, CLITestMixin, unittest.TestCase): ) self.assertEqual(0, rc) + + config = read_config(node_dir, u"") + self.assertIn( + "pb://foo", + set( + furl + for (furl, cache) + in config.get_introducer_configuration().values() + ), + ) + with open(join(node_dir, 'tahoe.cfg'), 'r') as f: config = f.read() - self.assertIn("pb://foo", config) self.assertIn(u"somethinghopefullyunique", config) @defer.inlineCallbacks diff --git a/src/allmydata/test/cli/test_run.py b/src/allmydata/test/cli/test_run.py new file mode 100644 index 000000000..d27791f34 --- /dev/null +++ b/src/allmydata/test/cli/test_run.py @@ -0,0 +1,127 @@ +""" +Tests for ``allmydata.scripts.tahoe_run``. +""" + +from six.moves import ( + StringIO, +) + +from testtools.matchers import ( + Contains, + Equals, +) + +from twisted.python.filepath import ( + FilePath, +) +from twisted.internet.testing import ( + MemoryReactor, +) +from twisted.internet.test.modulehelpers import ( + AlternateReactor, +) + +from ...scripts.tahoe_run import ( + DaemonizeTheRealService, +) + +from ...scripts.runner import ( + parse_options +) +from ..common import ( + SyncTestCase, +) + +class DaemonizeTheRealServiceTests(SyncTestCase): + """ + Tests for ``DaemonizeTheRealService``. + """ + def _verify_error(self, config, expected): + """ + Assert that when ``DaemonizeTheRealService`` is started using the given + configuration it writes the given message to stderr and stops the + reactor. + + :param bytes config: The contents of a ``tahoe.cfg`` file to give to + the service. + + :param bytes expected: A string to assert appears in stderr after the + service starts. + """ + nodedir = FilePath(self.mktemp()) + nodedir.makedirs() + nodedir.child("tahoe.cfg").setContent(config) + nodedir.child("tahoe-client.tac").touch() + + options = parse_options(["run", nodedir.path]) + stdout = options.stdout = StringIO() + stderr = options.stderr = StringIO() + run_options = options.subOptions + + reactor = MemoryReactor() + with AlternateReactor(reactor): + service = DaemonizeTheRealService( + "client", + nodedir.path, + run_options, + ) + service.startService() + + # We happen to know that the service uses reactor.callWhenRunning + # to schedule all its work (though I couldn't tell you *why*). + # Make sure those scheduled calls happen. + waiting = reactor.whenRunningHooks[:] + del reactor.whenRunningHooks[:] + for f, a, k in waiting: + f(*a, **k) + + self.assertThat( + reactor.hasStopped, + Equals(True), + ) + + self.assertThat( + stdout.getvalue(), + Equals(""), + ) + + self.assertThat( + stderr.getvalue(), + Contains(expected), + ) + + def test_unknown_config(self): + """ + If there are unknown items in the node configuration file then a short + message introduced with ``"Configuration error:"`` is written to + stderr. + """ + self._verify_error("[invalid-section]\n", "Configuration error:") + + def test_port_assignment_required(self): + """ + If ``tub.port`` is configured to use port 0 then a short message rejecting + this configuration is written to stderr. 
+ """ + self._verify_error( + """ + [node] + tub.port = 0 + """, + "tub.port cannot be 0", + ) + + def test_privacy_error(self): + """ + If ``reveal-IP-address`` is set to false and the tub is not configured in + a way that avoids revealing the node's IP address, a short message + about privacy is written to stderr. + """ + self._verify_error( + """ + [node] + tub.port = AUTO + reveal-IP-address = false + """, + "Privacy requested", + ) diff --git a/src/allmydata/test/cli/test_start.py b/src/allmydata/test/cli/test_start.py deleted file mode 100644 index 42c70f024..000000000 --- a/src/allmydata/test/cli/test_start.py +++ /dev/null @@ -1,273 +0,0 @@ -import os -import shutil -import subprocess -from os.path import join -from mock import patch -from six.moves import StringIO -from functools import partial - -from twisted.trial import unittest -from allmydata.scripts import runner - - -#@patch('twisted.internet.reactor') -@patch('allmydata.scripts.tahoe_start.subprocess') -class RunStartTests(unittest.TestCase): - - def setUp(self): - d = super(RunStartTests, self).setUp() - self.node_dir = self.mktemp() - os.mkdir(self.node_dir) - return d - - def _placeholder_nodetype(self, nodetype): - fname = join(self.node_dir, '{}.tac'.format(nodetype)) - with open(fname, 'w') as f: - f.write("test placeholder") - - def _pid_file(self, pid): - fname = join(self.node_dir, 'twistd.pid') - with open(fname, 'w') as f: - f.write(u"{}\n".format(pid)) - - def _logs(self, logs): - os.mkdir(join(self.node_dir, 'logs')) - fname = join(self.node_dir, 'logs', 'twistd.log') - with open(fname, 'w') as f: - f.write(logs) - - def test_start_defaults(self, _subprocess): - self._placeholder_nodetype('client') - self._pid_file(1234) - self._logs('one log\ntwo log\nred log\nblue log\n') - - config = runner.parse_or_exit_with_explanation([ - # have to do this so the tests don't muck around in - # ~/.tahoe (the default) - '--node-directory', self.node_dir, - 'start', - ]) - i, o, e = StringIO(), StringIO(), StringIO() - try: - with patch('allmydata.scripts.tahoe_start.os'): - with patch('allmydata.scripts.runner.sys') as s: - exit_code = [None] - def _exit(code): - exit_code[0] = code - s.exit = _exit - - def launch(*args, **kw): - with open(join(self.node_dir, 'logs', 'twistd.log'), 'a') as f: - f.write('client running\n') # "the magic" - _subprocess.check_call = launch - runner.dispatch(config, i, o, e) - except Exception: - pass - - self.assertEqual([0], exit_code) - self.assertTrue('Node has started' in o.getvalue()) - - def test_start_fails(self, _subprocess): - self._placeholder_nodetype('client') - self._logs('existing log line\n') - - config = runner.parse_or_exit_with_explanation([ - # have to do this so the tests don't muck around in - # ~/.tahoe (the default) - '--node-directory', self.node_dir, - 'start', - ]) - - i, o, e = StringIO(), StringIO(), StringIO() - with patch('allmydata.scripts.tahoe_start.time') as t: - with patch('allmydata.scripts.runner.sys') as s: - exit_code = [None] - def _exit(code): - exit_code[0] = code - s.exit = _exit - - thetime = [0] - def _time(): - thetime[0] += 0.1 - return thetime[0] - t.time = _time - - def launch(*args, **kw): - with open(join(self.node_dir, 'logs', 'twistd.log'), 'a') as f: - f.write('a new log line\n') - _subprocess.check_call = launch - - runner.dispatch(config, i, o, e) - - # should print out the collected logs and an error-code - self.assertTrue("a new log line" in o.getvalue()) - self.assertEqual([1], exit_code) - - def test_start_subprocess_fails(self, 
_subprocess): - self._placeholder_nodetype('client') - self._logs('existing log line\n') - - config = runner.parse_or_exit_with_explanation([ - # have to do this so the tests don't muck around in - # ~/.tahoe (the default) - '--node-directory', self.node_dir, - 'start', - ]) - - i, o, e = StringIO(), StringIO(), StringIO() - with patch('allmydata.scripts.tahoe_start.time'): - with patch('allmydata.scripts.runner.sys') as s: - # undo patch for the exception-class - _subprocess.CalledProcessError = subprocess.CalledProcessError - exit_code = [None] - def _exit(code): - exit_code[0] = code - s.exit = _exit - - def launch(*args, **kw): - raise subprocess.CalledProcessError(42, "tahoe") - _subprocess.check_call = launch - - runner.dispatch(config, i, o, e) - - # should get our "odd" error-code - self.assertEqual([42], exit_code) - - def test_start_help(self, _subprocess): - self._placeholder_nodetype('client') - - std = StringIO() - with patch('sys.stdout') as stdo: - stdo.write = std.write - try: - runner.parse_or_exit_with_explanation([ - # have to do this so the tests don't muck around in - # ~/.tahoe (the default) - '--node-directory', self.node_dir, - 'start', - '--help', - ], stdout=std) - self.fail("Should get exit") - except SystemExit as e: - print(e) - - self.assertIn( - "Usage:", - std.getvalue() - ) - - def test_start_unknown_node_type(self, _subprocess): - self._placeholder_nodetype('bogus') - - config = runner.parse_or_exit_with_explanation([ - # have to do this so the tests don't muck around in - # ~/.tahoe (the default) - '--node-directory', self.node_dir, - 'start', - ]) - - i, o, e = StringIO(), StringIO(), StringIO() - with patch('allmydata.scripts.runner.sys') as s: - exit_code = [None] - def _exit(code): - exit_code[0] = code - s.exit = _exit - - runner.dispatch(config, i, o, e) - - # should print out the collected logs and an error-code - self.assertIn( - "is not a recognizable node directory", - e.getvalue() - ) - self.assertEqual([1], exit_code) - - def test_start_nodedir_not_dir(self, _subprocess): - shutil.rmtree(self.node_dir) - assert not os.path.isdir(self.node_dir) - - config = runner.parse_or_exit_with_explanation([ - # have to do this so the tests don't muck around in - # ~/.tahoe (the default) - '--node-directory', self.node_dir, - 'start', - ]) - - i, o, e = StringIO(), StringIO(), StringIO() - with patch('allmydata.scripts.runner.sys') as s: - exit_code = [None] - def _exit(code): - exit_code[0] = code - s.exit = _exit - - runner.dispatch(config, i, o, e) - - # should print out the collected logs and an error-code - self.assertIn( - "does not look like a directory at all", - e.getvalue() - ) - self.assertEqual([1], exit_code) - - -class RunTests(unittest.TestCase): - """ - Tests confirming end-user behavior of CLI commands - """ - - def setUp(self): - d = super(RunTests, self).setUp() - self.addCleanup(partial(os.chdir, os.getcwd())) - self.node_dir = self.mktemp() - os.mkdir(self.node_dir) - return d - - @patch('twisted.internet.reactor') - def test_run_invalid_config(self, reactor): - """ - Configuration that's invalid should be obvious to the user - """ - - def cwr(fn, *args, **kw): - fn() - - def stop(*args, **kw): - stopped.append(None) - stopped = [] - reactor.callWhenRunning = cwr - reactor.stop = stop - - with open(os.path.join(self.node_dir, "client.tac"), "w") as f: - f.write('test') - - with open(os.path.join(self.node_dir, "tahoe.cfg"), "w") as f: - f.write( - "[invalid section]\n" - "foo = bar\n" - ) - - config = 
runner.parse_or_exit_with_explanation([ - # have to do this so the tests don't muck around in - # ~/.tahoe (the default) - '--node-directory', self.node_dir, - 'run', - ]) - - i, o, e = StringIO(), StringIO(), StringIO() - d = runner.dispatch(config, i, o, e) - - self.assertFailure(d, SystemExit) - - output = e.getvalue() - # should print out the collected logs and an error-code - self.assertIn( - "invalid section", - output, - ) - self.assertIn( - "Configuration error:", - output, - ) - # ensure reactor.stop was actually called - self.assertEqual([None], stopped) - return d diff --git a/src/allmydata/test/cli_node_api.py b/src/allmydata/test/cli_node_api.py index 8453fbca2..34d73a199 100644 --- a/src/allmydata/test/cli_node_api.py +++ b/src/allmydata/test/cli_node_api.py @@ -5,7 +5,6 @@ __all__ = [ "on_stdout", "on_stdout_and_stderr", "on_different", - "wait_for_exit", ] import os @@ -14,8 +13,11 @@ from errno import ENOENT import attr +from eliot import ( + log_call, +) + from twisted.internet.error import ( - ProcessDone, ProcessTerminated, ProcessExitedAlready, ) @@ -25,9 +27,6 @@ from twisted.internet.interfaces import ( from twisted.python.filepath import ( FilePath, ) -from twisted.python.runtime import ( - platform, -) from twisted.internet.protocol import ( Protocol, ProcessProtocol, @@ -42,11 +41,9 @@ from twisted.internet.task import ( from ..client import ( _Client, ) -from ..scripts.tahoe_stop import ( - COULD_NOT_STOP, -) from ..util.eliotutil import ( inline_callbacks, + log_call_deferred, ) class Expect(Protocol, object): @@ -156,6 +153,7 @@ class CLINodeAPI(object): env=os.environ, ) + @log_call(action_type="test:cli-api:run", include_args=["extra_tahoe_args"]) def run(self, protocol, extra_tahoe_args=()): """ Start the node running. @@ -176,28 +174,21 @@ class CLINodeAPI(object): if ENOENT != e.errno: raise - def stop(self, protocol): - self._execute( - protocol, - [u"stop", self.basedir.asTextMode().path], - ) + @log_call_deferred(action_type="test:cli-api:stop") + def stop(self): + return self.stop_and_wait() + @log_call_deferred(action_type="test:cli-api:stop-and-wait") @inline_callbacks def stop_and_wait(self): - if platform.isWindows(): - # On Windows there is no PID file and no "tahoe stop". - if self.process is not None: - while True: - try: - self.process.signalProcess("TERM") - except ProcessExitedAlready: - break - else: - yield deferLater(self.reactor, 0.1, lambda: None) - else: - protocol, ended = wait_for_exit() - self.stop(protocol) - yield ended + if self.process is not None: + while True: + try: + self.process.signalProcess("TERM") + except ProcessExitedAlready: + break + else: + yield deferLater(self.reactor, 0.1, lambda: None) def active(self): # By writing this file, we get two minutes before the client will @@ -208,28 +199,9 @@ class CLINodeAPI(object): def _check_cleanup_reason(self, reason): # Let it fail because the process has already exited. 
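+        # (This is what lets cleanup() below treat an already-exited process
+        # as success: stop_and_wait() may fail with ProcessTerminated and
+        # this errback swallows that failure.)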
reason.trap(ProcessTerminated) - if reason.value.exitCode != COULD_NOT_STOP: - return reason return None def cleanup(self): stopping = self.stop_and_wait() stopping.addErrback(self._check_cleanup_reason) return stopping - - -class _WaitForEnd(ProcessProtocol, object): - def __init__(self, ended): - self._ended = ended - - def processEnded(self, reason): - if reason.check(ProcessDone): - self._ended.callback(None) - else: - self._ended.errback(reason) - - -def wait_for_exit(): - ended = Deferred() - protocol = _WaitForEnd(ended) - return protocol, ended diff --git a/src/allmydata/test/common.py b/src/allmydata/test/common.py index 15d677f89..fde92fb59 100644 --- a/src/allmydata/test/common.py +++ b/src/allmydata/test/common.py @@ -11,6 +11,8 @@ __all__ = [ "skipIf", ] +from past.builtins import chr as byteschr, unicode + import os, random, struct import six import tempfile @@ -62,10 +64,16 @@ from twisted.internet.endpoints import AdoptedStreamServerEndpoint from twisted.trial.unittest import TestCase as _TrialTestCase from allmydata import uri -from allmydata.interfaces import IMutableFileNode, IImmutableFileNode,\ - NotEnoughSharesError, ICheckable, \ - IMutableUploadable, SDMF_VERSION, \ - MDMF_VERSION +from allmydata.interfaces import ( + IMutableFileNode, + IImmutableFileNode, + NotEnoughSharesError, + ICheckable, + IMutableUploadable, + SDMF_VERSION, + MDMF_VERSION, + IAddressFamily, +) from allmydata.check_results import CheckResults, CheckAndRepairResults, \ DeepCheckResults, DeepCheckAndRepairResults from allmydata.storage_client import StubServer @@ -81,6 +89,9 @@ from allmydata.client import ( config_from_string, create_client_from_config, ) +from allmydata.scripts.common import ( + write_introducer, + ) from ..crypto import ( ed25519, @@ -110,7 +121,6 @@ class MemoryIntroducerClient(object): nickname = attr.ib() my_version = attr.ib() oldest_supported = attr.ib() - app_versions = attr.ib() sequencer = attr.ib() cache_filepath = attr.ib() @@ -212,7 +222,7 @@ class UseNode(object): :ivar FilePath basedir: The base directory of the node. - :ivar bytes introducer_furl: The introducer furl with which to + :ivar str introducer_furl: The introducer furl with which to configure the client. 
:ivar dict[bytes, bytes] node_config: Configuration items for the *node* @@ -222,24 +232,25 @@ class UseNode(object): """ plugin_config = attr.ib() storage_plugin = attr.ib() - basedir = attr.ib() - introducer_furl = attr.ib() + basedir = attr.ib(validator=attr.validators.instance_of(FilePath)) + introducer_furl = attr.ib(validator=attr.validators.instance_of(str), + converter=six.ensure_str) node_config = attr.ib(default=attr.Factory(dict)) config = attr.ib(default=None) def setUp(self): def format_config_items(config): - return b"\n".join( - b" = ".join((key, value)) + return "\n".join( + " = ".join((key, value)) for (key, value) in config.items() ) if self.plugin_config is None: - plugin_config_section = b"" + plugin_config_section = "" else: - plugin_config_section = b""" + plugin_config_section = """ [storageclient.plugins.{storage_plugin}] {config} """.format( @@ -247,6 +258,11 @@ class UseNode(object): config=format_config_items(self.plugin_config), ) + write_introducer( + self.basedir, + "default", + self.introducer_furl, + ) self.config = config_from_string( self.basedir.asTextMode().path, "tub.port", @@ -255,11 +271,9 @@ class UseNode(object): {node_config} [client] -introducer.furl = {furl} storage.plugins = {storage_plugin} {plugin_config_section} """.format( - furl=self.introducer_furl, storage_plugin=self.storage_plugin, node_config=format_config_items(self.node_config), plugin_config_section=plugin_config_section, @@ -392,7 +406,7 @@ class DummyProducer(object): pass @implementer(IImmutableFileNode) -class FakeCHKFileNode(object): +class FakeCHKFileNode(object): # type: ignore # incomplete implementation """I provide IImmutableFileNode, but all of my data is stored in a class-level dictionary.""" @@ -530,7 +544,7 @@ def create_chk_filenode(contents, all_contents): @implementer(IMutableFileNode, ICheckable) -class FakeMutableFileNode(object): +class FakeMutableFileNode(object): # type: ignore # incomplete implementation """I provide IMutableFileNode, but all of my data is stored in a class-level dictionary.""" @@ -811,13 +825,18 @@ class WebErrorMixin(object): code=None, substring=None, response_substring=None, callable=None, *args, **kwargs): # returns a Deferred with the response body - assert substring is None or isinstance(substring, str) + if isinstance(substring, bytes): + substring = unicode(substring, "ascii") + if isinstance(response_substring, unicode): + response_substring = response_substring.encode("ascii") + assert substring is None or isinstance(substring, unicode) + assert response_substring is None or isinstance(response_substring, bytes) assert callable def _validate(f): if code is not None: - self.failUnlessEqual(f.value.status, str(code), which) + self.failUnlessEqual(f.value.status, b"%d" % code, which) if substring: - code_string = str(f) + code_string = unicode(f) self.failUnless(substring in code_string, "%s: substring '%s' not in '%s'" % (which, substring, code_string)) @@ -1051,7 +1070,7 @@ def _corrupt_share_data_last_byte(data, debug=False): sharedatasize = struct.unpack(">Q", data[0x0c+0x08:0x0c+0x0c+8])[0] offset = 0x0c+0x44+sharedatasize-1 - newdata = data[:offset] + chr(ord(data[offset])^0xFF) + data[offset+1:] + newdata = data[:offset] + byteschr(ord(data[offset:offset+1])^0xFF) + data[offset+1:] if debug: log.msg("testing: flipping all bits of byte at offset %d: %r, newdata: %r" % (offset, data[offset], newdata[offset])) return newdata @@ -1079,7 +1098,7 @@ def _corrupt_crypttext_hash_tree_byte_x221(data, debug=False): assert sharevernum in 
(1, 2), "This test is designed to corrupt immutable shares of v1 or v2 in specific ways." if debug: log.msg("original data: %r" % (data,)) - return data[:0x0c+0x221] + chr(ord(data[0x0c+0x221])^0x02) + data[0x0c+0x2210+1:] + return data[:0x0c+0x221] + byteschr(ord(data[0x0c+0x221:0x0c+0x221+1])^0x02) + data[0x0c+0x2210+1:] def _corrupt_block_hashes(data, debug=False): """Scramble the file data -- the field containing the block hash tree @@ -1139,6 +1158,28 @@ def _corrupt_uri_extension(data, debug=False): return corrupt_field(data, 0x0c+uriextoffset, uriextlen) + +@attr.s +@implementer(IAddressFamily) +class ConstantAddresses(object): + """ + Pretend to provide support for some address family but just hand out + canned responses. + """ + _listener = attr.ib(default=None) + _handler = attr.ib(default=None) + + def get_listener(self): + if self._listener is None: + raise Exception("{!r} has no listener.") + return self._listener + + def get_client_endpoint(self): + if self._handler is None: + raise Exception("{!r} has no client endpoint.") + return self._handler + + class _TestCaseMixin(object): """ A mixin for ``TestCase`` which collects helpful behaviors for subclasses. @@ -1151,8 +1192,9 @@ class _TestCaseMixin(object): test (including setUp and tearDown messages). * trial-compatible mktemp method * unittest2-compatible assertRaises helper - * Automatic cleanup of tempfile.tempdir mutation (pervasive through the - Tahoe-LAFS test suite). + * Automatic cleanup of tempfile.tempdir mutation (once pervasive through + the Tahoe-LAFS test suite, perhaps gone now but someone should verify + this). """ def setUp(self): # Restore the original temporary directory. Node ``init_tempdir`` diff --git a/src/allmydata/test/common_util.py b/src/allmydata/test/common_util.py index e3f5cf750..2a70cff3a 100644 --- a/src/allmydata/test/common_util.py +++ b/src/allmydata/test/common_util.py @@ -1,10 +1,18 @@ from __future__ import print_function +from future.utils import PY2, native_str, bchr, binary_type +from future.builtins import str as future_str +from past.builtins import unicode + import os import time import signal from random import randrange from six.moves import StringIO +from io import ( + TextIOWrapper, + BytesIO, +) from twisted.internet import reactor, defer from twisted.python import failure @@ -13,9 +21,6 @@ from twisted.trial import unittest from ..util.assertutil import precondition from ..scripts import runner from allmydata.util.encodingutil import unicode_platform, get_filesystem_encoding, get_io_encoding -# Imported for backwards compatibility: -from future.utils import bord, bchr, binary_type -from past.builtins import unicode def skip_if_cannot_represent_filename(u): @@ -35,27 +40,134 @@ def skip_if_cannot_represent_argv(u): except UnicodeEncodeError: raise unittest.SkipTest("A non-ASCII argv could not be encoded on this platform.") -def run_cli(verb, *args, **kwargs): - precondition(not [True for arg in args if not isinstance(arg, str)], - "arguments to do_cli must be strs -- convert using unicode_to_argv", args=args) - nodeargs = kwargs.get("nodeargs", []) + +def _getvalue(io): + """ + Read out the complete contents of a file-like object. + """ + io.seek(0) + return io.read() + + +def run_cli_native(verb, *args, **kwargs): + """ + Run a Tahoe-LAFS CLI command specified as bytes (on Python 2) or Unicode + (on Python 3); basically, it accepts a native string. + + Most code should prefer ``run_cli_unicode`` which deals with all the + necessary encoding considerations. 
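
The ``native_str`` the docstring above refers to comes from ``future.utils``; as a two-line illustration (assuming only that the ``future`` package is installed) of why one name covers both interpreters:

from future.utils import PY2, native_str

# On Python 2 the native string type is the bytes type; on Python 3 it
# is the unicode str type.
assert native_str is (bytes if PY2 else str)
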
+ + :param native_str verb: The command to run. For example, ``"create-node"``. + + :param [native_str] args: The arguments to pass to the command. For example, + ``("--hostname=localhost",)``. + + :param [native_str] nodeargs: Extra arguments to pass to the Tahoe executable + before ``verb``. + + :param native_str stdin: Text to pass to the command via stdin. + + :param NoneType|str encoding: The name of an encoding which stdout and + stderr will be configured to use. ``None`` means stdout and stderr + will accept bytes and unicode and use the default system encoding for + translating between them. + """ + nodeargs = kwargs.pop("nodeargs", []) + encoding = kwargs.pop("encoding", None) + precondition( + all(isinstance(arg, native_str) for arg in [verb] + nodeargs + list(args)), + "arguments to run_cli must be a native string -- convert using unicode_to_argv", + verb=verb, + args=args, + nodeargs=nodeargs, + ) argv = nodeargs + [verb] + list(args) stdin = kwargs.get("stdin", "") - stdout = StringIO() - stderr = StringIO() + if encoding is None: + # The original behavior, the Python 2 behavior, is to accept either + # bytes or unicode and try to automatically encode or decode as + # necessary. This works okay for ASCII and if LANG is set + # appropriately. These aren't great constraints so we should move + # away from this behavior. + stdout = StringIO() + stderr = StringIO() + else: + # The new behavior, the Python 3 behavior, is to accept unicode and + # encode it using a specific encoding. For older versions of Python + # 3, the encoding is determined from LANG (bad) but for newer Python + # 3, the encoding is always utf-8 (good). Tests can pass in different + # encodings to exercise different behaviors. + stdout = TextIOWrapper(BytesIO(), encoding) + stderr = TextIOWrapper(BytesIO(), encoding) d = defer.succeed(argv) d.addCallback(runner.parse_or_exit_with_explanation, stdout=stdout) d.addCallback(runner.dispatch, stdin=StringIO(stdin), stdout=stdout, stderr=stderr) def _done(rc): - return 0, stdout.getvalue(), stderr.getvalue() + return 0, _getvalue(stdout), _getvalue(stderr) def _err(f): f.trap(SystemExit) - return f.value.code, stdout.getvalue(), stderr.getvalue() + return f.value.code, _getvalue(stdout), _getvalue(stderr) d.addCallbacks(_done, _err) return d + +def run_cli_unicode(verb, argv, nodeargs=None, stdin=None, encoding=None): + """ + Run a Tahoe-LAFS CLI command. + + :param unicode verb: The command to run. For example, ``u"create-node"``. + + :param [unicode] argv: The arguments to pass to the command. For example, + ``[u"--hostname=localhost"]``. + + :param [unicode] nodeargs: Extra arguments to pass to the Tahoe executable + before ``verb``. + + :param unicode stdin: Text to pass to the command via stdin. + + :param NoneType|str encoding: The name of an encoding to use for all + bytes/unicode conversions necessary *and* the encoding to cause stdio + to declare with its ``encoding`` attribute. ``None`` means ASCII will + be used and no declaration will be made at all. + """ + if nodeargs is None: + nodeargs = [] + precondition( + all(isinstance(arg, future_str) for arg in [verb] + nodeargs + argv), + "arguments to run_cli_unicode must be unicode", + verb=verb, + nodeargs=nodeargs, + argv=argv, + ) + codec = encoding or "ascii" + if PY2: + encode = lambda t: None if t is None else t.encode(codec) + else: + # On Python 3 command-line parsing expects Unicode! 
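
A runnable sketch of the ``TextIOWrapper(BytesIO(), encoding)`` capture pattern described above, mirroring what ``_getvalue`` does with the result; the literals here are illustrative:

from io import BytesIO, TextIOWrapper

stdout = TextIOWrapper(BytesIO(), "utf-8")
stdout.write(u"h\xe9llo")            # text goes in ...
stdout.seek(0)                       # ... seek(0) flushes and rewinds ...
assert stdout.read() == u"h\xe9llo"  # ... and text comes back out,
assert stdout.buffer.getvalue() == b"h\xc3\xa9llo"  # encoded underneath
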
+ encode = lambda t: t + d = run_cli_native( + encode(verb), + nodeargs=list(encode(arg) for arg in nodeargs), + stdin=encode(stdin), + encoding=encoding, + *list(encode(arg) for arg in argv) + ) + def maybe_decode(result): + code, stdout, stderr = result + if isinstance(stdout, bytes): + stdout = stdout.decode(codec) + if isinstance(stderr, bytes): + stderr = stderr.decode(codec) + return code, stdout, stderr + d.addCallback(maybe_decode) + return d + + +run_cli = run_cli_native + + def parse_cli(*argv): # This parses the CLI options (synchronously), and returns the Options # argument, or throws usage.UsageError if something went wrong. @@ -69,13 +181,12 @@ def insecurerandstr(n): return b''.join(map(bchr, map(randrange, [0]*n, [256]*n))) def flip_bit(good, which): - # TODO Probs need to update with bchr/bord as with flip_one_bit, below. - # flip the low-order bit of good[which] + """Flip the low-order bit of good[which].""" if which == -1: - pieces = good[:which], good[-1:], "" + pieces = good[:which], good[-1:], b"" else: pieces = good[:which], good[which:which+1], good[which+1:] - return pieces[0] + chr(ord(pieces[1]) ^ 0x01) + pieces[2] + return pieces[0] + bchr(ord(pieces[1]) ^ 0x01) + pieces[2] def flip_one_bit(s, offset=0, size=None): """ flip one random bit of the string s, in a byte greater than or equal to offset and less @@ -84,7 +195,7 @@ def flip_one_bit(s, offset=0, size=None): if size is None: size=len(s)-offset i = randrange(offset, offset+size) - result = s[:i] + bchr(bord(s[i])^(0x01< is not JSON-encodeable` from past.builtins import unicode as str +from future.utils import PY3 __all__ = [ "RUN_TEST", @@ -30,6 +31,9 @@ from twisted.internet.defer import ( maybeDeferred, ) +from ..util.jsonbytes import BytesJSONEncoder + + _NAME = Field.for_types( u"name", [str], @@ -60,6 +64,14 @@ def eliot_logged_test(f): class storage(object): pass + + # On Python 3, we want to use our custom JSON encoder when validating + # messages can be encoded to JSON: + if PY3: + capture = lambda f : capture_logging(None, encoder_=BytesJSONEncoder)(f) + else: + capture = lambda f : capture_logging(None)(f) + @wraps(f) def run_and_republish(self, *a, **kw): # Unfortunately the only way to get at the global/default logger... @@ -84,7 +96,7 @@ def eliot_logged_test(f): # can finish the test's action. storage.action.finish() - @capture_logging(None) + @capture def run(self, logger): # Record the MemoryLogger for later message extraction. storage.logger = logger diff --git a/src/allmydata/test/mutable/test_checker.py b/src/allmydata/test/mutable/test_checker.py index 666b70ce3..11ba776fd 100644 --- a/src/allmydata/test/mutable/test_checker.py +++ b/src/allmydata/test/mutable/test_checker.py @@ -1,3 +1,15 @@ +""" +Ported to Python 3. 
+""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 + from twisted.trial import unittest from foolscap.api import flushEventualQueue from allmydata.monitor import Monitor @@ -22,7 +34,7 @@ class Checker(unittest.TestCase, CheckerMixin, PublishMixin): return d def test_check_no_shares(self): - for shares in self._storage._peers.values(): + for shares in list(self._storage._peers.values()): shares.clear() d = self._fn.check(Monitor()) d.addCallback(self.check_bad, "test_check_no_shares") @@ -31,7 +43,7 @@ class Checker(unittest.TestCase, CheckerMixin, PublishMixin): def test_check_mdmf_no_shares(self): d = self.publish_mdmf() def _then(ignored): - for share in self._storage._peers.values(): + for share in list(self._storage._peers.values()): share.clear() d.addCallback(_then) d.addCallback(lambda ignored: @@ -40,8 +52,8 @@ class Checker(unittest.TestCase, CheckerMixin, PublishMixin): return d def test_check_not_enough_shares(self): - for shares in self._storage._peers.values(): - for shnum in shares.keys(): + for shares in list(self._storage._peers.values()): + for shnum in list(shares.keys()): if shnum > 0: del shares[shnum] d = self._fn.check(Monitor()) @@ -51,8 +63,8 @@ class Checker(unittest.TestCase, CheckerMixin, PublishMixin): def test_check_mdmf_not_enough_shares(self): d = self.publish_mdmf() def _then(ignored): - for shares in self._storage._peers.values(): - for shnum in shares.keys(): + for shares in list(self._storage._peers.values()): + for shnum in list(shares.keys()): if shnum > 0: del shares[shnum] d.addCallback(_then) @@ -83,7 +95,7 @@ class Checker(unittest.TestCase, CheckerMixin, PublishMixin): # On 8 of the shares, corrupt the beginning of the share data. # The signature check during the servermap update won't catch this. d.addCallback(lambda ignored: - corrupt(None, self._storage, "share_data", range(8))) + corrupt(None, self._storage, "share_data", list(range(8)))) # On 2 of the shares, corrupt the end of the share data. # The signature check during the servermap update won't catch # this either, and the retrieval process will have to process @@ -242,14 +254,14 @@ class Checker(unittest.TestCase, CheckerMixin, PublishMixin): return d def test_verify_sdmf_empty(self): - d = self.publish_sdmf("") + d = self.publish_sdmf(b"") d.addCallback(lambda ignored: self._fn.check(Monitor(), verify=True)) d.addCallback(self.check_good, "test_verify_sdmf") d.addCallback(flushEventualQueue) return d def test_verify_mdmf_empty(self): - d = self.publish_mdmf("") + d = self.publish_mdmf(b"") d.addCallback(lambda ignored: self._fn.check(Monitor(), verify=True)) d.addCallback(self.check_good, "test_verify_mdmf") d.addCallback(flushEventualQueue) diff --git a/src/allmydata/test/mutable/test_datahandle.py b/src/allmydata/test/mutable/test_datahandle.py index 39d65557d..1819cba01 100644 --- a/src/allmydata/test/mutable/test_datahandle.py +++ b/src/allmydata/test/mutable/test_datahandle.py @@ -1,17 +1,29 @@ +""" +Ported to Python 3. 
+""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 + from twisted.trial import unittest from allmydata.mutable.publish import MutableData class DataHandle(unittest.TestCase): def setUp(self): - self.test_data = "Test Data" * 50000 + self.test_data = b"Test Data" * 50000 self.uploadable = MutableData(self.test_data) def test_datahandle_read(self): chunk_size = 10 - for i in xrange(0, len(self.test_data), chunk_size): + for i in range(0, len(self.test_data), chunk_size): data = self.uploadable.read(chunk_size) - data = "".join(data) + data = b"".join(data) start = i end = i + chunk_size self.failUnlessEqual(data, self.test_data[start:end]) @@ -28,7 +40,7 @@ class DataHandle(unittest.TestCase): # disturbing the location of the seek pointer. chunk_size = 100 data = self.uploadable.read(chunk_size) - self.failUnlessEqual("".join(data), self.test_data[:chunk_size]) + self.failUnlessEqual(b"".join(data), self.test_data[:chunk_size]) # Now get the size. size = self.uploadable.get_size() @@ -38,4 +50,4 @@ class DataHandle(unittest.TestCase): more_data = self.uploadable.read(chunk_size) start = chunk_size end = chunk_size * 2 - self.failUnlessEqual("".join(more_data), self.test_data[start:end]) + self.failUnlessEqual(b"".join(more_data), self.test_data[start:end]) diff --git a/src/allmydata/test/mutable/test_different_encoding.py b/src/allmydata/test/mutable/test_different_encoding.py index dad96f875..a5165532c 100644 --- a/src/allmydata/test/mutable/test_different_encoding.py +++ b/src/allmydata/test/mutable/test_different_encoding.py @@ -1,3 +1,15 @@ +""" +Ported to Python 3. +""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 + from twisted.trial import unittest from .util import FakeStorage, make_nodemaker @@ -10,7 +22,7 @@ class DifferentEncoding(unittest.TestCase): # create a file with 3-of-20, then modify it with a client configured # to do 3-of-10. #1510 tracks a failure here self.nodemaker.default_encoding_parameters["n"] = 20 - d = self.nodemaker.create_mutable_file("old contents") + d = self.nodemaker.create_mutable_file(b"old contents") def _created(n): filecap = n.get_cap().to_string() del n # we want a new object, not the cached one @@ -19,6 +31,6 @@ class DifferentEncoding(unittest.TestCase): return n2 d.addCallback(_created) def modifier(old_contents, servermap, first_time): - return "new contents" + return b"new contents" d.addCallback(lambda n: n.modify(modifier)) return d diff --git a/src/allmydata/test/mutable/test_exceptions.py b/src/allmydata/test/mutable/test_exceptions.py index ed6cab1b0..6a9b2b575 100644 --- a/src/allmydata/test/mutable/test_exceptions.py +++ b/src/allmydata/test/mutable/test_exceptions.py @@ -1,3 +1,16 @@ +""" +Ported to Python 3. 
+""" + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 + from twisted.trial import unittest from allmydata.mutable.common import NeedMoreDataError, UncoordinatedWriteError diff --git a/src/allmydata/test/mutable/test_filehandle.py b/src/allmydata/test/mutable/test_filehandle.py index 547ecac41..8db02f3fd 100644 --- a/src/allmydata/test/mutable/test_filehandle.py +++ b/src/allmydata/test/mutable/test_filehandle.py @@ -1,21 +1,33 @@ +""" +Ported to Python 3. +""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 + import os -from six.moves import cStringIO as StringIO +from io import BytesIO from twisted.trial import unittest from allmydata.mutable.publish import MutableFileHandle class FileHandle(unittest.TestCase): def setUp(self): - self.test_data = "Test Data" * 50000 - self.sio = StringIO(self.test_data) + self.test_data = b"Test Data" * 50000 + self.sio = BytesIO(self.test_data) self.uploadable = MutableFileHandle(self.sio) def test_filehandle_read(self): self.basedir = "mutable/FileHandle/test_filehandle_read" chunk_size = 10 - for i in xrange(0, len(self.test_data), chunk_size): + for i in range(0, len(self.test_data), chunk_size): data = self.uploadable.read(chunk_size) - data = "".join(data) + data = b"".join(data) start = i end = i + chunk_size self.failUnlessEqual(data, self.test_data[start:end]) @@ -33,7 +45,7 @@ class FileHandle(unittest.TestCase): # disturbing the location of the seek pointer. chunk_size = 100 data = self.uploadable.read(chunk_size) - self.failUnlessEqual("".join(data), self.test_data[:chunk_size]) + self.failUnlessEqual(b"".join(data), self.test_data[:chunk_size]) # Now get the size. size = self.uploadable.get_size() @@ -43,26 +55,26 @@ class FileHandle(unittest.TestCase): more_data = self.uploadable.read(chunk_size) start = chunk_size end = chunk_size * 2 - self.failUnlessEqual("".join(more_data), self.test_data[start:end]) + self.failUnlessEqual(b"".join(more_data), self.test_data[start:end]) def test_filehandle_file(self): # Make sure that the MutableFileHandle works on a file as well - # as a StringIO object, since in some cases it will be asked to + # as a BytesIO object, since in some cases it will be asked to # deal with files. self.basedir = self.mktemp() # necessary? What am I doing wrong here? 
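
The chunked-read loop these handle tests repeat, reduced to a standalone sketch (using a plain ``BytesIO`` stand-in; ``MutableFileHandle.read`` actually returns a list of byte strings, which is why the tests ``join`` it):

from io import BytesIO

payload = b"Test Data" * 5
handle = BytesIO(payload)
chunk_size = 10
for start in range(0, len(payload), chunk_size):
    # Each read must line up exactly with the matching source slice.
    assert handle.read(chunk_size) == payload[start:start + chunk_size]
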
os.mkdir(self.basedir) f_path = os.path.join(self.basedir, "test_file") - f = open(f_path, "w") + f = open(f_path, "wb") f.write(self.test_data) f.close() - f = open(f_path, "r") + f = open(f_path, "rb") uploadable = MutableFileHandle(f) data = uploadable.read(len(self.test_data)) - self.failUnlessEqual("".join(data), self.test_data) + self.failUnlessEqual(b"".join(data), self.test_data) size = uploadable.get_size() self.failUnlessEqual(size, len(self.test_data)) diff --git a/src/allmydata/test/mutable/test_filenode.py b/src/allmydata/test/mutable/test_filenode.py index fdc19d5cb..de03afc5a 100644 --- a/src/allmydata/test/mutable/test_filenode.py +++ b/src/allmydata/test/mutable/test_filenode.py @@ -1,3 +1,15 @@ +""" +Ported to Python 3. +""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 + from six.moves import cStringIO as StringIO from twisted.internet import defer, reactor from twisted.trial import unittest @@ -73,11 +85,11 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin): return n d.addCallback(_created) d.addCallback(lambda n: - n.overwrite(MutableData("Contents" * 50000))) + n.overwrite(MutableData(b"Contents" * 50000))) d.addCallback(lambda ignored: self._node.download_best_version()) d.addCallback(lambda contents: - self.failUnlessEqual(contents, "Contents" * 50000)) + self.failUnlessEqual(contents, b"Contents" * 50000)) return d def test_max_shares(self): @@ -95,13 +107,13 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin): d.addCallback(_created) # Now we upload some contents d.addCallback(lambda n: - n.overwrite(MutableData("contents" * 50000))) + n.overwrite(MutableData(b"contents" * 50000))) # ...then download contents d.addCallback(lambda ignored: self._node.download_best_version()) # ...and check to make sure everything went okay. 
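
The overwrite-then-download round trip recurs throughout this file; as a sketch only (not a helper this patch introduces), it factors out to:

from allmydata.mutable.publish import MutableData

def assert_round_trip(case, node, payload):
    # Overwrite with new contents, fetch the best version back, and
    # check that the bytes survived unchanged.
    d = node.overwrite(MutableData(payload))
    d.addCallback(lambda ignored: node.download_best_version())
    d.addCallback(lambda contents: case.failUnlessEqual(contents, payload))
    return d
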
d.addCallback(lambda contents: - self.failUnlessEqual("contents" * 50000, contents)) + self.failUnlessEqual(b"contents" * 50000, contents)) return d def test_max_shares_mdmf(self): @@ -119,11 +131,11 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin): return n d.addCallback(_created) d.addCallback(lambda n: - n.overwrite(MutableData("contents" * 50000))) + n.overwrite(MutableData(b"contents" * 50000))) d.addCallback(lambda ignored: self._node.download_best_version()) d.addCallback(lambda contents: - self.failUnlessEqual(contents, "contents" * 50000)) + self.failUnlessEqual(contents, b"contents" * 50000)) return d def test_mdmf_filenode_cap(self): @@ -148,7 +160,7 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin): def _created(n): self.failUnless(isinstance(n, MutableFileNode)) s = n.get_uri() - self.failUnless(s.startswith("URI:MDMF")) + self.failUnless(s.startswith(b"URI:MDMF")) n2 = self.nodemaker.create_from_cap(s) self.failUnless(isinstance(n2, MutableFileNode)) self.failUnlessEqual(n.get_storage_index(), n2.get_storage_index()) @@ -216,33 +228,33 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin): d.addCallback(lambda smap: smap.dump(StringIO())) d.addCallback(lambda sio: self.failUnless("3-of-10" in sio.getvalue())) - d.addCallback(lambda res: n.overwrite(MutableData("contents 1"))) + d.addCallback(lambda res: n.overwrite(MutableData(b"contents 1"))) d.addCallback(lambda res: self.failUnlessIdentical(res, None)) d.addCallback(lambda res: n.download_best_version()) - d.addCallback(lambda res: self.failUnlessEqual(res, "contents 1")) + d.addCallback(lambda res: self.failUnlessEqual(res, b"contents 1")) d.addCallback(lambda res: n.get_size_of_best_version()) d.addCallback(lambda size: - self.failUnlessEqual(size, len("contents 1"))) - d.addCallback(lambda res: n.overwrite(MutableData("contents 2"))) + self.failUnlessEqual(size, len(b"contents 1"))) + d.addCallback(lambda res: n.overwrite(MutableData(b"contents 2"))) d.addCallback(lambda res: n.download_best_version()) - d.addCallback(lambda res: self.failUnlessEqual(res, "contents 2")) + d.addCallback(lambda res: self.failUnlessEqual(res, b"contents 2")) d.addCallback(lambda res: n.get_servermap(MODE_WRITE)) - d.addCallback(lambda smap: n.upload(MutableData("contents 3"), smap)) + d.addCallback(lambda smap: n.upload(MutableData(b"contents 3"), smap)) d.addCallback(lambda res: n.download_best_version()) - d.addCallback(lambda res: self.failUnlessEqual(res, "contents 3")) + d.addCallback(lambda res: self.failUnlessEqual(res, b"contents 3")) d.addCallback(lambda res: n.get_servermap(MODE_ANYTHING)) d.addCallback(lambda smap: n.download_version(smap, smap.best_recoverable_version())) - d.addCallback(lambda res: self.failUnlessEqual(res, "contents 3")) + d.addCallback(lambda res: self.failUnlessEqual(res, b"contents 3")) # test a file that is large enough to overcome the # mapupdate-to-retrieve data caching (i.e. make the shares larger # than the default readsize, which is 2000 bytes). A 15kB file # will have 5kB shares. 
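
The arithmetic behind the 5kB figure in the comment above, spelled out (k comes from the 3-of-10 encoding configured earlier in this test class, and each share carries roughly 1/k of the plaintext):

k = 3                                        # shares required (3-of-10)
filesize = len(b"large size file") * 1000    # 15 * 1000 = 15000 bytes
share_size = filesize // k                   # 5000 bytes, about 5kB
assert share_size > 2000                     # exceeds the default readsize
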
- d.addCallback(lambda res: n.overwrite(MutableData("large size file" * 1000))) + d.addCallback(lambda res: n.overwrite(MutableData(b"large size file" * 1000))) d.addCallback(lambda res: n.download_best_version()) d.addCallback(lambda res: - self.failUnlessEqual(res, "large size file" * 1000)) + self.failUnlessEqual(res, b"large size file" * 1000)) return d d.addCallback(_created) return d @@ -261,7 +273,7 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin): # Now overwrite the contents with some new contents. We want # to make them big enough to force the file to be uploaded # in more than one segment. - big_contents = "contents1" * 100000 # about 900 KiB + big_contents = b"contents1" * 100000 # about 900 KiB big_contents_uploadable = MutableData(big_contents) d.addCallback(lambda ignored: n.overwrite(big_contents_uploadable)) @@ -273,7 +285,7 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin): # before, they need to be big enough to force multiple # segments, so that we make the downloader deal with # multiple segments. - bigger_contents = "contents2" * 1000000 # about 9MiB + bigger_contents = b"contents2" * 1000000 # about 9MiB bigger_contents_uploadable = MutableData(bigger_contents) d.addCallback(lambda ignored: n.overwrite(bigger_contents_uploadable)) @@ -289,7 +301,7 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin): def test_retrieve_producer_mdmf(self): # We should make sure that the retriever is able to pause and stop # correctly. - data = "contents1" * 100000 + data = b"contents1" * 100000 d = self.nodemaker.create_mutable_file(MutableData(data), version=MDMF_VERSION) d.addCallback(lambda node: node.get_best_mutable_version()) @@ -300,7 +312,7 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin): # after-the-first-write() trick to pause or stop the download. # Disabled until we find a better approach. def OFF_test_retrieve_producer_sdmf(self): - data = "contents1" * 100000 + data = b"contents1" * 100000 d = self.nodemaker.create_mutable_file(MutableData(data), version=SDMF_VERSION) d.addCallback(lambda node: node.get_best_mutable_version()) @@ -347,15 +359,15 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin): def _created(node): self.uri = node.get_uri() # also confirm that the cap has no extension fields - pieces = self.uri.split(":") + pieces = self.uri.split(b":") self.failUnlessEqual(len(pieces), 4) - return node.overwrite(MutableData("contents1" * 100000)) + return node.overwrite(MutableData(b"contents1" * 100000)) def _then(ignored): node = self.nodemaker.create_from_cap(self.uri) return node.download_best_version() def _downloaded(data): - self.failUnlessEqual(data, "contents1" * 100000) + self.failUnlessEqual(data, b"contents1" * 100000) d.addCallback(_created) d.addCallback(_then) d.addCallback(_downloaded) @@ -368,7 +380,7 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin): be published. Otherwise, we introduce undesirable semantics that are a regression from SDMF. 
""" - upload = MutableData("MDMF" * 100000) # about 400 KiB + upload = MutableData(b"MDMF" * 100000) # about 400 KiB d = self.nodemaker.create_mutable_file(upload, version=MDMF_VERSION) def _check_server_write_counts(ignored): @@ -381,22 +393,22 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin): def test_create_with_initial_contents(self): - upload1 = MutableData("contents 1") + upload1 = MutableData(b"contents 1") d = self.nodemaker.create_mutable_file(upload1) def _created(n): d = n.download_best_version() - d.addCallback(lambda res: self.failUnlessEqual(res, "contents 1")) - upload2 = MutableData("contents 2") + d.addCallback(lambda res: self.failUnlessEqual(res, b"contents 1")) + upload2 = MutableData(b"contents 2") d.addCallback(lambda res: n.overwrite(upload2)) d.addCallback(lambda res: n.download_best_version()) - d.addCallback(lambda res: self.failUnlessEqual(res, "contents 2")) + d.addCallback(lambda res: self.failUnlessEqual(res, b"contents 2")) return d d.addCallback(_created) return d def test_create_mdmf_with_initial_contents(self): - initial_contents = "foobarbaz" * 131072 # 900KiB + initial_contents = b"foobarbaz" * 131072 # 900KiB initial_contents_uploadable = MutableData(initial_contents) d = self.nodemaker.create_mutable_file(initial_contents_uploadable, version=MDMF_VERSION) @@ -404,24 +416,24 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin): d = n.download_best_version() d.addCallback(lambda data: self.failUnlessEqual(data, initial_contents)) - uploadable2 = MutableData(initial_contents + "foobarbaz") + uploadable2 = MutableData(initial_contents + b"foobarbaz") d.addCallback(lambda ignored: n.overwrite(uploadable2)) d.addCallback(lambda ignored: n.download_best_version()) d.addCallback(lambda data: self.failUnlessEqual(data, initial_contents + - "foobarbaz")) + b"foobarbaz")) return d d.addCallback(_created) return d def test_create_with_initial_contents_function(self): - data = "initial contents" + data = b"initial contents" def _make_contents(n): self.failUnless(isinstance(n, MutableFileNode)) key = n.get_writekey() - self.failUnless(isinstance(key, str), key) + self.failUnless(isinstance(key, bytes), key) self.failUnlessEqual(len(key), 16) # AES key size return MutableData(data) d = self.nodemaker.create_mutable_file(_make_contents) @@ -433,11 +445,11 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin): def test_create_mdmf_with_initial_contents_function(self): - data = "initial contents" * 100000 + data = b"initial contents" * 100000 def _make_contents(n): self.failUnless(isinstance(n, MutableFileNode)) key = n.get_writekey() - self.failUnless(isinstance(key, str), key) + self.failUnless(isinstance(key, bytes), key) self.failUnlessEqual(len(key), 16) return MutableData(data) d = self.nodemaker.create_mutable_file(_make_contents, @@ -450,7 +462,7 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin): def test_create_with_too_large_contents(self): - BIG = "a" * (self.OLD_MAX_SEGMENT_SIZE + 1) + BIG = b"a" * (self.OLD_MAX_SEGMENT_SIZE + 1) BIG_uploadable = MutableData(BIG) d = self.nodemaker.create_mutable_file(BIG_uploadable) def _created(n): @@ -469,7 +481,7 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin): def test_modify(self): def _modifier(old_contents, servermap, first_time): - new_contents = old_contents + "line2" + new_contents = old_contents + b"line2" return new_contents def _non_modifier(old_contents, servermap, first_time): return old_contents @@ -478,7 +490,7 @@ class Filenode(unittest.TestCase, 
testutil.ShouldFailMixin): def _error_modifier(old_contents, servermap, first_time): raise ValueError("oops") def _toobig_modifier(old_contents, servermap, first_time): - new_content = "b" * (self.OLD_MAX_SEGMENT_SIZE + 1) + new_content = b"b" * (self.OLD_MAX_SEGMENT_SIZE + 1) return new_content calls = [] def _ucw_error_modifier(old_contents, servermap, first_time): @@ -486,7 +498,7 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin): calls.append(1) if len(calls) <= 1: raise UncoordinatedWriteError("simulated") - new_contents = old_contents + "line3" + new_contents = old_contents + b"line3" return new_contents def _ucw_error_non_modifier(old_contents, servermap, first_time): # simulate an UncoordinatedWriteError once, and don't actually @@ -496,41 +508,41 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin): raise UncoordinatedWriteError("simulated") return old_contents - initial_contents = "line1" + initial_contents = b"line1" d = self.nodemaker.create_mutable_file(MutableData(initial_contents)) def _created(n): d = n.modify(_modifier) d.addCallback(lambda res: n.download_best_version()) - d.addCallback(lambda res: self.failUnlessEqual(res, "line1line2")) + d.addCallback(lambda res: self.failUnlessEqual(res, b"line1line2")) d.addCallback(lambda res: self.failUnlessCurrentSeqnumIs(n, 2, "m")) d.addCallback(lambda res: n.modify(_non_modifier)) d.addCallback(lambda res: n.download_best_version()) - d.addCallback(lambda res: self.failUnlessEqual(res, "line1line2")) + d.addCallback(lambda res: self.failUnlessEqual(res, b"line1line2")) d.addCallback(lambda res: self.failUnlessCurrentSeqnumIs(n, 2, "non")) d.addCallback(lambda res: n.modify(_none_modifier)) d.addCallback(lambda res: n.download_best_version()) - d.addCallback(lambda res: self.failUnlessEqual(res, "line1line2")) + d.addCallback(lambda res: self.failUnlessEqual(res, b"line1line2")) d.addCallback(lambda res: self.failUnlessCurrentSeqnumIs(n, 2, "none")) d.addCallback(lambda res: self.shouldFail(ValueError, "error_modifier", None, n.modify, _error_modifier)) d.addCallback(lambda res: n.download_best_version()) - d.addCallback(lambda res: self.failUnlessEqual(res, "line1line2")) + d.addCallback(lambda res: self.failUnlessEqual(res, b"line1line2")) d.addCallback(lambda res: self.failUnlessCurrentSeqnumIs(n, 2, "err")) d.addCallback(lambda res: n.download_best_version()) - d.addCallback(lambda res: self.failUnlessEqual(res, "line1line2")) + d.addCallback(lambda res: self.failUnlessEqual(res, b"line1line2")) d.addCallback(lambda res: self.failUnlessCurrentSeqnumIs(n, 2, "big")) d.addCallback(lambda res: n.modify(_ucw_error_modifier)) d.addCallback(lambda res: self.failUnlessEqual(len(calls), 2)) d.addCallback(lambda res: n.download_best_version()) d.addCallback(lambda res: self.failUnlessEqual(res, - "line1line2line3")) + b"line1line2line3")) d.addCallback(lambda res: self.failUnlessCurrentSeqnumIs(n, 3, "ucw")) def _reset_ucw_error_modifier(res): @@ -548,7 +560,7 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin): d.addCallback(lambda res: self.failUnlessEqual(len(calls), 2)) d.addCallback(lambda res: n.download_best_version()) d.addCallback(lambda res: self.failUnlessEqual(res, - "line1line2line3")) + b"line1line2line3")) d.addCallback(lambda res: self.failUnlessCurrentSeqnumIs(n, 4, "ucw")) d.addCallback(lambda res: n.modify(_toobig_modifier)) return d @@ -558,14 +570,14 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin): def test_modify_backoffer(self): def _modifier(old_contents, 
servermap, first_time): - return old_contents + "line2" + return old_contents + b"line2" calls = [] def _ucw_error_modifier(old_contents, servermap, first_time): # simulate an UncoordinatedWriteError once calls.append(1) if len(calls) <= 1: raise UncoordinatedWriteError("simulated") - return old_contents + "line3" + return old_contents + b"line3" def _always_ucw_error_modifier(old_contents, servermap, first_time): raise UncoordinatedWriteError("simulated") def _backoff_stopper(node, f): @@ -580,11 +592,11 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin): giveuper._delay = 0.1 giveuper.factor = 1 - d = self.nodemaker.create_mutable_file(MutableData("line1")) + d = self.nodemaker.create_mutable_file(MutableData(b"line1")) def _created(n): d = n.modify(_modifier) d.addCallback(lambda res: n.download_best_version()) - d.addCallback(lambda res: self.failUnlessEqual(res, "line1line2")) + d.addCallback(lambda res: self.failUnlessEqual(res, b"line1line2")) d.addCallback(lambda res: self.failUnlessCurrentSeqnumIs(n, 2, "m")) d.addCallback(lambda res: @@ -593,7 +605,7 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin): n.modify, _ucw_error_modifier, _backoff_stopper)) d.addCallback(lambda res: n.download_best_version()) - d.addCallback(lambda res: self.failUnlessEqual(res, "line1line2")) + d.addCallback(lambda res: self.failUnlessEqual(res, b"line1line2")) d.addCallback(lambda res: self.failUnlessCurrentSeqnumIs(n, 2, "stop")) def _reset_ucw_error_modifier(res): @@ -604,7 +616,7 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin): _backoff_pauser)) d.addCallback(lambda res: n.download_best_version()) d.addCallback(lambda res: self.failUnlessEqual(res, - "line1line2line3")) + b"line1line2line3")) d.addCallback(lambda res: self.failUnlessCurrentSeqnumIs(n, 3, "pause")) d.addCallback(lambda res: @@ -614,7 +626,7 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin): giveuper.delay)) d.addCallback(lambda res: n.download_best_version()) d.addCallback(lambda res: self.failUnlessEqual(res, - "line1line2line3")) + b"line1line2line3")) d.addCallback(lambda res: self.failUnlessCurrentSeqnumIs(n, 3, "giveup")) return d @@ -630,22 +642,22 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin): d.addCallback(lambda smap: smap.dump(StringIO())) d.addCallback(lambda sio: self.failUnless("3-of-10" in sio.getvalue())) - d.addCallback(lambda res: n.overwrite(MutableData("contents 1"))) + d.addCallback(lambda res: n.overwrite(MutableData(b"contents 1"))) d.addCallback(lambda res: self.failUnlessIdentical(res, None)) d.addCallback(lambda res: n.download_best_version()) - d.addCallback(lambda res: self.failUnlessEqual(res, "contents 1")) - d.addCallback(lambda res: n.overwrite(MutableData("contents 2"))) + d.addCallback(lambda res: self.failUnlessEqual(res, b"contents 1")) + d.addCallback(lambda res: n.overwrite(MutableData(b"contents 2"))) d.addCallback(lambda res: n.download_best_version()) - d.addCallback(lambda res: self.failUnlessEqual(res, "contents 2")) + d.addCallback(lambda res: self.failUnlessEqual(res, b"contents 2")) d.addCallback(lambda res: n.get_servermap(MODE_WRITE)) - d.addCallback(lambda smap: n.upload(MutableData("contents 3"), smap)) + d.addCallback(lambda smap: n.upload(MutableData(b"contents 3"), smap)) d.addCallback(lambda res: n.download_best_version()) - d.addCallback(lambda res: self.failUnlessEqual(res, "contents 3")) + d.addCallback(lambda res: self.failUnlessEqual(res, b"contents 3")) d.addCallback(lambda res: 
n.get_servermap(MODE_ANYTHING)) d.addCallback(lambda smap: n.download_version(smap, smap.best_recoverable_version())) - d.addCallback(lambda res: self.failUnlessEqual(res, "contents 3")) + d.addCallback(lambda res: self.failUnlessEqual(res, b"contents 3")) return d d.addCallback(_created) return d @@ -663,11 +675,11 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin): d.addCallback(lambda ignored: self.failUnlessEqual(self.n.get_size(), 0)) d.addCallback(lambda ignored: - self.n.overwrite(MutableData("foobarbaz"))) + self.n.overwrite(MutableData(b"foobarbaz"))) d.addCallback(lambda ignored: self.failUnlessEqual(self.n.get_size(), 9)) d.addCallback(lambda ignored: - self.nodemaker.create_mutable_file(MutableData("foobarbaz"))) + self.nodemaker.create_mutable_file(MutableData(b"foobarbaz"))) d.addCallback(_created) d.addCallback(lambda ignored: self.failUnlessEqual(self.n.get_size(), 9)) diff --git a/src/allmydata/test/mutable/test_interoperability.py b/src/allmydata/test/mutable/test_interoperability.py index b0ebc5cec..5d7414907 100644 --- a/src/allmydata/test/mutable/test_interoperability.py +++ b/src/allmydata/test/mutable/test_interoperability.py @@ -1,3 +1,15 @@ +""" +Ported to Python 3. +""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 + import os, base64 from twisted.trial import unittest from allmydata import uri @@ -8,24 +20,24 @@ from ..no_network import GridTestMixin class Interoperability(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin): sdmf_old_shares = {} - sdmf_old_shares[0] = 
"VGFob2UgbXV0YWJsZSBjb250YWluZXIgdjEKdQlEA47ESLbTdKdpLJXCpBxd5OH239tl5hvAiz1dvGdE5rIOpf8cbfxbPcwNF+Y5dM92uBVbmV6KAAAAAAAAB/wAAAAAAAAJ0AAAAAFOWSw7jSx7WXzaMpdleJYXwYsRCV82jNA5oex9m2YhXSnb2POh+vvC1LE1NAfRc9GOb2zQG84Xdsx1Jub2brEeKkyt0sRIttN0p2kslcKkHF3k4fbf22XmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABamJprL6ecrsOoFKdrXUmWveLq8nzEGDOjFnyK9detI3noX3uyK2MwSnFdAfyN0tuAwoAAAAAAAAAFQAAAAAAAAAVAAABjwAAAo8AAAMXAAADNwAAAAAAAAM+AAAAAAAAB/wwggEgMA0GCSqGSIb3DQEBAQUAA4IBDQAwggEIAoIBAQC1IkainlJF12IBXBQdpRK1zXB7a26vuEYqRmQM09YjC6sQjCs0F2ICk8n9m/2Kw4l16eIEboB2Au9pODCE+u/dEAakEFh4qidTMn61rbGUbsLK8xzuWNW22ezzz9/nPia0HDrulXt51/FYtfnnAuD1RJGXJv/8tDllE9FL/18TzlH4WuB6Fp8FTgv7QdbZAfWJHDGFIpVCJr1XxOCsSZNFJIqGwZnD2lsChiWw5OJDbKd8otqN1hIbfHyMyfMOJ/BzRzvZXaUt4Dv5nf93EmQDWClxShRwpuX/NkZ5B2K9OFonFTbOCexm/MjMAdCBqebKKaiHFkiknUCn9eJQpZ5bAgERgV50VKj+AVTDfgTpqfO2vfo4wrufi6ZBb8QV7hllhUFBjYogQ9C96dnS7skv0s+cqFuUjwMILr5/rsbEmEMGvl0T0ytyAbtlXuowEFVj/YORNknM4yjY72YUtEPTlMpk0Cis7aIgTvu5qWMPER26PMApZuRqiwRsGIkaJIvOVOTHHjFYe3/YzdMkc7OZtqRMfQLtwVl2/zKQQV8b/a9vaT6q3mRLRd4P3esaAFe/+7sR/t+9tmB+a8kxtKM6kmaVQJMbXJZ4aoHGfeLX0m35Rcvu2Bmph7QfSDjk/eaE3q55zYSoGWShmlhlw4Kwg84sMuhmcVhLvo0LovR8bKmbdgACtTh7+7gs/l5w1lOkgbF6w7rkXLNslK7L2KYF4SPFLUcABOOLy8EETxh7h7/z9d62EiPu9CNpRrCOLxUhn+JUS+DuAAhgcAb/adrQFrhlrRNoRpvjDuxmFebA4F0qCyqWssm61AAQ/EX4eC/1+hGOQ/h4EiKUkqxdsfzdcPlDvd11SGWZ0VHsUclZChTzuBAU2zLTXm+cG8IFhO50ly6Ey/DB44NtMKVaVzO0nU8DE0Wua7Lx6Bnad5n91qmHAnwSEJE5YIhQM634omd6cq9Wk4seJCUIn+ucoknrpxp0IR9QMxpKSMRHRUg2K8ZegnY3YqFunRZKCfsq9ufQEKgjZN12AFqi551KPBdn4/3V5HK6xTv0P4robSsE/BvuIfByvRf/W7ZrDx+CFC4EEcsBOACOZCrkhhqd5TkYKbe9RA+vs56+9N5qZGurkxcoKviiyEncxvTuShD65DK/6x6kMDMgQv/EdZDI3x9GtHTnRBYXwDGnPJ19w+q2zC3e2XarbxTGYQIPEC5mYx0gAA0sbjf018NGfwBhl6SB54iGsa8uLvR3jHv6OSRJgwxL6j7P0Ts4Hv2EtO12P0Lv21pwi3JC1O/WviSrKCvrQD5lMHL9Uym3hwFi2zu0mqwZvxOAbGy7kfOPXkLYKOHTZLthzKj3PsdjeceWBfYIvPGKYcd6wDr36d1aXSYS4IWeApTS2AQ2lu0DUcgSefAvsA8NkgOklvJY1cjTMSg6j6cxQo48Bvl8RAWGLbr4h2S/8KwDGxwLsSv0Gop/gnFc3GzCsmL0EkEyHHWkCA8YRXCghfW80KLDV495ff7yF5oiwK56GniqowZ3RG9Jxp5MXoJQgsLV1VMQFMAmsY69yz8eoxRH3wl9L0dMyndLulhWWzNwPMQ2I0yAWdzA/pksVmwTJTFenB3MHCiWc5rEwJ3yofe6NZZnZQrYyL9r1TNnVwfTwRUiykPiLSk4x9Mi6DX7RamDAxc8u3gDVfjPsTOTagBOEGUWlGAL54KE/E6sgCQ5DEAt12chk8AxbjBFLPgV+/idrzS0lZHOL+IVBI9D0i3Bq1yZcSIqcjZB0M3IbxbPm4gLAYOWEiTUN2ecsEHHg9nt6rhgffVoqSbCCFPbpC0xf7WOC3+BQORIZECOCC7cUAciXq3xn+GuxpFE40RWRJeKAK7bBQ21X89ABIXlQFkFddZ9kRvlZ2Pnl0oeF+2pjnZu0Yc2czNfZEQF2P7BKIdLrgMgxG89snxAY8qAYTCKyQw6xTG87wkjDcpy1wzsZLP3WsOuO7cAm7b27xU0jRKq8Cw4d1hDoyRG+RdS53F8RFJzVMaNNYgxU2tfRwUvXpTRXiOheeRVvh25+YGVnjakUXjx/dSDnOw4ETHGHD+7styDkeSfc3BdSZxswzc6OehgMI+xsCxeeRym15QUm9hxvg8X7Bfz/0WulgFwgzrm11TVynZYOmvyHpiZKoqQyQyKahIrfhwuchCr7lMsZ4a+umIkNkKxCLZnI+T7jd+eGFMgKItjz3kTTxRl3IhaJG3LbPmwRUJynMxQKdMi4Uf0qy0U7+i8hIJ9m50QXc+3tw2bwDSbx22XYJ9Wf14gxx5G5SPTb1JVCbhe4fxNt91xIxCow2zk62tzbYfRe6dfmDmgYHkv2PIEtMJZK8iKLDjFfu2ZUxsKT2A5g1q17og6o9MeXeuFS3mzJXJYFQZd+3UzlFR9qwkFkby9mg5y4XSeMvRLOHPt/H/r5SpEqBE6a9MadZYt61FBV152CUEzd43ihXtrAa0XH9HdsiySBcWI1SpM3mv9rRP0DiLjMUzHw/K1D8TE2f07zW4t/9kvE11tFj/NpICixQAAAAA=" - sdmf_old_shares[1] = 
"VGFob2UgbXV0YWJsZSBjb250YWluZXIgdjEKdQlEA47ESLbTdKdpLJXCpBxd5OH239tl5hvAiz1dvGdE5rIOpf8cbfxbPcwNF+Y5dM92uBVbmV6KAAAAAAAAB/wAAAAAAAAJ0AAAAAFOWSw7jSx7WXzaMpdleJYXwYsRCV82jNA5oex9m2YhXSnb2POh+vvC1LE1NAfRc9GOb2zQG84Xdsx1Jub2brEeKkyt0sRIttN0p2kslcKkHF3k4fbf22XmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABamJprL6ecrsOoFKdrXUmWveLq8nzEGDOjFnyK9detI3noX3uyK2MwSnFdAfyN0tuAwoAAAAAAAAAFQAAAAAAAAAVAAABjwAAAo8AAAMXAAADNwAAAAAAAAM+AAAAAAAAB/wwggEgMA0GCSqGSIb3DQEBAQUAA4IBDQAwggEIAoIBAQC1IkainlJF12IBXBQdpRK1zXB7a26vuEYqRmQM09YjC6sQjCs0F2ICk8n9m/2Kw4l16eIEboB2Au9pODCE+u/dEAakEFh4qidTMn61rbGUbsLK8xzuWNW22ezzz9/nPia0HDrulXt51/FYtfnnAuD1RJGXJv/8tDllE9FL/18TzlH4WuB6Fp8FTgv7QdbZAfWJHDGFIpVCJr1XxOCsSZNFJIqGwZnD2lsChiWw5OJDbKd8otqN1hIbfHyMyfMOJ/BzRzvZXaUt4Dv5nf93EmQDWClxShRwpuX/NkZ5B2K9OFonFTbOCexm/MjMAdCBqebKKaiHFkiknUCn9eJQpZ5bAgERgV50VKj+AVTDfgTpqfO2vfo4wrufi6ZBb8QV7hllhUFBjYogQ9C96dnS7skv0s+cqFuUjwMILr5/rsbEmEMGvl0T0ytyAbtlXuowEFVj/YORNknM4yjY72YUtEPTlMpk0Cis7aIgTvu5qWMPER26PMApZuRqiwRsGIkaJIvOVOTHHjFYe3/YzdMkc7OZtqRMfQLtwVl2/zKQQV8b/a9vaT6q3mRLRd4P3esaAFe/+7sR/t+9tmB+a8kxtKM6kmaVQJMbXJZ4aoHGfeLX0m35Rcvu2Bmph7QfSDjk/eaE3q55zYSoGWShmlhlw4Kwg84sMuhmcVhLvo0LovR8bKmbdgACtTh7+7gs/l5w1lOkgbF6w7rkXLNslK7L2KYF4SPFLUcABOOLy8EETxh7h7/z9d62EiPu9CNpRrCOLxUhn+JUS+DuAAhgcAb/adrQFrhlrRNoRpvjDuxmFebA4F0qCyqWssm61AAP7FHJWQoU87gQFNsy015vnBvCBYTudJcuhMvwweODbTD8Rfh4L/X6EY5D+HgSIpSSrF2x/N1w+UO93XVIZZnRUeePDXEwhqYDE0Wua7Lx6Bnad5n91qmHAnwSEJE5YIhQM634omd6cq9Wk4seJCUIn+ucoknrpxp0IR9QMxpKSMRHRUg2K8ZegnY3YqFunRZKCfsq9ufQEKgjZN12AFqi551KPBdn4/3V5HK6xTv0P4robSsE/BvuIfByvRf/W7ZrDx+CFC4EEcsBOACOZCrkhhqd5TkYKbe9RA+vs56+9N5qZGurkxcoKviiyEncxvTuShD65DK/6x6kMDMgQv/EdZDI3x9GtHTnRBYXwDGnPJ19w+q2zC3e2XarbxTGYQIPEC5mYx0gAA0sbjf018NGfwBhl6SB54iGsa8uLvR3jHv6OSRJgwxL6j7P0Ts4Hv2EtO12P0Lv21pwi3JC1O/WviSrKCvrQD5lMHL9Uym3hwFi2zu0mqwZvxOAbGy7kfOPXkLYKOHTZLthzKj3PsdjeceWBfYIvPGKYcd6wDr36d1aXSYS4IWeApTS2AQ2lu0DUcgSefAvsA8NkgOklvJY1cjTMSg6j6cxQo48Bvl8RAWGLbr4h2S/8KwDGxwLsSv0Gop/gnFc3GzCsmL0EkEyHHWkCA8YRXCghfW80KLDV495ff7yF5oiwK56GniqowZ3RG9Jxp5MXoJQgsLV1VMQFMAmsY69yz8eoxRH3wl9L0dMyndLulhWWzNwPMQ2I0yAWdzA/pksVmwTJTFenB3MHCiWc5rEwJ3yofe6NZZnZQrYyL9r1TNnVwfTwRUiykPiLSk4x9Mi6DX7RamDAxc8u3gDVfjPsTOTagBOEGUWlGAL54KE/E6sgCQ5DEAt12chk8AxbjBFLPgV+/idrzS0lZHOL+IVBI9D0i3Bq1yZcSIqcjZB0M3IbxbPm4gLAYOWEiTUN2ecsEHHg9nt6rhgffVoqSbCCFPbpC0xf7WOC3+BQORIZECOCC7cUAciXq3xn+GuxpFE40RWRJeKAK7bBQ21X89ABIXlQFkFddZ9kRvlZ2Pnl0oeF+2pjnZu0Yc2czNfZEQF2P7BKIdLrgMgxG89snxAY8qAYTCKyQw6xTG87wkjDcpy1wzsZLP3WsOuO7cAm7b27xU0jRKq8Cw4d1hDoyRG+RdS53F8RFJzVMaNNYgxU2tfRwUvXpTRXiOheeRVvh25+YGVnjakUXjx/dSDnOw4ETHGHD+7styDkeSfc3BdSZxswzc6OehgMI+xsCxeeRym15QUm9hxvg8X7Bfz/0WulgFwgzrm11TVynZYOmvyHpiZKoqQyQyKahIrfhwuchCr7lMsZ4a+umIkNkKxCLZnI+T7jd+eGFMgKItjz3kTTxRl3IhaJG3LbPmwRUJynMxQKdMi4Uf0qy0U7+i8hIJ9m50QXc+3tw2bwDSbx22XYJ9Wf14gxx5G5SPTb1JVCbhe4fxNt91xIxCow2zk62tzbYfRe6dfmDmgYHkv2PIEtMJZK8iKLDjFfu2ZUxsKT2A5g1q17og6o9MeXeuFS3mzJXJYFQZd+3UzlFR9qwkFkby9mg5y4XSeMvRLOHPt/H/r5SpEqBE6a9MadZYt61FBV152CUEzd43ihXtrAa0XH9HdsiySBcWI1SpM3mv9rRP0DiLjMUzHw/K1D8TE2f07zW4t/9kvE11tFj/NpICixQAAAAA=" - sdmf_old_shares[2] = 
"VGFob2UgbXV0YWJsZSBjb250YWluZXIgdjEKdQlEA47ESLbTdKdpLJXCpBxd5OH239tl5hvAiz1dvGdE5rIOpf8cbfxbPcwNF+Y5dM92uBVbmV6KAAAAAAAAB/wAAAAAAAAJ0AAAAAFOWSw7jSx7WXzaMpdleJYXwYsRCV82jNA5oex9m2YhXSnb2POh+vvC1LE1NAfRc9GOb2zQG84Xdsx1Jub2brEeKkyt0sRIttN0p2kslcKkHF3k4fbf22XmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABamJprL6ecrsOoFKdrXUmWveLq8nzEGDOjFnyK9detI3noX3uyK2MwSnFdAfyN0tuAwoAAAAAAAAAFQAAAAAAAAAVAAABjwAAAo8AAAMXAAADNwAAAAAAAAM+AAAAAAAAB/wwggEgMA0GCSqGSIb3DQEBAQUAA4IBDQAwggEIAoIBAQC1IkainlJF12IBXBQdpRK1zXB7a26vuEYqRmQM09YjC6sQjCs0F2ICk8n9m/2Kw4l16eIEboB2Au9pODCE+u/dEAakEFh4qidTMn61rbGUbsLK8xzuWNW22ezzz9/nPia0HDrulXt51/FYtfnnAuD1RJGXJv/8tDllE9FL/18TzlH4WuB6Fp8FTgv7QdbZAfWJHDGFIpVCJr1XxOCsSZNFJIqGwZnD2lsChiWw5OJDbKd8otqN1hIbfHyMyfMOJ/BzRzvZXaUt4Dv5nf93EmQDWClxShRwpuX/NkZ5B2K9OFonFTbOCexm/MjMAdCBqebKKaiHFkiknUCn9eJQpZ5bAgERgV50VKj+AVTDfgTpqfO2vfo4wrufi6ZBb8QV7hllhUFBjYogQ9C96dnS7skv0s+cqFuUjwMILr5/rsbEmEMGvl0T0ytyAbtlXuowEFVj/YORNknM4yjY72YUtEPTlMpk0Cis7aIgTvu5qWMPER26PMApZuRqiwRsGIkaJIvOVOTHHjFYe3/YzdMkc7OZtqRMfQLtwVl2/zKQQV8b/a9vaT6q3mRLRd4P3esaAFe/+7sR/t+9tmB+a8kxtKM6kmaVQJMbXJZ4aoHGfeLX0m35Rcvu2Bmph7QfSDjk/eaE3q55zYSoGWShmlhlw4Kwg84sMuhmcVhLvo0LovR8bKmbdgACtTh7+7gs/l5w1lOkgbF6w7rkXLNslK7L2KYF4SPFLUcABOOLy8EETxh7h7/z9d62EiPu9CNpRrCOLxUhn+JUS+DuAAd8jdiCodW233N1acXhZGnulDKR3hiNsMdEIsijRPemewASoSCFpVj4utEE+eVFM146xfgC6DX39GaQ2zT3YKsWX3GiLwKtGffwqV7IlZIcBEVqMfTXSTZsY+dZm1MxxCZH0Zd33VY0yggDE0Wua7Lx6Bnad5n91qmHAnwSEJE5YIhQM634omd6cq9Wk4seJCUIn+ucoknrpxp0IR9QMxpKSMRHRUg2K8ZegnY3YqFunRZKCfsq9ufQEKgjZN12AFqi551KPBdn4/3V5HK6xTv0P4robSsE/BvuIfByvRf/W7ZrDx+CFC4EEcsBOACOZCrkhhqd5TkYKbe9RA+vs56+9N5qZGurkxcoKviiyEncxvTuShD65DK/6x6kMDMgQv/EdZDI3x9GtHTnRBYXwDGnPJ19w+q2zC3e2XarbxTGYQIPEC5mYx0gAA0sbjf018NGfwBhl6SB54iGsa8uLvR3jHv6OSRJgwxL6j7P0Ts4Hv2EtO12P0Lv21pwi3JC1O/WviSrKCvrQD5lMHL9Uym3hwFi2zu0mqwZvxOAbGy7kfOPXkLYKOHTZLthzKj3PsdjeceWBfYIvPGKYcd6wDr36d1aXSYS4IWeApTS2AQ2lu0DUcgSefAvsA8NkgOklvJY1cjTMSg6j6cxQo48Bvl8RAWGLbr4h2S/8KwDGxwLsSv0Gop/gnFc3GzCsmL0EkEyHHWkCA8YRXCghfW80KLDV495ff7yF5oiwK56GniqowZ3RG9Jxp5MXoJQgsLV1VMQFMAmsY69yz8eoxRH3wl9L0dMyndLulhWWzNwPMQ2I0yAWdzA/pksVmwTJTFenB3MHCiWc5rEwJ3yofe6NZZnZQrYyL9r1TNnVwfTwRUiykPiLSk4x9Mi6DX7RamDAxc8u3gDVfjPsTOTagBOEGUWlGAL54KE/E6sgCQ5DEAt12chk8AxbjBFLPgV+/idrzS0lZHOL+IVBI9D0i3Bq1yZcSIqcjZB0M3IbxbPm4gLAYOWEiTUN2ecsEHHg9nt6rhgffVoqSbCCFPbpC0xf7WOC3+BQORIZECOCC7cUAciXq3xn+GuxpFE40RWRJeKAK7bBQ21X89ABIXlQFkFddZ9kRvlZ2Pnl0oeF+2pjnZu0Yc2czNfZEQF2P7BKIdLrgMgxG89snxAY8qAYTCKyQw6xTG87wkjDcpy1wzsZLP3WsOuO7cAm7b27xU0jRKq8Cw4d1hDoyRG+RdS53F8RFJzVMaNNYgxU2tfRwUvXpTRXiOheeRVvh25+YGVnjakUXjx/dSDnOw4ETHGHD+7styDkeSfc3BdSZxswzc6OehgMI+xsCxeeRym15QUm9hxvg8X7Bfz/0WulgFwgzrm11TVynZYOmvyHpiZKoqQyQyKahIrfhwuchCr7lMsZ4a+umIkNkKxCLZnI+T7jd+eGFMgKItjz3kTTxRl3IhaJG3LbPmwRUJynMxQKdMi4Uf0qy0U7+i8hIJ9m50QXc+3tw2bwDSbx22XYJ9Wf14gxx5G5SPTb1JVCbhe4fxNt91xIxCow2zk62tzbYfRe6dfmDmgYHkv2PIEtMJZK8iKLDjFfu2ZUxsKT2A5g1q17og6o9MeXeuFS3mzJXJYFQZd+3UzlFR9qwkFkby9mg5y4XSeMvRLOHPt/H/r5SpEqBE6a9MadZYt61FBV152CUEzd43ihXtrAa0XH9HdsiySBcWI1SpM3mv9rRP0DiLjMUzHw/K1D8TE2f07zW4t/9kvE11tFj/NpICixQAAAAA=" - sdmf_old_shares[3] = 
"VGFob2UgbXV0YWJsZSBjb250YWluZXIgdjEKdQlEA47ESLbTdKdpLJXCpBxd5OH239tl5hvAiz1dvGdE5rIOpf8cbfxbPcwNF+Y5dM92uBVbmV6KAAAAAAAAB/wAAAAAAAAJ0AAAAAFOWSw7jSx7WXzaMpdleJYXwYsRCV82jNA5oex9m2YhXSnb2POh+vvC1LE1NAfRc9GOb2zQG84Xdsx1Jub2brEeKkyt0sRIttN0p2kslcKkHF3k4fbf22XmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABamJprL6ecrsOoFKdrXUmWveLq8nzEGDOjFnyK9detI3noX3uyK2MwSnFdAfyN0tuAwoAAAAAAAAAFQAAAAAAAAAVAAABjwAAAo8AAAMXAAADNwAAAAAAAAM+AAAAAAAAB/wwggEgMA0GCSqGSIb3DQEBAQUAA4IBDQAwggEIAoIBAQC1IkainlJF12IBXBQdpRK1zXB7a26vuEYqRmQM09YjC6sQjCs0F2ICk8n9m/2Kw4l16eIEboB2Au9pODCE+u/dEAakEFh4qidTMn61rbGUbsLK8xzuWNW22ezzz9/nPia0HDrulXt51/FYtfnnAuD1RJGXJv/8tDllE9FL/18TzlH4WuB6Fp8FTgv7QdbZAfWJHDGFIpVCJr1XxOCsSZNFJIqGwZnD2lsChiWw5OJDbKd8otqN1hIbfHyMyfMOJ/BzRzvZXaUt4Dv5nf93EmQDWClxShRwpuX/NkZ5B2K9OFonFTbOCexm/MjMAdCBqebKKaiHFkiknUCn9eJQpZ5bAgERgV50VKj+AVTDfgTpqfO2vfo4wrufi6ZBb8QV7hllhUFBjYogQ9C96dnS7skv0s+cqFuUjwMILr5/rsbEmEMGvl0T0ytyAbtlXuowEFVj/YORNknM4yjY72YUtEPTlMpk0Cis7aIgTvu5qWMPER26PMApZuRqiwRsGIkaJIvOVOTHHjFYe3/YzdMkc7OZtqRMfQLtwVl2/zKQQV8b/a9vaT6q3mRLRd4P3esaAFe/+7sR/t+9tmB+a8kxtKM6kmaVQJMbXJZ4aoHGfeLX0m35Rcvu2Bmph7QfSDjk/eaE3q55zYSoGWShmlhlw4Kwg84sMuhmcVhLvo0LovR8bKmbdgACtTh7+7gs/l5w1lOkgbF6w7rkXLNslK7L2KYF4SPFLUcABOOLy8EETxh7h7/z9d62EiPu9CNpRrCOLxUhn+JUS+DuAAd8jdiCodW233N1acXhZGnulDKR3hiNsMdEIsijRPemewARoi8CrRn38KleyJWSHARFajH010k2bGPnWZtTMcQmR9GhIIWlWPi60QT55UUzXjrF+ALoNff0ZpDbNPdgqxZfcSNSplrHqtsDE0Wua7Lx6Bnad5n91qmHAnwSEJE5YIhQM634omd6cq9Wk4seJCUIn+ucoknrpxp0IR9QMxpKSMRHRUg2K8ZegnY3YqFunRZKCfsq9ufQEKgjZN12AFqi551KPBdn4/3V5HK6xTv0P4robSsE/BvuIfByvRf/W7ZrDx+CFC4EEcsBOACOZCrkhhqd5TkYKbe9RA+vs56+9N5qZGurkxcoKviiyEncxvTuShD65DK/6x6kMDMgQv/EdZDI3x9GtHTnRBYXwDGnPJ19w+q2zC3e2XarbxTGYQIPEC5mYx0gAA0sbjf018NGfwBhl6SB54iGsa8uLvR3jHv6OSRJgwxL6j7P0Ts4Hv2EtO12P0Lv21pwi3JC1O/WviSrKCvrQD5lMHL9Uym3hwFi2zu0mqwZvxOAbGy7kfOPXkLYKOHTZLthzKj3PsdjeceWBfYIvPGKYcd6wDr36d1aXSYS4IWeApTS2AQ2lu0DUcgSefAvsA8NkgOklvJY1cjTMSg6j6cxQo48Bvl8RAWGLbr4h2S/8KwDGxwLsSv0Gop/gnFc3GzCsmL0EkEyHHWkCA8YRXCghfW80KLDV495ff7yF5oiwK56GniqowZ3RG9Jxp5MXoJQgsLV1VMQFMAmsY69yz8eoxRH3wl9L0dMyndLulhWWzNwPMQ2I0yAWdzA/pksVmwTJTFenB3MHCiWc5rEwJ3yofe6NZZnZQrYyL9r1TNnVwfTwRUiykPiLSk4x9Mi6DX7RamDAxc8u3gDVfjPsTOTagBOEGUWlGAL54KE/E6sgCQ5DEAt12chk8AxbjBFLPgV+/idrzS0lZHOL+IVBI9D0i3Bq1yZcSIqcjZB0M3IbxbPm4gLAYOWEiTUN2ecsEHHg9nt6rhgffVoqSbCCFPbpC0xf7WOC3+BQORIZECOCC7cUAciXq3xn+GuxpFE40RWRJeKAK7bBQ21X89ABIXlQFkFddZ9kRvlZ2Pnl0oeF+2pjnZu0Yc2czNfZEQF2P7BKIdLrgMgxG89snxAY8qAYTCKyQw6xTG87wkjDcpy1wzsZLP3WsOuO7cAm7b27xU0jRKq8Cw4d1hDoyRG+RdS53F8RFJzVMaNNYgxU2tfRwUvXpTRXiOheeRVvh25+YGVnjakUXjx/dSDnOw4ETHGHD+7styDkeSfc3BdSZxswzc6OehgMI+xsCxeeRym15QUm9hxvg8X7Bfz/0WulgFwgzrm11TVynZYOmvyHpiZKoqQyQyKahIrfhwuchCr7lMsZ4a+umIkNkKxCLZnI+T7jd+eGFMgKItjz3kTTxRl3IhaJG3LbPmwRUJynMxQKdMi4Uf0qy0U7+i8hIJ9m50QXc+3tw2bwDSbx22XYJ9Wf14gxx5G5SPTb1JVCbhe4fxNt91xIxCow2zk62tzbYfRe6dfmDmgYHkv2PIEtMJZK8iKLDjFfu2ZUxsKT2A5g1q17og6o9MeXeuFS3mzJXJYFQZd+3UzlFR9qwkFkby9mg5y4XSeMvRLOHPt/H/r5SpEqBE6a9MadZYt61FBV152CUEzd43ihXtrAa0XH9HdsiySBcWI1SpM3mv9rRP0DiLjMUzHw/K1D8TE2f07zW4t/9kvE11tFj/NpICixQAAAAA=" - sdmf_old_shares[4] = 
"VGFob2UgbXV0YWJsZSBjb250YWluZXIgdjEKdQlEA47ESLbTdKdpLJXCpBxd5OH239tl5hvAiz1dvGdE5rIOpf8cbfxbPcwNF+Y5dM92uBVbmV6KAAAAAAAAB/wAAAAAAAAJ0AAAAAFOWSw7jSx7WXzaMpdleJYXwYsRCV82jNA5oex9m2YhXSnb2POh+vvC1LE1NAfRc9GOb2zQG84Xdsx1Jub2brEeKkyt0sRIttN0p2kslcKkHF3k4fbf22XmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABamJprL6ecrsOoFKdrXUmWveLq8nzEGDOjFnyK9detI3noX3uyK2MwSnFdAfyN0tuAwoAAAAAAAAAFQAAAAAAAAAVAAABjwAAAo8AAAMXAAADNwAAAAAAAAM+AAAAAAAAB/wwggEgMA0GCSqGSIb3DQEBAQUAA4IBDQAwggEIAoIBAQC1IkainlJF12IBXBQdpRK1zXB7a26vuEYqRmQM09YjC6sQjCs0F2ICk8n9m/2Kw4l16eIEboB2Au9pODCE+u/dEAakEFh4qidTMn61rbGUbsLK8xzuWNW22ezzz9/nPia0HDrulXt51/FYtfnnAuD1RJGXJv/8tDllE9FL/18TzlH4WuB6Fp8FTgv7QdbZAfWJHDGFIpVCJr1XxOCsSZNFJIqGwZnD2lsChiWw5OJDbKd8otqN1hIbfHyMyfMOJ/BzRzvZXaUt4Dv5nf93EmQDWClxShRwpuX/NkZ5B2K9OFonFTbOCexm/MjMAdCBqebKKaiHFkiknUCn9eJQpZ5bAgERgV50VKj+AVTDfgTpqfO2vfo4wrufi6ZBb8QV7hllhUFBjYogQ9C96dnS7skv0s+cqFuUjwMILr5/rsbEmEMGvl0T0ytyAbtlXuowEFVj/YORNknM4yjY72YUtEPTlMpk0Cis7aIgTvu5qWMPER26PMApZuRqiwRsGIkaJIvOVOTHHjFYe3/YzdMkc7OZtqRMfQLtwVl2/zKQQV8b/a9vaT6q3mRLRd4P3esaAFe/+7sR/t+9tmB+a8kxtKM6kmaVQJMbXJZ4aoHGfeLX0m35Rcvu2Bmph7QfSDjk/eaE3q55zYSoGWShmlhlw4Kwg84sMuhmcVhLvo0LovR8bKmbdgACtTh7+7gs/l5w1lOkgbF6w7rkXLNslK7L2KYF4SPFLUcAA6dlE140Fc7FgB77PeM5Phv+bypQEYtyfLQHxd+OxlG3AAoIM8M4XulprmLd4gGMobS2Bv9CmwB5LpK/ySHE1QWjdwAUMA7/aVz7Mb1em0eks+biC8ZuVUhuAEkTVOAF4YulIjE8JlfW0dS1XKk62u0586QxiN38NTsluUDx8EAPTL66yRsfb1f3rRIDE0Wua7Lx6Bnad5n91qmHAnwSEJE5YIhQM634omd6cq9Wk4seJCUIn+ucoknrpxp0IR9QMxpKSMRHRUg2K8ZegnY3YqFunRZKCfsq9ufQEKgjZN12AFqi551KPBdn4/3V5HK6xTv0P4robSsE/BvuIfByvRf/W7ZrDx+CFC4EEcsBOACOZCrkhhqd5TkYKbe9RA+vs56+9N5qZGurkxcoKviiyEncxvTuShD65DK/6x6kMDMgQv/EdZDI3x9GtHTnRBYXwDGnPJ19w+q2zC3e2XarbxTGYQIPEC5mYx0gAA0sbjf018NGfwBhl6SB54iGsa8uLvR3jHv6OSRJgwxL6j7P0Ts4Hv2EtO12P0Lv21pwi3JC1O/WviSrKCvrQD5lMHL9Uym3hwFi2zu0mqwZvxOAbGy7kfOPXkLYKOHTZLthzKj3PsdjeceWBfYIvPGKYcd6wDr36d1aXSYS4IWeApTS2AQ2lu0DUcgSefAvsA8NkgOklvJY1cjTMSg6j6cxQo48Bvl8RAWGLbr4h2S/8KwDGxwLsSv0Gop/gnFc3GzCsmL0EkEyHHWkCA8YRXCghfW80KLDV495ff7yF5oiwK56GniqowZ3RG9Jxp5MXoJQgsLV1VMQFMAmsY69yz8eoxRH3wl9L0dMyndLulhWWzNwPMQ2I0yAWdzA/pksVmwTJTFenB3MHCiWc5rEwJ3yofe6NZZnZQrYyL9r1TNnVwfTwRUiykPiLSk4x9Mi6DX7RamDAxc8u3gDVfjPsTOTagBOEGUWlGAL54KE/E6sgCQ5DEAt12chk8AxbjBFLPgV+/idrzS0lZHOL+IVBI9D0i3Bq1yZcSIqcjZB0M3IbxbPm4gLAYOWEiTUN2ecsEHHg9nt6rhgffVoqSbCCFPbpC0xf7WOC3+BQORIZECOCC7cUAciXq3xn+GuxpFE40RWRJeKAK7bBQ21X89ABIXlQFkFddZ9kRvlZ2Pnl0oeF+2pjnZu0Yc2czNfZEQF2P7BKIdLrgMgxG89snxAY8qAYTCKyQw6xTG87wkjDcpy1wzsZLP3WsOuO7cAm7b27xU0jRKq8Cw4d1hDoyRG+RdS53F8RFJzVMaNNYgxU2tfRwUvXpTRXiOheeRVvh25+YGVnjakUXjx/dSDnOw4ETHGHD+7styDkeSfc3BdSZxswzc6OehgMI+xsCxeeRym15QUm9hxvg8X7Bfz/0WulgFwgzrm11TVynZYOmvyHpiZKoqQyQyKahIrfhwuchCr7lMsZ4a+umIkNkKxCLZnI+T7jd+eGFMgKItjz3kTTxRl3IhaJG3LbPmwRUJynMxQKdMi4Uf0qy0U7+i8hIJ9m50QXc+3tw2bwDSbx22XYJ9Wf14gxx5G5SPTb1JVCbhe4fxNt91xIxCow2zk62tzbYfRe6dfmDmgYHkv2PIEtMJZK8iKLDjFfu2ZUxsKT2A5g1q17og6o9MeXeuFS3mzJXJYFQZd+3UzlFR9qwkFkby9mg5y4XSeMvRLOHPt/H/r5SpEqBE6a9MadZYt61FBV152CUEzd43ihXtrAa0XH9HdsiySBcWI1SpM3mv9rRP0DiLjMUzHw/K1D8TE2f07zW4t/9kvE11tFj/NpICixQAAAAA=" - sdmf_old_shares[5] = 
"VGFob2UgbXV0YWJsZSBjb250YWluZXIgdjEKdQlEA47ESLbTdKdpLJXCpBxd5OH239tl5hvAiz1dvGdE5rIOpf8cbfxbPcwNF+Y5dM92uBVbmV6KAAAAAAAAB/wAAAAAAAAJ0AAAAAFOWSw7jSx7WXzaMpdleJYXwYsRCV82jNA5oex9m2YhXSnb2POh+vvC1LE1NAfRc9GOb2zQG84Xdsx1Jub2brEeKkyt0sRIttN0p2kslcKkHF3k4fbf22XmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABamJprL6ecrsOoFKdrXUmWveLq8nzEGDOjFnyK9detI3noX3uyK2MwSnFdAfyN0tuAwoAAAAAAAAAFQAAAAAAAAAVAAABjwAAAo8AAAMXAAADNwAAAAAAAAM+AAAAAAAAB/wwggEgMA0GCSqGSIb3DQEBAQUAA4IBDQAwggEIAoIBAQC1IkainlJF12IBXBQdpRK1zXB7a26vuEYqRmQM09YjC6sQjCs0F2ICk8n9m/2Kw4l16eIEboB2Au9pODCE+u/dEAakEFh4qidTMn61rbGUbsLK8xzuWNW22ezzz9/nPia0HDrulXt51/FYtfnnAuD1RJGXJv/8tDllE9FL/18TzlH4WuB6Fp8FTgv7QdbZAfWJHDGFIpVCJr1XxOCsSZNFJIqGwZnD2lsChiWw5OJDbKd8otqN1hIbfHyMyfMOJ/BzRzvZXaUt4Dv5nf93EmQDWClxShRwpuX/NkZ5B2K9OFonFTbOCexm/MjMAdCBqebKKaiHFkiknUCn9eJQpZ5bAgERgV50VKj+AVTDfgTpqfO2vfo4wrufi6ZBb8QV7hllhUFBjYogQ9C96dnS7skv0s+cqFuUjwMILr5/rsbEmEMGvl0T0ytyAbtlXuowEFVj/YORNknM4yjY72YUtEPTlMpk0Cis7aIgTvu5qWMPER26PMApZuRqiwRsGIkaJIvOVOTHHjFYe3/YzdMkc7OZtqRMfQLtwVl2/zKQQV8b/a9vaT6q3mRLRd4P3esaAFe/+7sR/t+9tmB+a8kxtKM6kmaVQJMbXJZ4aoHGfeLX0m35Rcvu2Bmph7QfSDjk/eaE3q55zYSoGWShmlhlw4Kwg84sMuhmcVhLvo0LovR8bKmbdgACtTh7+7gs/l5w1lOkgbF6w7rkXLNslK7L2KYF4SPFLUcAA6dlE140Fc7FgB77PeM5Phv+bypQEYtyfLQHxd+OxlG3AAoIM8M4XulprmLd4gGMobS2Bv9CmwB5LpK/ySHE1QWjdwATPCZX1tHUtVypOtrtOfOkMYjd/DU7JblA8fBAD0y+uskwDv9pXPsxvV6bR6Sz5uILxm5VSG4ASRNU4AXhi6UiMUKZHBmcmEgDE0Wua7Lx6Bnad5n91qmHAnwSEJE5YIhQM634omd6cq9Wk4seJCUIn+ucoknrpxp0IR9QMxpKSMRHRUg2K8ZegnY3YqFunRZKCfsq9ufQEKgjZN12AFqi551KPBdn4/3V5HK6xTv0P4robSsE/BvuIfByvRf/W7ZrDx+CFC4EEcsBOACOZCrkhhqd5TkYKbe9RA+vs56+9N5qZGurkxcoKviiyEncxvTuShD65DK/6x6kMDMgQv/EdZDI3x9GtHTnRBYXwDGnPJ19w+q2zC3e2XarbxTGYQIPEC5mYx0gAA0sbjf018NGfwBhl6SB54iGsa8uLvR3jHv6OSRJgwxL6j7P0Ts4Hv2EtO12P0Lv21pwi3JC1O/WviSrKCvrQD5lMHL9Uym3hwFi2zu0mqwZvxOAbGy7kfOPXkLYKOHTZLthzKj3PsdjeceWBfYIvPGKYcd6wDr36d1aXSYS4IWeApTS2AQ2lu0DUcgSefAvsA8NkgOklvJY1cjTMSg6j6cxQo48Bvl8RAWGLbr4h2S/8KwDGxwLsSv0Gop/gnFc3GzCsmL0EkEyHHWkCA8YRXCghfW80KLDV495ff7yF5oiwK56GniqowZ3RG9Jxp5MXoJQgsLV1VMQFMAmsY69yz8eoxRH3wl9L0dMyndLulhWWzNwPMQ2I0yAWdzA/pksVmwTJTFenB3MHCiWc5rEwJ3yofe6NZZnZQrYyL9r1TNnVwfTwRUiykPiLSk4x9Mi6DX7RamDAxc8u3gDVfjPsTOTagBOEGUWlGAL54KE/E6sgCQ5DEAt12chk8AxbjBFLPgV+/idrzS0lZHOL+IVBI9D0i3Bq1yZcSIqcjZB0M3IbxbPm4gLAYOWEiTUN2ecsEHHg9nt6rhgffVoqSbCCFPbpC0xf7WOC3+BQORIZECOCC7cUAciXq3xn+GuxpFE40RWRJeKAK7bBQ21X89ABIXlQFkFddZ9kRvlZ2Pnl0oeF+2pjnZu0Yc2czNfZEQF2P7BKIdLrgMgxG89snxAY8qAYTCKyQw6xTG87wkjDcpy1wzsZLP3WsOuO7cAm7b27xU0jRKq8Cw4d1hDoyRG+RdS53F8RFJzVMaNNYgxU2tfRwUvXpTRXiOheeRVvh25+YGVnjakUXjx/dSDnOw4ETHGHD+7styDkeSfc3BdSZxswzc6OehgMI+xsCxeeRym15QUm9hxvg8X7Bfz/0WulgFwgzrm11TVynZYOmvyHpiZKoqQyQyKahIrfhwuchCr7lMsZ4a+umIkNkKxCLZnI+T7jd+eGFMgKItjz3kTTxRl3IhaJG3LbPmwRUJynMxQKdMi4Uf0qy0U7+i8hIJ9m50QXc+3tw2bwDSbx22XYJ9Wf14gxx5G5SPTb1JVCbhe4fxNt91xIxCow2zk62tzbYfRe6dfmDmgYHkv2PIEtMJZK8iKLDjFfu2ZUxsKT2A5g1q17og6o9MeXeuFS3mzJXJYFQZd+3UzlFR9qwkFkby9mg5y4XSeMvRLOHPt/H/r5SpEqBE6a9MadZYt61FBV152CUEzd43ihXtrAa0XH9HdsiySBcWI1SpM3mv9rRP0DiLjMUzHw/K1D8TE2f07zW4t/9kvE11tFj/NpICixQAAAAA=" - sdmf_old_shares[6] = 
"VGFob2UgbXV0YWJsZSBjb250YWluZXIgdjEKdQlEA47ESLbTdKdpLJXCpBxd5OH239tl5hvAiz1dvGdE5rIOpf8cbfxbPcwNF+Y5dM92uBVbmV6KAAAAAAAAB/wAAAAAAAAJ0AAAAAFOWSw7jSx7WXzaMpdleJYXwYsRCV82jNA5oex9m2YhXSnb2POh+vvC1LE1NAfRc9GOb2zQG84Xdsx1Jub2brEeKkyt0sRIttN0p2kslcKkHF3k4fbf22XmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABamJprL6ecrsOoFKdrXUmWveLq8nzEGDOjFnyK9detI3noX3uyK2MwSnFdAfyN0tuAwoAAAAAAAAAFQAAAAAAAAAVAAABjwAAAo8AAAMXAAADNwAAAAAAAAM+AAAAAAAAB/wwggEgMA0GCSqGSIb3DQEBAQUAA4IBDQAwggEIAoIBAQC1IkainlJF12IBXBQdpRK1zXB7a26vuEYqRmQM09YjC6sQjCs0F2ICk8n9m/2Kw4l16eIEboB2Au9pODCE+u/dEAakEFh4qidTMn61rbGUbsLK8xzuWNW22ezzz9/nPia0HDrulXt51/FYtfnnAuD1RJGXJv/8tDllE9FL/18TzlH4WuB6Fp8FTgv7QdbZAfWJHDGFIpVCJr1XxOCsSZNFJIqGwZnD2lsChiWw5OJDbKd8otqN1hIbfHyMyfMOJ/BzRzvZXaUt4Dv5nf93EmQDWClxShRwpuX/NkZ5B2K9OFonFTbOCexm/MjMAdCBqebKKaiHFkiknUCn9eJQpZ5bAgERgV50VKj+AVTDfgTpqfO2vfo4wrufi6ZBb8QV7hllhUFBjYogQ9C96dnS7skv0s+cqFuUjwMILr5/rsbEmEMGvl0T0ytyAbtlXuowEFVj/YORNknM4yjY72YUtEPTlMpk0Cis7aIgTvu5qWMPER26PMApZuRqiwRsGIkaJIvOVOTHHjFYe3/YzdMkc7OZtqRMfQLtwVl2/zKQQV8b/a9vaT6q3mRLRd4P3esaAFe/+7sR/t+9tmB+a8kxtKM6kmaVQJMbXJZ4aoHGfeLX0m35Rcvu2Bmph7QfSDjk/eaE3q55zYSoGWShmlhlw4Kwg84sMuhmcVhLvo0LovR8bKmbdgACtTh7+7gs/l5w1lOkgbF6w7rkXLNslK7L2KYF4SPFLUcAA6dlE140Fc7FgB77PeM5Phv+bypQEYtyfLQHxd+OxlG3AAlyHZU7RfTJjbHu1gjabWZsTu+7nAeRVG6/ZSd4iMQ1ZgAWDSFSPvKzcFzRcuRlVgKUf0HBce1MCF8SwpUbPPEyfVJty4xLZ7DvNU/Eh/R6BarsVAagVXdp+GtEu0+fok7nilT4LchmHo8DE0Wua7Lx6Bnad5n91qmHAnwSEJE5YIhQM634omd6cq9Wk4seJCUIn+ucoknrpxp0IR9QMxpKSMRHRUg2K8ZegnY3YqFunRZKCfsq9ufQEKgjZN12AFqi551KPBdn4/3V5HK6xTv0P4robSsE/BvuIfByvRf/W7ZrDx+CFC4EEcsBOACOZCrkhhqd5TkYKbe9RA+vs56+9N5qZGurkxcoKviiyEncxvTuShD65DK/6x6kMDMgQv/EdZDI3x9GtHTnRBYXwDGnPJ19w+q2zC3e2XarbxTGYQIPEC5mYx0gAA0sbjf018NGfwBhl6SB54iGsa8uLvR3jHv6OSRJgwxL6j7P0Ts4Hv2EtO12P0Lv21pwi3JC1O/WviSrKCvrQD5lMHL9Uym3hwFi2zu0mqwZvxOAbGy7kfOPXkLYKOHTZLthzKj3PsdjeceWBfYIvPGKYcd6wDr36d1aXSYS4IWeApTS2AQ2lu0DUcgSefAvsA8NkgOklvJY1cjTMSg6j6cxQo48Bvl8RAWGLbr4h2S/8KwDGxwLsSv0Gop/gnFc3GzCsmL0EkEyHHWkCA8YRXCghfW80KLDV495ff7yF5oiwK56GniqowZ3RG9Jxp5MXoJQgsLV1VMQFMAmsY69yz8eoxRH3wl9L0dMyndLulhWWzNwPMQ2I0yAWdzA/pksVmwTJTFenB3MHCiWc5rEwJ3yofe6NZZnZQrYyL9r1TNnVwfTwRUiykPiLSk4x9Mi6DX7RamDAxc8u3gDVfjPsTOTagBOEGUWlGAL54KE/E6sgCQ5DEAt12chk8AxbjBFLPgV+/idrzS0lZHOL+IVBI9D0i3Bq1yZcSIqcjZB0M3IbxbPm4gLAYOWEiTUN2ecsEHHg9nt6rhgffVoqSbCCFPbpC0xf7WOC3+BQORIZECOCC7cUAciXq3xn+GuxpFE40RWRJeKAK7bBQ21X89ABIXlQFkFddZ9kRvlZ2Pnl0oeF+2pjnZu0Yc2czNfZEQF2P7BKIdLrgMgxG89snxAY8qAYTCKyQw6xTG87wkjDcpy1wzsZLP3WsOuO7cAm7b27xU0jRKq8Cw4d1hDoyRG+RdS53F8RFJzVMaNNYgxU2tfRwUvXpTRXiOheeRVvh25+YGVnjakUXjx/dSDnOw4ETHGHD+7styDkeSfc3BdSZxswzc6OehgMI+xsCxeeRym15QUm9hxvg8X7Bfz/0WulgFwgzrm11TVynZYOmvyHpiZKoqQyQyKahIrfhwuchCr7lMsZ4a+umIkNkKxCLZnI+T7jd+eGFMgKItjz3kTTxRl3IhaJG3LbPmwRUJynMxQKdMi4Uf0qy0U7+i8hIJ9m50QXc+3tw2bwDSbx22XYJ9Wf14gxx5G5SPTb1JVCbhe4fxNt91xIxCow2zk62tzbYfRe6dfmDmgYHkv2PIEtMJZK8iKLDjFfu2ZUxsKT2A5g1q17og6o9MeXeuFS3mzJXJYFQZd+3UzlFR9qwkFkby9mg5y4XSeMvRLOHPt/H/r5SpEqBE6a9MadZYt61FBV152CUEzd43ihXtrAa0XH9HdsiySBcWI1SpM3mv9rRP0DiLjMUzHw/K1D8TE2f07zW4t/9kvE11tFj/NpICixQAAAAA=" - sdmf_old_shares[7] = 
"VGFob2UgbXV0YWJsZSBjb250YWluZXIgdjEKdQlEA47ESLbTdKdpLJXCpBxd5OH239tl5hvAiz1dvGdE5rIOpf8cbfxbPcwNF+Y5dM92uBVbmV6KAAAAAAAAB/wAAAAAAAAJ0AAAAAFOWSw7jSx7WXzaMpdleJYXwYsRCV82jNA5oex9m2YhXSnb2POh+vvC1LE1NAfRc9GOb2zQG84Xdsx1Jub2brEeKkyt0sRIttN0p2kslcKkHF3k4fbf22XmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABamJprL6ecrsOoFKdrXUmWveLq8nzEGDOjFnyK9detI3noX3uyK2MwSnFdAfyN0tuAwoAAAAAAAAAFQAAAAAAAAAVAAABjwAAAo8AAAMXAAADNwAAAAAAAAM+AAAAAAAAB/wwggEgMA0GCSqGSIb3DQEBAQUAA4IBDQAwggEIAoIBAQC1IkainlJF12IBXBQdpRK1zXB7a26vuEYqRmQM09YjC6sQjCs0F2ICk8n9m/2Kw4l16eIEboB2Au9pODCE+u/dEAakEFh4qidTMn61rbGUbsLK8xzuWNW22ezzz9/nPia0HDrulXt51/FYtfnnAuD1RJGXJv/8tDllE9FL/18TzlH4WuB6Fp8FTgv7QdbZAfWJHDGFIpVCJr1XxOCsSZNFJIqGwZnD2lsChiWw5OJDbKd8otqN1hIbfHyMyfMOJ/BzRzvZXaUt4Dv5nf93EmQDWClxShRwpuX/NkZ5B2K9OFonFTbOCexm/MjMAdCBqebKKaiHFkiknUCn9eJQpZ5bAgERgV50VKj+AVTDfgTpqfO2vfo4wrufi6ZBb8QV7hllhUFBjYogQ9C96dnS7skv0s+cqFuUjwMILr5/rsbEmEMGvl0T0ytyAbtlXuowEFVj/YORNknM4yjY72YUtEPTlMpk0Cis7aIgTvu5qWMPER26PMApZuRqiwRsGIkaJIvOVOTHHjFYe3/YzdMkc7OZtqRMfQLtwVl2/zKQQV8b/a9vaT6q3mRLRd4P3esaAFe/+7sR/t+9tmB+a8kxtKM6kmaVQJMbXJZ4aoHGfeLX0m35Rcvu2Bmph7QfSDjk/eaE3q55zYSoGWShmlhlw4Kwg84sMuhmcVhLvo0LovR8bKmbdgACtTh7+7gs/l5w1lOkgbF6w7rkXLNslK7L2KYF4SPFLUcAA6dlE140Fc7FgB77PeM5Phv+bypQEYtyfLQHxd+OxlG3AAlyHZU7RfTJjbHu1gjabWZsTu+7nAeRVG6/ZSd4iMQ1ZgAVbcuMS2ew7zVPxIf0egWq7FQGoFV3afhrRLtPn6JO54oNIVI+8rNwXNFy5GVWApR/QcFx7UwIXxLClRs88TJ9UtLnNF4/mM0DE0Wua7Lx6Bnad5n91qmHAnwSEJE5YIhQM634omd6cq9Wk4seJCUIn+ucoknrpxp0IR9QMxpKSMRHRUg2K8ZegnY3YqFunRZKCfsq9ufQEKgjZN12AFqi551KPBdn4/3V5HK6xTv0P4robSsE/BvuIfByvRf/W7ZrDx+CFC4EEcsBOACOZCrkhhqd5TkYKbe9RA+vs56+9N5qZGurkxcoKviiyEncxvTuShD65DK/6x6kMDMgQv/EdZDI3x9GtHTnRBYXwDGnPJ19w+q2zC3e2XarbxTGYQIPEC5mYx0gAA0sbjf018NGfwBhl6SB54iGsa8uLvR3jHv6OSRJgwxL6j7P0Ts4Hv2EtO12P0Lv21pwi3JC1O/WviSrKCvrQD5lMHL9Uym3hwFi2zu0mqwZvxOAbGy7kfOPXkLYKOHTZLthzKj3PsdjeceWBfYIvPGKYcd6wDr36d1aXSYS4IWeApTS2AQ2lu0DUcgSefAvsA8NkgOklvJY1cjTMSg6j6cxQo48Bvl8RAWGLbr4h2S/8KwDGxwLsSv0Gop/gnFc3GzCsmL0EkEyHHWkCA8YRXCghfW80KLDV495ff7yF5oiwK56GniqowZ3RG9Jxp5MXoJQgsLV1VMQFMAmsY69yz8eoxRH3wl9L0dMyndLulhWWzNwPMQ2I0yAWdzA/pksVmwTJTFenB3MHCiWc5rEwJ3yofe6NZZnZQrYyL9r1TNnVwfTwRUiykPiLSk4x9Mi6DX7RamDAxc8u3gDVfjPsTOTagBOEGUWlGAL54KE/E6sgCQ5DEAt12chk8AxbjBFLPgV+/idrzS0lZHOL+IVBI9D0i3Bq1yZcSIqcjZB0M3IbxbPm4gLAYOWEiTUN2ecsEHHg9nt6rhgffVoqSbCCFPbpC0xf7WOC3+BQORIZECOCC7cUAciXq3xn+GuxpFE40RWRJeKAK7bBQ21X89ABIXlQFkFddZ9kRvlZ2Pnl0oeF+2pjnZu0Yc2czNfZEQF2P7BKIdLrgMgxG89snxAY8qAYTCKyQw6xTG87wkjDcpy1wzsZLP3WsOuO7cAm7b27xU0jRKq8Cw4d1hDoyRG+RdS53F8RFJzVMaNNYgxU2tfRwUvXpTRXiOheeRVvh25+YGVnjakUXjx/dSDnOw4ETHGHD+7styDkeSfc3BdSZxswzc6OehgMI+xsCxeeRym15QUm9hxvg8X7Bfz/0WulgFwgzrm11TVynZYOmvyHpiZKoqQyQyKahIrfhwuchCr7lMsZ4a+umIkNkKxCLZnI+T7jd+eGFMgKItjz3kTTxRl3IhaJG3LbPmwRUJynMxQKdMi4Uf0qy0U7+i8hIJ9m50QXc+3tw2bwDSbx22XYJ9Wf14gxx5G5SPTb1JVCbhe4fxNt91xIxCow2zk62tzbYfRe6dfmDmgYHkv2PIEtMJZK8iKLDjFfu2ZUxsKT2A5g1q17og6o9MeXeuFS3mzJXJYFQZd+3UzlFR9qwkFkby9mg5y4XSeMvRLOHPt/H/r5SpEqBE6a9MadZYt61FBV152CUEzd43ihXtrAa0XH9HdsiySBcWI1SpM3mv9rRP0DiLjMUzHw/K1D8TE2f07zW4t/9kvE11tFj/NpICixQAAAAA=" - sdmf_old_shares[8] = 
"VGFob2UgbXV0YWJsZSBjb250YWluZXIgdjEKdQlEA47ESLbTdKdpLJXCpBxd5OH239tl5hvAiz1dvGdE5rIOpf8cbfxbPcwNF+Y5dM92uBVbmV6KAAAAAAAAB/wAAAAAAAAJ0AAAAAFOWSw7jSx7WXzaMpdleJYXwYsRCV82jNA5oex9m2YhXSnb2POh+vvC1LE1NAfRc9GOb2zQG84Xdsx1Jub2brEeKkyt0sRIttN0p2kslcKkHF3k4fbf22XmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABamJprL6ecrsOoFKdrXUmWveLq8nzEGDOjFnyK9detI3noX3uyK2MwSnFdAfyN0tuAwoAAAAAAAAAFQAAAAAAAAAVAAABjwAAAo8AAAMXAAADNwAAAAAAAAM+AAAAAAAAB/wwggEgMA0GCSqGSIb3DQEBAQUAA4IBDQAwggEIAoIBAQC1IkainlJF12IBXBQdpRK1zXB7a26vuEYqRmQM09YjC6sQjCs0F2ICk8n9m/2Kw4l16eIEboB2Au9pODCE+u/dEAakEFh4qidTMn61rbGUbsLK8xzuWNW22ezzz9/nPia0HDrulXt51/FYtfnnAuD1RJGXJv/8tDllE9FL/18TzlH4WuB6Fp8FTgv7QdbZAfWJHDGFIpVCJr1XxOCsSZNFJIqGwZnD2lsChiWw5OJDbKd8otqN1hIbfHyMyfMOJ/BzRzvZXaUt4Dv5nf93EmQDWClxShRwpuX/NkZ5B2K9OFonFTbOCexm/MjMAdCBqebKKaiHFkiknUCn9eJQpZ5bAgERgV50VKj+AVTDfgTpqfO2vfo4wrufi6ZBb8QV7hllhUFBjYogQ9C96dnS7skv0s+cqFuUjwMILr5/rsbEmEMGvl0T0ytyAbtlXuowEFVj/YORNknM4yjY72YUtEPTlMpk0Cis7aIgTvu5qWMPER26PMApZuRqiwRsGIkaJIvOVOTHHjFYe3/YzdMkc7OZtqRMfQLtwVl2/zKQQV8b/a9vaT6q3mRLRd4P3esaAFe/+7sR/t+9tmB+a8kxtKM6kmaVQJMbXJZ4aoHGfeLX0m35Rcvu2Bmph7QfSDjk/eaE3q55zYSoGWShmlhlw4Kwg84sMuhmcVhLvo0LovR8bKmbdgABUSzNKiMx0E91q51/WH6ASL0fDEOLef9oxuyBX5F5cpoABojmWkDX3k3FKfgNHIeptE3lxB8HHzxDfSD250psyfNCAAwGsKbMxbmI2NpdTozZ3SICrySwgGkatA1gsDOJmOnTzgAYmqKY7A9vQChuYa17fYSyKerIb3682jxiIneQvCMWCK5WcuI4PMeIsUAj8yxdxHvV+a9vtSCEsDVvymrrooDKX1GK98t37yoDE0Wua7Lx6Bnad5n91qmHAnwSEJE5YIhQM634omd6cq9Wk4seJCUIn+ucoknrpxp0IR9QMxpKSMRHRUg2K8ZegnY3YqFunRZKCfsq9ufQEKgjZN12AFqi551KPBdn4/3V5HK6xTv0P4robSsE/BvuIfByvRf/W7ZrDx+CFC4EEcsBOACOZCrkhhqd5TkYKbe9RA+vs56+9N5qZGurkxcoKviiyEncxvTuShD65DK/6x6kMDMgQv/EdZDI3x9GtHTnRBYXwDGnPJ19w+q2zC3e2XarbxTGYQIPEC5mYx0gAA0sbjf018NGfwBhl6SB54iGsa8uLvR3jHv6OSRJgwxL6j7P0Ts4Hv2EtO12P0Lv21pwi3JC1O/WviSrKCvrQD5lMHL9Uym3hwFi2zu0mqwZvxOAbGy7kfOPXkLYKOHTZLthzKj3PsdjeceWBfYIvPGKYcd6wDr36d1aXSYS4IWeApTS2AQ2lu0DUcgSefAvsA8NkgOklvJY1cjTMSg6j6cxQo48Bvl8RAWGLbr4h2S/8KwDGxwLsSv0Gop/gnFc3GzCsmL0EkEyHHWkCA8YRXCghfW80KLDV495ff7yF5oiwK56GniqowZ3RG9Jxp5MXoJQgsLV1VMQFMAmsY69yz8eoxRH3wl9L0dMyndLulhWWzNwPMQ2I0yAWdzA/pksVmwTJTFenB3MHCiWc5rEwJ3yofe6NZZnZQrYyL9r1TNnVwfTwRUiykPiLSk4x9Mi6DX7RamDAxc8u3gDVfjPsTOTagBOEGUWlGAL54KE/E6sgCQ5DEAt12chk8AxbjBFLPgV+/idrzS0lZHOL+IVBI9D0i3Bq1yZcSIqcjZB0M3IbxbPm4gLAYOWEiTUN2ecsEHHg9nt6rhgffVoqSbCCFPbpC0xf7WOC3+BQORIZECOCC7cUAciXq3xn+GuxpFE40RWRJeKAK7bBQ21X89ABIXlQFkFddZ9kRvlZ2Pnl0oeF+2pjnZu0Yc2czNfZEQF2P7BKIdLrgMgxG89snxAY8qAYTCKyQw6xTG87wkjDcpy1wzsZLP3WsOuO7cAm7b27xU0jRKq8Cw4d1hDoyRG+RdS53F8RFJzVMaNNYgxU2tfRwUvXpTRXiOheeRVvh25+YGVnjakUXjx/dSDnOw4ETHGHD+7styDkeSfc3BdSZxswzc6OehgMI+xsCxeeRym15QUm9hxvg8X7Bfz/0WulgFwgzrm11TVynZYOmvyHpiZKoqQyQyKahIrfhwuchCr7lMsZ4a+umIkNkKxCLZnI+T7jd+eGFMgKItjz3kTTxRl3IhaJG3LbPmwRUJynMxQKdMi4Uf0qy0U7+i8hIJ9m50QXc+3tw2bwDSbx22XYJ9Wf14gxx5G5SPTb1JVCbhe4fxNt91xIxCow2zk62tzbYfRe6dfmDmgYHkv2PIEtMJZK8iKLDjFfu2ZUxsKT2A5g1q17og6o9MeXeuFS3mzJXJYFQZd+3UzlFR9qwkFkby9mg5y4XSeMvRLOHPt/H/r5SpEqBE6a9MadZYt61FBV152CUEzd43ihXtrAa0XH9HdsiySBcWI1SpM3mv9rRP0DiLjMUzHw/K1D8TE2f07zW4t/9kvE11tFj/NpICixQAAAAA=" - sdmf_old_shares[9] = 
"VGFob2UgbXV0YWJsZSBjb250YWluZXIgdjEKdQlEA47ESLbTdKdpLJXCpBxd5OH239tl5hvAiz1dvGdE5rIOpf8cbfxbPcwNF+Y5dM92uBVbmV6KAAAAAAAAB/wAAAAAAAAJ0AAAAAFOWSw7jSx7WXzaMpdleJYXwYsRCV82jNA5oex9m2YhXSnb2POh+vvC1LE1NAfRc9GOb2zQG84Xdsx1Jub2brEeKkyt0sRIttN0p2kslcKkHF3k4fbf22XmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABamJprL6ecrsOoFKdrXUmWveLq8nzEGDOjFnyK9detI3noX3uyK2MwSnFdAfyN0tuAwoAAAAAAAAAFQAAAAAAAAAVAAABjwAAAo8AAAMXAAADNwAAAAAAAAM+AAAAAAAAB/wwggEgMA0GCSqGSIb3DQEBAQUAA4IBDQAwggEIAoIBAQC1IkainlJF12IBXBQdpRK1zXB7a26vuEYqRmQM09YjC6sQjCs0F2ICk8n9m/2Kw4l16eIEboB2Au9pODCE+u/dEAakEFh4qidTMn61rbGUbsLK8xzuWNW22ezzz9/nPia0HDrulXt51/FYtfnnAuD1RJGXJv/8tDllE9FL/18TzlH4WuB6Fp8FTgv7QdbZAfWJHDGFIpVCJr1XxOCsSZNFJIqGwZnD2lsChiWw5OJDbKd8otqN1hIbfHyMyfMOJ/BzRzvZXaUt4Dv5nf93EmQDWClxShRwpuX/NkZ5B2K9OFonFTbOCexm/MjMAdCBqebKKaiHFkiknUCn9eJQpZ5bAgERgV50VKj+AVTDfgTpqfO2vfo4wrufi6ZBb8QV7hllhUFBjYogQ9C96dnS7skv0s+cqFuUjwMILr5/rsbEmEMGvl0T0ytyAbtlXuowEFVj/YORNknM4yjY72YUtEPTlMpk0Cis7aIgTvu5qWMPER26PMApZuRqiwRsGIkaJIvOVOTHHjFYe3/YzdMkc7OZtqRMfQLtwVl2/zKQQV8b/a9vaT6q3mRLRd4P3esaAFe/+7sR/t+9tmB+a8kxtKM6kmaVQJMbXJZ4aoHGfeLX0m35Rcvu2Bmph7QfSDjk/eaE3q55zYSoGWShmlhlw4Kwg84sMuhmcVhLvo0LovR8bKmbdgABUSzNKiMx0E91q51/WH6ASL0fDEOLef9oxuyBX5F5cpoABojmWkDX3k3FKfgNHIeptE3lxB8HHzxDfSD250psyfNCAAwGsKbMxbmI2NpdTozZ3SICrySwgGkatA1gsDOJmOnTzgAXVnLiODzHiLFAI/MsXcR71fmvb7UghLA1b8pq66KAyl+aopjsD29AKG5hrXt9hLIp6shvfrzaPGIid5C8IxYIrjgBj1YohGgDE0Wua7Lx6Bnad5n91qmHAnwSEJE5YIhQM634omd6cq9Wk4seJCUIn+ucoknrpxp0IR9QMxpKSMRHRUg2K8ZegnY3YqFunRZKCfsq9ufQEKgjZN12AFqi551KPBdn4/3V5HK6xTv0P4robSsE/BvuIfByvRf/W7ZrDx+CFC4EEcsBOACOZCrkhhqd5TkYKbe9RA+vs56+9N5qZGurkxcoKviiyEncxvTuShD65DK/6x6kMDMgQv/EdZDI3x9GtHTnRBYXwDGnPJ19w+q2zC3e2XarbxTGYQIPEC5mYx0gAA0sbjf018NGfwBhl6SB54iGsa8uLvR3jHv6OSRJgwxL6j7P0Ts4Hv2EtO12P0Lv21pwi3JC1O/WviSrKCvrQD5lMHL9Uym3hwFi2zu0mqwZvxOAbGy7kfOPXkLYKOHTZLthzKj3PsdjeceWBfYIvPGKYcd6wDr36d1aXSYS4IWeApTS2AQ2lu0DUcgSefAvsA8NkgOklvJY1cjTMSg6j6cxQo48Bvl8RAWGLbr4h2S/8KwDGxwLsSv0Gop/gnFc3GzCsmL0EkEyHHWkCA8YRXCghfW80KLDV495ff7yF5oiwK56GniqowZ3RG9Jxp5MXoJQgsLV1VMQFMAmsY69yz8eoxRH3wl9L0dMyndLulhWWzNwPMQ2I0yAWdzA/pksVmwTJTFenB3MHCiWc5rEwJ3yofe6NZZnZQrYyL9r1TNnVwfTwRUiykPiLSk4x9Mi6DX7RamDAxc8u3gDVfjPsTOTagBOEGUWlGAL54KE/E6sgCQ5DEAt12chk8AxbjBFLPgV+/idrzS0lZHOL+IVBI9D0i3Bq1yZcSIqcjZB0M3IbxbPm4gLAYOWEiTUN2ecsEHHg9nt6rhgffVoqSbCCFPbpC0xf7WOC3+BQORIZECOCC7cUAciXq3xn+GuxpFE40RWRJeKAK7bBQ21X89ABIXlQFkFddZ9kRvlZ2Pnl0oeF+2pjnZu0Yc2czNfZEQF2P7BKIdLrgMgxG89snxAY8qAYTCKyQw6xTG87wkjDcpy1wzsZLP3WsOuO7cAm7b27xU0jRKq8Cw4d1hDoyRG+RdS53F8RFJzVMaNNYgxU2tfRwUvXpTRXiOheeRVvh25+YGVnjakUXjx/dSDnOw4ETHGHD+7styDkeSfc3BdSZxswzc6OehgMI+xsCxeeRym15QUm9hxvg8X7Bfz/0WulgFwgzrm11TVynZYOmvyHpiZKoqQyQyKahIrfhwuchCr7lMsZ4a+umIkNkKxCLZnI+T7jd+eGFMgKItjz3kTTxRl3IhaJG3LbPmwRUJynMxQKdMi4Uf0qy0U7+i8hIJ9m50QXc+3tw2bwDSbx22XYJ9Wf14gxx5G5SPTb1JVCbhe4fxNt91xIxCow2zk62tzbYfRe6dfmDmgYHkv2PIEtMJZK8iKLDjFfu2ZUxsKT2A5g1q17og6o9MeXeuFS3mzJXJYFQZd+3UzlFR9qwkFkby9mg5y4XSeMvRLOHPt/H/r5SpEqBE6a9MadZYt61FBV152CUEzd43ihXtrAa0XH9HdsiySBcWI1SpM3mv9rRP0DiLjMUzHw/K1D8TE2f07zW4t/9kvE11tFj/NpICixQAAAAA=" - sdmf_old_cap = "URI:SSK:gmjgofw6gan57gwpsow6gtrz3e:5adm6fayxmu3e4lkmfvt6lkkfix34ai2wop2ioqr4bgvvhiol3kq" - sdmf_old_contents = "This is a test file.\n" + sdmf_old_shares[0] = 
b"VGFob2UgbXV0YWJsZSBjb250YWluZXIgdjEKdQlEA47ESLbTdKdpLJXCpBxd5OH239tl5hvAiz1dvGdE5rIOpf8cbfxbPcwNF+Y5dM92uBVbmV6KAAAAAAAAB/wAAAAAAAAJ0AAAAAFOWSw7jSx7WXzaMpdleJYXwYsRCV82jNA5oex9m2YhXSnb2POh+vvC1LE1NAfRc9GOb2zQG84Xdsx1Jub2brEeKkyt0sRIttN0p2kslcKkHF3k4fbf22XmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABamJprL6ecrsOoFKdrXUmWveLq8nzEGDOjFnyK9detI3noX3uyK2MwSnFdAfyN0tuAwoAAAAAAAAAFQAAAAAAAAAVAAABjwAAAo8AAAMXAAADNwAAAAAAAAM+AAAAAAAAB/wwggEgMA0GCSqGSIb3DQEBAQUAA4IBDQAwggEIAoIBAQC1IkainlJF12IBXBQdpRK1zXB7a26vuEYqRmQM09YjC6sQjCs0F2ICk8n9m/2Kw4l16eIEboB2Au9pODCE+u/dEAakEFh4qidTMn61rbGUbsLK8xzuWNW22ezzz9/nPia0HDrulXt51/FYtfnnAuD1RJGXJv/8tDllE9FL/18TzlH4WuB6Fp8FTgv7QdbZAfWJHDGFIpVCJr1XxOCsSZNFJIqGwZnD2lsChiWw5OJDbKd8otqN1hIbfHyMyfMOJ/BzRzvZXaUt4Dv5nf93EmQDWClxShRwpuX/NkZ5B2K9OFonFTbOCexm/MjMAdCBqebKKaiHFkiknUCn9eJQpZ5bAgERgV50VKj+AVTDfgTpqfO2vfo4wrufi6ZBb8QV7hllhUFBjYogQ9C96dnS7skv0s+cqFuUjwMILr5/rsbEmEMGvl0T0ytyAbtlXuowEFVj/YORNknM4yjY72YUtEPTlMpk0Cis7aIgTvu5qWMPER26PMApZuRqiwRsGIkaJIvOVOTHHjFYe3/YzdMkc7OZtqRMfQLtwVl2/zKQQV8b/a9vaT6q3mRLRd4P3esaAFe/+7sR/t+9tmB+a8kxtKM6kmaVQJMbXJZ4aoHGfeLX0m35Rcvu2Bmph7QfSDjk/eaE3q55zYSoGWShmlhlw4Kwg84sMuhmcVhLvo0LovR8bKmbdgACtTh7+7gs/l5w1lOkgbF6w7rkXLNslK7L2KYF4SPFLUcABOOLy8EETxh7h7/z9d62EiPu9CNpRrCOLxUhn+JUS+DuAAhgcAb/adrQFrhlrRNoRpvjDuxmFebA4F0qCyqWssm61AAQ/EX4eC/1+hGOQ/h4EiKUkqxdsfzdcPlDvd11SGWZ0VHsUclZChTzuBAU2zLTXm+cG8IFhO50ly6Ey/DB44NtMKVaVzO0nU8DE0Wua7Lx6Bnad5n91qmHAnwSEJE5YIhQM634omd6cq9Wk4seJCUIn+ucoknrpxp0IR9QMxpKSMRHRUg2K8ZegnY3YqFunRZKCfsq9ufQEKgjZN12AFqi551KPBdn4/3V5HK6xTv0P4robSsE/BvuIfByvRf/W7ZrDx+CFC4EEcsBOACOZCrkhhqd5TkYKbe9RA+vs56+9N5qZGurkxcoKviiyEncxvTuShD65DK/6x6kMDMgQv/EdZDI3x9GtHTnRBYXwDGnPJ19w+q2zC3e2XarbxTGYQIPEC5mYx0gAA0sbjf018NGfwBhl6SB54iGsa8uLvR3jHv6OSRJgwxL6j7P0Ts4Hv2EtO12P0Lv21pwi3JC1O/WviSrKCvrQD5lMHL9Uym3hwFi2zu0mqwZvxOAbGy7kfOPXkLYKOHTZLthzKj3PsdjeceWBfYIvPGKYcd6wDr36d1aXSYS4IWeApTS2AQ2lu0DUcgSefAvsA8NkgOklvJY1cjTMSg6j6cxQo48Bvl8RAWGLbr4h2S/8KwDGxwLsSv0Gop/gnFc3GzCsmL0EkEyHHWkCA8YRXCghfW80KLDV495ff7yF5oiwK56GniqowZ3RG9Jxp5MXoJQgsLV1VMQFMAmsY69yz8eoxRH3wl9L0dMyndLulhWWzNwPMQ2I0yAWdzA/pksVmwTJTFenB3MHCiWc5rEwJ3yofe6NZZnZQrYyL9r1TNnVwfTwRUiykPiLSk4x9Mi6DX7RamDAxc8u3gDVfjPsTOTagBOEGUWlGAL54KE/E6sgCQ5DEAt12chk8AxbjBFLPgV+/idrzS0lZHOL+IVBI9D0i3Bq1yZcSIqcjZB0M3IbxbPm4gLAYOWEiTUN2ecsEHHg9nt6rhgffVoqSbCCFPbpC0xf7WOC3+BQORIZECOCC7cUAciXq3xn+GuxpFE40RWRJeKAK7bBQ21X89ABIXlQFkFddZ9kRvlZ2Pnl0oeF+2pjnZu0Yc2czNfZEQF2P7BKIdLrgMgxG89snxAY8qAYTCKyQw6xTG87wkjDcpy1wzsZLP3WsOuO7cAm7b27xU0jRKq8Cw4d1hDoyRG+RdS53F8RFJzVMaNNYgxU2tfRwUvXpTRXiOheeRVvh25+YGVnjakUXjx/dSDnOw4ETHGHD+7styDkeSfc3BdSZxswzc6OehgMI+xsCxeeRym15QUm9hxvg8X7Bfz/0WulgFwgzrm11TVynZYOmvyHpiZKoqQyQyKahIrfhwuchCr7lMsZ4a+umIkNkKxCLZnI+T7jd+eGFMgKItjz3kTTxRl3IhaJG3LbPmwRUJynMxQKdMi4Uf0qy0U7+i8hIJ9m50QXc+3tw2bwDSbx22XYJ9Wf14gxx5G5SPTb1JVCbhe4fxNt91xIxCow2zk62tzbYfRe6dfmDmgYHkv2PIEtMJZK8iKLDjFfu2ZUxsKT2A5g1q17og6o9MeXeuFS3mzJXJYFQZd+3UzlFR9qwkFkby9mg5y4XSeMvRLOHPt/H/r5SpEqBE6a9MadZYt61FBV152CUEzd43ihXtrAa0XH9HdsiySBcWI1SpM3mv9rRP0DiLjMUzHw/K1D8TE2f07zW4t/9kvE11tFj/NpICixQAAAAA=" + sdmf_old_shares[1] = 
b"VGFob2UgbXV0YWJsZSBjb250YWluZXIgdjEKdQlEA47ESLbTdKdpLJXCpBxd5OH239tl5hvAiz1dvGdE5rIOpf8cbfxbPcwNF+Y5dM92uBVbmV6KAAAAAAAAB/wAAAAAAAAJ0AAAAAFOWSw7jSx7WXzaMpdleJYXwYsRCV82jNA5oex9m2YhXSnb2POh+vvC1LE1NAfRc9GOb2zQG84Xdsx1Jub2brEeKkyt0sRIttN0p2kslcKkHF3k4fbf22XmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABamJprL6ecrsOoFKdrXUmWveLq8nzEGDOjFnyK9detI3noX3uyK2MwSnFdAfyN0tuAwoAAAAAAAAAFQAAAAAAAAAVAAABjwAAAo8AAAMXAAADNwAAAAAAAAM+AAAAAAAAB/wwggEgMA0GCSqGSIb3DQEBAQUAA4IBDQAwggEIAoIBAQC1IkainlJF12IBXBQdpRK1zXB7a26vuEYqRmQM09YjC6sQjCs0F2ICk8n9m/2Kw4l16eIEboB2Au9pODCE+u/dEAakEFh4qidTMn61rbGUbsLK8xzuWNW22ezzz9/nPia0HDrulXt51/FYtfnnAuD1RJGXJv/8tDllE9FL/18TzlH4WuB6Fp8FTgv7QdbZAfWJHDGFIpVCJr1XxOCsSZNFJIqGwZnD2lsChiWw5OJDbKd8otqN1hIbfHyMyfMOJ/BzRzvZXaUt4Dv5nf93EmQDWClxShRwpuX/NkZ5B2K9OFonFTbOCexm/MjMAdCBqebKKaiHFkiknUCn9eJQpZ5bAgERgV50VKj+AVTDfgTpqfO2vfo4wrufi6ZBb8QV7hllhUFBjYogQ9C96dnS7skv0s+cqFuUjwMILr5/rsbEmEMGvl0T0ytyAbtlXuowEFVj/YORNknM4yjY72YUtEPTlMpk0Cis7aIgTvu5qWMPER26PMApZuRqiwRsGIkaJIvOVOTHHjFYe3/YzdMkc7OZtqRMfQLtwVl2/zKQQV8b/a9vaT6q3mRLRd4P3esaAFe/+7sR/t+9tmB+a8kxtKM6kmaVQJMbXJZ4aoHGfeLX0m35Rcvu2Bmph7QfSDjk/eaE3q55zYSoGWShmlhlw4Kwg84sMuhmcVhLvo0LovR8bKmbdgACtTh7+7gs/l5w1lOkgbF6w7rkXLNslK7L2KYF4SPFLUcABOOLy8EETxh7h7/z9d62EiPu9CNpRrCOLxUhn+JUS+DuAAhgcAb/adrQFrhlrRNoRpvjDuxmFebA4F0qCyqWssm61AAP7FHJWQoU87gQFNsy015vnBvCBYTudJcuhMvwweODbTD8Rfh4L/X6EY5D+HgSIpSSrF2x/N1w+UO93XVIZZnRUeePDXEwhqYDE0Wua7Lx6Bnad5n91qmHAnwSEJE5YIhQM634omd6cq9Wk4seJCUIn+ucoknrpxp0IR9QMxpKSMRHRUg2K8ZegnY3YqFunRZKCfsq9ufQEKgjZN12AFqi551KPBdn4/3V5HK6xTv0P4robSsE/BvuIfByvRf/W7ZrDx+CFC4EEcsBOACOZCrkhhqd5TkYKbe9RA+vs56+9N5qZGurkxcoKviiyEncxvTuShD65DK/6x6kMDMgQv/EdZDI3x9GtHTnRBYXwDGnPJ19w+q2zC3e2XarbxTGYQIPEC5mYx0gAA0sbjf018NGfwBhl6SB54iGsa8uLvR3jHv6OSRJgwxL6j7P0Ts4Hv2EtO12P0Lv21pwi3JC1O/WviSrKCvrQD5lMHL9Uym3hwFi2zu0mqwZvxOAbGy7kfOPXkLYKOHTZLthzKj3PsdjeceWBfYIvPGKYcd6wDr36d1aXSYS4IWeApTS2AQ2lu0DUcgSefAvsA8NkgOklvJY1cjTMSg6j6cxQo48Bvl8RAWGLbr4h2S/8KwDGxwLsSv0Gop/gnFc3GzCsmL0EkEyHHWkCA8YRXCghfW80KLDV495ff7yF5oiwK56GniqowZ3RG9Jxp5MXoJQgsLV1VMQFMAmsY69yz8eoxRH3wl9L0dMyndLulhWWzNwPMQ2I0yAWdzA/pksVmwTJTFenB3MHCiWc5rEwJ3yofe6NZZnZQrYyL9r1TNnVwfTwRUiykPiLSk4x9Mi6DX7RamDAxc8u3gDVfjPsTOTagBOEGUWlGAL54KE/E6sgCQ5DEAt12chk8AxbjBFLPgV+/idrzS0lZHOL+IVBI9D0i3Bq1yZcSIqcjZB0M3IbxbPm4gLAYOWEiTUN2ecsEHHg9nt6rhgffVoqSbCCFPbpC0xf7WOC3+BQORIZECOCC7cUAciXq3xn+GuxpFE40RWRJeKAK7bBQ21X89ABIXlQFkFddZ9kRvlZ2Pnl0oeF+2pjnZu0Yc2czNfZEQF2P7BKIdLrgMgxG89snxAY8qAYTCKyQw6xTG87wkjDcpy1wzsZLP3WsOuO7cAm7b27xU0jRKq8Cw4d1hDoyRG+RdS53F8RFJzVMaNNYgxU2tfRwUvXpTRXiOheeRVvh25+YGVnjakUXjx/dSDnOw4ETHGHD+7styDkeSfc3BdSZxswzc6OehgMI+xsCxeeRym15QUm9hxvg8X7Bfz/0WulgFwgzrm11TVynZYOmvyHpiZKoqQyQyKahIrfhwuchCr7lMsZ4a+umIkNkKxCLZnI+T7jd+eGFMgKItjz3kTTxRl3IhaJG3LbPmwRUJynMxQKdMi4Uf0qy0U7+i8hIJ9m50QXc+3tw2bwDSbx22XYJ9Wf14gxx5G5SPTb1JVCbhe4fxNt91xIxCow2zk62tzbYfRe6dfmDmgYHkv2PIEtMJZK8iKLDjFfu2ZUxsKT2A5g1q17og6o9MeXeuFS3mzJXJYFQZd+3UzlFR9qwkFkby9mg5y4XSeMvRLOHPt/H/r5SpEqBE6a9MadZYt61FBV152CUEzd43ihXtrAa0XH9HdsiySBcWI1SpM3mv9rRP0DiLjMUzHw/K1D8TE2f07zW4t/9kvE11tFj/NpICixQAAAAA=" + sdmf_old_shares[2] = 
b"VGFob2UgbXV0YWJsZSBjb250YWluZXIgdjEKdQlEA47ESLbTdKdpLJXCpBxd5OH239tl5hvAiz1dvGdE5rIOpf8cbfxbPcwNF+Y5dM92uBVbmV6KAAAAAAAAB/wAAAAAAAAJ0AAAAAFOWSw7jSx7WXzaMpdleJYXwYsRCV82jNA5oex9m2YhXSnb2POh+vvC1LE1NAfRc9GOb2zQG84Xdsx1Jub2brEeKkyt0sRIttN0p2kslcKkHF3k4fbf22XmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABamJprL6ecrsOoFKdrXUmWveLq8nzEGDOjFnyK9detI3noX3uyK2MwSnFdAfyN0tuAwoAAAAAAAAAFQAAAAAAAAAVAAABjwAAAo8AAAMXAAADNwAAAAAAAAM+AAAAAAAAB/wwggEgMA0GCSqGSIb3DQEBAQUAA4IBDQAwggEIAoIBAQC1IkainlJF12IBXBQdpRK1zXB7a26vuEYqRmQM09YjC6sQjCs0F2ICk8n9m/2Kw4l16eIEboB2Au9pODCE+u/dEAakEFh4qidTMn61rbGUbsLK8xzuWNW22ezzz9/nPia0HDrulXt51/FYtfnnAuD1RJGXJv/8tDllE9FL/18TzlH4WuB6Fp8FTgv7QdbZAfWJHDGFIpVCJr1XxOCsSZNFJIqGwZnD2lsChiWw5OJDbKd8otqN1hIbfHyMyfMOJ/BzRzvZXaUt4Dv5nf93EmQDWClxShRwpuX/NkZ5B2K9OFonFTbOCexm/MjMAdCBqebKKaiHFkiknUCn9eJQpZ5bAgERgV50VKj+AVTDfgTpqfO2vfo4wrufi6ZBb8QV7hllhUFBjYogQ9C96dnS7skv0s+cqFuUjwMILr5/rsbEmEMGvl0T0ytyAbtlXuowEFVj/YORNknM4yjY72YUtEPTlMpk0Cis7aIgTvu5qWMPER26PMApZuRqiwRsGIkaJIvOVOTHHjFYe3/YzdMkc7OZtqRMfQLtwVl2/zKQQV8b/a9vaT6q3mRLRd4P3esaAFe/+7sR/t+9tmB+a8kxtKM6kmaVQJMbXJZ4aoHGfeLX0m35Rcvu2Bmph7QfSDjk/eaE3q55zYSoGWShmlhlw4Kwg84sMuhmcVhLvo0LovR8bKmbdgACtTh7+7gs/l5w1lOkgbF6w7rkXLNslK7L2KYF4SPFLUcABOOLy8EETxh7h7/z9d62EiPu9CNpRrCOLxUhn+JUS+DuAAd8jdiCodW233N1acXhZGnulDKR3hiNsMdEIsijRPemewASoSCFpVj4utEE+eVFM146xfgC6DX39GaQ2zT3YKsWX3GiLwKtGffwqV7IlZIcBEVqMfTXSTZsY+dZm1MxxCZH0Zd33VY0yggDE0Wua7Lx6Bnad5n91qmHAnwSEJE5YIhQM634omd6cq9Wk4seJCUIn+ucoknrpxp0IR9QMxpKSMRHRUg2K8ZegnY3YqFunRZKCfsq9ufQEKgjZN12AFqi551KPBdn4/3V5HK6xTv0P4robSsE/BvuIfByvRf/W7ZrDx+CFC4EEcsBOACOZCrkhhqd5TkYKbe9RA+vs56+9N5qZGurkxcoKviiyEncxvTuShD65DK/6x6kMDMgQv/EdZDI3x9GtHTnRBYXwDGnPJ19w+q2zC3e2XarbxTGYQIPEC5mYx0gAA0sbjf018NGfwBhl6SB54iGsa8uLvR3jHv6OSRJgwxL6j7P0Ts4Hv2EtO12P0Lv21pwi3JC1O/WviSrKCvrQD5lMHL9Uym3hwFi2zu0mqwZvxOAbGy7kfOPXkLYKOHTZLthzKj3PsdjeceWBfYIvPGKYcd6wDr36d1aXSYS4IWeApTS2AQ2lu0DUcgSefAvsA8NkgOklvJY1cjTMSg6j6cxQo48Bvl8RAWGLbr4h2S/8KwDGxwLsSv0Gop/gnFc3GzCsmL0EkEyHHWkCA8YRXCghfW80KLDV495ff7yF5oiwK56GniqowZ3RG9Jxp5MXoJQgsLV1VMQFMAmsY69yz8eoxRH3wl9L0dMyndLulhWWzNwPMQ2I0yAWdzA/pksVmwTJTFenB3MHCiWc5rEwJ3yofe6NZZnZQrYyL9r1TNnVwfTwRUiykPiLSk4x9Mi6DX7RamDAxc8u3gDVfjPsTOTagBOEGUWlGAL54KE/E6sgCQ5DEAt12chk8AxbjBFLPgV+/idrzS0lZHOL+IVBI9D0i3Bq1yZcSIqcjZB0M3IbxbPm4gLAYOWEiTUN2ecsEHHg9nt6rhgffVoqSbCCFPbpC0xf7WOC3+BQORIZECOCC7cUAciXq3xn+GuxpFE40RWRJeKAK7bBQ21X89ABIXlQFkFddZ9kRvlZ2Pnl0oeF+2pjnZu0Yc2czNfZEQF2P7BKIdLrgMgxG89snxAY8qAYTCKyQw6xTG87wkjDcpy1wzsZLP3WsOuO7cAm7b27xU0jRKq8Cw4d1hDoyRG+RdS53F8RFJzVMaNNYgxU2tfRwUvXpTRXiOheeRVvh25+YGVnjakUXjx/dSDnOw4ETHGHD+7styDkeSfc3BdSZxswzc6OehgMI+xsCxeeRym15QUm9hxvg8X7Bfz/0WulgFwgzrm11TVynZYOmvyHpiZKoqQyQyKahIrfhwuchCr7lMsZ4a+umIkNkKxCLZnI+T7jd+eGFMgKItjz3kTTxRl3IhaJG3LbPmwRUJynMxQKdMi4Uf0qy0U7+i8hIJ9m50QXc+3tw2bwDSbx22XYJ9Wf14gxx5G5SPTb1JVCbhe4fxNt91xIxCow2zk62tzbYfRe6dfmDmgYHkv2PIEtMJZK8iKLDjFfu2ZUxsKT2A5g1q17og6o9MeXeuFS3mzJXJYFQZd+3UzlFR9qwkFkby9mg5y4XSeMvRLOHPt/H/r5SpEqBE6a9MadZYt61FBV152CUEzd43ihXtrAa0XH9HdsiySBcWI1SpM3mv9rRP0DiLjMUzHw/K1D8TE2f07zW4t/9kvE11tFj/NpICixQAAAAA=" + sdmf_old_shares[3] = 
b"VGFob2UgbXV0YWJsZSBjb250YWluZXIgdjEKdQlEA47ESLbTdKdpLJXCpBxd5OH239tl5hvAiz1dvGdE5rIOpf8cbfxbPcwNF+Y5dM92uBVbmV6KAAAAAAAAB/wAAAAAAAAJ0AAAAAFOWSw7jSx7WXzaMpdleJYXwYsRCV82jNA5oex9m2YhXSnb2POh+vvC1LE1NAfRc9GOb2zQG84Xdsx1Jub2brEeKkyt0sRIttN0p2kslcKkHF3k4fbf22XmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABamJprL6ecrsOoFKdrXUmWveLq8nzEGDOjFnyK9detI3noX3uyK2MwSnFdAfyN0tuAwoAAAAAAAAAFQAAAAAAAAAVAAABjwAAAo8AAAMXAAADNwAAAAAAAAM+AAAAAAAAB/wwggEgMA0GCSqGSIb3DQEBAQUAA4IBDQAwggEIAoIBAQC1IkainlJF12IBXBQdpRK1zXB7a26vuEYqRmQM09YjC6sQjCs0F2ICk8n9m/2Kw4l16eIEboB2Au9pODCE+u/dEAakEFh4qidTMn61rbGUbsLK8xzuWNW22ezzz9/nPia0HDrulXt51/FYtfnnAuD1RJGXJv/8tDllE9FL/18TzlH4WuB6Fp8FTgv7QdbZAfWJHDGFIpVCJr1XxOCsSZNFJIqGwZnD2lsChiWw5OJDbKd8otqN1hIbfHyMyfMOJ/BzRzvZXaUt4Dv5nf93EmQDWClxShRwpuX/NkZ5B2K9OFonFTbOCexm/MjMAdCBqebKKaiHFkiknUCn9eJQpZ5bAgERgV50VKj+AVTDfgTpqfO2vfo4wrufi6ZBb8QV7hllhUFBjYogQ9C96dnS7skv0s+cqFuUjwMILr5/rsbEmEMGvl0T0ytyAbtlXuowEFVj/YORNknM4yjY72YUtEPTlMpk0Cis7aIgTvu5qWMPER26PMApZuRqiwRsGIkaJIvOVOTHHjFYe3/YzdMkc7OZtqRMfQLtwVl2/zKQQV8b/a9vaT6q3mRLRd4P3esaAFe/+7sR/t+9tmB+a8kxtKM6kmaVQJMbXJZ4aoHGfeLX0m35Rcvu2Bmph7QfSDjk/eaE3q55zYSoGWShmlhlw4Kwg84sMuhmcVhLvo0LovR8bKmbdgACtTh7+7gs/l5w1lOkgbF6w7rkXLNslK7L2KYF4SPFLUcABOOLy8EETxh7h7/z9d62EiPu9CNpRrCOLxUhn+JUS+DuAAd8jdiCodW233N1acXhZGnulDKR3hiNsMdEIsijRPemewARoi8CrRn38KleyJWSHARFajH010k2bGPnWZtTMcQmR9GhIIWlWPi60QT55UUzXjrF+ALoNff0ZpDbNPdgqxZfcSNSplrHqtsDE0Wua7Lx6Bnad5n91qmHAnwSEJE5YIhQM634omd6cq9Wk4seJCUIn+ucoknrpxp0IR9QMxpKSMRHRUg2K8ZegnY3YqFunRZKCfsq9ufQEKgjZN12AFqi551KPBdn4/3V5HK6xTv0P4robSsE/BvuIfByvRf/W7ZrDx+CFC4EEcsBOACOZCrkhhqd5TkYKbe9RA+vs56+9N5qZGurkxcoKviiyEncxvTuShD65DK/6x6kMDMgQv/EdZDI3x9GtHTnRBYXwDGnPJ19w+q2zC3e2XarbxTGYQIPEC5mYx0gAA0sbjf018NGfwBhl6SB54iGsa8uLvR3jHv6OSRJgwxL6j7P0Ts4Hv2EtO12P0Lv21pwi3JC1O/WviSrKCvrQD5lMHL9Uym3hwFi2zu0mqwZvxOAbGy7kfOPXkLYKOHTZLthzKj3PsdjeceWBfYIvPGKYcd6wDr36d1aXSYS4IWeApTS2AQ2lu0DUcgSefAvsA8NkgOklvJY1cjTMSg6j6cxQo48Bvl8RAWGLbr4h2S/8KwDGxwLsSv0Gop/gnFc3GzCsmL0EkEyHHWkCA8YRXCghfW80KLDV495ff7yF5oiwK56GniqowZ3RG9Jxp5MXoJQgsLV1VMQFMAmsY69yz8eoxRH3wl9L0dMyndLulhWWzNwPMQ2I0yAWdzA/pksVmwTJTFenB3MHCiWc5rEwJ3yofe6NZZnZQrYyL9r1TNnVwfTwRUiykPiLSk4x9Mi6DX7RamDAxc8u3gDVfjPsTOTagBOEGUWlGAL54KE/E6sgCQ5DEAt12chk8AxbjBFLPgV+/idrzS0lZHOL+IVBI9D0i3Bq1yZcSIqcjZB0M3IbxbPm4gLAYOWEiTUN2ecsEHHg9nt6rhgffVoqSbCCFPbpC0xf7WOC3+BQORIZECOCC7cUAciXq3xn+GuxpFE40RWRJeKAK7bBQ21X89ABIXlQFkFddZ9kRvlZ2Pnl0oeF+2pjnZu0Yc2czNfZEQF2P7BKIdLrgMgxG89snxAY8qAYTCKyQw6xTG87wkjDcpy1wzsZLP3WsOuO7cAm7b27xU0jRKq8Cw4d1hDoyRG+RdS53F8RFJzVMaNNYgxU2tfRwUvXpTRXiOheeRVvh25+YGVnjakUXjx/dSDnOw4ETHGHD+7styDkeSfc3BdSZxswzc6OehgMI+xsCxeeRym15QUm9hxvg8X7Bfz/0WulgFwgzrm11TVynZYOmvyHpiZKoqQyQyKahIrfhwuchCr7lMsZ4a+umIkNkKxCLZnI+T7jd+eGFMgKItjz3kTTxRl3IhaJG3LbPmwRUJynMxQKdMi4Uf0qy0U7+i8hIJ9m50QXc+3tw2bwDSbx22XYJ9Wf14gxx5G5SPTb1JVCbhe4fxNt91xIxCow2zk62tzbYfRe6dfmDmgYHkv2PIEtMJZK8iKLDjFfu2ZUxsKT2A5g1q17og6o9MeXeuFS3mzJXJYFQZd+3UzlFR9qwkFkby9mg5y4XSeMvRLOHPt/H/r5SpEqBE6a9MadZYt61FBV152CUEzd43ihXtrAa0XH9HdsiySBcWI1SpM3mv9rRP0DiLjMUzHw/K1D8TE2f07zW4t/9kvE11tFj/NpICixQAAAAA=" + sdmf_old_shares[4] = 
b"VGFob2UgbXV0YWJsZSBjb250YWluZXIgdjEKdQlEA47ESLbTdKdpLJXCpBxd5OH239tl5hvAiz1dvGdE5rIOpf8cbfxbPcwNF+Y5dM92uBVbmV6KAAAAAAAAB/wAAAAAAAAJ0AAAAAFOWSw7jSx7WXzaMpdleJYXwYsRCV82jNA5oex9m2YhXSnb2POh+vvC1LE1NAfRc9GOb2zQG84Xdsx1Jub2brEeKkyt0sRIttN0p2kslcKkHF3k4fbf22XmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABamJprL6ecrsOoFKdrXUmWveLq8nzEGDOjFnyK9detI3noX3uyK2MwSnFdAfyN0tuAwoAAAAAAAAAFQAAAAAAAAAVAAABjwAAAo8AAAMXAAADNwAAAAAAAAM+AAAAAAAAB/wwggEgMA0GCSqGSIb3DQEBAQUAA4IBDQAwggEIAoIBAQC1IkainlJF12IBXBQdpRK1zXB7a26vuEYqRmQM09YjC6sQjCs0F2ICk8n9m/2Kw4l16eIEboB2Au9pODCE+u/dEAakEFh4qidTMn61rbGUbsLK8xzuWNW22ezzz9/nPia0HDrulXt51/FYtfnnAuD1RJGXJv/8tDllE9FL/18TzlH4WuB6Fp8FTgv7QdbZAfWJHDGFIpVCJr1XxOCsSZNFJIqGwZnD2lsChiWw5OJDbKd8otqN1hIbfHyMyfMOJ/BzRzvZXaUt4Dv5nf93EmQDWClxShRwpuX/NkZ5B2K9OFonFTbOCexm/MjMAdCBqebKKaiHFkiknUCn9eJQpZ5bAgERgV50VKj+AVTDfgTpqfO2vfo4wrufi6ZBb8QV7hllhUFBjYogQ9C96dnS7skv0s+cqFuUjwMILr5/rsbEmEMGvl0T0ytyAbtlXuowEFVj/YORNknM4yjY72YUtEPTlMpk0Cis7aIgTvu5qWMPER26PMApZuRqiwRsGIkaJIvOVOTHHjFYe3/YzdMkc7OZtqRMfQLtwVl2/zKQQV8b/a9vaT6q3mRLRd4P3esaAFe/+7sR/t+9tmB+a8kxtKM6kmaVQJMbXJZ4aoHGfeLX0m35Rcvu2Bmph7QfSDjk/eaE3q55zYSoGWShmlhlw4Kwg84sMuhmcVhLvo0LovR8bKmbdgACtTh7+7gs/l5w1lOkgbF6w7rkXLNslK7L2KYF4SPFLUcAA6dlE140Fc7FgB77PeM5Phv+bypQEYtyfLQHxd+OxlG3AAoIM8M4XulprmLd4gGMobS2Bv9CmwB5LpK/ySHE1QWjdwAUMA7/aVz7Mb1em0eks+biC8ZuVUhuAEkTVOAF4YulIjE8JlfW0dS1XKk62u0586QxiN38NTsluUDx8EAPTL66yRsfb1f3rRIDE0Wua7Lx6Bnad5n91qmHAnwSEJE5YIhQM634omd6cq9Wk4seJCUIn+ucoknrpxp0IR9QMxpKSMRHRUg2K8ZegnY3YqFunRZKCfsq9ufQEKgjZN12AFqi551KPBdn4/3V5HK6xTv0P4robSsE/BvuIfByvRf/W7ZrDx+CFC4EEcsBOACOZCrkhhqd5TkYKbe9RA+vs56+9N5qZGurkxcoKviiyEncxvTuShD65DK/6x6kMDMgQv/EdZDI3x9GtHTnRBYXwDGnPJ19w+q2zC3e2XarbxTGYQIPEC5mYx0gAA0sbjf018NGfwBhl6SB54iGsa8uLvR3jHv6OSRJgwxL6j7P0Ts4Hv2EtO12P0Lv21pwi3JC1O/WviSrKCvrQD5lMHL9Uym3hwFi2zu0mqwZvxOAbGy7kfOPXkLYKOHTZLthzKj3PsdjeceWBfYIvPGKYcd6wDr36d1aXSYS4IWeApTS2AQ2lu0DUcgSefAvsA8NkgOklvJY1cjTMSg6j6cxQo48Bvl8RAWGLbr4h2S/8KwDGxwLsSv0Gop/gnFc3GzCsmL0EkEyHHWkCA8YRXCghfW80KLDV495ff7yF5oiwK56GniqowZ3RG9Jxp5MXoJQgsLV1VMQFMAmsY69yz8eoxRH3wl9L0dMyndLulhWWzNwPMQ2I0yAWdzA/pksVmwTJTFenB3MHCiWc5rEwJ3yofe6NZZnZQrYyL9r1TNnVwfTwRUiykPiLSk4x9Mi6DX7RamDAxc8u3gDVfjPsTOTagBOEGUWlGAL54KE/E6sgCQ5DEAt12chk8AxbjBFLPgV+/idrzS0lZHOL+IVBI9D0i3Bq1yZcSIqcjZB0M3IbxbPm4gLAYOWEiTUN2ecsEHHg9nt6rhgffVoqSbCCFPbpC0xf7WOC3+BQORIZECOCC7cUAciXq3xn+GuxpFE40RWRJeKAK7bBQ21X89ABIXlQFkFddZ9kRvlZ2Pnl0oeF+2pjnZu0Yc2czNfZEQF2P7BKIdLrgMgxG89snxAY8qAYTCKyQw6xTG87wkjDcpy1wzsZLP3WsOuO7cAm7b27xU0jRKq8Cw4d1hDoyRG+RdS53F8RFJzVMaNNYgxU2tfRwUvXpTRXiOheeRVvh25+YGVnjakUXjx/dSDnOw4ETHGHD+7styDkeSfc3BdSZxswzc6OehgMI+xsCxeeRym15QUm9hxvg8X7Bfz/0WulgFwgzrm11TVynZYOmvyHpiZKoqQyQyKahIrfhwuchCr7lMsZ4a+umIkNkKxCLZnI+T7jd+eGFMgKItjz3kTTxRl3IhaJG3LbPmwRUJynMxQKdMi4Uf0qy0U7+i8hIJ9m50QXc+3tw2bwDSbx22XYJ9Wf14gxx5G5SPTb1JVCbhe4fxNt91xIxCow2zk62tzbYfRe6dfmDmgYHkv2PIEtMJZK8iKLDjFfu2ZUxsKT2A5g1q17og6o9MeXeuFS3mzJXJYFQZd+3UzlFR9qwkFkby9mg5y4XSeMvRLOHPt/H/r5SpEqBE6a9MadZYt61FBV152CUEzd43ihXtrAa0XH9HdsiySBcWI1SpM3mv9rRP0DiLjMUzHw/K1D8TE2f07zW4t/9kvE11tFj/NpICixQAAAAA=" + sdmf_old_shares[5] = 
b"VGFob2UgbXV0YWJsZSBjb250YWluZXIgdjEKdQlEA47ESLbTdKdpLJXCpBxd5OH239tl5hvAiz1dvGdE5rIOpf8cbfxbPcwNF+Y5dM92uBVbmV6KAAAAAAAAB/wAAAAAAAAJ0AAAAAFOWSw7jSx7WXzaMpdleJYXwYsRCV82jNA5oex9m2YhXSnb2POh+vvC1LE1NAfRc9GOb2zQG84Xdsx1Jub2brEeKkyt0sRIttN0p2kslcKkHF3k4fbf22XmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABamJprL6ecrsOoFKdrXUmWveLq8nzEGDOjFnyK9detI3noX3uyK2MwSnFdAfyN0tuAwoAAAAAAAAAFQAAAAAAAAAVAAABjwAAAo8AAAMXAAADNwAAAAAAAAM+AAAAAAAAB/wwggEgMA0GCSqGSIb3DQEBAQUAA4IBDQAwggEIAoIBAQC1IkainlJF12IBXBQdpRK1zXB7a26vuEYqRmQM09YjC6sQjCs0F2ICk8n9m/2Kw4l16eIEboB2Au9pODCE+u/dEAakEFh4qidTMn61rbGUbsLK8xzuWNW22ezzz9/nPia0HDrulXt51/FYtfnnAuD1RJGXJv/8tDllE9FL/18TzlH4WuB6Fp8FTgv7QdbZAfWJHDGFIpVCJr1XxOCsSZNFJIqGwZnD2lsChiWw5OJDbKd8otqN1hIbfHyMyfMOJ/BzRzvZXaUt4Dv5nf93EmQDWClxShRwpuX/NkZ5B2K9OFonFTbOCexm/MjMAdCBqebKKaiHFkiknUCn9eJQpZ5bAgERgV50VKj+AVTDfgTpqfO2vfo4wrufi6ZBb8QV7hllhUFBjYogQ9C96dnS7skv0s+cqFuUjwMILr5/rsbEmEMGvl0T0ytyAbtlXuowEFVj/YORNknM4yjY72YUtEPTlMpk0Cis7aIgTvu5qWMPER26PMApZuRqiwRsGIkaJIvOVOTHHjFYe3/YzdMkc7OZtqRMfQLtwVl2/zKQQV8b/a9vaT6q3mRLRd4P3esaAFe/+7sR/t+9tmB+a8kxtKM6kmaVQJMbXJZ4aoHGfeLX0m35Rcvu2Bmph7QfSDjk/eaE3q55zYSoGWShmlhlw4Kwg84sMuhmcVhLvo0LovR8bKmbdgACtTh7+7gs/l5w1lOkgbF6w7rkXLNslK7L2KYF4SPFLUcAA6dlE140Fc7FgB77PeM5Phv+bypQEYtyfLQHxd+OxlG3AAoIM8M4XulprmLd4gGMobS2Bv9CmwB5LpK/ySHE1QWjdwATPCZX1tHUtVypOtrtOfOkMYjd/DU7JblA8fBAD0y+uskwDv9pXPsxvV6bR6Sz5uILxm5VSG4ASRNU4AXhi6UiMUKZHBmcmEgDE0Wua7Lx6Bnad5n91qmHAnwSEJE5YIhQM634omd6cq9Wk4seJCUIn+ucoknrpxp0IR9QMxpKSMRHRUg2K8ZegnY3YqFunRZKCfsq9ufQEKgjZN12AFqi551KPBdn4/3V5HK6xTv0P4robSsE/BvuIfByvRf/W7ZrDx+CFC4EEcsBOACOZCrkhhqd5TkYKbe9RA+vs56+9N5qZGurkxcoKviiyEncxvTuShD65DK/6x6kMDMgQv/EdZDI3x9GtHTnRBYXwDGnPJ19w+q2zC3e2XarbxTGYQIPEC5mYx0gAA0sbjf018NGfwBhl6SB54iGsa8uLvR3jHv6OSRJgwxL6j7P0Ts4Hv2EtO12P0Lv21pwi3JC1O/WviSrKCvrQD5lMHL9Uym3hwFi2zu0mqwZvxOAbGy7kfOPXkLYKOHTZLthzKj3PsdjeceWBfYIvPGKYcd6wDr36d1aXSYS4IWeApTS2AQ2lu0DUcgSefAvsA8NkgOklvJY1cjTMSg6j6cxQo48Bvl8RAWGLbr4h2S/8KwDGxwLsSv0Gop/gnFc3GzCsmL0EkEyHHWkCA8YRXCghfW80KLDV495ff7yF5oiwK56GniqowZ3RG9Jxp5MXoJQgsLV1VMQFMAmsY69yz8eoxRH3wl9L0dMyndLulhWWzNwPMQ2I0yAWdzA/pksVmwTJTFenB3MHCiWc5rEwJ3yofe6NZZnZQrYyL9r1TNnVwfTwRUiykPiLSk4x9Mi6DX7RamDAxc8u3gDVfjPsTOTagBOEGUWlGAL54KE/E6sgCQ5DEAt12chk8AxbjBFLPgV+/idrzS0lZHOL+IVBI9D0i3Bq1yZcSIqcjZB0M3IbxbPm4gLAYOWEiTUN2ecsEHHg9nt6rhgffVoqSbCCFPbpC0xf7WOC3+BQORIZECOCC7cUAciXq3xn+GuxpFE40RWRJeKAK7bBQ21X89ABIXlQFkFddZ9kRvlZ2Pnl0oeF+2pjnZu0Yc2czNfZEQF2P7BKIdLrgMgxG89snxAY8qAYTCKyQw6xTG87wkjDcpy1wzsZLP3WsOuO7cAm7b27xU0jRKq8Cw4d1hDoyRG+RdS53F8RFJzVMaNNYgxU2tfRwUvXpTRXiOheeRVvh25+YGVnjakUXjx/dSDnOw4ETHGHD+7styDkeSfc3BdSZxswzc6OehgMI+xsCxeeRym15QUm9hxvg8X7Bfz/0WulgFwgzrm11TVynZYOmvyHpiZKoqQyQyKahIrfhwuchCr7lMsZ4a+umIkNkKxCLZnI+T7jd+eGFMgKItjz3kTTxRl3IhaJG3LbPmwRUJynMxQKdMi4Uf0qy0U7+i8hIJ9m50QXc+3tw2bwDSbx22XYJ9Wf14gxx5G5SPTb1JVCbhe4fxNt91xIxCow2zk62tzbYfRe6dfmDmgYHkv2PIEtMJZK8iKLDjFfu2ZUxsKT2A5g1q17og6o9MeXeuFS3mzJXJYFQZd+3UzlFR9qwkFkby9mg5y4XSeMvRLOHPt/H/r5SpEqBE6a9MadZYt61FBV152CUEzd43ihXtrAa0XH9HdsiySBcWI1SpM3mv9rRP0DiLjMUzHw/K1D8TE2f07zW4t/9kvE11tFj/NpICixQAAAAA=" + sdmf_old_shares[6] = 
b"VGFob2UgbXV0YWJsZSBjb250YWluZXIgdjEKdQlEA47ESLbTdKdpLJXCpBxd5OH239tl5hvAiz1dvGdE5rIOpf8cbfxbPcwNF+Y5dM92uBVbmV6KAAAAAAAAB/wAAAAAAAAJ0AAAAAFOWSw7jSx7WXzaMpdleJYXwYsRCV82jNA5oex9m2YhXSnb2POh+vvC1LE1NAfRc9GOb2zQG84Xdsx1Jub2brEeKkyt0sRIttN0p2kslcKkHF3k4fbf22XmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABamJprL6ecrsOoFKdrXUmWveLq8nzEGDOjFnyK9detI3noX3uyK2MwSnFdAfyN0tuAwoAAAAAAAAAFQAAAAAAAAAVAAABjwAAAo8AAAMXAAADNwAAAAAAAAM+AAAAAAAAB/wwggEgMA0GCSqGSIb3DQEBAQUAA4IBDQAwggEIAoIBAQC1IkainlJF12IBXBQdpRK1zXB7a26vuEYqRmQM09YjC6sQjCs0F2ICk8n9m/2Kw4l16eIEboB2Au9pODCE+u/dEAakEFh4qidTMn61rbGUbsLK8xzuWNW22ezzz9/nPia0HDrulXt51/FYtfnnAuD1RJGXJv/8tDllE9FL/18TzlH4WuB6Fp8FTgv7QdbZAfWJHDGFIpVCJr1XxOCsSZNFJIqGwZnD2lsChiWw5OJDbKd8otqN1hIbfHyMyfMOJ/BzRzvZXaUt4Dv5nf93EmQDWClxShRwpuX/NkZ5B2K9OFonFTbOCexm/MjMAdCBqebKKaiHFkiknUCn9eJQpZ5bAgERgV50VKj+AVTDfgTpqfO2vfo4wrufi6ZBb8QV7hllhUFBjYogQ9C96dnS7skv0s+cqFuUjwMILr5/rsbEmEMGvl0T0ytyAbtlXuowEFVj/YORNknM4yjY72YUtEPTlMpk0Cis7aIgTvu5qWMPER26PMApZuRqiwRsGIkaJIvOVOTHHjFYe3/YzdMkc7OZtqRMfQLtwVl2/zKQQV8b/a9vaT6q3mRLRd4P3esaAFe/+7sR/t+9tmB+a8kxtKM6kmaVQJMbXJZ4aoHGfeLX0m35Rcvu2Bmph7QfSDjk/eaE3q55zYSoGWShmlhlw4Kwg84sMuhmcVhLvo0LovR8bKmbdgACtTh7+7gs/l5w1lOkgbF6w7rkXLNslK7L2KYF4SPFLUcAA6dlE140Fc7FgB77PeM5Phv+bypQEYtyfLQHxd+OxlG3AAlyHZU7RfTJjbHu1gjabWZsTu+7nAeRVG6/ZSd4iMQ1ZgAWDSFSPvKzcFzRcuRlVgKUf0HBce1MCF8SwpUbPPEyfVJty4xLZ7DvNU/Eh/R6BarsVAagVXdp+GtEu0+fok7nilT4LchmHo8DE0Wua7Lx6Bnad5n91qmHAnwSEJE5YIhQM634omd6cq9Wk4seJCUIn+ucoknrpxp0IR9QMxpKSMRHRUg2K8ZegnY3YqFunRZKCfsq9ufQEKgjZN12AFqi551KPBdn4/3V5HK6xTv0P4robSsE/BvuIfByvRf/W7ZrDx+CFC4EEcsBOACOZCrkhhqd5TkYKbe9RA+vs56+9N5qZGurkxcoKviiyEncxvTuShD65DK/6x6kMDMgQv/EdZDI3x9GtHTnRBYXwDGnPJ19w+q2zC3e2XarbxTGYQIPEC5mYx0gAA0sbjf018NGfwBhl6SB54iGsa8uLvR3jHv6OSRJgwxL6j7P0Ts4Hv2EtO12P0Lv21pwi3JC1O/WviSrKCvrQD5lMHL9Uym3hwFi2zu0mqwZvxOAbGy7kfOPXkLYKOHTZLthzKj3PsdjeceWBfYIvPGKYcd6wDr36d1aXSYS4IWeApTS2AQ2lu0DUcgSefAvsA8NkgOklvJY1cjTMSg6j6cxQo48Bvl8RAWGLbr4h2S/8KwDGxwLsSv0Gop/gnFc3GzCsmL0EkEyHHWkCA8YRXCghfW80KLDV495ff7yF5oiwK56GniqowZ3RG9Jxp5MXoJQgsLV1VMQFMAmsY69yz8eoxRH3wl9L0dMyndLulhWWzNwPMQ2I0yAWdzA/pksVmwTJTFenB3MHCiWc5rEwJ3yofe6NZZnZQrYyL9r1TNnVwfTwRUiykPiLSk4x9Mi6DX7RamDAxc8u3gDVfjPsTOTagBOEGUWlGAL54KE/E6sgCQ5DEAt12chk8AxbjBFLPgV+/idrzS0lZHOL+IVBI9D0i3Bq1yZcSIqcjZB0M3IbxbPm4gLAYOWEiTUN2ecsEHHg9nt6rhgffVoqSbCCFPbpC0xf7WOC3+BQORIZECOCC7cUAciXq3xn+GuxpFE40RWRJeKAK7bBQ21X89ABIXlQFkFddZ9kRvlZ2Pnl0oeF+2pjnZu0Yc2czNfZEQF2P7BKIdLrgMgxG89snxAY8qAYTCKyQw6xTG87wkjDcpy1wzsZLP3WsOuO7cAm7b27xU0jRKq8Cw4d1hDoyRG+RdS53F8RFJzVMaNNYgxU2tfRwUvXpTRXiOheeRVvh25+YGVnjakUXjx/dSDnOw4ETHGHD+7styDkeSfc3BdSZxswzc6OehgMI+xsCxeeRym15QUm9hxvg8X7Bfz/0WulgFwgzrm11TVynZYOmvyHpiZKoqQyQyKahIrfhwuchCr7lMsZ4a+umIkNkKxCLZnI+T7jd+eGFMgKItjz3kTTxRl3IhaJG3LbPmwRUJynMxQKdMi4Uf0qy0U7+i8hIJ9m50QXc+3tw2bwDSbx22XYJ9Wf14gxx5G5SPTb1JVCbhe4fxNt91xIxCow2zk62tzbYfRe6dfmDmgYHkv2PIEtMJZK8iKLDjFfu2ZUxsKT2A5g1q17og6o9MeXeuFS3mzJXJYFQZd+3UzlFR9qwkFkby9mg5y4XSeMvRLOHPt/H/r5SpEqBE6a9MadZYt61FBV152CUEzd43ihXtrAa0XH9HdsiySBcWI1SpM3mv9rRP0DiLjMUzHw/K1D8TE2f07zW4t/9kvE11tFj/NpICixQAAAAA=" + sdmf_old_shares[7] = 
b"VGFob2UgbXV0YWJsZSBjb250YWluZXIgdjEKdQlEA47ESLbTdKdpLJXCpBxd5OH239tl5hvAiz1dvGdE5rIOpf8cbfxbPcwNF+Y5dM92uBVbmV6KAAAAAAAAB/wAAAAAAAAJ0AAAAAFOWSw7jSx7WXzaMpdleJYXwYsRCV82jNA5oex9m2YhXSnb2POh+vvC1LE1NAfRc9GOb2zQG84Xdsx1Jub2brEeKkyt0sRIttN0p2kslcKkHF3k4fbf22XmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABamJprL6ecrsOoFKdrXUmWveLq8nzEGDOjFnyK9detI3noX3uyK2MwSnFdAfyN0tuAwoAAAAAAAAAFQAAAAAAAAAVAAABjwAAAo8AAAMXAAADNwAAAAAAAAM+AAAAAAAAB/wwggEgMA0GCSqGSIb3DQEBAQUAA4IBDQAwggEIAoIBAQC1IkainlJF12IBXBQdpRK1zXB7a26vuEYqRmQM09YjC6sQjCs0F2ICk8n9m/2Kw4l16eIEboB2Au9pODCE+u/dEAakEFh4qidTMn61rbGUbsLK8xzuWNW22ezzz9/nPia0HDrulXt51/FYtfnnAuD1RJGXJv/8tDllE9FL/18TzlH4WuB6Fp8FTgv7QdbZAfWJHDGFIpVCJr1XxOCsSZNFJIqGwZnD2lsChiWw5OJDbKd8otqN1hIbfHyMyfMOJ/BzRzvZXaUt4Dv5nf93EmQDWClxShRwpuX/NkZ5B2K9OFonFTbOCexm/MjMAdCBqebKKaiHFkiknUCn9eJQpZ5bAgERgV50VKj+AVTDfgTpqfO2vfo4wrufi6ZBb8QV7hllhUFBjYogQ9C96dnS7skv0s+cqFuUjwMILr5/rsbEmEMGvl0T0ytyAbtlXuowEFVj/YORNknM4yjY72YUtEPTlMpk0Cis7aIgTvu5qWMPER26PMApZuRqiwRsGIkaJIvOVOTHHjFYe3/YzdMkc7OZtqRMfQLtwVl2/zKQQV8b/a9vaT6q3mRLRd4P3esaAFe/+7sR/t+9tmB+a8kxtKM6kmaVQJMbXJZ4aoHGfeLX0m35Rcvu2Bmph7QfSDjk/eaE3q55zYSoGWShmlhlw4Kwg84sMuhmcVhLvo0LovR8bKmbdgACtTh7+7gs/l5w1lOkgbF6w7rkXLNslK7L2KYF4SPFLUcAA6dlE140Fc7FgB77PeM5Phv+bypQEYtyfLQHxd+OxlG3AAlyHZU7RfTJjbHu1gjabWZsTu+7nAeRVG6/ZSd4iMQ1ZgAVbcuMS2ew7zVPxIf0egWq7FQGoFV3afhrRLtPn6JO54oNIVI+8rNwXNFy5GVWApR/QcFx7UwIXxLClRs88TJ9UtLnNF4/mM0DE0Wua7Lx6Bnad5n91qmHAnwSEJE5YIhQM634omd6cq9Wk4seJCUIn+ucoknrpxp0IR9QMxpKSMRHRUg2K8ZegnY3YqFunRZKCfsq9ufQEKgjZN12AFqi551KPBdn4/3V5HK6xTv0P4robSsE/BvuIfByvRf/W7ZrDx+CFC4EEcsBOACOZCrkhhqd5TkYKbe9RA+vs56+9N5qZGurkxcoKviiyEncxvTuShD65DK/6x6kMDMgQv/EdZDI3x9GtHTnRBYXwDGnPJ19w+q2zC3e2XarbxTGYQIPEC5mYx0gAA0sbjf018NGfwBhl6SB54iGsa8uLvR3jHv6OSRJgwxL6j7P0Ts4Hv2EtO12P0Lv21pwi3JC1O/WviSrKCvrQD5lMHL9Uym3hwFi2zu0mqwZvxOAbGy7kfOPXkLYKOHTZLthzKj3PsdjeceWBfYIvPGKYcd6wDr36d1aXSYS4IWeApTS2AQ2lu0DUcgSefAvsA8NkgOklvJY1cjTMSg6j6cxQo48Bvl8RAWGLbr4h2S/8KwDGxwLsSv0Gop/gnFc3GzCsmL0EkEyHHWkCA8YRXCghfW80KLDV495ff7yF5oiwK56GniqowZ3RG9Jxp5MXoJQgsLV1VMQFMAmsY69yz8eoxRH3wl9L0dMyndLulhWWzNwPMQ2I0yAWdzA/pksVmwTJTFenB3MHCiWc5rEwJ3yofe6NZZnZQrYyL9r1TNnVwfTwRUiykPiLSk4x9Mi6DX7RamDAxc8u3gDVfjPsTOTagBOEGUWlGAL54KE/E6sgCQ5DEAt12chk8AxbjBFLPgV+/idrzS0lZHOL+IVBI9D0i3Bq1yZcSIqcjZB0M3IbxbPm4gLAYOWEiTUN2ecsEHHg9nt6rhgffVoqSbCCFPbpC0xf7WOC3+BQORIZECOCC7cUAciXq3xn+GuxpFE40RWRJeKAK7bBQ21X89ABIXlQFkFddZ9kRvlZ2Pnl0oeF+2pjnZu0Yc2czNfZEQF2P7BKIdLrgMgxG89snxAY8qAYTCKyQw6xTG87wkjDcpy1wzsZLP3WsOuO7cAm7b27xU0jRKq8Cw4d1hDoyRG+RdS53F8RFJzVMaNNYgxU2tfRwUvXpTRXiOheeRVvh25+YGVnjakUXjx/dSDnOw4ETHGHD+7styDkeSfc3BdSZxswzc6OehgMI+xsCxeeRym15QUm9hxvg8X7Bfz/0WulgFwgzrm11TVynZYOmvyHpiZKoqQyQyKahIrfhwuchCr7lMsZ4a+umIkNkKxCLZnI+T7jd+eGFMgKItjz3kTTxRl3IhaJG3LbPmwRUJynMxQKdMi4Uf0qy0U7+i8hIJ9m50QXc+3tw2bwDSbx22XYJ9Wf14gxx5G5SPTb1JVCbhe4fxNt91xIxCow2zk62tzbYfRe6dfmDmgYHkv2PIEtMJZK8iKLDjFfu2ZUxsKT2A5g1q17og6o9MeXeuFS3mzJXJYFQZd+3UzlFR9qwkFkby9mg5y4XSeMvRLOHPt/H/r5SpEqBE6a9MadZYt61FBV152CUEzd43ihXtrAa0XH9HdsiySBcWI1SpM3mv9rRP0DiLjMUzHw/K1D8TE2f07zW4t/9kvE11tFj/NpICixQAAAAA=" + sdmf_old_shares[8] = 
b"VGFob2UgbXV0YWJsZSBjb250YWluZXIgdjEKdQlEA47ESLbTdKdpLJXCpBxd5OH239tl5hvAiz1dvGdE5rIOpf8cbfxbPcwNF+Y5dM92uBVbmV6KAAAAAAAAB/wAAAAAAAAJ0AAAAAFOWSw7jSx7WXzaMpdleJYXwYsRCV82jNA5oex9m2YhXSnb2POh+vvC1LE1NAfRc9GOb2zQG84Xdsx1Jub2brEeKkyt0sRIttN0p2kslcKkHF3k4fbf22XmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABamJprL6ecrsOoFKdrXUmWveLq8nzEGDOjFnyK9detI3noX3uyK2MwSnFdAfyN0tuAwoAAAAAAAAAFQAAAAAAAAAVAAABjwAAAo8AAAMXAAADNwAAAAAAAAM+AAAAAAAAB/wwggEgMA0GCSqGSIb3DQEBAQUAA4IBDQAwggEIAoIBAQC1IkainlJF12IBXBQdpRK1zXB7a26vuEYqRmQM09YjC6sQjCs0F2ICk8n9m/2Kw4l16eIEboB2Au9pODCE+u/dEAakEFh4qidTMn61rbGUbsLK8xzuWNW22ezzz9/nPia0HDrulXt51/FYtfnnAuD1RJGXJv/8tDllE9FL/18TzlH4WuB6Fp8FTgv7QdbZAfWJHDGFIpVCJr1XxOCsSZNFJIqGwZnD2lsChiWw5OJDbKd8otqN1hIbfHyMyfMOJ/BzRzvZXaUt4Dv5nf93EmQDWClxShRwpuX/NkZ5B2K9OFonFTbOCexm/MjMAdCBqebKKaiHFkiknUCn9eJQpZ5bAgERgV50VKj+AVTDfgTpqfO2vfo4wrufi6ZBb8QV7hllhUFBjYogQ9C96dnS7skv0s+cqFuUjwMILr5/rsbEmEMGvl0T0ytyAbtlXuowEFVj/YORNknM4yjY72YUtEPTlMpk0Cis7aIgTvu5qWMPER26PMApZuRqiwRsGIkaJIvOVOTHHjFYe3/YzdMkc7OZtqRMfQLtwVl2/zKQQV8b/a9vaT6q3mRLRd4P3esaAFe/+7sR/t+9tmB+a8kxtKM6kmaVQJMbXJZ4aoHGfeLX0m35Rcvu2Bmph7QfSDjk/eaE3q55zYSoGWShmlhlw4Kwg84sMuhmcVhLvo0LovR8bKmbdgABUSzNKiMx0E91q51/WH6ASL0fDEOLef9oxuyBX5F5cpoABojmWkDX3k3FKfgNHIeptE3lxB8HHzxDfSD250psyfNCAAwGsKbMxbmI2NpdTozZ3SICrySwgGkatA1gsDOJmOnTzgAYmqKY7A9vQChuYa17fYSyKerIb3682jxiIneQvCMWCK5WcuI4PMeIsUAj8yxdxHvV+a9vtSCEsDVvymrrooDKX1GK98t37yoDE0Wua7Lx6Bnad5n91qmHAnwSEJE5YIhQM634omd6cq9Wk4seJCUIn+ucoknrpxp0IR9QMxpKSMRHRUg2K8ZegnY3YqFunRZKCfsq9ufQEKgjZN12AFqi551KPBdn4/3V5HK6xTv0P4robSsE/BvuIfByvRf/W7ZrDx+CFC4EEcsBOACOZCrkhhqd5TkYKbe9RA+vs56+9N5qZGurkxcoKviiyEncxvTuShD65DK/6x6kMDMgQv/EdZDI3x9GtHTnRBYXwDGnPJ19w+q2zC3e2XarbxTGYQIPEC5mYx0gAA0sbjf018NGfwBhl6SB54iGsa8uLvR3jHv6OSRJgwxL6j7P0Ts4Hv2EtO12P0Lv21pwi3JC1O/WviSrKCvrQD5lMHL9Uym3hwFi2zu0mqwZvxOAbGy7kfOPXkLYKOHTZLthzKj3PsdjeceWBfYIvPGKYcd6wDr36d1aXSYS4IWeApTS2AQ2lu0DUcgSefAvsA8NkgOklvJY1cjTMSg6j6cxQo48Bvl8RAWGLbr4h2S/8KwDGxwLsSv0Gop/gnFc3GzCsmL0EkEyHHWkCA8YRXCghfW80KLDV495ff7yF5oiwK56GniqowZ3RG9Jxp5MXoJQgsLV1VMQFMAmsY69yz8eoxRH3wl9L0dMyndLulhWWzNwPMQ2I0yAWdzA/pksVmwTJTFenB3MHCiWc5rEwJ3yofe6NZZnZQrYyL9r1TNnVwfTwRUiykPiLSk4x9Mi6DX7RamDAxc8u3gDVfjPsTOTagBOEGUWlGAL54KE/E6sgCQ5DEAt12chk8AxbjBFLPgV+/idrzS0lZHOL+IVBI9D0i3Bq1yZcSIqcjZB0M3IbxbPm4gLAYOWEiTUN2ecsEHHg9nt6rhgffVoqSbCCFPbpC0xf7WOC3+BQORIZECOCC7cUAciXq3xn+GuxpFE40RWRJeKAK7bBQ21X89ABIXlQFkFddZ9kRvlZ2Pnl0oeF+2pjnZu0Yc2czNfZEQF2P7BKIdLrgMgxG89snxAY8qAYTCKyQw6xTG87wkjDcpy1wzsZLP3WsOuO7cAm7b27xU0jRKq8Cw4d1hDoyRG+RdS53F8RFJzVMaNNYgxU2tfRwUvXpTRXiOheeRVvh25+YGVnjakUXjx/dSDnOw4ETHGHD+7styDkeSfc3BdSZxswzc6OehgMI+xsCxeeRym15QUm9hxvg8X7Bfz/0WulgFwgzrm11TVynZYOmvyHpiZKoqQyQyKahIrfhwuchCr7lMsZ4a+umIkNkKxCLZnI+T7jd+eGFMgKItjz3kTTxRl3IhaJG3LbPmwRUJynMxQKdMi4Uf0qy0U7+i8hIJ9m50QXc+3tw2bwDSbx22XYJ9Wf14gxx5G5SPTb1JVCbhe4fxNt91xIxCow2zk62tzbYfRe6dfmDmgYHkv2PIEtMJZK8iKLDjFfu2ZUxsKT2A5g1q17og6o9MeXeuFS3mzJXJYFQZd+3UzlFR9qwkFkby9mg5y4XSeMvRLOHPt/H/r5SpEqBE6a9MadZYt61FBV152CUEzd43ihXtrAa0XH9HdsiySBcWI1SpM3mv9rRP0DiLjMUzHw/K1D8TE2f07zW4t/9kvE11tFj/NpICixQAAAAA=" + sdmf_old_shares[9] = 
b"VGFob2UgbXV0YWJsZSBjb250YWluZXIgdjEKdQlEA47ESLbTdKdpLJXCpBxd5OH239tl5hvAiz1dvGdE5rIOpf8cbfxbPcwNF+Y5dM92uBVbmV6KAAAAAAAAB/wAAAAAAAAJ0AAAAAFOWSw7jSx7WXzaMpdleJYXwYsRCV82jNA5oex9m2YhXSnb2POh+vvC1LE1NAfRc9GOb2zQG84Xdsx1Jub2brEeKkyt0sRIttN0p2kslcKkHF3k4fbf22XmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABamJprL6ecrsOoFKdrXUmWveLq8nzEGDOjFnyK9detI3noX3uyK2MwSnFdAfyN0tuAwoAAAAAAAAAFQAAAAAAAAAVAAABjwAAAo8AAAMXAAADNwAAAAAAAAM+AAAAAAAAB/wwggEgMA0GCSqGSIb3DQEBAQUAA4IBDQAwggEIAoIBAQC1IkainlJF12IBXBQdpRK1zXB7a26vuEYqRmQM09YjC6sQjCs0F2ICk8n9m/2Kw4l16eIEboB2Au9pODCE+u/dEAakEFh4qidTMn61rbGUbsLK8xzuWNW22ezzz9/nPia0HDrulXt51/FYtfnnAuD1RJGXJv/8tDllE9FL/18TzlH4WuB6Fp8FTgv7QdbZAfWJHDGFIpVCJr1XxOCsSZNFJIqGwZnD2lsChiWw5OJDbKd8otqN1hIbfHyMyfMOJ/BzRzvZXaUt4Dv5nf93EmQDWClxShRwpuX/NkZ5B2K9OFonFTbOCexm/MjMAdCBqebKKaiHFkiknUCn9eJQpZ5bAgERgV50VKj+AVTDfgTpqfO2vfo4wrufi6ZBb8QV7hllhUFBjYogQ9C96dnS7skv0s+cqFuUjwMILr5/rsbEmEMGvl0T0ytyAbtlXuowEFVj/YORNknM4yjY72YUtEPTlMpk0Cis7aIgTvu5qWMPER26PMApZuRqiwRsGIkaJIvOVOTHHjFYe3/YzdMkc7OZtqRMfQLtwVl2/zKQQV8b/a9vaT6q3mRLRd4P3esaAFe/+7sR/t+9tmB+a8kxtKM6kmaVQJMbXJZ4aoHGfeLX0m35Rcvu2Bmph7QfSDjk/eaE3q55zYSoGWShmlhlw4Kwg84sMuhmcVhLvo0LovR8bKmbdgABUSzNKiMx0E91q51/WH6ASL0fDEOLef9oxuyBX5F5cpoABojmWkDX3k3FKfgNHIeptE3lxB8HHzxDfSD250psyfNCAAwGsKbMxbmI2NpdTozZ3SICrySwgGkatA1gsDOJmOnTzgAXVnLiODzHiLFAI/MsXcR71fmvb7UghLA1b8pq66KAyl+aopjsD29AKG5hrXt9hLIp6shvfrzaPGIid5C8IxYIrjgBj1YohGgDE0Wua7Lx6Bnad5n91qmHAnwSEJE5YIhQM634omd6cq9Wk4seJCUIn+ucoknrpxp0IR9QMxpKSMRHRUg2K8ZegnY3YqFunRZKCfsq9ufQEKgjZN12AFqi551KPBdn4/3V5HK6xTv0P4robSsE/BvuIfByvRf/W7ZrDx+CFC4EEcsBOACOZCrkhhqd5TkYKbe9RA+vs56+9N5qZGurkxcoKviiyEncxvTuShD65DK/6x6kMDMgQv/EdZDI3x9GtHTnRBYXwDGnPJ19w+q2zC3e2XarbxTGYQIPEC5mYx0gAA0sbjf018NGfwBhl6SB54iGsa8uLvR3jHv6OSRJgwxL6j7P0Ts4Hv2EtO12P0Lv21pwi3JC1O/WviSrKCvrQD5lMHL9Uym3hwFi2zu0mqwZvxOAbGy7kfOPXkLYKOHTZLthzKj3PsdjeceWBfYIvPGKYcd6wDr36d1aXSYS4IWeApTS2AQ2lu0DUcgSefAvsA8NkgOklvJY1cjTMSg6j6cxQo48Bvl8RAWGLbr4h2S/8KwDGxwLsSv0Gop/gnFc3GzCsmL0EkEyHHWkCA8YRXCghfW80KLDV495ff7yF5oiwK56GniqowZ3RG9Jxp5MXoJQgsLV1VMQFMAmsY69yz8eoxRH3wl9L0dMyndLulhWWzNwPMQ2I0yAWdzA/pksVmwTJTFenB3MHCiWc5rEwJ3yofe6NZZnZQrYyL9r1TNnVwfTwRUiykPiLSk4x9Mi6DX7RamDAxc8u3gDVfjPsTOTagBOEGUWlGAL54KE/E6sgCQ5DEAt12chk8AxbjBFLPgV+/idrzS0lZHOL+IVBI9D0i3Bq1yZcSIqcjZB0M3IbxbPm4gLAYOWEiTUN2ecsEHHg9nt6rhgffVoqSbCCFPbpC0xf7WOC3+BQORIZECOCC7cUAciXq3xn+GuxpFE40RWRJeKAK7bBQ21X89ABIXlQFkFddZ9kRvlZ2Pnl0oeF+2pjnZu0Yc2czNfZEQF2P7BKIdLrgMgxG89snxAY8qAYTCKyQw6xTG87wkjDcpy1wzsZLP3WsOuO7cAm7b27xU0jRKq8Cw4d1hDoyRG+RdS53F8RFJzVMaNNYgxU2tfRwUvXpTRXiOheeRVvh25+YGVnjakUXjx/dSDnOw4ETHGHD+7styDkeSfc3BdSZxswzc6OehgMI+xsCxeeRym15QUm9hxvg8X7Bfz/0WulgFwgzrm11TVynZYOmvyHpiZKoqQyQyKahIrfhwuchCr7lMsZ4a+umIkNkKxCLZnI+T7jd+eGFMgKItjz3kTTxRl3IhaJG3LbPmwRUJynMxQKdMi4Uf0qy0U7+i8hIJ9m50QXc+3tw2bwDSbx22XYJ9Wf14gxx5G5SPTb1JVCbhe4fxNt91xIxCow2zk62tzbYfRe6dfmDmgYHkv2PIEtMJZK8iKLDjFfu2ZUxsKT2A5g1q17og6o9MeXeuFS3mzJXJYFQZd+3UzlFR9qwkFkby9mg5y4XSeMvRLOHPt/H/r5SpEqBE6a9MadZYt61FBV152CUEzd43ihXtrAa0XH9HdsiySBcWI1SpM3mv9rRP0DiLjMUzHw/K1D8TE2f07zW4t/9kvE11tFj/NpICixQAAAAA=" + sdmf_old_cap = b"URI:SSK:gmjgofw6gan57gwpsow6gtrz3e:5adm6fayxmu3e4lkmfvt6lkkfix34ai2wop2ioqr4bgvvhiol3kq" + sdmf_old_contents = b"This is a test file.\n" def copy_sdmf_shares(self): # We'll 
basically be short-circuiting the upload process. - servernums = self.g.servers_by_number.keys() + servernums = list(self.g.servers_by_number.keys()) assert len(servernums) == 10 - assignments = zip(self.sdmf_old_shares.keys(), servernums) + assignments = list(zip(self.sdmf_old_shares.keys(), servernums)) # Get the storage index. cap = uri.from_string(self.sdmf_old_cap) si = cap.get_storage_index() diff --git a/src/allmydata/test/mutable/test_multiple_encodings.py b/src/allmydata/test/mutable/test_multiple_encodings.py index 1811238d4..12c5be051 100644 --- a/src/allmydata/test/mutable/test_multiple_encodings.py +++ b/src/allmydata/test/mutable/test_multiple_encodings.py @@ -1,3 +1,15 @@ +""" +Ported to Python 3. +""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 + from twisted.trial import unittest from allmydata.interfaces import SDMF_VERSION from allmydata.monitor import Monitor @@ -10,7 +22,7 @@ from .util import FakeStorage, make_nodemaker class MultipleEncodings(unittest.TestCase): def setUp(self): - self.CONTENTS = "New contents go here" + self.CONTENTS = b"New contents go here" self.uploadable = MutableData(self.CONTENTS) self._storage = FakeStorage() self._nodemaker = make_nodemaker(self._storage, num_peers=20) @@ -63,9 +75,9 @@ class MultipleEncodings(unittest.TestCase): # then mix up the shares, to make sure that download survives seeing # a variety of encodings. This is actually kind of tricky to set up. - contents1 = "Contents for encoding 1 (3-of-10) go here"*1000 - contents2 = "Contents for encoding 2 (4-of-9) go here"*1000 - contents3 = "Contents for encoding 3 (4-of-7) go here"*1000 + contents1 = b"Contents for encoding 1 (3-of-10) go here"*1000 + contents2 = b"Contents for encoding 2 (4-of-9) go here"*1000 + contents3 = b"Contents for encoding 3 (4-of-7) go here"*1000 # we make a retrieval object that doesn't know what encoding # parameters to use diff --git a/src/allmydata/test/mutable/test_multiple_versions.py b/src/allmydata/test/mutable/test_multiple_versions.py index 0af15efb2..460cde4b3 100644 --- a/src/allmydata/test/mutable/test_multiple_versions.py +++ b/src/allmydata/test/mutable/test_multiple_versions.py @@ -1,3 +1,15 @@ +""" +Ported to Python 3. 
+""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 + from twisted.trial import unittest from allmydata.monitor import Monitor from allmydata.mutable.common import MODE_CHECK, MODE_READ @@ -36,7 +48,7 @@ class MultipleVersions(unittest.TestCase, PublishMixin, CheckerMixin): self.failUnlessEqual(len(smap.unrecoverable_versions()), 1) newer = smap.unrecoverable_newer_versions() self.failUnlessEqual(len(newer), 1) - verinfo, health = newer.items()[0] + verinfo, health = list(newer.items())[0] self.failUnlessEqual(verinfo[0], 4) self.failUnlessEqual(health, (1,3)) self.failIf(smap.needs_merge()) @@ -70,10 +82,10 @@ class MultipleVersions(unittest.TestCase, PublishMixin, CheckerMixin): self._set_versions(target) def _modify(oldversion, servermap, first_time): - return oldversion + " modified" + return oldversion + b" modified" d = self._fn.modify(_modify) d.addCallback(lambda res: self._fn.download_best_version()) - expected = self.CONTENTS[2] + " modified" + expected = self.CONTENTS[2] + b" modified" d.addCallback(lambda res: self.failUnlessEqual(res, expected)) # and the servermap should indicate that the outlier was replaced too d.addCallback(lambda res: self._fn.get_servermap(MODE_CHECK)) diff --git a/src/allmydata/test/mutable/test_problems.py b/src/allmydata/test/mutable/test_problems.py index 08990c180..86a367596 100644 --- a/src/allmydata/test/mutable/test_problems.py +++ b/src/allmydata/test/mutable/test_problems.py @@ -1,4 +1,14 @@ +""" +Ported to Python 3. +""" from __future__ import print_function +from __future__ import absolute_import +from __future__ import division +from __future__ import unicode_literals + +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 import os, base64 from twisted.trial import unittest @@ -56,7 +66,7 @@ class Problems(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin): self.basedir = "mutable/Problems/test_publish_surprise_%s" % version self.set_up_grid() nm = self.g.clients[0].nodemaker - d = nm.create_mutable_file(MutableData("contents 1"), + d = nm.create_mutable_file(MutableData(b"contents 1"), version=version) def _created(n): d = defer.succeed(None) @@ -67,7 +77,7 @@ class Problems(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin): d.addCallback(_got_smap1) # then modify the file, leaving the old map untouched d.addCallback(lambda res: log.msg("starting winning write")) - d.addCallback(lambda res: n.overwrite(MutableData("contents 2"))) + d.addCallback(lambda res: n.overwrite(MutableData(b"contents 2"))) # now attempt to modify the file with the old servermap. 
This # will look just like an uncoordinated write, in which every # single share got updated between our mapupdate and our publish @@ -76,7 +86,7 @@ class Problems(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin): self.shouldFail(UncoordinatedWriteError, "test_publish_surprise", None, n.upload, - MutableData("contents 2a"), self.old_map)) + MutableData(b"contents 2a"), self.old_map)) return d d.addCallback(_created) return d @@ -91,7 +101,7 @@ class Problems(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin): self.basedir = "mutable/Problems/test_retrieve_surprise" self.set_up_grid() nm = self.g.clients[0].nodemaker - d = nm.create_mutable_file(MutableData("contents 1"*4000)) + d = nm.create_mutable_file(MutableData(b"contents 1"*4000)) def _created(n): d = defer.succeed(None) d.addCallback(lambda res: n.get_servermap(MODE_READ)) @@ -101,7 +111,7 @@ class Problems(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin): d.addCallback(_got_smap1) # then modify the file, leaving the old map untouched d.addCallback(lambda res: log.msg("starting winning write")) - d.addCallback(lambda res: n.overwrite(MutableData("contents 2"))) + d.addCallback(lambda res: n.overwrite(MutableData(b"contents 2"))) # now attempt to retrieve the old version with the old servermap. # This will look like someone has changed the file since we # updated the servermap. @@ -128,7 +138,7 @@ class Problems(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin): self.basedir = "mutable/Problems/test_unexpected_shares" self.set_up_grid() nm = self.g.clients[0].nodemaker - d = nm.create_mutable_file(MutableData("contents 1")) + d = nm.create_mutable_file(MutableData(b"contents 1")) def _created(n): d = defer.succeed(None) d.addCallback(lambda res: n.get_servermap(MODE_WRITE)) @@ -140,7 +150,7 @@ class Problems(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin): self.g.remove_server(peer0) # then modify the file, leaving the old map untouched log.msg("starting winning write") - return n.overwrite(MutableData("contents 2")) + return n.overwrite(MutableData(b"contents 2")) d.addCallback(_got_smap1) # now attempt to modify the file with the old servermap. This # will look just like an uncoordinated write, in which every @@ -150,7 +160,7 @@ class Problems(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin): self.shouldFail(UncoordinatedWriteError, "test_surprise", None, n.upload, - MutableData("contents 2a"), self.old_map)) + MutableData(b"contents 2a"), self.old_map)) return d d.addCallback(_created) return d @@ -159,7 +169,7 @@ class Problems(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin): self.basedir = "mutable/Problems/test_multiply_placed_shares" self.set_up_grid() nm = self.g.clients[0].nodemaker - d = nm.create_mutable_file(MutableData("contents 1")) + d = nm.create_mutable_file(MutableData(b"contents 1")) # remove one of the servers and reupload the file. 
def _created(n): self._node = n @@ -226,19 +236,19 @@ class Problems(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin): d.addCallback(_break_peer0) # now "create" the file, using the pre-established key, and let the # initial publish finally happen - d.addCallback(lambda res: nm.create_mutable_file(MutableData("contents 1"))) + d.addCallback(lambda res: nm.create_mutable_file(MutableData(b"contents 1"))) # that ought to work def _got_node(n): d = n.download_best_version() - d.addCallback(lambda res: self.failUnlessEqual(res, "contents 1")) + d.addCallback(lambda res: self.failUnlessEqual(res, b"contents 1")) # now break the second peer def _break_peer1(res): self.g.break_server(self.server1.get_serverid()) d.addCallback(_break_peer1) - d.addCallback(lambda res: n.overwrite(MutableData("contents 2"))) + d.addCallback(lambda res: n.overwrite(MutableData(b"contents 2"))) # that ought to work too d.addCallback(lambda res: n.download_best_version()) - d.addCallback(lambda res: self.failUnlessEqual(res, "contents 2")) + d.addCallback(lambda res: self.failUnlessEqual(res, b"contents 2")) def _explain_error(f): print(f) if f.check(NotEnoughServersError): @@ -267,18 +277,18 @@ class Problems(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin): peerids = [s.get_serverid() for s in sb.get_connected_servers()] self.g.break_server(peerids[0]) - d = nm.create_mutable_file(MutableData("contents 1")) + d = nm.create_mutable_file(MutableData(b"contents 1")) def _created(n): d = n.download_best_version() - d.addCallback(lambda res: self.failUnlessEqual(res, "contents 1")) + d.addCallback(lambda res: self.failUnlessEqual(res, b"contents 1")) # now break one of the remaining servers def _break_second_server(res): self.g.break_server(peerids[1]) d.addCallback(_break_second_server) - d.addCallback(lambda res: n.overwrite(MutableData("contents 2"))) + d.addCallback(lambda res: n.overwrite(MutableData(b"contents 2"))) # that ought to work too d.addCallback(lambda res: n.download_best_version()) - d.addCallback(lambda res: self.failUnlessEqual(res, "contents 2")) + d.addCallback(lambda res: self.failUnlessEqual(res, b"contents 2")) return d d.addCallback(_created) return d @@ -294,7 +304,7 @@ class Problems(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin): d = self.shouldFail(NotEnoughServersError, "test_publish_all_servers_bad", "ran out of good servers", - nm.create_mutable_file, MutableData("contents")) + nm.create_mutable_file, MutableData(b"contents")) return d def test_publish_no_servers(self): @@ -306,7 +316,7 @@ class Problems(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin): d = self.shouldFail(NotEnoughServersError, "test_publish_no_servers", "Ran out of non-bad servers", - nm.create_mutable_file, MutableData("contents")) + nm.create_mutable_file, MutableData(b"contents")) return d @@ -322,7 +332,7 @@ class Problems(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin): # we need some contents that are large enough to push the privkey out # of the early part of the file - LARGE = "These are Larger contents" * 2000 # about 50KB + LARGE = b"These are Larger contents" * 2000 # about 50KB LARGE_uploadable = MutableData(LARGE) d = nm.create_mutable_file(LARGE_uploadable) def _created(n): @@ -359,7 +369,7 @@ class Problems(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin): self.basedir = "mutable/Problems/test_privkey_query_missing" self.set_up_grid(num_servers=20) nm = self.g.clients[0].nodemaker - LARGE = "These are Larger contents" * 2000 # about 50KiB 
+ LARGE = b"These are Larger contents" * 2000 # about 50KiB LARGE_uploadable = MutableData(LARGE) nm._node_cache = DevNullDictionary() # disable the nodecache @@ -385,7 +395,7 @@ class Problems(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin): self.basedir = "mutable/Problems/test_block_and_hash_query_error" self.set_up_grid(num_servers=20) nm = self.g.clients[0].nodemaker - CONTENTS = "contents" * 2000 + CONTENTS = b"contents" * 2000 CONTENTS_uploadable = MutableData(CONTENTS) d = nm.create_mutable_file(CONTENTS_uploadable) def _created(node): @@ -451,9 +461,9 @@ class Problems(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin): return d -TEST_1654_CAP = "URI:SSK:6jthysgozssjnagqlcxjq7recm:yxawei54fmf2ijkrvs2shs6iey4kpdp6joi7brj2vrva6sp5nf3a" +TEST_1654_CAP = b"URI:SSK:6jthysgozssjnagqlcxjq7recm:yxawei54fmf2ijkrvs2shs6iey4kpdp6joi7brj2vrva6sp5nf3a" -TEST_1654_SH0 = """\ +TEST_1654_SH0 = b"""\ VGFob2UgbXV0YWJsZSBjb250YWluZXIgdjEKdQlEA46m9s5j6lnzsOHytBTs2JOo AkWe8058hyrDa8igfBSqZMKO3aDOrFuRVt0ySYZ6oihFqPJRAAAAAAAAB8YAAAAA AAAJmgAAAAFPNgDkK8brSCzKz6n8HFqzbnAlALvnaB0Qpa1Bjo9jiZdmeMyneHR+ @@ -507,7 +517,7 @@ TStXB+q0MndBXw5ADp/Jac1DVaSWruVAdjemQ+si1olk8xH+uTMXU7PgV9WkpIiy bQHi/oRGA1aHSn84SIt+HpAfRoVdr4N90bYWmYQNqfKoyWCbEr+dge/GSD1nddAJ 72mXGlqyLyWYuAAAAAA=""" -TEST_1654_SH1 = """\ +TEST_1654_SH1 = b"""\ VGFob2UgbXV0YWJsZSBjb250YWluZXIgdjEKdQlEA45R4Y4kuV458rSTGDVTqdzz 9Fig3NQ3LermyD+0XLeqbC7KNgvv6cNzMZ9psQQ3FseYsIR1AAAAAAAAB8YAAAAA AAAJmgAAAAFPNgDkd/Y9Z+cuKctZk9gjwF8thT+fkmNCsulILsJw5StGHAA1f7uL diff --git a/src/allmydata/test/mutable/test_repair.py b/src/allmydata/test/mutable/test_repair.py index af35c58c6..fb1caa974 100644 --- a/src/allmydata/test/mutable/test_repair.py +++ b/src/allmydata/test/mutable/test_repair.py @@ -1,3 +1,15 @@ +""" +Ported to Python 3. +""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 + from twisted.trial import unittest from allmydata.interfaces import IRepairResults, ICheckAndRepairResults from allmydata.monitor import Monitor @@ -239,7 +251,7 @@ class Repair(unittest.TestCase, PublishMixin, ShouldFailMixin): def _get_readcap(res): self._fn3 = self._fn.get_readonly() # also delete some shares - for peerid,shares in self._storage._peers.items(): + for peerid,shares in list(self._storage._peers.items()): shares.pop(0, None) d.addCallback(_get_readcap) d.addCallback(lambda res: self._fn3.check_and_repair(Monitor())) @@ -258,7 +270,7 @@ class Repair(unittest.TestCase, PublishMixin, ShouldFailMixin): # In the buggy version, the check that precedes the retrieve+publish # cycle uses MODE_READ, instead of MODE_REPAIR, and fails to get the # privkey that repair needs. - d = self.publish_sdmf("") + d = self.publish_sdmf(b"") def _delete_one_share(ign): shares = self._storage._peers for peerid in shares: diff --git a/src/allmydata/test/mutable/test_roundtrip.py b/src/allmydata/test/mutable/test_roundtrip.py index 477e33ce3..79292b000 100644 --- a/src/allmydata/test/mutable/test_roundtrip.py +++ b/src/allmydata/test/mutable/test_roundtrip.py @@ -1,4 +1,14 @@ +""" +Ported to Python 3. 
+""" from __future__ import print_function +from __future__ import absolute_import +from __future__ import division +from __future__ import unicode_literals + +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 from six.moves import cStringIO as StringIO from twisted.trial import unittest @@ -35,7 +45,7 @@ class Roundtrip(unittest.TestCase, testutil.ShouldFailMixin, PublishMixin): def abbrev_verinfo_dict(self, verinfo_d): output = {} - for verinfo,value in verinfo_d.items(): + for verinfo,value in list(verinfo_d.items()): (seqnum, root_hash, IV, segsize, datalength, k, N, prefix, offsets_tuple) = verinfo output["%d-%s" % (seqnum, base32.b2a(root_hash)[:4])] = value @@ -54,7 +64,7 @@ class Roundtrip(unittest.TestCase, testutil.ShouldFailMixin, PublishMixin): r = Retrieve(self._fn, self._storage_broker, servermap, version) c = consumer.MemoryConsumer() d = r.download(consumer=c) - d.addCallback(lambda mc: "".join(mc.chunks)) + d.addCallback(lambda mc: b"".join(mc.chunks)) return d @@ -88,7 +98,7 @@ class Roundtrip(unittest.TestCase, testutil.ShouldFailMixin, PublishMixin): def test_all_shares_vanished(self): d = self.make_servermap() def _remove_shares(servermap): - for shares in self._storage._peers.values(): + for shares in list(self._storage._peers.values()): shares.clear() d1 = self.shouldFail(NotEnoughSharesError, "test_all_shares_vanished", @@ -103,7 +113,7 @@ class Roundtrip(unittest.TestCase, testutil.ShouldFailMixin, PublishMixin): d = self.make_servermap() def _remove_shares(servermap): self._version = servermap.best_recoverable_version() - for shares in self._storage._peers.values()[2:]: + for shares in list(self._storage._peers.values())[2:]: shares.clear() return self.make_servermap(servermap) d.addCallback(_remove_shares) @@ -317,7 +327,7 @@ class Roundtrip(unittest.TestCase, testutil.ShouldFailMixin, PublishMixin): N = self._fn.get_total_shares() d = defer.succeed(None) d.addCallback(corrupt, self._storage, "pubkey", - shnums_to_corrupt=range(0, N-k)) + shnums_to_corrupt=list(range(0, N-k))) d.addCallback(lambda res: self.make_servermap()) def _do_retrieve(servermap): self.failUnless(servermap.get_problems()) @@ -328,7 +338,7 @@ class Roundtrip(unittest.TestCase, testutil.ShouldFailMixin, PublishMixin): c = consumer.MemoryConsumer() return r.download(c) d.addCallback(_do_retrieve) - d.addCallback(lambda mc: "".join(mc.chunks)) + d.addCallback(lambda mc: b"".join(mc.chunks)) d.addCallback(lambda new_contents: self.failUnlessEqual(new_contents, self.CONTENTS)) return d @@ -340,7 +350,7 @@ class Roundtrip(unittest.TestCase, testutil.ShouldFailMixin, PublishMixin): else: d = defer.succeed(None) d.addCallback(lambda ignored: - corrupt(None, self._storage, offset, range(5))) + corrupt(None, self._storage, offset, list(range(5)))) d.addCallback(lambda ignored: self.make_servermap()) def _do_retrieve(servermap): diff --git a/src/allmydata/test/mutable/test_servermap.py b/src/allmydata/test/mutable/test_servermap.py index 56a0b942e..e8f933977 100644 --- a/src/allmydata/test/mutable/test_servermap.py +++ b/src/allmydata/test/mutable/test_servermap.py @@ -1,3 +1,16 @@ +""" +Ported to Python 3. 
+""" + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 + from twisted.trial import unittest from twisted.internet import defer from allmydata.monitor import Monitor @@ -36,7 +49,7 @@ class Servermap(unittest.TestCase, PublishMixin): self.failUnlessEqual(sm.recoverable_versions(), set([best])) self.failUnlessEqual(len(sm.shares_available()), 1) self.failUnlessEqual(sm.shares_available()[best], (num_shares, 3, 10)) - shnum, servers = sm.make_sharemap().items()[0] + shnum, servers = list(sm.make_sharemap().items())[0] server = list(servers)[0] self.failUnlessEqual(sm.version_on_server(server, shnum), best) self.failUnlessEqual(sm.version_on_server(server, 666), None) @@ -83,7 +96,7 @@ class Servermap(unittest.TestCase, PublishMixin): # create a new file, which is large enough to knock the privkey out # of the early part of the file - LARGE = "These are Larger contents" * 200 # about 5KB + LARGE = b"These are Larger contents" * 200 # about 5KB LARGE_uploadable = MutableData(LARGE) d.addCallback(lambda res: self._nodemaker.create_mutable_file(LARGE_uploadable)) def _created(large_fn): @@ -112,7 +125,7 @@ class Servermap(unittest.TestCase, PublishMixin): for (shnum, server, timestamp) in shares: if shnum < 5: self._corrupted.add( (server, shnum) ) - sm.mark_bad_share(server, shnum, "") + sm.mark_bad_share(server, shnum, b"") return self.update_servermap(sm, MODE_WRITE) d.addCallback(_made_map) def _check_map(sm): @@ -160,7 +173,7 @@ class Servermap(unittest.TestCase, PublishMixin): best = sm.best_recoverable_version() self.failUnlessEqual(best, None) self.failUnlessEqual(len(sm.shares_available()), 1) - self.failUnlessEqual(sm.shares_available().values()[0], (2,3,10) ) + self.failUnlessEqual(list(sm.shares_available().values())[0], (2,3,10) ) return sm def test_not_quite_enough_shares(self): @@ -218,7 +231,7 @@ class Servermap(unittest.TestCase, PublishMixin): # 10 shares self.failUnlessEqual(len(sm.update_data), 10) # one version - for data in sm.update_data.itervalues(): + for data in sm.update_data.values(): self.failUnlessEqual(len(data), 1) d.addCallback(_check_servermap) return d diff --git a/src/allmydata/test/mutable/test_update.py b/src/allmydata/test/mutable/test_update.py index 971273a80..65af06486 100644 --- a/src/allmydata/test/mutable/test_update.py +++ b/src/allmydata/test/mutable/test_update.py @@ -1,4 +1,14 @@ +""" +Ported to Python 3. +""" from __future__ import print_function +from __future__ import division +from __future__ import absolute_import +from __future__ import unicode_literals + +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 import re from twisted.trial import unittest @@ -23,10 +33,10 @@ class Update(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin): self.c = self.g.clients[0] self.nm = self.c.nodemaker # self.data should be at least three segments long. 
- td = "testdata " - self.data = td*(int(3*SEGSIZE/len(td))+10) # currently about 400kB + td = b"testdata " + self.data = td*(int(3*SEGSIZE//len(td))+10) # currently about 400kB assert len(self.data) > 3*SEGSIZE - self.small_data = "test data" * 10 # 90 B; SDMF + self.small_data = b"test data" * 10 # 90 B; SDMF def do_upload_sdmf(self): @@ -68,42 +78,42 @@ class Update(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin): def test_append(self): # We should be able to append data to a mutable file and get # what we expect. - return self._test_replace(len(self.data), "appended") + return self._test_replace(len(self.data), b"appended") def test_replace_middle(self): # We should be able to replace data in the middle of a mutable # file and get what we expect back. - return self._test_replace(100, "replaced") + return self._test_replace(100, b"replaced") def test_replace_beginning(self): # We should be able to replace data at the beginning of the file # without truncating the file - return self._test_replace(0, "beginning") + return self._test_replace(0, b"beginning") def test_replace_segstart1(self): - return self._test_replace(128*1024+1, "NNNN") + return self._test_replace(128*1024+1, b"NNNN") def test_replace_zero_length_beginning(self): - return self._test_replace(0, "") + return self._test_replace(0, b"") def test_replace_zero_length_middle(self): - return self._test_replace(50, "") + return self._test_replace(50, b"") def test_replace_zero_length_segstart1(self): - return self._test_replace(128*1024+1, "") + return self._test_replace(128*1024+1, b"") def test_replace_and_extend(self): # We should be able to replace data in the middle of a mutable # file and extend that mutable file and get what we expect. - return self._test_replace(100, "modified " * 100000) + return self._test_replace(100, b"modified " * 100000) def _check_differences(self, got, expected): # displaying arbitrary file corruption is tricky for a # 1MB file of repeating data,, so look for likely places # with problems and display them separately - gotmods = [mo.span() for mo in re.finditer('([A-Z]+)', got)] - expmods = [mo.span() for mo in re.finditer('([A-Z]+)', expected)] + gotmods = [mo.span() for mo in re.finditer(b'([A-Z]+)', got)] + expmods = [mo.span() for mo in re.finditer(b'([A-Z]+)', expected)] gotspans = ["%d:%d=%s" % (start,end,got[start:end]) for (start,end) in gotmods] expspans = ["%d:%d=%s" % (start,end,expected[start:end]) @@ -131,14 +141,15 @@ class Update(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin): def test_replace_locations(self): # exercise fencepost conditions - suspects = range(SEGSIZE-3, SEGSIZE+1)+range(2*SEGSIZE-3, 2*SEGSIZE+1) + suspects = list(range(SEGSIZE-3, SEGSIZE+1)) + list( + range(2*SEGSIZE-3, 2*SEGSIZE+1)) letters = iter("ABCDEFGHIJKLMNOPQRSTUVWXYZ") d0 = self.do_upload_mdmf() def _run(ign): expected = self.data d = defer.succeed(None) for offset in suspects: - new_data = letters.next()*2 # "AA", then "BB", etc + new_data = next(letters).encode("ascii") * 2 # "AA", then "BB", etc expected = expected[:offset]+new_data+expected[offset+2:] d.addCallback(lambda ign: self.mdmf_node.get_best_mutable_version()) @@ -164,7 +175,7 @@ class Update(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin): # long -- this is 7 segments in the default segment size. So we # need to add 2 segments worth of data to push it over a # power-of-two boundary. 
- segment = "a" * DEFAULT_MAX_SEGMENT_SIZE + segment = b"a" * DEFAULT_MAX_SEGMENT_SIZE new_data = self.data + (segment * 2) d0 = self.do_upload_mdmf() def _run(ign): @@ -181,12 +192,12 @@ class Update(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin): def test_update_sdmf(self): # Running update on a single-segment file should still work. - new_data = self.small_data + "appended" + new_data = self.small_data + b"appended" d0 = self.do_upload_sdmf() def _run(ign): d = defer.succeed(None) d.addCallback(lambda ign: self.sdmf_node.get_best_mutable_version()) - d.addCallback(lambda mv: mv.update(MutableData("appended"), + d.addCallback(lambda mv: mv.update(MutableData(b"appended"), len(self.small_data))) d.addCallback(lambda ign: self.sdmf_node.download_best_version()) d.addCallback(lambda results: @@ -199,14 +210,14 @@ class Update(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin): # The wrapper should know how to handle the tail segment # appropriately. replace_offset = len(self.data) - 100 - new_data = self.data[:replace_offset] + "replaced" - rest_offset = replace_offset + len("replaced") + new_data = self.data[:replace_offset] + b"replaced" + rest_offset = replace_offset + len(b"replaced") new_data += self.data[rest_offset:] d0 = self.do_upload_mdmf() def _run(ign): d = defer.succeed(None) d.addCallback(lambda ign: self.mdmf_node.get_best_mutable_version()) - d.addCallback(lambda mv: mv.update(MutableData("replaced"), + d.addCallback(lambda mv: mv.update(MutableData(b"replaced"), replace_offset)) d.addCallback(lambda ign: self.mdmf_node.download_best_version()) d.addCallback(lambda results: @@ -218,16 +229,16 @@ class Update(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin): def test_multiple_segment_replace(self): replace_offset = 2 * DEFAULT_MAX_SEGMENT_SIZE new_data = self.data[:replace_offset] - new_segment = "a" * DEFAULT_MAX_SEGMENT_SIZE + new_segment = b"a" * DEFAULT_MAX_SEGMENT_SIZE new_data += 2 * new_segment - new_data += "replaced" + new_data += b"replaced" rest_offset = len(new_data) new_data += self.data[rest_offset:] d0 = self.do_upload_mdmf() def _run(ign): d = defer.succeed(None) d.addCallback(lambda ign: self.mdmf_node.get_best_mutable_version()) - d.addCallback(lambda mv: mv.update(MutableData((2 * new_segment) + "replaced"), + d.addCallback(lambda mv: mv.update(MutableData((2 * new_segment) + b"replaced"), replace_offset)) d.addCallback(lambda ignored: self.mdmf_node.download_best_version()) d.addCallback(lambda results: diff --git a/src/allmydata/test/mutable/test_version.py b/src/allmydata/test/mutable/test_version.py index dd871aeb1..06191b5fe 100644 --- a/src/allmydata/test/mutable/test_version.py +++ b/src/allmydata/test/mutable/test_version.py @@ -1,11 +1,16 @@ +""" +Ported to Python 3. 
+""" from __future__ import print_function +from __future__ import absolute_import +from __future__ import division +from __future__ import unicode_literals -import os - -# Python 2 compatibility from future.utils import PY2 if PY2: - from future.builtins import str # noqa: F401 + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 + +import os from six.moves import cStringIO as StringIO from twisted.internet import defer @@ -32,8 +37,8 @@ class Version(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin, \ self.set_up_grid() self.c = self.g.clients[0] self.nm = self.c.nodemaker - self.data = "test data" * 100000 # about 900 KiB; MDMF - self.small_data = "test data" * 10 # 90 B; SDMF + self.data = b"test data" * 100000 # about 900 KiB; MDMF + self.small_data = b"test data" * 10 # 90 B; SDMF def do_upload_mdmf(self, data=None): @@ -62,7 +67,7 @@ class Version(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin, \ return d def do_upload_empty_sdmf(self): - d = self.nm.create_mutable_file(MutableData("")) + d = self.nm.create_mutable_file(MutableData(b"")) def _then(n): assert isinstance(n, MutableFileNode) self.sdmf_zero_length_node = n @@ -107,9 +112,8 @@ class Version(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin, \ self.failUnless(" total_shares: 10" in lines, output) self.failUnless(" segsize: 131073" in lines, output) self.failUnless(" datalen: %d" % len(self.data) in lines, output) - vcap = n.get_verify_cap().to_string() + vcap = str(n.get_verify_cap().to_string(), "utf-8") self.failUnless(" verify-cap: %s" % vcap in lines, output) - cso = debug.CatalogSharesOptions() cso.nodedirs = fso.nodedirs cso.stdout = StringIO() @@ -121,7 +125,7 @@ class Version(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin, \ self.failUnless(oneshare.startswith("MDMF"), oneshare) fields = oneshare.split() self.failUnlessEqual(fields[0], "MDMF") - self.failUnlessEqual(fields[1], storage_index) + self.failUnlessEqual(fields[1].encode("ascii"), storage_index) self.failUnlessEqual(fields[2], "3/10") self.failUnlessEqual(fields[3], "%d" % len(self.data)) self.failUnless(fields[4].startswith("#1:"), fields[3]) @@ -144,8 +148,8 @@ class Version(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin, \ # Now update. The sequence number in both cases should be 1 in # both cases. 
def _do_update(ignored): - new_data = MutableData("foo bar baz" * 100000) - new_small_data = MutableData("foo bar baz" * 10) + new_data = MutableData(b"foo bar baz" * 100000) + new_small_data = MutableData(b"foo bar baz" * 10) d1 = self.mdmf_node.overwrite(new_data) d2 = self.sdmf_node.overwrite(new_small_data) dl = gatherResults([d1, d2]) @@ -221,38 +225,38 @@ class Version(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin, \ def test_toplevel_overwrite(self): - new_data = MutableData("foo bar baz" * 100000) - new_small_data = MutableData("foo bar baz" * 10) + new_data = MutableData(b"foo bar baz" * 100000) + new_small_data = MutableData(b"foo bar baz" * 10) d = self.do_upload() d.addCallback(lambda ign: self.mdmf_node.overwrite(new_data)) d.addCallback(lambda ignored: self.mdmf_node.download_best_version()) d.addCallback(lambda data: - self.failUnlessEqual(data, "foo bar baz" * 100000)) + self.failUnlessEqual(data, b"foo bar baz" * 100000)) d.addCallback(lambda ignored: self.sdmf_node.overwrite(new_small_data)) d.addCallback(lambda ignored: self.sdmf_node.download_best_version()) d.addCallback(lambda data: - self.failUnlessEqual(data, "foo bar baz" * 10)) + self.failUnlessEqual(data, b"foo bar baz" * 10)) return d def test_toplevel_modify(self): d = self.do_upload() def modifier(old_contents, servermap, first_time): - return old_contents + "modified" + return old_contents + b"modified" d.addCallback(lambda ign: self.mdmf_node.modify(modifier)) d.addCallback(lambda ignored: self.mdmf_node.download_best_version()) d.addCallback(lambda data: - self.failUnlessIn("modified", data)) + self.failUnlessIn(b"modified", data)) d.addCallback(lambda ignored: self.sdmf_node.modify(modifier)) d.addCallback(lambda ignored: self.sdmf_node.download_best_version()) d.addCallback(lambda data: - self.failUnlessIn("modified", data)) + self.failUnlessIn(b"modified", data)) return d @@ -262,18 +266,18 @@ class Version(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin, \ # test to see that the best recoverable version is that. 
d = self.do_upload() def modifier(old_contents, servermap, first_time): - return old_contents + "modified" + return old_contents + b"modified" d.addCallback(lambda ign: self.mdmf_node.modify(modifier)) d.addCallback(lambda ignored: self.mdmf_node.download_best_version()) d.addCallback(lambda data: - self.failUnlessIn("modified", data)) + self.failUnlessIn(b"modified", data)) d.addCallback(lambda ignored: self.sdmf_node.modify(modifier)) d.addCallback(lambda ignored: self.sdmf_node.download_best_version()) d.addCallback(lambda data: - self.failUnlessIn("modified", data)) + self.failUnlessIn(b"modified", data)) return d @@ -337,10 +341,10 @@ class Version(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin, \ def _read_data(version): c = consumer.MemoryConsumer() d2 = defer.succeed(None) - for i in xrange(0, len(expected), step): + for i in range(0, len(expected), step): d2.addCallback(lambda ignored, i=i: version.read(c, i, step)) d2.addCallback(lambda ignored: - self.failUnlessEqual(expected, "".join(c.chunks))) + self.failUnlessEqual(expected, b"".join(c.chunks))) return d2 d.addCallback(_read_data) return d @@ -352,7 +356,7 @@ class Version(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin, \ expected_range = expected[offset:] else: expected_range = expected[offset:offset+length] - d.addCallback(lambda ignored: "".join(c.chunks)) + d.addCallback(lambda ignored: b"".join(c.chunks)) def _check(results): if results != expected_range: print("read([%d]+%s) got %d bytes, not %d" % \ @@ -365,7 +369,7 @@ class Version(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin, \ return d def test_partial_read_mdmf_0(self): - data = "" + data = b"" d = self.do_upload_mdmf(data=data) modes = [("all1", 0,0), ("all2", 0,None), @@ -388,7 +392,7 @@ class Version(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin, \ return d def test_partial_read_sdmf_0(self): - data = "" + data = b"" modes = [("all1", 0,0), ("all2", 0,None), ] @@ -397,7 +401,7 @@ class Version(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin, \ return d def test_partial_read_sdmf_2(self): - data = "hi" + data = b"hi" modes = [("one_byte", 0, 1), ("last_byte", 1, 1), ("last_byte2", 1, None), @@ -422,7 +426,7 @@ class Version(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin, \ return d def test_partial_read_sdmf_100(self): - data = "test data "*10 + data = b"test data "*10 modes = [("start_at_middle", 50, 50), ("start_at_middle2", 50, None), ("zero_length_at_start", 0, 0), @@ -443,12 +447,12 @@ class Version(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin, \ d2 = defer.succeed(None) d2.addCallback(lambda ignored: version.read(c)) d2.addCallback(lambda ignored: - self.failUnlessEqual(expected, "".join(c.chunks))) + self.failUnlessEqual(expected, b"".join(c.chunks))) d2.addCallback(lambda ignored: version.read(c2, offset=0, size=len(expected))) d2.addCallback(lambda ignored: - self.failUnlessEqual(expected, "".join(c2.chunks))) + self.failUnlessEqual(expected, b"".join(c2.chunks))) return d2 d.addCallback(_read_data) d.addCallback(lambda ignored: node.download_best_version()) @@ -467,5 +471,5 @@ class Version(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin, \ def test_read_and_download_sdmf_zero_length(self): d = self.do_upload_empty_sdmf() - d.addCallback(self._test_read_and_download, "") + d.addCallback(self._test_read_and_download, b"") return d diff --git a/src/allmydata/test/mutable/util.py b/src/allmydata/test/mutable/util.py index a664c1e08..62e8d7295 100644 --- 
a/src/allmydata/test/mutable/util.py +++ b/src/allmydata/test/mutable/util.py @@ -1,4 +1,18 @@ -from six.moves import cStringIO as StringIO +""" +Ported to Python 3. +""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from future.utils import PY2, bchr +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 + +from past.builtins import long + +from io import BytesIO import attr from twisted.internet import defer, reactor from foolscap.api import eventually, fireEventually @@ -75,8 +89,8 @@ class FakeStorage(object): if peerid not in self._peers: self._peers[peerid] = {} shares = self._peers[peerid] - f = StringIO() - f.write(shares.get(shnum, "")) + f = BytesIO() + f.write(shares.get(shnum, b"")) f.seek(offset) f.write(data) shares[shnum] = f.getvalue() @@ -127,9 +141,9 @@ class FakeStorageServer(object): tw_vectors, read_vector): # always-pass: parrot the test vectors back to them. readv = {} - for shnum, (testv, writev, new_length) in tw_vectors.items(): + for shnum, (testv, writev, new_length) in list(tw_vectors.items()): for (offset, length, op, specimen) in testv: - assert op in ("le", "eq", "ge") + assert op in (b"le", b"eq", b"ge") # TODO: this isn't right, the read is controlled by read_vector, # not by testv readv[shnum] = [ specimen @@ -144,14 +158,14 @@ class FakeStorageServer(object): def flip_bit(original, byte_offset): return (original[:byte_offset] + - chr(ord(original[byte_offset]) ^ 0x01) + + bchr(ord(original[byte_offset:byte_offset+1]) ^ 0x01) + original[byte_offset+1:]) def add_two(original, byte_offset): # It isn't enough to simply flip the bit for the version number, # because 1 is a valid version number. So we add two instead. return (original[:byte_offset] + - chr(ord(original[byte_offset]) ^ 0x02) + + bchr(ord(original[byte_offset:byte_offset+1]) ^ 0x02) + original[byte_offset+1:]) def corrupt(res, s, offset, shnums_to_corrupt=None, offset_offset=0): @@ -222,10 +236,10 @@ def make_peer(s, i): :rtype: ``Peer`` """ - peerid = base32.b2a(tagged_hash("peerid", "%d" % i)[:20]) + peerid = base32.b2a(tagged_hash(b"peerid", b"%d" % i)[:20]) fss = FakeStorageServer(peerid, s) ann = { - "anonymous-storage-FURL": "pb://%s@nowhere/fake" % (peerid,), + "anonymous-storage-FURL": "pb://%s@nowhere/fake" % (str(peerid, "utf-8"),), "permutation-seed-base32": peerid, } return Peer(peerid=peerid, storage_server=fss, announcement=ann) @@ -297,7 +311,7 @@ def make_nodemaker_with_storage_broker(storage_broker, keysize): :param StorageFarmBroker peers: The storage broker to use. """ - sh = client.SecretHolder("lease secret", "convergence secret") + sh = client.SecretHolder(b"lease secret", b"convergence secret") keygen = client.KeyGenerator() if keysize: keygen.set_default_keysize(keysize) @@ -311,7 +325,7 @@ class PublishMixin(object): def publish_one(self): # publish a file and create shares, which can then be manipulated # later. - self.CONTENTS = "New contents go here" * 1000 + self.CONTENTS = b"New contents go here" * 1000 self.uploadable = MutableData(self.CONTENTS) self._storage = FakeStorage() self._nodemaker = make_nodemaker(self._storage) @@ -328,7 +342,7 @@ class PublishMixin(object): # an MDMF file. # self.CONTENTS should have more than one segment. 
if data is None: - data = "This is an MDMF file" * 100000 + data = b"This is an MDMF file" * 100000 self.CONTENTS = data self.uploadable = MutableData(self.CONTENTS) self._storage = FakeStorage() @@ -346,7 +360,7 @@ class PublishMixin(object): # like publish_one, except that the result is guaranteed to be # an SDMF file if data is None: - data = "This is an SDMF file" * 1000 + data = b"This is an SDMF file" * 1000 self.CONTENTS = data self.uploadable = MutableData(self.CONTENTS) self._storage = FakeStorage() @@ -361,11 +375,11 @@ class PublishMixin(object): def publish_multiple(self, version=0): - self.CONTENTS = ["Contents 0", - "Contents 1", - "Contents 2", - "Contents 3a", - "Contents 3b"] + self.CONTENTS = [b"Contents 0", + b"Contents 1", + b"Contents 2", + b"Contents 3a", + b"Contents 3b"] self.uploadables = [MutableData(d) for d in self.CONTENTS] self._copied_shares = {} self._storage = FakeStorage() diff --git a/src/allmydata/test/no_network.py b/src/allmydata/test/no_network.py index 495553a83..cbea0dfcd 100644 --- a/src/allmydata/test/no_network.py +++ b/src/allmydata/test/no_network.py @@ -20,19 +20,26 @@ from __future__ import unicode_literals # Tubs, so it is not useful for tests that involve a Helper or the # control.furl . -from future.utils import PY2, PY3 +from future.utils import PY2 if PY2: from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 from past.builtins import unicode +from six import ensure_text import os +from base64 import b32encode +from functools import ( + partial, +) from zope.interface import implementer from twisted.application import service from twisted.internet import defer from twisted.python.failure import Failure from twisted.web.error import Error from foolscap.api import Referenceable, fireEventually, RemoteException -from base64 import b32encode +from foolscap.ipb import ( + IRemoteReference, +) import treq from allmydata.util.assertutil import _assert @@ -59,14 +66,29 @@ class IntentionalError(Exception): class Marker(object): pass +fireNow = partial(defer.succeed, None) + +@implementer(IRemoteReference) # type: ignore # warner/foolscap#79 class LocalWrapper(object): - def __init__(self, original): + """ + A ``LocalWrapper`` presents the remote reference interface to a local + object which implements a ``RemoteInterface``. + """ + def __init__(self, original, fireEventually=fireEventually): + """ + :param Callable[[], Deferred[None]] fireEventually: Get a Deferred + that will fire at some point. This is used to control when + ``callRemote`` calls the remote method. The default value allows + the reactor to iterate before the call happens. Use ``fireNow`` + to call the remote method synchronously. + """ self.original = original self.broken = False self.hung_until = None self.post_call_notifier = None self.disconnectors = {} self.counter_by_methname = {} + self._fireEventually = fireEventually def _clear_counters(self): self.counter_by_methname = {} @@ -82,7 +104,7 @@ class LocalWrapper(object): # selected return values. 
def wrap(a): if isinstance(a, Referenceable): - return LocalWrapper(a) + return self._wrap(a) else: return a args = tuple([wrap(a) for a in args]) @@ -110,7 +132,7 @@ class LocalWrapper(object): return d2 return _really_call() - d = fireEventually() + d = self._fireEventually() d.addCallback(lambda res: _call()) def _wrap_exception(f): return Failure(RemoteException(f)) @@ -124,10 +146,10 @@ class LocalWrapper(object): if methname == "allocate_buckets": (alreadygot, allocated) = res for shnum in allocated: - allocated[shnum] = LocalWrapper(allocated[shnum]) + allocated[shnum] = self._wrap(allocated[shnum]) if methname == "get_buckets": for shnum in res: - res[shnum] = LocalWrapper(res[shnum]) + res[shnum] = self._wrap(res[shnum]) return res d.addCallback(_return_membrane) if self.post_call_notifier: @@ -141,6 +163,10 @@ class LocalWrapper(object): def dontNotifyOnDisconnect(self, marker): del self.disconnectors[marker] + def _wrap(self, value): + return LocalWrapper(value, self._fireEventually) + + def wrap_storage_server(original): # Much of the upload/download code uses rref.version (which normally # comes from rrefutil.add_version_to_remote_reference). To avoid using a @@ -187,9 +213,12 @@ class NoNetworkServer(object): return _StorageServer(lambda: self.rref) def get_version(self): return self.rref.version + def start_connecting(self, trigger_cb): + raise NotImplementedError + @implementer(IStorageBroker) -class NoNetworkStorageBroker(object): +class NoNetworkStorageBroker(object): # type: ignore # missing many methods def get_servers_for_psi(self, peer_selection_index): def _permuted(server): seed = server.get_permutation_seed() @@ -233,7 +262,7 @@ def create_no_network_client(basedir): return defer.succeed(client) -class _NoNetworkClient(_Client): +class _NoNetworkClient(_Client): # type: ignore # tahoe-lafs/ticket/3573 """ Overrides all _Client networking functionality to do nothing. """ @@ -269,10 +298,6 @@ class _NoNetworkClient(_Client): pass #._servers will be set by the NoNetworkGrid which creates us - if PY3: - def init_web(self, *args, **kwargs): - print("Web service is temporarily disabled until nevow is gone.") - class SimpleStats(object): def __init__(self): @@ -336,7 +361,7 @@ class NoNetworkGrid(service.MultiService): to complete properly """ if self._setup_errors: - raise self._setup_errors[0].value + self._setup_errors[0].raiseException() @defer.inlineCallbacks def make_client(self, i, write_config=True): @@ -479,12 +504,10 @@ class GridTestMixin(object): def _record_webports_and_baseurls(self): self.g._check_clients() - if PY2: - # Temporarily disabled on Python 3 until Nevow is gone: - self.client_webports = [c.getServiceNamed("webish").getPortnum() - for c in self.g.clients] - self.client_baseurls = [c.getServiceNamed("webish").getURL() - for c in self.g.clients] + self.client_webports = [c.getServiceNamed("webish").getPortnum() + for c in self.g.clients] + self.client_baseurls = [c.getServiceNamed("webish").getURL() + for c in self.g.clients] def get_client_config(self, i=0): self.g._check_clients() @@ -595,8 +618,7 @@ class GridTestMixin(object): method="GET", clientnum=0, **kwargs): # if return_response=True, this fires with (data, statuscode, # respheaders) instead of just data. 
- assert not isinstance(urlpath, unicode) - url = self.client_baseurls[clientnum] + urlpath + url = self.client_baseurls[clientnum] + ensure_text(urlpath) response = yield treq.request(method, url, persistent=False, allow_redirects=followRedirect, diff --git a/src/allmydata/test/storage_plugin.py b/src/allmydata/test/storage_plugin.py index 52e909b13..17ec89078 100644 --- a/src/allmydata/test/storage_plugin.py +++ b/src/allmydata/test/storage_plugin.py @@ -3,11 +3,8 @@ A storage server plugin the test suite can use to validate the functionality. """ -from future.utils import native_str - -from json import ( - dumps, -) +from future.utils import native_str, native_str_to_bytes +from six import ensure_str import attr @@ -35,6 +32,9 @@ from allmydata.interfaces import ( from allmydata.client import ( AnnounceableStorageServer, ) +from allmydata.util.jsonbytes import ( + dumps, +) class RIDummy(RemoteInterface): @@ -47,8 +47,9 @@ class RIDummy(RemoteInterface): """ - -@implementer(IFoolscapStoragePlugin) +# type ignored due to missing stubs for Twisted +# https://twistedmatrix.com/trac/ticket/9717 +@implementer(IFoolscapStoragePlugin) # type: ignore @attr.s class DummyStorage(object): name = attr.ib() @@ -84,8 +85,8 @@ class DummyStorage(object): """ items = configuration.items(self._client_section_name, []) resource = Data( - dumps(dict(items)), - b"text/json", + native_str_to_bytes(dumps(dict(items))), + ensure_str("text/json"), ) # Give it some dynamic stuff too. resource.putChild(b"counter", GetCounter()) @@ -102,12 +103,12 @@ class GetCounter(Resource, object): value = 0 def render_GET(self, request): self.value += 1 - return dumps({"value": self.value}) + return native_str_to_bytes(dumps({"value": self.value})) @implementer(RIDummy) @attr.s(frozen=True) -class DummyStorageServer(object): +class DummyStorageServer(object): # type: ignore # warner/foolscap#78 get_anonymous_storage_server = attr.ib() def remote_just_some_method(self): @@ -116,7 +117,7 @@ class DummyStorageServer(object): @implementer(IStorageServer) @attr.s -class DummyStorageClient(object): +class DummyStorageClient(object): # type: ignore # incomplete implementation get_rref = attr.ib() configuration = attr.ib() announcement = attr.ib() diff --git a/src/allmydata/test/strategies.py b/src/allmydata/test/strategies.py new file mode 100644 index 000000000..553b2c226 --- /dev/null +++ b/src/allmydata/test/strategies.py @@ -0,0 +1,111 @@ +""" +Hypothesis strategies use for testing Tahoe-LAFS. +""" + +from hypothesis.strategies import ( + one_of, + builds, + binary, +) + +from ..uri import ( + WriteableSSKFileURI, + WriteableMDMFFileURI, + DirectoryURI, + MDMFDirectoryURI, +) + +def write_capabilities(): + """ + Build ``IURI`` providers representing all kinds of write capabilities. + """ + return one_of([ + ssk_capabilities(), + mdmf_capabilities(), + dir2_capabilities(), + dir2_mdmf_capabilities(), + ]) + + +def ssk_capabilities(): + """ + Build ``WriteableSSKFileURI`` instances. + """ + return builds( + WriteableSSKFileURI, + ssk_writekeys(), + ssk_fingerprints(), + ) + + +def _writekeys(size=16): + """ + Build ``bytes`` representing write keys. + """ + return binary(min_size=size, max_size=size) + + +def ssk_writekeys(): + """ + Build ``bytes`` representing SSK write keys. + """ + return _writekeys() + + +def _fingerprints(size=32): + """ + Build ``bytes`` representing fingerprints. + """ + return binary(min_size=size, max_size=size) + + +def ssk_fingerprints(): + """ + Build ``bytes`` representing SSK fingerprints. 
+ """ + return _fingerprints() + + +def mdmf_capabilities(): + """ + Build ``WriteableMDMFFileURI`` instances. + """ + return builds( + WriteableMDMFFileURI, + mdmf_writekeys(), + mdmf_fingerprints(), + ) + + +def mdmf_writekeys(): + """ + Build ``bytes`` representing MDMF write keys. + """ + return _writekeys() + + +def mdmf_fingerprints(): + """ + Build ``bytes`` representing MDMF fingerprints. + """ + return _fingerprints() + + +def dir2_capabilities(): + """ + Build ``DirectoryURI`` instances. + """ + return builds( + DirectoryURI, + ssk_capabilities(), + ) + + +def dir2_mdmf_capabilities(): + """ + Build ``MDMFDirectoryURI`` instances. + """ + return builds( + MDMFDirectoryURI, + mdmf_capabilities(), + ) diff --git a/src/allmydata/test/test_checker.py b/src/allmydata/test/test_checker.py index 882356aeb..f56ecd089 100644 --- a/src/allmydata/test/test_checker.py +++ b/src/allmydata/test/test_checker.py @@ -1,3 +1,16 @@ +""" +Ported to Python 3. +""" + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 + import json import os.path, shutil @@ -7,11 +20,13 @@ from bs4 import BeautifulSoup from twisted.trial import unittest from twisted.internet import defer -from nevow.inevow import IRequest from zope.interface import implementer -from twisted.web.server import Request -from twisted.web.test.requesthelper import DummyChannel -from twisted.web.template import flattenString +from twisted.web.resource import ( + Resource, +) +from twisted.web.template import ( + renderElement, +) from allmydata import check_results, uri from allmydata import uri as tahoe_uri @@ -32,6 +47,9 @@ from allmydata.mutable.publish import MutableData from .common import ( EMPTY_CLIENT_CONFIG, ) +from .common_web import ( + render, +) from .web.common import ( assert_soup_has_favicon, @@ -42,27 +60,9 @@ class FakeClient(object): def get_storage_broker(self): return self.storage_broker -@implementer(IRequest) -class TestRequest(Request, object): - """ - A minimal Request class to use in tests. - - XXX: We have to have this class because `common.get_arg()` expects - a `nevow.inevow.IRequest`, which `twisted.web.server.Request` - isn't. The request needs to have `args`, `fields`, `prepath`, and - `postpath` properties so that `allmydata.web.common.get_arg()` - won't complain. 
- """ - def __init__(self, args=None, fields=None): - super(TestRequest, self).__init__(DummyChannel()) - self.args = args or {} - self.fields = fields or {} - self.prepath = [b""] - self.postpath = [b""] - @implementer(IServer) -class FakeServer(object): +class FakeServer(object): # type: ignore # incomplete implementation def get_name(self): return "fake name" @@ -75,7 +75,7 @@ class FakeServer(object): @implementer(ICheckResults) -class FakeCheckResults(object): +class FakeCheckResults(object): # type: ignore # incomplete implementation def __init__(self, si=None, healthy=False, recoverable=False, @@ -102,11 +102,11 @@ class FakeCheckResults(object): def get_corrupt_shares(self): # returns a list of (IServer, storage_index, sharenum) - return [(FakeServer(), "", 0)] + return [(FakeServer(), b"", 0)] @implementer(ICheckAndRepairResults) -class FakeCheckAndRepairResults(object): +class FakeCheckAndRepairResults(object): # type: ignore # incomplete implementation def __init__(self, si=None, repair_attempted=False, @@ -131,28 +131,37 @@ class FakeCheckAndRepairResults(object): return self._repair_success +class ElementResource(Resource, object): + def __init__(self, element): + Resource.__init__(self) + self.element = element + + def render(self, request): + return renderElement(request, self.element) + + class WebResultsRendering(unittest.TestCase): @staticmethod def remove_tags(html): - return BeautifulSoup(html).get_text(separator=" ") + return BeautifulSoup(html, 'html5lib').get_text(separator=" ") def create_fake_client(self): sb = StorageFarmBroker(True, None, EMPTY_CLIENT_CONFIG) # s.get_name() (the "short description") will be "v0-00000000". # s.get_longname() will include the -long suffix. - servers = [("v0-00000000-long", "\x00"*20, "peer-0"), - ("v0-ffffffff-long", "\xff"*20, "peer-f"), - ("v0-11111111-long", "\x11"*20, "peer-11")] + servers = [(b"v0-00000000-long", b"\x00"*20, "peer-0"), + (b"v0-ffffffff-long", b"\xff"*20, "peer-f"), + (b"v0-11111111-long", b"\x11"*20, "peer-11")] for (key_s, binary_tubid, nickname) in servers: server_id = key_s tubid_b32 = base32.b2a(binary_tubid) - furl = "pb://%s@nowhere/fake" % tubid_b32 + furl = "pb://%s@nowhere/fake" % str(tubid_b32, "utf-8") ann = { "version": 0, "service-name": "storage", "anonymous-storage-FURL": furl, "permutation-seed-base32": "", - "nickname": unicode(nickname), + "nickname": str(nickname), "app-versions": {}, # need #466 and v2 introducer "my-version": "ver", "oldest-supported": "oldest", @@ -164,21 +173,22 @@ class WebResultsRendering(unittest.TestCase): return c def render_json(self, resource): - return resource.render(TestRequest(args={"output": ["json"]})) + return self.successResultOf(render(resource, {b"output": [b"json"]})) def render_element(self, element, args=None): - d = flattenString(TestRequest(args), element) - return unittest.TestCase().successResultOf(d) + if args is None: + args = {} + return self.successResultOf(render(ElementResource(element), args)) def test_literal(self): lcr = web_check_results.LiteralCheckResultsRendererElement() html = self.render_element(lcr) - self.failUnlessIn("Literal files are always healthy", html) + self.failUnlessIn(b"Literal files are always healthy", html) - html = self.render_element(lcr, args={"return_to": ["FOOURL"]}) - self.failUnlessIn("Literal files are always healthy", html) - self.failUnlessIn('Return to file.', html) + html = self.render_element(lcr, args={b"return_to": [b"FOOURL"]}) + self.failUnlessIn(b"Literal files are always healthy", html) + 
self.failUnlessIn(b'Return to file.', html) c = self.create_fake_client() lcr = web_check_results.LiteralCheckResultsRenderer(c) @@ -192,11 +202,11 @@ class WebResultsRendering(unittest.TestCase): def test_check(self): c = self.create_fake_client() sb = c.storage_broker - serverid_1 = "\x00"*20 - serverid_f = "\xff"*20 + serverid_1 = b"\x00"*20 + serverid_f = b"\xff"*20 server_1 = sb.get_stub_server(serverid_1) server_f = sb.get_stub_server(serverid_f) - u = uri.CHKFileURI("\x00"*16, "\x00"*32, 3, 10, 1234) + u = uri.CHKFileURI(b"\x00"*16, b"\x00"*32, 3, 10, 1234) data = { "count_happiness": 8, "count_shares_needed": 3, "count_shares_expected": 9, @@ -259,8 +269,8 @@ class WebResultsRendering(unittest.TestCase): self.failUnlessIn("File Check Results for SI=2k6avp", s) # abbreviated self.failUnlessIn("Not Recoverable! : rather dead", s) - html = self.render_element(w, args={"return_to": ["FOOURL"]}) - self.failUnlessIn('Return to file/directory.', + html = self.render_element(w, args={b"return_to": [b"FOOURL"]}) + self.failUnlessIn(b'Return to file/directory.', html) w = web_check_results.CheckResultsRenderer(c, cr) @@ -301,9 +311,9 @@ class WebResultsRendering(unittest.TestCase): def test_check_and_repair(self): c = self.create_fake_client() sb = c.storage_broker - serverid_1 = "\x00"*20 - serverid_f = "\xff"*20 - u = uri.CHKFileURI("\x00"*16, "\x00"*32, 3, 10, 1234) + serverid_1 = b"\x00"*20 + serverid_f = b"\xff"*20 + u = uri.CHKFileURI(b"\x00"*16, b"\x00"*32, 3, 10, 1234) data = { "count_happiness": 5, "count_shares_needed": 3, @@ -419,21 +429,21 @@ class WebResultsRendering(unittest.TestCase): def test_deep_check_renderer(self): - status = check_results.DeepCheckResults("fake-root-si") + status = check_results.DeepCheckResults(b"fake-root-si") status.add_check( - FakeCheckResults("", False, False), + FakeCheckResults(b"", False, False), (u"fake", u"unhealthy", u"unrecoverable") ) status.add_check( - FakeCheckResults("", True, True), + FakeCheckResults(b"", True, True), (u"fake", u"healthy", u"recoverable") ) status.add_check( - FakeCheckResults("", True, False), + FakeCheckResults(b"", True, False), (u"fake", u"healthy", u"unrecoverable") ) status.add_check( - FakeCheckResults("", False, True), + FakeCheckResults(b"", False, True), (u"fake", u"unhealthy", u"recoverable") ) @@ -512,18 +522,18 @@ class WebResultsRendering(unittest.TestCase): ) def test_deep_check_and_repair_renderer(self): - status = check_results.DeepCheckAndRepairResults("") + status = check_results.DeepCheckAndRepairResults(b"") status.add_check_and_repair( - FakeCheckAndRepairResults("attempted/success", True, True), + FakeCheckAndRepairResults(b"attempted/success", True, True), (u"attempted", u"success") ) status.add_check_and_repair( - FakeCheckAndRepairResults("attempted/failure", True, False), + FakeCheckAndRepairResults(b"attempted/failure", True, False), (u"attempted", u"failure") ) status.add_check_and_repair( - FakeCheckAndRepairResults("unattempted/failure", False, False), + FakeCheckAndRepairResults(b"unattempted/failure", False, False), (u"unattempted", u"failure") ) @@ -662,7 +672,7 @@ class BalancingAct(GridTestMixin, unittest.TestCase): "This little printing function is only meant for < 26 servers" shares_chart = {} names = dict(zip([ss.my_nodeid - for _,ss in self.g.servers_by_number.iteritems()], + for _,ss in self.g.servers_by_number.items()], letters)) for shnum, serverid, _ in self.find_uri_shares(uri): shares_chart.setdefault(shnum, []).append(names[serverid]) @@ -676,8 +686,8 @@ class 
BalancingAct(GridTestMixin, unittest.TestCase): c0.encoding_params['n'] = 4 c0.encoding_params['k'] = 3 - DATA = "data" * 100 - d = c0.upload(Data(DATA, convergence="")) + DATA = b"data" * 100 + d = c0.upload(Data(DATA, convergence=b"")) def _stash_immutable(ur): self.imm = c0.create_node_from_uri(ur.get_uri()) self.uri = self.imm.get_uri() @@ -742,13 +752,13 @@ class AddLease(GridTestMixin, unittest.TestCase): c0 = self.g.clients[0] c0.encoding_params['happy'] = 1 self.uris = {} - DATA = "data" * 100 - d = c0.upload(Data(DATA, convergence="")) + DATA = b"data" * 100 + d = c0.upload(Data(DATA, convergence=b"")) def _stash_immutable(ur): self.imm = c0.create_node_from_uri(ur.get_uri()) d.addCallback(_stash_immutable) d.addCallback(lambda ign: - c0.create_mutable_file(MutableData("contents"))) + c0.create_mutable_file(MutableData(b"contents"))) def _stash_mutable(node): self.mut = node d.addCallback(_stash_mutable) @@ -834,8 +844,8 @@ class TooParallel(GridTestMixin, unittest.TestCase): "max_segment_size": 5, } self.uris = {} - DATA = "data" * 100 # 400/5 = 80 blocks - return self.c0.upload(Data(DATA, convergence="")) + DATA = b"data" * 100 # 400/5 = 80 blocks + return self.c0.upload(Data(DATA, convergence=b"")) d.addCallback(_start) def _do_check(ur): n = self.c0.create_node_from_uri(ur.get_uri()) diff --git a/src/allmydata/test/test_client.py b/src/allmydata/test/test_client.py index ea6b7bce9..63a5ceaaa 100644 --- a/src/allmydata/test/test_client.py +++ b/src/allmydata/test/test_client.py @@ -1,5 +1,4 @@ import os, sys -import mock from functools import ( partial, ) @@ -12,6 +11,15 @@ from fixtures import ( Fixture, TempDir, ) + +from hypothesis import ( + given, +) +from hypothesis.strategies import ( + sampled_from, + booleans, +) + from eliot.testing import ( capture_logging, assertHasAction, @@ -39,11 +47,10 @@ from testtools.twistedsupport import ( import allmydata import allmydata.util.log -from allmydata.node import OldConfigError, UnescapedHashError, _Config, create_node_dir -from allmydata.frontends.auth import NeedRootcapLookupScheme -from allmydata.version_checks import ( - get_package_versions_string, +from allmydata.nodemaker import ( + NodeMaker, ) +from allmydata.node import OldConfigError, UnescapedHashError, create_node_dir from allmydata import client from allmydata.storage_client import ( StorageClientConfig, @@ -58,11 +65,15 @@ from allmydata.util import ( from allmydata.util.fileutil import abspath_expanduser_unicode from allmydata.interfaces import IFilesystemNode, IFileNode, \ IImmutableFileNode, IMutableFileNode, IDirectoryNode +from allmydata.scripts.common import ( + write_introducer, +) from foolscap.api import flushEventualQueue import allmydata.test.common_util as testutil from .common import ( EMPTY_CLIENT_CONFIG, SyncTestCase, + AsyncBrokenTestCase, UseTestPlugins, MemoryIntroducerClient, get_published_announcements, @@ -72,16 +83,13 @@ from .matchers import ( matches_storage_announcement, matches_furl, ) +from .strategies import ( + write_capabilities, +) -SOME_FURL = b"pb://abcde@nowhere/fake" +SOME_FURL = "pb://abcde@nowhere/fake" -BASECONFIG = ("[client]\n" - "introducer.furl = \n" - ) - -BASECONFIG_I = ("[client]\n" - "introducer.furl = %s\n" - ) +BASECONFIG = "[client]\n" class Basic(testutil.ReallyEqualMixin, unittest.TestCase): def test_loadable(self): @@ -112,31 +120,25 @@ class Basic(testutil.ReallyEqualMixin, unittest.TestCase): @defer.inlineCallbacks def test_comment(self): """ - An unescaped comment character (#) in a furl results in an + A 
comment character (#) in a furl results in an
         UnescapedHashError Failure.
         """
-        should_fail = [r"test#test", r"#testtest", r"test\\#test"]
-        should_not_fail = [r"test\#test", r"test\\\#test", r"testtest"]
+        should_fail = [r"test#test", r"#testtest", r"test\\#test", r"test\#test",
+                       r"test\\\#test"]
         basedir = "test_client.Basic.test_comment"
         os.mkdir(basedir)

         def write_config(s):
             config = ("[client]\n"
-                      "introducer.furl = %s\n" % s)
+                      "helper.furl = %s\n" % s)
             fileutil.write(os.path.join(basedir, "tahoe.cfg"), config)

         for s in should_fail:
-            self.failUnless(_Config._contains_unescaped_hash(s))
             write_config(s)
             with self.assertRaises(UnescapedHashError) as ctx:
                 yield client.create_client(basedir)
-            self.assertIn("[client]introducer.furl", str(ctx.exception))
-
-        for s in should_not_fail:
-            self.failIf(_Config._contains_unescaped_hash(s))
-            write_config(s)
-            yield client.create_client(basedir)
+            self.assertIn("[client]helper.furl", str(ctx.exception))

     def test_unreadable_config(self):
         if sys.platform == "win32":
@@ -421,75 +423,8 @@ class Basic(testutil.ReallyEqualMixin, unittest.TestCase):
             expected = fileutil.abspath_expanduser_unicode(u"relative", abs_basedir)
             self.failUnlessReallyEqual(w.staticdir, expected)

-        # TODO: also test config options for SFTP.
-
-    @defer.inlineCallbacks
-    def test_ftp_create(self):
-        """
-        configuration for sftpd results in it being started
-        """
-        basedir = u"client.Basic.test_ftp_create"
-        create_node_dir(basedir, "testing")
-        with open(os.path.join(basedir, "tahoe.cfg"), "w") as f:
-            f.write(
-                '[sftpd]\n'
-                'enabled = true\n'
-                'accounts.file = foo\n'
-                'host_pubkey_file = pubkey\n'
-                'host_privkey_file = privkey\n'
-            )
-        with mock.patch('allmydata.frontends.sftpd.SFTPServer') as p:
-            yield client.create_client(basedir)
-        self.assertTrue(p.called)
-
-    @defer.inlineCallbacks
-    def test_ftp_auth_keyfile(self):
-        """
-        ftpd accounts.file is parsed properly
-        """
-        basedir = u"client.Basic.test_ftp_auth_keyfile"
-        os.mkdir(basedir)
-        fileutil.write(os.path.join(basedir, "tahoe.cfg"),
-                       (BASECONFIG +
-                        "[ftpd]\n"
-                        "enabled = true\n"
-                        "port = tcp:0:interface=127.0.0.1\n"
-                        "accounts.file = private/accounts\n"))
-        os.mkdir(os.path.join(basedir, "private"))
-        fileutil.write(os.path.join(basedir, "private", "accounts"), "\n")
-        c = yield client.create_client(basedir) # just make sure it can be instantiated
-        del c
-
-    @defer.inlineCallbacks
-    def test_ftp_auth_url(self):
-        """
-        ftpd accounts.url is parsed properly
-        """
-        basedir = u"client.Basic.test_ftp_auth_url"
-        os.mkdir(basedir)
-        fileutil.write(os.path.join(basedir, "tahoe.cfg"),
-                       (BASECONFIG +
-                        "[ftpd]\n"
-                        "enabled = true\n"
-                        "port = tcp:0:interface=127.0.0.1\n"
-                        "accounts.url = http://0.0.0.0/\n"))
-        c = yield client.create_client(basedir) # just make sure it can be instantiated
-        del c
-
-    @defer.inlineCallbacks
-    def test_ftp_auth_no_accountfile_or_url(self):
-        """
-        ftpd requires some way to look up accounts
-        """
-        basedir = u"client.Basic.test_ftp_auth_no_accountfile_or_url"
-        os.mkdir(basedir)
-        fileutil.write(os.path.join(basedir, "tahoe.cfg"),
-                       (BASECONFIG +
-                        "[ftpd]\n"
-                        "enabled = true\n"
-                        "port = tcp:0:interface=127.0.0.1\n"))
-        with self.assertRaises(NeedRootcapLookupScheme):
-            yield client.create_client(basedir)
+        # TODO: also test config options for SFTP. See Git history for deleted FTP
+        # tests that could be used as a basis for these tests.
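# The rewritten should_fail list above encodes a simple rule: every '#' in a
# furl-bearing option is now rejected, whether or not it is backslash-escaped.
# The sketch below is an illustration of that rule only, not Tahoe's actual
# validation helper; `furl_rejected` is a hypothetical name.

def furl_rejected(value):
    # A plain membership test models the stricter behaviour: escaped
    # (r"test\#test") and unescaped (r"test#test") hashes both fail.
    return "#" in value

assert all(furl_rejected(s) for s in
           [r"test#test", r"#testtest", r"test\\#test",
            r"test\#test", r"test\\\#test"])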
@defer.inlineCallbacks def _storage_dir_test(self, basedir, storage_path, expected_path): @@ -627,8 +562,6 @@ class Basic(testutil.ReallyEqualMixin, unittest.TestCase): self.failIfEqual(str(allmydata.__version__), "unknown") self.failUnless("." in str(allmydata.__full_version__), "non-numeric version in '%s'" % allmydata.__version__) - all_versions = get_package_versions_string() - self.failUnless(allmydata.__appname__ in all_versions) # also test stats stats = c.get_stats() self.failUnless("node.uptime" in stats) @@ -676,12 +609,13 @@ class AnonymousStorage(SyncTestCase): """ If anonymous storage access is enabled then the client announces it. """ - basedir = self.id() - os.makedirs(basedir + b"/private") + basedir = FilePath(self.id()) + basedir.child("private").makedirs() + write_introducer(basedir, "someintroducer", SOME_FURL) config = client.config_from_string( - basedir, + basedir.path, "tub.port", - BASECONFIG_I % (SOME_FURL,) + ( + BASECONFIG + ( "[storage]\n" "enabled = true\n" "anonymous = true\n" @@ -695,7 +629,7 @@ class AnonymousStorage(SyncTestCase): get_published_announcements(node), MatchesListwise([ matches_storage_announcement( - basedir, + basedir.path, anonymous=True, ), ]), @@ -707,12 +641,13 @@ class AnonymousStorage(SyncTestCase): If anonymous storage access is disabled then the client does not announce it nor does it write a fURL for it to beneath the node directory. """ - basedir = self.id() - os.makedirs(basedir + b"/private") + basedir = FilePath(self.id()) + basedir.child("private").makedirs() + write_introducer(basedir, "someintroducer", SOME_FURL) config = client.config_from_string( - basedir, + basedir.path, "tub.port", - BASECONFIG_I % (SOME_FURL,) + ( + BASECONFIG + ( "[storage]\n" "enabled = true\n" "anonymous = false\n" @@ -726,7 +661,7 @@ class AnonymousStorage(SyncTestCase): get_published_announcements(node), MatchesListwise([ matches_storage_announcement( - basedir, + basedir.path, anonymous=False, ), ]), @@ -744,12 +679,12 @@ class AnonymousStorage(SyncTestCase): possible to reach the anonymous storage server via the originally published fURL. """ - basedir = self.id() - os.makedirs(basedir + b"/private") + basedir = FilePath(self.id()) + basedir.child("private").makedirs() enabled_config = client.config_from_string( - basedir, + basedir.path, "tub.port", - BASECONFIG_I % (SOME_FURL,) + ( + BASECONFIG + ( "[storage]\n" "enabled = true\n" "anonymous = true\n" @@ -771,9 +706,9 @@ class AnonymousStorage(SyncTestCase): ) disabled_config = client.config_from_string( - basedir, + basedir.path, "tub.port", - BASECONFIG_I % (SOME_FURL,) + ( + BASECONFIG + ( "[storage]\n" "enabled = true\n" "anonymous = false\n" @@ -793,8 +728,8 @@ class IntroducerClients(unittest.TestCase): def test_invalid_introducer_furl(self): """ - An introducer.furl of 'None' is invalid and causes - create_introducer_clients to fail. + An introducer.furl of 'None' in the deprecated [client]introducer.furl + field is invalid and causes `create_introducer_clients` to fail. """ cfg = ( "[client]\n" @@ -959,20 +894,28 @@ class Run(unittest.TestCase, testutil.StallMixin): @defer.inlineCallbacks def test_loadable(self): - basedir = "test_client.Run.test_loadable" - os.mkdir(basedir) + """ + A configuration consisting only of an introducer can be turned into a + client node. 
+ """ + basedir = FilePath("test_client.Run.test_loadable") + private = basedir.child("private") + private.makedirs() dummy = "pb://wl74cyahejagspqgy4x5ukrvfnevlknt@127.0.0.1:58889/bogus" - fileutil.write(os.path.join(basedir, "tahoe.cfg"), BASECONFIG_I % dummy) - fileutil.write(os.path.join(basedir, client._Client.EXIT_TRIGGER_FILE), "") - yield client.create_client(basedir) + write_introducer(basedir, "someintroducer", dummy) + basedir.child("tahoe.cfg").setContent(BASECONFIG) + basedir.child(client._Client.EXIT_TRIGGER_FILE).touch() + yield client.create_client(basedir.path) @defer.inlineCallbacks def test_reloadable(self): - basedir = "test_client.Run.test_reloadable" - os.mkdir(basedir) + basedir = FilePath("test_client.Run.test_reloadable") + private = basedir.child("private") + private.makedirs() dummy = "pb://wl74cyahejagspqgy4x5ukrvfnevlknt@127.0.0.1:58889/bogus" - fileutil.write(os.path.join(basedir, "tahoe.cfg"), BASECONFIG_I % dummy) - c1 = yield client.create_client(basedir) + write_introducer(basedir, "someintroducer", dummy) + basedir.child("tahoe.cfg").setContent(BASECONFIG) + c1 = yield client.create_client(basedir.path) c1.setServiceParent(self.sparent) # delay to let the service start up completely. I'm not entirely sure @@ -994,11 +937,102 @@ class Run(unittest.TestCase, testutil.StallMixin): # also change _check_exit_trigger to use it instead of a raw # reactor.stop, also instrument the shutdown event in an # attribute that we can check.) - c2 = yield client.create_client(basedir) + c2 = yield client.create_client(basedir.path) c2.setServiceParent(self.sparent) yield c2.disownServiceParent() -class NodeMaker(testutil.ReallyEqualMixin, unittest.TestCase): +class NodeMakerTests(testutil.ReallyEqualMixin, AsyncBrokenTestCase): + + def _make_node_maker(self, mode, writecap, deep_immutable): + """ + Create a callable which can create an ``IFilesystemNode`` provider for the + given cap. + + :param unicode mode: The read/write combination to pass to + ``NodeMaker.create_from_cap``. If it contains ``u"r"`` then a + readcap will be passed in. If it contains ``u"w"`` then a + writecap will be passed in. + + :param IURI writecap: The capability for which to create a node. + + :param bool deep_immutable: Whether to request a "deep immutable" node + which forces the result to be an immutable ``IFilesystemNode`` (I + think -exarkun). + """ + if writecap.is_mutable(): + # It's just not a valid combination to have a mutable alongside + # deep_immutable = True. It's easier to fix deep_immutable than + # writecap to clear up this conflict. + deep_immutable = False + + if "r" in mode: + readcap = writecap.get_readonly().to_string() + else: + readcap = None + if "w" in mode: + writecap = writecap.to_string() + else: + writecap = None + + nm = NodeMaker( + storage_broker=None, + secret_holder=None, + history=None, + uploader=None, + terminator=None, + default_encoding_parameters={u"k": 1, u"n": 1}, + mutable_file_default=None, + key_generator=None, + blacklist=None, + ) + return partial( + nm.create_from_cap, + writecap, + readcap, + deep_immutable, + ) + + @given( + mode=sampled_from(["w", "r", "rw"]), + writecap=write_capabilities(), + deep_immutable=booleans(), + ) + def test_cached_result(self, mode, writecap, deep_immutable): + """ + ``NodeMaker.create_from_cap`` returns the same object when called with the + same arguments. 
+ """ + make_node = self._make_node_maker(mode, writecap, deep_immutable) + original = make_node() + additional = make_node() + + self.assertThat( + original, + Is(additional), + ) + + @given( + mode=sampled_from(["w", "r", "rw"]), + writecap=write_capabilities(), + deep_immutable=booleans(), + ) + def test_cache_expired(self, mode, writecap, deep_immutable): + """ + After the node object returned by an earlier call to + ``NodeMaker.create_from_cap`` has been garbage collected, a new call + to ``NodeMaker.create_from_cap`` returns a node object, maybe even a + new one although we can't really prove it. + """ + make_node = self._make_node_maker(mode, writecap, deep_immutable) + make_node() + additional = make_node() + self.assertThat( + additional, + AfterPreprocessing( + lambda node: node.get_readonly_uri(), + Equals(writecap.get_readonly().to_string()), + ), + ) @defer.inlineCallbacks def test_maker(self): @@ -1133,12 +1167,18 @@ class StorageAnnouncementTests(SyncTestCase): """ def setUp(self): super(StorageAnnouncementTests, self).setUp() - self.basedir = self.useFixture(TempDir()).path - create_node_dir(self.basedir, u"") + self.basedir = FilePath(self.useFixture(TempDir()).path) + create_node_dir(self.basedir.path, u"") + # Write an introducer configuration or we can't observer + # announcements. + write_introducer(self.basedir, "someintroducer", SOME_FURL) def get_config(self, storage_enabled, more_storage="", more_sections=""): return """ +[client] +# Empty + [node] tub.location = tcp:192.0.2.0:1234 @@ -1146,9 +1186,6 @@ tub.location = tcp:192.0.2.0:1234 enabled = {storage_enabled} {more_storage} -[client] -introducer.furl = pb://abcde@nowhere/fake - {more_sections} """.format( storage_enabled=storage_enabled, @@ -1162,7 +1199,7 @@ introducer.furl = pb://abcde@nowhere/fake No storage announcement is published if storage is not enabled. """ config = client.config_from_string( - self.basedir, + self.basedir.path, "tub.port", self.get_config(storage_enabled=False), ) @@ -1184,7 +1221,7 @@ introducer.furl = pb://abcde@nowhere/fake storage is enabled. """ config = client.config_from_string( - self.basedir, + self.basedir.path, "tub.port", self.get_config(storage_enabled=True), ) @@ -1201,7 +1238,7 @@ introducer.furl = pb://abcde@nowhere/fake # Match the following list (of one element) ... MatchesListwise([ # The only element in the list ... 
- matches_storage_announcement(self.basedir), + matches_storage_announcement(self.basedir.path), ]), )), ) @@ -1216,7 +1253,7 @@ introducer.furl = pb://abcde@nowhere/fake value = u"thing" config = client.config_from_string( - self.basedir, + self.basedir.path, "tub.port", self.get_config( storage_enabled=True, @@ -1236,7 +1273,7 @@ introducer.furl = pb://abcde@nowhere/fake get_published_announcements, MatchesListwise([ matches_storage_announcement( - self.basedir, + self.basedir.path, options=[ matches_dummy_announcement( u"tahoe-lafs-dummy-v1", @@ -1257,7 +1294,7 @@ introducer.furl = pb://abcde@nowhere/fake self.useFixture(UseTestPlugins()) config = client.config_from_string( - self.basedir, + self.basedir.path, "tub.port", self.get_config( storage_enabled=True, @@ -1279,7 +1316,7 @@ introducer.furl = pb://abcde@nowhere/fake get_published_announcements, MatchesListwise([ matches_storage_announcement( - self.basedir, + self.basedir.path, options=[ matches_dummy_announcement( u"tahoe-lafs-dummy-v1", @@ -1305,7 +1342,7 @@ introducer.furl = pb://abcde@nowhere/fake self.useFixture(UseTestPlugins()) config = client.config_from_string( - self.basedir, + self.basedir.path, "tub.port", self.get_config( storage_enabled=True, @@ -1341,7 +1378,7 @@ introducer.furl = pb://abcde@nowhere/fake self.useFixture(UseTestPlugins()) config = client.config_from_string( - self.basedir, + self.basedir.path, "tub.port", self.get_config( storage_enabled=True, @@ -1357,7 +1394,7 @@ introducer.furl = pb://abcde@nowhere/fake get_published_announcements, MatchesListwise([ matches_storage_announcement( - self.basedir, + self.basedir.path, options=[ matches_dummy_announcement( u"tahoe-lafs-dummy-v1", @@ -1379,7 +1416,7 @@ introducer.furl = pb://abcde@nowhere/fake self.useFixture(UseTestPlugins()) config = client.config_from_string( - self.basedir, + self.basedir.path, "tub.port", self.get_config( storage_enabled=True, @@ -1406,7 +1443,7 @@ introducer.furl = pb://abcde@nowhere/fake available on the system. """ config = client.config_from_string( - self.basedir, + self.basedir.path, "tub.port", self.get_config( storage_enabled=True, diff --git a/src/allmydata/test/test_configutil.py b/src/allmydata/test/test_configutil.py index c57381289..1b8fb5029 100644 --- a/src/allmydata/test/test_configutil.py +++ b/src/allmydata/test/test_configutil.py @@ -14,12 +14,89 @@ if PY2: from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, list, object, range, str, max, min # noqa: F401 import os.path +from configparser import ( + ConfigParser, +) +from functools import ( + partial, +) +from hypothesis import ( + given, +) +from hypothesis.strategies import ( + dictionaries, + text, + characters, +) + +from twisted.python.filepath import ( + FilePath, +) from twisted.trial import unittest from allmydata.util import configutil +def arbitrary_config_dicts( + min_sections=0, + max_sections=3, + max_section_name_size=8, + max_items_per_section=3, + max_item_length=8, + max_value_length=8, +): + """ + Build ``dict[str, dict[str, str]]`` instances populated with arbitrary + configurations. 
+ """ + identifier_text = partial( + text, + # Don't allow most control characters or spaces + alphabet=characters( + blacklist_categories=('Cc', 'Cs', 'Zs'), + ), + ) + return dictionaries( + identifier_text( + min_size=1, + max_size=max_section_name_size, + ), + dictionaries( + identifier_text( + min_size=1, + max_size=max_item_length, + ), + text(max_size=max_value_length), + max_size=max_items_per_section, + ), + min_size=min_sections, + max_size=max_sections, + ) + + +def to_configparser(dictconfig): + """ + Take a ``dict[str, dict[str, str]]`` and turn it into the corresponding + populated ``ConfigParser`` instance. + """ + cp = ConfigParser() + for section, items in dictconfig.items(): + cp.add_section(section) + for k, v in items.items(): + cp.set( + section, + k, + # ConfigParser has a feature that everyone knows and loves + # where it will use %-style interpolation to substitute + # values from one part of the config into another part of + # the config. Escape all our `%`s to avoid hitting this + # and complicating things. + v.replace("%", "%%"), + ) + return cp + + class ConfigUtilTests(unittest.TestCase): def setUp(self): super(ConfigUtilTests, self).setUp() @@ -55,7 +132,7 @@ enabled = false # test that set_config can mutate an existing option configutil.set_config(config, "node", "nickname", "Alice!") - configutil.write_config(tahoe_cfg, config) + configutil.write_config(FilePath(tahoe_cfg), config) config = configutil.get_config(tahoe_cfg) self.failUnlessEqual(config.get("node", "nickname"), "Alice!") @@ -63,19 +140,21 @@ enabled = false # test that set_config can set a new option descriptor = "Twas brillig, and the slithy toves Did gyre and gimble in the wabe" configutil.set_config(config, "node", "descriptor", descriptor) - configutil.write_config(tahoe_cfg, config) + configutil.write_config(FilePath(tahoe_cfg), config) config = configutil.get_config(tahoe_cfg) self.failUnlessEqual(config.get("node", "descriptor"), descriptor) def test_config_validation_success(self): - fname = self.create_tahoe_cfg('[node]\nvalid = foo\n') - - config = configutil.get_config(fname) + """ + ``configutil.validate_config`` returns ``None`` when the configuration it + is given has nothing more than the static sections and items defined + by the validator. + """ # should succeed, no exceptions configutil.validate_config( - fname, - config, + "", + to_configparser({"node": {"valid": "foo"}}), self.static_valid_config, ) @@ -85,24 +164,20 @@ enabled = false validation but are matched by the dynamic validation is considered valid. """ - fname = self.create_tahoe_cfg('[node]\nvalid = foo\n') - - config = configutil.get_config(fname) # should succeed, no exceptions configutil.validate_config( - fname, - config, + "", + to_configparser({"node": {"valid": "foo"}}), self.dynamic_valid_config, ) def test_config_validation_invalid_item(self): - fname = self.create_tahoe_cfg('[node]\nvalid = foo\ninvalid = foo\n') - - config = configutil.get_config(fname) + config = to_configparser({"node": {"valid": "foo", "invalid": "foo"}}) e = self.assertRaises( configutil.UnknownConfigError, configutil.validate_config, - fname, config, + "", + config, self.static_valid_config, ) self.assertIn("section [node] contains unknown option 'invalid'", str(e)) @@ -112,13 +187,12 @@ enabled = false A configuration with a section that is matched by neither the static nor dynamic validators is rejected. 
""" - fname = self.create_tahoe_cfg('[node]\nvalid = foo\n[invalid]\n') - - config = configutil.get_config(fname) + config = to_configparser({"node": {"valid": "foo"}, "invalid": {}}) e = self.assertRaises( configutil.UnknownConfigError, configutil.validate_config, - fname, config, + "", + config, self.static_valid_config, ) self.assertIn("contains unknown section [invalid]", str(e)) @@ -128,13 +202,12 @@ enabled = false A configuration with a section that is matched by neither the static nor dynamic validators is rejected. """ - fname = self.create_tahoe_cfg('[node]\nvalid = foo\n[invalid]\n') - - config = configutil.get_config(fname) + config = to_configparser({"node": {"valid": "foo"}, "invalid": {}}) e = self.assertRaises( configutil.UnknownConfigError, configutil.validate_config, - fname, config, + "", + config, self.dynamic_valid_config, ) self.assertIn("contains unknown section [invalid]", str(e)) @@ -144,13 +217,79 @@ enabled = false A configuration with a section, item pair that is matched by neither the static nor dynamic validators is rejected. """ - fname = self.create_tahoe_cfg('[node]\nvalid = foo\ninvalid = foo\n') - - config = configutil.get_config(fname) + config = to_configparser({"node": {"valid": "foo", "invalid": "foo"}}) e = self.assertRaises( configutil.UnknownConfigError, configutil.validate_config, - fname, config, + "", + config, self.dynamic_valid_config, ) self.assertIn("section [node] contains unknown option 'invalid'", str(e)) + + def test_duplicate_sections(self): + """ + Duplicate section names are merged. + """ + fname = self.create_tahoe_cfg('[node]\na = foo\n[node]\n b = bar\n') + config = configutil.get_config(fname) + self.assertEqual(config.get("node", "a"), "foo") + self.assertEqual(config.get("node", "b"), "bar") + + @given(arbitrary_config_dicts()) + def test_everything_valid(self, cfgdict): + """ + ``validate_config`` returns ``None`` when the validator is + ``ValidConfiguration.everything()``. + """ + cfg = to_configparser(cfgdict) + self.assertIs( + configutil.validate_config( + "", + cfg, + configutil.ValidConfiguration.everything(), + ), + None, + ) + + @given(arbitrary_config_dicts(min_sections=1)) + def test_nothing_valid(self, cfgdict): + """ + ``validate_config`` raises ``UnknownConfigError`` when the validator is + ``ValidConfiguration.nothing()`` for all non-empty configurations. + """ + cfg = to_configparser(cfgdict) + with self.assertRaises(configutil.UnknownConfigError): + configutil.validate_config( + "", + cfg, + configutil.ValidConfiguration.nothing(), + ) + + def test_nothing_empty_valid(self): + """ + ``validate_config`` returns ``None`` when the validator is + ``ValidConfiguration.nothing()`` if the configuration is empty. + """ + cfg = ConfigParser() + self.assertIs( + configutil.validate_config( + "", + cfg, + configutil.ValidConfiguration.nothing(), + ), + None, + ) + + @given(arbitrary_config_dicts()) + def test_copy_config(self, cfgdict): + """ + ``copy_config`` creates a new ``ConfigParser`` object containing the same + values as its input. + """ + cfg = to_configparser(cfgdict) + copied = configutil.copy_config(cfg) + # Should be equal + self.assertEqual(cfg, copied) + # But not because they're the same object. 
+        self.assertIsNot(cfg, copied)
diff --git a/src/allmydata/test/test_connections.py b/src/allmydata/test/test_connections.py
index 9b5bd7f30..7a24ac794 100644
--- a/src/allmydata/test/test_connections.py
+++ b/src/allmydata/test/test_connections.py
@@ -1,149 +1,69 @@
-import os
-import mock
+
 from twisted.trial import unittest
-from twisted.internet import reactor, endpoints, defer
-from twisted.internet.interfaces import IStreamClientEndpoint
+from twisted.internet import reactor
+
 from foolscap.connections import tcp
+
+from testtools.matchers import (
+    MatchesDict,
+    IsInstance,
+    Equals,
+)
+
 from ..node import PrivacyError, config_from_string
 from ..node import create_connection_handlers
-from ..node import create_main_tub, _tub_portlocation
+from ..node import create_main_tub
 from ..util.i2p_provider import create as create_i2p_provider
 from ..util.tor_provider import create as create_tor_provider
+from .common import (
+    SyncTestCase,
+    ConstantAddresses,
+)
+
+
 BASECONFIG = ""

-class TCP(unittest.TestCase):
-
-    def test_default(self):
+class CreateConnectionHandlersTests(SyncTestCase):
+    """
+    Tests for the Foolscap connection handlers returned by
+    ``create_connection_handlers``.
+    """
+    def test_foolscap_handlers(self):
+        """
+        ``create_connection_handlers`` returns a Foolscap connection handlers
+        dictionary mapping ``"tcp"`` to
+        ``foolscap.connections.tcp.DefaultTCP``, ``"tor"`` to the supplied Tor
+        provider's handler, and ``"i2p"`` to the supplied I2P provider's
+        handler.
+        """
         config = config_from_string(
             "fake.port",
             "no-basedir",
             BASECONFIG,
         )
-        _, foolscap_handlers = create_connection_handlers(None, config, mock.Mock(), mock.Mock())
-        self.assertIsInstance(
-            foolscap_handlers['tcp'],
-            tcp.DefaultTCP,
+        tor_endpoint = object()
+        tor = ConstantAddresses(handler=tor_endpoint)
+        i2p_endpoint = object()
+        i2p = ConstantAddresses(handler=i2p_endpoint)
+        _, foolscap_handlers = create_connection_handlers(
+            config,
+            i2p,
+            tor,
+        )
+        self.assertThat(
+            foolscap_handlers,
+            MatchesDict({
+                "tcp": IsInstance(tcp.DefaultTCP),
+                "i2p": Equals(i2p_endpoint),
+                "tor": Equals(tor_endpoint),
+            }),
         )

 class Tor(unittest.TestCase):
-    def test_disabled(self):
-        config = config_from_string(
-            "fake.port",
-            "no-basedir",
-            BASECONFIG + "[tor]\nenabled = false\n",
-        )
-        tor_provider = create_tor_provider(reactor, config)
-        h = tor_provider.get_tor_handler()
-        self.assertEqual(h, None)
-
-    def test_unimportable(self):
-        with mock.patch("allmydata.util.tor_provider._import_tor",
-                        return_value=None):
-            config = config_from_string("fake.port", "no-basedir", BASECONFIG)
-            tor_provider = create_tor_provider(reactor, config)
-            h = tor_provider.get_tor_handler()
-            self.assertEqual(h, None)
-
-    def test_default(self):
-        h1 = mock.Mock()
-        with mock.patch("foolscap.connections.tor.default_socks",
-                        return_value=h1) as f:
-
-            config = config_from_string("fake.port", "no-basedir", BASECONFIG)
-            tor_provider = create_tor_provider(reactor, config)
-            h = tor_provider.get_tor_handler()
-        self.assertEqual(f.mock_calls, [mock.call()])
-        self.assertIdentical(h, h1)
-
-    def _do_test_launch(self, executable):
-        # the handler is created right away
-        config = BASECONFIG+"[tor]\nlaunch = true\n"
-        if executable:
-            config += "tor.executable = %s\n" % executable
-        h1 = mock.Mock()
-        with mock.patch("foolscap.connections.tor.control_endpoint_maker",
-                        return_value=h1) as f:
-
-            config = config_from_string("fake.port", ".", config)
-            tp = create_tor_provider("reactor", config)
-            h = tp.get_tor_handler()
-
-
private_dir = config.get_config_path("private") - exp = mock.call(tp._make_control_endpoint, - takes_status=True) - self.assertEqual(f.mock_calls, [exp]) - self.assertIdentical(h, h1) - - # later, when Foolscap first connects, Tor should be launched - reactor = "reactor" - tcp = object() - tcep = object() - launch_tor = mock.Mock(return_value=defer.succeed(("ep_desc", tcp))) - cfs = mock.Mock(return_value=tcep) - with mock.patch("allmydata.util.tor_provider._launch_tor", launch_tor): - with mock.patch("allmydata.util.tor_provider.clientFromString", cfs): - d = tp._make_control_endpoint(reactor, - update_status=lambda status: None) - cep = self.successResultOf(d) - launch_tor.assert_called_with(reactor, executable, - os.path.abspath(private_dir), - tp._txtorcon) - cfs.assert_called_with(reactor, "ep_desc") - self.assertIs(cep, tcep) - - def test_launch(self): - self._do_test_launch(None) - - def test_launch_executable(self): - self._do_test_launch("/special/tor") - - def test_socksport_unix_endpoint(self): - h1 = mock.Mock() - with mock.patch("foolscap.connections.tor.socks_endpoint", - return_value=h1) as f: - config = config_from_string( - "fake.port", - "no-basedir", - BASECONFIG + "[tor]\nsocks.port = unix:/var/lib/fw-daemon/tor_socks.socket\n", - ) - tor_provider = create_tor_provider(reactor, config) - h = tor_provider.get_tor_handler() - self.assertTrue(IStreamClientEndpoint.providedBy(f.mock_calls[0][1][0])) - self.assertIdentical(h, h1) - - def test_socksport_endpoint(self): - h1 = mock.Mock() - with mock.patch("foolscap.connections.tor.socks_endpoint", - return_value=h1) as f: - config = config_from_string( - "fake.port", - "no-basedir", - BASECONFIG + "[tor]\nsocks.port = tcp:127.0.0.1:1234\n", - ) - tor_provider = create_tor_provider(reactor, config) - h = tor_provider.get_tor_handler() - self.assertTrue(IStreamClientEndpoint.providedBy(f.mock_calls[0][1][0])) - self.assertIdentical(h, h1) - - def test_socksport_endpoint_otherhost(self): - h1 = mock.Mock() - with mock.patch("foolscap.connections.tor.socks_endpoint", - return_value=h1) as f: - config = config_from_string( - "no-basedir", - "fake.port", - BASECONFIG + "[tor]\nsocks.port = tcp:otherhost:1234\n", - ) - tor_provider = create_tor_provider(reactor, config) - h = tor_provider.get_tor_handler() - self.assertTrue(IStreamClientEndpoint.providedBy(f.mock_calls[0][1][0])) - self.assertIdentical(h, h1) - def test_socksport_bad_endpoint(self): config = config_from_string( "fake.port", @@ -168,77 +88,16 @@ class Tor(unittest.TestCase): tor_provider = create_tor_provider(reactor, config) tor_provider.get_tor_handler() self.assertIn( - "invalid literal for int() with base 10: 'kumquat'", + "invalid literal for int()", + str(ctx.exception) + ) + self.assertIn( + "kumquat", str(ctx.exception) ) - def test_controlport(self): - h1 = mock.Mock() - with mock.patch("foolscap.connections.tor.control_endpoint", - return_value=h1) as f: - config = config_from_string( - "fake.port", - "no-basedir", - BASECONFIG + "[tor]\ncontrol.port = tcp:localhost:1234\n", - ) - tor_provider = create_tor_provider(reactor, config) - h = tor_provider.get_tor_handler() - self.assertEqual(len(f.mock_calls), 1) - ep = f.mock_calls[0][1][0] - self.assertIsInstance(ep, endpoints.TCP4ClientEndpoint) - self.assertIdentical(h, h1) - class I2P(unittest.TestCase): - def test_disabled(self): - config = config_from_string( - "fake.port", - "no-basedir", - BASECONFIG + "[i2p]\nenabled = false\n", - ) - i2p_provider = create_i2p_provider(None, config) - h = 
i2p_provider.get_i2p_handler() - self.assertEqual(h, None) - - def test_unimportable(self): - config = config_from_string( - "fake.port", - "no-basedir", - BASECONFIG, - ) - with mock.patch("allmydata.util.i2p_provider._import_i2p", - return_value=None): - i2p_provider = create_i2p_provider(reactor, config) - h = i2p_provider.get_i2p_handler() - self.assertEqual(h, None) - - def test_default(self): - config = config_from_string("fake.port", "no-basedir", BASECONFIG) - h1 = mock.Mock() - with mock.patch("foolscap.connections.i2p.default", - return_value=h1) as f: - i2p_provider = create_i2p_provider(reactor, config) - h = i2p_provider.get_i2p_handler() - self.assertEqual(f.mock_calls, [mock.call(reactor, keyfile=None)]) - self.assertIdentical(h, h1) - - def test_samport(self): - config = config_from_string( - "fake.port", - "no-basedir", - BASECONFIG + "[i2p]\nsam.port = tcp:localhost:1234\n", - ) - h1 = mock.Mock() - with mock.patch("foolscap.connections.i2p.sam_endpoint", - return_value=h1) as f: - i2p_provider = create_i2p_provider(reactor, config) - h = i2p_provider.get_i2p_handler() - - self.assertEqual(len(f.mock_calls), 1) - ep = f.mock_calls[0][1][0] - self.assertIsInstance(ep, endpoints.TCP4ClientEndpoint) - self.assertIdentical(h, h1) - def test_samport_and_launch(self): config = config_from_string( "no-basedir", @@ -254,82 +113,6 @@ class I2P(unittest.TestCase): str(ctx.exception) ) - def test_launch(self): - config = config_from_string( - "fake.port", - "no-basedir", - BASECONFIG + "[i2p]\nlaunch = true\n", - ) - h1 = mock.Mock() - with mock.patch("foolscap.connections.i2p.launch", - return_value=h1) as f: - i2p_provider = create_i2p_provider(reactor, config) - h = i2p_provider.get_i2p_handler() - exp = mock.call(i2p_configdir=None, i2p_binary=None) - self.assertEqual(f.mock_calls, [exp]) - self.assertIdentical(h, h1) - - def test_launch_executable(self): - config = config_from_string( - "fake.port", - "no-basedir", - BASECONFIG + "[i2p]\nlaunch = true\n" + "i2p.executable = i2p\n", - ) - h1 = mock.Mock() - with mock.patch("foolscap.connections.i2p.launch", - return_value=h1) as f: - i2p_provider = create_i2p_provider(reactor, config) - h = i2p_provider.get_i2p_handler() - exp = mock.call(i2p_configdir=None, i2p_binary="i2p") - self.assertEqual(f.mock_calls, [exp]) - self.assertIdentical(h, h1) - - def test_launch_configdir(self): - config = config_from_string( - "fake.port", - "no-basedir", - BASECONFIG + "[i2p]\nlaunch = true\n" + "i2p.configdir = cfg\n", - ) - h1 = mock.Mock() - with mock.patch("foolscap.connections.i2p.launch", - return_value=h1) as f: - i2p_provider = create_i2p_provider(reactor, config) - h = i2p_provider.get_i2p_handler() - exp = mock.call(i2p_configdir="cfg", i2p_binary=None) - self.assertEqual(f.mock_calls, [exp]) - self.assertIdentical(h, h1) - - def test_launch_configdir_and_executable(self): - config = config_from_string( - "no-basedir", - "fake.port", - BASECONFIG + "[i2p]\nlaunch = true\n" + - "i2p.executable = i2p\n" + "i2p.configdir = cfg\n", - ) - h1 = mock.Mock() - with mock.patch("foolscap.connections.i2p.launch", - return_value=h1) as f: - i2p_provider = create_i2p_provider(reactor, config) - h = i2p_provider.get_i2p_handler() - exp = mock.call(i2p_configdir="cfg", i2p_binary="i2p") - self.assertEqual(f.mock_calls, [exp]) - self.assertIdentical(h, h1) - - def test_configdir(self): - config = config_from_string( - "fake.port", - "no-basedir", - BASECONFIG + "[i2p]\ni2p.configdir = cfg\n", - ) - h1 = mock.Mock() - with 
mock.patch("foolscap.connections.i2p.local_i2p", - return_value=h1) as f: - i2p_provider = create_i2p_provider(None, config) - h = i2p_provider.get_i2p_handler() - - self.assertEqual(f.mock_calls, [mock.call("cfg")]) - self.assertIdentical(h, h1) - class Connections(unittest.TestCase): def setUp(self): @@ -337,7 +120,11 @@ class Connections(unittest.TestCase): self.config = config_from_string("fake.port", self.basedir, BASECONFIG) def test_default(self): - default_connection_handlers, _ = create_connection_handlers(None, self.config, mock.Mock(), mock.Mock()) + default_connection_handlers, _ = create_connection_handlers( + self.config, + ConstantAddresses(handler=object()), + ConstantAddresses(handler=object()), + ) self.assertEqual(default_connection_handlers["tcp"], "tcp") self.assertEqual(default_connection_handlers["tor"], "tor") self.assertEqual(default_connection_handlers["i2p"], "i2p") @@ -348,23 +135,39 @@ class Connections(unittest.TestCase): "no-basedir", BASECONFIG + "[connections]\ntcp = tor\n", ) - default_connection_handlers, _ = create_connection_handlers(None, config, mock.Mock(), mock.Mock()) + default_connection_handlers, _ = create_connection_handlers( + config, + ConstantAddresses(handler=object()), + ConstantAddresses(handler=object()), + ) self.assertEqual(default_connection_handlers["tcp"], "tor") self.assertEqual(default_connection_handlers["tor"], "tor") self.assertEqual(default_connection_handlers["i2p"], "i2p") def test_tor_unimportable(self): - with mock.patch("allmydata.util.tor_provider._import_tor", - return_value=None): - self.config = config_from_string( - "fake.port", - "no-basedir", - BASECONFIG + "[connections]\ntcp = tor\n", + """ + If the configuration calls for substituting Tor for TCP and + ``foolscap.connections.tor`` is not importable then + ``create_connection_handlers`` raises ``ValueError`` with a message + explaining this makes Tor unusable. 
+ """ + self.config = config_from_string( + "fake.port", + "no-basedir", + BASECONFIG + "[connections]\ntcp = tor\n", + ) + tor_provider = create_tor_provider( + reactor, + self.config, + import_tor=lambda: None, + ) + with self.assertRaises(ValueError) as ctx: + default_connection_handlers, _ = create_connection_handlers( + self.config, + i2p_provider=ConstantAddresses(handler=object()), + tor_provider=tor_provider, ) - with self.assertRaises(ValueError) as ctx: - tor_provider = create_tor_provider(reactor, self.config) - default_connection_handlers, _ = create_connection_handlers(None, self.config, mock.Mock(), tor_provider) self.assertEqual( str(ctx.exception), "'tahoe.cfg [connections] tcp='" @@ -379,7 +182,11 @@ class Connections(unittest.TestCase): BASECONFIG + "[connections]\ntcp = unknown\n", ) with self.assertRaises(ValueError) as ctx: - create_connection_handlers(None, config, mock.Mock(), mock.Mock()) + create_connection_handlers( + config, + ConstantAddresses(handler=object()), + ConstantAddresses(handler=object()), + ) self.assertIn("'tahoe.cfg [connections] tcp='", str(ctx.exception)) self.assertIn("uses unknown handler type 'unknown'", str(ctx.exception)) @@ -389,7 +196,11 @@ class Connections(unittest.TestCase): "no-basedir", BASECONFIG + "[connections]\ntcp = disabled\n", ) - default_connection_handlers, _ = create_connection_handlers(None, config, mock.Mock(), mock.Mock()) + default_connection_handlers, _ = create_connection_handlers( + config, + ConstantAddresses(handler=object()), + ConstantAddresses(handler=object()), + ) self.assertEqual(default_connection_handlers["tcp"], None) self.assertEqual(default_connection_handlers["tor"], "tor") self.assertEqual(default_connection_handlers["i2p"], "i2p") @@ -404,11 +215,16 @@ class Privacy(unittest.TestCase): ) with self.assertRaises(PrivacyError) as ctx: - create_connection_handlers(None, config, mock.Mock(), mock.Mock()) + create_connection_handlers( + config, + ConstantAddresses(handler=object()), + ConstantAddresses(handler=object()), + ) self.assertEqual( str(ctx.exception), - "tcp = tcp, must be set to 'tor' or 'disabled'", + "Privacy requested with `reveal-IP-address = false` " + "but `tcp = tcp` conflicts with this.", ) def test_connections_tcp_disabled(self): @@ -418,7 +234,11 @@ class Privacy(unittest.TestCase): BASECONFIG + "[connections]\ntcp = disabled\n" + "[node]\nreveal-IP-address = false\n", ) - default_connection_handlers, _ = create_connection_handlers(None, config, mock.Mock(), mock.Mock()) + default_connection_handlers, _ = create_connection_handlers( + config, + ConstantAddresses(handler=object()), + ConstantAddresses(handler=object()), + ) self.assertEqual(default_connection_handlers["tcp"], None) def test_tub_location_auto(self): @@ -429,36 +249,15 @@ class Privacy(unittest.TestCase): ) with self.assertRaises(PrivacyError) as ctx: - create_main_tub(config, {}, {}, {}, mock.Mock(), mock.Mock()) + create_main_tub( + config, + tub_options={}, + default_connection_handlers={}, + foolscap_connection_handlers={}, + i2p_provider=ConstantAddresses(), + tor_provider=ConstantAddresses(), + ) self.assertEqual( str(ctx.exception), "tub.location uses AUTO", ) - - def test_tub_location_tcp(self): - config = config_from_string( - "fake.port", - "no-basedir", - BASECONFIG + "[node]\nreveal-IP-address = false\ntub.location=tcp:hostname:1234\n", - ) - with self.assertRaises(PrivacyError) as ctx: - _tub_portlocation(config) - self.assertEqual( - str(ctx.exception), - "tub.location includes tcp: hint", - ) - - def 
test_tub_location_legacy_tcp(self): - config = config_from_string( - "fake.port", - "no-basedir", - BASECONFIG + "[node]\nreveal-IP-address = false\ntub.location=hostname:1234\n", - ) - - with self.assertRaises(PrivacyError) as ctx: - _tub_portlocation(config) - - self.assertEqual( - str(ctx.exception), - "tub.location includes tcp: hint", - ) diff --git a/src/allmydata/test/test_deferredutil.py b/src/allmydata/test/test_deferredutil.py index 6ebc93556..2a155089f 100644 --- a/src/allmydata/test/test_deferredutil.py +++ b/src/allmydata/test/test_deferredutil.py @@ -74,3 +74,58 @@ class DeferredUtilTests(unittest.TestCase, deferredutil.WaitForDelayedCallsMixin d = defer.succeed(None) d.addBoth(self.wait_for_delayed_calls) return d + + +class UntilTests(unittest.TestCase): + """ + Tests for ``deferredutil.until``. + """ + def test_exception(self): + """ + If the action raises an exception, the ``Deferred`` returned by ``until`` + fires with a ``Failure``. + """ + self.assertFailure( + deferredutil.until(lambda: 1/0, lambda: True), + ZeroDivisionError, + ) + + def test_stops_on_condition(self): + """ + The action is called repeatedly until ``condition`` returns ``True``. + """ + calls = [] + def action(): + calls.append(None) + + def condition(): + return len(calls) == 3 + + self.assertIs( + self.successResultOf( + deferredutil.until(action, condition), + ), + None, + ) + self.assertEqual(3, len(calls)) + + def test_waits_for_deferred(self): + """ + If the action returns a ``Deferred`` then it is called again when the + ``Deferred`` fires. + """ + counter = [0] + r1 = defer.Deferred() + r2 = defer.Deferred() + results = [r1, r2] + def action(): + counter[0] += 1 + return results.pop(0) + + def condition(): + return False + + deferredutil.until(action, condition) + self.assertEqual([1], counter) + r1.callback(None) + self.assertEqual([2], counter) diff --git a/src/allmydata/test/test_dictutil.py b/src/allmydata/test/test_dictutil.py index 9b7124114..7e26a6ed9 100644 --- a/src/allmydata/test/test_dictutil.py +++ b/src/allmydata/test/test_dictutil.py @@ -8,9 +8,12 @@ from __future__ import division from __future__ import print_function from __future__ import unicode_literals -from future.utils import PY2 +from future.utils import PY2, PY3 if PY2: - from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 + # dict omitted to match dictutil.py. 
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, list, object, range, str, max, min # noqa: F401
+
+from unittest import skipIf
+
 from twisted.trial import unittest
@@ -88,3 +91,80 @@ class DictUtil(unittest.TestCase):
         self.failUnlessEqual(sorted(d.keys()), ["one","two"])
         self.failUnlessEqual(d["one"], 1)
         self.failUnlessEqual(d.get_aux("one"), None)
+
+
+class TypedKeyDict(unittest.TestCase):
+    """Tests for dictionaries that limit keys."""
+
+    @skipIf(PY2, "Python 2 doesn't have issues mixing bytes and unicode.")
+    def setUp(self):
+        pass
+
+    def test_bytes(self):
+        """BytesKeyDict is limited to just byte keys."""
+        self.assertRaises(TypeError, dictutil.BytesKeyDict, {u"hello": 123})
+        d = dictutil.BytesKeyDict({b"123": 200})
+        with self.assertRaises(TypeError):
+            d[u"hello"] = "blah"
+        with self.assertRaises(TypeError):
+            d[u"hello"]
+        with self.assertRaises(TypeError):
+            del d[u"hello"]
+        with self.assertRaises(TypeError):
+            d.setdefault(u"hello", "123")
+        with self.assertRaises(TypeError):
+            d.get(u"xcd")
+
+        # Byte keys are fine:
+        self.assertEqual(d, {b"123": 200})
+        d[b"456"] = 400
+        self.assertEqual(d[b"456"], 400)
+        del d[b"456"]
+        self.assertEqual(d.get(b"456", 50), 50)
+        self.assertEqual(d.setdefault(b"456", 300), 300)
+        self.assertEqual(d[b"456"], 300)
+
+    def test_unicode(self):
+        """UnicodeKeyDict is limited to just unicode keys."""
+        self.assertRaises(TypeError, dictutil.UnicodeKeyDict, {b"hello": 123})
+        d = dictutil.UnicodeKeyDict({u"123": 200})
+        with self.assertRaises(TypeError):
+            d[b"hello"] = "blah"
+        with self.assertRaises(TypeError):
+            d[b"hello"]
+        with self.assertRaises(TypeError):
+            del d[b"hello"]
+        with self.assertRaises(TypeError):
+            d.setdefault(b"hello", "123")
+        with self.assertRaises(TypeError):
+            d.get(b"xcd")
+
+        # Unicode keys are fine:
+        self.assertEqual(d, {u"123": 200})
+        d[u"456"] = 400
+        self.assertEqual(d[u"456"], 400)
+        del d[u"456"]
+        self.assertEqual(d.get(u"456", 50), 50)
+        self.assertEqual(d.setdefault(u"456", 300), 300)
+        self.assertEqual(d[u"456"], 300)
+
+
+class TypedKeyDictPython2(unittest.TestCase):
+    """Tests for dictionaries that limit keys on Python 2."""
+
+    @skipIf(PY3, "Testing Python 2 behavior.")
+    def test_python2(self):
+        """
+        On Python2, BytesKeyDict and UnicodeKeyDict are unnecessary, because
+        dicts can mix both without problem so you don't get confusing behavior
+        if you get the type wrong.
+
+        Eventually in a Python 3-only world mixing bytes and unicode will be
+        bad, thus the existence of these classes, but as we port there will be
+        situations where it's mixed on Python 2, which again is fine.
+        """
+        self.assertIs(dictutil.UnicodeKeyDict, dict)
+        self.assertIs(dictutil.BytesKeyDict, dict)
+        # Demonstration of how bytes and unicode can be mixed:
+        d = {u"abc": 1}
+        self.assertEqual(d[b"abc"], 1)
diff --git a/src/allmydata/test/test_dirnode.py b/src/allmydata/test/test_dirnode.py
index 48ffff45a..8e5e59b46 100644
--- a/src/allmydata/test/test_dirnode.py
+++ b/src/allmydata/test/test_dirnode.py
@@ -1,5 +1,19 @@
-"""Tests for the dirnode module."""
-import six
+"""Tests for the dirnode module.
+
+Ported to Python 3.
+""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from past.builtins import long + +from future.utils import PY2 +if PY2: + # Skip list() since it results in spurious test failures + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, object, range, str, max, min # noqa: F401 + import time import unicodedata from zope.interface import implementer @@ -31,9 +45,6 @@ import allmydata.test.common_util as testutil from hypothesis import given from hypothesis.strategies import text -if six.PY3: - long = int - @implementer(IConsumer) class MemAccum(object): @@ -48,16 +59,16 @@ class MemAccum(object): self.data = data self.producer.resumeProducing() -setup_py_uri = "URI:CHK:n7r3m6wmomelk4sep3kw5cvduq:os7ijw5c3maek7pg65e5254k2fzjflavtpejjyhshpsxuqzhcwwq:3:20:14861" -one_uri = "URI:LIT:n5xgk" # LIT for "one" -mut_write_uri = "URI:SSK:vfvcbdfbszyrsaxchgevhmmlii:euw4iw7bbnkrrwpzuburbhppuxhc3gwxv26f6imekhz7zyw2ojnq" -mdmf_write_uri = "URI:MDMF:x533rhbm6kiehzl5kj3s44n5ie:4gif5rhneyd763ouo5qjrgnsoa3bg43xycy4robj2rf3tvmhdl3a" -empty_litdir_uri = "URI:DIR2-LIT:" -tiny_litdir_uri = "URI:DIR2-LIT:gqytunj2onug64tufqzdcosvkjetutcjkq5gw4tvm5vwszdgnz5hgyzufqydulbshj5x2lbm" # contains one child which is itself also LIT -mut_read_uri = "URI:SSK-RO:jf6wkflosyvntwxqcdo7a54jvm:euw4iw7bbnkrrwpzuburbhppuxhc3gwxv26f6imekhz7zyw2ojnq" -mdmf_read_uri = "URI:MDMF-RO:d4cydxselputycfzkw6qgz4zv4:4gif5rhneyd763ouo5qjrgnsoa3bg43xycy4robj2rf3tvmhdl3a" -future_write_uri = "x-tahoe-crazy://I_am_from_the_future." -future_read_uri = "x-tahoe-crazy-readonly://I_am_from_the_future." +setup_py_uri = b"URI:CHK:n7r3m6wmomelk4sep3kw5cvduq:os7ijw5c3maek7pg65e5254k2fzjflavtpejjyhshpsxuqzhcwwq:3:20:14861" +one_uri = b"URI:LIT:n5xgk" # LIT for "one" +mut_write_uri = b"URI:SSK:vfvcbdfbszyrsaxchgevhmmlii:euw4iw7bbnkrrwpzuburbhppuxhc3gwxv26f6imekhz7zyw2ojnq" +mdmf_write_uri = b"URI:MDMF:x533rhbm6kiehzl5kj3s44n5ie:4gif5rhneyd763ouo5qjrgnsoa3bg43xycy4robj2rf3tvmhdl3a" +empty_litdir_uri = b"URI:DIR2-LIT:" +tiny_litdir_uri = b"URI:DIR2-LIT:gqytunj2onug64tufqzdcosvkjetutcjkq5gw4tvm5vwszdgnz5hgyzufqydulbshj5x2lbm" # contains one child which is itself also LIT +mut_read_uri = b"URI:SSK-RO:jf6wkflosyvntwxqcdo7a54jvm:euw4iw7bbnkrrwpzuburbhppuxhc3gwxv26f6imekhz7zyw2ojnq" +mdmf_read_uri = b"URI:MDMF-RO:d4cydxselputycfzkw6qgz4zv4:4gif5rhneyd763ouo5qjrgnsoa3bg43xycy4robj2rf3tvmhdl3a" +future_write_uri = b"x-tahoe-crazy://I_am_from_the_future." +future_read_uri = b"x-tahoe-crazy-readonly://I_am_from_the_future." 
future_nonascii_write_uri = u"x-tahoe-even-more-crazy://I_am_from_the_future_rw_\u263A".encode('utf-8') future_nonascii_read_uri = u"x-tahoe-even-more-crazy-readonly://I_am_from_the_future_ro_\u263A".encode('utf-8') @@ -95,13 +106,13 @@ class Dirnode(GridTestMixin, unittest.TestCase, self.failUnless(u) cap_formats = [] if mdmf: - cap_formats = ["URI:DIR2-MDMF:", - "URI:DIR2-MDMF-RO:", - "URI:DIR2-MDMF-Verifier:"] + cap_formats = [b"URI:DIR2-MDMF:", + b"URI:DIR2-MDMF-RO:", + b"URI:DIR2-MDMF-Verifier:"] else: - cap_formats = ["URI:DIR2:", - "URI:DIR2-RO", - "URI:DIR2-Verifier:"] + cap_formats = [b"URI:DIR2:", + b"URI:DIR2-RO", + b"URI:DIR2-Verifier:"] rw, ro, v = cap_formats self.failUnless(u.startswith(rw), u) u_ro = n.get_readonly_uri() @@ -149,7 +160,7 @@ class Dirnode(GridTestMixin, unittest.TestCase, self.failUnless(isinstance(subdir, dirnode.DirectoryNode)) self.subdir = subdir new_v = subdir.get_verify_cap().to_string() - assert isinstance(new_v, str) + assert isinstance(new_v, bytes) self.expected_manifest.append( ((u"subdir",), subdir.get_uri()) ) self.expected_verifycaps.add(new_v) si = subdir.get_storage_index() @@ -182,7 +193,7 @@ class Dirnode(GridTestMixin, unittest.TestCase, "largest-directory-children": 2, "largest-immutable-file": 0, } - for k,v in expected.iteritems(): + for k,v in expected.items(): self.failUnlessReallyEqual(stats[k], v, "stats[%s] was %s, not %s" % (k, stats[k], v)) @@ -272,8 +283,8 @@ class Dirnode(GridTestMixin, unittest.TestCase, { 'tahoe': {'linkcrtime': "bogus"}})) d.addCallback(lambda res: n.get_metadata_for(u"c2")) def _has_good_linkcrtime(metadata): - self.failUnless(metadata.has_key('tahoe')) - self.failUnless(metadata['tahoe'].has_key('linkcrtime')) + self.failUnless('tahoe' in metadata) + self.failUnless('linkcrtime' in metadata['tahoe']) self.failIfEqual(metadata['tahoe']['linkcrtime'], 'bogus') d.addCallback(_has_good_linkcrtime) @@ -423,7 +434,7 @@ class Dirnode(GridTestMixin, unittest.TestCase, # moved on to stdlib "json" which doesn't have it either. 
d.addCallback(self.stall, 0.1) d.addCallback(lambda res: n.add_file(u"timestamps", - upload.Data("stamp me", convergence="some convergence string"))) + upload.Data(b"stamp me", convergence=b"some convergence string"))) d.addCallback(self.stall, 0.1) def _stop(res): self._stop_timestamp = time.time() @@ -472,11 +483,11 @@ class Dirnode(GridTestMixin, unittest.TestCase, self.failUnlessReallyEqual(set(children.keys()), set([u"child"]))) - uploadable1 = upload.Data("some data", convergence="converge") + uploadable1 = upload.Data(b"some data", convergence=b"converge") d.addCallback(lambda res: n.add_file(u"newfile", uploadable1)) d.addCallback(lambda newnode: self.failUnless(IImmutableFileNode.providedBy(newnode))) - uploadable2 = upload.Data("some data", convergence="stuff") + uploadable2 = upload.Data(b"some data", convergence=b"stuff") d.addCallback(lambda res: self.shouldFail(ExistingChildError, "add_file-no", "child 'newfile' already exists", @@ -491,7 +502,7 @@ class Dirnode(GridTestMixin, unittest.TestCase, d.addCallback(lambda metadata: self.failUnlessEqual(set(metadata.keys()), set(["tahoe"]))) - uploadable3 = upload.Data("some data", convergence="converge") + uploadable3 = upload.Data(b"some data", convergence=b"converge") d.addCallback(lambda res: n.add_file(u"newfile-metadata", uploadable3, {"key": "value"})) @@ -507,8 +518,8 @@ class Dirnode(GridTestMixin, unittest.TestCase, def _created2(subdir2): self.subdir2 = subdir2 # put something in the way, to make sure it gets overwritten - return subdir2.add_file(u"child", upload.Data("overwrite me", - "converge")) + return subdir2.add_file(u"child", upload.Data(b"overwrite me", + b"converge")) d.addCallback(_created2) d.addCallback(lambda res: @@ -666,22 +677,22 @@ class Dirnode(GridTestMixin, unittest.TestCase, self.failUnless(fut_node.is_unknown()) self.failUnlessReallyEqual(fut_node.get_uri(), future_write_uri) - self.failUnlessReallyEqual(fut_node.get_readonly_uri(), "ro." + future_read_uri) + self.failUnlessReallyEqual(fut_node.get_readonly_uri(), b"ro." + future_read_uri) self.failUnless(isinstance(fut_metadata, dict), fut_metadata) self.failUnless(futna_node.is_unknown()) self.failUnlessReallyEqual(futna_node.get_uri(), future_nonascii_write_uri) - self.failUnlessReallyEqual(futna_node.get_readonly_uri(), "ro." + future_nonascii_read_uri) + self.failUnlessReallyEqual(futna_node.get_readonly_uri(), b"ro." + future_nonascii_read_uri) self.failUnless(isinstance(futna_metadata, dict), futna_metadata) self.failUnless(fro_node.is_unknown()) - self.failUnlessReallyEqual(fro_node.get_uri(), "ro." + future_read_uri) - self.failUnlessReallyEqual(fut_node.get_readonly_uri(), "ro." + future_read_uri) + self.failUnlessReallyEqual(fro_node.get_uri(), b"ro." + future_read_uri) + self.failUnlessReallyEqual(fut_node.get_readonly_uri(), b"ro." + future_read_uri) self.failUnless(isinstance(fro_metadata, dict), fro_metadata) self.failUnless(frona_node.is_unknown()) - self.failUnlessReallyEqual(frona_node.get_uri(), "ro." + future_nonascii_read_uri) - self.failUnlessReallyEqual(futna_node.get_readonly_uri(), "ro." + future_nonascii_read_uri) + self.failUnlessReallyEqual(frona_node.get_uri(), b"ro." + future_nonascii_read_uri) + self.failUnlessReallyEqual(futna_node.get_readonly_uri(), b"ro." 
+ future_nonascii_read_uri) self.failUnless(isinstance(frona_metadata, dict), frona_metadata) self.failIf(emptylit_node.is_unknown()) @@ -697,7 +708,7 @@ class Dirnode(GridTestMixin, unittest.TestCase, set([u"short"]))) d2.addCallback(lambda ignored: tinylit_node.list()) d2.addCallback(lambda children: children[u"short"][0].read(MemAccum())) - d2.addCallback(lambda accum: self.failUnlessReallyEqual(accum.data, "The end.")) + d2.addCallback(lambda accum: self.failUnlessReallyEqual(accum.data, b"The end.")) return d2 d.addCallback(_check_kids) @@ -782,7 +793,7 @@ class Dirnode(GridTestMixin, unittest.TestCase, rep = str(dn) self.failUnless("RO-IMM" in rep) cap = dn.get_cap() - self.failUnlessIn("CHK", cap.to_string()) + self.failUnlessIn(b"CHK", cap.to_string()) self.cap = cap return dn.list() d.addCallback(_created) @@ -808,13 +819,13 @@ class Dirnode(GridTestMixin, unittest.TestCase, self.failUnlessEqual(two_metadata["metakey"], "metavalue") self.failUnless(fut_node.is_unknown()) - self.failUnlessReallyEqual(fut_node.get_uri(), "imm." + future_read_uri) - self.failUnlessReallyEqual(fut_node.get_readonly_uri(), "imm." + future_read_uri) + self.failUnlessReallyEqual(fut_node.get_uri(), b"imm." + future_read_uri) + self.failUnlessReallyEqual(fut_node.get_readonly_uri(), b"imm." + future_read_uri) self.failUnless(isinstance(fut_metadata, dict), fut_metadata) self.failUnless(futna_node.is_unknown()) - self.failUnlessReallyEqual(futna_node.get_uri(), "imm." + future_nonascii_read_uri) - self.failUnlessReallyEqual(futna_node.get_readonly_uri(), "imm." + future_nonascii_read_uri) + self.failUnlessReallyEqual(futna_node.get_uri(), b"imm." + future_nonascii_read_uri) + self.failUnlessReallyEqual(futna_node.get_readonly_uri(), b"imm." + future_nonascii_read_uri) self.failUnless(isinstance(futna_metadata, dict), futna_metadata) self.failIf(emptylit_node.is_unknown()) @@ -830,7 +841,7 @@ class Dirnode(GridTestMixin, unittest.TestCase, set([u"short"]))) d2.addCallback(lambda ignored: tinylit_node.list()) d2.addCallback(lambda children: children[u"short"][0].read(MemAccum())) - d2.addCallback(lambda accum: self.failUnlessReallyEqual(accum.data, "The end.")) + d2.addCallback(lambda accum: self.failUnlessReallyEqual(accum.data, b"The end.")) return d2 d.addCallback(_check_kids) @@ -894,8 +905,8 @@ class Dirnode(GridTestMixin, unittest.TestCase, rep = str(dn) self.failUnless("RO-IMM" in rep) cap = dn.get_cap() - self.failUnlessIn("LIT", cap.to_string()) - self.failUnlessReallyEqual(cap.to_string(), "URI:DIR2-LIT:") + self.failUnlessIn(b"LIT", cap.to_string()) + self.failUnlessReallyEqual(cap.to_string(), b"URI:DIR2-LIT:") self.cap = cap return dn.list() d.addCallback(_created_empty) @@ -912,13 +923,13 @@ class Dirnode(GridTestMixin, unittest.TestCase, rep = str(dn) self.failUnless("RO-IMM" in rep) cap = dn.get_cap() - self.failUnlessIn("LIT", cap.to_string()) + self.failUnlessIn(b"LIT", cap.to_string()) self.failUnlessReallyEqual(cap.to_string(), - "URI:DIR2-LIT:gi4tumj2n4wdcmz2kvjesosmjfkdu3rvpbtwwlbqhiwdeot3puwcy") + b"URI:DIR2-LIT:gi4tumj2n4wdcmz2kvjesosmjfkdu3rvpbtwwlbqhiwdeot3puwcy") self.cap = cap return dn.list() d.addCallback(_created_small) - d.addCallback(lambda kids: self.failUnlessReallyEqual(kids.keys(), [u"o"])) + d.addCallback(lambda kids: self.failUnlessReallyEqual(list(kids.keys()), [u"o"])) # now test n.create_subdirectory(mutable=False) d.addCallback(lambda ign: c.create_dirnode()) @@ -928,7 +939,7 @@ class Dirnode(GridTestMixin, unittest.TestCase, d.addCallback(_check_kids) 
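# The b"ro." and b"imm." prefixes asserted in these hunks follow one
# convention: a cap of unknown type handed out in a read-only context is
# tagged "ro.", and one from a deep-immutable context is tagged "imm.",
# so it can never later be mistaken for a write capability. A sketch of
# that tagging, assuming (as allmydata.unknown appears to define) that the
# prefixes are literally these two strings; the helper name is invented
# for illustration.

ALLEGED_READONLY_PREFIX = b"ro."
ALLEGED_IMMUTABLE_PREFIX = b"imm."

def tag_unknown_cap(cap, deep_immutable=False):
    # Record the strongest restriction of the context the cap came from.
    if deep_immutable:
        return ALLEGED_IMMUTABLE_PREFIX + cap
    return ALLEGED_READONLY_PREFIX + cap

assert tag_unknown_cap(b"cap") == b"ro.cap"
assert tag_unknown_cap(b"cap", deep_immutable=True) == b"imm.cap"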
d.addCallback(lambda ign: n.list()) d.addCallback(lambda children: - self.failUnlessReallyEqual(children.keys(), [u"subdir"])) + self.failUnlessReallyEqual(list(children.keys()), [u"subdir"])) d.addCallback(lambda ign: n.get(u"subdir")) d.addCallback(lambda sd: sd.list()) d.addCallback(_check_kids) @@ -962,14 +973,14 @@ class Dirnode(GridTestMixin, unittest.TestCase, # It also tests that we store child names as UTF-8 NFC, and normalize # them again when retrieving them. - stripped_write_uri = "lafs://from_the_future\t" - stripped_read_uri = "lafs://readonly_from_the_future\t" - spacedout_write_uri = stripped_write_uri + " " - spacedout_read_uri = stripped_read_uri + " " + stripped_write_uri = b"lafs://from_the_future\t" + stripped_read_uri = b"lafs://readonly_from_the_future\t" + spacedout_write_uri = stripped_write_uri + b" " + spacedout_read_uri = stripped_read_uri + b" " child = nm.create_from_cap(spacedout_write_uri, spacedout_read_uri) self.failUnlessReallyEqual(child.get_write_uri(), spacedout_write_uri) - self.failUnlessReallyEqual(child.get_readonly_uri(), "ro." + spacedout_read_uri) + self.failUnlessReallyEqual(child.get_readonly_uri(), b"ro." + spacedout_read_uri) child_dottedi = u"ch\u0131\u0307ld" @@ -1003,7 +1014,7 @@ class Dirnode(GridTestMixin, unittest.TestCase, self.failUnlessIn(name, kids_out) (expected_child, ign) = kids_out[name] self.failUnlessReallyEqual(rw_uri, expected_child.get_write_uri()) - self.failUnlessReallyEqual("ro." + ro_uri, expected_child.get_readonly_uri()) + self.failUnlessReallyEqual(b"ro." + ro_uri, expected_child.get_readonly_uri()) numkids += 1 self.failUnlessReallyEqual(numkids, len(kids_out)) @@ -1039,7 +1050,7 @@ class Dirnode(GridTestMixin, unittest.TestCase, child_node, child_metadata = children[u"child"] self.failUnlessReallyEqual(child_node.get_write_uri(), stripped_write_uri) - self.failUnlessReallyEqual(child_node.get_readonly_uri(), "ro." + stripped_read_uri) + self.failUnlessReallyEqual(child_node.get_readonly_uri(), b"ro." 
+ stripped_read_uri) d.addCallback(_check_kids) d.addCallback(lambda ign: nm.create_from_cap(self.cap.to_string())) @@ -1074,7 +1085,7 @@ class Dirnode(GridTestMixin, unittest.TestCase, d.addCallback(_created_root) def _created_subdir(subdir): self._subdir = subdir - d = subdir.add_file(u"file1", upload.Data("data"*100, None)) + d = subdir.add_file(u"file1", upload.Data(b"data"*100, None)) d.addCallback(lambda res: subdir.set_node(u"link", self._rootnode)) d.addCallback(lambda res: c.create_dirnode()) d.addCallback(lambda dn: @@ -1250,7 +1261,7 @@ class Dirnode(GridTestMixin, unittest.TestCase, nm = c.nodemaker filecap = make_chk_file_uri(1234) filenode = nm.create_from_cap(filecap) - uploadable = upload.Data("some data", convergence="some convergence string") + uploadable = upload.Data(b"some data", convergence=b"some convergence string") d = c.create_dirnode(version=version) def _created(rw_dn): @@ -1386,7 +1397,7 @@ class Dirnode(GridTestMixin, unittest.TestCase, class MinimalFakeMutableFile(object): def get_writekey(self): - return "writekey" + return b"writekey" class Packing(testutil.ReallyEqualMixin, unittest.TestCase): # This is a base32-encoded representation of the directory tree @@ -1405,7 +1416,7 @@ class Packing(testutil.ReallyEqualMixin, unittest.TestCase): nodemaker = NodeMaker(None, None, None, None, None, {"k": 3, "n": 10}, None, None) - write_uri = "URI:SSK-RO:e3mdrzfwhoq42hy5ubcz6rp3o4:ybyibhnp3vvwuq2vaw2ckjmesgkklfs6ghxleztqidihjyofgw7q" + write_uri = b"URI:SSK-RO:e3mdrzfwhoq42hy5ubcz6rp3o4:ybyibhnp3vvwuq2vaw2ckjmesgkklfs6ghxleztqidihjyofgw7q" filenode = nodemaker.create_from_cap(write_uri) node = dirnode.DirectoryNode(filenode, nodemaker, None) children = node._unpack_contents(known_tree) @@ -1417,13 +1428,13 @@ class Packing(testutil.ReallyEqualMixin, unittest.TestCase): def _check_children(self, children): # Are all the expected child nodes there? - self.failUnless(children.has_key(u'file1')) - self.failUnless(children.has_key(u'file2')) - self.failUnless(children.has_key(u'file3')) + self.failUnless(u'file1' in children) + self.failUnless(u'file2' in children) + self.failUnless(u'file3' in children) # Are the metadata for child 3 right? - file3_rocap = "URI:CHK:cmtcxq7hwxvfxan34yiev6ivhy:qvcekmjtoetdcw4kmi7b3rtblvgx7544crnwaqtiewemdliqsokq:3:10:5" - file3_rwcap = "URI:CHK:cmtcxq7hwxvfxan34yiev6ivhy:qvcekmjtoetdcw4kmi7b3rtblvgx7544crnwaqtiewemdliqsokq:3:10:5" + file3_rocap = b"URI:CHK:cmtcxq7hwxvfxan34yiev6ivhy:qvcekmjtoetdcw4kmi7b3rtblvgx7544crnwaqtiewemdliqsokq:3:10:5" + file3_rwcap = b"URI:CHK:cmtcxq7hwxvfxan34yiev6ivhy:qvcekmjtoetdcw4kmi7b3rtblvgx7544crnwaqtiewemdliqsokq:3:10:5" file3_metadata = {'ctime': 1246663897.4336269, 'tahoe': {'linkmotime': 1246663897.4336269, 'linkcrtime': 1246663897.4336269}, 'mtime': 1246663897.4336269} self.failUnlessEqual(file3_metadata, children[u'file3'][1]) self.failUnlessReallyEqual(file3_rocap, @@ -1432,8 +1443,8 @@ class Packing(testutil.ReallyEqualMixin, unittest.TestCase): children[u'file3'][0].get_uri()) # Are the metadata for child 2 right? 
- file2_rocap = "URI:CHK:apegrpehshwugkbh3jlt5ei6hq:5oougnemcl5xgx4ijgiumtdojlipibctjkbwvyygdymdphib2fvq:3:10:4" - file2_rwcap = "URI:CHK:apegrpehshwugkbh3jlt5ei6hq:5oougnemcl5xgx4ijgiumtdojlipibctjkbwvyygdymdphib2fvq:3:10:4" + file2_rocap = b"URI:CHK:apegrpehshwugkbh3jlt5ei6hq:5oougnemcl5xgx4ijgiumtdojlipibctjkbwvyygdymdphib2fvq:3:10:4" + file2_rwcap = b"URI:CHK:apegrpehshwugkbh3jlt5ei6hq:5oougnemcl5xgx4ijgiumtdojlipibctjkbwvyygdymdphib2fvq:3:10:4" file2_metadata = {'ctime': 1246663897.430218, 'tahoe': {'linkmotime': 1246663897.430218, 'linkcrtime': 1246663897.430218}, 'mtime': 1246663897.430218} self.failUnlessEqual(file2_metadata, children[u'file2'][1]) self.failUnlessReallyEqual(file2_rocap, @@ -1442,8 +1453,8 @@ class Packing(testutil.ReallyEqualMixin, unittest.TestCase): children[u'file2'][0].get_uri()) # Are the metadata for child 1 right? - file1_rocap = "URI:CHK:olxtimympo7f27jvhtgqlnbtn4:emzdnhk2um4seixozlkw3qx2nfijvdkx3ky7i7izl47yedl6e64a:3:10:10" - file1_rwcap = "URI:CHK:olxtimympo7f27jvhtgqlnbtn4:emzdnhk2um4seixozlkw3qx2nfijvdkx3ky7i7izl47yedl6e64a:3:10:10" + file1_rocap = b"URI:CHK:olxtimympo7f27jvhtgqlnbtn4:emzdnhk2um4seixozlkw3qx2nfijvdkx3ky7i7izl47yedl6e64a:3:10:10" + file1_rwcap = b"URI:CHK:olxtimympo7f27jvhtgqlnbtn4:emzdnhk2um4seixozlkw3qx2nfijvdkx3ky7i7izl47yedl6e64a:3:10:10" file1_metadata = {'ctime': 1246663897.4275661, 'tahoe': {'linkmotime': 1246663897.4275661, 'linkcrtime': 1246663897.4275661}, 'mtime': 1246663897.4275661} self.failUnlessEqual(file1_metadata, children[u'file1'][1]) self.failUnlessReallyEqual(file1_rocap, @@ -1452,18 +1463,42 @@ class Packing(testutil.ReallyEqualMixin, unittest.TestCase): children[u'file1'][0].get_uri()) def _make_kids(self, nm, which): - caps = {"imm": "URI:CHK:n7r3m6wmomelk4sep3kw5cvduq:os7ijw5c3maek7pg65e5254k2fzjflavtpejjyhshpsxuqzhcwwq:3:20:14861", - "lit": "URI:LIT:n5xgk", # LIT for "one" - "write": "URI:SSK:vfvcbdfbszyrsaxchgevhmmlii:euw4iw7bbnkrrwpzuburbhppuxhc3gwxv26f6imekhz7zyw2ojnq", - "read": "URI:SSK-RO:e3mdrzfwhoq42hy5ubcz6rp3o4:ybyibhnp3vvwuq2vaw2ckjmesgkklfs6ghxleztqidihjyofgw7q", - "dirwrite": "URI:DIR2:n6x24zd3seu725yluj75q5boaa:mm6yoqjhl6ueh7iereldqxue4nene4wl7rqfjfybqrehdqmqskvq", - "dirread": "URI:DIR2-RO:b7sr5qsifnicca7cbk3rhrhbvq:mm6yoqjhl6ueh7iereldqxue4nene4wl7rqfjfybqrehdqmqskvq", + caps = {"imm": b"URI:CHK:n7r3m6wmomelk4sep3kw5cvduq:os7ijw5c3maek7pg65e5254k2fzjflavtpejjyhshpsxuqzhcwwq:3:20:14861", + "lit": b"URI:LIT:n5xgk", # LIT for "one" + "write": b"URI:SSK:vfvcbdfbszyrsaxchgevhmmlii:euw4iw7bbnkrrwpzuburbhppuxhc3gwxv26f6imekhz7zyw2ojnq", + "read": b"URI:SSK-RO:e3mdrzfwhoq42hy5ubcz6rp3o4:ybyibhnp3vvwuq2vaw2ckjmesgkklfs6ghxleztqidihjyofgw7q", + "dirwrite": b"URI:DIR2:n6x24zd3seu725yluj75q5boaa:mm6yoqjhl6ueh7iereldqxue4nene4wl7rqfjfybqrehdqmqskvq", + "dirread": b"URI:DIR2-RO:b7sr5qsifnicca7cbk3rhrhbvq:mm6yoqjhl6ueh7iereldqxue4nene4wl7rqfjfybqrehdqmqskvq", } kids = {} for name in which: - kids[unicode(name)] = (nm.create_from_cap(caps[name]), {}) + kids[str(name)] = (nm.create_from_cap(caps[name]), {}) return kids + def test_pack_unpack_unknown(self): + """ + Minimal testing for roundtripping unknown URIs. + """ + nm = NodeMaker(None, None, None, None, None, {"k": 3, "n": 10}, None, None) + fn = MinimalFakeMutableFile() + # UnknownNode has massively complex rules about when it's an error. + # Just force it not to be an error. 
+ unknown_rw = UnknownNode(b"whatevs://write", None) + unknown_rw.error = None + unknown_ro = UnknownNode(None, b"whatevs://readonly") + unknown_ro.error = None + kids = { + "unknown_rw": (unknown_rw, {}), + "unknown_ro": (unknown_ro, {}) + } + packed = dirnode.pack_children(kids, fn.get_writekey(), deep_immutable=False) + + write_uri = b"URI:SSK-RO:e3mdrzfwhoq42hy5ubcz6rp3o4:ybyibhnp3vvwuq2vaw2ckjmesgkklfs6ghxleztqidihjyofgw7q" + filenode = nm.create_from_cap(write_uri) + dn = dirnode.DirectoryNode(filenode, nm, None) + unkids = dn._unpack_contents(packed) + self.assertEqual(kids, unkids) + @given(text(min_size=1, max_size=20)) def test_pack_unpack_unicode_hypothesis(self, name): """ @@ -1485,7 +1520,7 @@ class Packing(testutil.ReallyEqualMixin, unittest.TestCase): name: (LiteralFileNode(uri.from_string(one_uri)), {}), } packed = dirnode.pack_children(kids, fn.get_writekey(), deep_immutable=False) - write_uri = "URI:SSK-RO:e3mdrzfwhoq42hy5ubcz6rp3o4:ybyibhnp3vvwuq2vaw2ckjmesgkklfs6ghxleztqidihjyofgw7q" + write_uri = b"URI:SSK-RO:e3mdrzfwhoq42hy5ubcz6rp3o4:ybyibhnp3vvwuq2vaw2ckjmesgkklfs6ghxleztqidihjyofgw7q" filenode = nm.create_from_cap(write_uri) dn = dirnode.DirectoryNode(filenode, nm, None) unkids = dn._unpack_contents(packed) @@ -1498,11 +1533,11 @@ class Packing(testutil.ReallyEqualMixin, unittest.TestCase): kids = self._make_kids(nm, ["imm", "lit", "write", "read", "dirwrite", "dirread"]) packed = dirnode.pack_children(kids, fn.get_writekey(), deep_immutable=False) - self.failUnlessIn("lit", packed) + self.failUnlessIn(b"lit", packed) kids = self._make_kids(nm, ["imm", "lit"]) packed = dirnode.pack_children(kids, fn.get_writekey(), deep_immutable=True) - self.failUnlessIn("lit", packed) + self.failUnlessIn(b"lit", packed) kids = self._make_kids(nm, ["imm", "lit", "write"]) self.failUnlessRaises(dirnode.MustBeDeepImmutableError, @@ -1526,24 +1561,24 @@ class Packing(testutil.ReallyEqualMixin, unittest.TestCase): kids, fn.get_writekey(), deep_immutable=True) @implementer(IMutableFileNode) -class FakeMutableFile(object): +class FakeMutableFile(object): # type: ignore # incomplete implementation counter = 0 - def __init__(self, initial_contents=""): + def __init__(self, initial_contents=b""): data = self._get_initial_contents(initial_contents) self.data = data.read(data.get_size()) - self.data = "".join(self.data) + self.data = b"".join(self.data) counter = FakeMutableFile.counter FakeMutableFile.counter += 1 - writekey = hashutil.ssk_writekey_hash(str(counter)) - fingerprint = hashutil.ssk_pubkey_fingerprint_hash(str(counter)) + writekey = hashutil.ssk_writekey_hash(b"%d" % counter) + fingerprint = hashutil.ssk_pubkey_fingerprint_hash(b"%d" % counter) self.uri = uri.WriteableSSKFileURI(writekey, fingerprint) def _get_initial_contents(self, contents): - if isinstance(contents, str): + if isinstance(contents, bytes): return contents if contents is None: - return "" + return b"" assert callable(contents), "%s should be callable, not %s" % \ (contents, type(contents)) return contents(self) @@ -1561,7 +1596,7 @@ class FakeMutableFile(object): return defer.succeed(self.data) def get_writekey(self): - return "writekey" + return b"writekey" def is_readonly(self): return False @@ -1584,10 +1619,10 @@ class FakeMutableFile(object): return defer.succeed(None) class FakeNodeMaker(NodeMaker): - def create_mutable_file(self, contents="", keysize=None, version=None): + def create_mutable_file(self, contents=b"", keysize=None, version=None): return defer.succeed(FakeMutableFile(contents)) -class 
FakeClient2(_Client): +class FakeClient2(_Client): # type: ignore # tahoe-lafs/ticket/3573 def __init__(self): self.nodemaker = FakeNodeMaker(None, None, None, None, None, @@ -1631,9 +1666,9 @@ class Dirnode2(testutil.ReallyEqualMixin, testutil.ShouldFailMixin, unittest.Tes # and to add an URI prefixed with "ro." or "imm." when it is given in a # write slot (or URL parameter). d.addCallback(lambda ign: self._node.set_uri(u"add-ro", - "ro." + future_read_uri, None)) + b"ro." + future_read_uri, None)) d.addCallback(lambda ign: self._node.set_uri(u"add-imm", - "imm." + future_imm_uri, None)) + b"imm." + future_imm_uri, None)) d.addCallback(lambda ign: self._node.list()) def _check(children): @@ -1642,25 +1677,25 @@ class Dirnode2(testutil.ReallyEqualMixin, testutil.ShouldFailMixin, unittest.Tes self.failUnless(isinstance(fn, UnknownNode), fn) self.failUnlessReallyEqual(fn.get_uri(), future_write_uri) self.failUnlessReallyEqual(fn.get_write_uri(), future_write_uri) - self.failUnlessReallyEqual(fn.get_readonly_uri(), "ro." + future_read_uri) + self.failUnlessReallyEqual(fn.get_readonly_uri(), b"ro." + future_read_uri) (fn2, metadata2) = children[u"add-pair"] self.failUnless(isinstance(fn2, UnknownNode), fn2) self.failUnlessReallyEqual(fn2.get_uri(), future_write_uri) self.failUnlessReallyEqual(fn2.get_write_uri(), future_write_uri) - self.failUnlessReallyEqual(fn2.get_readonly_uri(), "ro." + future_read_uri) + self.failUnlessReallyEqual(fn2.get_readonly_uri(), b"ro." + future_read_uri) (fn3, metadata3) = children[u"add-ro"] self.failUnless(isinstance(fn3, UnknownNode), fn3) - self.failUnlessReallyEqual(fn3.get_uri(), "ro." + future_read_uri) + self.failUnlessReallyEqual(fn3.get_uri(), b"ro." + future_read_uri) self.failUnlessReallyEqual(fn3.get_write_uri(), None) - self.failUnlessReallyEqual(fn3.get_readonly_uri(), "ro." + future_read_uri) + self.failUnlessReallyEqual(fn3.get_readonly_uri(), b"ro." + future_read_uri) (fn4, metadata4) = children[u"add-imm"] self.failUnless(isinstance(fn4, UnknownNode), fn4) - self.failUnlessReallyEqual(fn4.get_uri(), "imm." + future_imm_uri) + self.failUnlessReallyEqual(fn4.get_uri(), b"imm." + future_imm_uri) self.failUnlessReallyEqual(fn4.get_write_uri(), None) - self.failUnlessReallyEqual(fn4.get_readonly_uri(), "imm." + future_imm_uri) + self.failUnlessReallyEqual(fn4.get_readonly_uri(), b"imm." + future_imm_uri) # We should also be allowed to copy the "future" UnknownNode, because # it contains all the information that was in the original directory @@ -1675,17 +1710,17 @@ class Dirnode2(testutil.ReallyEqualMixin, testutil.ShouldFailMixin, unittest.Tes self.failUnless(isinstance(fn, UnknownNode), fn) self.failUnlessReallyEqual(fn.get_uri(), future_write_uri) self.failUnlessReallyEqual(fn.get_write_uri(), future_write_uri) - self.failUnlessReallyEqual(fn.get_readonly_uri(), "ro." + future_read_uri) + self.failUnlessReallyEqual(fn.get_readonly_uri(), b"ro." 
+ future_read_uri) d.addCallback(_check2) return d def test_unknown_strip_prefix_for_ro(self): - self.failUnlessReallyEqual(strip_prefix_for_ro("foo", False), "foo") - self.failUnlessReallyEqual(strip_prefix_for_ro("ro.foo", False), "foo") - self.failUnlessReallyEqual(strip_prefix_for_ro("imm.foo", False), "imm.foo") - self.failUnlessReallyEqual(strip_prefix_for_ro("foo", True), "foo") - self.failUnlessReallyEqual(strip_prefix_for_ro("ro.foo", True), "foo") - self.failUnlessReallyEqual(strip_prefix_for_ro("imm.foo", True), "foo") + self.failUnlessReallyEqual(strip_prefix_for_ro(b"foo", False), b"foo") + self.failUnlessReallyEqual(strip_prefix_for_ro(b"ro.foo", False), b"foo") + self.failUnlessReallyEqual(strip_prefix_for_ro(b"imm.foo", False), b"imm.foo") + self.failUnlessReallyEqual(strip_prefix_for_ro(b"foo", True), b"foo") + self.failUnlessReallyEqual(strip_prefix_for_ro(b"ro.foo", True), b"foo") + self.failUnlessReallyEqual(strip_prefix_for_ro(b"imm.foo", True), b"foo") def test_unknownnode(self): lit_uri = one_uri @@ -1697,58 +1732,58 @@ class Dirnode2(testutil.ReallyEqualMixin, testutil.ShouldFailMixin, unittest.Tes ] unknown_rw = [# These are errors because we're only given a rw_uri, and we can't # diminish it. - ( 2, UnknownNode("foo", None)), - ( 3, UnknownNode("foo", None, deep_immutable=True)), - ( 4, UnknownNode("ro.foo", None, deep_immutable=True)), - ( 5, UnknownNode("ro." + mut_read_uri, None, deep_immutable=True)), - ( 5.1, UnknownNode("ro." + mdmf_read_uri, None, deep_immutable=True)), - ( 6, UnknownNode("URI:SSK-RO:foo", None, deep_immutable=True)), - ( 7, UnknownNode("URI:SSK:foo", None)), + ( 2, UnknownNode(b"foo", None)), + ( 3, UnknownNode(b"foo", None, deep_immutable=True)), + ( 4, UnknownNode(b"ro.foo", None, deep_immutable=True)), + ( 5, UnknownNode(b"ro." + mut_read_uri, None, deep_immutable=True)), + ( 5.1, UnknownNode(b"ro." + mdmf_read_uri, None, deep_immutable=True)), + ( 6, UnknownNode(b"URI:SSK-RO:foo", None, deep_immutable=True)), + ( 7, UnknownNode(b"URI:SSK:foo", None)), ] must_be_ro = [# These are errors because a readonly constraint is not met. - ( 8, UnknownNode("ro." + mut_write_uri, None)), - ( 8.1, UnknownNode("ro." + mdmf_write_uri, None)), - ( 9, UnknownNode(None, "ro." + mut_write_uri)), - ( 9.1, UnknownNode(None, "ro." + mdmf_write_uri)), + ( 8, UnknownNode(b"ro." + mut_write_uri, None)), + ( 8.1, UnknownNode(b"ro." + mdmf_write_uri, None)), + ( 9, UnknownNode(None, b"ro." + mut_write_uri)), + ( 9.1, UnknownNode(None, b"ro." + mdmf_write_uri)), ] must_be_imm = [# These are errors because an immutable constraint is not met. - (10, UnknownNode(None, "ro.URI:SSK-RO:foo", deep_immutable=True)), - (11, UnknownNode(None, "imm.URI:SSK:foo")), - (12, UnknownNode(None, "imm.URI:SSK-RO:foo")), - (13, UnknownNode("bar", "ro.foo", deep_immutable=True)), - (14, UnknownNode("bar", "imm.foo", deep_immutable=True)), - (15, UnknownNode("bar", "imm." + lit_uri, deep_immutable=True)), - (16, UnknownNode("imm." + mut_write_uri, None)), - (16.1, UnknownNode("imm." + mdmf_write_uri, None)), - (17, UnknownNode("imm." + mut_read_uri, None)), - (17.1, UnknownNode("imm." + mdmf_read_uri, None)), - (18, UnknownNode("bar", "imm.foo")), + (10, UnknownNode(None, b"ro.URI:SSK-RO:foo", deep_immutable=True)), + (11, UnknownNode(None, b"imm.URI:SSK:foo")), + (12, UnknownNode(None, b"imm.URI:SSK-RO:foo")), + (13, UnknownNode(b"bar", b"ro.foo", deep_immutable=True)), + (14, UnknownNode(b"bar", b"imm.foo", deep_immutable=True)), + (15, UnknownNode(b"bar", b"imm." 
+ lit_uri, deep_immutable=True)), + (16, UnknownNode(b"imm." + mut_write_uri, None)), + (16.1, UnknownNode(b"imm." + mdmf_write_uri, None)), + (17, UnknownNode(b"imm." + mut_read_uri, None)), + (17.1, UnknownNode(b"imm." + mdmf_read_uri, None)), + (18, UnknownNode(b"bar", b"imm.foo")), ] bad_uri = [# These are errors because the URI is bad once we've stripped the prefix. - (19, UnknownNode("ro.URI:SSK-RO:foo", None)), - (20, UnknownNode("imm.URI:CHK:foo", None, deep_immutable=True)), - (21, UnknownNode(None, "URI:CHK:foo")), - (22, UnknownNode(None, "URI:CHK:foo", deep_immutable=True)), + (19, UnknownNode(b"ro.URI:SSK-RO:foo", None)), + (20, UnknownNode(b"imm.URI:CHK:foo", None, deep_immutable=True)), + (21, UnknownNode(None, b"URI:CHK:foo")), + (22, UnknownNode(None, b"URI:CHK:foo", deep_immutable=True)), ] ro_prefixed = [# These are valid, and the readcap should end up with a ro. prefix. - (23, UnknownNode(None, "foo")), - (24, UnknownNode(None, "ro.foo")), - (25, UnknownNode(None, "ro." + lit_uri)), - (26, UnknownNode("bar", "foo")), - (27, UnknownNode("bar", "ro.foo")), - (28, UnknownNode("bar", "ro." + lit_uri)), - (29, UnknownNode("ro.foo", None)), - (30, UnknownNode("ro." + lit_uri, None)), + (23, UnknownNode(None, b"foo")), + (24, UnknownNode(None, b"ro.foo")), + (25, UnknownNode(None, b"ro." + lit_uri)), + (26, UnknownNode(b"bar", b"foo")), + (27, UnknownNode(b"bar", b"ro.foo")), + (28, UnknownNode(b"bar", b"ro." + lit_uri)), + (29, UnknownNode(b"ro.foo", None)), + (30, UnknownNode(b"ro." + lit_uri, None)), ] imm_prefixed = [# These are valid, and the readcap should end up with an imm. prefix. - (31, UnknownNode(None, "foo", deep_immutable=True)), - (32, UnknownNode(None, "ro.foo", deep_immutable=True)), - (33, UnknownNode(None, "imm.foo")), - (34, UnknownNode(None, "imm.foo", deep_immutable=True)), - (35, UnknownNode("imm." + lit_uri, None)), - (36, UnknownNode("imm." + lit_uri, None, deep_immutable=True)), - (37, UnknownNode(None, "imm." + lit_uri)), - (38, UnknownNode(None, "imm." + lit_uri, deep_immutable=True)), + (31, UnknownNode(None, b"foo", deep_immutable=True)), + (32, UnknownNode(None, b"ro.foo", deep_immutable=True)), + (33, UnknownNode(None, b"imm.foo")), + (34, UnknownNode(None, b"imm.foo", deep_immutable=True)), + (35, UnknownNode(b"imm." + lit_uri, None)), + (36, UnknownNode(b"imm." + lit_uri, None, deep_immutable=True)), + (37, UnknownNode(None, b"imm." + lit_uri)), + (38, UnknownNode(None, b"imm." 
+ lit_uri, deep_immutable=True)), ] error = unknown_rw + must_be_ro + must_be_imm + bad_uri ok = ro_prefixed + imm_prefixed @@ -1780,10 +1815,10 @@ class Dirnode2(testutil.ReallyEqualMixin, testutil.ShouldFailMixin, unittest.Tes self.failIf(n.get_readonly_uri() is None, i) for (i, n) in ro_prefixed: - self.failUnless(n.get_readonly_uri().startswith("ro."), i) + self.failUnless(n.get_readonly_uri().startswith(b"ro."), i) for (i, n) in imm_prefixed: - self.failUnless(n.get_readonly_uri().startswith("imm."), i) + self.failUnless(n.get_readonly_uri().startswith(b"imm."), i) @@ -1867,7 +1902,7 @@ class Deleter(GridTestMixin, testutil.ReallyEqualMixin, unittest.TestCase): self.set_up_grid(oneshare=True) c0 = self.g.clients[0] d = c0.create_dirnode() - small = upload.Data("Small enough for a LIT", None) + small = upload.Data(b"Small enough for a LIT", None) def _created_dir(dn): self.root = dn self.root_uri = dn.get_uri() @@ -1909,10 +1944,10 @@ class Adder(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin): # root/file1 # root/file2 # root/dir1 - d = root_node.add_file(u'file1', upload.Data("Important Things", + d = root_node.add_file(u'file1', upload.Data(b"Important Things", None)) d.addCallback(lambda res: - root_node.add_file(u'file2', upload.Data("Sekrit Codes", None))) + root_node.add_file(u'file2', upload.Data(b"Sekrit Codes", None))) d.addCallback(lambda res: root_node.create_subdirectory(u"dir1")) d.addCallback(lambda res: root_node) diff --git a/src/allmydata/test/test_eliotutil.py b/src/allmydata/test/test_eliotutil.py index b382b7289..0073a7675 100644 --- a/src/allmydata/test/test_eliotutil.py +++ b/src/allmydata/test/test_eliotutil.py @@ -1,5 +1,7 @@ """ -Tests for ``allmydata.test.eliotutil``. +Tests for ``allmydata.util.eliotutil``. + +Ported to Python 3. """ from __future__ import ( @@ -9,6 +11,10 @@ from __future__ import ( division, ) +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 + from sys import stdout import logging @@ -51,11 +57,14 @@ from ..util.eliotutil import ( _parse_destination_description, _EliotLogging, ) +from ..util.jsonbytes import BytesJSONEncoder + from .common import ( SyncTestCase, AsyncTestCase, ) + class EliotLoggedTestTests(AsyncTestCase): def test_returns_none(self): Message.log(hello="world") @@ -88,7 +97,7 @@ class ParseDestinationDescriptionTests(SyncTestCase): reactor = object() self.assertThat( _parse_destination_description("file:-")(reactor), - Equals(FileDestination(stdout)), + Equals(FileDestination(stdout, encoder=BytesJSONEncoder)), ) diff --git a/src/allmydata/test/test_ftp.py b/src/allmydata/test/test_ftp.py deleted file mode 100644 index 4eddef440..000000000 --- a/src/allmydata/test/test_ftp.py +++ /dev/null @@ -1,106 +0,0 @@ - -from twisted.trial import unittest - -from allmydata.frontends import ftpd -from allmydata.immutable import upload -from allmydata.mutable import publish -from allmydata.test.no_network import GridTestMixin -from allmydata.test.common_util import ReallyEqualMixin - -class Handler(GridTestMixin, ReallyEqualMixin, unittest.TestCase): - """ - This is a no-network unit test of ftpd.Handler and the abstractions - it uses. 
- """ - - FALL_OF_BERLIN_WALL = 626644800 - TURN_OF_MILLENIUM = 946684800 - - def _set_up(self, basedir, num_clients=1, num_servers=10): - self.basedir = "ftp/" + basedir - self.set_up_grid(num_clients=num_clients, num_servers=num_servers, - oneshare=True) - - self.client = self.g.clients[0] - self.username = "alice" - self.convergence = "" - - d = self.client.create_dirnode() - def _created_root(node): - self.root = node - self.root_uri = node.get_uri() - self.handler = ftpd.Handler(self.client, self.root, self.username, - self.convergence) - d.addCallback(_created_root) - return d - - def _set_metadata(self, name, metadata): - """Set metadata for `name', avoiding MetadataSetter's timestamp reset - behavior.""" - def _modifier(old_contents, servermap, first_time): - children = self.root._unpack_contents(old_contents) - children[name] = (children[name][0], metadata) - return self.root._pack_contents(children) - - return self.root._node.modify(_modifier) - - def _set_up_tree(self): - # add immutable file at root - immutable = upload.Data("immutable file contents", None) - d = self.root.add_file(u"immutable", immutable) - - # `mtime' and `linkmotime' both set - md_both = {'mtime': self.FALL_OF_BERLIN_WALL, - 'tahoe': {'linkmotime': self.TURN_OF_MILLENIUM}} - d.addCallback(lambda _: self._set_metadata(u"immutable", md_both)) - - # add link to root from root - d.addCallback(lambda _: self.root.set_node(u"loop", self.root)) - - # `mtime' set, but no `linkmotime' - md_just_mtime = {'mtime': self.FALL_OF_BERLIN_WALL, 'tahoe': {}} - d.addCallback(lambda _: self._set_metadata(u"loop", md_just_mtime)) - - # add mutable file at root - mutable = publish.MutableData("mutable file contents") - d.addCallback(lambda _: self.client.create_mutable_file(mutable)) - d.addCallback(lambda node: self.root.set_node(u"mutable", node)) - - # neither `mtime' nor `linkmotime' set - d.addCallback(lambda _: self._set_metadata(u"mutable", {})) - - return d - - def _compareDirLists(self, actual, expected): - actual_list = sorted(actual) - expected_list = sorted(expected) - - self.failUnlessReallyEqual(len(actual_list), len(expected_list), - "%r is wrong length, expecting %r" % ( - actual_list, expected_list)) - for (a, b) in zip(actual_list, expected_list): - (name, meta) = a - (expected_name, expected_meta) = b - self.failUnlessReallyEqual(name, expected_name) - self.failUnlessReallyEqual(meta, expected_meta) - - def test_list(self): - keys = ("size", "directory", "permissions", "hardlinks", "modified", - "owner", "group", "unexpected") - d = self._set_up("list") - - d.addCallback(lambda _: self._set_up_tree()) - d.addCallback(lambda _: self.handler.list("", keys=keys)) - - expected_root = [ - ('loop', - [0, True, ftpd.IntishPermissions(0o600), 1, self.FALL_OF_BERLIN_WALL, 'alice', 'alice', '??']), - ('immutable', - [23, False, ftpd.IntishPermissions(0o600), 1, self.TURN_OF_MILLENIUM, 'alice', 'alice', '??']), - ('mutable', - # timestamp should be 0 if no timestamp metadata is present - [0, False, ftpd.IntishPermissions(0o600), 1, 0, 'alice', 'alice', '??'])] - - d.addCallback(lambda root: self._compareDirLists(root, expected_root)) - - return d diff --git a/src/allmydata/test/test_hashutil.py b/src/allmydata/test/test_hashutil.py index abcd4f0fb..6ec861c9f 100644 --- a/src/allmydata/test/test_hashutil.py +++ b/src/allmydata/test/test_hashutil.py @@ -102,9 +102,35 @@ class HashUtilTests(unittest.TestCase): got_a = base32.b2a(got) self.failUnlessEqual(got_a, expected_a) - def test_known_answers(self): - # assert 
backwards compatibility + def test_storage_index_hash_known_answers(self): + """ + Verify backwards compatibility by comparing ``storage_index_hash`` outputs + for some well-known (to us) inputs. + """ + # This is a marginal case. b"" is not a valid aes 128 key. The + # implementation does nothing to avoid producing a result for it, + # though. self._testknown(hashutil.storage_index_hash, b"qb5igbhcc5esa6lwqorsy7e6am", b"") + + # This is a little bit more realistic though clearly this is a poor key choice. + self._testknown(hashutil.storage_index_hash, b"wvggbrnrezdpa5yayrgiw5nzja", b"x" * 16) + + # Here's a much more realistic key that I generated by reading some + # bytes from /dev/urandom. I computed the expected hash value twice. + # First using hashlib.sha256 and then with sha256sum(1). The input + # string given to the hash function was "43:," + # in each case. + self._testknown( + hashutil.storage_index_hash, + b"aarbseqqrpsfowduchcjbonscq", + base32.a2b(b"2ckv3dfzh6rgjis6ogfqhyxnzy"), + ) + + def test_known_answers(self): + """ + Verify backwards compatibility by comparing hash outputs for some + well-known (to us) inputs. + """ self._testknown(hashutil.block_hash, b"msjr5bh4evuh7fa3zw7uovixfbvlnstr5b65mrerwfnvjxig2jvq", b"") self._testknown(hashutil.uri_extension_hash, b"wthsu45q7zewac2mnivoaa4ulh5xvbzdmsbuyztq2a5fzxdrnkka", b"") self._testknown(hashutil.plaintext_hash, b"5lz5hwz3qj3af7n6e3arblw7xzutvnd3p3fjsngqjcb7utf3x3da", b"") diff --git a/src/allmydata/test/test_helper.py b/src/allmydata/test/test_helper.py index c47da3277..3faffbe0d 100644 --- a/src/allmydata/test/test_helper.py +++ b/src/allmydata/test/test_helper.py @@ -1,3 +1,6 @@ +""" +Ported to Python 3. +""" from __future__ import absolute_import from __future__ import division from __future__ import print_function @@ -8,21 +11,60 @@ if PY2: from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 import os +from struct import ( + pack, +) +from functools import ( + partial, +) +import attr + +try: + from typing import List + from allmydata.introducer.client import IntroducerClient +except ImportError: + pass + from twisted.internet import defer from twisted.trial import unittest from twisted.application import service from foolscap.api import Tub, fireEventually, flushEventualQueue +from eliot.twisted import ( + inline_callbacks, +) + from allmydata.crypto import aes -from allmydata.storage.server import si_b2a +from allmydata.storage.server import ( + si_b2a, + StorageServer, +) from allmydata.storage_client import StorageFarmBroker +from allmydata.immutable.layout import ( + make_write_bucket_proxy, +) from allmydata.immutable import offloaded, upload from allmydata import uri, client -from allmydata.util import hashutil, fileutil, mathutil +from allmydata.util import hashutil, fileutil, mathutil, dictutil +from .no_network import ( + NoNetworkServer, + LocalWrapper, + fireNow, +) from .common import ( EMPTY_CLIENT_CONFIG, + SyncTestCase, +) + +from testtools.matchers import ( + Equals, + MatchesListwise, + IsInstance, +) +from testtools.twistedsupport import ( + succeeded, ) MiB = 1024*1024 @@ -63,38 +105,33 @@ class CHKUploadHelper_fake(offloaded.CHKUploadHelper): d.addCallback(_got_size) return d -class Helper_fake_upload(offloaded.Helper): - def _make_chk_upload_helper(self, storage_index, lp): - si_s = str(si_b2a(storage_index), "utf-8") - incoming_file = os.path.join(self._chk_incoming, si_s) - 
encoding_file = os.path.join(self._chk_encoding, si_s)
-        uh = CHKUploadHelper_fake(storage_index, self,
-                                  self._storage_broker,
-                                  self._secret_holder,
-                                  incoming_file, encoding_file,
-                                  lp)
-        return uh
+@attr.s
+class FakeCHKCheckerAndUEBFetcher(object):
+    """
+    A fake of ``CHKCheckerAndUEBFetcher`` which hard-codes some check result.
+    """
+    peer_getter = attr.ib()
+    storage_index = attr.ib()
+    logparent = attr.ib()

-class Helper_already_uploaded(Helper_fake_upload):
-    def _check_chk(self, storage_index, lp):
-        res = upload.HelperUploadResults()
-        res.uri_extension_hash = hashutil.uri_extension_hash(b"")
+    _sharemap = attr.ib()
+    _ueb_data = attr.ib()

-        # we're pretending that the file they're trying to upload was already
-        # present in the grid. We return some information about the file, so
-        # the client can decide if they like the way it looks. The parameters
-        # used here are chosen to match the defaults.
-        PARAMS = FakeClient.DEFAULT_ENCODING_PARAMETERS
-        ueb_data = {"needed_shares": PARAMS["k"],
-                    "total_shares": PARAMS["n"],
-                    "segment_size": min(PARAMS["max_segment_size"], len(DATA)),
-                    "size": len(DATA),
-                    }
-        res.uri_extension_data = ueb_data
-        return defer.succeed(res)
+    @property
+    def _ueb_hash(self):
+        return hashutil.uri_extension_hash(
+            uri.pack_extension(self._ueb_data),
+        )
+
+    def check(self):
+        return defer.succeed((
+            self._sharemap,
+            self._ueb_data,
+            self._ueb_hash,
+        ))

 class FakeClient(service.MultiService):
-    introducer_clients = []
+    introducer_clients = []  # type: List[IntroducerClient]
     DEFAULT_ENCODING_PARAMETERS = {"k":25,
                                    "happy": 75,
                                    "n": 100,
@@ -126,6 +163,26 @@ def upload_data(uploader, data, convergence):
     u = upload.Data(data, convergence=convergence)
     return uploader.upload(u)

+
+def make_uploader(helper_furl, parent, override_name=None):
+    """
+    Make an ``upload.Uploader`` service pointed at the given helper and with
+    the given service parent.
+
+    :param bytes helper_furl: The Foolscap URL of the upload helper.
+
+    :param IServiceCollection parent: A parent to assign to the new uploader.
+
+    :param str override_name: If not ``None``, a new name for the uploader
+        service. Multiple services cannot coexist with the same name.
+    """
+    u = upload.Uploader(helper_furl)
+    if override_name is not None:
+        u.name = override_name
+    u.setServiceParent(parent)
+    return u
+
+
 class AssistedUpload(unittest.TestCase):
     def setUp(self):
         self.tub = t = Tub()
@@ -145,13 +202,20 @@ class AssistedUpload(unittest.TestCase):
         # bogus host/port
         t.setLocation(b"bogus:1234")

-    def setUpHelper(self, basedir, helper_class=Helper_fake_upload):
+    def setUpHelper(self, basedir, chk_upload=CHKUploadHelper_fake, chk_checker=None):
         fileutil.make_dirs(basedir)
-        self.helper = h = helper_class(basedir,
-                                       self.s.storage_broker,
-                                       self.s.secret_holder,
-                                       None, None)
-        self.helper_furl = self.tub.registerReference(h)
+        self.helper = offloaded.Helper(
+            basedir,
+            self.s.storage_broker,
+            self.s.secret_holder,
+            None,
+            None,
+        )
+        if chk_upload is not None:
+            self.helper.chk_upload = chk_upload
+        if chk_checker is not None:
+            self.helper.chk_checker = chk_checker
+        self.helper_furl = self.tub.registerReference(self.helper)

     def tearDown(self):
         d = self.s.stopService()
@@ -159,34 +223,84 @@ class AssistedUpload(unittest.TestCase):
         d.addBoth(flush_but_dont_ignore)
         return d

     def test_one(self):
+        """
+        Some data that has never been uploaded before can be uploaded in CHK
+        format using the ``RIHelper`` provider and ``Uploader.upload``.
+ """ self.basedir = "helper/AssistedUpload/test_one" self.setUpHelper(self.basedir) - u = upload.Uploader(self.helper_furl) - u.setServiceParent(self.s) + u = make_uploader(self.helper_furl, self.s) d = wait_a_few_turns() def _ready(res): - assert u._helper - + self.assertTrue( + u._helper, + "Expected uploader to have a helper reference, had {} instead.".format( + u._helper, + ), + ) return upload_data(u, DATA, convergence=b"some convergence string") d.addCallback(_ready) + def _uploaded(results): the_uri = results.get_uri() - assert b"CHK" in the_uri + self.assertIn(b"CHK", the_uri) + self.assertNotEqual( + results.get_pushed_shares(), + 0, + ) d.addCallback(_uploaded) def _check_empty(res): + # Make sure the intermediate artifacts aren't left lying around. files = os.listdir(os.path.join(self.basedir, "CHK_encoding")) - self.failUnlessEqual(files, []) + self.assertEqual(files, []) files = os.listdir(os.path.join(self.basedir, "CHK_incoming")) - self.failUnlessEqual(files, []) + self.assertEqual(files, []) d.addCallback(_check_empty) return d + @inline_callbacks + def test_concurrent(self): + """ + The same data can be uploaded by more than one ``Uploader`` at a time. + """ + self.basedir = "helper/AssistedUpload/test_concurrent" + self.setUpHelper(self.basedir) + u1 = make_uploader(self.helper_furl, self.s, "u1") + u2 = make_uploader(self.helper_furl, self.s, "u2") + + yield wait_a_few_turns() + + for u in [u1, u2]: + self.assertTrue( + u._helper, + "Expected uploader to have a helper reference, had {} instead.".format( + u._helper, + ), + ) + + uploads = list( + upload_data(u, DATA, convergence=b"some convergence string") + for u + in [u1, u2] + ) + + result1, result2 = yield defer.gatherResults(uploads) + + self.assertEqual( + result1.get_uri(), + result2.get_uri(), + ) + # It would be really cool to assert that result1.get_pushed_shares() + + # result2.get_pushed_shares() == total_shares here. However, we're + # faking too much for that to be meaningful here. Also it doesn't + # hold because we don't actually push _anything_, we just lie about + # having pushed stuff. + def test_previous_upload_failed(self): self.basedir = "helper/AssistedUpload/test_previous_upload_failed" self.setUpHelper(self.basedir) @@ -214,8 +328,7 @@ class AssistedUpload(unittest.TestCase): f.write(aes.encrypt_data(encryptor, DATA)) f.close() - u = upload.Uploader(self.helper_furl) - u.setServiceParent(self.s) + u = make_uploader(self.helper_furl, self.s) d = wait_a_few_turns() @@ -237,29 +350,247 @@ class AssistedUpload(unittest.TestCase): return d + @inline_callbacks def test_already_uploaded(self): + """ + If enough shares to satisfy the needed parameter already exist, the upload + succeeds without pushing any shares. 
+ """ + params = FakeClient.DEFAULT_ENCODING_PARAMETERS + chk_checker = partial( + FakeCHKCheckerAndUEBFetcher, + sharemap=dictutil.DictOfSets({ + 0: {b"server0"}, + 1: {b"server1"}, + }), + ueb_data={ + "size": len(DATA), + "segment_size": min(params["max_segment_size"], len(DATA)), + "needed_shares": params["k"], + "total_shares": params["n"], + }, + ) self.basedir = "helper/AssistedUpload/test_already_uploaded" - self.setUpHelper(self.basedir, helper_class=Helper_already_uploaded) - u = upload.Uploader(self.helper_furl) - u.setServiceParent(self.s) + self.setUpHelper( + self.basedir, + chk_checker=chk_checker, + ) + u = make_uploader(self.helper_furl, self.s) - d = wait_a_few_turns() + yield wait_a_few_turns() - def _ready(res): - assert u._helper + assert u._helper - return upload_data(u, DATA, convergence=b"some convergence string") - d.addCallback(_ready) - def _uploaded(results): - the_uri = results.get_uri() - assert b"CHK" in the_uri - d.addCallback(_uploaded) + results = yield upload_data(u, DATA, convergence=b"some convergence string") + the_uri = results.get_uri() + assert b"CHK" in the_uri - def _check_empty(res): - files = os.listdir(os.path.join(self.basedir, "CHK_encoding")) - self.failUnlessEqual(files, []) - files = os.listdir(os.path.join(self.basedir, "CHK_incoming")) - self.failUnlessEqual(files, []) - d.addCallback(_check_empty) + files = os.listdir(os.path.join(self.basedir, "CHK_encoding")) + self.failUnlessEqual(files, []) + files = os.listdir(os.path.join(self.basedir, "CHK_incoming")) + self.failUnlessEqual(files, []) - return d + self.assertEqual( + results.get_pushed_shares(), + 0, + ) + + +class CHKCheckerAndUEBFetcherTests(SyncTestCase): + """ + Tests for ``CHKCheckerAndUEBFetcher``. + """ + def test_check_no_peers(self): + """ + If the supplied "peer getter" returns no peers then + ``CHKCheckerAndUEBFetcher.check`` returns a ``Deferred`` that fires + with ``False``. + """ + storage_index = b"a" * 16 + peers = {storage_index: []} + caf = offloaded.CHKCheckerAndUEBFetcher( + peers.get, + storage_index, + None, + ) + self.assertThat( + caf.check(), + succeeded(Equals(False)), + ) + + @inline_callbacks + def test_check_ueb_unavailable(self): + """ + If the UEB cannot be read from any of the peers supplied by the "peer + getter" then ``CHKCheckerAndUEBFetcher.check`` returns a ``Deferred`` + that fires with ``False``. + """ + storage_index = b"a" * 16 + serverid = b"b" * 20 + storage = StorageServer(self.mktemp(), serverid) + rref_without_ueb = LocalWrapper(storage, fireNow) + yield write_bad_share(rref_without_ueb, storage_index) + server_without_ueb = NoNetworkServer(serverid, rref_without_ueb) + peers = {storage_index: [server_without_ueb]} + caf = offloaded.CHKCheckerAndUEBFetcher( + peers.get, + storage_index, + None, + ) + self.assertThat( + caf.check(), + succeeded(Equals(False)), + ) + + @inline_callbacks + def test_not_enough_shares(self): + """ + If fewer shares are found than are required to reassemble the data then + ``CHKCheckerAndUEBFetcher.check`` returns a ``Deferred`` that fires + with ``False``. 
+ """ + storage_index = b"a" * 16 + serverid = b"b" * 20 + storage = StorageServer(self.mktemp(), serverid) + rref_with_ueb = LocalWrapper(storage, fireNow) + ueb = { + "needed_shares": 2, + "total_shares": 2, + "segment_size": 128 * 1024, + "size": 1024, + } + yield write_good_share(rref_with_ueb, storage_index, ueb, [0]) + + server_with_ueb = NoNetworkServer(serverid, rref_with_ueb) + peers = {storage_index: [server_with_ueb]} + caf = offloaded.CHKCheckerAndUEBFetcher( + peers.get, + storage_index, + None, + ) + self.assertThat( + caf.check(), + succeeded(Equals(False)), + ) + + @inline_callbacks + def test_enough_shares(self): + """ + If enough shares are found to reassemble the data then + ``CHKCheckerAndUEBFetcher.check`` returns a ``Deferred`` that fires + with share and share placement information. + """ + storage_index = b"a" * 16 + serverids = list( + ch * 20 + for ch + in [b"b", b"c"] + ) + storages = list( + StorageServer(self.mktemp(), serverid) + for serverid + in serverids + ) + rrefs_with_ueb = list( + LocalWrapper(storage, fireNow) + for storage + in storages + ) + ueb = { + "needed_shares": len(serverids), + "total_shares": len(serverids), + "segment_size": 128 * 1024, + "size": 1024, + } + for n, rref_with_ueb in enumerate(rrefs_with_ueb): + yield write_good_share(rref_with_ueb, storage_index, ueb, [n]) + + servers_with_ueb = list( + NoNetworkServer(serverid, rref_with_ueb) + for (serverid, rref_with_ueb) + in zip(serverids, rrefs_with_ueb) + ) + peers = {storage_index: servers_with_ueb} + caf = offloaded.CHKCheckerAndUEBFetcher( + peers.get, + storage_index, + None, + ) + self.assertThat( + caf.check(), + succeeded(MatchesListwise([ + Equals({ + n: {serverid} + for (n, serverid) + in enumerate(serverids) + }), + Equals(ueb), + IsInstance(bytes), + ])), + ) + + +def write_bad_share(storage_rref, storage_index): + """ + Write a share with a corrupt URI extension block. + """ + # Write some trash to the right bucket on this storage server. It won't + # have a recoverable UEB block. + return write_share(storage_rref, storage_index, [0], b"\0" * 1024) + + +def write_good_share(storage_rref, storage_index, ueb, sharenums): + """ + Write a valid share with the given URI extension block. + """ + write_proxy = make_write_bucket_proxy( + storage_rref, + None, + 1024, + ueb["segment_size"], + 1, + 1, + ueb["size"], + ) + # See allmydata/immutable/layout.py + offset = write_proxy._offsets["uri_extension"] + filler = b"\0" * (offset - len(write_proxy._offset_data)) + ueb_data = uri.pack_extension(ueb) + data = ( + write_proxy._offset_data + + filler + + pack(write_proxy.fieldstruct, len(ueb_data)) + + ueb_data + ) + return write_share(storage_rref, storage_index, sharenums, data) + + +@inline_callbacks +def write_share(storage_rref, storage_index, sharenums, sharedata): + """ + Write the given share data to the given storage index using the given + IStorageServer remote reference. + + :param foolscap.ipb.IRemoteReference storage_rref: A remote reference to + an IStorageServer. + + :param bytes storage_index: The storage index to which to write the share + data. + + :param [int] sharenums: The share numbers to which to write this sharedata. + + :param bytes sharedata: The ciphertext to write as the share. 
+ """ + ignored, writers = yield storage_rref.callRemote( + "allocate_buckets", + storage_index, + b"x" * 16, + b"x" * 16, + sharenums, + len(sharedata), + LocalWrapper(None), + + ) + [writer] = writers.values() + yield writer.callRemote("write", 0, sharedata) + yield writer.callRemote("close") diff --git a/src/allmydata/test/test_i2p_provider.py b/src/allmydata/test/test_i2p_provider.py index a724b300e..37f2333f5 100644 --- a/src/allmydata/test/test_i2p_provider.py +++ b/src/allmydata/test/test_i2p_provider.py @@ -277,6 +277,20 @@ class Provider(unittest.TestCase): i2p.local_i2p.assert_called_with("configdir") self.assertIs(h, handler) + def test_handler_launch_executable(self): + i2p = mock.Mock() + handler = object() + i2p.launch = mock.Mock(return_value=handler) + reactor = object() + + with mock_i2p(i2p): + p = i2p_provider.create(reactor, + FakeConfig(launch=True, + **{"i2p.executable": "myi2p"})) + h = p.get_i2p_handler() + self.assertIs(h, handler) + i2p.launch.assert_called_with(i2p_configdir=None, i2p_binary="myi2p") + def test_handler_default(self): i2p = mock.Mock() handler = object() diff --git a/src/allmydata/test/test_introducer.py b/src/allmydata/test/test_introducer.py index d99e18c4a..0475d3f6c 100644 --- a/src/allmydata/test/test_introducer.py +++ b/src/allmydata/test/test_introducer.py @@ -1,8 +1,26 @@ +""" +Ported to Python 3. +""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 + +from six import ensure_binary, ensure_text import os, re, itertools from base64 import b32decode import json -from mock import Mock, patch +from operator import ( + setitem, +) +from functools import ( + partial, +) from testtools.matchers import ( Is, @@ -39,8 +57,11 @@ from allmydata.util import pollmixin, idlib, fileutil, yamlutil from allmydata.util.iputil import ( listenOnUnused, ) +from allmydata.scripts.common import ( + write_introducer, +) import allmydata.test.common_util as testutil -from allmydata.test.common import ( +from .common import ( SyncTestCase, AsyncTestCase, AsyncBrokenTestCase, @@ -68,7 +89,8 @@ class Node(testutil.SignalMixin, testutil.ReallyEqualMixin, AsyncTestCase): def test_introducer_clients_unloadable(self): """ - Error if introducers.yaml exists but we can't read it + ``create_introducer_clients`` raises ``EnvironmentError`` if + ``introducers.yaml`` exists but we can't read it. 
""" basedir = u"introducer.IntroducerNode.test_introducer_clients_unloadable" os.mkdir(basedir) @@ -78,17 +100,10 @@ class Node(testutil.SignalMixin, testutil.ReallyEqualMixin, AsyncTestCase): f.write(u'---\n') os.chmod(yaml_fname, 0o000) self.addCleanup(lambda: os.chmod(yaml_fname, 0o700)) - # just mocking the yaml failure, as "yamlutil.safe_load" only - # returns None on some platforms for unreadable files - with patch("allmydata.client.yamlutil") as p: - p.safe_load = Mock(return_value=None) - - fake_tub = Mock() - config = read_config(basedir, "portnum") - - with self.assertRaises(EnvironmentError): - create_introducer_clients(config, fake_tub) + config = read_config(basedir, "portnum") + with self.assertRaises(EnvironmentError): + create_introducer_clients(config, Tub()) @defer.inlineCallbacks def test_furl(self): @@ -100,7 +115,7 @@ class Node(testutil.SignalMixin, testutil.ReallyEqualMixin, AsyncTestCase): q1 = yield create_introducer(basedir) del q1 # new nodes create unguessable furls in private/introducer.furl - ifurl = fileutil.read(private_fn) + ifurl = fileutil.read(private_fn, mode="r") self.failUnless(ifurl) ifurl = ifurl.strip() self.failIf(ifurl.endswith("/introducer"), ifurl) @@ -120,7 +135,7 @@ class Node(testutil.SignalMixin, testutil.ReallyEqualMixin, AsyncTestCase): q2 = yield create_introducer(basedir) del q2 self.failIf(os.path.exists(public_fn)) - ifurl2 = fileutil.read(private_fn) + ifurl2 = fileutil.read(private_fn, mode="r") self.failUnless(ifurl2) self.failUnlessEqual(ifurl2.strip(), guessable) @@ -155,7 +170,7 @@ class ServiceMixin(object): class Introducer(ServiceMixin, AsyncTestCase): def test_create(self): ic = IntroducerClient(None, "introducer.furl", u"my_nickname", - "my_version", "oldest_version", {}, fakeseq, + "my_version", "oldest_version", fakeseq, FilePath(self.mktemp())) self.failUnless(isinstance(ic, IntroducerClient)) @@ -169,7 +184,7 @@ def fakeseq(): seqnum_counter = itertools.count(1) def realseq(): - return seqnum_counter.next(), str(os.randint(1,100000)) + return next(seqnum_counter), str(os.randint(1,100000)) def make_ann(furl): ann = { "anonymous-storage-FURL": furl, @@ -188,13 +203,13 @@ class Client(AsyncTestCase): def test_duplicate_receive_v2(self): ic1 = IntroducerClient(None, "introducer.furl", u"my_nickname", - "ver23", "oldest_version", {}, fakeseq, + "ver23", "oldest_version", fakeseq, FilePath(self.mktemp())) # we use a second client just to create a different-looking # announcement ic2 = IntroducerClient(None, "introducer.furl", u"my_nickname", - "ver24","oldest_version",{}, fakeseq, + "ver24","oldest_version",fakeseq, FilePath(self.mktemp())) announcements = [] def _received(key_s, ann): @@ -206,7 +221,7 @@ class Client(AsyncTestCase): private_key, public_key = ed25519.create_signing_keypair() public_key_str = ed25519.string_from_verifying_key(public_key) - pubkey_s = remove_prefix(public_key_str, "pub-") + pubkey_s = remove_prefix(public_key_str, b"pub-") # ann1: ic1, furl1 # ann1a: ic1, furl1a (same SturdyRef, different connection hints) @@ -298,7 +313,7 @@ class Server(AsyncTestCase): i = IntroducerService() ic1 = IntroducerClient(None, "introducer.furl", u"my_nickname", - "ver23", "oldest_version", {}, realseq, + "ver23", "oldest_version", realseq, FilePath(self.mktemp())) furl1 = "pb://62ubehyunnyhzs7r6vdonnm2hpi52w6y@127.0.0.1:36106/gydnp" @@ -396,7 +411,7 @@ class Queue(SystemTestMixin, AsyncTestCase): tub2 = Tub() tub2.setServiceParent(self.parent) c = IntroducerClient(tub2, ifurl, - u"nickname", "version", "oldest", 
{}, fakeseq, + u"nickname", "version", "oldest", fakeseq, FilePath(self.mktemp())) furl1 = "pb://onug64tu@127.0.0.1:123/short" # base32("short") private_key, _ = ed25519.create_signing_keypair() @@ -477,7 +492,7 @@ class SystemTest(SystemTestMixin, AsyncTestCase): c = IntroducerClient(tub, self.introducer_furl, NICKNAME % str(i), "version", "oldest", - {"component": "component-v1"}, fakeseq, + fakeseq, FilePath(self.mktemp())) received_announcements[c] = {} def got(key_s_or_tubid, ann, announcements): @@ -581,7 +596,7 @@ class SystemTest(SystemTestMixin, AsyncTestCase): serverid0 = printable_serverids[0] ann = anns[serverid0] nick = ann["nickname"] - self.failUnlessEqual(type(nick), unicode) + self.assertIsInstance(nick, str) self.failUnlessEqual(nick, NICKNAME % "0") for c in publishing_clients: cdc = c._debug_counts @@ -592,7 +607,7 @@ class SystemTest(SystemTestMixin, AsyncTestCase): self.failUnlessEqual(cdc["outbound_message"], expected) # now check the web status, make sure it renders without error ir = introweb.IntroducerRoot(self.parent) - self.parent.nodeid = "NODEID" + self.parent.nodeid = b"NODEID" log.msg("_check1 done") return flattenString(None, ir._create_element()) d.addCallback(_check1) @@ -602,7 +617,7 @@ class SystemTest(SystemTestMixin, AsyncTestCase): self.assertIn(NICKNAME % "0", text) # a v2 client self.assertIn(NICKNAME % "1", text) # another v2 client for i in range(NUM_STORAGE): - self.assertIn(printable_serverids[i], text, + self.assertIn(ensure_text(printable_serverids[i]), text, (i,printable_serverids[i],text)) # make sure there isn't a double-base32ed string too self.assertNotIn(idlib.nodeid_b2a(printable_serverids[i]), text, @@ -728,8 +743,6 @@ class SystemTest(SystemTestMixin, AsyncTestCase): class FakeRemoteReference(object): def notifyOnDisconnect(self, *args, **kwargs): pass def getRemoteTubID(self): return "62ubehyunnyhzs7r6vdonnm2hpi52w6y" - def getLocationHints(self): return ["tcp:here.example.com:1234", - "tcp:there.example.com2345"] def getPeer(self): return address.IPv4Address("TCP", "remote.example.com", 3456) @@ -737,9 +750,8 @@ class ClientInfo(AsyncTestCase): def test_client_v2(self): introducer = IntroducerService() tub = introducer_furl = None - app_versions = {"whizzy": "fizzy"} client_v2 = IntroducerClient(tub, introducer_furl, NICKNAME % u"v2", - "my_version", "oldest", app_versions, + "my_version", "oldest", fakeseq, FilePath(self.mktemp())) #furl1 = "pb://62ubehyunnyhzs7r6vdonnm2hpi52w6y@127.0.0.1:0/swissnum" #ann_s = make_ann_t(client_v2, furl1, None, 10) @@ -751,7 +763,6 @@ class ClientInfo(AsyncTestCase): self.failUnlessEqual(len(subs), 1) s0 = subs[0] self.failUnlessEqual(s0.service_name, "storage") - self.failUnlessEqual(s0.app_versions, app_versions) self.failUnlessEqual(s0.nickname, NICKNAME % u"v2") self.failUnlessEqual(s0.version, "my_version") @@ -760,14 +771,13 @@ class Announcements(AsyncTestCase): def test_client_v2_signed(self): introducer = IntroducerService() tub = introducer_furl = None - app_versions = {"whizzy": "fizzy"} client_v2 = IntroducerClient(tub, introducer_furl, u"nick-v2", - "my_version", "oldest", app_versions, + "my_version", "oldest", fakeseq, FilePath(self.mktemp())) furl1 = "pb://62ubehyunnyhzs7r6vdonnm2hpi52w6y@127.0.0.1:0/swissnum" private_key, public_key = ed25519.create_signing_keypair() - public_key_str = remove_prefix(ed25519.string_from_verifying_key(public_key), "pub-") + public_key_str = remove_prefix(ed25519.string_from_verifying_key(public_key), b"pub-") ann_t0 = make_ann_t(client_v2, furl1, 
private_key, 10) canary0 = Referenceable() @@ -776,7 +786,6 @@ class Announcements(AsyncTestCase): self.failUnlessEqual(len(a), 1) self.assertThat(a[0].canary, Is(canary0)) self.failUnlessEqual(a[0].index, ("storage", public_key_str)) - self.failUnlessEqual(a[0].announcement["app-versions"], app_versions) self.failUnlessEqual(a[0].nickname, u"nick-v2") self.failUnlessEqual(a[0].service_name, "storage") self.failUnlessEqual(a[0].version, "my_version") @@ -788,25 +797,31 @@ class Announcements(AsyncTestCase): @defer.inlineCallbacks def test_client_cache(self): - basedir = "introducer/ClientSeqnums/test_client_cache_1" - fileutil.make_dirs(basedir) - cache_filepath = FilePath(os.path.join(basedir, "private", - "introducer_default_cache.yaml")) + """ + Announcements received by an introducer client are written to that + introducer client's cache file. + """ + basedir = FilePath("introducer/ClientSeqnums/test_client_cache_1") + private = basedir.child("private") + private.makedirs() + write_introducer(basedir, "default", "nope") + cache_filepath = basedir.descendant([ + "private", + "introducer_default_cache.yaml", + ]) # if storage is enabled, the Client will publish its storage server # during startup (although the announcement will wait in a queue # until the introducer connection is established). To avoid getting # confused by this, disable storage. - with open(os.path.join(basedir, "tahoe.cfg"), "w") as f: - f.write("[client]\n") - f.write("introducer.furl = nope\n") - f.write("[storage]\n") - f.write("enabled = false\n") + with basedir.child("tahoe.cfg").open("w") as f: + f.write(b"[storage]\n") + f.write(b"enabled = false\n") - c = yield create_client(basedir) + c = yield create_client(basedir.path) ic = c.introducer_clients[0] private_key, public_key = ed25519.create_signing_keypair() - public_key_str = remove_prefix(ed25519.string_from_verifying_key(public_key), "pub-") + public_key_str = remove_prefix(ed25519.string_from_verifying_key(public_key), b"pub-") furl1 = "pb://onug64tu@127.0.0.1:123/short" # base32("short") ann_t = make_ann_t(ic, furl1, private_key, 1) @@ -816,7 +831,7 @@ class Announcements(AsyncTestCase): # check the cache for the announcement announcements = self._load_cache(cache_filepath) self.failUnlessEqual(len(announcements), 1) - self.failUnlessEqual(announcements[0]['key_s'], public_key_str) + self.failUnlessEqual(ensure_binary(announcements[0]['key_s']), public_key_str) ann = announcements[0]["ann"] self.failUnlessEqual(ann["anonymous-storage-FURL"], furl1) self.failUnlessEqual(ann["seqnum"], 1) @@ -829,7 +844,7 @@ class Announcements(AsyncTestCase): yield flushEventualQueue() announcements = self._load_cache(cache_filepath) self.failUnlessEqual(len(announcements), 1) - self.failUnlessEqual(announcements[0]['key_s'], public_key_str) + self.failUnlessEqual(ensure_binary(announcements[0]['key_s']), public_key_str) ann = announcements[0]["ann"] self.failUnlessEqual(ann["anonymous-storage-FURL"], furl2) self.failUnlessEqual(ann["seqnum"], 2) @@ -837,7 +852,7 @@ class Announcements(AsyncTestCase): # but a third announcement with a different key should add to the # cache private_key2, public_key2 = ed25519.create_signing_keypair() - public_key_str2 = remove_prefix(ed25519.string_from_verifying_key(public_key2), "pub-") + public_key_str2 = remove_prefix(ed25519.string_from_verifying_key(public_key2), b"pub-") furl3 = "pb://onug64tu@127.0.0.1:456/short" ann_t3 = make_ann_t(ic, furl3, private_key2, 1) ic.got_announcements([ann_t3]) @@ -846,7 +861,7 @@ class 
Announcements(AsyncTestCase): announcements = self._load_cache(cache_filepath) self.failUnlessEqual(len(announcements), 2) self.failUnlessEqual(set([public_key_str, public_key_str2]), - set([a["key_s"] for a in announcements])) + set([ensure_binary(a["key_s"]) for a in announcements])) self.failUnlessEqual(set([furl2, furl3]), set([a["ann"]["anonymous-storage-FURL"] for a in announcements])) @@ -854,7 +869,7 @@ class Announcements(AsyncTestCase): # test loading yield flushEventualQueue() ic2 = IntroducerClient(None, "introducer.furl", u"my_nickname", - "my_version", "oldest_version", {}, fakeseq, + "my_version", "oldest_version", fakeseq, ic._cache_filepath) announcements = {} def got(key_s, ann): @@ -869,7 +884,7 @@ class Announcements(AsyncTestCase): self.failUnlessEqual(announcements[public_key_str2]["anonymous-storage-FURL"], furl3) - c2 = yield create_client(basedir) + c2 = yield create_client(basedir.path) c2.introducer_clients[0]._load_announcements() yield flushEventualQueue() self.assertEqual(c2.storage_broker.get_all_serverids(), @@ -879,27 +894,24 @@ class ClientSeqnums(AsyncBrokenTestCase): @defer.inlineCallbacks def test_client(self): - basedir = "introducer/ClientSeqnums/test_client" - fileutil.make_dirs(basedir) + basedir = FilePath("introducer/ClientSeqnums/test_client") + private = basedir.child("private") + private.makedirs() + write_introducer(basedir, "default", "nope") # if storage is enabled, the Client will publish its storage server # during startup (although the announcement will wait in a queue # until the introducer connection is established). To avoid getting # confused by this, disable storage. - f = open(os.path.join(basedir, "tahoe.cfg"), "w") - f.write("[client]\n") - f.write("introducer.furl = nope\n") - f.write("[storage]\n") - f.write("enabled = false\n") - f.close() + with basedir.child("tahoe.cfg").open("w") as f: + f.write(b"[storage]\n") + f.write(b"enabled = false\n") - c = yield create_client(basedir) + c = yield create_client(basedir.path) ic = c.introducer_clients[0] outbound = ic._outbound_announcements published = ic._published_announcements def read_seqnum(): - f = open(os.path.join(basedir, "announcement-seqnum")) - seqnum = f.read().strip() - f.close() + seqnum = basedir.child("announcement-seqnum").getContent() return int(seqnum) ic.publish("sA", {"key": "value1"}, c._node_private_key) @@ -907,7 +919,9 @@ class ClientSeqnums(AsyncBrokenTestCase): self.failUnless("sA" in outbound) self.failUnlessEqual(outbound["sA"]["seqnum"], 1) nonce1 = outbound["sA"]["nonce"] - self.failUnless(isinstance(nonce1, str)) + self.failUnless(isinstance(nonce1, bytes)) + # Make nonce unicode, to match JSON: + outbound["sA"]["nonce"] = str(nonce1, "utf-8") self.failUnlessEqual(json.loads(published["sA"][0]), outbound["sA"]) # [1] is the signature, [2] is the pubkey @@ -921,8 +935,11 @@ class ClientSeqnums(AsyncBrokenTestCase): self.failUnless("sA" in outbound) self.failUnlessEqual(outbound["sA"]["seqnum"], 2) nonce2 = outbound["sA"]["nonce"] - self.failUnless(isinstance(nonce2, str)) + self.failUnless(isinstance(nonce2, bytes)) self.failIfEqual(nonce1, nonce2) + # Make nonce unicode, to match JSON: + outbound["sA"]["nonce"] = str(nonce2, "utf-8") + outbound["sB"]["nonce"] = str(outbound["sB"]["nonce"], "utf-8") self.failUnlessEqual(json.loads(published["sA"][0]), outbound["sA"]) self.failUnlessEqual(json.loads(published["sB"][0]), @@ -954,7 +971,7 @@ class NonV1Server(SystemTestMixin, AsyncTestCase): tub.setServiceParent(self.parent) listenOnUnused(tub) c = 
IntroducerClient(tub, self.introducer_furl, - u"nickname-client", "version", "oldest", {}, + u"nickname-client", "version", "oldest", fakeseq, FilePath(self.mktemp())) announcements = {} def got(key_s, ann): @@ -982,8 +999,8 @@ class DecodeFurl(SyncTestCase): furl = 'pb://t5g7egomnnktbpydbuijt6zgtmw4oqi5@127.0.0.1:51857/hfzv36i' m = re.match(r'pb://(\w+)@', furl) assert m - nodeid = b32decode(m.group(1).upper()) - self.failUnlessEqual(nodeid, "\x9fM\xf2\x19\xcckU0\xbf\x03\r\x10\x99\xfb&\x9b-\xc7A\x1d") + nodeid = b32decode(m.group(1).upper().encode("ascii")) + self.failUnlessEqual(nodeid, b"\x9fM\xf2\x19\xcckU0\xbf\x03\r\x10\x99\xfb&\x9b-\xc7A\x1d") class Signatures(SyncTestCase): @@ -995,11 +1012,11 @@ class Signatures(SyncTestCase): (msg, sig, key) = ann_t self.failUnlessEqual(type(msg), type("".encode("utf-8"))) # bytes self.failUnlessEqual(json.loads(msg.decode("utf-8")), ann) - self.failUnless(sig.startswith("v0-")) - self.failUnless(key.startswith("v0-")) + self.failUnless(sig.startswith(b"v0-")) + self.failUnless(key.startswith(b"v0-")) (ann2,key2) = unsign_from_foolscap(ann_t) self.failUnlessEqual(ann2, ann) - self.failUnlessEqual("pub-" + key2, public_key_str) + self.failUnlessEqual(b"pub-" + key2, public_key_str) # not signed self.failUnlessRaises(UnknownKeyError, @@ -1014,29 +1031,58 @@ class Signatures(SyncTestCase): # unrecognized signatures self.failUnlessRaises(UnknownKeyError, - unsign_from_foolscap, (bad_msg, "v999-sig", key)) + unsign_from_foolscap, (bad_msg, b"v999-sig", key)) self.failUnlessRaises(UnknownKeyError, - unsign_from_foolscap, (bad_msg, sig, "v999-key")) + unsign_from_foolscap, (bad_msg, sig, b"v999-key")) def test_unsigned_announcement(self): - ed25519.verifying_key_from_string(b"pub-v0-wodst6ly4f7i7akt2nxizsmmy2rlmer6apltl56zctn67wfyu5tq") - mock_tub = Mock() + """ + An incorrectly signed announcement is not delivered to subscribers. + """ + private_key, public_key = ed25519.create_signing_keypair() + public_key_str = ed25519.string_from_verifying_key(public_key) + ic = IntroducerClient( - mock_tub, - u"pb://", + Tub(), + "pb://", u"fake_nick", "0.0.0", "1.2.3", - {}, (0, u"i am a nonce"), - "invalid", + FilePath(self.mktemp()), + ) + received = {} + ic.subscribe_to("good-stuff", partial(setitem, received)) + + # Deliver a good message to prove our test code is valid. + ann = {"service-name": "good-stuff", "payload": "hello"} + ann_t = sign_to_foolscap(ann, private_key) + ic.got_announcements([ann_t]) + + self.assertEqual( + {public_key_str[len("pub-"):]: ann}, + received, + ) + received.clear() + + # Now deliver one without a valid signature and observe that it isn't + # delivered to the subscriber. + ann = {"service-name": "good-stuff", "payload": "bad stuff"} + (msg, sig, key) = sign_to_foolscap(ann, private_key) + # Drop a base32 word from the middle of the key to invalidate the + # signature. + sig_a = bytearray(sig) + sig_a[20:22] = [] + sig = bytes(sig_a) + ann_t = (msg, sig, key) + ic.got_announcements([ann_t]) + + # The received announcements dict should remain empty because we + # should not receive the announcement with the invalid signature. 
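
The corruption step above works because Ed25519 verification succeeds only when the signature matches the message and public key byte for byte; excising two base32 characters from the serialized signature guarantees a mismatch, so the client must drop the announcement (the assertEqual that follows checks exactly that). A minimal standalone sketch of the same property, using the pyca/cryptography package rather than allmydata's ed25519 wrapper (an illustrative assumption; the test itself exercises the real wrapper):

    # A corrupted Ed25519 signature must fail verification.
    from cryptography.exceptions import InvalidSignature
    from cryptography.hazmat.primitives.asymmetric.ed25519 import Ed25519PrivateKey

    private_key = Ed25519PrivateKey.generate()
    message = b'{"service-name": "good-stuff", "payload": "hello"}'
    signature = private_key.sign(message)

    # The intact signature verifies; verify() returns None on success.
    private_key.public_key().verify(signature, message)

    # Excise two bytes, analogous to dropping a base32 word above;
    # verification now raises InvalidSignature.
    corrupt = signature[:20] + signature[22:]
    try:
        private_key.public_key().verify(corrupt, message)
    except InvalidSignature:
        print("corrupt signature rejected, as expected")
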
+ self.assertEqual( + {}, + received, ) - self.assertEqual(0, ic._debug_counts["inbound_announcement"]) - ic.got_announcements([ - ("message", "v0-aaaaaaa", "v0-wodst6ly4f7i7akt2nxizsmmy2rlmer6apltl56zctn67wfyu5tq") - ]) - # we should have rejected this announcement due to a bad signature - self.assertEqual(0, ic._debug_counts["inbound_announcement"]) # add tests of StorageFarmBroker: if it receives duplicate announcements, it diff --git a/src/allmydata/test/test_iputil.py b/src/allmydata/test/test_iputil.py index f403de35b..081c80ee3 100644 --- a/src/allmydata/test/test_iputil.py +++ b/src/allmydata/test/test_iputil.py @@ -13,9 +13,16 @@ from future.utils import PY2, native_str if PY2: from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 -import re, errno, subprocess, os, socket +import os, socket import gc +from testtools.matchers import ( + MatchesAll, + IsInstance, + AllMatch, + MatchesPredicate, +) + from twisted.trial import unittest from tenacity import retry, stop_after_attempt @@ -23,172 +30,14 @@ from tenacity import retry, stop_after_attempt from foolscap.api import Tub from allmydata.util import iputil, gcutil -import allmydata.test.common_util as testutil -from allmydata.util.namespace import Namespace +from ..util.iputil import ( + get_local_addresses_sync, +) -DOTTED_QUAD_RE=re.compile(r"^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$") - -# Mock output from subprocesses should be bytes, that's what happens on both -# Python 2 and Python 3: -MOCK_IPADDR_OUTPUT = b"""\ -1: lo: mtu 16436 qdisc noqueue state UNKNOWN \n\ - link/loopback 00:00:00:00:00:00 brd 00:00:00:00:00:00 - inet 127.0.0.1/8 scope host lo - inet6 ::1/128 scope host \n\ - valid_lft forever preferred_lft forever -2: eth1: mtu 1500 qdisc pfifo_fast state UP qlen 1000 - link/ether d4:3d:7e:01:b4:3e brd ff:ff:ff:ff:ff:ff - inet 192.168.0.6/24 brd 192.168.0.255 scope global eth1 - inet6 fe80::d63d:7eff:fe01:b43e/64 scope link \n\ - valid_lft forever preferred_lft forever -3: wlan0: mtu 1500 qdisc mq state UP qlen 1000 - link/ether 90:f6:52:27:15:0a brd ff:ff:ff:ff:ff:ff - inet 192.168.0.2/24 brd 192.168.0.255 scope global wlan0 - inet6 fe80::92f6:52ff:fe27:150a/64 scope link \n\ - valid_lft forever preferred_lft forever -""" - -MOCK_IFCONFIG_OUTPUT = b"""\ -eth1 Link encap:Ethernet HWaddr d4:3d:7e:01:b4:3e \n\ - inet addr:192.168.0.6 Bcast:192.168.0.255 Mask:255.255.255.0 - inet6 addr: fe80::d63d:7eff:fe01:b43e/64 Scope:Link - UP BROADCAST RUNNING MULTICAST MTU:1500 Metric:1 - RX packets:154242234 errors:0 dropped:0 overruns:0 frame:0 - TX packets:155461891 errors:0 dropped:0 overruns:0 carrier:0 - collisions:0 txqueuelen:1000 \n\ - RX bytes:84367213640 (78.5 GiB) TX bytes:73401695329 (68.3 GiB) - Interrupt:20 Memory:f4f00000-f4f20000 \n\ - -lo Link encap:Local Loopback \n\ - inet addr:127.0.0.1 Mask:255.0.0.0 - inet6 addr: ::1/128 Scope:Host - UP LOOPBACK RUNNING MTU:16436 Metric:1 - RX packets:27449267 errors:0 dropped:0 overruns:0 frame:0 - TX packets:27449267 errors:0 dropped:0 overruns:0 carrier:0 - collisions:0 txqueuelen:0 \n\ - RX bytes:192643017823 (179.4 GiB) TX bytes:192643017823 (179.4 GiB) - -wlan0 Link encap:Ethernet HWaddr 90:f6:52:27:15:0a \n\ - inet addr:192.168.0.2 Bcast:192.168.0.255 Mask:255.255.255.0 - inet6 addr: fe80::92f6:52ff:fe27:150a/64 Scope:Link - UP BROADCAST RUNNING MULTICAST MTU:1500 Metric:1 - RX packets:12352750 errors:0 dropped:0 overruns:0 frame:0 - TX packets:4501451 errors:0 dropped:0 
overruns:0 carrier:0 - collisions:0 txqueuelen:1000 \n\ - RX bytes:3916475942 (3.6 GiB) TX bytes:458353654 (437.1 MiB) -""" - -# This is actually from a VirtualBox VM running XP. -MOCK_ROUTE_OUTPUT = b"""\ -=========================================================================== -Interface List -0x1 ........................... MS TCP Loopback interface -0x2 ...08 00 27 c3 80 ad ...... AMD PCNET Family PCI Ethernet Adapter - Packet Scheduler Miniport -=========================================================================== -=========================================================================== -Active Routes: -Network Destination Netmask Gateway Interface Metric - 0.0.0.0 0.0.0.0 10.0.2.2 10.0.2.15 20 - 10.0.2.0 255.255.255.0 10.0.2.15 10.0.2.15 20 - 10.0.2.15 255.255.255.255 127.0.0.1 127.0.0.1 20 - 10.255.255.255 255.255.255.255 10.0.2.15 10.0.2.15 20 - 127.0.0.0 255.0.0.0 127.0.0.1 127.0.0.1 1 - 224.0.0.0 240.0.0.0 10.0.2.15 10.0.2.15 20 - 255.255.255.255 255.255.255.255 10.0.2.15 10.0.2.15 1 -Default Gateway: 10.0.2.2 -=========================================================================== -Persistent Routes: - None -""" - -UNIX_TEST_ADDRESSES = set(["127.0.0.1", "192.168.0.6", "192.168.0.2", "192.168.0.10"]) -WINDOWS_TEST_ADDRESSES = set(["127.0.0.1", "10.0.2.15", "192.168.0.10"]) -CYGWIN_TEST_ADDRESSES = set(["127.0.0.1", "192.168.0.10"]) - - -class FakeProcess(object): - def __init__(self, output, err): - self.output = output - self.err = err - def communicate(self): - return (self.output, self.err) - - -class ListAddresses(testutil.SignalMixin, unittest.TestCase): - def test_get_local_ip_for(self): - addr = iputil.get_local_ip_for('127.0.0.1') - self.failUnless(DOTTED_QUAD_RE.match(addr)) - # Bytes can be taken as input: - bytes_addr = iputil.get_local_ip_for(b'127.0.0.1') - self.assertEqual(addr, bytes_addr) - # The output is a native string: - self.assertIsInstance(addr, native_str) - - def test_list_async(self): - d = iputil.get_local_addresses_async() - def _check(addresses): - self.failUnlessIn("127.0.0.1", addresses) - self.failIfIn("0.0.0.0", addresses) - d.addCallbacks(_check) - return d - # David A.'s OpenSolaris box timed out on this test one time when it was at 2s. 
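
The block of mock-based tests being removed here fed canned ``ip``/``ifconfig``/``route`` output through a patched ``subprocess.Popen``. Their replacement, the ``GetLocalAddressesSyncTests`` class added at the end of this file's diff, asserts properties of real return values with testtools matchers instead. A rough standalone sketch of that matcher style, assuming only testtools and the standard library (the class and method names here are illustrative, not tahoe's):

    # Matcher-based style: assert properties of real values rather than
    # parsing canned command output through mocks.
    import socket

    from testtools import TestCase
    from testtools.matchers import (
        AllMatch,
        IsInstance,
        MatchesAll,
        MatchesPredicate,
    )

    class MatcherStyleExample(TestCase):
        def test_all_ipv4(self):
            # Stand-in for a call like get_local_addresses_sync().
            addresses = ["127.0.0.1"]
            self.assertThat(
                addresses,
                MatchesAll(
                    IsInstance(list),
                    AllMatch(
                        MatchesPredicate(
                            lambda addr: socket.inet_pton(socket.AF_INET, addr),
                            "%r is not an IPv4 address.",
                        ),
                    ),
                ),
            )
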
- test_list_async.timeout=4 - - def _test_list_async_mock(self, command, output, expected): - ns = Namespace() - ns.first = True - - def call_Popen(args, bufsize=0, executable=None, stdin=None, stdout=None, stderr=None, - preexec_fn=None, close_fds=False, shell=False, cwd=None, env=None, - universal_newlines=False, startupinfo=None, creationflags=0): - if ns.first: - ns.first = False - e = OSError("EINTR") - e.errno = errno.EINTR - raise e - elif os.path.basename(args[0]) == command: - return FakeProcess(output, "") - else: - e = OSError("[Errno 2] No such file or directory") - e.errno = errno.ENOENT - raise e - self.patch(subprocess, 'Popen', call_Popen) - self.patch(os.path, 'isfile', lambda x: True) - - def call_get_local_ip_for(target): - if target in ("localhost", "127.0.0.1"): - return "127.0.0.1" - else: - return "192.168.0.10" - self.patch(iputil, 'get_local_ip_for', call_get_local_ip_for) - - def call_which(name): - return [name] - self.patch(iputil, 'which', call_which) - - d = iputil.get_local_addresses_async() - def _check(addresses): - self.failUnlessEquals(set(addresses), set(expected)) - d.addCallbacks(_check) - return d - - def test_list_async_mock_ip_addr(self): - self.patch(iputil, 'platform', "linux2") - return self._test_list_async_mock("ip", MOCK_IPADDR_OUTPUT, UNIX_TEST_ADDRESSES) - - def test_list_async_mock_ifconfig(self): - self.patch(iputil, 'platform', "linux2") - return self._test_list_async_mock("ifconfig", MOCK_IFCONFIG_OUTPUT, UNIX_TEST_ADDRESSES) - - def test_list_async_mock_route(self): - self.patch(iputil, 'platform', "win32") - return self._test_list_async_mock("route.exe", MOCK_ROUTE_OUTPUT, WINDOWS_TEST_ADDRESSES) - - def test_list_async_mock_cygwin(self): - self.patch(iputil, 'platform', "cygwin") - return self._test_list_async_mock(None, None, CYGWIN_TEST_ADDRESSES) - +from .common import ( + SyncTestCase, +) class ListenOnUsed(unittest.TestCase): """Tests for listenOnUnused.""" @@ -261,3 +110,29 @@ class GcUtil(unittest.TestCase): self.assertEqual(len(collections), 0) tracker.allocate() self.assertEqual(len(collections), 1) + + +class GetLocalAddressesSyncTests(SyncTestCase): + """ + Tests for ``get_local_addresses_sync``. + """ + def test_some_ipv4_addresses(self): + """ + ``get_local_addresses_sync`` returns a list of IPv4 addresses as native + strings. + """ + self.assertThat( + get_local_addresses_sync(), + MatchesAll( + IsInstance(list), + AllMatch( + MatchesAll( + IsInstance(native_str), + MatchesPredicate( + lambda addr: socket.inet_pton(socket.AF_INET, addr), + "%r is not an IPv4 address.", + ), + ), + ), + ), + ) diff --git a/src/allmydata/test/test_json_metadata.py b/src/allmydata/test/test_json_metadata.py index 75d4e1567..a0cb9c142 100644 --- a/src/allmydata/test/test_json_metadata.py +++ b/src/allmydata/test/test_json_metadata.py @@ -1,3 +1,14 @@ +""" +Ported to Python 3. 
+""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 from twisted.trial.unittest import TestCase diff --git a/src/allmydata/test/test_log.py b/src/allmydata/test/test_log.py index eecbda9e3..bf079aaeb 100644 --- a/src/allmydata/test/test_log.py +++ b/src/allmydata/test/test_log.py @@ -9,7 +9,7 @@ from __future__ import absolute_import from __future__ import division from __future__ import print_function -from future.utils import PY2 +from future.utils import PY2, native_str if PY2: from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 @@ -154,3 +154,17 @@ class Log(unittest.TestCase): obj.log("four") self.assertEqual([m[2] for m in self.messages], ["grand", "par1", "par2", "msg1", "msg1"]) + + def test_native_string_keys(self): + """Keyword argument keys are all native strings.""" + class LoggingObject17(tahoe_log.PrefixingLogMixin): + pass + + obj = LoggingObject17() + # Native string by default: + obj.log(hello="world") + # Will be Unicode on Python 2: + obj.log(**{"my": "message"}) + for message in self.messages: + for k in message[-1].keys(): + self.assertIsInstance(k, native_str) diff --git a/src/allmydata/test/test_multi_introducers.py b/src/allmydata/test/test_multi_introducers.py index 34e6e5d96..520a5a69a 100644 --- a/src/allmydata/test/test_multi_introducers.py +++ b/src/allmydata/test/test_multi_introducers.py @@ -24,9 +24,6 @@ class MultiIntroTests(unittest.TestCase): config = {'hide-ip':False, 'listen': 'tcp', 'port': None, 'location': None, 'hostname': 'example.net'} write_node_config(c, config) - fake_furl = "furl1" - c.write("[client]\n") - c.write("introducer.furl = %s\n" % fake_furl) c.write("[storage]\n") c.write("enabled = false\n") c.close() @@ -36,8 +33,10 @@ class MultiIntroTests(unittest.TestCase): @defer.inlineCallbacks def test_introducer_count(self): - """ Ensure that the Client creates same number of introducer clients - as found in "basedir/private/introducers" config file. """ + """ + If there are two introducers configured in ``introducers.yaml`` then + ``Client`` creates two introducer clients. + """ connections = { 'introducers': { u'intro1':{ 'furl': 'furl1' }, @@ -50,25 +49,13 @@ class MultiIntroTests(unittest.TestCase): ic_count = len(myclient.introducer_clients) # assertions - self.failUnlessEqual(ic_count, 3) - - @defer.inlineCallbacks - def test_introducer_count_commented(self): - """ Ensure that the Client creates same number of introducer clients - as found in "basedir/private/introducers" config file when there is one - commented.""" - self.yaml_path.setContent(INTRODUCERS_CFG_FURLS_COMMENTED) - # get a client and count of introducer_clients - myclient = yield create_client(self.basedir) - ic_count = len(myclient.introducer_clients) - - # assertions - self.failUnlessEqual(ic_count, 2) + self.failUnlessEqual(ic_count, len(connections["introducers"])) @defer.inlineCallbacks def test_read_introducer_furl_from_tahoecfg(self): - """ Ensure that the Client reads the introducer.furl config item from - the tahoe.cfg file. """ + """ + The deprecated [client]introducer.furl item is still read and respected. 
+ """ # create a custom tahoe.cfg c = open(os.path.join(self.basedir, "tahoe.cfg"), "w") config = {'hide-ip':False, 'listen': 'tcp', @@ -87,20 +74,42 @@ class MultiIntroTests(unittest.TestCase): # assertions self.failUnlessEqual(fake_furl, tahoe_cfg_furl) + self.assertEqual( + list( + warning["message"] + for warning + in self.flushWarnings() + if warning["category"] is DeprecationWarning + ), + ["tahoe.cfg [client]introducer.furl is deprecated; " + "use private/introducers.yaml instead."], + ) @defer.inlineCallbacks def test_reject_default_in_yaml(self): - connections = {'introducers': { - u'default': { 'furl': 'furl1' }, - }} + """ + If an introducer is configured in tahoe.cfg with the deprecated + [client]introducer.furl then a "default" introducer in + introducers.yaml is rejected. + """ + connections = { + 'introducers': { + u'default': { 'furl': 'furl1' }, + }, + } self.yaml_path.setContent(yamlutil.safe_dump(connections)) + FilePath(self.basedir).child("tahoe.cfg").setContent( + "[client]\n" + "introducer.furl = furl1\n" + ) + with self.assertRaises(ValueError) as ctx: yield create_client(self.basedir) self.assertEquals( str(ctx.exception), - "'default' introducer furl cannot be specified in introducers.yaml; please " - "fix impossible configuration.", + "'default' introducer furl cannot be specified in tahoe.cfg and introducers.yaml; " + "please fix impossible configuration.", ) SIMPLE_YAML = """ @@ -126,8 +135,6 @@ class NoDefault(unittest.TestCase): config = {'hide-ip':False, 'listen': 'tcp', 'port': None, 'location': None, 'hostname': 'example.net'} write_node_config(c, config) - c.write("[client]\n") - c.write("# introducer.furl =\n") # omit default c.write("[storage]\n") c.write("enabled = false\n") c.close() diff --git a/src/allmydata/test/test_node.py b/src/allmydata/test/test_node.py index ecbf28d80..e44fd5743 100644 --- a/src/allmydata/test/test_node.py +++ b/src/allmydata/test/test_node.py @@ -1,42 +1,78 @@ +""" +Ported to Python 3. 
+""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from future.utils import PY2, native_str +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 + import base64 import os import stat import sys import time -import mock from textwrap import dedent +import configparser + +from hypothesis import ( + given, +) +from hypothesis.strategies import ( + integers, + sets, +) from unittest import skipIf +from twisted.python.filepath import ( + FilePath, +) from twisted.trial import unittest from twisted.internet import defer -from twisted.python import log -from foolscap.api import flushEventualQueue import foolscap.logging.log from twisted.application import service from allmydata.node import ( + PortAssignmentRequired, + PrivacyError, + tub_listen_on, create_tub_options, create_main_tub, create_node_dir, + create_default_connection_handlers, create_connection_handlers, config_from_string, read_config, MissingConfigEntry, _tub_portlocation, formatTimeTahoeStyle, + UnescapedHashError, ) from allmydata.introducer.server import create_introducer from allmydata import client from allmydata.util import fileutil, iputil from allmydata.util.namespace import Namespace -from allmydata.util.configutil import UnknownConfigError +from allmydata.util.configutil import ( + ValidConfiguration, + UnknownConfigError, +) + from allmydata.util.i2p_provider import create as create_i2p_provider from allmydata.util.tor_provider import create as create_tor_provider import allmydata.test.common_util as testutil +from .common import ( + ConstantAddresses, +) + +def port_numbers(): + return integers(min_value=1, max_value=2 ** 16 - 1) class LoggingMultiService(service.MultiService): def log(self, msg, **kw): @@ -55,7 +91,7 @@ def testing_tub(config_data=''): i2p_provider = create_i2p_provider(reactor, config) tor_provider = create_tor_provider(reactor, config) - handlers = create_connection_handlers(reactor, config, i2p_provider, tor_provider) + handlers = create_connection_handlers(config, i2p_provider, tor_provider) default_connection_handlers, foolscap_connection_handlers = handlers tub_options = create_tub_options(config) @@ -69,22 +105,39 @@ def testing_tub(config_data=''): class TestCase(testutil.SignalMixin, unittest.TestCase): - @defer.inlineCallbacks def setUp(self): testutil.SignalMixin.setUp(self) self.parent = LoggingMultiService() - self.parent.startService() - self._available_port = yield iputil.allocate_tcp_port() + # We can use a made-up port number because these tests never actually + # try to bind the port. We'll use a low-numbered one that's likely to + # conflict with another service to prove it. + self._available_port = 22 - def tearDown(self): - log.msg("%s.tearDown" % self.__class__.__name__) - testutil.SignalMixin.tearDown(self) - d = defer.succeed(None) - d.addCallback(lambda res: self.parent.stopService()) - d.addCallback(flushEventualQueue) - return d + def _test_location( + self, + expected_addresses, + tub_port=None, + tub_location=None, + local_addresses=None, + ): + """ + Verify that a Tub configured with the given *tub.port* and *tub.location* + values generates fURLs with the given addresses in its location hints. 
- def _test_location(self, basedir, expected_addresses, tub_port=None, tub_location=None, local_addresses=None): + :param [str] expected_addresses: The addresses which must appear in + the generated fURL for the test to pass. All addresses must + appear. + + :param tub_port: If not ``None`` then a value for the *tub.port* + configuration item. + + :param tub_location: If not ``None`` then a value for the *tub.location* + configuration item. + + :param local_addresses: If not ``None`` then a list of addresses to + supply to the system under test as local addresses. + """ + basedir = self.mktemp() create_node_dir(basedir, "testing") config_data = "[node]\n" if tub_port: @@ -92,34 +145,30 @@ class TestCase(testutil.SignalMixin, unittest.TestCase): if tub_location is not None: config_data += "tub.location = {}\n".format(tub_location) - if local_addresses: + if local_addresses is not None: self.patch(iputil, 'get_local_addresses_sync', lambda: local_addresses) tub = testing_tub(config_data) - tub.setServiceParent(self.parent) class Foo(object): pass furl = tub.registerReference(Foo()) for address in expected_addresses: - self.failUnlessIn(address, furl) + self.assertIn(address, furl) def test_location1(self): - return self._test_location(basedir="test_node/test_location1", - expected_addresses=["192.0.2.0:1234"], + return self._test_location(expected_addresses=["192.0.2.0:1234"], tub_location="192.0.2.0:1234") def test_location2(self): - return self._test_location(basedir="test_node/test_location2", - expected_addresses=["192.0.2.0:1234", "example.org:8091"], + return self._test_location(expected_addresses=["192.0.2.0:1234", "example.org:8091"], tub_location="192.0.2.0:1234,example.org:8091") def test_location_not_set(self): """Checks the autogenerated furl when tub.location is not set.""" return self._test_location( - basedir="test_node/test_location3", expected_addresses=[ "127.0.0.1:{}".format(self._available_port), "192.0.2.0:{}".format(self._available_port), @@ -131,7 +180,6 @@ class TestCase(testutil.SignalMixin, unittest.TestCase): def test_location_auto_and_explicit(self): """Checks the autogenerated furl when tub.location contains 'AUTO'.""" return self._test_location( - basedir="test_node/test_location4", expected_addresses=[ "127.0.0.1:{}".format(self._available_port), "192.0.2.0:{}".format(self._available_port), @@ -145,13 +193,13 @@ class TestCase(testutil.SignalMixin, unittest.TestCase): def test_tahoe_cfg_utf8(self): basedir = "test_node/test_tahoe_cfg_utf8" fileutil.make_dirs(basedir) - f = open(os.path.join(basedir, 'tahoe.cfg'), 'wt') + f = open(os.path.join(basedir, 'tahoe.cfg'), 'wb') f.write(u"\uFEFF[node]\n".encode('utf-8')) f.write(u"nickname = \u2621\n".encode('utf-8')) f.close() config = read_config(basedir, "") - self.failUnlessEqual(config.get_config("node", "nickname").decode('utf-8'), + self.failUnlessEqual(config.get_config("node", "nickname"), u"\u2621") def test_tahoe_cfg_hash_in_name(self): @@ -166,6 +214,37 @@ class TestCase(testutil.SignalMixin, unittest.TestCase): config = read_config(basedir, "") self.failUnless(config.nickname == nickname) + def test_hash_in_furl(self): + """ + Hashes in furl options are not allowed, resulting in an exception.
+ """ + basedir = self.mktemp() + fileutil.make_dirs(basedir) + with open(os.path.join(basedir, 'tahoe.cfg'), 'wt') as f: + f.write("[node]\n") + f.write("log_gatherer.furl = lalal#onohash\n") + + config = read_config(basedir, "") + with self.assertRaises(UnescapedHashError): + config.get_config("node", "log_gatherer.furl") + + def test_missing_config_item(self): + """ + If a config item is missing: + + 1. Given a default, return default. + 2. Otherwise, raise MissingConfigEntry. + """ + basedir = self.mktemp() + fileutil.make_dirs(basedir) + with open(os.path.join(basedir, 'tahoe.cfg'), 'wt') as f: + f.write("[node]\n") + config = read_config(basedir, "") + + self.assertEquals(config.get_config("node", "log_gatherer.furl", "def"), "def") + with self.assertRaises(MissingConfigEntry): + config.get_config("node", "log_gatherer.furl") + def test_config_required(self): """ Asking for missing (but required) configuration is an error @@ -194,10 +273,35 @@ class TestCase(testutil.SignalMixin, unittest.TestCase): config = read_config(basedir, "portnum") self.assertEqual( config.items("node"), - [(b"nickname", b"foo"), - (b"timeout.disconnect", b"12"), + [("nickname", "foo"), + ("timeout.disconnect", "12"), ], ) + self.assertEqual( + config.items("node", [("unnecessary", "default")]), + [("nickname", "foo"), + ("timeout.disconnect", "12"), + ], + ) + + + def test_config_items_missing_section(self): + """ + If a default is given for a missing section, the default is used. + + Lacking both default and section, an error is raised. + """ + basedir = self.mktemp() + create_node_dir(basedir, "testing") + + with open(os.path.join(basedir, 'tahoe.cfg'), 'wt') as f: + f.write("") + + config = read_config(basedir, "portnum") + with self.assertRaises(configparser.NoSectionError): + config.items("nosuch") + default = [("hello", "world")] + self.assertEqual(config.items("nosuch", default), default) @skipIf( "win32" in sys.platform.lower() or "cygwin" in sys.platform.lower(), @@ -340,28 +444,136 @@ class TestCase(testutil.SignalMixin, unittest.TestCase): yield client.create_client(basedir) self.failUnless(ns.called) + def test_set_config_unescaped_furl_hash(self): + """ + ``_Config.set_config`` raises ``UnescapedHashError`` if the item being set + is a furl and the value includes ``"#"`` and does not set the value. + """ + basedir = self.mktemp() + new_config = config_from_string(basedir, "", "") + with self.assertRaises(UnescapedHashError): + new_config.set_config("foo", "bar.furl", "value#1") + with self.assertRaises(MissingConfigEntry): + new_config.get_config("foo", "bar.furl") + + def test_set_config_new_section(self): + """ + ``_Config.set_config`` can be called with the name of a section that does + not already exist to create that section and set an item in it. + """ + basedir = self.mktemp() + new_config = config_from_string(basedir, "", "", ValidConfiguration.everything()) + new_config.set_config("foo", "bar", "value1") + self.assertEqual( + new_config.get_config("foo", "bar"), + "value1" + ) + + def test_set_config_replace(self): + """ + ``_Config.set_config`` can be called with a section and item that already + exists to change an existing value to a new one. 
+ """ + basedir = self.mktemp() + new_config = config_from_string(basedir, "", "", ValidConfiguration.everything()) + new_config.set_config("foo", "bar", "value1") + new_config.set_config("foo", "bar", "value2") + self.assertEqual( + new_config.get_config("foo", "bar"), + "value2" + ) + + def test_set_config_write(self): + """ + ``_Config.set_config`` persists the configuration change so it can be + re-loaded later. + """ + # Let our nonsense config through + valid_config = ValidConfiguration.everything() + basedir = FilePath(self.mktemp()) + basedir.makedirs() + cfg = basedir.child(b"tahoe.cfg") + cfg.setContent(b"") + new_config = read_config(basedir.path, "", [], valid_config) + new_config.set_config("foo", "bar", "value1") + loaded_config = read_config(basedir.path, "", [], valid_config) + self.assertEqual( + loaded_config.get_config("foo", "bar"), + "value1", + ) + + def test_set_config_rejects_invalid_config(self): + """ + ``_Config.set_config`` raises ``UnknownConfigError`` if the section or + item is not recognized by the validation object and does not set the + value. + """ + # Make everything invalid. + valid_config = ValidConfiguration.nothing() + new_config = config_from_string(self.mktemp(), "", "", valid_config) + with self.assertRaises(UnknownConfigError): + new_config.set_config("foo", "bar", "baz") + with self.assertRaises(MissingConfigEntry): + new_config.get_config("foo", "bar") + + +def _stub_get_local_addresses_sync(): + """ + A function like ``allmydata.util.iputil.get_local_addresses_sync``. + """ + return ["LOCAL"] + + +def _stub_allocate_tcp_port(): + """ + A function like ``allmydata.util.iputil.allocate_tcp_port``. + """ + return 999 + class TestMissingPorts(unittest.TestCase): """ - Test certain error-cases for ports setup + Test certain ``_tub_portlocation`` error cases for ports setup. """ - def setUp(self): self.basedir = self.mktemp() create_node_dir(self.basedir, "testing") + def test_listen_on_zero(self): + """ + ``_tub_portlocation`` raises ``PortAssignmentRequired`` called with a + listen address including port 0 and no interface. + """ + config_data = ( + "[node]\n" + "tub.port = tcp:0\n" + ) + config = config_from_string(self.basedir, "portnum", config_data) + with self.assertRaises(PortAssignmentRequired): + _tub_portlocation(config, None, None) + + def test_listen_on_zero_with_host(self): + """ + ``_tub_portlocation`` raises ``PortAssignmentRequired`` called with a + listen address including port 0 and an interface. + """ + config_data = ( + "[node]\n" + "tub.port = tcp:0:interface=127.0.0.1\n" + ) + config = config_from_string(self.basedir, "portnum", config_data) + with self.assertRaises(PortAssignmentRequired): + _tub_portlocation(config, None, None) + test_listen_on_zero_with_host.todo = native_str( # type: ignore + "https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3563" + ) + def test_parsing_tcp(self): """ - parse explicit tub.port with explicitly-default tub.location + When ``tub.port`` is given and ``tub.location`` is **AUTO** the port + number from ``tub.port`` is used as the port number for the value + constructed for ``tub.location``. 
""" - get_addr = mock.patch( - "allmydata.util.iputil.get_local_addresses_sync", - return_value=["LOCAL"], - ) - alloc_port = mock.patch( - "allmydata.util.iputil.allocate_tcp_port", - return_value=999, - ) config_data = ( "[node]\n" "tub.port = tcp:777\n" @@ -369,68 +581,53 @@ class TestMissingPorts(unittest.TestCase): ) config = config_from_string(self.basedir, "portnum", config_data) - with get_addr, alloc_port: - tubport, tublocation = _tub_portlocation(config) + tubport, tublocation = _tub_portlocation( + config, + _stub_get_local_addresses_sync, + _stub_allocate_tcp_port, + ) self.assertEqual(tubport, "tcp:777") - self.assertEqual(tublocation, "tcp:LOCAL:777") + self.assertEqual(tublocation, b"tcp:LOCAL:777") def test_parsing_defaults(self): """ parse empty config, check defaults """ - get_addr = mock.patch( - "allmydata.util.iputil.get_local_addresses_sync", - return_value=["LOCAL"], - ) - alloc_port = mock.patch( - "allmydata.util.iputil.allocate_tcp_port", - return_value=999, - ) config_data = ( "[node]\n" ) config = config_from_string(self.basedir, "portnum", config_data) - with get_addr, alloc_port: - tubport, tublocation = _tub_portlocation(config) + tubport, tublocation = _tub_portlocation( + config, + _stub_get_local_addresses_sync, + _stub_allocate_tcp_port, + ) self.assertEqual(tubport, "tcp:999") - self.assertEqual(tublocation, "tcp:LOCAL:999") + self.assertEqual(tublocation, b"tcp:LOCAL:999") def test_parsing_location_complex(self): """ location with two options (including defaults) """ - get_addr = mock.patch( - "allmydata.util.iputil.get_local_addresses_sync", - return_value=["LOCAL"], - ) - alloc_port = mock.patch( - "allmydata.util.iputil.allocate_tcp_port", - return_value=999, - ) config_data = ( "[node]\n" "tub.location = tcp:HOST:888,AUTO\n" ) config = config_from_string(self.basedir, "portnum", config_data) - with get_addr, alloc_port: - tubport, tublocation = _tub_portlocation(config) + tubport, tublocation = _tub_portlocation( + config, + _stub_get_local_addresses_sync, + _stub_allocate_tcp_port, + ) self.assertEqual(tubport, "tcp:999") - self.assertEqual(tublocation, "tcp:HOST:888,tcp:LOCAL:999") + self.assertEqual(tublocation, b"tcp:HOST:888,tcp:LOCAL:999") def test_parsing_all_disabled(self): """ parse config with both port + location disabled """ - get_addr = mock.patch( - "allmydata.util.iputil.get_local_addresses_sync", - return_value=["LOCAL"], - ) - alloc_port = mock.patch( - "allmydata.util.iputil.allocate_tcp_port", - return_value=999, - ) config_data = ( "[node]\n" "tub.port = disabled\n" @@ -438,8 +635,11 @@ class TestMissingPorts(unittest.TestCase): ) config = config_from_string(self.basedir, "portnum", config_data) - with get_addr, alloc_port: - res = _tub_portlocation(config) + res = _tub_portlocation( + config, + _stub_get_local_addresses_sync, + _stub_allocate_tcp_port, + ) self.assertTrue(res is None) def test_empty_tub_port(self): @@ -453,7 +653,11 @@ class TestMissingPorts(unittest.TestCase): config = config_from_string(self.basedir, "portnum", config_data) with self.assertRaises(ValueError) as ctx: - _tub_portlocation(config) + _tub_portlocation( + config, + _stub_get_local_addresses_sync, + _stub_allocate_tcp_port, + ) self.assertIn( "tub.port must not be empty", str(ctx.exception) @@ -470,7 +674,11 @@ class TestMissingPorts(unittest.TestCase): config = config_from_string(self.basedir, "portnum", config_data) with self.assertRaises(ValueError) as ctx: - _tub_portlocation(config) + _tub_portlocation( + config, + 
_stub_get_local_addresses_sync, + _stub_allocate_tcp_port, + ) self.assertIn( "tub.location must not be empty", str(ctx.exception) @@ -488,7 +696,11 @@ config = config_from_string(self.basedir, "portnum", config_data) with self.assertRaises(ValueError) as ctx: - _tub_portlocation(config) + _tub_portlocation( + config, + _stub_get_local_addresses_sync, + _stub_allocate_tcp_port, + ) self.assertIn( "tub.port is disabled, but not tub.location", str(ctx.exception) @@ -506,16 +718,64 @@ config = config_from_string(self.basedir, "portnum", config_data) with self.assertRaises(ValueError) as ctx: - _tub_portlocation(config) + _tub_portlocation( + config, + _stub_get_local_addresses_sync, + _stub_allocate_tcp_port, + ) self.assertIn( "tub.location is disabled, but not tub.port", str(ctx.exception) ) + def test_tub_location_tcp(self): + """ + If ``reveal-IP-address`` is set to false and ``tub.location`` includes a + **tcp** hint then ``_tub_portlocation`` raises ``PrivacyError`` because + TCP leaks IP addresses. + """ + config = config_from_string( + "fake.port", + "no-basedir", + "[node]\nreveal-IP-address = false\ntub.location=tcp:hostname:1234\n", + ) + with self.assertRaises(PrivacyError) as ctx: + _tub_portlocation( + config, + _stub_get_local_addresses_sync, + _stub_allocate_tcp_port, + ) + self.assertEqual( + str(ctx.exception), + "tub.location includes tcp: hint", + ) + + def test_tub_location_legacy_tcp(self): + """ + If ``reveal-IP-address`` is set to false and ``tub.location`` includes a + "legacy" hint with no explicit type (which means it is a **tcp** hint) + then the behavior is the same as for an explicit **tcp** hint. + """ + config = config_from_string( + "fake.port", + "no-basedir", + "[node]\nreveal-IP-address = false\ntub.location=hostname:1234\n", + ) + + with self.assertRaises(PrivacyError) as ctx: + _tub_portlocation( + config, + _stub_get_local_addresses_sync, + _stub_allocate_tcp_port, + ) + + self.assertEqual( + str(ctx.exception), + "tub.location includes tcp: hint", + ) + BASE_CONFIG = """ -[client] -introducer.furl = empty [tor] enabled = false [i2p] @@ -545,7 +805,7 @@ enabled = true class FakeTub(object): def __init__(self): - self.tubID = base64.b32encode("foo") + self.tubID = base64.b32encode(b"foo") self.listening_ports = [] def setOption(self, name, value): pass def removeAllConnectionHintHandlers(self): pass @@ -557,89 +817,50 @@ class FakeTub(object): class Listeners(unittest.TestCase): - def test_listen_on_zero(self): + # Randomly allocate a couple distinct port numbers to try out. The test + # never actually binds these port numbers so we don't care if they're "in + # use" on the system or not. We just want a couple distinct values we can + # check expected results against. + @given(ports=sets(elements=port_numbers(), min_size=2, max_size=2)) + def test_multiple_ports(self, ports): """ - Trying to listen on port 0 should be an error + When there are multiple listen addresses suggested by the ``tub.port`` and + ``tub.location`` configuration, the node's *main* port listens on all + of them.
""" - basedir = self.mktemp() - create_node_dir(basedir, "testing") - with open(os.path.join(basedir, "tahoe.cfg"), "w") as f: - f.write(BASE_CONFIG) - f.write("[node]\n") - f.write("tub.port = tcp:0\n") - f.write("tub.location = AUTO\n") - - config = client.read_config(basedir, "client.port") - i2p_provider = mock.Mock() - tor_provider = mock.Mock() - dfh, fch = create_connection_handlers(None, config, i2p_provider, tor_provider) - tub_options = create_tub_options(config) - t = FakeTub() - - with mock.patch("allmydata.node.Tub", return_value=t): - with self.assertRaises(ValueError) as ctx: - create_main_tub(config, tub_options, dfh, fch, i2p_provider, tor_provider) - self.assertIn( - "you must choose", - str(ctx.exception), - ) - - def test_multiple_ports(self): - basedir = self.mktemp() - create_node_dir(basedir, "testing") - port1 = iputil.allocate_tcp_port() - port2 = iputil.allocate_tcp_port() + port1, port2 = iter(ports) port = ("tcp:%d:interface=127.0.0.1,tcp:%d:interface=127.0.0.1" % (port1, port2)) location = "tcp:localhost:%d,tcp:localhost:%d" % (port1, port2) - with open(os.path.join(basedir, "tahoe.cfg"), "w") as f: - f.write(BASE_CONFIG) - f.write("[node]\n") - f.write("tub.port = %s\n" % port) - f.write("tub.location = %s\n" % location) - - config = client.read_config(basedir, "client.port") - i2p_provider = mock.Mock() - tor_provider = mock.Mock() - dfh, fch = create_connection_handlers(None, config, i2p_provider, tor_provider) - tub_options = create_tub_options(config) t = FakeTub() - - with mock.patch("allmydata.node.Tub", return_value=t): - create_main_tub(config, tub_options, dfh, fch, i2p_provider, tor_provider) + tub_listen_on(None, None, t, port, location) self.assertEqual(t.listening_ports, ["tcp:%d:interface=127.0.0.1" % port1, "tcp:%d:interface=127.0.0.1" % port2]) def test_tor_i2p_listeners(self): - basedir = self.mktemp() - config_fname = os.path.join(basedir, "tahoe.cfg") - os.mkdir(basedir) - os.mkdir(os.path.join(basedir, "private")) - with open(config_fname, "w") as f: - f.write(BASE_CONFIG) - f.write("[node]\n") - f.write("tub.port = listen:i2p,listen:tor\n") - f.write("tub.location = tcp:example.org:1234\n") - config = client.read_config(basedir, "client.port") - tub_options = create_tub_options(config) + """ + When configured to listen on an "i2p" or "tor" address, ``tub_listen_on`` + tells the Tub to listen on endpoints supplied by the given Tor and I2P + providers. + """ t = FakeTub() - i2p_provider = mock.Mock() - tor_provider = mock.Mock() - dfh, fch = create_connection_handlers(None, config, i2p_provider, tor_provider) + i2p_listener = object() + i2p_provider = ConstantAddresses(i2p_listener) + tor_listener = object() + tor_provider = ConstantAddresses(tor_listener) - with mock.patch("allmydata.node.Tub", return_value=t): - create_main_tub(config, tub_options, dfh, fch, i2p_provider, tor_provider) - - self.assertEqual(i2p_provider.get_listener.mock_calls, [mock.call()]) - self.assertEqual(tor_provider.get_listener.mock_calls, [mock.call()]) + tub_listen_on( + i2p_provider, + tor_provider, + t, + "listen:i2p,listen:tor", + "tcp:example.org:1234", + ) self.assertEqual( t.listening_ports, - [ - i2p_provider.get_listener(), - tor_provider.get_listener(), - ] + [i2p_listener, tor_listener], ) @@ -744,3 +965,24 @@ class Configuration(unittest.TestCase): "invalid section", str(ctx.exception), ) + + + +class CreateDefaultConnectionHandlersTests(unittest.TestCase): + """ + Tests for create_default_connection_handlers(). 
+ """ + + def test_tcp_disabled(self): + """ + If tcp is set to disabled, no TCP handler is set. + """ + config = config_from_string("", "", dedent(""" + [connections] + tcp = disabled + """)) + default_handlers = create_default_connection_handlers( + config, + {}, + ) + self.assertIs(default_handlers["tcp"], None) diff --git a/src/allmydata/test/test_observer.py b/src/allmydata/test/test_observer.py index 0db13db58..134876be3 100644 --- a/src/allmydata/test/test_observer.py +++ b/src/allmydata/test/test_observer.py @@ -101,3 +101,56 @@ class Observer(unittest.TestCase): d.addCallback(_step2) d.addCallback(_check2) return d + + def test_observer_list_reentrant(self): + """ + ``ObserverList`` is reentrant. + """ + observed = [] + + def observer_one(): + obs.unsubscribe(observer_one) + + def observer_two(): + observed.append(None) + + obs = observer.ObserverList() + obs.subscribe(observer_one) + obs.subscribe(observer_two) + obs.notify() + + self.assertEqual([None], observed) + + def test_observer_list_observer_errors(self): + """ + An error in an earlier observer does not prevent notification from being + delivered to a later observer. + """ + observed = [] + + def observer_one(): + raise Exception("Some problem here") + + def observer_two(): + observed.append(None) + + obs = observer.ObserverList() + obs.subscribe(observer_one) + obs.subscribe(observer_two) + obs.notify() + + self.assertEqual([None], observed) + self.assertEqual(1, len(self.flushLoggedErrors(Exception))) + + def test_observer_list_propagate_keyboardinterrupt(self): + """ + ``KeyboardInterrupt`` escapes ``ObserverList.notify``. + """ + def observer_one(): + raise KeyboardInterrupt() + + obs = observer.ObserverList() + obs.subscribe(observer_one) + + with self.assertRaises(KeyboardInterrupt): + obs.notify() diff --git a/src/allmydata/test/test_python2_regressions.py b/src/allmydata/test/test_python2_regressions.py index 84484f1cf..5c6a654c1 100644 --- a/src/allmydata/test/test_python2_regressions.py +++ b/src/allmydata/test/test_python2_regressions.py @@ -16,6 +16,7 @@ from testtools.matchers import ( BLACKLIST = { "allmydata.test.check_load", "allmydata.windows.registry", + "allmydata.scripts.types_", } diff --git a/src/allmydata/test/test_python3.py b/src/allmydata/test/test_python3.py index 7a6d0b282..c1f0e83d6 100644 --- a/src/allmydata/test/test_python3.py +++ b/src/allmydata/test/test_python3.py @@ -8,7 +8,7 @@ from __future__ import absolute_import from __future__ import division from __future__ import print_function -from future.utils import PY2 +from future.utils import PY2, native_str if PY2: from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 @@ -44,10 +44,9 @@ class Python3PortingEffortTests(SynchronousTestCase): ), ), ) - if PY2: - test_finished_porting.skip = "For some reason todo isn't working on Python 2 now" - else: - test_finished_porting.todo = "https://tahoe-lafs.org/trac/tahoe-lafs/milestone/Support%20Python%203 should be completed" + test_finished_porting.todo = native_str( # type: ignore + "https://tahoe-lafs.org/trac/tahoe-lafs/milestone/Support%20Python%203 should be completed", + ) def test_ported_modules_exist(self): """ diff --git a/src/allmydata/test/test_repairer.py b/src/allmydata/test/test_repairer.py index 4fdffe70e..63a54a505 100644 --- a/src/allmydata/test/test_repairer.py +++ b/src/allmydata/test/test_repairer.py @@ -1,5 +1,15 @@ # -*- coding: utf-8 -*- +""" +Ported to Python 3. 
+""" from __future__ import print_function +from __future__ import absolute_import +from __future__ import division +from __future__ import unicode_literals + +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 from allmydata.test import common from allmydata.monitor import Monitor @@ -62,7 +72,7 @@ class RepairTestMixin(object): c0 = self.g.clients[0] c1 = self.g.clients[1] c0.encoding_params['max_segment_size'] = 12 - d = c0.upload(upload.Data(common.TEST_DATA, convergence="")) + d = c0.upload(upload.Data(common.TEST_DATA, convergence=b"")) def _stash_uri(ur): self.uri = ur.get_uri() self.c0_filenode = c0.create_node_from_uri(ur.get_uri()) @@ -464,7 +474,7 @@ class Repairer(GridTestMixin, unittest.TestCase, RepairTestMixin, # previously-deleted share #2. d.addCallback(lambda ignored: - self.delete_shares_numbered(self.uri, range(3, 10+1))) + self.delete_shares_numbered(self.uri, list(range(3, 10+1)))) d.addCallback(lambda ignored: download_to_data(self.c1_filenode)) d.addCallback(lambda newdata: self.failUnlessEqual(newdata, common.TEST_DATA)) @@ -476,7 +486,7 @@ class Repairer(GridTestMixin, unittest.TestCase, RepairTestMixin, self.set_up_grid(num_clients=2) d = self.upload_and_stash() d.addCallback(lambda ignored: - self.delete_shares_numbered(self.uri, range(7))) + self.delete_shares_numbered(self.uri, list(range(7)))) d.addCallback(lambda ignored: self._stash_counts()) d.addCallback(lambda ignored: self.c0_filenode.check_and_repair(Monitor(), @@ -509,7 +519,7 @@ class Repairer(GridTestMixin, unittest.TestCase, RepairTestMixin, # previously-deleted share #2. d.addCallback(lambda ignored: - self.delete_shares_numbered(self.uri, range(3, 10+1))) + self.delete_shares_numbered(self.uri, list(range(3, 10+1)))) d.addCallback(lambda ignored: download_to_data(self.c1_filenode)) d.addCallback(lambda newdata: self.failUnlessEqual(newdata, common.TEST_DATA)) @@ -527,7 +537,7 @@ class Repairer(GridTestMixin, unittest.TestCase, RepairTestMixin, # distributing the shares widely enough to satisfy the default # happiness setting. def _delete_some_servers(ignored): - for i in xrange(7): + for i in range(7): self.g.remove_server(self.g.servers_by_number[i].my_nodeid) assert len(self.g.servers_by_number) == 3 @@ -640,7 +650,7 @@ class Repairer(GridTestMixin, unittest.TestCase, RepairTestMixin, # downloading and has the right contents. This can't work # unless it has already repaired the previously-corrupted share. 
def _then_delete_7_and_try_a_download(unused=None): - shnums = range(10) + shnums = list(range(10)) shnums.remove(shnum) random.shuffle(shnums) for sharenum in shnums[:7]: @@ -679,10 +689,10 @@ class Repairer(GridTestMixin, unittest.TestCase, RepairTestMixin, self.basedir = "repairer/Repairer/test_tiny_reads" self.set_up_grid() c0 = self.g.clients[0] - DATA = "a"*135 + DATA = b"a"*135 c0.encoding_params['k'] = 22 c0.encoding_params['n'] = 66 - d = c0.upload(upload.Data(DATA, convergence="")) + d = c0.upload(upload.Data(DATA, convergence=b"")) def _then(ur): self.uri = ur.get_uri() self.delete_shares_numbered(self.uri, [0]) diff --git a/src/allmydata/test/test_runner.py b/src/allmydata/test/test_runner.py index d7fa08a0c..ef2b99a19 100644 --- a/src/allmydata/test/test_runner.py +++ b/src/allmydata/test/test_runner.py @@ -12,7 +12,6 @@ from twisted.internet import reactor from twisted.python import usage from twisted.internet.defer import ( inlineCallbacks, - returnValue, DeferredList, ) from twisted.python.filepath import FilePath @@ -20,12 +19,9 @@ from twisted.python.runtime import ( platform, ) from allmydata.util import fileutil, pollmixin -from allmydata.util.encodingutil import unicode_to_argv, unicode_to_output, \ - get_filesystem_encoding +from allmydata.util.encodingutil import unicode_to_argv, unicode_to_output from allmydata.test import common_util -from allmydata.version_checks import normalized_version import allmydata -from allmydata import __appname__ from .common_util import parse_cli, run_cli from .cli_node_api import ( CLINodeAPI, @@ -38,6 +34,7 @@ from ._twisted_9607 import ( ) from ..util.eliotutil import ( inline_callbacks, + log_call_deferred, ) def get_root_from_file(src): @@ -58,17 +55,7 @@ rootdir = get_root_from_file(srcfile) class RunBinTahoeMixin(object): - - @inlineCallbacks - def find_import_location(self): - res = yield self.run_bintahoe(["--version-and-path"]) - out, err, rc_or_sig = res - self.assertEqual(rc_or_sig, 0, res) - lines = out.splitlines() - tahoe_pieces = lines[0].split() - self.assertEqual(tahoe_pieces[0], "%s:" % (__appname__,), (tahoe_pieces, res)) - returnValue(tahoe_pieces[-1].strip("()")) - + @log_call_deferred(action_type="run-bin-tahoe") def run_bintahoe(self, args, stdin=None, python_options=[], env=None): command = sys.executable argv = python_options + ["-m", "allmydata.scripts.runner"] + args @@ -86,64 +73,6 @@ class RunBinTahoeMixin(object): class BinTahoe(common_util.SignalMixin, unittest.TestCase, RunBinTahoeMixin): - @inlineCallbacks - def test_the_right_code(self): - # running "tahoe" in a subprocess should find the same code that - # holds this test file, else something is weird - test_path = os.path.dirname(os.path.dirname(os.path.normcase(os.path.realpath(srcfile)))) - bintahoe_import_path = yield self.find_import_location() - - same = (bintahoe_import_path == test_path) - if not same: - msg = ("My tests and my 'tahoe' executable are using different paths.\n" - "tahoe: %r\n" - "tests: %r\n" - "( according to the test source filename %r)\n" % - (bintahoe_import_path, test_path, srcfile)) - - if (not isinstance(rootdir, unicode) and - rootdir.decode(get_filesystem_encoding(), 'replace') != rootdir): - msg += ("However, this may be a false alarm because the import path\n" - "is not representable in the filesystem encoding.") - raise unittest.SkipTest(msg) - else: - msg += "Please run the tests in a virtualenv that includes both the Tahoe-LAFS library and the 'tahoe' executable." 
- self.fail(msg) - - def test_path(self): - d = self.run_bintahoe(["--version-and-path"]) - def _cb(res): - out, err, rc_or_sig = res - self.failUnlessEqual(rc_or_sig, 0, str(res)) - - # Fail unless the __appname__ package is *this* version *and* - # was loaded from *this* source directory. - - required_verstr = str(allmydata.__version__) - - self.failIfEqual(required_verstr, "unknown", - "We don't know our version, because this distribution didn't come " - "with a _version.py and 'setup.py update_version' hasn't been run.") - - srcdir = os.path.dirname(os.path.dirname(os.path.normcase(os.path.realpath(srcfile)))) - info = repr((res, allmydata.__appname__, required_verstr, srcdir)) - - appverpath = out.split(')')[0] - (appverfull, path) = appverpath.split('] (') - (appver, comment) = appverfull.split(' [') - (branch, full_version) = comment.split(': ') - (app, ver) = appver.split(': ') - - self.failUnlessEqual(app, allmydata.__appname__, info) - norm_ver = normalized_version(ver) - norm_required = normalized_version(required_verstr) - self.failUnlessEqual(norm_ver, norm_required, info) - self.failUnlessEqual(path, srcdir, info) - self.failUnlessEqual(branch, allmydata.branch) - self.failUnlessEqual(full_version, allmydata.full_version) - d.addCallback(_cb) - return d - def test_unicode_arguments_and_output(self): tricky = u"\u2621" try: @@ -165,8 +94,8 @@ class BinTahoe(common_util.SignalMixin, unittest.TestCase, RunBinTahoeMixin): d = self.run_bintahoe(["--version"], python_options=["-t"]) def _cb(res): out, err, rc_or_sig = res - self.failUnlessEqual(rc_or_sig, 0, str(res)) - self.failUnless(out.startswith(allmydata.__appname__+':'), str(res)) + self.assertEqual(rc_or_sig, 0, str(res)) + self.assertTrue(out.startswith(allmydata.__appname__ + '/'), str(res)) d.addCallback(_cb) return d @@ -215,9 +144,8 @@ class BinTahoe(common_util.SignalMixin, unittest.TestCase, RunBinTahoeMixin): class CreateNode(unittest.TestCase): - # exercise "tahoe create-node", create-introducer, - # create-key-generator, and create-stats-gatherer, by calling the - # corresponding code as a subroutine. + # exercise "tahoe create-node" and "tahoe create-introducer" by calling + # the corresponding code as a subroutine. 
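
The ``test_subcommands`` check below leans on Twisted's option parser: tahoe's top-level options class declares subcommands, and ``twisted.python.usage`` surfaces a missing subcommand as a ``UsageError``. A self-contained sketch of that mechanism, with a hypothetical ``DemoOptions`` standing in for tahoe's real options class:

    # Sketch: twisted.python.usage raises UsageError when a required
    # subcommand is missing. DemoOptions is hypothetical, for illustration.
    from twisted.python import usage

    class PingOptions(usage.Options):
        pass

    class DemoOptions(usage.Options):
        subCommands = [["ping", None, PingOptions, "demo subcommand"]]

        def postOptions(self):
            # Mirror the behavior the test relies on: no subcommand is a
            # usage error rather than a silent no-op.
            if self.subCommand is None:
                raise usage.UsageError("must specify a subcommand")

    options = DemoOptions()
    try:
        options.parseOptions([])
    except usage.UsageError as e:
        print("UsageError:", e)
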
def workdir(self, name): basedir = os.path.join("test_runner", "CreateNode", name) @@ -316,62 +244,20 @@ class CreateNode(unittest.TestCase): def test_introducer(self): self.do_create("introducer", "--hostname=127.0.0.1") - def test_stats_gatherer(self): - self.do_create("stats-gatherer", "--hostname=127.0.0.1") - def test_subcommands(self): # no arguments should trigger a command listing, via UsageError self.failUnlessRaises(usage.UsageError, parse_cli, ) - @inlineCallbacks - def test_stats_gatherer_good_args(self): - rc,out,err = yield run_cli("create-stats-gatherer", "--hostname=foo", - self.mktemp()) - self.assertEqual(rc, 0) - rc,out,err = yield run_cli("create-stats-gatherer", - "--location=tcp:foo:1234", - "--port=tcp:1234", self.mktemp()) - self.assertEqual(rc, 0) - - - def test_stats_gatherer_bad_args(self): - def _test(args): - argv = args.split() - self.assertRaises(usage.UsageError, parse_cli, *argv) - - # missing hostname/location/port - _test("create-stats-gatherer D") - - # missing port - _test("create-stats-gatherer --location=foo D") - - # missing location - _test("create-stats-gatherer --port=foo D") - - # can't provide both - _test("create-stats-gatherer --hostname=foo --port=foo D") - - # can't provide both - _test("create-stats-gatherer --hostname=foo --location=foo D") - - # can't provide all three - _test("create-stats-gatherer --hostname=foo --location=foo --port=foo D") - class RunNode(common_util.SignalMixin, unittest.TestCase, pollmixin.PollMixin, RunBinTahoeMixin): """ - exercise "tahoe run" for both introducer, client node, and key-generator, - by spawning "tahoe run" (or "tahoe start") as a subprocess. This doesn't - get us line-level coverage, but it does a better job of confirming that - the user can actually run "./bin/tahoe run" and expect it to work. This - verifies that bin/tahoe sets up PYTHONPATH and the like correctly. - - This doesn't work on cygwin (it hangs forever), so we skip this test - when we're on cygwin. It is likely that "tahoe start" itself doesn't - work on cygwin: twisted seems unable to provide a version of - spawnProcess which really works there. + exercise "tahoe run" for both introducer and client node, by spawning + "tahoe run" as a subprocess. This doesn't get us line-level coverage, but + it does a better job of confirming that the user can actually run + "./bin/tahoe run" and expect it to work. This verifies that bin/tahoe sets + up PYTHONPATH and the like correctly. """ def workdir(self, name): @@ -451,7 +337,7 @@ class RunNode(common_util.SignalMixin, unittest.TestCase, pollmixin.PollMixin, @inline_callbacks def test_client(self): """ - Test many things. + Test too many things. 0) Verify that "tahoe create-node" takes a --webport option and writes the value to the configuration file. @@ -459,9 +345,9 @@ class RunNode(common_util.SignalMixin, unittest.TestCase, pollmixin.PollMixin, 1) Verify that "tahoe run" writes a pid file and a node url file (on POSIX). 2) Verify that the storage furl file has a stable value across a - "tahoe run" / "tahoe stop" / "tahoe run" sequence. + "tahoe run" / stop / "tahoe run" sequence. - 3) Verify that the pid file is removed after "tahoe stop" succeeds (on POSIX). + 3) Verify that the pid file is removed after SIGTERM (on POSIX). 
""" basedir = self.workdir("test_client") c1 = os.path.join(basedir, "c1") @@ -565,18 +451,6 @@ class RunNode(common_util.SignalMixin, unittest.TestCase, pollmixin.PollMixin, "does not look like a directory at all" ) - def test_stop_bad_directory(self): - """ - If ``tahoe run`` is pointed at a directory where no node is running, it - reports an error and exits. - """ - return self._bad_directory_test( - u"test_stop_bad_directory", - "tahoe stop", - lambda tahoe, p: tahoe.stop(p), - "does not look like a running node directory", - ) - @inline_callbacks def _bad_directory_test(self, workdir, description, operation, expected_message): """ diff --git a/src/allmydata/test/test_sftp.py b/src/allmydata/test/test_sftp.py index b6f1fbc8a..2214e4e5b 100644 --- a/src/allmydata/test/test_sftp.py +++ b/src/allmydata/test/test_sftp.py @@ -1,4 +1,14 @@ +""" +Ported to Python 3. +""" from __future__ import print_function +from __future__ import absolute_import +from __future__ import division +from __future__ import unicode_literals + +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 import re, struct, traceback, time, calendar from stat import S_IFREG, S_IFDIR @@ -9,18 +19,15 @@ from twisted.python.failure import Failure from twisted.internet.error import ProcessDone, ProcessTerminated from allmydata.util import deferredutil -conch_interfaces = None -sftp = None -sftpd = None - try: from twisted.conch import interfaces as conch_interfaces from twisted.conch.ssh import filetransfer as sftp from allmydata.frontends import sftpd except ImportError as e: + conch_interfaces = sftp = sftpd = None # type: ignore conch_unavailable_reason = e else: - conch_unavailable_reason = None + conch_unavailable_reason = None # type: ignore from allmydata.interfaces import IDirectoryNode, ExistingChildError, NoSuchChildError from allmydata.mutable.common import NotWriteableError @@ -76,7 +83,7 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas return d def _set_up_tree(self): - u = publish.MutableData("mutable file contents") + u = publish.MutableData(b"mutable file contents") d = self.client.create_mutable_file(u) d.addCallback(lambda node: self.root.set_node(u"mutable", node)) def _created_mutable(n): @@ -92,33 +99,33 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas self.readonly_uri = n.get_uri() d.addCallback(_created_readonly) - gross = upload.Data("0123456789" * 101, None) + gross = upload.Data(b"0123456789" * 101, None) d.addCallback(lambda ign: self.root.add_file(u"gro\u00DF", gross)) def _created_gross(n): self.gross = n self.gross_uri = n.get_uri() d.addCallback(_created_gross) - small = upload.Data("0123456789", None) + small = upload.Data(b"0123456789", None) d.addCallback(lambda ign: self.root.add_file(u"small", small)) def _created_small(n): self.small = n self.small_uri = n.get_uri() d.addCallback(_created_small) - small2 = upload.Data("Small enough for a LIT too", None) + small2 = upload.Data(b"Small enough for a LIT too", None) d.addCallback(lambda ign: self.root.add_file(u"small2", small2)) def _created_small2(n): self.small2 = n self.small2_uri = n.get_uri() d.addCallback(_created_small2) - empty_litdir_uri = "URI:DIR2-LIT:" + empty_litdir_uri = b"URI:DIR2-LIT:" # contains one child which is itself also LIT: - tiny_litdir_uri = 
"URI:DIR2-LIT:gqytunj2onug64tufqzdcosvkjetutcjkq5gw4tvm5vwszdgnz5hgyzufqydulbshj5x2lbm" + tiny_litdir_uri = b"URI:DIR2-LIT:gqytunj2onug64tufqzdcosvkjetutcjkq5gw4tvm5vwszdgnz5hgyzufqydulbshj5x2lbm" - unknown_uri = "x-tahoe-crazy://I_am_from_the_future." + unknown_uri = b"x-tahoe-crazy://I_am_from_the_future." d.addCallback(lambda ign: self.root._create_and_validate_node(None, empty_litdir_uri, name=u"empty_lit_dir")) def _created_empty_lit_dir(n): @@ -154,55 +161,55 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas version = self.handler.gotVersion(3, {}) self.failUnless(isinstance(version, dict)) - self.failUnlessReallyEqual(self.handler._path_from_string(""), []) - self.failUnlessReallyEqual(self.handler._path_from_string("/"), []) - self.failUnlessReallyEqual(self.handler._path_from_string("."), []) - self.failUnlessReallyEqual(self.handler._path_from_string("//"), []) - self.failUnlessReallyEqual(self.handler._path_from_string("/."), []) - self.failUnlessReallyEqual(self.handler._path_from_string("/./"), []) - self.failUnlessReallyEqual(self.handler._path_from_string("foo"), [u"foo"]) - self.failUnlessReallyEqual(self.handler._path_from_string("/foo"), [u"foo"]) - self.failUnlessReallyEqual(self.handler._path_from_string("foo/"), [u"foo"]) - self.failUnlessReallyEqual(self.handler._path_from_string("/foo/"), [u"foo"]) - self.failUnlessReallyEqual(self.handler._path_from_string("foo/bar"), [u"foo", u"bar"]) - self.failUnlessReallyEqual(self.handler._path_from_string("/foo/bar"), [u"foo", u"bar"]) - self.failUnlessReallyEqual(self.handler._path_from_string("foo/bar//"), [u"foo", u"bar"]) - self.failUnlessReallyEqual(self.handler._path_from_string("/foo/bar//"), [u"foo", u"bar"]) - self.failUnlessReallyEqual(self.handler._path_from_string("foo/./bar"), [u"foo", u"bar"]) - self.failUnlessReallyEqual(self.handler._path_from_string("./foo/./bar"), [u"foo", u"bar"]) - self.failUnlessReallyEqual(self.handler._path_from_string("foo/../bar"), [u"bar"]) - self.failUnlessReallyEqual(self.handler._path_from_string("/foo/../bar"), [u"bar"]) - self.failUnlessReallyEqual(self.handler._path_from_string("../bar"), [u"bar"]) - self.failUnlessReallyEqual(self.handler._path_from_string("/../bar"), [u"bar"]) + self.failUnlessReallyEqual(self.handler._path_from_string(b""), []) + self.failUnlessReallyEqual(self.handler._path_from_string(b"/"), []) + self.failUnlessReallyEqual(self.handler._path_from_string(b"."), []) + self.failUnlessReallyEqual(self.handler._path_from_string(b"//"), []) + self.failUnlessReallyEqual(self.handler._path_from_string(b"/."), []) + self.failUnlessReallyEqual(self.handler._path_from_string(b"/./"), []) + self.failUnlessReallyEqual(self.handler._path_from_string(b"foo"), [u"foo"]) + self.failUnlessReallyEqual(self.handler._path_from_string(b"/foo"), [u"foo"]) + self.failUnlessReallyEqual(self.handler._path_from_string(b"foo/"), [u"foo"]) + self.failUnlessReallyEqual(self.handler._path_from_string(b"/foo/"), [u"foo"]) + self.failUnlessReallyEqual(self.handler._path_from_string(b"foo/bar"), [u"foo", u"bar"]) + self.failUnlessReallyEqual(self.handler._path_from_string(b"/foo/bar"), [u"foo", u"bar"]) + self.failUnlessReallyEqual(self.handler._path_from_string(b"foo/bar//"), [u"foo", u"bar"]) + self.failUnlessReallyEqual(self.handler._path_from_string(b"/foo/bar//"), [u"foo", u"bar"]) + self.failUnlessReallyEqual(self.handler._path_from_string(b"foo/./bar"), [u"foo", u"bar"]) + self.failUnlessReallyEqual(self.handler._path_from_string(b"./foo/./bar"), 
[u"foo", u"bar"]) + self.failUnlessReallyEqual(self.handler._path_from_string(b"foo/../bar"), [u"bar"]) + self.failUnlessReallyEqual(self.handler._path_from_string(b"/foo/../bar"), [u"bar"]) + self.failUnlessReallyEqual(self.handler._path_from_string(b"../bar"), [u"bar"]) + self.failUnlessReallyEqual(self.handler._path_from_string(b"/../bar"), [u"bar"]) - self.failUnlessReallyEqual(self.handler.realPath(""), "/") - self.failUnlessReallyEqual(self.handler.realPath("/"), "/") - self.failUnlessReallyEqual(self.handler.realPath("."), "/") - self.failUnlessReallyEqual(self.handler.realPath("//"), "/") - self.failUnlessReallyEqual(self.handler.realPath("/."), "/") - self.failUnlessReallyEqual(self.handler.realPath("/./"), "/") - self.failUnlessReallyEqual(self.handler.realPath("foo"), "/foo") - self.failUnlessReallyEqual(self.handler.realPath("/foo"), "/foo") - self.failUnlessReallyEqual(self.handler.realPath("foo/"), "/foo") - self.failUnlessReallyEqual(self.handler.realPath("/foo/"), "/foo") - self.failUnlessReallyEqual(self.handler.realPath("foo/bar"), "/foo/bar") - self.failUnlessReallyEqual(self.handler.realPath("/foo/bar"), "/foo/bar") - self.failUnlessReallyEqual(self.handler.realPath("foo/bar//"), "/foo/bar") - self.failUnlessReallyEqual(self.handler.realPath("/foo/bar//"), "/foo/bar") - self.failUnlessReallyEqual(self.handler.realPath("foo/./bar"), "/foo/bar") - self.failUnlessReallyEqual(self.handler.realPath("./foo/./bar"), "/foo/bar") - self.failUnlessReallyEqual(self.handler.realPath("foo/../bar"), "/bar") - self.failUnlessReallyEqual(self.handler.realPath("/foo/../bar"), "/bar") - self.failUnlessReallyEqual(self.handler.realPath("../bar"), "/bar") - self.failUnlessReallyEqual(self.handler.realPath("/../bar"), "/bar") + self.failUnlessReallyEqual(self.handler.realPath(b""), b"/") + self.failUnlessReallyEqual(self.handler.realPath(b"/"), b"/") + self.failUnlessReallyEqual(self.handler.realPath(b"."), b"/") + self.failUnlessReallyEqual(self.handler.realPath(b"//"), b"/") + self.failUnlessReallyEqual(self.handler.realPath(b"/."), b"/") + self.failUnlessReallyEqual(self.handler.realPath(b"/./"), b"/") + self.failUnlessReallyEqual(self.handler.realPath(b"foo"), b"/foo") + self.failUnlessReallyEqual(self.handler.realPath(b"/foo"), b"/foo") + self.failUnlessReallyEqual(self.handler.realPath(b"foo/"), b"/foo") + self.failUnlessReallyEqual(self.handler.realPath(b"/foo/"), b"/foo") + self.failUnlessReallyEqual(self.handler.realPath(b"foo/bar"), b"/foo/bar") + self.failUnlessReallyEqual(self.handler.realPath(b"/foo/bar"), b"/foo/bar") + self.failUnlessReallyEqual(self.handler.realPath(b"foo/bar//"), b"/foo/bar") + self.failUnlessReallyEqual(self.handler.realPath(b"/foo/bar//"), b"/foo/bar") + self.failUnlessReallyEqual(self.handler.realPath(b"foo/./bar"), b"/foo/bar") + self.failUnlessReallyEqual(self.handler.realPath(b"./foo/./bar"), b"/foo/bar") + self.failUnlessReallyEqual(self.handler.realPath(b"foo/../bar"), b"/bar") + self.failUnlessReallyEqual(self.handler.realPath(b"/foo/../bar"), b"/bar") + self.failUnlessReallyEqual(self.handler.realPath(b"../bar"), b"/bar") + self.failUnlessReallyEqual(self.handler.realPath(b"/../bar"), b"/bar") d.addCallback(_check) d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "_path_from_string invalid UTF-8", - self.handler._path_from_string, "\xFF")) + self.handler._path_from_string, b"\xFF")) d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "realPath invalid UTF-8", - self.handler.realPath, "\xFF")) + 
self.handler.realPath, b"\xFF")) return d @@ -243,10 +250,10 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_OP_UNSUPPORTED, "readLink link", - self.handler.readLink, "link")) + self.handler.readLink, b"link")) d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_OP_UNSUPPORTED, "makeLink link file", - self.handler.makeLink, "link", "file")) + self.handler.makeLink, b"link", b"file")) return d @@ -277,64 +284,64 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "openDirectory small", - self.handler.openDirectory, "small")) + self.handler.openDirectory, b"small")) d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "openDirectory unknown", - self.handler.openDirectory, "unknown")) + self.handler.openDirectory, b"unknown")) d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "openDirectory nodir", - self.handler.openDirectory, "nodir")) + self.handler.openDirectory, b"nodir")) d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "openDirectory nodir/nodir", - self.handler.openDirectory, "nodir/nodir")) + self.handler.openDirectory, b"nodir/nodir")) gross = u"gro\u00DF".encode("utf-8") expected_root = [ - ('empty_lit_dir', r'dr-xr-xr-x .* 0 .* empty_lit_dir$', {'permissions': S_IFDIR | 0o555}), - (gross, r'-rw-rw-rw- .* 1010 .* '+gross+'$', {'permissions': S_IFREG | 0o666, 'size': 1010}), + (b'empty_lit_dir', br'dr-xr-xr-x .* 0 .* empty_lit_dir$', {'permissions': S_IFDIR | 0o555}), + (gross, br'-rw-rw-rw- .* 1010 .* '+gross+b'$', {'permissions': S_IFREG | 0o666, 'size': 1010}), # The fall of the Berlin wall may have been on 9th or 10th November 1989 depending on the gateway's timezone. 
#('loop', r'drwxrwxrwx .* 0 Nov (09|10) 1989 loop$', {'permissions': S_IFDIR | 0777}), - ('loop', r'drwxrwxrwx .* 0 .* loop$', {'permissions': S_IFDIR | 0o777}), - ('mutable', r'-rw-rw-rw- .* 0 .* mutable$', {'permissions': S_IFREG | 0o666}), - ('readonly', r'-r--r--r-- .* 0 .* readonly$', {'permissions': S_IFREG | 0o444}), - ('small', r'-rw-rw-rw- .* 10 .* small$', {'permissions': S_IFREG | 0o666, 'size': 10}), - ('small2', r'-rw-rw-rw- .* 26 .* small2$', {'permissions': S_IFREG | 0o666, 'size': 26}), - ('tiny_lit_dir', r'dr-xr-xr-x .* 0 .* tiny_lit_dir$', {'permissions': S_IFDIR | 0o555}), - ('unknown', r'\?--------- .* 0 .* unknown$', {'permissions': 0}), + (b'loop', br'drwxrwxrwx .* 0 .* loop$', {'permissions': S_IFDIR | 0o777}), + (b'mutable', br'-rw-rw-rw- .* 0 .* mutable$', {'permissions': S_IFREG | 0o666}), + (b'readonly', br'-r--r--r-- .* 0 .* readonly$', {'permissions': S_IFREG | 0o444}), + (b'small', br'-rw-rw-rw- .* 10 .* small$', {'permissions': S_IFREG | 0o666, 'size': 10}), + (b'small2', br'-rw-rw-rw- .* 26 .* small2$', {'permissions': S_IFREG | 0o666, 'size': 26}), + (b'tiny_lit_dir', br'dr-xr-xr-x .* 0 .* tiny_lit_dir$', {'permissions': S_IFDIR | 0o555}), + (b'unknown', br'\?--------- .* 0 .* unknown$', {'permissions': 0}), ] - d.addCallback(lambda ign: self.handler.openDirectory("")) + d.addCallback(lambda ign: self.handler.openDirectory(b"")) d.addCallback(lambda res: self._compareDirLists(res, expected_root)) - d.addCallback(lambda ign: self.handler.openDirectory("loop")) + d.addCallback(lambda ign: self.handler.openDirectory(b"loop")) d.addCallback(lambda res: self._compareDirLists(res, expected_root)) - d.addCallback(lambda ign: self.handler.openDirectory("loop/loop")) + d.addCallback(lambda ign: self.handler.openDirectory(b"loop/loop")) d.addCallback(lambda res: self._compareDirLists(res, expected_root)) - d.addCallback(lambda ign: self.handler.openDirectory("empty_lit_dir")) + d.addCallback(lambda ign: self.handler.openDirectory(b"empty_lit_dir")) d.addCallback(lambda res: self._compareDirLists(res, [])) # The UTC epoch may either be in Jan 1 1970 or Dec 31 1969 depending on the gateway's timezone. 
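# ---------------------------------------------------------------------------
# [Reviewer's note, not part of the patch.] The expected_* tables in this
# test pair a (now bytes) filename with a regex over the SFTP "longname" and
# a subset of the attribute dict. _compareDirLists presumably checks each
# directory entry along these lines (a sketch of the idea, not the real
# helper):

import re

def compare_dir_entry(entry, expected):
    # openDirectory yields (filename, longname, attrs) triples.
    (name, longname, attrs) = entry
    (exp_name, exp_regex, exp_attrs) = expected
    assert name == exp_name, (name, exp_name)
    assert re.match(exp_regex, longname), (exp_regex, longname)
    for key, value in exp_attrs.items():
        # Only the listed attributes are required to match.
        assert attrs[key] == value, (key, attrs[key], value)
# ---------------------------------------------------------------------------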
expected_tiny_lit = [ - ('short', r'-r--r--r-- .* 8 (Jan 01 1970|Dec 31 1969) short$', {'permissions': S_IFREG | 0o444, 'size': 8}), + (b'short', br'-r--r--r-- .* 8 (Jan 01 1970|Dec 31 1969) short$', {'permissions': S_IFREG | 0o444, 'size': 8}), ] - d.addCallback(lambda ign: self.handler.openDirectory("tiny_lit_dir")) + d.addCallback(lambda ign: self.handler.openDirectory(b"tiny_lit_dir")) d.addCallback(lambda res: self._compareDirLists(res, expected_tiny_lit)) - d.addCallback(lambda ign: self.handler.getAttrs("small", True)) + d.addCallback(lambda ign: self.handler.getAttrs(b"small", True)) d.addCallback(lambda attrs: self._compareAttributes(attrs, {'permissions': S_IFREG | 0o666, 'size': 10})) - d.addCallback(lambda ign: self.handler.setAttrs("small", {})) + d.addCallback(lambda ign: self.handler.setAttrs(b"small", {})) d.addCallback(lambda res: self.failUnlessReallyEqual(res, None)) - d.addCallback(lambda ign: self.handler.getAttrs("small", True)) + d.addCallback(lambda ign: self.handler.getAttrs(b"small", True)) d.addCallback(lambda attrs: self._compareAttributes(attrs, {'permissions': S_IFREG | 0o666, 'size': 10})) d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_OP_UNSUPPORTED, "setAttrs size", - self.handler.setAttrs, "small", {'size': 0})) + self.handler.setAttrs, b"small", {'size': 0})) d.addCallback(lambda ign: self.failUnlessEqual(sftpd.all_heisenfiles, {})) d.addCallback(lambda ign: self.failUnlessEqual(self.handler._heisenfiles, {})) @@ -346,53 +353,53 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_BAD_MESSAGE, "openFile small 0 bad", - self.handler.openFile, "small", 0, {})) + self.handler.openFile, b"small", 0, {})) # attempting to open a non-existent file should fail d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "openFile nofile READ nosuch", - self.handler.openFile, "nofile", sftp.FXF_READ, {})) + self.handler.openFile, b"nofile", sftp.FXF_READ, {})) d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "openFile nodir/file READ nosuch", - self.handler.openFile, "nodir/file", sftp.FXF_READ, {})) + self.handler.openFile, b"nodir/file", sftp.FXF_READ, {})) d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "openFile unknown READ denied", - self.handler.openFile, "unknown", sftp.FXF_READ, {})) + self.handler.openFile, b"unknown", sftp.FXF_READ, {})) d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "openFile unknown/file READ denied", - self.handler.openFile, "unknown/file", sftp.FXF_READ, {})) + self.handler.openFile, b"unknown/file", sftp.FXF_READ, {})) d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "openFile tiny_lit_dir READ denied", - self.handler.openFile, "tiny_lit_dir", sftp.FXF_READ, {})) + self.handler.openFile, b"tiny_lit_dir", sftp.FXF_READ, {})) d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "openFile unknown uri READ denied", - self.handler.openFile, "uri/"+self.unknown_uri, sftp.FXF_READ, {})) + self.handler.openFile, b"uri/"+self.unknown_uri, sftp.FXF_READ, {})) d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "openFile tiny_lit_dir uri READ denied", - self.handler.openFile, "uri/"+self.tiny_lit_dir_uri, sftp.FXF_READ, {})) + self.handler.openFile, b"uri/"+self.tiny_lit_dir_uri, sftp.FXF_READ, {})) # FIXME: should be 
FX_NO_SUCH_FILE? d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "openFile noexist uri READ denied", - self.handler.openFile, "uri/URI:noexist", sftp.FXF_READ, {})) + self.handler.openFile, b"uri/URI:noexist", sftp.FXF_READ, {})) d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "openFile invalid UTF-8 uri READ denied", - self.handler.openFile, "uri/URI:\xFF", sftp.FXF_READ, {})) + self.handler.openFile, b"uri/URI:\xFF", sftp.FXF_READ, {})) # reading an existing file should succeed - d.addCallback(lambda ign: self.handler.openFile("small", sftp.FXF_READ, {})) + d.addCallback(lambda ign: self.handler.openFile(b"small", sftp.FXF_READ, {})) def _read_small(rf): d2 = rf.readChunk(0, 10) - d2.addCallback(lambda data: self.failUnlessReallyEqual(data, "0123456789")) + d2.addCallback(lambda data: self.failUnlessReallyEqual(data, b"0123456789")) d2.addCallback(lambda ign: rf.readChunk(2, 6)) - d2.addCallback(lambda data: self.failUnlessReallyEqual(data, "234567")) + d2.addCallback(lambda data: self.failUnlessReallyEqual(data, b"234567")) d2.addCallback(lambda ign: rf.readChunk(1, 0)) - d2.addCallback(lambda data: self.failUnlessReallyEqual(data, "")) + d2.addCallback(lambda data: self.failUnlessReallyEqual(data, b"")) d2.addCallback(lambda ign: rf.readChunk(8, 4)) # read that starts before EOF is OK - d2.addCallback(lambda data: self.failUnlessReallyEqual(data, "89")) + d2.addCallback(lambda data: self.failUnlessReallyEqual(data, b"89")) d2.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_EOF, "readChunk starting at EOF (0-byte)", @@ -407,12 +414,12 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas d2.addCallback(lambda ign: rf.getAttrs()) d2.addCallback(lambda attrs: self._compareAttributes(attrs, {'permissions': S_IFREG | 0o666, 'size': 10})) - d2.addCallback(lambda ign: self.handler.getAttrs("small", followLinks=0)) + d2.addCallback(lambda ign: self.handler.getAttrs(b"small", followLinks=0)) d2.addCallback(lambda attrs: self._compareAttributes(attrs, {'permissions': S_IFREG | 0o666, 'size': 10})) d2.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "writeChunk on read-only handle denied", - rf.writeChunk, 0, "a")) + rf.writeChunk, 0, b"a")) d2.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "setAttrs on read-only handle denied", rf.setAttrs, {})) @@ -435,16 +442,16 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas d.addCallback(lambda ign: self.handler.openFile(gross, sftp.FXF_READ, {})) def _read_gross(rf): d2 = rf.readChunk(0, 10) - d2.addCallback(lambda data: self.failUnlessReallyEqual(data, "0123456789")) + d2.addCallback(lambda data: self.failUnlessReallyEqual(data, b"0123456789")) d2.addCallback(lambda ign: rf.readChunk(2, 6)) - d2.addCallback(lambda data: self.failUnlessReallyEqual(data, "234567")) + d2.addCallback(lambda data: self.failUnlessReallyEqual(data, b"234567")) d2.addCallback(lambda ign: rf.readChunk(1, 0)) - d2.addCallback(lambda data: self.failUnlessReallyEqual(data, "")) + d2.addCallback(lambda data: self.failUnlessReallyEqual(data, b"")) d2.addCallback(lambda ign: rf.readChunk(1008, 4)) # read that starts before EOF is OK - d2.addCallback(lambda data: self.failUnlessReallyEqual(data, "89")) + d2.addCallback(lambda data: self.failUnlessReallyEqual(data, b"89")) d2.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_EOF, "readChunk starting at EOF (0-byte)", @@ -464,7 
+471,7 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas d2.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "writeChunk on read-only handle denied", - rf.writeChunk, 0, "a")) + rf.writeChunk, 0, b"a")) d2.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "setAttrs on read-only handle denied", rf.setAttrs, {})) @@ -483,37 +490,37 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas d.addCallback(_read_gross) # reading an existing small file via uri/ should succeed - d.addCallback(lambda ign: self.handler.openFile("uri/"+self.small_uri, sftp.FXF_READ, {})) + d.addCallback(lambda ign: self.handler.openFile(b"uri/"+self.small_uri, sftp.FXF_READ, {})) def _read_small_uri(rf): d2 = rf.readChunk(0, 10) - d2.addCallback(lambda data: self.failUnlessReallyEqual(data, "0123456789")) + d2.addCallback(lambda data: self.failUnlessReallyEqual(data, b"0123456789")) d2.addCallback(lambda ign: rf.close()) return d2 d.addCallback(_read_small_uri) # repeat for a large file - d.addCallback(lambda ign: self.handler.openFile("uri/"+self.gross_uri, sftp.FXF_READ, {})) + d.addCallback(lambda ign: self.handler.openFile(b"uri/"+self.gross_uri, sftp.FXF_READ, {})) def _read_gross_uri(rf): d2 = rf.readChunk(0, 10) - d2.addCallback(lambda data: self.failUnlessReallyEqual(data, "0123456789")) + d2.addCallback(lambda data: self.failUnlessReallyEqual(data, b"0123456789")) d2.addCallback(lambda ign: rf.close()) return d2 d.addCallback(_read_gross_uri) # repeat for a mutable file - d.addCallback(lambda ign: self.handler.openFile("uri/"+self.mutable_uri, sftp.FXF_READ, {})) + d.addCallback(lambda ign: self.handler.openFile(b"uri/"+self.mutable_uri, sftp.FXF_READ, {})) def _read_mutable_uri(rf): d2 = rf.readChunk(0, 100) - d2.addCallback(lambda data: self.failUnlessReallyEqual(data, "mutable file contents")) + d2.addCallback(lambda data: self.failUnlessReallyEqual(data, b"mutable file contents")) d2.addCallback(lambda ign: rf.close()) return d2 d.addCallback(_read_mutable_uri) # repeat for a file within a directory referenced by URI - d.addCallback(lambda ign: self.handler.openFile("uri/"+self.tiny_lit_dir_uri+"/short", sftp.FXF_READ, {})) + d.addCallback(lambda ign: self.handler.openFile(b"uri/"+self.tiny_lit_dir_uri+b"/short", sftp.FXF_READ, {})) def _read_short(rf): d2 = rf.readChunk(0, 100) - d2.addCallback(lambda data: self.failUnlessReallyEqual(data, "The end.")) + d2.addCallback(lambda data: self.failUnlessReallyEqual(data, b"The end.")) d2.addCallback(lambda ign: rf.close()) return d2 d.addCallback(_read_short) @@ -521,7 +528,7 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas # check that failed downloads cause failed reads. Note that this # trashes the grid (by deleting all shares), so this must be at the # end of the test function. - d.addCallback(lambda ign: self.handler.openFile("uri/"+self.gross_uri, sftp.FXF_READ, {})) + d.addCallback(lambda ign: self.handler.openFile(b"uri/"+self.gross_uri, sftp.FXF_READ, {})) def _read_broken(rf): d2 = defer.succeed(None) d2.addCallback(lambda ign: self.g.nuke_from_orbit()) @@ -542,10 +549,10 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas # The check at the end of openFile_read tested this for large files, # but it trashed the grid in the process, so this needs to be a # separate test. 
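# ---------------------------------------------------------------------------
# [Reviewer's note, not part of the patch.] shouldFailWithSFTPError, used
# throughout this file, asserts that a Deferred fails with an SFTPError
# carrying a specific code. Roughly, under that assumption (a sketch, not
# the actual mixin method):

from twisted.internet import defer
from twisted.conch.ssh import filetransfer as sftp

def should_fail_with_sftp_error(testcase, expected_code, which,
                                callable_, *args, **kwargs):
    d = defer.maybeDeferred(callable_, *args, **kwargs)
    def _unexpected_success(result):
        testcase.fail("%s was supposed to fail with SFTPError(%r), "
                      "but returned %r" % (which, expected_code, result))
    def _check_failure(f):
        f.trap(sftp.SFTPError)  # re-raises anything that isn't an SFTPError
        testcase.assertEqual(f.value.code, expected_code, which)
    d.addCallbacks(_unexpected_success, _check_failure)
    return d
# ---------------------------------------------------------------------------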
- small = upload.Data("0123456789"*10, None) + small = upload.Data(b"0123456789"*10, None) d = self._set_up("openFile_read_error") d.addCallback(lambda ign: self.root.add_file(u"small", small)) - d.addCallback(lambda n: self.handler.openFile("/uri/"+n.get_uri(), sftp.FXF_READ, {})) + d.addCallback(lambda n: self.handler.openFile(b"/uri/"+n.get_uri(), sftp.FXF_READ, {})) def _read_broken(rf): d2 = defer.succeed(None) d2.addCallback(lambda ign: self.g.nuke_from_orbit()) @@ -569,106 +576,106 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas # '' is an invalid filename d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "openFile '' WRITE|CREAT|TRUNC nosuch", - self.handler.openFile, "", sftp.FXF_WRITE | sftp.FXF_CREAT | sftp.FXF_TRUNC, {})) + self.handler.openFile, b"", sftp.FXF_WRITE | sftp.FXF_CREAT | sftp.FXF_TRUNC, {})) # TRUNC is not valid without CREAT if the file does not already exist d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "openFile newfile WRITE|TRUNC nosuch", - self.handler.openFile, "newfile", sftp.FXF_WRITE | sftp.FXF_TRUNC, {})) + self.handler.openFile, b"newfile", sftp.FXF_WRITE | sftp.FXF_TRUNC, {})) # EXCL is not valid without CREAT d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_BAD_MESSAGE, "openFile small WRITE|EXCL bad", - self.handler.openFile, "small", sftp.FXF_WRITE | sftp.FXF_EXCL, {})) + self.handler.openFile, b"small", sftp.FXF_WRITE | sftp.FXF_EXCL, {})) # cannot write to an existing directory d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "openFile tiny_lit_dir WRITE denied", - self.handler.openFile, "tiny_lit_dir", sftp.FXF_WRITE, {})) + self.handler.openFile, b"tiny_lit_dir", sftp.FXF_WRITE, {})) # cannot write to an existing unknown d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "openFile unknown WRITE denied", - self.handler.openFile, "unknown", sftp.FXF_WRITE, {})) + self.handler.openFile, b"unknown", sftp.FXF_WRITE, {})) # cannot create a child of an unknown d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "openFile unknown/newfile WRITE|CREAT denied", - self.handler.openFile, "unknown/newfile", + self.handler.openFile, b"unknown/newfile", sftp.FXF_WRITE | sftp.FXF_CREAT, {})) # cannot write to a new file in an immutable directory d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "openFile tiny_lit_dir/newfile WRITE|CREAT|TRUNC denied", - self.handler.openFile, "tiny_lit_dir/newfile", + self.handler.openFile, b"tiny_lit_dir/newfile", sftp.FXF_WRITE | sftp.FXF_CREAT | sftp.FXF_TRUNC, {})) # cannot write to an existing immutable file in an immutable directory (with or without CREAT and EXCL) d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "openFile tiny_lit_dir/short WRITE denied", - self.handler.openFile, "tiny_lit_dir/short", sftp.FXF_WRITE, {})) + self.handler.openFile, b"tiny_lit_dir/short", sftp.FXF_WRITE, {})) d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "openFile tiny_lit_dir/short WRITE|CREAT denied", - self.handler.openFile, "tiny_lit_dir/short", + self.handler.openFile, b"tiny_lit_dir/short", sftp.FXF_WRITE | sftp.FXF_CREAT, {})) # cannot write to a mutable file via a readonly cap (by path or uri) d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "openFile readonly WRITE denied", - self.handler.openFile, 
"readonly", sftp.FXF_WRITE, {})) + self.handler.openFile, b"readonly", sftp.FXF_WRITE, {})) d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "openFile readonly uri WRITE denied", - self.handler.openFile, "uri/"+self.readonly_uri, sftp.FXF_WRITE, {})) + self.handler.openFile, b"uri/"+self.readonly_uri, sftp.FXF_WRITE, {})) # cannot create a file with the EXCL flag if it already exists d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_FAILURE, "openFile small WRITE|CREAT|EXCL failure", - self.handler.openFile, "small", + self.handler.openFile, b"small", sftp.FXF_WRITE | sftp.FXF_CREAT | sftp.FXF_EXCL, {})) d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_FAILURE, "openFile mutable WRITE|CREAT|EXCL failure", - self.handler.openFile, "mutable", + self.handler.openFile, b"mutable", sftp.FXF_WRITE | sftp.FXF_CREAT | sftp.FXF_EXCL, {})) d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_FAILURE, "openFile mutable uri WRITE|CREAT|EXCL failure", - self.handler.openFile, "uri/"+self.mutable_uri, + self.handler.openFile, b"uri/"+self.mutable_uri, sftp.FXF_WRITE | sftp.FXF_CREAT | sftp.FXF_EXCL, {})) d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_FAILURE, "openFile tiny_lit_dir/short WRITE|CREAT|EXCL failure", - self.handler.openFile, "tiny_lit_dir/short", + self.handler.openFile, b"tiny_lit_dir/short", sftp.FXF_WRITE | sftp.FXF_CREAT | sftp.FXF_EXCL, {})) # cannot write to an immutable file if we don't have its parent (with or without CREAT, TRUNC, or EXCL) d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "openFile small uri WRITE denied", - self.handler.openFile, "uri/"+self.small_uri, sftp.FXF_WRITE, {})) + self.handler.openFile, b"uri/"+self.small_uri, sftp.FXF_WRITE, {})) d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "openFile small uri WRITE|CREAT denied", - self.handler.openFile, "uri/"+self.small_uri, + self.handler.openFile, b"uri/"+self.small_uri, sftp.FXF_WRITE | sftp.FXF_CREAT, {})) d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "openFile small uri WRITE|CREAT|TRUNC denied", - self.handler.openFile, "uri/"+self.small_uri, + self.handler.openFile, b"uri/"+self.small_uri, sftp.FXF_WRITE | sftp.FXF_CREAT | sftp.FXF_TRUNC, {})) d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "openFile small uri WRITE|CREAT|EXCL denied", - self.handler.openFile, "uri/"+self.small_uri, + self.handler.openFile, b"uri/"+self.small_uri, sftp.FXF_WRITE | sftp.FXF_CREAT | sftp.FXF_EXCL, {})) # test creating a new file with truncation and extension d.addCallback(lambda ign: - self.handler.openFile("newfile", sftp.FXF_WRITE | sftp.FXF_CREAT | sftp.FXF_TRUNC, {})) + self.handler.openFile(b"newfile", sftp.FXF_WRITE | sftp.FXF_CREAT | sftp.FXF_TRUNC, {})) def _write(wf): - d2 = wf.writeChunk(0, "0123456789") + d2 = wf.writeChunk(0, b"0123456789") d2.addCallback(lambda res: self.failUnlessReallyEqual(res, None)) - d2.addCallback(lambda ign: wf.writeChunk(8, "0123")) - d2.addCallback(lambda ign: wf.writeChunk(13, "abc")) + d2.addCallback(lambda ign: wf.writeChunk(8, b"0123")) + d2.addCallback(lambda ign: wf.writeChunk(13, b"abc")) d2.addCallback(lambda ign: wf.getAttrs()) d2.addCallback(lambda attrs: self._compareAttributes(attrs, {'permissions': S_IFREG | 0o666, 'size': 16})) - d2.addCallback(lambda ign: self.handler.getAttrs("newfile", followLinks=0)) + d2.addCallback(lambda ign: 
self.handler.getAttrs(b"newfile", followLinks=0)) d2.addCallback(lambda attrs: self._compareAttributes(attrs, {'permissions': S_IFREG | 0o666, 'size': 16})) d2.addCallback(lambda ign: wf.setAttrs({})) @@ -688,7 +695,7 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas d2.addCallback(lambda ign: wf.setAttrs({'size': 17})) d2.addCallback(lambda ign: wf.getAttrs()) d2.addCallback(lambda attrs: self.failUnlessReallyEqual(attrs['size'], 17)) - d2.addCallback(lambda ign: self.handler.getAttrs("newfile", followLinks=0)) + d2.addCallback(lambda ign: self.handler.getAttrs(b"newfile", followLinks=0)) d2.addCallback(lambda attrs: self.failUnlessReallyEqual(attrs['size'], 17)) d2.addCallback(lambda ign: @@ -699,7 +706,7 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas d2.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_BAD_MESSAGE, "writeChunk on closed file bad", - wf.writeChunk, 0, "a")) + wf.writeChunk, 0, b"a")) d2.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_BAD_MESSAGE, "setAttrs on closed file bad", wf.setAttrs, {'size': 0})) @@ -709,77 +716,77 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas d.addCallback(_write) d.addCallback(lambda ign: self.root.get(u"newfile")) d.addCallback(lambda node: download_to_data(node)) - d.addCallback(lambda data: self.failUnlessReallyEqual(data, "012345670123\x00a\x00\x00\x00")) + d.addCallback(lambda data: self.failUnlessReallyEqual(data, b"012345670123\x00a\x00\x00\x00")) # test APPEND flag, and also replacing an existing file ("newfile" created by the previous test) d.addCallback(lambda ign: - self.handler.openFile("newfile", sftp.FXF_WRITE | sftp.FXF_CREAT | + self.handler.openFile(b"newfile", sftp.FXF_WRITE | sftp.FXF_CREAT | sftp.FXF_TRUNC | sftp.FXF_APPEND, {})) def _write_append(wf): - d2 = wf.writeChunk(0, "0123456789") - d2.addCallback(lambda ign: wf.writeChunk(8, "0123")) + d2 = wf.writeChunk(0, b"0123456789") + d2.addCallback(lambda ign: wf.writeChunk(8, b"0123")) d2.addCallback(lambda ign: wf.setAttrs({'size': 17})) d2.addCallback(lambda ign: wf.getAttrs()) d2.addCallback(lambda attrs: self.failUnlessReallyEqual(attrs['size'], 17)) - d2.addCallback(lambda ign: wf.writeChunk(0, "z")) + d2.addCallback(lambda ign: wf.writeChunk(0, b"z")) d2.addCallback(lambda ign: wf.close()) return d2 d.addCallback(_write_append) d.addCallback(lambda ign: self.root.get(u"newfile")) d.addCallback(lambda node: download_to_data(node)) - d.addCallback(lambda data: self.failUnlessReallyEqual(data, "01234567890123\x00\x00\x00z")) + d.addCallback(lambda data: self.failUnlessReallyEqual(data, b"01234567890123\x00\x00\x00z")) # test WRITE | TRUNC without CREAT, when the file already exists # This is invalid according to section 6.3 of the SFTP spec, but required for interoperability, # since POSIX does allow O_WRONLY | O_TRUNC. 
d.addCallback(lambda ign: - self.handler.openFile("newfile", sftp.FXF_WRITE | sftp.FXF_TRUNC, {})) + self.handler.openFile(b"newfile", sftp.FXF_WRITE | sftp.FXF_TRUNC, {})) def _write_trunc(wf): - d2 = wf.writeChunk(0, "01234") + d2 = wf.writeChunk(0, b"01234") d2.addCallback(lambda ign: wf.close()) return d2 d.addCallback(_write_trunc) d.addCallback(lambda ign: self.root.get(u"newfile")) d.addCallback(lambda node: download_to_data(node)) - d.addCallback(lambda data: self.failUnlessReallyEqual(data, "01234")) + d.addCallback(lambda data: self.failUnlessReallyEqual(data, b"01234")) # test WRITE | TRUNC with permissions: 0 d.addCallback(lambda ign: - self.handler.openFile("newfile", sftp.FXF_WRITE | sftp.FXF_TRUNC, {'permissions': 0})) + self.handler.openFile(b"newfile", sftp.FXF_WRITE | sftp.FXF_TRUNC, {'permissions': 0})) d.addCallback(_write_trunc) d.addCallback(lambda ign: self.root.get(u"newfile")) d.addCallback(lambda node: download_to_data(node)) - d.addCallback(lambda data: self.failUnlessReallyEqual(data, "01234")) + d.addCallback(lambda data: self.failUnlessReallyEqual(data, b"01234")) d.addCallback(lambda ign: self.root.get_metadata_for(u"newfile")) d.addCallback(lambda metadata: self.failIf(metadata.get('no-write', False), metadata)) # test EXCL flag d.addCallback(lambda ign: - self.handler.openFile("excl", sftp.FXF_WRITE | sftp.FXF_CREAT | + self.handler.openFile(b"excl", sftp.FXF_WRITE | sftp.FXF_CREAT | sftp.FXF_TRUNC | sftp.FXF_EXCL, {})) def _write_excl(wf): d2 = self.root.get(u"excl") d2.addCallback(lambda node: download_to_data(node)) - d2.addCallback(lambda data: self.failUnlessReallyEqual(data, "")) + d2.addCallback(lambda data: self.failUnlessReallyEqual(data, b"")) - d2.addCallback(lambda ign: wf.writeChunk(0, "0123456789")) + d2.addCallback(lambda ign: wf.writeChunk(0, b"0123456789")) d2.addCallback(lambda ign: wf.close()) return d2 d.addCallback(_write_excl) d.addCallback(lambda ign: self.root.get(u"excl")) d.addCallback(lambda node: download_to_data(node)) - d.addCallback(lambda data: self.failUnlessReallyEqual(data, "0123456789")) + d.addCallback(lambda data: self.failUnlessReallyEqual(data, b"0123456789")) # test that writing a zero-length file with EXCL only updates the directory once d.addCallback(lambda ign: - self.handler.openFile("zerolength", sftp.FXF_WRITE | sftp.FXF_CREAT | + self.handler.openFile(b"zerolength", sftp.FXF_WRITE | sftp.FXF_CREAT | sftp.FXF_EXCL, {})) def _write_excl_zerolength(wf): d2 = self.root.get(u"zerolength") d2.addCallback(lambda node: download_to_data(node)) - d2.addCallback(lambda data: self.failUnlessReallyEqual(data, "")) + d2.addCallback(lambda data: self.failUnlessReallyEqual(data, b"")) # FIXME: no API to get the best version number exists (fix as part of #993) """ @@ -796,84 +803,84 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas d.addCallback(_write_excl_zerolength) d.addCallback(lambda ign: self.root.get(u"zerolength")) d.addCallback(lambda node: download_to_data(node)) - d.addCallback(lambda data: self.failUnlessReallyEqual(data, "")) + d.addCallback(lambda data: self.failUnlessReallyEqual(data, b"")) # test WRITE | CREAT | EXCL | APPEND d.addCallback(lambda ign: - self.handler.openFile("exclappend", sftp.FXF_WRITE | sftp.FXF_CREAT | + self.handler.openFile(b"exclappend", sftp.FXF_WRITE | sftp.FXF_CREAT | sftp.FXF_EXCL | sftp.FXF_APPEND, {})) def _write_excl_append(wf): d2 = self.root.get(u"exclappend") d2.addCallback(lambda node: download_to_data(node)) - d2.addCallback(lambda data: 
self.failUnlessReallyEqual(data, "")) + d2.addCallback(lambda data: self.failUnlessReallyEqual(data, b"")) - d2.addCallback(lambda ign: wf.writeChunk(10, "0123456789")) - d2.addCallback(lambda ign: wf.writeChunk(5, "01234")) + d2.addCallback(lambda ign: wf.writeChunk(10, b"0123456789")) + d2.addCallback(lambda ign: wf.writeChunk(5, b"01234")) d2.addCallback(lambda ign: wf.close()) return d2 d.addCallback(_write_excl_append) d.addCallback(lambda ign: self.root.get(u"exclappend")) d.addCallback(lambda node: download_to_data(node)) - d.addCallback(lambda data: self.failUnlessReallyEqual(data, "012345678901234")) + d.addCallback(lambda data: self.failUnlessReallyEqual(data, b"012345678901234")) # test WRITE | CREAT | APPEND when the file does not already exist d.addCallback(lambda ign: - self.handler.openFile("creatappend", sftp.FXF_WRITE | sftp.FXF_CREAT | + self.handler.openFile(b"creatappend", sftp.FXF_WRITE | sftp.FXF_CREAT | sftp.FXF_APPEND, {})) def _write_creat_append_new(wf): - d2 = wf.writeChunk(10, "0123456789") - d2.addCallback(lambda ign: wf.writeChunk(5, "01234")) + d2 = wf.writeChunk(10, b"0123456789") + d2.addCallback(lambda ign: wf.writeChunk(5, b"01234")) d2.addCallback(lambda ign: wf.close()) return d2 d.addCallback(_write_creat_append_new) d.addCallback(lambda ign: self.root.get(u"creatappend")) d.addCallback(lambda node: download_to_data(node)) - d.addCallback(lambda data: self.failUnlessReallyEqual(data, "012345678901234")) + d.addCallback(lambda data: self.failUnlessReallyEqual(data, b"012345678901234")) # ... and when it does exist d.addCallback(lambda ign: - self.handler.openFile("creatappend", sftp.FXF_WRITE | sftp.FXF_CREAT | + self.handler.openFile(b"creatappend", sftp.FXF_WRITE | sftp.FXF_CREAT | sftp.FXF_APPEND, {})) def _write_creat_append_existing(wf): - d2 = wf.writeChunk(5, "01234") + d2 = wf.writeChunk(5, b"01234") d2.addCallback(lambda ign: wf.close()) return d2 d.addCallback(_write_creat_append_existing) d.addCallback(lambda ign: self.root.get(u"creatappend")) d.addCallback(lambda node: download_to_data(node)) - d.addCallback(lambda data: self.failUnlessReallyEqual(data, "01234567890123401234")) + d.addCallback(lambda data: self.failUnlessReallyEqual(data, b"01234567890123401234")) # test WRITE | CREAT without TRUNC, when the file does not already exist d.addCallback(lambda ign: - self.handler.openFile("newfile2", sftp.FXF_WRITE | sftp.FXF_CREAT, {})) + self.handler.openFile(b"newfile2", sftp.FXF_WRITE | sftp.FXF_CREAT, {})) def _write_creat_new(wf): - d2 = wf.writeChunk(0, "0123456789") + d2 = wf.writeChunk(0, b"0123456789") d2.addCallback(lambda ign: wf.close()) return d2 d.addCallback(_write_creat_new) d.addCallback(lambda ign: self.root.get(u"newfile2")) d.addCallback(lambda node: download_to_data(node)) - d.addCallback(lambda data: self.failUnlessReallyEqual(data, "0123456789")) + d.addCallback(lambda data: self.failUnlessReallyEqual(data, b"0123456789")) # ... 
and when it does exist d.addCallback(lambda ign: - self.handler.openFile("newfile2", sftp.FXF_WRITE | sftp.FXF_CREAT, {})) + self.handler.openFile(b"newfile2", sftp.FXF_WRITE | sftp.FXF_CREAT, {})) def _write_creat_existing(wf): - d2 = wf.writeChunk(0, "abcde") + d2 = wf.writeChunk(0, b"abcde") d2.addCallback(lambda ign: wf.close()) return d2 d.addCallback(_write_creat_existing) d.addCallback(lambda ign: self.root.get(u"newfile2")) d.addCallback(lambda node: download_to_data(node)) - d.addCallback(lambda data: self.failUnlessReallyEqual(data, "abcde56789")) + d.addCallback(lambda data: self.failUnlessReallyEqual(data, b"abcde56789")) d.addCallback(lambda ign: self.root.set_node(u"mutable2", self.mutable)) # test writing to a mutable file d.addCallback(lambda ign: - self.handler.openFile("mutable", sftp.FXF_WRITE, {})) + self.handler.openFile(b"mutable", sftp.FXF_WRITE, {})) def _write_mutable(wf): - d2 = wf.writeChunk(8, "new!") + d2 = wf.writeChunk(8, b"new!") d2.addCallback(lambda ign: wf.close()) return d2 d.addCallback(_write_mutable) @@ -884,30 +891,30 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas self.failUnlessReallyEqual(node.get_uri(), self.mutable_uri) return node.download_best_version() d.addCallback(_check_same_file) - d.addCallback(lambda data: self.failUnlessReallyEqual(data, "mutable new! contents")) + d.addCallback(lambda data: self.failUnlessReallyEqual(data, b"mutable new! contents")) # ... and with permissions, which should be ignored d.addCallback(lambda ign: - self.handler.openFile("mutable", sftp.FXF_WRITE, {'permissions': 0})) + self.handler.openFile(b"mutable", sftp.FXF_WRITE, {'permissions': 0})) d.addCallback(_write_mutable) d.addCallback(lambda ign: self.root.get(u"mutable")) d.addCallback(_check_same_file) - d.addCallback(lambda data: self.failUnlessReallyEqual(data, "mutable new! contents")) + d.addCallback(lambda data: self.failUnlessReallyEqual(data, b"mutable new! contents")) # ... and with a setAttrs call that diminishes the parent link to read-only, first by path d.addCallback(lambda ign: - self.handler.openFile("mutable", sftp.FXF_WRITE, {})) + self.handler.openFile(b"mutable", sftp.FXF_WRITE, {})) def _write_mutable_setattr(wf): - d2 = wf.writeChunk(8, "read-only link from parent") + d2 = wf.writeChunk(8, b"read-only link from parent") - d2.addCallback(lambda ign: self.handler.setAttrs("mutable", {'permissions': 0o444})) + d2.addCallback(lambda ign: self.handler.setAttrs(b"mutable", {'permissions': 0o444})) d2.addCallback(lambda ign: self.root.get(u"mutable")) d2.addCallback(lambda node: self.failUnless(node.is_readonly())) d2.addCallback(lambda ign: wf.getAttrs()) d2.addCallback(lambda attrs: self.failUnlessReallyEqual(attrs['permissions'], S_IFREG | 0o666)) - d2.addCallback(lambda ign: self.handler.getAttrs("mutable", followLinks=0)) + d2.addCallback(lambda ign: self.handler.getAttrs(b"mutable", followLinks=0)) d2.addCallback(lambda attrs: self.failUnlessReallyEqual(attrs['permissions'], S_IFREG | 0o444)) d2.addCallback(lambda ign: wf.close()) @@ -921,13 +928,13 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas self.failUnlessReallyEqual(node.get_storage_index(), self.mutable.get_storage_index()) return node.download_best_version() d.addCallback(_check_readonly_file) - d.addCallback(lambda data: self.failUnlessReallyEqual(data, "mutable read-only link from parent")) + d.addCallback(lambda data: self.failUnlessReallyEqual(data, b"mutable read-only link from parent")) # ... 
and then by handle d.addCallback(lambda ign: - self.handler.openFile("mutable2", sftp.FXF_WRITE, {})) + self.handler.openFile(b"mutable2", sftp.FXF_WRITE, {})) def _write_mutable2_setattr(wf): - d2 = wf.writeChunk(7, "2") + d2 = wf.writeChunk(7, b"2") d2.addCallback(lambda ign: wf.setAttrs({'permissions': 0o444, 'size': 8})) @@ -937,7 +944,7 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas d2.addCallback(lambda ign: wf.getAttrs()) d2.addCallback(lambda attrs: self.failUnlessReallyEqual(attrs['permissions'], S_IFREG | 0o444)) - d2.addCallback(lambda ign: self.handler.getAttrs("mutable2", followLinks=0)) + d2.addCallback(lambda ign: self.handler.getAttrs(b"mutable2", followLinks=0)) d2.addCallback(lambda attrs: self.failUnlessReallyEqual(attrs['permissions'], S_IFREG | 0o666)) d2.addCallback(lambda ign: wf.close()) @@ -945,55 +952,55 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas d.addCallback(_write_mutable2_setattr) d.addCallback(lambda ign: self.root.get(u"mutable2")) d.addCallback(_check_readonly_file) # from above - d.addCallback(lambda data: self.failUnlessReallyEqual(data, "mutable2")) + d.addCallback(lambda data: self.failUnlessReallyEqual(data, b"mutable2")) # test READ | WRITE without CREAT or TRUNC d.addCallback(lambda ign: - self.handler.openFile("small", sftp.FXF_READ | sftp.FXF_WRITE, {})) + self.handler.openFile(b"small", sftp.FXF_READ | sftp.FXF_WRITE, {})) def _read_write(rwf): - d2 = rwf.writeChunk(8, "0123") + d2 = rwf.writeChunk(8, b"0123") # test immediate read starting after the old end-of-file d2.addCallback(lambda ign: rwf.readChunk(11, 1)) - d2.addCallback(lambda data: self.failUnlessReallyEqual(data, "3")) + d2.addCallback(lambda data: self.failUnlessReallyEqual(data, b"3")) d2.addCallback(lambda ign: rwf.readChunk(0, 100)) - d2.addCallback(lambda data: self.failUnlessReallyEqual(data, "012345670123")) + d2.addCallback(lambda data: self.failUnlessReallyEqual(data, b"012345670123")) d2.addCallback(lambda ign: rwf.close()) return d2 d.addCallback(_read_write) d.addCallback(lambda ign: self.root.get(u"small")) d.addCallback(lambda node: download_to_data(node)) - d.addCallback(lambda data: self.failUnlessReallyEqual(data, "012345670123")) + d.addCallback(lambda data: self.failUnlessReallyEqual(data, b"012345670123")) # test WRITE and rename while still open d.addCallback(lambda ign: - self.handler.openFile("small", sftp.FXF_WRITE, {})) + self.handler.openFile(b"small", sftp.FXF_WRITE, {})) def _write_rename(wf): - d2 = wf.writeChunk(0, "abcd") - d2.addCallback(lambda ign: self.handler.renameFile("small", "renamed")) - d2.addCallback(lambda ign: wf.writeChunk(4, "efgh")) + d2 = wf.writeChunk(0, b"abcd") + d2.addCallback(lambda ign: self.handler.renameFile(b"small", b"renamed")) + d2.addCallback(lambda ign: wf.writeChunk(4, b"efgh")) d2.addCallback(lambda ign: wf.close()) return d2 d.addCallback(_write_rename) d.addCallback(lambda ign: self.root.get(u"renamed")) d.addCallback(lambda node: download_to_data(node)) - d.addCallback(lambda data: self.failUnlessReallyEqual(data, "abcdefgh0123")) + d.addCallback(lambda data: self.failUnlessReallyEqual(data, b"abcdefgh0123")) d.addCallback(lambda ign: self.shouldFail(NoSuchChildError, "rename small while open", "small", self.root.get, u"small")) # test WRITE | CREAT | EXCL and rename while still open d.addCallback(lambda ign: - self.handler.openFile("newexcl", sftp.FXF_WRITE | sftp.FXF_CREAT | sftp.FXF_EXCL, {})) + self.handler.openFile(b"newexcl", 
sftp.FXF_WRITE | sftp.FXF_CREAT | sftp.FXF_EXCL, {})) def _write_creat_excl_rename(wf): - d2 = wf.writeChunk(0, "abcd") - d2.addCallback(lambda ign: self.handler.renameFile("newexcl", "renamedexcl")) - d2.addCallback(lambda ign: wf.writeChunk(4, "efgh")) + d2 = wf.writeChunk(0, b"abcd") + d2.addCallback(lambda ign: self.handler.renameFile(b"newexcl", b"renamedexcl")) + d2.addCallback(lambda ign: wf.writeChunk(4, b"efgh")) d2.addCallback(lambda ign: wf.close()) return d2 d.addCallback(_write_creat_excl_rename) d.addCallback(lambda ign: self.root.get(u"renamedexcl")) d.addCallback(lambda node: download_to_data(node)) - d.addCallback(lambda data: self.failUnlessReallyEqual(data, "abcdefgh")) + d.addCallback(lambda data: self.failUnlessReallyEqual(data, b"abcdefgh")) d.addCallback(lambda ign: self.shouldFail(NoSuchChildError, "rename newexcl while open", "newexcl", self.root.get, u"newexcl")) @@ -1002,21 +1009,21 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas def _open_and_rename_race(ign): slow_open = defer.Deferred() reactor.callLater(1, slow_open.callback, None) - d2 = self.handler.openFile("new", sftp.FXF_WRITE | sftp.FXF_CREAT, {}, delay=slow_open) + d2 = self.handler.openFile(b"new", sftp.FXF_WRITE | sftp.FXF_CREAT, {}, delay=slow_open) # deliberate race between openFile and renameFile - d3 = self.handler.renameFile("new", "new2") + d3 = self.handler.renameFile(b"new", b"new2") d3.addErrback(lambda err: self.fail("renameFile failed: %r" % (err,))) return d2 d.addCallback(_open_and_rename_race) def _write_rename_race(wf): - d2 = wf.writeChunk(0, "abcd") + d2 = wf.writeChunk(0, b"abcd") d2.addCallback(lambda ign: wf.close()) return d2 d.addCallback(_write_rename_race) d.addCallback(lambda ign: self.root.get(u"new2")) d.addCallback(lambda node: download_to_data(node)) - d.addCallback(lambda data: self.failUnlessReallyEqual(data, "abcd")) + d.addCallback(lambda data: self.failUnlessReallyEqual(data, b"abcd")) d.addCallback(lambda ign: self.shouldFail(NoSuchChildError, "rename new while open", "new", self.root.get, u"new")) @@ -1027,7 +1034,7 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas gross = u"gro\u00DF".encode("utf-8") d.addCallback(lambda ign: self.handler.openFile(gross, sftp.FXF_READ | sftp.FXF_WRITE, {})) def _read_write_broken(rwf): - d2 = rwf.writeChunk(0, "abcdefghij") + d2 = rwf.writeChunk(0, b"abcdefghij") d2.addCallback(lambda ign: self.g.nuke_from_orbit()) # reading should fail (reliably if we read past the written chunk) @@ -1051,57 +1058,57 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "removeFile nofile", - self.handler.removeFile, "nofile")) + self.handler.removeFile, b"nofile")) d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "removeFile nofile", - self.handler.removeFile, "nofile")) + self.handler.removeFile, b"nofile")) d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "removeFile nodir/file", - self.handler.removeFile, "nodir/file")) + self.handler.removeFile, b"nodir/file")) d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "removefile ''", - self.handler.removeFile, "")) + self.handler.removeFile, b"")) # removing a directory should fail d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "removeFile tiny_lit_dir", - self.handler.removeFile, "tiny_lit_dir")) + 
self.handler.removeFile, b"tiny_lit_dir")) # removing a file should succeed d.addCallback(lambda ign: self.root.get(u"gro\u00DF")) d.addCallback(lambda ign: self.handler.removeFile(u"gro\u00DF".encode('utf-8'))) d.addCallback(lambda ign: - self.shouldFail(NoSuchChildError, "removeFile gross", "gro\\xdf", + self.shouldFail(NoSuchChildError, "removeFile gross", "gro", self.root.get, u"gro\u00DF")) # removing an unknown should succeed d.addCallback(lambda ign: self.root.get(u"unknown")) - d.addCallback(lambda ign: self.handler.removeFile("unknown")) + d.addCallback(lambda ign: self.handler.removeFile(b"unknown")) d.addCallback(lambda ign: self.shouldFail(NoSuchChildError, "removeFile unknown", "unknown", self.root.get, u"unknown")) # removing a link to an open file should not prevent it from being read - d.addCallback(lambda ign: self.handler.openFile("small", sftp.FXF_READ, {})) + d.addCallback(lambda ign: self.handler.openFile(b"small", sftp.FXF_READ, {})) def _remove_and_read_small(rf): - d2 = self.handler.removeFile("small") + d2 = self.handler.removeFile(b"small") d2.addCallback(lambda ign: self.shouldFail(NoSuchChildError, "removeFile small", "small", self.root.get, u"small")) d2.addCallback(lambda ign: rf.readChunk(0, 10)) - d2.addCallback(lambda data: self.failUnlessReallyEqual(data, "0123456789")) + d2.addCallback(lambda data: self.failUnlessReallyEqual(data, b"0123456789")) d2.addCallback(lambda ign: rf.close()) return d2 d.addCallback(_remove_and_read_small) # removing a link to a created file should prevent it from being created - d.addCallback(lambda ign: self.handler.openFile("tempfile", sftp.FXF_READ | sftp.FXF_WRITE | + d.addCallback(lambda ign: self.handler.openFile(b"tempfile", sftp.FXF_READ | sftp.FXF_WRITE | sftp.FXF_CREAT, {})) def _write_remove(rwf): - d2 = rwf.writeChunk(0, "0123456789") - d2.addCallback(lambda ign: self.handler.removeFile("tempfile")) + d2 = rwf.writeChunk(0, b"0123456789") + d2.addCallback(lambda ign: self.handler.removeFile(b"tempfile")) d2.addCallback(lambda ign: rwf.readChunk(0, 10)) - d2.addCallback(lambda data: self.failUnlessReallyEqual(data, "0123456789")) + d2.addCallback(lambda data: self.failUnlessReallyEqual(data, b"0123456789")) d2.addCallback(lambda ign: rwf.close()) return d2 d.addCallback(_write_remove) @@ -1110,14 +1117,14 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas self.root.get, u"tempfile")) # ... 
even if the link is renamed while open - d.addCallback(lambda ign: self.handler.openFile("tempfile2", sftp.FXF_READ | sftp.FXF_WRITE | + d.addCallback(lambda ign: self.handler.openFile(b"tempfile2", sftp.FXF_READ | sftp.FXF_WRITE | sftp.FXF_CREAT, {})) def _write_rename_remove(rwf): - d2 = rwf.writeChunk(0, "0123456789") - d2.addCallback(lambda ign: self.handler.renameFile("tempfile2", "tempfile3")) - d2.addCallback(lambda ign: self.handler.removeFile("tempfile3")) + d2 = rwf.writeChunk(0, b"0123456789") + d2.addCallback(lambda ign: self.handler.renameFile(b"tempfile2", b"tempfile3")) + d2.addCallback(lambda ign: self.handler.removeFile(b"tempfile3")) d2.addCallback(lambda ign: rwf.readChunk(0, 10)) - d2.addCallback(lambda data: self.failUnlessReallyEqual(data, "0123456789")) + d2.addCallback(lambda data: self.failUnlessReallyEqual(data, b"0123456789")) d2.addCallback(lambda ign: rwf.close()) return d2 d.addCallback(_write_rename_remove) @@ -1138,13 +1145,13 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "removeDirectory nodir", - self.handler.removeDirectory, "nodir")) + self.handler.removeDirectory, b"nodir")) d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "removeDirectory nodir/nodir", - self.handler.removeDirectory, "nodir/nodir")) + self.handler.removeDirectory, b"nodir/nodir")) d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "removeDirectory ''", - self.handler.removeDirectory, "")) + self.handler.removeDirectory, b"")) # removing a file should fail d.addCallback(lambda ign: @@ -1153,14 +1160,14 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas # removing a directory should succeed d.addCallback(lambda ign: self.root.get(u"tiny_lit_dir")) - d.addCallback(lambda ign: self.handler.removeDirectory("tiny_lit_dir")) + d.addCallback(lambda ign: self.handler.removeDirectory(b"tiny_lit_dir")) d.addCallback(lambda ign: self.shouldFail(NoSuchChildError, "removeDirectory tiny_lit_dir", "tiny_lit_dir", self.root.get, u"tiny_lit_dir")) # removing an unknown should succeed d.addCallback(lambda ign: self.root.get(u"unknown")) - d.addCallback(lambda ign: self.handler.removeDirectory("unknown")) + d.addCallback(lambda ign: self.handler.removeDirectory(b"unknown")) d.addCallback(lambda err: self.shouldFail(NoSuchChildError, "removeDirectory unknown", "unknown", self.root.get, u"unknown")) @@ -1176,58 +1183,58 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas # renaming a non-existent file should fail d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "renameFile nofile newfile", - self.handler.renameFile, "nofile", "newfile")) + self.handler.renameFile, b"nofile", b"newfile")) d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "renameFile '' newfile", - self.handler.renameFile, "", "newfile")) + self.handler.renameFile, b"", b"newfile")) # renaming a file to a non-existent path should fail d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "renameFile small nodir/small", - self.handler.renameFile, "small", "nodir/small")) + self.handler.renameFile, b"small", b"nodir/small")) # renaming a file to an invalid UTF-8 name should fail d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "renameFile small invalid", - self.handler.renameFile, "small", "\xFF")) + 
self.handler.renameFile, b"small", b"\xFF")) # renaming a file to or from an URI should fail d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "renameFile small from uri", - self.handler.renameFile, "uri/"+self.small_uri, "new")) + self.handler.renameFile, b"uri/"+self.small_uri, b"new")) d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "renameFile small to uri", - self.handler.renameFile, "small", "uri/fake_uri")) + self.handler.renameFile, b"small", b"uri/fake_uri")) # renaming a file onto an existing file, directory or unknown should fail # The SFTP spec isn't clear about what error should be returned, but sshfs depends on # it being FX_PERMISSION_DENIED. d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "renameFile small small2", - self.handler.renameFile, "small", "small2")) + self.handler.renameFile, b"small", b"small2")) d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "renameFile small tiny_lit_dir", - self.handler.renameFile, "small", "tiny_lit_dir")) + self.handler.renameFile, b"small", b"tiny_lit_dir")) d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "renameFile small unknown", - self.handler.renameFile, "small", "unknown")) + self.handler.renameFile, b"small", b"unknown")) # renaming a file onto a heisenfile should fail, even if the open hasn't completed def _rename_onto_heisenfile_race(wf): slow_open = defer.Deferred() reactor.callLater(1, slow_open.callback, None) - d2 = self.handler.openFile("heisenfile", sftp.FXF_WRITE | sftp.FXF_CREAT, {}, delay=slow_open) + d2 = self.handler.openFile(b"heisenfile", sftp.FXF_WRITE | sftp.FXF_CREAT, {}, delay=slow_open) # deliberate race between openFile and renameFile d3 = self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "renameFile small heisenfile", - self.handler.renameFile, "small", "heisenfile") + self.handler.renameFile, b"small", b"heisenfile") d2.addCallback(lambda wf: wf.close()) return deferredutil.gatherResults([d2, d3]) d.addCallback(_rename_onto_heisenfile_race) # renaming a file to a correct path should succeed - d.addCallback(lambda ign: self.handler.renameFile("small", "new_small")) + d.addCallback(lambda ign: self.handler.renameFile(b"small", b"new_small")) d.addCallback(lambda ign: self.root.get(u"new_small")) d.addCallback(lambda node: self.failUnlessReallyEqual(node.get_uri(), self.small_uri)) @@ -1238,12 +1245,12 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas d.addCallback(lambda node: self.failUnlessReallyEqual(node.get_uri(), self.gross_uri)) # renaming a directory to a correct path should succeed - d.addCallback(lambda ign: self.handler.renameFile("tiny_lit_dir", "new_tiny_lit_dir")) + d.addCallback(lambda ign: self.handler.renameFile(b"tiny_lit_dir", b"new_tiny_lit_dir")) d.addCallback(lambda ign: self.root.get(u"new_tiny_lit_dir")) d.addCallback(lambda node: self.failUnlessReallyEqual(node.get_uri(), self.tiny_lit_dir_uri)) # renaming an unknown to a correct path should succeed - d.addCallback(lambda ign: self.handler.renameFile("unknown", "new_unknown")) + d.addCallback(lambda ign: self.handler.renameFile(b"unknown", b"new_unknown")) d.addCallback(lambda ign: self.root.get(u"new_unknown")) d.addCallback(lambda node: self.failUnlessReallyEqual(node.get_uri(), self.unknown_uri)) @@ -1256,7 +1263,7 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas extData = (struct.pack('>L', 
len(fromPathstring)) + fromPathstring + struct.pack('>L', len(toPathstring)) + toPathstring) - d2 = self.handler.extendedRequest('posix-rename@openssh.com', extData) + d2 = self.handler.extendedRequest(b'posix-rename@openssh.com', extData) def _check(res): res.trap(sftp.SFTPError) if res.value.code == sftp.FX_OK: @@ -1276,44 +1283,44 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas # POSIX-renaming a non-existent file should fail d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "renameFile_posix nofile newfile", - _renameFile, "nofile", "newfile")) + _renameFile, b"nofile", b"newfile")) d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "renameFile_posix '' newfile", - _renameFile, "", "newfile")) + _renameFile, b"", b"newfile")) # POSIX-renaming a file to a non-existent path should fail d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "renameFile_posix small nodir/small", - _renameFile, "small", "nodir/small")) + _renameFile, b"small", b"nodir/small")) # POSIX-renaming a file to an invalid UTF-8 name should fail d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "renameFile_posix small invalid", - _renameFile, "small", "\xFF")) + _renameFile, b"small", b"\xFF")) # POSIX-renaming a file to or from an URI should fail d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "renameFile_posix small from uri", - _renameFile, "uri/"+self.small_uri, "new")) + _renameFile, b"uri/"+self.small_uri, b"new")) d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "renameFile_posix small to uri", - _renameFile, "small", "uri/fake_uri")) + _renameFile, b"small", b"uri/fake_uri")) # POSIX-renaming a file onto an existing file, directory or unknown should succeed - d.addCallback(lambda ign: _renameFile("small", "small2")) + d.addCallback(lambda ign: _renameFile(b"small", b"small2")) d.addCallback(lambda ign: self.root.get(u"small2")) d.addCallback(lambda node: self.failUnlessReallyEqual(node.get_uri(), self.small_uri)) - d.addCallback(lambda ign: _renameFile("small2", "loop2")) + d.addCallback(lambda ign: _renameFile(b"small2", b"loop2")) d.addCallback(lambda ign: self.root.get(u"loop2")) d.addCallback(lambda node: self.failUnlessReallyEqual(node.get_uri(), self.small_uri)) - d.addCallback(lambda ign: _renameFile("loop2", "unknown2")) + d.addCallback(lambda ign: _renameFile(b"loop2", b"unknown2")) d.addCallback(lambda ign: self.root.get(u"unknown2")) d.addCallback(lambda node: self.failUnlessReallyEqual(node.get_uri(), self.small_uri)) # POSIX-renaming a file to a correct new path should succeed - d.addCallback(lambda ign: _renameFile("unknown2", "new_small")) + d.addCallback(lambda ign: _renameFile(b"unknown2", b"new_small")) d.addCallback(lambda ign: self.root.get(u"new_small")) d.addCallback(lambda node: self.failUnlessReallyEqual(node.get_uri(), self.small_uri)) @@ -1324,12 +1331,12 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas d.addCallback(lambda node: self.failUnlessReallyEqual(node.get_uri(), self.gross_uri)) # POSIX-renaming a directory to a correct path should succeed - d.addCallback(lambda ign: _renameFile("tiny_lit_dir", "new_tiny_lit_dir")) + d.addCallback(lambda ign: _renameFile(b"tiny_lit_dir", b"new_tiny_lit_dir")) d.addCallback(lambda ign: self.root.get(u"new_tiny_lit_dir")) d.addCallback(lambda node: self.failUnlessReallyEqual(node.get_uri(), self.tiny_lit_dir_uri)) # 
POSIX-renaming an unknown to a correct path should succeed - d.addCallback(lambda ign: _renameFile("unknown", "new_unknown")) + d.addCallback(lambda ign: _renameFile(b"unknown", b"new_unknown")) d.addCallback(lambda ign: self.root.get(u"new_unknown")) d.addCallback(lambda node: self.failUnlessReallyEqual(node.get_uri(), self.unknown_uri)) @@ -1342,7 +1349,7 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas d.addCallback(lambda ign: self._set_up_tree()) # making a directory at a correct path should succeed - d.addCallback(lambda ign: self.handler.makeDirectory("newdir", {'ext_foo': 'bar', 'ctime': 42})) + d.addCallback(lambda ign: self.handler.makeDirectory(b"newdir", {'ext_foo': 'bar', 'ctime': 42})) d.addCallback(lambda ign: self.root.get_child_and_metadata(u"newdir")) def _got(child_and_metadata): @@ -1358,7 +1365,7 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas d.addCallback(_got) # making intermediate directories should also succeed - d.addCallback(lambda ign: self.handler.makeDirectory("newparent/newchild", {})) + d.addCallback(lambda ign: self.handler.makeDirectory(b"newparent/newchild", {})) d.addCallback(lambda ign: self.root.get(u"newparent")) def _got_newparent(newparent): @@ -1374,17 +1381,17 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_NO_SUCH_FILE, "makeDirectory invalid UTF-8", - self.handler.makeDirectory, "\xFF", {})) + self.handler.makeDirectory, b"\xFF", {})) # should fail because there is an existing file "small" d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_FAILURE, "makeDirectory small", - self.handler.makeDirectory, "small", {})) + self.handler.makeDirectory, b"small", {})) # directories cannot be created read-only via SFTP d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "makeDirectory newdir2 permissions:0444 denied", - self.handler.makeDirectory, "newdir2", + self.handler.makeDirectory, b"newdir2", {'permissions': 0o444})) d.addCallback(lambda ign: self.failUnlessEqual(sftpd.all_heisenfiles, {})) @@ -1464,24 +1471,24 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas def test_extendedRequest(self): d = self._set_up("extendedRequest") - d.addCallback(lambda ign: self.handler.extendedRequest("statvfs@openssh.com", "/")) + d.addCallback(lambda ign: self.handler.extendedRequest(b"statvfs@openssh.com", b"/")) def _check(res): - self.failUnless(isinstance(res, str)) + self.failUnless(isinstance(res, bytes)) self.failUnlessEqual(len(res), 8*11) d.addCallback(_check) d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_OP_UNSUPPORTED, "extendedRequest foo bar", - self.handler.extendedRequest, "foo", "bar")) + self.handler.extendedRequest, b"foo", b"bar")) d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_BAD_MESSAGE, "extendedRequest posix-rename@openssh.com invalid 1", - self.handler.extendedRequest, 'posix-rename@openssh.com', '')) + self.handler.extendedRequest, b'posix-rename@openssh.com', b'')) d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_BAD_MESSAGE, "extendedRequest posix-rename@openssh.com invalid 2", - self.handler.extendedRequest, 'posix-rename@openssh.com', '\x00\x00\x00\x01')) + self.handler.extendedRequest, b'posix-rename@openssh.com', b'\x00\x00\x00\x01')) d.addCallback(lambda ign: self.shouldFailWithSFTPError(sftp.FX_BAD_MESSAGE, "extendedRequest 
posix-rename@openssh.com invalid 3", - self.handler.extendedRequest, 'posix-rename@openssh.com', '\x00\x00\x00\x01_\x00\x00\x00\x01')) + self.handler.extendedRequest, b'posix-rename@openssh.com', b'\x00\x00\x00\x01_\x00\x00\x00\x01')) return d diff --git a/src/allmydata/test/test_stats.py b/src/allmydata/test/test_stats.py index 3ee495927..e56f9d444 100644 --- a/src/allmydata/test/test_stats.py +++ b/src/allmydata/test/test_stats.py @@ -1,3 +1,14 @@ +""" +Ported to Python 3. +""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 from twisted.trial import unittest from twisted.application import service diff --git a/src/allmydata/test/test_storage_client.py b/src/allmydata/test/test_storage_client.py index 00343ea20..8500d6bff 100644 --- a/src/allmydata/test/test_storage_client.py +++ b/src/allmydata/test/test_storage_client.py @@ -1,9 +1,22 @@ -import hashlib -from mock import Mock +""" +Ported to Python 3. +""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 + +from six import ensure_text + from json import ( - dumps, loads, ) + +import hashlib from fixtures import ( TempDir, ) @@ -30,12 +43,20 @@ from hyperlink import ( URL, ) +import attr + +from twisted.internet.interfaces import ( + IStreamClientEndpoint, +) from twisted.application.service import ( Service, ) from twisted.trial import unittest -from twisted.internet.defer import succeed, inlineCallbacks +from twisted.internet.defer import ( + Deferred, + inlineCallbacks, +) from twisted.python.filepath import ( FilePath, ) @@ -43,7 +64,11 @@ from twisted.python.filepath import ( from foolscap.api import ( Tub, ) +from foolscap.ipb import ( + IConnectionHintHandler, +) +from .no_network import LocalWrapper from .common import ( EMPTY_CLIENT_CONFIG, SyncTestCase, @@ -51,6 +76,7 @@ from .common import ( UseTestPlugins, UseNode, SameProcessStreamEndpointAssigner, + MemoryIntroducerClient, ) from .common_web import ( do_http, @@ -69,14 +95,18 @@ from allmydata.storage_client import ( _FoolscapStorage, _NullStorage, ) +from ..storage.server import ( + StorageServer, +) from allmydata.interfaces import ( IConnectionStatus, IStorageServer, ) -SOME_FURL = b"pb://abcde@nowhere/fake" +SOME_FURL = "pb://abcde@nowhere/fake" -class NativeStorageServerWithVersion(NativeStorageServer): + +class NativeStorageServerWithVersion(NativeStorageServer): # type: ignore # tahoe-lafs/ticket/3573 def __init__(self, version): # note: these instances won't work for anything other than # get_available_space() because we don't upcall @@ -107,7 +137,7 @@ class TestNativeStorageServer(unittest.TestCase): ann = {"anonymous-storage-FURL": "pb://w2hqnbaa25yw4qgcvghl5psa3srpfgw3@tcp:127.0.0.1:51309/vucto2z4fxment3vfxbqecblbf6zyp6x", "permutation-seed-base32": "w2hqnbaa25yw4qgcvghl5psa3srpfgw3", } - nss = NativeStorageServer("server_id", ann, None, {}, EMPTY_CLIENT_CONFIG) + nss = NativeStorageServer(b"server_id", ann, None, {}, EMPTY_CLIENT_CONFIG)
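        # A minimal sketch of the bytes/str convention at work in these
        # changes (names as in this file): server IDs are bytes on both
        # Python 2 and 3, while FURLs and the other announcement values are
        # native strings, e.g.:
        #
        #     NativeStorageServer(
        #         b"server_id",                          # bytes
        #         {"anonymous-storage-FURL": SOME_FURL,  # native str
        #          "permutation-seed-base32": "w2hqnbaa25yw4qgcvghl5psa3srpfgw3"},
        #         None, {}, EMPTY_CLIENT_CONFIG)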
self.assertEqual(nss.get_nickname(), "") @@ -123,7 +153,7 @@ class GetConnectionStatus(unittest.TestCase): """ # Pretty hard to recognize anything from an empty announcement. ann = {} - nss = NativeStorageServer("server_id", ann, Tub, {}, EMPTY_CLIENT_CONFIG) + nss = NativeStorageServer(b"server_id", ann, Tub, {}, EMPTY_CLIENT_CONFIG) nss.start_connecting(lambda: None) connection_status = nss.get_connection_status() self.assertTrue(IConnectionStatus.providedBy(connection_status)) @@ -271,7 +301,7 @@ class PluginMatchedAnnouncement(SyncTestCase): """ yield self.make_node( introducer_furl=SOME_FURL, - storage_plugin=b"tahoe-lafs-dummy-v1", + storage_plugin="tahoe-lafs-dummy-v1", plugin_config=None, ) server_id = b"v0-abcdef" @@ -281,7 +311,7 @@ class PluginMatchedAnnouncement(SyncTestCase): # notice how the announcement is for a different storage plugin # than the one that is enabled. u"name": u"tahoe-lafs-dummy-v2", - u"storage-server-FURL": SOME_FURL.decode("ascii"), + u"storage-server-FURL": SOME_FURL, }], } self.publish(server_id, ann, self.introducer_client) @@ -295,9 +325,9 @@ class PluginMatchedAnnouncement(SyncTestCase): configuration is matched and the plugin's storage client is used. """ plugin_config = { - b"abc": b"xyz", + "abc": "xyz", } - plugin_name = b"tahoe-lafs-dummy-v1" + plugin_name = "tahoe-lafs-dummy-v1" yield self.make_node( introducer_furl=SOME_FURL, storage_plugin=plugin_name, @@ -309,7 +339,7 @@ class PluginMatchedAnnouncement(SyncTestCase): u"storage-options": [{ # and this announcement is for a plugin with a matching name u"name": plugin_name, - u"storage-server-FURL": SOME_FURL.decode("ascii"), + u"storage-server-FURL": SOME_FURL, }], } self.publish(server_id, ann, self.introducer_client) @@ -348,7 +378,7 @@ class PluginMatchedAnnouncement(SyncTestCase): An announcement that could be matched by a plugin that is enabled with no configuration is matched and the plugin's storage client is used. 
""" - plugin_name = b"tahoe-lafs-dummy-v1" + plugin_name = "tahoe-lafs-dummy-v1" yield self.make_node( introducer_furl=SOME_FURL, storage_plugin=plugin_name, @@ -360,7 +390,7 @@ class PluginMatchedAnnouncement(SyncTestCase): u"storage-options": [{ # and this announcement is for a plugin with a matching name u"name": plugin_name, - u"storage-server-FURL": SOME_FURL.decode("ascii"), + u"storage-server-FURL": SOME_FURL, }], } self.publish(server_id, ann, self.introducer_client) @@ -403,7 +433,7 @@ class FoolscapStorageServers(unittest.TestCase): verifyObject( IFoolscapStorageServer, _FoolscapStorage.from_announcement( - u"server-id", + b"server-id", SOME_FURL, {u"permutation-seed-base32": base32.b2a(b"permutationseed")}, NotStorageServer(), @@ -425,10 +455,11 @@ class StoragePluginWebPresence(AsyncTestCase): self.port_assigner = SameProcessStreamEndpointAssigner() self.port_assigner.setUp() self.addCleanup(self.port_assigner.tearDown) - self.storage_plugin = b"tahoe-lafs-dummy-v1" + self.storage_plugin = u"tahoe-lafs-dummy-v1" from twisted.internet import reactor - _, port_endpoint = self.port_assigner.assign(reactor) + _, webport_endpoint = self.port_assigner.assign(reactor) + tubport_location, tubport_endpoint = self.port_assigner.assign(reactor) tempdir = TempDir() self.useFixture(tempdir) @@ -436,11 +467,15 @@ class StoragePluginWebPresence(AsyncTestCase): self.basedir.child(u"private").makedirs() self.node_fixture = self.useFixture(UseNode( plugin_config={ - b"web": b"1", + "web": "1", }, node_config={ - b"tub.location": b"127.0.0.1:1", - b"web.port": port_endpoint, + # We don't really need the main Tub listening but if we + # disable it then we also have to disable storage (because + # config validation policy). + "tub.port": tubport_endpoint, + "tub.location": tubport_location, + "web.port": ensure_text(webport_endpoint), }, storage_plugin=self.storage_plugin, basedir=self.basedir, @@ -461,8 +496,8 @@ class StoragePluginWebPresence(AsyncTestCase): port=self.port, plugin_name=self.storage_plugin, ).encode("utf-8") - result = yield do_http(b"get", url) - self.assertThat(result, Equals(dumps({b"web": b"1"}))) + result = yield do_http("get", url) + self.assertThat(loads(result), Equals({"web": "1"})) @inlineCallbacks def test_plugin_resource_persistent_across_requests(self): @@ -476,13 +511,13 @@ class StoragePluginWebPresence(AsyncTestCase): port=self.port, path=( u"storage-plugins", - self.storage_plugin.decode("utf-8"), + self.storage_plugin, u"counter", ), ).to_text().encode("utf-8") values = { - loads((yield do_http(b"get", url)))[u"value"], - loads((yield do_http(b"get", url)))[u"value"], + loads((yield do_http("get", url)))[u"value"], + loads((yield do_http("get", url)))[u"value"], } self.assertThat( values, @@ -491,21 +526,75 @@ class StoragePluginWebPresence(AsyncTestCase): ) -def make_broker(tub_maker=lambda h: Mock()): +_aCertPEM = Tub().myCertificate.dumpPEM() +def new_tub(): + """ + Make a new ``Tub`` with a hard-coded private key. + """ + # Use a private key / certificate generated by Tub how it wants. But just + # re-use the same one every time so we don't waste a lot of time + # generating them over and over in the tests. + return Tub(certData=_aCertPEM) + + +def make_broker(tub_maker=None): """ Create a ``StorageFarmBroker`` with the given tub maker and an empty client configuration. 
""" + if tub_maker is None: + tub_maker = lambda handler_overrides: new_tub() return StorageFarmBroker(True, tub_maker, EMPTY_CLIENT_CONFIG) +@implementer(IStreamClientEndpoint) +@attr.s +class SpyEndpoint(object): + """ + Observe and record connection attempts. + + :ivar list _append: A callable that accepts two-tuples. For each + attempted connection, it will be called with ``Deferred`` that was + returned and the ``Factory`` that was passed in. + """ + _append = attr.ib() + + def connect(self, factory): + """ + Record the connection attempt. + + :return: A ``Deferred`` that ``SpyEndpoint`` will not fire. + """ + d = Deferred() + self._append((d, factory)) + return d + + +@implementer(IConnectionHintHandler) # type: ignore # warner/foolscap#78 +@attr.s +class SpyHandler(object): + """ + A Foolscap connection hint handler for the "spy" hint type. Connections + are handled by just observing and recording them. + + :ivar list _connects: A list containing one element for each connection + attempted with this handler. Each element is a two-tuple of the + ``Deferred`` that was returned from ``connect`` and the factory that + was passed to ``connect``. + """ + _connects = attr.ib(default=attr.Factory(list)) + + def hint_to_endpoint(self, hint, reactor, update_status): + return (SpyEndpoint(self._connects.append), hint) + + class TestStorageFarmBroker(unittest.TestCase): def test_static_servers(self): broker = make_broker() - key_s = 'v0-1234-1' - servers_yaml = b"""\ + key_s = b'v0-1234-1' + servers_yaml = """\ storage: v0-1234-1: ann: @@ -513,7 +602,7 @@ storage: permutation-seed-base32: aaaaaaaaaaaaaaaaaaaaaaaa """.format(furl=SOME_FURL) servers = yamlutil.safe_load(servers_yaml) - permseed = base32.a2b("aaaaaaaaaaaaaaaaaaaaaaaa") + permseed = base32.a2b(b"aaaaaaaaaaaaaaaaaaaaaaaa") broker.set_static_servers(servers["storage"]) self.failUnlessEqual(len(broker._static_server_ids), 1) s = broker.servers[key_s] @@ -527,7 +616,7 @@ storage: ann2 = { "service-name": "storage", - "anonymous-storage-FURL": "pb://{}@nowhere/fake2".format(base32.b2a(str(1))), + "anonymous-storage-FURL": "pb://{}@nowhere/fake2".format(str(base32.b2a(b"1"), "utf-8")), "permutation-seed-base32": "bbbbbbbbbbbbbbbbbbbbbbbb", } broker._got_announcement(key_s, ann2) @@ -537,8 +626,8 @@ storage: def test_static_permutation_seed_pubkey(self): broker = make_broker() - server_id = "v0-4uazse3xb6uu5qpkb7tel2bm6bpea4jhuigdhqcuvvse7hugtsia" - k = "4uazse3xb6uu5qpkb7tel2bm6bpea4jhuigdhqcuvvse7hugtsia" + server_id = b"v0-4uazse3xb6uu5qpkb7tel2bm6bpea4jhuigdhqcuvvse7hugtsia" + k = b"4uazse3xb6uu5qpkb7tel2bm6bpea4jhuigdhqcuvvse7hugtsia" ann = { "anonymous-storage-FURL": SOME_FURL, } @@ -548,8 +637,8 @@ storage: def test_static_permutation_seed_explicit(self): broker = make_broker() - server_id = "v0-4uazse3xb6uu5qpkb7tel2bm6bpea4jhuigdhqcuvvse7hugtsia" - k = "w5gl5igiexhwmftwzhai5jy2jixn7yx7" + server_id = b"v0-4uazse3xb6uu5qpkb7tel2bm6bpea4jhuigdhqcuvvse7hugtsia" + k = b"w5gl5igiexhwmftwzhai5jy2jixn7yx7" ann = { "anonymous-storage-FURL": SOME_FURL, "permutation-seed-base32": k, @@ -560,7 +649,7 @@ storage: def test_static_permutation_seed_hashed(self): broker = make_broker() - server_id = "unparseable" + server_id = b"unparseable" ann = { "anonymous-storage-FURL": SOME_FURL, } @@ -571,18 +660,38 @@ storage: @inlineCallbacks def test_threshold_reached(self): - introducer = Mock() + """ + ``StorageFarmBroker.when_connected_enough`` returns a ``Deferred`` which + only fires after the ``StorageFarmBroker`` has established at least as + 
many connections as requested. + """ + introducer = MemoryIntroducerClient( + new_tub(), + SOME_FURL, + b"", + None, + None, + None, + None, + ) new_tubs = [] def make_tub(*args, **kwargs): return new_tubs.pop() broker = make_broker(make_tub) + # Start the broker so that it will start Tubs attached to it so they + # will attempt to make connections as necessary so that we can observe + # those connections. + broker.startService() + self.addCleanup(broker.stopService) done = broker.when_connected_enough(5) broker.use_introducer(introducer) # subscribes to "storage" to learn of new storage nodes - subscribe = introducer.mock_calls[0] - self.assertEqual(subscribe[0], 'subscribe_to') - self.assertEqual(subscribe[1][0], 'storage') - got_announcement = subscribe[1][1] + [subscribe] = introducer.subscribed_to + self.assertEqual( + subscribe.service_name, + "storage", + ) + got_announcement = subscribe.cb data = { "service-name": "storage", @@ -591,15 +700,25 @@ storage: } def add_one_server(x): - data["anonymous-storage-FURL"] = "pb://{}@nowhere/fake".format(base32.b2a(str(x))) - tub = Mock() + data["anonymous-storage-FURL"] = "pb://%s@spy:nowhere/fake" % (str(base32.b2a(b"%d" % x), "ascii"),) + tub = new_tub() + connects = [] + spy = SpyHandler(connects) + tub.addConnectionHintHandler("spy", spy) new_tubs.append(tub) - got_announcement('v0-1234-{}'.format(x), data) - self.assertEqual(tub.mock_calls[-1][0], 'connectTo') - got_connection = tub.mock_calls[-1][1][1] - rref = Mock() - rref.callRemote = Mock(return_value=succeed(1234)) - got_connection(rref) + got_announcement(b'v0-1234-%d' % x, data) + + self.assertEqual( + 1, len(connects), + "Expected one connection attempt, got {!r} instead".format(connects), + ) + + # Skip over all the Foolscap negotiation. It's complex with lots + # of pieces and I don't want to figure out how to fake + # it. -exarkun + native = broker.servers[b"v0-1234-%d" % (x,)] + rref = LocalWrapper(StorageServer(self.mktemp(), b"x" * 20)) + native._got_connection(rref) # first 4 shouldn't trigger connected_threashold for x in range(4): diff --git a/src/allmydata/test/test_storage_web.py b/src/allmydata/test/test_storage_web.py index aa1f19936..b3f5fac98 100644 --- a/src/allmydata/test/test_storage_web.py +++ b/src/allmydata/test/test_storage_web.py @@ -11,7 +11,7 @@ from __future__ import unicode_literals from future.utils import PY2 if PY2: - # Omitted list sinc it broke a test on Python 2. Shouldn't require further + # Omitted list since it broke a test on Python 2. Shouldn't require further # work, when we switch to Python 3 we'll be dropping this, anyway. from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, object, range, str, max, min # noqa: F401 @@ -26,18 +26,6 @@ from twisted.internet import defer from twisted.application import service from twisted.web.template import flattenString -# We need to use `nevow.inevow.IRequest` for now for compatibility -# with the code in web/common.py. Once nevow bits are gone from -# web/common.py, we can use `twisted.web.iweb.IRequest` here. 
-if PY2: - from nevow.inevow import IRequest -else: - from twisted.web.iweb import IRequest - -from twisted.web.server import Request -from twisted.web.test.requesthelper import DummyChannel -from zope.interface import implementer - from foolscap.api import fireEventually from allmydata.util import fileutil, hashutil, base32, pollmixin from allmydata.storage.common import storage_index_to_dir, \ @@ -52,6 +40,10 @@ from allmydata.web.storage import ( ) from .common_util import FakeCanary +from .common_web import ( + render, +) + def remove_tags(s): s = re.sub(br'<[^>]*>', b' ', s) s = re.sub(br'\s+', b' ', s) @@ -75,20 +67,10 @@ def renderDeferred(ss): return flattenString(None, elem) def renderJSON(resource): - """Render a JSON from the given resource.""" - - @implementer(IRequest) - class JSONRequest(Request): - """ - A Request with t=json argument added to it. This is useful to - invoke a Resouce.render_JSON() method. - """ - def __init__(self): - Request.__init__(self, DummyChannel()) - self.args = {"t": ["json"]} - self.fields = {} - - return resource.render(JSONRequest()) + """ + Render a JSON from the given resource. + """ + return render(resource, {b"t": [b"json"]}) class MyBucketCountingCrawler(BucketCountingCrawler): def finished_prefix(self, cycle, prefix): diff --git a/src/allmydata/test/test_system.py b/src/allmydata/test/test_system.py index 21da0a914..235361cf8 100644 --- a/src/allmydata/test/test_system.py +++ b/src/allmydata/test/test_system.py @@ -1,7 +1,22 @@ +""" +Ported to Python 3, partially: test_filesystem* will be done in a future round. +""" from __future__ import print_function +from __future__ import absolute_import +from __future__ import division +from __future__ import unicode_literals + +from future.utils import PY2, PY3 +if PY2: + # Don't import bytes since it causes issues on (so far unported) modules on Python 2. + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, dict, list, object, range, max, min, str # noqa: F401 + +from past.builtins import chr as byteschr, long +from six import ensure_text, ensure_str import os, re, sys, time, json from functools import partial +from unittest import skipIf from bs4 import BeautifulSoup @@ -23,7 +38,6 @@ from allmydata.util import log, base32 from allmydata.util.encodingutil import quote_output, unicode_to_argv from allmydata.util.fileutil import abspath_expanduser_unicode from allmydata.util.consumer import MemoryConsumer, download_to_data -from allmydata.stats import StatsGathererService from allmydata.interfaces import IDirectoryNode, IFileNode, \ NoSuchChildError, NoSharesError from allmydata.monitor import Monitor @@ -33,12 +47,15 @@ from allmydata.mutable.publish import MutableData from foolscap.api import DeadReferenceError, fireEventually, flushEventualQueue from twisted.python.failure import Failure +from twisted.python.filepath import ( + FilePath, +) from .common import ( TEST_RSA_KEY_SIZE, SameProcessStreamEndpointAssigner, ) -from .common_web import do_http, Error +from .common_web import do_http as do_http_bytes, Error from .web.common import ( assert_soup_has_tag_with_attributes ) @@ -46,9 +63,34 @@ from .web.common import ( # TODO: move this to common or common_util from allmydata.test.test_runner import RunBinTahoeMixin from . 
import common_util as testutil -from .common_util import run_cli +from .common_util import run_cli_unicode +from ..scripts.common import ( + write_introducer, +) -LARGE_DATA = """ +def run_cli(*args, **kwargs): + """ + Run a Tahoe-LAFS CLI utility, but inline. + + Version of run_cli_unicode() that takes any kind of string, and the + command-line args inline instead of as verb + list. + + Backwards compatible version so we don't have to change all the tests that + expected this API. + """ + nodeargs = [ensure_text(a) for a in kwargs.pop("nodeargs", [])] + kwargs["nodeargs"] = nodeargs + return run_cli_unicode( + ensure_text(args[0]), [ensure_text(a) for a in args[1:]], **kwargs) + + +def do_http(*args, **kwargs): + """Wrapper for do_http() that returns Unicode.""" + return do_http_bytes(*args, **kwargs).addCallback( + lambda b: str(b, "utf-8")) + + +LARGE_DATA = b""" This is some data to publish to the remote grid.., which needs to be large enough to not fit inside a LIT uri. """ @@ -622,9 +664,9 @@ def flush_but_dont_ignore(res): def _render_config(config): """ - Convert a ``dict`` of ``dict`` of ``bytes`` to an ini-format string. + Convert a ``dict`` of ``dict`` of ``unicode`` to an ini-format string. """ - return "\n\n".join(list( + return u"\n\n".join(list( _render_config_section(k, v) for (k, v) in config.items() @@ -632,20 +674,20 @@ def _render_config(config): def _render_config_section(heading, values): """ - Convert a ``bytes`` heading and a ``dict`` of ``bytes`` to an ini-format - section as ``bytes``. + Convert a ``unicode`` heading and a ``dict`` of ``unicode`` to an ini-format + section as ``unicode``. """ - return "[{}]\n{}\n".format( + return u"[{}]\n{}\n".format( heading, _render_section_values(values) ) def _render_section_values(values): """ - Convert a ``dict`` of ``bytes`` to the body of an ini-format section as - ``bytes``. + Convert a ``dict`` of ``unicode`` to the body of an ini-format section as + ``unicode``. """ - return "\n".join(list( - "{} = {}".format(k, v) + return u"\n".join(list( + u"{} = {}".format(k, v) for (k, v) in sorted(values.items()) )) @@ -661,9 +703,6 @@ class SystemTestMixin(pollmixin.PollMixin, testutil.StallMixin): self.sparent = service.MultiService() self.sparent.startService() - self.stats_gatherer = None - self.stats_gatherer_furl = None - def tearDown(self): log.msg("shutting down SystemTest services") d = self.sparent.stopService() @@ -707,7 +746,7 @@ class SystemTestMixin(pollmixin.PollMixin, testutil.StallMixin): return f.read().strip() @inlineCallbacks - def set_up_nodes(self, NUMCLIENTS=5, use_stats_gatherer=False): + def set_up_nodes(self, NUMCLIENTS=5): """ Create an introducer and ``NUMCLIENTS`` client nodes pointed at it. All of the nodes are running in this process. @@ -720,9 +759,6 @@ class SystemTestMixin(pollmixin.PollMixin, testutil.StallMixin): :param int NUMCLIENTS: The number of client nodes to create. - :param bool use_stats_gatherer: If ``True`` then also create a stats - gatherer and configure the other nodes to use it. - :return: A ``Deferred`` that fires when the nodes have connected to each other. 
""" @@ -731,33 +767,7 @@ class SystemTestMixin(pollmixin.PollMixin, testutil.StallMixin): self.introducer = yield self._create_introducer() self.add_service(self.introducer) self.introweb_url = self._get_introducer_web() - - if use_stats_gatherer: - yield self._set_up_stats_gatherer() yield self._set_up_client_nodes() - if use_stats_gatherer: - yield self._grab_stats() - - def _set_up_stats_gatherer(self): - statsdir = self.getdir("stats_gatherer") - fileutil.make_dirs(statsdir) - - location_hint, port_endpoint = self.port_assigner.assign(reactor) - fileutil.write(os.path.join(statsdir, "location"), location_hint) - fileutil.write(os.path.join(statsdir, "port"), port_endpoint) - self.stats_gatherer_svc = StatsGathererService(statsdir) - self.stats_gatherer = self.stats_gatherer_svc.stats_gatherer - self.stats_gatherer_svc.setServiceParent(self.sparent) - - d = fireEventually() - sgf = os.path.join(statsdir, 'stats_gatherer.furl') - def check_for_furl(): - return os.path.exists(sgf) - d.addCallback(lambda junk: self.poll(check_for_furl, timeout=30)) - def get_furl(junk): - self.stats_gatherer_furl = file(sgf, 'rb').read().strip() - d.addCallback(get_furl) - return d @inlineCallbacks def _set_up_client_nodes(self): @@ -780,7 +790,7 @@ class SystemTestMixin(pollmixin.PollMixin, testutil.StallMixin): self.helper_furl = helper_furl if self.numclients >= 4: - with open(os.path.join(basedirs[3], 'tahoe.cfg'), 'ab+') as f: + with open(os.path.join(basedirs[3], 'tahoe.cfg'), 'a+') as f: f.write( "[client]\n" "helper.furl = {}\n".format(helper_furl) @@ -806,8 +816,6 @@ class SystemTestMixin(pollmixin.PollMixin, testutil.StallMixin): except1 = set(range(self.numclients)) - {1} feature_matrix = { - # client 1 uses private/introducers.yaml, not tahoe.cfg - ("client", "introducer.furl"): except1, ("client", "nickname"): except1, # client 1 has to auto-assign an address. 
@@ -825,20 +833,13 @@ class SystemTestMixin(pollmixin.PollMixin, testutil.StallMixin): def setconf(config, which, section, feature, value): if which in feature_matrix.get((section, feature), {which}): - if isinstance(value, unicode): - value = value.encode("utf-8") config.setdefault(section, {})[feature] = value - setclient = partial(setconf, config, which, "client") setnode = partial(setconf, config, which, "node") sethelper = partial(setconf, config, which, "helper") - setclient("introducer.furl", self.introducer_furl) setnode("nickname", u"client %d \N{BLACK SMILING FACE}" % (which,)) - if self.stats_gatherer_furl: - setclient("stats_gatherer.furl", self.stats_gatherer_furl) - tub_location_hint, tub_port_endpoint = self.port_assigner.assign(reactor) setnode("tub.port", tub_port_endpoint) setnode("tub.location", tub_location_hint) @@ -850,13 +851,11 @@ class SystemTestMixin(pollmixin.PollMixin, testutil.StallMixin): sethelper("enabled", "True") - if which == 1: - # clients[1] uses private/introducers.yaml, not tahoe.cfg - iyaml = ("introducers:\n" - " petname2:\n" - " furl: %s\n") % self.introducer_furl - iyaml_fn = os.path.join(basedir, "private", "introducers.yaml") - fileutil.write(iyaml_fn, iyaml) + iyaml = ("introducers:\n" + " petname2:\n" + " furl: %s\n") % self.introducer_furl + iyaml_fn = os.path.join(basedir, "private", "introducers.yaml") + fileutil.write(iyaml_fn, iyaml) return _render_config(config) @@ -871,10 +870,6 @@ class SystemTestMixin(pollmixin.PollMixin, testutil.StallMixin): fileutil.write(os.path.join(basedir, 'tahoe.cfg'), config) return basedir - def _grab_stats(self): - d = self.stats_gatherer.poll() - return d - def bounce_client(self, num): c = self.clients[num] d = c.disownServiceParent() @@ -905,16 +900,21 @@ class SystemTestMixin(pollmixin.PollMixin, testutil.StallMixin): # usually this node is *not* parented to our self.sparent, so we can # shut it down separately from the rest, to exercise the # connection-lost code - basedir = self.getdir("client%d" % client_num) - if not os.path.isdir(basedir): - fileutil.make_dirs(basedir) + basedir = FilePath(self.getdir("client%d" % client_num)) + basedir.makedirs() config = "[client]\n" - config += "introducer.furl = %s\n" % self.introducer_furl if helper_furl: config += "helper.furl = %s\n" % helper_furl - fileutil.write(os.path.join(basedir, 'tahoe.cfg'), config) + basedir.child("tahoe.cfg").setContent(config.encode("utf-8")) + private = basedir.child("private") + private.makedirs() + write_introducer( + basedir, + "default", + self.introducer_furl, + ) - c = yield client.create_client(basedir) + c = yield client.create_client(basedir.path) self.clients.append(c) c.set_default_mutable_keysize(TEST_RSA_KEY_SIZE) self.numclients += 1 @@ -1015,12 +1015,12 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase): def test_upload_and_download_convergent(self): self.basedir = "system/SystemTest/test_upload_and_download_convergent" - return self._test_upload_and_download(convergence="some convergence string") + return self._test_upload_and_download(convergence=b"some convergence string") def _test_upload_and_download(self, convergence): # we use 4000 bytes of data, which will result in about 400k written # to disk among all our simulated nodes - DATA = "Some data to upload\n" * 200 + DATA = b"Some data to upload\n" * 200 d = self.set_up_nodes() def _check_connections(res): for c in self.clients: @@ -1028,7 +1028,7 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase): all_peerids = 
c.get_storage_broker().get_all_serverids() self.failUnlessEqual(len(all_peerids), self.numclients) sb = c.storage_broker - permuted_peers = sb.get_servers_for_psi("a") + permuted_peers = sb.get_servers_for_psi(b"a") self.failUnlessEqual(len(permuted_peers), self.numclients) d.addCallback(_check_connections) @@ -1051,7 +1051,7 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase): theuri = results.get_uri() log.msg("upload finished: uri is %s" % (theuri,)) self.uri = theuri - assert isinstance(self.uri, str), self.uri + assert isinstance(self.uri, bytes), self.uri self.cap = uri.from_string(self.uri) self.n = self.clients[1].create_node_from_uri(self.uri) d.addCallback(_upload_done) @@ -1085,17 +1085,17 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase): d.addCallback(lambda ign: n.read(MemoryConsumer(), offset=1, size=4)) def _read_portion_done(mc): - self.failUnlessEqual("".join(mc.chunks), DATA[1:1+4]) + self.failUnlessEqual(b"".join(mc.chunks), DATA[1:1+4]) d.addCallback(_read_portion_done) d.addCallback(lambda ign: n.read(MemoryConsumer(), offset=2, size=None)) def _read_tail_done(mc): - self.failUnlessEqual("".join(mc.chunks), DATA[2:]) + self.failUnlessEqual(b"".join(mc.chunks), DATA[2:]) d.addCallback(_read_tail_done) d.addCallback(lambda ign: n.read(MemoryConsumer(), size=len(DATA)+1000)) def _read_too_much(mc): - self.failUnlessEqual("".join(mc.chunks), DATA) + self.failUnlessEqual(b"".join(mc.chunks), DATA) d.addCallback(_read_too_much) return d @@ -1145,7 +1145,7 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase): return connected d.addCallback(lambda ign: self.poll(_has_helper)) - HELPER_DATA = "Data that needs help to upload" * 1000 + HELPER_DATA = b"Data that needs help to upload" * 1000 def _upload_with_helper(res): u = upload.Data(HELPER_DATA, convergence=convergence) d = self.extra_node.upload(u) @@ -1179,7 +1179,7 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase): d.addCallback(fireEventually) def _upload_resumable(res): - DATA = "Data that needs help to upload and gets interrupted" * 1000 + DATA = b"Data that needs help to upload and gets interrupted" * 1000 u1 = CountingDataUploadable(DATA, convergence=convergence) u2 = CountingDataUploadable(DATA, convergence=convergence) @@ -1297,25 +1297,13 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase): d.addCallback(_upload_resumable) def _grab_stats(ignored): - # the StatsProvider doesn't normally publish a FURL: - # instead it passes a live reference to the StatsGatherer - # (if and when it connects). To exercise the remote stats - # interface, we manually publish client0's StatsProvider - # and use client1 to query it. - sp = self.clients[0].stats_provider - sp_furl = self.clients[0].tub.registerReference(sp) - d = self.clients[1].tub.getReference(sp_furl) - d.addCallback(lambda sp_rref: sp_rref.callRemote("get_stats")) - def _got_stats(stats): - #print("STATS") - #from pprint import pprint - #pprint(stats) - s = stats["stats"] - self.failUnlessEqual(s["storage_server.accepting_immutable_shares"], 1) - c = stats["counters"] - self.failUnless("storage_server.allocate" in c) - d.addCallback(_got_stats) - return d + stats = self.clients[0].stats_provider.get_stats() + s = stats["stats"] + self.failUnlessEqual(s["storage_server.accepting_immutable_shares"], 1) + c = stats["counters"] + # Probably this should be Unicode eventually? But we haven't ported + # stats code yet. 
+ self.failUnless(b"storage_server.allocate" in c) d.addCallback(_grab_stats) return d @@ -1336,7 +1324,7 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase): assert pieces[-5].startswith("client") client_num = int(pieces[-5][-1]) storage_index_s = pieces[-1] - storage_index = si_a2b(storage_index_s) + storage_index = si_a2b(storage_index_s.encode("ascii")) for sharename in filenames: shnum = int(sharename) filename = os.path.join(dirpath, sharename) @@ -1369,7 +1357,7 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase): elif which == "signature": signature = self.flip_bit(signature) elif which == "share_hash_chain": - nodenum = share_hash_chain.keys()[0] + nodenum = list(share_hash_chain.keys())[0] share_hash_chain[nodenum] = self.flip_bit(share_hash_chain[nodenum]) elif which == "block_hash_tree": block_hash_tree[-1] = self.flip_bit(block_hash_tree[-1]) @@ -1392,11 +1380,11 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase): def test_mutable(self): self.basedir = "system/SystemTest/test_mutable" - DATA = "initial contents go here." # 25 bytes % 3 != 0 + DATA = b"initial contents go here." # 25 bytes % 3 != 0 DATA_uploadable = MutableData(DATA) - NEWDATA = "new contents yay" + NEWDATA = b"new contents yay" NEWDATA_uploadable = MutableData(NEWDATA) - NEWERDATA = "this is getting old" + NEWERDATA = b"this is getting old" NEWERDATA_uploadable = MutableData(NEWERDATA) d = self.set_up_nodes() @@ -1445,7 +1433,7 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase): self.failUnless(" share_hash_chain: " in output) self.failUnless(" block_hash_tree: 1 nodes\n" in output) expected = (" verify-cap: URI:SSK-Verifier:%s:" % - base32.b2a(storage_index)) + str(base32.b2a(storage_index), "ascii")) self.failUnless(expected in output) except unittest.FailTest: print() @@ -1524,7 +1512,7 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase): for (client_num, storage_index, filename, shnum) in shares ]) assert len(where) == 10 # this test is designed for 3-of-10 - for shnum, filename in where.items(): + for shnum, filename in list(where.items()): # shares 7,8,9 are left alone. read will check # (share_hash_chain, block_hash_tree, share_data). New # seqnum+R pairs will trigger a check of (seqnum, R, IV, @@ -1574,9 +1562,9 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase): def _check_empty_file(res): # make sure we can create empty files, this usually screws up the # segsize math - d1 = self.clients[2].create_mutable_file(MutableData("")) + d1 = self.clients[2].create_mutable_file(MutableData(b"")) d1.addCallback(lambda newnode: newnode.download_best_version()) - d1.addCallback(lambda res: self.failUnlessEqual("", res)) + d1.addCallback(lambda res: self.failUnlessEqual(b"", res)) return d1 d.addCallback(_check_empty_file) @@ -1599,7 +1587,7 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase): return d def flip_bit(self, good): - return good[:-1] + chr(ord(good[-1]) ^ 0x01) + return good[:-1] + byteschr(ord(good[-1:]) ^ 0x01) def mangle_uri(self, gooduri): # change the key, which changes the storage index, which means we'll @@ -1620,10 +1608,11 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase): # the key, which should cause the download to fail the post-download # plaintext_hash check. 
+ @skipIf(PY3, "Python 3 web support hasn't happened yet.") def test_filesystem(self): self.basedir = "system/SystemTest/test_filesystem" self.data = LARGE_DATA - d = self.set_up_nodes(use_stats_gatherer=True) + d = self.set_up_nodes() def _new_happy_semantics(ign): for c in self.clients: c.encoding_params['happy'] = 1 @@ -1681,7 +1670,7 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase): d1.addCallback(self.log, "publish finished") def _stash_uri(filenode): self.uri = filenode.get_uri() - assert isinstance(self.uri, str), (self.uri, filenode) + assert isinstance(self.uri, bytes), (self.uri, filenode) d1.addCallback(_stash_uri) return d1 d.addCallback(_made_subdir1) @@ -1699,7 +1688,7 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase): return res def _do_publish_private(self, res): - self.smalldata = "sssh, very secret stuff" + self.smalldata = b"sssh, very secret stuff" ut = upload.Data(self.smalldata, convergence=None) d = self.clients[0].create_dirnode() d.addCallback(self.log, "GOT private directory") @@ -1786,7 +1775,7 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase): d1.addCallback(lambda res: self.shouldFail2(NotWriteableError, "mkdir(nope)", None, dirnode.create_subdirectory, u"nope")) d1.addCallback(self.log, "doing add_file(ro)") - ut = upload.Data("I will disappear, unrecorded and unobserved. The tragedy of my demise is made more poignant by its silence, but this beauty is not for you to ever know.", convergence="99i-p1x4-xd4-18yc-ywt-87uu-msu-zo -- completely and totally unguessable string (unless you read this)") + ut = upload.Data(b"I will disappear, unrecorded and unobserved. The tragedy of my demise is made more poignant by its silence, but this beauty is not for you to ever know.", convergence=b"99i-p1x4-xd4-18yc-ywt-87uu-msu-zo -- completely and totally unguessable string (unless you read this)") d1.addCallback(lambda res: self.shouldFail2(NotWriteableError, "add_file(nope)", None, dirnode.add_file, u"hope", ut)) d1.addCallback(self.log, "doing get(ro)") @@ -1850,7 +1839,7 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase): "largest-directory-children": 3, "largest-immutable-file": 112, } - for k,v in expected.iteritems(): + for k,v in list(expected.items()): self.failUnlessEqual(stats[k], v, "stats[%s] was %s, not %s" % (k, stats[k], v)) @@ -1899,33 +1888,33 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase): return do_http("get", self.webish_url + urlpath) def POST(self, urlpath, use_helper=False, **fields): - sepbase = "boogabooga" - sep = "--" + sepbase + sepbase = b"boogabooga" + sep = b"--" + sepbase form = [] form.append(sep) - form.append('Content-Disposition: form-data; name="_charset"') - form.append('') - form.append('UTF-8') + form.append(b'Content-Disposition: form-data; name="_charset"') + form.append(b'') + form.append(b'UTF-8') form.append(sep) - for name, value in fields.iteritems(): + for name, value in fields.items(): if isinstance(value, tuple): filename, value = value - form.append('Content-Disposition: form-data; name="%s"; ' - 'filename="%s"' % (name, filename.encode("utf-8"))) + form.append(b'Content-Disposition: form-data; name="%s"; ' + b'filename="%s"' % (name, filename.encode("utf-8"))) else: - form.append('Content-Disposition: form-data; name="%s"' % name) - form.append('') - form.append(str(value)) + form.append(b'Content-Disposition: form-data; name="%s"' % name) + form.append(b'') + form.append(b"%s" % (value,)) 
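            # An aside on (b"%s" % (value,)) above: bytes %-interpolation
            # only exists on Python 2 and on Python 3.5+ (PEP 461), and %s
            # there requires a bytes-like value, which is why callers of
            # POST() in this port pass values such as t=b"upload" rather
            # than text.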
form.append(sep) - form[-1] += "--" - body = "" + form[-1] += b"--" + body = b"" headers = {} if fields: - body = "\r\n".join(form) + "\r\n" - headers["content-type"] = "multipart/form-data; boundary=%s" % sepbase + body = b"\r\n".join(form) + b"\r\n" + headers["content-type"] = "multipart/form-data; boundary=%s" % str(sepbase, "ascii") return self.POST2(urlpath, body, headers, use_helper) - def POST2(self, urlpath, body="", headers={}, use_helper=False): + def POST2(self, urlpath, body=b"", headers={}, use_helper=False): if use_helper: url = self.helper_webish_url + urlpath else: @@ -1933,7 +1922,7 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase): return do_http("post", url, data=body, headers=headers) def _test_web(self, res): - public = "uri/" + self._root_directory_uri + public = "uri/" + str(self._root_directory_uri, "ascii") d = self.GET("") def _got_welcome(page): html = page.replace('\n', ' ') @@ -1942,7 +1931,7 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase): "I didn't see the right '%s' message in:\n%s" % (connected_re, page)) # nodeids/tubids don't have any regexp-special characters nodeid_re = r'Node ID:\s*%s' % ( - self.clients[0].get_long_tubid(), self.clients[0].get_long_nodeid()) + self.clients[0].get_long_tubid(), str(self.clients[0].get_long_nodeid(), "ascii")) self.failUnless(re.search(nodeid_re, html), "I didn't see the right '%s' message in:\n%s" % (nodeid_re, page)) self.failUnless("Helper: 0 active uploads" in page) @@ -2003,7 +1992,7 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase): # upload a file with PUT d.addCallback(self.log, "about to try PUT") d.addCallback(lambda res: self.PUT(public + "/subdir3/new.txt", - "new.txt contents")) + b"new.txt contents")) d.addCallback(lambda res: self.GET(public + "/subdir3/new.txt")) d.addCallback(self.failUnlessEqual, "new.txt contents") # and again with something large enough to use multiple segments, @@ -2014,23 +2003,23 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase): c.encoding_params['happy'] = 1 d.addCallback(_new_happy_semantics) d.addCallback(lambda res: self.PUT(public + "/subdir3/big.txt", - "big" * 500000)) # 1.5MB + b"big" * 500000)) # 1.5MB d.addCallback(lambda res: self.GET(public + "/subdir3/big.txt")) d.addCallback(lambda res: self.failUnlessEqual(len(res), 1500000)) # can we replace files in place? 
d.addCallback(lambda res: self.PUT(public + "/subdir3/new.txt", - "NEWER contents")) + b"NEWER contents")) d.addCallback(lambda res: self.GET(public + "/subdir3/new.txt")) d.addCallback(self.failUnlessEqual, "NEWER contents") # test unlinked POST - d.addCallback(lambda res: self.POST("uri", t="upload", - file=("new.txt", "data" * 10000))) + d.addCallback(lambda res: self.POST("uri", t=b"upload", + file=("new.txt", b"data" * 10000))) # and again using the helper, which exercises different upload-status # display code - d.addCallback(lambda res: self.POST("uri", use_helper=True, t="upload", - file=("foo.txt", "data2" * 10000))) + d.addCallback(lambda res: self.POST("uri", use_helper=True, t=b"upload", + file=("foo.txt", b"data2" * 10000))) # check that the status page exists d.addCallback(lambda res: self.GET("status")) @@ -2154,7 +2143,7 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase): # exercise some of the diagnostic tools in runner.py # find a share - for (dirpath, dirnames, filenames) in os.walk(unicode(self.basedir)): + for (dirpath, dirnames, filenames) in os.walk(ensure_text(self.basedir)): if "storage" not in dirpath: continue if not filenames: @@ -2168,7 +2157,7 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase): filename = os.path.join(dirpath, filenames[0]) # peek at the magic to see if it is a chk share magic = open(filename, "rb").read(4) - if magic == '\x00\x00\x00\x01': + if magic == b'\x00\x00\x00\x01': break else: self.fail("unable to find any uri_extension files in %r" @@ -2201,7 +2190,6 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase): # 'find-shares' tool sharedir, shnum = os.path.split(filename) storagedir, storage_index_s = os.path.split(sharedir) - storage_index_s = str(storage_index_s) nodedirs = [self.getdir("client%d" % i) for i in range(self.numclients)] rc,out,err = yield run_cli("debug", "find-shares", storage_index_s, *nodedirs) @@ -2225,7 +2213,7 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase): # allmydata.control (mostly used for performance tests) c0 = self.clients[0] control_furl_file = c0.config.get_private_path("control.furl") - control_furl = open(control_furl_file, "r").read().strip() + control_furl = ensure_str(open(control_furl_file, "r").read().strip()) # it doesn't really matter which Tub we use to connect to the client, # so let's just use our IntroducerNode's d = self.introducer.tub.getReference(control_furl) @@ -2257,7 +2245,7 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase): # sure that works, before we add other aliases. 
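        # A note on the mode change just below: private_uri is presumably a
        # bytes capability string after this port (URIs are asserted to be
        # bytes elsewhere in this file), hence the switch to opening the cap
        # file in binary mode.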
root_file = os.path.join(client0_basedir, "private", "root_dir.cap") - f = open(root_file, "w") + f = open(root_file, "wb") f.write(private_uri) f.close() @@ -2339,7 +2327,8 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase): files.append(fn) data = "data to be uploaded: file%d\n" % i datas.append(data) - open(fn,"wb").write(data) + with open(fn, "wb") as f: + f.write(data) def _check_stdout_against(out_and_err, filenum=None, data=None): (out, err) = out_and_err @@ -2517,13 +2506,18 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase): # recursive copy: setup dn = os.path.join(self.basedir, "dir1") os.makedirs(dn) - open(os.path.join(dn, "rfile1"), "wb").write("rfile1") - open(os.path.join(dn, "rfile2"), "wb").write("rfile2") - open(os.path.join(dn, "rfile3"), "wb").write("rfile3") + with open(os.path.join(dn, "rfile1"), "wb") as f: + f.write("rfile1") + with open(os.path.join(dn, "rfile2"), "wb") as f: + f.write("rfile2") + with open(os.path.join(dn, "rfile3"), "wb") as f: + f.write("rfile3") sdn2 = os.path.join(dn, "subdir2") os.makedirs(sdn2) - open(os.path.join(sdn2, "rfile4"), "wb").write("rfile4") - open(os.path.join(sdn2, "rfile5"), "wb").write("rfile5") + with open(os.path.join(sdn2, "rfile4"), "wb") as f: + f.write("rfile4") + with open(os.path.join(sdn2, "rfile5"), "wb") as f: + f.write("rfile5") # from disk into tahoe d.addCallback(run, "cp", "-r", dn, "tahoe:") @@ -2600,6 +2594,7 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase): return d + @skipIf(PY3, "Python 3 CLI support hasn't happened yet.") def test_filesystem_with_cli_in_subprocess(self): # We do this in a separate test so that test_filesystem doesn't skip if we can't run bin/tahoe. @@ -2612,6 +2607,7 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase): def _run_in_subprocess(ignored, verb, *args, **kwargs): stdin = kwargs.get("stdin") + # XXX https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3548 env = kwargs.get("env", os.environ) # Python warnings from the child process don't matter. env["PYTHONWARNINGS"] = "ignore" @@ -2622,12 +2618,12 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase): out, err, rc_or_sig = res self.failUnlessEqual(rc_or_sig, 0, str(res)) if check_stderr: - self.failUnlessEqual(err, "") + self.failUnlessEqual(err, b"") d.addCallback(_run_in_subprocess, "create-alias", "newalias") d.addCallback(_check_succeeded) - STDIN_DATA = "This is the file to upload from stdin." + STDIN_DATA = b"This is the file to upload from stdin." d.addCallback(_run_in_subprocess, "put", "-", "newalias:tahoe-file", stdin=STDIN_DATA) d.addCallback(_check_succeeded, check_stderr=False) @@ -2649,7 +2645,7 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase): return d def _test_checker(self, res): - ut = upload.Data("too big to be literal" * 200, convergence=None) + ut = upload.Data(b"too big to be literal" * 200, convergence=None) d = self._personal_node.add_file(u"big file", ut) d.addCallback(lambda res: self._personal_node.check(Monitor())) diff --git a/src/allmydata/test/test_tor_provider.py b/src/allmydata/test/test_tor_provider.py index bfc962831..f5dd2e29c 100644 --- a/src/allmydata/test/test_tor_provider.py +++ b/src/allmydata/test/test_tor_provider.py @@ -349,6 +349,10 @@ class Provider(unittest.TestCase): cfs2.assert_called_with(reactor, ep_desc) def test_handler_socks_endpoint(self): + """ + If not configured otherwise, the Tor provider returns a Socks-based + handler. 
+ """ tor = mock.Mock() handler = object() tor.socks_endpoint = mock.Mock(return_value=handler) @@ -365,6 +369,46 @@ class Provider(unittest.TestCase): tor.socks_endpoint.assert_called_with(ep) self.assertIs(h, handler) + def test_handler_socks_unix_endpoint(self): + """ + ``socks.port`` can be configured as a UNIX client endpoint. + """ + tor = mock.Mock() + handler = object() + tor.socks_endpoint = mock.Mock(return_value=handler) + ep = object() + cfs = mock.Mock(return_value=ep) + reactor = object() + + with mock_tor(tor): + p = tor_provider.create(reactor, + FakeConfig(**{"socks.port": "unix:path"})) + with mock.patch("allmydata.util.tor_provider.clientFromString", cfs): + h = p.get_tor_handler() + cfs.assert_called_with(reactor, "unix:path") + tor.socks_endpoint.assert_called_with(ep) + self.assertIs(h, handler) + + def test_handler_socks_tcp_endpoint(self): + """ + ``socks.port`` can be configured as a UNIX client endpoint. + """ + tor = mock.Mock() + handler = object() + tor.socks_endpoint = mock.Mock(return_value=handler) + ep = object() + cfs = mock.Mock(return_value=ep) + reactor = object() + + with mock_tor(tor): + p = tor_provider.create(reactor, + FakeConfig(**{"socks.port": "tcp:127.0.0.1:1234"})) + with mock.patch("allmydata.util.tor_provider.clientFromString", cfs): + h = p.get_tor_handler() + cfs.assert_called_with(reactor, "tcp:127.0.0.1:1234") + tor.socks_endpoint.assert_called_with(ep) + self.assertIs(h, handler) + def test_handler_control_endpoint(self): tor = mock.Mock() handler = object() diff --git a/src/allmydata/test/test_upload.py b/src/allmydata/test/test_upload.py index a6438c1fc..07ede2074 100644 --- a/src/allmydata/test/test_upload.py +++ b/src/allmydata/test/test_upload.py @@ -14,6 +14,17 @@ if PY2: import os, shutil from io import BytesIO +from base64 import ( + b64encode, +) + +from hypothesis import ( + given, +) +from hypothesis.strategies import ( + just, + integers, +) from twisted.trial import unittest from twisted.python.failure import Failure @@ -239,7 +250,7 @@ class FakeClient(object): node_config=EMPTY_CLIENT_CONFIG, ) for (serverid, rref) in servers: - ann = {"anonymous-storage-FURL": b"pb://%s@nowhere/fake" % base32.b2a(serverid), + ann = {"anonymous-storage-FURL": "pb://%s@nowhere/fake" % str(base32.b2a(serverid), "ascii"), "permutation-seed-base32": base32.b2a(serverid) } self.storage_broker.test_add_rref(serverid, rref, ann) self.last_servers = [s[1] for s in servers] @@ -2029,6 +2040,91 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin, f.close() return None + +class EncryptAnUploadableTests(unittest.TestCase): + """ + Tests for ``EncryptAnUploadable``. + """ + def test_same_length(self): + """ + ``EncryptAnUploadable.read_encrypted`` returns ciphertext of the same + length as the underlying plaintext. + """ + plaintext = b"hello world" + uploadable = upload.FileHandle(BytesIO(plaintext), None) + uploadable.set_default_encoding_parameters({ + # These values shouldn't matter. + "k": 3, + "happy": 5, + "n": 10, + "max_segment_size": 128 * 1024, + }) + encrypter = upload.EncryptAnUploadable(uploadable) + ciphertext = b"".join(self.successResultOf(encrypter.read_encrypted(1024, False))) + self.assertEqual(len(ciphertext), len(plaintext)) + + @given(just(b"hello world"), integers(min_value=0, max_value=len(b"hello world"))) + def test_known_result(self, plaintext, split_at): + """ + ``EncryptAnUploadable.read_encrypted`` returns a known-correct ciphertext + string for certain inputs. 
The ciphertext is independent of the read + sizes. + """ + convergence = b"\x42" * 16 + uploadable = upload.FileHandle(BytesIO(plaintext), convergence) + uploadable.set_default_encoding_parameters({ + # The convergence key is a function of k, n, and max_segment_size + # (among other things). The value for happy doesn't matter + # though. + "k": 3, + "happy": 5, + "n": 10, + "max_segment_size": 128 * 1024, + }) + encrypter = upload.EncryptAnUploadable(uploadable) + def read(n): + return b"".join(self.successResultOf(encrypter.read_encrypted(n, False))) + + # Read the string in one or two pieces to make sure underlying state + # is maintained properly. + first = read(split_at) + second = read(len(plaintext) - split_at) + third = read(1) + ciphertext = first + second + third + + self.assertEqual( + b"Jd2LHCRXozwrEJc=", + b64encode(ciphertext), + ) + + def test_large_read(self): + """ + ``EncryptAnUploadable.read_encrypted`` succeeds even when the requested + data length is much larger than the chunk size. + """ + convergence = b"\x42" * 16 + # 4kB of plaintext + plaintext = b"\xde\xad\xbe\xef" * 1024 + uploadable = upload.FileHandle(BytesIO(plaintext), convergence) + uploadable.set_default_encoding_parameters({ + "k": 3, + "happy": 5, + "n": 10, + "max_segment_size": 128 * 1024, + }) + # Make the chunk size very small so we don't have to operate on a huge + # amount of data to exercise the relevant codepath. + encrypter = upload.EncryptAnUploadable(uploadable, chunk_size=1) + d = encrypter.read_encrypted(len(plaintext), False) + ciphertext = self.successResultOf(d) + self.assertEqual( + list(map(len, ciphertext)), + # Chunk size was specified as 1 above so we will get the whole + # plaintext in one byte chunks. + [1] * len(plaintext), + ) + + # TODO: # upload with exactly 75 servers (shares_of_happiness) # have a download fail diff --git a/src/allmydata/test/test_uri.py b/src/allmydata/test/test_uri.py index 3e21c1674..748a0f6ef 100644 --- a/src/allmydata/test/test_uri.py +++ b/src/allmydata/test/test_uri.py @@ -11,7 +11,7 @@ from __future__ import unicode_literals from future.utils import PY2 if PY2: - from future.builtins import filter, map, zip, ascii, chr, dict, hex, input, next, oct, open, pow, round, super, bytes, int, list, object, range, str, max, min # noqa: F401 + from future.builtins import filter, map, zip, ascii, chr, dict, hex, input, next, oct, open, pow, round, super, bytes, list, object, range, str, max, min # noqa: F401 import os from twisted.trial import unittest diff --git a/src/allmydata/test/test_util.py b/src/allmydata/test/test_util.py index f1f2b1c66..c556eb4b9 100644 --- a/src/allmydata/test/test_util.py +++ b/src/allmydata/test/test_util.py @@ -9,27 +9,33 @@ from __future__ import unicode_literals from future.utils import PY2 if PY2: + # open is not here because we want to use native strings on Py2 from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 import six import os, time, sys import yaml +import json from twisted.trial import unittest from allmydata.util import idlib, mathutil from allmydata.util import fileutil +from allmydata.util import jsonbytes from allmydata.util import pollmixin from allmydata.util import yamlutil from allmydata.util.fileutil import EncryptedTemporaryFile from allmydata.test.common_util import ReallyEqualMixin + if six.PY3: long = int class IDLib(unittest.TestCase): def test_nodeid_b2a(self): - 
self.failUnlessEqual(idlib.nodeid_b2a(b"\x00"*20), "a"*32) + result = idlib.nodeid_b2a(b"\x00"*20) + self.assertEqual(result, "a"*32) + self.assertIsInstance(result, str) class MyList(list): @@ -409,6 +415,16 @@ class FileUtil(ReallyEqualMixin, unittest.TestCase): f.write(b"foobar") f.close() + def test_write(self): + """fileutil.write() can write both unicode and bytes.""" + path = self.mktemp() + fileutil.write(path, b"abc") + with open(path, "rb") as f: + self.assertEqual(f.read(), b"abc") + fileutil.write(path, u"def \u1234") + with open(path, "rb") as f: + self.assertEqual(f.read(), u"def \u1234".encode("utf-8")) + class PollMixinTests(unittest.TestCase): def setUp(self): @@ -469,3 +485,29 @@ class YAML(unittest.TestCase): self.assertIsInstance(back[0], str) self.assertIsInstance(back[1], str) self.assertIsInstance(back[2], str) + + +class JSONBytes(unittest.TestCase): + """Tests for BytesJSONEncoder.""" + + def test_encode_bytes(self): + """BytesJSONEncoder can encode bytes.""" + data = { + b"hello": [1, b"cd"], + } + expected = { + u"hello": [1, u"cd"], + } + # Bytes get passed through as if they were UTF-8 Unicode: + encoded = jsonbytes.dumps(data) + self.assertEqual(json.loads(encoded), expected) + self.assertEqual(jsonbytes.loads(encoded), expected) + + + def test_encode_unicode(self): + """BytesJSONEncoder encodes Unicode string as usual.""" + expected = { + u"hello": [1, u"cd"], + } + encoded = jsonbytes.dumps(expected) + self.assertEqual(json.loads(encoded), expected) diff --git a/src/allmydata/test/test_version.py b/src/allmydata/test/test_version.py deleted file mode 100644 index 7301399d9..000000000 --- a/src/allmydata/test/test_version.py +++ /dev/null @@ -1,275 +0,0 @@ -""" -Tests for allmydata.util.verlib and allmydata.version_checks. - -Ported to Python 3. 
-""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - -import sys -import pkg_resources -from operator import ( - setitem, -) -from twisted.trial import unittest - -from allmydata.version_checks import ( - _cross_check as cross_check, - _extract_openssl_version as extract_openssl_version, - _get_package_versions_and_locations as get_package_versions_and_locations, -) -from allmydata.util.verlib import NormalizedVersion as V, \ - IrrationalVersionError, \ - suggest_normalized_version as suggest - - -class MockSSL(object): - SSLEAY_VERSION = 0 - SSLEAY_CFLAGS = 2 - - def __init__(self, version, compiled_without_heartbeats=False): - self.opts = { - self.SSLEAY_VERSION: version, - self.SSLEAY_CFLAGS: compiled_without_heartbeats and 'compiler: gcc -DOPENSSL_NO_HEARTBEATS' - or 'compiler: gcc', - } - - def SSLeay_version(self, which): - return self.opts[which] - - -class CheckRequirement(unittest.TestCase): - def test_packages_from_pkg_resources(self): - if hasattr(sys, 'frozen'): - raise unittest.SkipTest("This test doesn't apply to frozen builds.") - - class MockPackage(object): - def __init__(self, project_name, version, location): - self.project_name = project_name - self.version = version - self.location = location - - def call_pkg_resources_require(*args): - return [MockPackage("Foo", "1.0", "/path")] - self.patch(pkg_resources, 'require', call_pkg_resources_require) - - (packages, errors) = get_package_versions_and_locations() - self.failUnlessIn(("foo", ("1.0", "/path", "according to pkg_resources")), packages) - self.failIfEqual(errors, []) - self.failUnlessEqual([e for e in errors if "was not found by pkg_resources" not in e], []) - - def test_cross_check_unparseable_versions(self): - # The bug in #1355 is triggered when a version string from either pkg_resources or import - # is not parseable at all by normalized_version. 
- - res = cross_check({"foo": ("unparseable", "")}, [("foo", ("1.0", "", None))]) - self.failUnlessEqual(res, []) - - res = cross_check({"foo": ("1.0", "")}, [("foo", ("unparseable", "", None))]) - self.failUnlessEqual(res, []) - - res = cross_check({"foo": ("unparseable", "")}, [("foo", ("unparseable", "", None))]) - self.failUnlessEqual(res, []) - - def test_cross_check(self): - res = cross_check({}, []) - self.failUnlessEqual(res, []) - - res = cross_check({}, [("tahoe-lafs", ("1.0", "", "blah"))]) - self.failUnlessEqual(res, []) - - res = cross_check({"foo": ("unparseable", "")}, []) - self.failUnlessEqual(res, []) - - res = cross_check({"argparse": ("unparseable", "")}, []) - self.failUnlessEqual(res, []) - - res = cross_check({}, [("foo", ("unparseable", "", None))]) - self.failUnlessEqual(len(res), 1) - self.assertTrue(("version 'unparseable'" in res[0]) or ("version u'unparseable'" in res[0])) - self.failUnlessIn("was not found by pkg_resources", res[0]) - - res = cross_check({"distribute": ("1.0", "/somewhere")}, [("setuptools", ("2.0", "/somewhere", "distribute"))]) - self.failUnlessEqual(res, []) - - res = cross_check({"distribute": ("1.0", "/somewhere")}, [("setuptools", ("2.0", "/somewhere", None))]) - self.failUnlessEqual(len(res), 1) - self.failUnlessIn("location mismatch", res[0]) - - res = cross_check({"distribute": ("1.0", "/somewhere")}, [("setuptools", ("2.0", "/somewhere_different", None))]) - self.failUnlessEqual(len(res), 1) - self.failUnlessIn("location mismatch", res[0]) - - res = cross_check({"zope.interface": ("1.0", "")}, [("zope.interface", ("unknown", "", None))]) - self.failUnlessEqual(res, []) - - res = cross_check({"zope.interface": ("unknown", "")}, [("zope.interface", ("unknown", "", None))]) - self.failUnlessEqual(res, []) - - res = cross_check({"foo": ("1.0", "")}, [("foo", ("unknown", "", None))]) - self.failUnlessEqual(len(res), 1) - self.failUnlessIn("could not find a version number", res[0]) - - res = cross_check({"foo": ("unknown", "")}, [("foo", ("unknown", "", None))]) - self.failUnlessEqual(res, []) - - # When pkg_resources and import both find a package, there is only a warning if both - # the version and the path fail to match. 
- - res = cross_check({"foo": ("1.0", "/somewhere")}, [("foo", ("2.0", "/somewhere", None))]) - self.failUnlessEqual(res, []) - - res = cross_check({"foo": ("1.0", "/somewhere")}, [("foo", ("1.0", "/somewhere_different", None))]) - self.failUnlessEqual(res, []) - - res = cross_check({"foo": ("1.0-r123", "/somewhere")}, [("foo", ("1.0.post123", "/somewhere_different", None))]) - self.failUnlessEqual(res, []) - - res = cross_check({"foo": ("1.0", "/somewhere")}, [("foo", ("2.0", "/somewhere_different", None))]) - self.failUnlessEqual(len(res), 1) - self.assertTrue(("but version '2.0'" in res[0]) or ("but version u'2.0'" in res[0])) - - def test_extract_openssl_version(self): - self.failUnlessEqual(extract_openssl_version(MockSSL("")), - ("", None, None)) - self.failUnlessEqual(extract_openssl_version(MockSSL("NotOpenSSL a.b.c foo")), - ("NotOpenSSL", None, "a.b.c foo")) - self.failUnlessEqual(extract_openssl_version(MockSSL("OpenSSL a.b.c")), - ("a.b.c", None, None)) - self.failUnlessEqual(extract_openssl_version(MockSSL("OpenSSL 1.0.1e 11 Feb 2013")), - ("1.0.1e", None, "11 Feb 2013")) - self.failUnlessEqual(extract_openssl_version(MockSSL("OpenSSL 1.0.1e 11 Feb 2013", compiled_without_heartbeats=True)), - ("1.0.1e", None, "11 Feb 2013, no heartbeats")) - - -# based on https://bitbucket.org/tarek/distutilsversion/src/17df9a7d96ef/test_verlib.py - -class VersionTestCase(unittest.TestCase): - versions = ((V('1.0'), '1.0'), - (V('1.1'), '1.1'), - (V('1.2.3'), '1.2.3'), - (V('1.2'), '1.2'), - (V('1.2.3a4'), '1.2.3a4'), - (V('1.2c4'), '1.2c4'), - (V('1.2.3.4'), '1.2.3.4'), - (V('1.2.3.4.0b3'), '1.2.3.4b3'), - (V('1.2.0.0.0'), '1.2'), - (V('1.0.dev345'), '1.0.dev345'), - (V('1.0.post456.dev623'), '1.0.post456.dev623')) - - def test_basic_versions(self): - for v, s in self.versions: - self.failUnlessEqual(str(v), s) - - def test_from_parts(self): - for v, s in self.versions: - parts = v.parts - v2 = V.from_parts(*parts) - self.failUnlessEqual(v, v2) - self.failUnlessEqual(str(v), str(v2)) - - def test_irrational_versions(self): - irrational = ('1', '1.2a', '1.2.3b', '1.02', '1.2a03', - '1.2a3.04', '1.2.dev.2', '1.2dev', '1.2.dev', - '1.2.dev2.post2', '1.2.post2.dev3.post4') - - for s in irrational: - self.failUnlessRaises(IrrationalVersionError, V, s) - - def test_comparison(self): - self.failUnlessRaises(TypeError, lambda: V('1.2.0') == '1.2') - - self.failUnlessEqual(V('1.2.0'), V('1.2')) - self.failIfEqual(V('1.2.0'), V('1.2.3')) - self.failUnless(V('1.2.0') < V('1.2.3')) - self.failUnless(V('1.0') > V('1.0b2')) - self.failUnless(V('1.0') > V('1.0c2') > V('1.0c1') > V('1.0b2') > V('1.0b1') - > V('1.0a2') > V('1.0a1')) - self.failUnless(V('1.0.0') > V('1.0.0c2') > V('1.0.0c1') > V('1.0.0b2') > V('1.0.0b1') - > V('1.0.0a2') > V('1.0.0a1')) - - self.failUnless(V('1.0') < V('1.0.post456.dev623')) - self.failUnless(V('1.0.post456.dev623') < V('1.0.post456') < V('1.0.post1234')) - - self.failUnless(V('1.0a1') - < V('1.0a2.dev456') - < V('1.0a2') - < V('1.0a2.1.dev456') # e.g. 
need to do a quick post release on 1.0a2 - < V('1.0a2.1') - < V('1.0b1.dev456') - < V('1.0b2') - < V('1.0c1') - < V('1.0c2.dev456') - < V('1.0c2') - < V('1.0.dev7') - < V('1.0.dev18') - < V('1.0.dev456') - < V('1.0.dev1234') - < V('1.0') - < V('1.0.post456.dev623') # development version of a post release - < V('1.0.post456')) - - def test_suggest_normalized_version(self): - self.failUnlessEqual(suggest('1.0'), '1.0') - self.failUnlessEqual(suggest('1.0-alpha1'), '1.0a1') - self.failUnlessEqual(suggest('1.0c2'), '1.0c2') - self.failUnlessEqual(suggest('walla walla washington'), None) - self.failUnlessEqual(suggest('2.4c1'), '2.4c1') - - # from setuptools - self.failUnlessEqual(suggest('0.4a1.r10'), '0.4a1.post10') - self.failUnlessEqual(suggest('0.7a1dev-r66608'), '0.7a1.dev66608') - self.failUnlessEqual(suggest('0.6a9.dev-r41475'), '0.6a9.dev41475') - self.failUnlessEqual(suggest('2.4preview1'), '2.4c1') - self.failUnlessEqual(suggest('2.4pre1') , '2.4c1') - self.failUnlessEqual(suggest('2.1-rc2'), '2.1c2') - - # from pypi - self.failUnlessEqual(suggest('0.1dev'), '0.1.dev0') - self.failUnlessEqual(suggest('0.1.dev'), '0.1.dev0') - - # we want to be able to parse Twisted - # development versions are like post releases in Twisted - self.failUnlessEqual(suggest('9.0.0+r2363'), '9.0.0.post2363') - - # pre-releases are using markers like "pre1" - self.failUnlessEqual(suggest('9.0.0pre1'), '9.0.0c1') - - # we want to be able to parse Tcl-TK - # they use "p1" "p2" for post releases - self.failUnlessEqual(suggest('1.4p1'), '1.4.post1') - - # from darcsver - self.failUnlessEqual(suggest('1.8.1-r4956'), '1.8.1.post4956') - - # zetuptoolz - self.failUnlessEqual(suggest('0.6c16dev3'), '0.6c16.dev3') - - -class T(unittest.TestCase): - def test_report_import_error(self): - """ - get_package_versions_and_locations reports a dependency if a dependency - cannot be imported. - """ - # Make sure we don't leave the system in a bad state. - self.addCleanup( - lambda foolscap=sys.modules["foolscap"]: setitem( - sys.modules, - "foolscap", - foolscap, - ), - ) - # Make it look like Foolscap isn't installed. 
- sys.modules["foolscap"] = None - vers_and_locs, errors = get_package_versions_and_locations() - - foolscap_stuffs = [stuff for (pkg, stuff) in vers_and_locs if pkg == 'foolscap'] - self.failUnlessEqual(len(foolscap_stuffs), 1) - self.failUnless([e for e in errors if "dependency \'foolscap\' could not be imported" in e]) diff --git a/src/allmydata/test/test_websocket_logs.py b/src/allmydata/test/test_websocket_logs.py deleted file mode 100644 index e666a4902..000000000 --- a/src/allmydata/test/test_websocket_logs.py +++ /dev/null @@ -1,54 +0,0 @@ -import json - -from twisted.trial import unittest -from twisted.internet.defer import inlineCallbacks - -from eliot import log_call - -from autobahn.twisted.testing import create_memory_agent, MemoryReactorClockResolver, create_pumper - -from allmydata.web.logs import TokenAuthenticatedWebSocketServerProtocol - - -class TestStreamingLogs(unittest.TestCase): - """ - Test websocket streaming of logs - """ - - def setUp(self): - self.reactor = MemoryReactorClockResolver() - self.pumper = create_pumper() - self.agent = create_memory_agent(self.reactor, self.pumper, TokenAuthenticatedWebSocketServerProtocol) - return self.pumper.start() - - def tearDown(self): - return self.pumper.stop() - - @inlineCallbacks - def test_one_log(self): - """ - write a single Eliot log and see it streamed via websocket - """ - - proto = yield self.agent.open( - transport_config=u"ws://localhost:1234/ws", - options={}, - ) - - messages = [] - def got_message(msg, is_binary=False): - messages.append(json.loads(msg)) - proto.on("message", got_message) - - @log_call(action_type=u"test:cli:some-exciting-action") - def do_a_thing(): - pass - - do_a_thing() - - proto.transport.loseConnection() - yield proto.is_closed - - self.assertEqual(len(messages), 2) - self.assertEqual("started", messages[0]["action_status"]) - self.assertEqual("succeeded", messages[1]["action_status"]) diff --git a/src/allmydata/test/web/common.py b/src/allmydata/test/web/common.py index 1f568ad8d..00a40e3c5 100644 --- a/src/allmydata/test/web/common.py +++ b/src/allmydata/test/web/common.py @@ -25,7 +25,8 @@ def assert_soup_has_tag_with_attributes(testcase, soup, tag_name, attrs): tags = soup.find_all(tag_name) for tag in tags: if all(v in tag.attrs.get(k, []) for k, v in attrs.items()): - return # we found every attr in this tag; done + # we found every attr in this tag; done + return tag testcase.fail( u"No <{}> tags contain attributes: {}".format(tag_name, attrs) ) diff --git a/src/allmydata/test/web/test_common.py b/src/allmydata/test/web/test_common.py new file mode 100644 index 000000000..84ab5cab2 --- /dev/null +++ b/src/allmydata/test/web/test_common.py @@ -0,0 +1,267 @@ +""" +Tests for ``allmydata.web.common``. + +Ported to Python 3. 
+""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 + +import gc + +from bs4 import ( + BeautifulSoup, +) +from hyperlink import ( + DecodedURL, +) + +from testtools.matchers import ( + Equals, + Contains, + MatchesPredicate, + AfterPreprocessing, +) +from testtools.twistedsupport import ( + failed, + succeeded, + has_no_result, +) + +from twisted.python.failure import ( + Failure, +) +from twisted.internet.error import ( + ConnectionDone, +) +from twisted.internet.defer import ( + Deferred, + fail, +) +from twisted.web.server import ( + NOT_DONE_YET, +) +from twisted.web.resource import ( + Resource, +) + +from ...web.common import ( + render_exception, +) + +from ..common import ( + SyncTestCase, +) +from ..common_web import ( + render, +) +from .common import ( + assert_soup_has_tag_with_attributes, +) + +class StaticResource(Resource, object): + """ + ``StaticResource`` is a resource that returns whatever Python object it is + given from its render method. This is useful for testing + ``render_exception``\\ 's handling of different render results. + """ + def __init__(self, response): + Resource.__init__(self) + self._response = response + self._request = None + + @render_exception + def render(self, request): + self._request = request + return self._response + + +class RenderExceptionTests(SyncTestCase): + """ + Tests for ``render_exception`` (including the private helper ``_finish``). + """ + def test_exception(self): + """ + If the decorated method raises an exception then the exception is rendered + into the response. + """ + class R(Resource): + @render_exception + def render(self, request): + raise Exception("synthetic exception") + + self.assertThat( + render(R(), {}), + succeeded( + Contains(b"synthetic exception"), + ), + ) + + def test_failure(self): + """ + If the decorated method returns a ``Deferred`` that fires with a + ``Failure`` then the exception the ``Failure`` wraps is rendered into + the response. + """ + resource = StaticResource(fail(Exception("synthetic exception"))) + self.assertThat( + render(resource, {}), + succeeded( + Contains(b"synthetic exception"), + ), + ) + + def test_resource(self): + """ + If the decorated method returns an ``IResource`` provider then that + resource is used to render the response. + """ + resource = StaticResource(StaticResource(b"static result")) + self.assertThat( + render(resource, {}), + succeeded( + Equals(b"static result"), + ), + ) + + def test_unicode(self): + """ + If the decorated method returns a ``unicode`` string then that string is + UTF-8 encoded and rendered into the response. + """ + text = u"\N{SNOWMAN}" + resource = StaticResource(text) + self.assertThat( + render(resource, {}), + succeeded( + Equals(text.encode("utf-8")), + ), + ) + + def test_bytes(self): + """ + If the decorated method returns a ``bytes`` string then that string is + rendered into the response. + """ + data = b"hello world" + resource = StaticResource(data) + self.assertThat( + render(resource, {}), + succeeded( + Equals(data), + ), + ) + + def test_decodedurl(self): + """ + If the decorated method returns a ``DecodedURL`` then a redirect to that + location is rendered into the response. 
+ """ + loc = u"http://example.invalid/foo?bar=baz" + resource = StaticResource(DecodedURL.from_text(loc)) + self.assertThat( + render(resource, {}), + succeeded( + MatchesPredicate( + lambda value: assert_soup_has_tag_with_attributes( + self, + BeautifulSoup(value, 'html5lib'), + "meta", + {"http-equiv": "refresh", + "content": "0;URL={}".format(loc), + }, + ) + # The assertion will raise if it has a problem, otherwise + # return None. Turn the None into something + # MatchesPredicate recognizes as success. + or True, + "did not find meta refresh tag in %r", + ), + ), + ) + + def test_none(self): + """ + If the decorated method returns ``None`` then the response is finished + with no additional content. + """ + self.assertThat( + render(StaticResource(None), {}), + succeeded( + Equals(b""), + ), + ) + + def test_not_done_yet(self): + """ + If the decorated method returns ``NOT_DONE_YET`` then the resource is + responsible for finishing the request itself. + """ + the_request = [] + class R(Resource): + @render_exception + def render(self, request): + the_request.append(request) + return NOT_DONE_YET + + d = render(R(), {}) + + self.assertThat( + d, + has_no_result(), + ) + + the_request[0].write(b"some content") + the_request[0].finish() + + self.assertThat( + d, + succeeded( + Equals(b"some content"), + ), + ) + + def test_unknown(self): + """ + If the decorated method returns something which is not explicitly + supported, an internal server error is rendered into the response. + """ + self.assertThat( + render(StaticResource(object()), {}), + succeeded( + Equals(b"Internal Server Error"), + ), + ) + + def test_disconnected(self): + """ + If the transport is disconnected before the response is available, no + ``RuntimeError`` is logged for finishing a disconnected request. + """ + result = Deferred() + resource = StaticResource(result) + d = render(resource, {}) + + resource._request.connectionLost(Failure(ConnectionDone())) + result.callback(b"Some result") + + self.assertThat( + d, + failed( + AfterPreprocessing( + lambda reason: reason.type, + Equals(ConnectionDone), + ), + ), + ) + + # Since we're not a trial TestCase we don't have flushLoggedErrors. + # The next best thing is to make sure any dangling Deferreds have been + # garbage collected and then let the generic trial logic for failing + # tests with logged errors kick in. + gc.collect() diff --git a/src/allmydata/test/web/test_grid.py b/src/allmydata/test/web/test_grid.py index 01eb93fa7..ef2718df4 100644 --- a/src/allmydata/test/web/test_grid.py +++ b/src/allmydata/test/web/test_grid.py @@ -1,6 +1,17 @@ +""" +Ported to Python 3. +""" from __future__ import print_function +from __future__ import absolute_import +from __future__ import division +from __future__ import unicode_literals -import os.path, re, urllib +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 + +import os.path, re +from urllib.parse import quote as url_quote import json from six.moves import StringIO @@ -18,6 +29,10 @@ from allmydata.storage.shares import get_share_file from allmydata.scripts.debug import CorruptShareOptions, corrupt_share from allmydata.immutable import upload from allmydata.mutable import publish + +from ...web.common import ( + render_exception, +) from .. 
import common_util as testutil
 from ..common import WebErrorMixin, ShouldFailMixin
 from ..no_network import GridTestMixin
@@ -33,7 +48,8 @@ DIR_HTML_TAG = '<html lang="en">'
 class CompletelyUnhandledError(Exception):
     pass

-class ErrorBoom(object, resource.Resource):
+class ErrorBoom(resource.Resource, object):
+
+    @render_exception
     def render(self, req):
         raise CompletelyUnhandledError("whoops")

@@ -42,32 +58,38 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
     def CHECK(self, ign, which, args, clientnum=0):
         fileurl = self.fileurls[which]
         url = fileurl + "?" + args
-        return self.GET(url, method="POST", clientnum=clientnum)
+        return self.GET_unicode(url, method="POST", clientnum=clientnum)
+
+    def GET_unicode(self, *args, **kwargs):
+        """Send an HTTP request, but convert result to Unicode string."""
+        d = GridTestMixin.GET(self, *args, **kwargs)
+        d.addCallback(str, "utf-8")
+        return d

     def test_filecheck(self):
         self.basedir = "web/Grid/filecheck"
         self.set_up_grid()
         c0 = self.g.clients[0]
         self.uris = {}
-        DATA = "data" * 100
-        d = c0.upload(upload.Data(DATA, convergence=""))
+        DATA = b"data" * 100
+        d = c0.upload(upload.Data(DATA, convergence=b""))
         def _stash_uri(ur, which):
             self.uris[which] = ur.get_uri()
         d.addCallback(_stash_uri, "good")
         d.addCallback(lambda ign:
-                      c0.upload(upload.Data(DATA+"1", convergence="")))
+                      c0.upload(upload.Data(DATA+b"1", convergence=b"")))
         d.addCallback(_stash_uri, "sick")
         d.addCallback(lambda ign:
-                      c0.upload(upload.Data(DATA+"2", convergence="")))
+                      c0.upload(upload.Data(DATA+b"2", convergence=b"")))
         d.addCallback(_stash_uri, "dead")
         def _stash_mutable_uri(n, which):
             self.uris[which] = n.get_uri()
-            assert isinstance(self.uris[which], str)
+            assert isinstance(self.uris[which], bytes)
         d.addCallback(lambda ign:
-            c0.create_mutable_file(publish.MutableData(DATA+"3")))
+            c0.create_mutable_file(publish.MutableData(DATA+b"3")))
         d.addCallback(_stash_mutable_uri, "corrupt")
         d.addCallback(lambda ign:
-                      c0.upload(upload.Data("literal", convergence="")))
+                      c0.upload(upload.Data(b"literal", convergence=b"")))
         d.addCallback(_stash_uri, "small")
         d.addCallback(lambda ign: c0.create_immutable_dirnode({}))
         d.addCallback(_stash_mutable_uri, "smalldir")
@@ -75,7 +97,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
         def _compute_fileurls(ignored):
             self.fileurls = {}
             for which in self.uris:
-                self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
+                self.fileurls[which] = "uri/" + url_quote(self.uris[which])
         d.addCallback(_compute_fileurls)

         def _clobber_shares(ignored):
@@ -198,28 +220,28 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
         self.set_up_grid()
         c0 = self.g.clients[0]
         self.uris = {}
-        DATA = "data" * 100
-        d = c0.upload(upload.Data(DATA, convergence=""))
+        DATA = b"data" * 100
+        d = c0.upload(upload.Data(DATA, convergence=b""))
         def _stash_uri(ur, which):
             self.uris[which] = ur.get_uri()
         d.addCallback(_stash_uri, "good")
         d.addCallback(lambda ign:
-                      c0.upload(upload.Data(DATA+"1", convergence="")))
+                      c0.upload(upload.Data(DATA+b"1", convergence=b"")))
         d.addCallback(_stash_uri, "sick")
         d.addCallback(lambda ign:
-                      c0.upload(upload.Data(DATA+"2", convergence="")))
+                      c0.upload(upload.Data(DATA+b"2", convergence=b"")))
         d.addCallback(_stash_uri, "dead")
         def _stash_mutable_uri(n, which):
             self.uris[which] = n.get_uri()
-            assert isinstance(self.uris[which], str)
+            assert isinstance(self.uris[which], bytes)
         d.addCallback(lambda ign:
-            c0.create_mutable_file(publish.MutableData(DATA+"3")))
+            c0.create_mutable_file(publish.MutableData(DATA+b"3")))
         d.addCallback(_stash_mutable_uri, "corrupt")

         def _compute_fileurls(ignored):
             self.fileurls = {}
             for which in self.uris:
-                self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
+                self.fileurls[which] = "uri/" + url_quote(self.uris[which])
         d.addCallback(_compute_fileurls)

         def _clobber_shares(ignored):
@@ -281,8 +303,8 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
         self.set_up_grid()
         c0 = self.g.clients[0]
         self.uris = {}
-        DATA = "data" * 100
-        d = c0.upload(upload.Data(DATA+"1", convergence=""))
+        DATA = b"data" * 100
+        d = c0.upload(upload.Data(DATA+b"1", convergence=b""))
         def _stash_uri(ur, which):
             self.uris[which] = ur.get_uri()
         d.addCallback(_stash_uri, "sick")
@@ -290,7 +312,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
         def _compute_fileurls(ignored):
             self.fileurls = {}
             for which in self.uris:
-                self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
+                self.fileurls[which] = "uri/" + url_quote(self.uris[which])
         d.addCallback(_compute_fileurls)

         def _clobber_shares(ignored):
@@ -324,7 +346,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
         self.fileurls = {}

         # the future cap format may contain slashes, which must be tolerated
-        expected_info_url = "uri/%s?t=info" % urllib.quote(unknown_rwcap,
-                                                           safe="")
+        expected_info_url = "uri/%s?t=info" % url_quote(unknown_rwcap,
+                                                        safe="")

         if immutable:
@@ -338,8 +360,8 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
         def _stash_root_and_create_file(n):
             self.rootnode = n
-            self.rooturl = "uri/" + urllib.quote(n.get_uri())
-            self.rourl = "uri/" + urllib.quote(n.get_readonly_uri())
+            self.rooturl = "uri/" + url_quote(n.get_uri())
+            self.rourl = "uri/" + url_quote(n.get_readonly_uri())
             if not immutable:
                 return self.rootnode.set_node(name, future_node)
         d.addCallback(_stash_root_and_create_file)
@@ -347,18 +369,19 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
         # make sure directory listing tolerates unknown nodes
         d.addCallback(lambda ign: self.GET(self.rooturl))
         def _check_directory_html(res, expected_type_suffix):
-            pattern = re.compile(r'<td>\?%s</td>[ \t\n\r]*'
-                                 '<td>%s</td>' % (expected_type_suffix, str(name)),
-                                 re.DOTALL)
+            pattern = re.compile(br'<td>\?%s</td>[ \t\n\r]*'
+                                 b'<td>%s</td>' % (
+                                     expected_type_suffix, name.encode("ascii")),
+                                 re.DOTALL)
             self.failUnless(re.search(pattern, res), res)
             # find the More Info link for name, should be relative
-            mo = re.search(r'<a href="([^"]+)">More Info</a>', res)
+            mo = re.search(br'<a href="([^"]+)">More Info</a>', res)
             info_url = mo.group(1)
-            self.failUnlessReallyEqual(info_url, "%s?t=info" % (str(name),))
+            self.failUnlessReallyEqual(info_url, b"%s?t=info" % (name.encode("ascii"),))
         if immutable:
-            d.addCallback(_check_directory_html, "-IMM")
+            d.addCallback(_check_directory_html, b"-IMM")
         else:
-            d.addCallback(_check_directory_html, "")
+            d.addCallback(_check_directory_html, b"")

         d.addCallback(lambda ign: self.GET(self.rooturl+"?t=json"))
         def _check_directory_json(res, expect_rw_uri):
@@ -378,7 +401,6 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
         d.addCallback(_check_directory_json, expect_rw_uri=not immutable)

         def _check_info(res, expect_rw_uri, expect_ro_uri):
-            self.failUnlessIn("Object Type: unknown", res)
             if expect_rw_uri:
                 self.failUnlessIn(unknown_rwcap, res)
             if expect_ro_uri:
@@ -388,6 +410,8 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
                 self.failUnlessIn(unknown_rocap,
res) else: self.failIfIn(unknown_rocap, res) + res = str(res, "utf-8") + self.failUnlessIn("Object Type: unknown", res) self.failIfIn("Raw data as", res) self.failIfIn("Directory writecap", res) self.failIfIn("Checker Operations", res) @@ -399,7 +423,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi d.addCallback(lambda ign: self.GET(expected_info_url)) d.addCallback(_check_info, expect_rw_uri=False, expect_ro_uri=False) - d.addCallback(lambda ign: self.GET("%s/%s?t=info" % (self.rooturl, str(name)))) + d.addCallback(lambda ign: self.GET("%s/%s?t=info" % (self.rooturl, name))) d.addCallback(_check_info, expect_rw_uri=False, expect_ro_uri=True) def _check_json(res, expect_rw_uri): @@ -431,9 +455,9 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi # or not future_node was immutable. d.addCallback(lambda ign: self.GET(self.rourl)) if immutable: - d.addCallback(_check_directory_html, "-IMM") + d.addCallback(_check_directory_html, b"-IMM") else: - d.addCallback(_check_directory_html, "-RO") + d.addCallback(_check_directory_html, b"-RO") d.addCallback(lambda ign: self.GET(self.rourl+"?t=json")) d.addCallback(_check_directory_json, expect_rw_uri=False) @@ -457,9 +481,9 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi self.uris = {} self.fileurls = {} - lonely_uri = "URI:LIT:n5xgk" # LIT for "one" - mut_write_uri = "URI:SSK:vfvcbdfbszyrsaxchgevhmmlii:euw4iw7bbnkrrwpzuburbhppuxhc3gwxv26f6imekhz7zyw2ojnq" - mut_read_uri = "URI:SSK-RO:e3mdrzfwhoq42hy5ubcz6rp3o4:ybyibhnp3vvwuq2vaw2ckjmesgkklfs6ghxleztqidihjyofgw7q" + lonely_uri = b"URI:LIT:n5xgk" # LIT for "one" + mut_write_uri = b"URI:SSK:vfvcbdfbszyrsaxchgevhmmlii:euw4iw7bbnkrrwpzuburbhppuxhc3gwxv26f6imekhz7zyw2ojnq" + mut_read_uri = b"URI:SSK-RO:e3mdrzfwhoq42hy5ubcz6rp3o4:ybyibhnp3vvwuq2vaw2ckjmesgkklfs6ghxleztqidihjyofgw7q" # This method tests mainly dirnode, but we'd have to duplicate code in order to # test the dirnode and web layers separately. 
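As a side note on the fixture above, the reason b"URI:LIT:n5xgk" is the
literal cap for the file "one" is that LIT caps embed the file body itself,
base32-encoded, instead of pointing at shares. A rough sketch of that
encoding (lit_cap is an illustrative helper, not a tahoe API; the alphabet
is assumed to be tahoe's lowercase, unpadded RFC 3548 base32):

    from base64 import b32encode

    def lit_cap(data):
        # Encode the body with RFC 3548 base32, then lowercase it and
        # strip the "=" padding, tahoe-style.
        return b"URI:LIT:" + b32encode(data).rstrip(b"=").lower()

    assert lit_cap(b"one") == b"URI:LIT:n5xgk"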
@@ -502,10 +526,10 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi rep = str(dn) self.failUnlessIn("RO-IMM", rep) cap = dn.get_cap() - self.failUnlessIn("CHK", cap.to_string()) + self.failUnlessIn(b"CHK", cap.to_string()) self.cap = cap self.rootnode = dn - self.rooturl = "uri/" + urllib.quote(dn.get_uri()) + self.rooturl = "uri/" + url_quote(dn.get_uri()) return download_to_data(dn._node) d.addCallback(_created) @@ -521,7 +545,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi entry = entries[0] (name_utf8, ro_uri, rwcapdata, metadata_s), subpos = split_netstring(entry, 4) name = name_utf8.decode("utf-8") - self.failUnlessEqual(rwcapdata, "") + self.failUnlessEqual(rwcapdata, b"") self.failUnlessIn(name, kids) (expected_child, ign) = kids[name] self.failUnlessReallyEqual(ro_uri, expected_child.get_readonly_uri()) @@ -548,13 +572,13 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi d.addCallback(lambda ign: self.GET(self.rooturl)) def _check_html(res): soup = BeautifulSoup(res, 'html5lib') - self.failIfIn("URI:SSK", res) + self.failIfIn(b"URI:SSK", res) found = False for td in soup.find_all(u"td"): if td.text != u"FILE": continue a = td.findNextSibling()(u"a")[0] - self.assertIn(urllib.quote(lonely_uri), a[u"href"]) + self.assertIn(url_quote(lonely_uri), a[u"href"]) self.assertEqual(u"lonely", a.text) self.assertEqual(a[u"rel"], [u"noreferrer"]) self.assertEqual(u"{}".format(len("one")), td.findNextSibling().findNextSibling().text) @@ -568,7 +592,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi if a.text == u"More Info" ) self.assertEqual(1, len(infos)) - self.assertTrue(infos[0].endswith(urllib.quote(lonely_uri) + "?t=info")) + self.assertTrue(infos[0].endswith(url_quote(lonely_uri) + "?t=info")) d.addCallback(_check_html) # ... and in JSON. 
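For reference, the split_netstring calls above unpack the
"<length>:<bytes>," framing used for the serialized dirnode entries. A
minimal sketch of that codec (illustrative only, not
allmydata.util.netstring itself):

    def netstring(data):
        # Frame a byte string as "<decimal length>:<bytes>,".
        return b"%d:%s," % (len(data), data)

    def parse_netstring(s, pos=0):
        # Return (payload, position just past the trailing comma).
        colon = s.index(b":", pos)
        length = int(s[pos:colon])
        start = colon + 1
        assert s[start + length:start + length + 1] == b","
        return s[start:start + length], start + length + 1

    assert netstring(b"one") == b"3:one,"
    assert parse_netstring(b"3:one,") == (b"one", 6)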
@@ -591,12 +615,12 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi c0 = self.g.clients[0] self.uris = {} self.fileurls = {} - DATA = "data" * 100 + DATA = b"data" * 100 d = c0.create_dirnode() def _stash_root_and_create_file(n): self.rootnode = n - self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) - return n.add_file(u"good", upload.Data(DATA, convergence="")) + self.fileurls["root"] = "uri/" + url_quote(n.get_uri()) + return n.add_file(u"good", upload.Data(DATA, convergence=b"")) d.addCallback(_stash_root_and_create_file) def _stash_uri(fn, which): self.uris[which] = fn.get_uri() @@ -604,13 +628,13 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi d.addCallback(_stash_uri, "good") d.addCallback(lambda ign: self.rootnode.add_file(u"small", - upload.Data("literal", - convergence=""))) + upload.Data(b"literal", + convergence=b""))) d.addCallback(_stash_uri, "small") d.addCallback(lambda ign: self.rootnode.add_file(u"sick", - upload.Data(DATA+"1", - convergence=""))) + upload.Data(DATA+b"1", + convergence=b""))) d.addCallback(_stash_uri, "sick") # this tests that deep-check and stream-manifest will ignore @@ -690,13 +714,13 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi d.addCallback(_stash_uri, "subdir") d.addCallback(lambda subdir_node: subdir_node.add_file(u"grandchild", - upload.Data(DATA+"2", - convergence=""))) + upload.Data(DATA+b"2", + convergence=b""))) d.addCallback(_stash_uri, "grandchild") d.addCallback(lambda ign: self.delete_shares_numbered(self.uris["subdir"], - range(1, 10))) + list(range(1, 10)))) # root # root/good @@ -765,30 +789,30 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi c0 = self.g.clients[0] self.uris = {} self.fileurls = {} - DATA = "data" * 100 + DATA = b"data" * 100 d = c0.create_dirnode() def _stash_root_and_create_file(n): self.rootnode = n - self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) - return n.add_file(u"good", upload.Data(DATA, convergence="")) + self.fileurls["root"] = "uri/" + url_quote(n.get_uri()) + return n.add_file(u"good", upload.Data(DATA, convergence=b"")) d.addCallback(_stash_root_and_create_file) def _stash_uri(fn, which): self.uris[which] = fn.get_uri() d.addCallback(_stash_uri, "good") d.addCallback(lambda ign: self.rootnode.add_file(u"small", - upload.Data("literal", - convergence=""))) + upload.Data(b"literal", + convergence=b""))) d.addCallback(_stash_uri, "small") d.addCallback(lambda ign: self.rootnode.add_file(u"sick", - upload.Data(DATA+"1", - convergence=""))) + upload.Data(DATA+b"1", + convergence=b""))) d.addCallback(_stash_uri, "sick") #d.addCallback(lambda ign: # self.rootnode.add_file(u"dead", - # upload.Data(DATA+"2", - # convergence=""))) + # upload.Data(DATA+b"2", + # convergence=b""))) #d.addCallback(_stash_uri, "dead") #d.addCallback(lambda ign: c0.create_mutable_file("mutable")) @@ -883,25 +907,25 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi self.set_up_grid(num_clients=2, oneshare=True) c0 = self.g.clients[0] self.uris = {} - DATA = "data" * 100 - d = c0.upload(upload.Data(DATA, convergence="")) + DATA = b"data" * 100 + d = c0.upload(upload.Data(DATA, convergence=b"")) def _stash_uri(ur, which): self.uris[which] = ur.get_uri() d.addCallback(_stash_uri, "one") d.addCallback(lambda ign: - c0.upload(upload.Data(DATA+"1", convergence=""))) + c0.upload(upload.Data(DATA+b"1", convergence=b""))) d.addCallback(_stash_uri, 
"two") def _stash_mutable_uri(n, which): self.uris[which] = n.get_uri() - assert isinstance(self.uris[which], str) + assert isinstance(self.uris[which], bytes) d.addCallback(lambda ign: - c0.create_mutable_file(publish.MutableData(DATA+"2"))) + c0.create_mutable_file(publish.MutableData(DATA+b"2"))) d.addCallback(_stash_mutable_uri, "mutable") def _compute_fileurls(ignored): self.fileurls = {} for which in self.uris: - self.fileurls[which] = "uri/" + urllib.quote(self.uris[which]) + self.fileurls[which] = "uri/" + url_quote(self.uris[which]) d.addCallback(_compute_fileurls) d.addCallback(self._count_leases, "one") @@ -977,25 +1001,25 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi c0 = self.g.clients[0] self.uris = {} self.fileurls = {} - DATA = "data" * 100 + DATA = b"data" * 100 d = c0.create_dirnode() def _stash_root_and_create_file(n): self.rootnode = n self.uris["root"] = n.get_uri() - self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) - return n.add_file(u"one", upload.Data(DATA, convergence="")) + self.fileurls["root"] = "uri/" + url_quote(n.get_uri()) + return n.add_file(u"one", upload.Data(DATA, convergence=b"")) d.addCallback(_stash_root_and_create_file) def _stash_uri(fn, which): self.uris[which] = fn.get_uri() d.addCallback(_stash_uri, "one") d.addCallback(lambda ign: self.rootnode.add_file(u"small", - upload.Data("literal", - convergence=""))) + upload.Data(b"literal", + convergence=b""))) d.addCallback(_stash_uri, "small") d.addCallback(lambda ign: - c0.create_mutable_file(publish.MutableData("mutable"))) + c0.create_mutable_file(publish.MutableData(b"mutable"))) d.addCallback(lambda fn: self.rootnode.set_node(u"mutable", fn)) d.addCallback(_stash_uri, "mutable") @@ -1046,36 +1070,36 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi c0 = self.g.clients[0] c0.encoding_params['happy'] = 2 self.fileurls = {} - DATA = "data" * 100 + DATA = b"data" * 100 d = c0.create_dirnode() def _stash_root(n): - self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + self.fileurls["root"] = "uri/" + url_quote(n.get_uri()) self.fileurls["imaginary"] = self.fileurls["root"] + "/imaginary" return n d.addCallback(_stash_root) - d.addCallback(lambda ign: c0.upload(upload.Data(DATA, convergence=""))) + d.addCallback(lambda ign: c0.upload(upload.Data(DATA, convergence=b""))) def _stash_bad(ur): - self.fileurls["1share"] = "uri/" + urllib.quote(ur.get_uri()) - self.delete_shares_numbered(ur.get_uri(), range(1,10)) + self.fileurls["1share"] = "uri/" + url_quote(ur.get_uri()) + self.delete_shares_numbered(ur.get_uri(), list(range(1,10))) u = uri.from_string(ur.get_uri()) u.key = testutil.flip_bit(u.key, 0) baduri = u.to_string() - self.fileurls["0shares"] = "uri/" + urllib.quote(baduri) + self.fileurls["0shares"] = "uri/" + url_quote(baduri) d.addCallback(_stash_bad) d.addCallback(lambda ign: c0.create_dirnode()) def _mangle_dirnode_1share(n): u = n.get_uri() - url = self.fileurls["dir-1share"] = "uri/" + urllib.quote(u) + url = self.fileurls["dir-1share"] = "uri/" + url_quote(u) self.fileurls["dir-1share-json"] = url + "?t=json" - self.delete_shares_numbered(u, range(1,10)) + self.delete_shares_numbered(u, list(range(1,10))) d.addCallback(_mangle_dirnode_1share) d.addCallback(lambda ign: c0.create_dirnode()) def _mangle_dirnode_0share(n): u = n.get_uri() - url = self.fileurls["dir-0share"] = "uri/" + urllib.quote(u) + url = self.fileurls["dir-0share"] = "uri/" + url_quote(u) self.fileurls["dir-0share-json"] = url + 
"?t=json" - self.delete_shares_numbered(u, range(0,10)) + self.delete_shares_numbered(u, list(range(0,10))) d.addCallback(_mangle_dirnode_0share) # NotEnoughSharesError should be reported sensibly, with a @@ -1087,6 +1111,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi 410, "Gone", "NoSharesError", self.GET, self.fileurls["0shares"])) def _check_zero_shares(body): + body = str(body, "utf-8") self.failIfIn("", body) body = " ".join(body.strip().split()) exp = ("NoSharesError: no shares could be found. " @@ -1095,7 +1120,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi "severe corruption. You should perform a filecheck on " "this object to learn more. The full error message is: " "no shares (need 3). Last failure: None") - self.failUnlessReallyEqual(exp, body) + self.assertEqual(exp, body) d.addCallback(_check_zero_shares) @@ -1104,6 +1129,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi 410, "Gone", "NotEnoughSharesError", self.GET, self.fileurls["1share"])) def _check_one_share(body): + body = str(body, "utf-8") self.failIfIn("", body) body = " ".join(body.strip().split()) msgbase = ("NotEnoughSharesError: This indicates that some " @@ -1128,10 +1154,11 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi 404, "Not Found", None, self.GET, self.fileurls["imaginary"])) def _missing_child(body): + body = str(body, "utf-8") self.failUnlessIn("No such child: imaginary", body) d.addCallback(_missing_child) - d.addCallback(lambda ignored: self.GET(self.fileurls["dir-0share"])) + d.addCallback(lambda ignored: self.GET_unicode(self.fileurls["dir-0share"])) def _check_0shares_dir_html(body): self.failUnlessIn(DIR_HTML_TAG, body) # we should see the regular page, but without the child table or @@ -1150,7 +1177,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi self.failUnlessIn("No upload forms: directory is unreadable", body) d.addCallback(_check_0shares_dir_html) - d.addCallback(lambda ignored: self.GET(self.fileurls["dir-1share"])) + d.addCallback(lambda ignored: self.GET_unicode(self.fileurls["dir-1share"])) def _check_1shares_dir_html(body): # at some point, we'll split UnrecoverableFileError into 0-shares # and some-shares like we did for immutable files (since there @@ -1177,6 +1204,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi self.GET, self.fileurls["dir-0share-json"])) def _check_unrecoverable_file(body): + body = str(body, "utf-8") self.failIfIn("", body) body = " ".join(body.strip().split()) exp = ("UnrecoverableFileError: the directory (or mutable file) " @@ -1204,7 +1232,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi # attach a webapi child that throws a random error, to test how it # gets rendered. 
w = c0.getServiceNamed("webish")
-        w.root.putChild("ERRORBOOM", ErrorBoom())
+        w.root.putChild(b"ERRORBOOM", ErrorBoom())

        # "Accept: */*" : should get a text/html stack trace
        # "Accept: text/plain" : should get a text/plain stack trace
@@ -1217,6 +1245,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
                                            self.GET, "ERRORBOOM",
                                            headers={"accept": "*/*"}))
         def _internal_error_html1(body):
+            body = str(body, "utf-8")
             self.failUnlessIn("<html>", body, "expected HTML, not '%s'" % body)
         d.addCallback(_internal_error_html1)

@@ -1226,6 +1255,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
                                            self.GET, "ERRORBOOM",
                                            headers={"accept": "text/plain"}))
         def _internal_error_text2(body):
+            body = str(body, "utf-8")
             self.failIfIn("<html>", body)
             self.failUnless(body.startswith("Traceback "), body)
         d.addCallback(_internal_error_text2)

@@ -1237,6 +1267,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
                                            self.GET, "ERRORBOOM",
                                            headers={"accept": CLI_accepts}))
         def _internal_error_text3(body):
+            body = str(body, "utf-8")
             self.failIfIn("<html>", body)
             self.failUnless(body.startswith("Traceback "), body)
         d.addCallback(_internal_error_text3)

@@ -1246,7 +1277,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
                                            500, "Internal Server Error", None,
                                            self.GET, "ERRORBOOM"))
         def _internal_error_html4(body):
-            self.failUnlessIn("<html>", body)
+            self.failUnlessIn(b"<html>", body)
         d.addCallback(_internal_error_html4)

         def _flush_errors(res):
@@ -1264,12 +1295,12 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
         c0 = self.g.clients[0]
         fn = c0.config.get_config_path("access.blacklist")
         self.uris = {}
-        DATA = "off-limits " * 50
-        d = c0.upload(upload.Data(DATA, convergence=""))
+        DATA = b"off-limits " * 50
+        d = c0.upload(upload.Data(DATA, convergence=b""))
         def _stash_uri_and_create_dir(ur):
             self.uri = ur.get_uri()
-            self.url = "uri/"+self.uri
+            self.url = b"uri/"+self.uri
             u = uri.from_string_filenode(self.uri)
             self.si = u.get_storage_index()
             childnode = c0.create_node_from_uri(self.uri, None)
@@ -1278,9 +1309,9 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
         def _stash_dir(node):
             self.dir_node = node
             self.dir_uri = node.get_uri()
-            self.dir_url = "uri/"+self.dir_uri
+            self.dir_url = b"uri/"+self.dir_uri
         d.addCallback(_stash_dir)
-        d.addCallback(lambda ign: self.GET(self.dir_url, followRedirect=True))
+        d.addCallback(lambda ign: self.GET_unicode(self.dir_url, followRedirect=True))
         def _check_dir_html(body):
             self.failUnlessIn(DIR_HTML_TAG, body)
             self.failUnlessIn("blacklisted.txt", body)
@@ -1293,7 +1324,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
             f.write(" # this is a comment\n")
             f.write(" \n")
             f.write("\n") # also exercise blank lines
-            f.write("%s %s\n" % (base32.b2a(self.si), "off-limits to you"))
+            f.write("%s off-limits to you\n" % (str(base32.b2a(self.si), "ascii"),))
             f.close()
         # clients should be checking the blacklist each time, so we don't
         # need to restart the client
@@ -1304,14 +1335,14 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
                                            self.GET, self.url))

        # We should still be able to list the parent directory, in HTML...
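        # (Each non-comment access.blacklist line has the form
        # "<base32 storage index> <reason>", as written above, and the file
        # is rechecked on each request, so no client restart is needed for
        # new entries to take effect.)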
- d.addCallback(lambda ign: self.GET(self.dir_url, followRedirect=True)) + d.addCallback(lambda ign: self.GET_unicode(self.dir_url, followRedirect=True)) def _check_dir_html2(body): self.failUnlessIn(DIR_HTML_TAG, body) self.failUnlessIn("blacklisted.txt", body) d.addCallback(_check_dir_html2) # ... and in JSON (used by CLI). - d.addCallback(lambda ign: self.GET(self.dir_url+"?t=json", followRedirect=True)) + d.addCallback(lambda ign: self.GET(self.dir_url+b"?t=json", followRedirect=True)) def _check_dir_json(res): data = json.loads(res) self.failUnless(isinstance(data, list), data) @@ -1350,14 +1381,14 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi d.addCallback(_add_dir) def _get_dircap(dn): self.dir_si_b32 = base32.b2a(dn.get_storage_index()) - self.dir_url_base = "uri/"+dn.get_write_uri() - self.dir_url_json1 = "uri/"+dn.get_write_uri()+"?t=json" - self.dir_url_json2 = "uri/"+dn.get_write_uri()+"?t=json" - self.dir_url_json_ro = "uri/"+dn.get_readonly_uri()+"?t=json" - self.child_url = "uri/"+dn.get_readonly_uri()+"/child" + self.dir_url_base = b"uri/"+dn.get_write_uri() + self.dir_url_json1 = b"uri/"+dn.get_write_uri()+b"?t=json" + self.dir_url_json2 = b"uri/"+dn.get_write_uri()+b"?t=json" + self.dir_url_json_ro = b"uri/"+dn.get_readonly_uri()+b"?t=json" + self.child_url = b"uri/"+dn.get_readonly_uri()+b"/child" d.addCallback(_get_dircap) d.addCallback(lambda ign: self.GET(self.dir_url_base, followRedirect=True)) - d.addCallback(lambda body: self.failUnlessIn(DIR_HTML_TAG, body)) + d.addCallback(lambda body: self.failUnlessIn(DIR_HTML_TAG, str(body, "utf-8"))) d.addCallback(lambda ign: self.GET(self.dir_url_json1)) d.addCallback(lambda res: json.loads(res)) # just check it decodes d.addCallback(lambda ign: self.GET(self.dir_url_json2)) @@ -1368,8 +1399,8 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi d.addCallback(lambda body: self.failUnlessEqual(DATA, body)) def _block_dir(ign): - f = open(fn, "w") - f.write("%s %s\n" % (self.dir_si_b32, "dir-off-limits to you")) + f = open(fn, "wb") + f.write(b"%s %s\n" % (self.dir_si_b32, b"dir-off-limits to you")) f.close() self.g.clients[0].blacklist.last_mtime -= 2.0 d.addCallback(_block_dir) diff --git a/src/allmydata/test/web/test_introducer.py b/src/allmydata/test/web/test_introducer.py index bf6ef6a4b..08d95bda9 100644 --- a/src/allmydata/test/web/test_introducer.py +++ b/src/allmydata/test/web/test_introducer.py @@ -1,3 +1,15 @@ +""" +Ported to Python 3. 
+""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 + import json from os.path import join @@ -127,7 +139,7 @@ class IntroducerWeb(unittest.TestCase): assert_soup_has_text( self, soup, - u"%s: %s" % (allmydata.__appname__, allmydata.__version__), + allmydata.__full_version__, ) assert_soup_has_text(self, soup, u"no peers!") assert_soup_has_text(self, soup, u"subscribers!") @@ -213,7 +225,7 @@ class IntroducerRootTests(unittest.TestCase): resource = IntroducerRoot(introducer_node) response = json.loads( self.successResultOf( - render(resource, {"t": [b"json"]}), + render(resource, {b"t": [b"json"]}), ), ) self.assertEqual( diff --git a/src/allmydata/test/web/test_logs.py b/src/allmydata/test/web/test_logs.py index 4895ed6f0..5d697f910 100644 --- a/src/allmydata/test/web/test_logs.py +++ b/src/allmydata/test/web/test_logs.py @@ -1,5 +1,7 @@ """ Tests for ``allmydata.web.logs``. + +Ported to Python 3. """ from __future__ import ( @@ -9,6 +11,19 @@ from __future__ import ( division, ) +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 + +import json + +from twisted.trial import unittest +from twisted.internet.defer import inlineCallbacks + +from eliot import log_call + +from autobahn.twisted.testing import create_memory_agent, MemoryReactorClockResolver, create_pumper + from testtools.matchers import ( Equals, ) @@ -37,6 +52,7 @@ from ..common import ( from ...web.logs import ( create_log_resources, + TokenAuthenticatedWebSocketServerProtocol, ) class StreamingEliotLogsTests(SyncTestCase): @@ -57,3 +73,47 @@ class StreamingEliotLogsTests(SyncTestCase): self.client.get(b"http:///v1"), succeeded(has_response_code(Equals(OK))), ) + + +class TestStreamingLogs(unittest.TestCase): + """ + Test websocket streaming of logs + """ + + def setUp(self): + self.reactor = MemoryReactorClockResolver() + self.pumper = create_pumper() + self.agent = create_memory_agent(self.reactor, self.pumper, TokenAuthenticatedWebSocketServerProtocol) + return self.pumper.start() + + def tearDown(self): + return self.pumper.stop() + + @inlineCallbacks + def test_one_log(self): + """ + write a single Eliot log and see it streamed via websocket + """ + + proto = yield self.agent.open( + transport_config=u"ws://localhost:1234/ws", + options={}, + ) + + messages = [] + def got_message(msg, is_binary=False): + messages.append(json.loads(msg)) + proto.on("message", got_message) + + @log_call(action_type=u"test:cli:some-exciting-action") + def do_a_thing(): + pass + + do_a_thing() + + proto.transport.loseConnection() + yield proto.is_closed + + self.assertEqual(len(messages), 2) + self.assertEqual("started", messages[0]["action_status"]) + self.assertEqual("succeeded", messages[1]["action_status"]) diff --git a/src/allmydata/test/web/test_private.py b/src/allmydata/test/web/test_private.py index 27ddbcf78..b426b4d93 100644 --- a/src/allmydata/test/web/test_private.py +++ b/src/allmydata/test/web/test_private.py @@ -1,5 +1,7 @@ """ Tests for ``allmydata.web.private``. + +Ported to Python 3. 
""" from __future__ import ( @@ -9,6 +11,10 @@ from __future__ import ( division, ) +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 + from testtools.matchers import ( Equals, ) @@ -56,6 +62,7 @@ class PrivacyTests(SyncTestCase): return super(PrivacyTests, self).setUp() def _authorization(self, scheme, value): + value = str(value, "utf-8") return Headers({ u"authorization": [u"{} {}".format(scheme, value)], }) @@ -90,7 +97,7 @@ class PrivacyTests(SyncTestCase): self.assertThat( self.client.head( b"http:///foo/bar", - headers=self._authorization(SCHEME, u"foo bar"), + headers=self._authorization(str(SCHEME, "utf-8"), b"foo bar"), ), succeeded(has_response_code(Equals(UNAUTHORIZED))), ) @@ -103,7 +110,7 @@ class PrivacyTests(SyncTestCase): self.assertThat( self.client.head( b"http:///foo/bar", - headers=self._authorization(SCHEME, self.token), + headers=self._authorization(str(SCHEME, "utf-8"), self.token), ), # It's a made up URL so we don't get a 200, either, but a 404. succeeded(has_response_code(Equals(NOT_FOUND))), diff --git a/src/allmydata/test/web/test_root.py b/src/allmydata/test/web/test_root.py index 1a29b7a15..ca3cc695d 100644 --- a/src/allmydata/test/web/test_root.py +++ b/src/allmydata/test/web/test_root.py @@ -1,8 +1,24 @@ -from mock import Mock +""" +Ported to Python 3. +""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 import time -from bs4 import BeautifulSoup +from urllib.parse import ( + quote, +) + +from bs4 import ( + BeautifulSoup, +) from twisted.trial import unittest from twisted.web.template import Tag @@ -18,13 +34,12 @@ from ...util.connection_status import ConnectionStatus from allmydata.web.root import URIHandler from allmydata.client import _Client -from hypothesis import given -from hypothesis.strategies import text - from .common import ( - assert_soup_has_tag_with_content, + assert_soup_has_tag_with_attributes, +) +from ..common_web import ( + render, ) - from ..common import ( EMPTY_CLIENT_CONFIG, ) @@ -36,65 +51,45 @@ class RenderSlashUri(unittest.TestCase): """ def setUp(self): - self.request = DummyRequest(b"/uri") - self.request.fields = {} - - def prepathURL(): - return b"http://127.0.0.1.99999/" + b"/".join(self.request.prepath) - - self.request.prePathURL = prepathURL - self.client = Mock() + self.client = object() self.res = URIHandler(self.client) - def test_valid(self): + def test_valid_query_redirect(self): """ - A valid capbility does not result in error + A syntactically valid capability given in the ``uri`` query argument + results in a redirect. 
""" - self.request.args[b"uri"] = [( + cap = ( b"URI:CHK:nt2xxmrccp7sursd6yh2thhcky:" b"mukesarwdjxiyqsjinbfiiro6q7kgmmekocxfjcngh23oxwyxtzq:2:5:5874882" - )] - self.res.render_GET(self.request) + ) + query_args = {b"uri": [cap]} + response_body = self.successResultOf( + render(self.res, query_args), + ) + soup = BeautifulSoup(response_body, 'html5lib') + tag = assert_soup_has_tag_with_attributes( + self, + soup, + u"meta", + {u"http-equiv": "refresh"}, + ) + self.assertIn( + quote(cap, safe=""), + tag.attrs.get(u"content"), + ) def test_invalid(self): """ - A (trivially) invalid capbility is an error + A syntactically invalid capbility results in an error. """ - self.request.args[b"uri"] = [b"not a capability"] - response_body = self.res.render_GET(self.request) - - soup = BeautifulSoup(response_body, 'html5lib') - - assert_soup_has_tag_with_content( - self, soup, "title", "400 - Error", + query_args = {b"uri": [b"not a capability"]} + response_body = self.successResultOf( + render(self.res, query_args), ) - assert_soup_has_tag_with_content( - self, soup, "h1", "Error", - ) - assert_soup_has_tag_with_content( - self, soup, "p", "Invalid capability", - ) - - @given( - text() - ) - def test_hypothesis_error_caps(self, cap): - """ - Let hypothesis try a bunch of invalid capabilities - """ - self.request.args[b"uri"] = [cap.encode('utf8')] - response_body = self.res.render_GET(self.request) - - soup = BeautifulSoup(response_body, 'html5lib') - - assert_soup_has_tag_with_content( - self, soup, "title", "400 - Error", - ) - assert_soup_has_tag_with_content( - self, soup, "h1", "Error", - ) - assert_soup_has_tag_with_content( - self, soup, "p", "Invalid capability", + self.assertEqual( + response_body, + b"Invalid capability", ) @@ -109,7 +104,7 @@ class RenderServiceRow(unittest.TestCase): ann = {"anonymous-storage-FURL": "pb://w2hqnbaa25yw4qgcvghl5psa3srpfgw3@tcp:127.0.0.1:51309/vucto2z4fxment3vfxbqecblbf6zyp6x", "permutation-seed-base32": "w2hqnbaa25yw4qgcvghl5psa3srpfgw3", } - srv = NativeStorageServer("server_id", ann, None, {}, EMPTY_CLIENT_CONFIG) + srv = NativeStorageServer(b"server_id", ann, None, {}, EMPTY_CLIENT_CONFIG) srv.get_connection_status = lambda: ConnectionStatus(False, "summary", {}, 0, 0) class FakeClient(_Client): @@ -120,7 +115,7 @@ class RenderServiceRow(unittest.TestCase): tub_maker=None, node_config=EMPTY_CLIENT_CONFIG, ) - self.storage_broker.test_add_server("test-srv", srv) + self.storage_broker.test_add_server(b"test-srv", srv) root = RootElement(FakeClient(), time.time) req = DummyRequest(b"") diff --git a/src/allmydata/test/web/test_status.py b/src/allmydata/test/web/test_status.py index 5685a3938..414925446 100644 --- a/src/allmydata/test/web/test_status.py +++ b/src/allmydata/test/web/test_status.py @@ -1,6 +1,16 @@ """ Tests for ```allmydata.web.status```. + +Ported to Python 3. """ +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 from bs4 import BeautifulSoup from twisted.web.template import flattenString @@ -143,12 +153,12 @@ class DownloadStatusElementTests(TrialTestCase): See if we can render the page almost fully. 
""" status = FakeDownloadStatus( - "si-1", 123, - ["s-1", "s-2", "s-3"], - {"s-1": "unknown problem"}, - {"s-1": [1], "s-2": [1,2], "s-3": [2,3]}, + b"si-1", 123, + [b"s-1", b"s-2", b"s-3"], + {b"s-1": "unknown problem"}, + {b"s-1": [1], b"s-2": [1,2], b"s-3": [2,3]}, {"fetch_per_server": - {"s-1": [1], "s-2": [2,3], "s-3": [3,2]}} + {b"s-1": [1], b"s-2": [2,3], b"s-3": [3,2]}} ) result = self._render_download_status_element(status) diff --git a/src/allmydata/test/web/test_token.py b/src/allmydata/test/web/test_token.py deleted file mode 100644 index a439702af..000000000 --- a/src/allmydata/test/web/test_token.py +++ /dev/null @@ -1,105 +0,0 @@ -from zope.interface import implementer -from twisted.trial import unittest -from twisted.web import server -from nevow.inevow import IRequest -from allmydata.web import common - -# XXX FIXME when we introduce "mock" as a dependency, these can -# probably just be Mock instances -@implementer(IRequest) -class FakeRequest(object): - def __init__(self): - self.method = "POST" - self.fields = dict() - self.args = dict() - - -class FakeField(object): - def __init__(self, *values): - if len(values) == 1: - self.value = values[0] - else: - self.value = list(values) - - -class FakeClientWithToken(object): - token = 'a' * 32 - - def get_auth_token(self): - return self.token - - -class TestTokenOnlyApi(unittest.TestCase): - - def setUp(self): - self.client = FakeClientWithToken() - self.page = common.TokenOnlyWebApi(self.client) - - def test_not_post(self): - req = FakeRequest() - req.method = "GET" - - self.assertRaises( - server.UnsupportedMethod, - self.page.render, req, - ) - - def test_missing_token(self): - req = FakeRequest() - - exc = self.assertRaises( - common.WebError, - self.page.render, req, - ) - self.assertEquals(exc.text, "Missing token") - self.assertEquals(exc.code, 401) - - def test_token_in_get_args(self): - req = FakeRequest() - req.args['token'] = 'z' * 32 - - exc = self.assertRaises( - common.WebError, - self.page.render, req, - ) - self.assertEquals(exc.text, "Do not pass 'token' as URL argument") - self.assertEquals(exc.code, 400) - - def test_invalid_token(self): - wrong_token = 'b' * 32 - req = FakeRequest() - req.fields['token'] = FakeField(wrong_token) - - exc = self.assertRaises( - common.WebError, - self.page.render, req, - ) - self.assertEquals(exc.text, "Invalid token") - self.assertEquals(exc.code, 401) - - def test_valid_token_no_t_arg(self): - req = FakeRequest() - req.fields['token'] = FakeField(self.client.token) - - with self.assertRaises(common.WebError) as exc: - self.page.render(req) - self.assertEquals(exc.exception.text, "Must provide 't=' argument") - self.assertEquals(exc.exception.code, 400) - - def test_valid_token_invalid_t_arg(self): - req = FakeRequest() - req.fields['token'] = FakeField(self.client.token) - req.args['t'] = 'not at all json' - - with self.assertRaises(common.WebError) as exc: - self.page.render(req) - self.assertTrue("invalid type" in exc.exception.text) - self.assertEquals(exc.exception.code, 400) - - def test_valid(self): - req = FakeRequest() - req.fields['token'] = FakeField(self.client.token) - req.args['t'] = ['json'] - - result = self.page.render(req) - self.assertTrue(result == NotImplemented) diff --git a/src/allmydata/test/web/test_util.py b/src/allmydata/test/web/test_util.py index 24f865ebc..5f4d6bb88 100644 --- a/src/allmydata/test/web/test_util.py +++ b/src/allmydata/test/web/test_util.py @@ -1,3 +1,15 @@ +""" +Ported to Python 3. 
+""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 + from twisted.trial import unittest from allmydata.web import status, common from ..common import ShouldFailMixin diff --git a/src/allmydata/test/web/test_web.py b/src/allmydata/test/web/test_web.py index 508fc82d4..2f000b7a1 100644 --- a/src/allmydata/test/web/test_web.py +++ b/src/allmydata/test/web/test_web.py @@ -6,6 +6,9 @@ import treq from bs4 import BeautifulSoup +from twisted.python.filepath import ( + FilePath, +) from twisted.application import service from twisted.internet import defer from twisted.internet.defer import inlineCallbacks, returnValue @@ -87,7 +90,7 @@ class FakeNodeMaker(NodeMaker): return FakeMutableFileNode(None, None, self.encoding_params, None, self.all_contents).init_from_cap(cap) - def create_mutable_file(self, contents="", keysize=None, + def create_mutable_file(self, contents=b"", keysize=None, version=SDMF_VERSION): n = FakeMutableFileNode(None, None, self.encoding_params, None, self.all_contents) @@ -102,7 +105,7 @@ class FakeUploader(service.Service): d = uploadable.get_size() d.addCallback(lambda size: uploadable.read(size)) def _got_data(datav): - data = "".join(datav) + data = b"".join(datav) n = create_chk_filenode(data, self.all_contents) ur = upload.UploadResults(file_size=len(data), ciphertext_fetched=0, @@ -124,12 +127,12 @@ class FakeUploader(service.Service): def build_one_ds(): - ds = DownloadStatus("storage_index", 1234) + ds = DownloadStatus(b"storage_index", 1234) now = time.time() - serverA = StubServer(hashutil.tagged_hash("foo", "serverid_a")[:20]) - serverB = StubServer(hashutil.tagged_hash("foo", "serverid_b")[:20]) - storage_index = hashutil.storage_index_hash("SI") + serverA = StubServer(hashutil.tagged_hash(b"foo", b"serverid_a")[:20]) + serverB = StubServer(hashutil.tagged_hash(b"foo", b"serverid_b")[:20]) + storage_index = hashutil.storage_index_hash(b"SI") e0 = ds.add_segment_request(0, now) e0.activate(now+0.5) e0.deliver(now+1, 0, 100, 0.5) # when, start,len, decodetime @@ -186,7 +189,7 @@ class FakeHistory(object): def list_all_helper_statuses(self): return [] -class FakeDisplayableServer(StubServer): +class FakeDisplayableServer(StubServer): # type: ignore # tahoe-lafs/ticket/3573 def __init__(self, serverid, nickname, connected, last_connect_time, last_loss_time, last_rx_time): StubServer.__init__(self, serverid) @@ -252,13 +255,13 @@ class FakeStorageServer(service.MultiService): def on_status_changed(self, cb): cb(self) -class FakeClient(_Client): +class FakeClient(_Client): # type: ignore # tahoe-lafs/ticket/3573 def __init__(self): # don't upcall to Client.__init__, since we only want to initialize a # minimal subset service.MultiService.__init__(self) self.all_contents = {} - self.nodeid = "fake_nodeid" + self.nodeid = b"fake_nodeid" self.nickname = u"fake_nickname \u263A" self.introducer_furls = [] self.introducer_clients = [] @@ -274,7 +277,7 @@ class FakeClient(_Client): # fake knowledge of another server self.storage_broker.test_add_server("other_nodeid", FakeDisplayableServer( - serverid="other_nodeid", nickname=u"other_nickname \u263B", connected = True, + serverid=b"other_nodeid", nickname=u"other_nickname \u263B", connected = True, last_connect_time 
= 10, last_loss_time = 20, last_rx_time = 30))
         self.storage_broker.test_add_server("disconnected_nodeid",
             FakeDisplayableServer(
@@ -316,8 +319,16 @@ class WebMixin(TimezoneMixin):
         self.staticdir = self.mktemp()
         self.clock = Clock()
         self.fakeTime = 86460 # 1d 0h 1m 0s
-        self.ws = webish.WebishServer(self.s, "0", staticdir=self.staticdir,
-                                      clock=self.clock, now_fn=lambda:self.fakeTime)
+        tempdir = FilePath(self.mktemp())
+        tempdir.makedirs()
+        self.ws = webish.WebishServer(
+            self.s,
+            "0",
+            tempdir=tempdir.path,
+            staticdir=self.staticdir,
+            clock=self.clock,
+            now_fn=lambda:self.fakeTime,
+        )
         self.ws.setServiceParent(self.s)
         self.webish_port = self.ws.getPortnum()
         self.webish_url = self.ws.getURL()
@@ -735,7 +746,10 @@ class MultiFormatResourceTests(TrialTestCase):
             "400 - Bad Format", response_body,
         )
         self.assertIn(
-            "Unknown t value: 'foo'", response_body,
+            "Unknown t value:", response_body,
+        )
+        self.assertIn(
+            "'foo'", response_body,
         )
 
 
@@ -4743,6 +4757,31 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
         op_url = self.webish_url + "/operations/134?t=status&output=JSON"
         yield self.assertHTTPError(op_url, 404, "unknown/expired handle '134'")
 
+    @inlineCallbacks
+    def test_uri_redirect(self):
+        """URI redirects don't cause failure.
+
+        Unit test reproducer for https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3590
+        """
+        def req(method, path, **kwargs):
+            return treq.request(method, self.webish_url + path, persistent=False,
+                                **kwargs)
+
+        response = yield req("POST", "/uri?format=sdmf&t=mkdir")
+        dircap = yield response.content()
+        assert dircap.startswith('URI:DIR2:')
+        dircap_uri = "/uri/?uri={}&t=json".format(urllib.quote(dircap))
+
+        response = yield req(
+            "GET",
+            dircap_uri,
+        )
+        self.assertEqual(
+            response.request.absoluteURI,
+            self.webish_url + "/uri/{}?t=json".format(urllib.quote(dircap)))
+        if response.code >= 400:
+            raise Error(response.code, response=response.content())
+
     def test_incident(self):
         d = self.POST("/report_incident", details="eek")
         def _done(res):
@@ -4767,11 +4806,9 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
     def test_static_missing(self):
         # self.staticdir does not exist yet, because we used self.mktemp()
         d = self.assertFailure(self.GET("/static"), error.Error)
-        # nevow.static throws an exception when it tries to os.stat the
-        # missing directory, which gives the client a 500 Internal Server
-        # Error, and the traceback reveals the parent directory name. By
-        # switching to plain twisted.web.static, this gives a normal 404 that
-        # doesn't reveal anything. This addresses #1720.
+        # If os.stat raises an exception for the missing directory and the
+        # traceback reveals the parent directory name, we don't want to see
+        # that parent directory name in the response. This addresses #1720.
         d.addCallback(lambda e: self.assertEquals(str(e), "404 Not Found"))
         return d
diff --git a/src/allmydata/test/web/test_webish.py b/src/allmydata/test/web/test_webish.py
new file mode 100644
index 000000000..12a04a6eb
--- /dev/null
+++ b/src/allmydata/test/web/test_webish.py
@@ -0,0 +1,336 @@
+"""
+Tests for ``allmydata.webish``.
+
+Ported to Python 3.
+""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 + +from uuid import ( + uuid4, +) +from errno import ( + EACCES, +) +from io import ( + BytesIO, +) + +from hypothesis import ( + given, +) +from hypothesis.strategies import ( + integers, +) + +from testtools.matchers import ( + AfterPreprocessing, + Contains, + Equals, + MatchesAll, + Not, + IsInstance, + HasLength, +) + +from twisted.python.runtime import ( + platform, +) +from twisted.python.filepath import ( + FilePath, +) +from twisted.web.test.requesthelper import ( + DummyChannel, +) +from twisted.web.resource import ( + Resource, +) + +from ..common import ( + SyncTestCase, +) + +from ...webish import ( + TahoeLAFSRequest, + TahoeLAFSSite, +) + + +class TahoeLAFSRequestTests(SyncTestCase): + """ + Tests for ``TahoeLAFSRequest``. + """ + def _fields_test(self, method, request_headers, request_body, match_fields): + channel = DummyChannel() + request = TahoeLAFSRequest( + channel, + ) + for (k, v) in request_headers.items(): + request.requestHeaders.setRawHeaders(k, [v]) + request.gotLength(len(request_body)) + request.handleContentChunk(request_body) + request.requestReceived(method, b"/", b"HTTP/1.1") + + # We don't really care what happened to the request. What we do care + # about is what the `fields` attribute is set to. + self.assertThat( + request.fields, + match_fields, + ) + + def test_no_form_fields(self): + """ + When a ``GET`` request is received, ``TahoeLAFSRequest.fields`` is None. + """ + self._fields_test(b"GET", {}, b"", Equals(None)) + + def test_form_fields(self): + """ + When a ``POST`` request is received, form fields are parsed into + ``TahoeLAFSRequest.fields``. + """ + form_data, boundary = multipart_formdata([ + [param(u"name", u"foo"), + body(u"bar"), + ], + [param(u"name", u"baz"), + param(u"filename", u"quux"), + body(u"some file contents"), + ], + ]) + self._fields_test( + b"POST", + {b"content-type": b"multipart/form-data; boundary=" + bytes(boundary, 'ascii')}, + form_data.encode("ascii"), + AfterPreprocessing( + lambda fs: { + k: fs.getvalue(k) + for k + in fs.keys() + }, + Equals({ + "foo": "bar", + "baz": b"some file contents", + }), + ), + ) + + +class TahoeLAFSSiteTests(SyncTestCase): + """ + Tests for ``TahoeLAFSSite``. + """ + def _test_censoring(self, path, censored): + """ + Verify that the event logged for a request for ``path`` does not include + ``path`` but instead includes ``censored``. + + :param bytes path: A request path. + + :param bytes censored: A replacement value for the request path in the + access log. + + :return: ``None`` if the logging looks good. + """ + logPath = self.mktemp() + + site = TahoeLAFSSite(self.mktemp(), Resource(), logPath=logPath) + site.startFactory() + + channel = DummyChannel() + channel.factory = site + request = TahoeLAFSRequest(channel) + + request.gotLength(None) + request.requestReceived(b"GET", path, b"HTTP/1.1") + + self.assertThat( + FilePath(logPath).getContent(), + MatchesAll( + Contains(censored), + Not(Contains(path)), + ), + ) + + def test_uri_censoring(self): + """ + The log event for a request for **/uri/** has the capability value + censored. 
+        """
+        self._test_censoring(
+            b"/uri/URI:CHK:aaa:bbb",
+            b"/uri/[CENSORED]",
+        )
+
+    def test_file_censoring(self):
+        """
+        The log event for a request for **/file/** has the capability value
+        censored.
+        """
+        self._test_censoring(
+            b"/file/URI:CHK:aaa:bbb",
+            b"/file/[CENSORED]",
+        )
+
+    def test_named_censoring(self):
+        """
+        The log event for a request for **/named/** has the capability value
+        censored.
+        """
+        self._test_censoring(
+            b"/named/URI:CHK:aaa:bbb",
+            b"/named/[CENSORED]",
+        )
+
+    def test_uri_queryarg_censoring(self):
+        """
+        The log event for a request for **/uri?uri=** has the capability
+        value censored.
+        """
+        self._test_censoring(
+            b"/uri?uri=URI:CHK:aaa:bbb",
+            b"/uri?uri=[CENSORED]",
+        )
+
+    def _create_request(self, tempdir):
+        """
+        Create and return a new ``TahoeLAFSRequest`` hooked up to a
+        ``TahoeLAFSSite``.
+
+        :param FilePath tempdir: The temporary directory to give to the site.
+
+        :return TahoeLAFSRequest: The new request instance.
+        """
+        site = TahoeLAFSSite(tempdir.path, Resource(), logPath=self.mktemp())
+        site.startFactory()
+
+        channel = DummyChannel()
+        channel.site = site
+        request = TahoeLAFSRequest(channel)
+        return request
+
+    @given(integers(min_value=0, max_value=1024 * 1024 - 1))
+    def test_small_content(self, request_body_size):
+        """
+        A request body smaller than 1 MiB is kept in memory.
+        """
+        tempdir = FilePath(self.mktemp())
+        request = self._create_request(tempdir)
+        request.gotLength(request_body_size)
+        self.assertThat(
+            request.content,
+            IsInstance(BytesIO),
+        )
+
+    def _large_request_test(self, request_body_size):
+        """
+        Assert that when a request with a body of the given size is received
+        its content is written to the directory the ``TahoeLAFSSite`` is
+        configured with.
+        """
+        tempdir = FilePath(self.mktemp())
+        tempdir.makedirs()
+        request = self._create_request(tempdir)
+
+        # So. Bad news. The temporary file for the uploaded content is
+        # unnamed (and this isn't even necessarily a bad thing since it is how
+        # you get automatic on-process-exit cleanup behavior on POSIX). It's
+        # not visible by inspecting the filesystem. It has no name we can
+        # discover. Then how do we verify it is written to the right place?
+        # The question itself is meaningless if we try to be too precise. It
+        # *has* no filesystem location. However, it is still stored *on* some
+        # filesystem. We still want to make sure it is on the filesystem we
+        # specified because otherwise it might be on a filesystem that's too
+        # small or undesirable in some other way.
+        #
+        # I don't know of any way to ask a file descriptor which filesystem
+        # it's on, either, though. It might be the case that the [f]statvfs()
+        # result could be compared somehow to infer the filesystem but
+        # ... it's not clear what the failure modes might be there, across
+        # different filesystems and runtime environments.
+        #
+        # Another approach is to make the temp directory unwriteable and
+        # observe the failure when an attempt is made to create a file there.
+        # This is hardly a lovely solution but at least it's kind of simple.
+        #
+        # It would be nice if it worked consistently cross-platform but on
+        # Windows os.chmod is more or less broken.
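+        #
+        # So, concretely, the branches below do this: on Windows, just check
+        # that a file shows up in the directory after gotLength(); everywhere
+        # else, revoke write permission on the directory and expect the file
+        # creation attempt to fail with EACCES.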
+        if platform.isWindows():
+            request.gotLength(request_body_size)
+            self.assertThat(
+                tempdir.children(),
+                HasLength(1),
+            )
+        else:
+            tempdir.chmod(0o550)
+            with self.assertRaises(OSError) as ctx:
+                request.gotLength(request_body_size)
+                raise Exception(
+                    "OSError not raised, instead tempdir.children() = {}".format(
+                        tempdir.children(),
+                    ),
+                )
+
+            self.assertThat(
+                ctx.exception.errno,
+                Equals(EACCES),
+            )
+
+    def test_unknown_request_size(self):
+        """
+        A request body with an unknown size is written to a file in the temporary
+        directory passed to ``TahoeLAFSSite``.
+        """
+        self._large_request_test(None)
+
+    @given(integers(min_value=1024 * 1024))
+    def test_large_request(self, request_body_size):
+        """
+        A request body of 1 MiB or more is written to a file in the temporary
+        directory passed to ``TahoeLAFSSite``.
+        """
+        self._large_request_test(request_body_size)
+
+
+def param(name, value):
+    return u"; {}={}".format(name, value)
+
+
+def body(value):
+    return u"\r\n\r\n{}".format(value)
+
+
+def _field(field):
+    yield u"Content-Disposition: form-data"
+    for param in field:
+        yield param
+
+
+def _multipart_formdata(fields):
+    for field in fields:
+        yield u"".join(_field(field)) + u"\r\n"
+
+
+def multipart_formdata(fields):
+    """
+    Serialize some simple fields into a multipart/form-data string.
+
+    :param fields: A list of lists of unicode strings to assemble into the
+        result. See ``param`` and ``body``.
+
+    :return: A two-tuple of the fields combined into a multipart/form-data
+        unicode string and the boundary unicode string.
+    """
+    boundary = str(uuid4())
+    parts = list(_multipart_formdata(fields))
+    parts.insert(0, u"")
+    return (
+        (u"--" + boundary + u"\r\n").join(parts),
+        boundary,
+    )
diff --git a/src/allmydata/unknown.py b/src/allmydata/unknown.py
index 6c970e484..060696293 100644
--- a/src/allmydata/unknown.py
+++ b/src/allmydata/unknown.py
@@ -1,3 +1,13 @@
+"""Ported to Python 3.
+""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 from zope.interface import implementer from twisted.internet import defer @@ -31,8 +41,8 @@ class UnknownNode(object): def __init__(self, given_rw_uri, given_ro_uri, deep_immutable=False, name=u""): - assert given_rw_uri is None or isinstance(given_rw_uri, str) - assert given_ro_uri is None or isinstance(given_ro_uri, str) + assert given_rw_uri is None or isinstance(given_rw_uri, bytes) + assert given_ro_uri is None or isinstance(given_ro_uri, bytes) given_rw_uri = given_rw_uri or None given_ro_uri = given_ro_uri or None @@ -182,3 +192,11 @@ class UnknownNode(object): def check_and_repair(self, monitor, verify, add_lease): return defer.succeed(None) + + def __eq__(self, other): + if not isinstance(other, UnknownNode): + return False + return other.ro_uri == self.ro_uri and other.rw_uri == self.rw_uri + + def __ne__(self, other): + return not (self == other) diff --git a/src/allmydata/uri.py b/src/allmydata/uri.py index 2c367cafe..51671b0ac 100644 --- a/src/allmydata/uri.py +++ b/src/allmydata/uri.py @@ -22,6 +22,11 @@ from past.builtins import unicode, long import re +try: + from typing import Type +except ImportError: + pass + from zope.interface import implementer from twisted.python.components import registerAdapter @@ -489,7 +494,7 @@ class MDMFVerifierURI(_BaseURI): return self -@implementer(IURI, IDirnodeURI) +@implementer(IDirnodeURI) class _DirectoryBaseURI(_BaseURI): def __init__(self, filenode_uri=None): self._filenode_uri = filenode_uri @@ -536,7 +541,7 @@ class _DirectoryBaseURI(_BaseURI): return self._filenode_uri.get_storage_index() -@implementer(IDirectoryURI) +@implementer(IURI, IDirectoryURI) class DirectoryURI(_DirectoryBaseURI): BASE_STRING=b'URI:DIR2:' @@ -555,7 +560,7 @@ class DirectoryURI(_DirectoryBaseURI): return ReadonlyDirectoryURI(self._filenode_uri.get_readonly()) -@implementer(IReadonlyDirectoryURI) +@implementer(IURI, IReadonlyDirectoryURI) class ReadonlyDirectoryURI(_DirectoryBaseURI): BASE_STRING=b'URI:DIR2-RO:' @@ -574,6 +579,7 @@ class ReadonlyDirectoryURI(_DirectoryBaseURI): return self +@implementer(IURI, IDirnodeURI) class _ImmutableDirectoryBaseURI(_DirectoryBaseURI): def __init__(self, filenode_uri=None): if filenode_uri: @@ -611,7 +617,7 @@ class LiteralDirectoryURI(_ImmutableDirectoryBaseURI): return None -@implementer(IDirectoryURI) +@implementer(IURI, IDirectoryURI) class MDMFDirectoryURI(_DirectoryBaseURI): BASE_STRING=b'URI:DIR2-MDMF:' @@ -633,7 +639,7 @@ class MDMFDirectoryURI(_DirectoryBaseURI): return MDMFDirectoryURIVerifier(self._filenode_uri.get_verify_cap()) -@implementer(IReadonlyDirectoryURI) +@implementer(IURI, IReadonlyDirectoryURI) class ReadonlyMDMFDirectoryURI(_DirectoryBaseURI): BASE_STRING=b'URI:DIR2-MDMF-RO:' @@ -671,7 +677,7 @@ def wrap_dirnode_cap(filecap): raise AssertionError("cannot interpret as a directory cap: %s" % filecap.__class__) -@implementer(IVerifierURI) +@implementer(IURI, IVerifierURI) class MDMFDirectoryURIVerifier(_DirectoryBaseURI): BASE_STRING=b'URI:DIR2-MDMF-Verifier:' @@ -696,12 +702,12 @@ class MDMFDirectoryURIVerifier(_DirectoryBaseURI): return self -@implementer(IVerifierURI) +@implementer(IURI, IVerifierURI) class 
DirectoryURIVerifier(_DirectoryBaseURI): BASE_STRING=b'URI:DIR2-Verifier:' BASE_STRING_RE=re.compile(b'^'+BASE_STRING) - INNER_URI_CLASS=SSKVerifierURI + INNER_URI_CLASS=SSKVerifierURI # type: Type[IVerifierURI] def __init__(self, filenode_uri=None): if filenode_uri: diff --git a/src/allmydata/util/_python3.py b/src/allmydata/util/_python3.py index 8c2f0ebed..38d0f4d7e 100644 --- a/src/allmydata/util/_python3.py +++ b/src/allmydata/util/_python3.py @@ -24,7 +24,10 @@ if PY2: # Keep these sorted alphabetically, to reduce merge conflicts: PORTED_MODULES = [ + "allmydata.__main__", + "allmydata._auto_deps", "allmydata._monkeypatch", + "allmydata.blacklist", "allmydata.codec", "allmydata.crypto", "allmydata.crypto.aes", @@ -32,7 +35,11 @@ PORTED_MODULES = [ "allmydata.crypto.error", "allmydata.crypto.rsa", "allmydata.crypto.util", + "allmydata.deep_stats", + "allmydata.dirnode", + "allmydata.frontends.sftpd", "allmydata.hashtree", + "allmydata.immutable.checker", "allmydata.immutable.downloader", "allmydata.immutable.downloader.common", "allmydata.immutable.downloader.fetcher", @@ -46,10 +53,27 @@ PORTED_MODULES = [ "allmydata.immutable.happiness_upload", "allmydata.immutable.layout", "allmydata.immutable.literal", + "allmydata.immutable.offloaded", + "allmydata.immutable.repairer", "allmydata.immutable.upload", "allmydata.interfaces", + "allmydata.introducer.client", + "allmydata.introducer.common", "allmydata.introducer.interfaces", + "allmydata.introducer.server", "allmydata.monitor", + "allmydata.mutable.checker", + "allmydata.mutable.common", + "allmydata.mutable.filenode", + "allmydata.mutable.layout", + "allmydata.mutable.publish", + "allmydata.mutable.repairer", + "allmydata.mutable.retrieve", + "allmydata.mutable.servermap", + "allmydata.node", + "allmydata.nodemaker", + "allmydata.stats", + "allmydata.storage_client", "allmydata.storage.common", "allmydata.storage.crawler", "allmydata.storage.expirer", @@ -59,6 +83,8 @@ PORTED_MODULES = [ "allmydata.storage.server", "allmydata.storage.shares", "allmydata.test.no_network", + "allmydata.test.mutable.util", + "allmydata.unknown", "allmydata.uri", "allmydata.util._python3", "allmydata.util.abbreviate", @@ -68,14 +94,17 @@ PORTED_MODULES = [ "allmydata.util.configutil", "allmydata.util.connection_status", "allmydata.util.deferredutil", - "allmydata.util.fileutil", "allmydata.util.dictutil", + "allmydata.util.eliotutil", "allmydata.util.encodingutil", + "allmydata.util.fileutil", "allmydata.util.gcutil", "allmydata.util.happinessutil", "allmydata.util.hashutil", "allmydata.util.humanreadable", + "allmydata.util.idlib", "allmydata.util.iputil", + "allmydata.util.jsonbytes", "allmydata.util.log", "allmydata.util.mathutil", "allmydata.util.namespace", @@ -86,12 +115,30 @@ PORTED_MODULES = [ "allmydata.util.spans", "allmydata.util.statistics", "allmydata.util.time_format", + "allmydata.web.logs", + "allmydata.webish", ] PORTED_TEST_MODULES = [ + "allmydata.test.mutable.test_checker", + "allmydata.test.mutable.test_datahandle", + "allmydata.test.mutable.test_different_encoding", + "allmydata.test.mutable.test_exceptions", + "allmydata.test.mutable.test_filehandle", + "allmydata.test.mutable.test_filenode", + "allmydata.test.mutable.test_interoperability", + "allmydata.test.mutable.test_multiple_encodings", + "allmydata.test.mutable.test_multiple_versions", + "allmydata.test.mutable.test_problems", + "allmydata.test.mutable.test_repair", + "allmydata.test.mutable.test_roundtrip", + "allmydata.test.mutable.test_servermap", + 
"allmydata.test.mutable.test_update", + "allmydata.test.mutable.test_version", "allmydata.test.test_abbreviate", "allmydata.test.test_base32", "allmydata.test.test_base62", + "allmydata.test.test_checker", "allmydata.test.test_codec", "allmydata.test.test_common_util", "allmydata.test.test_configutil", @@ -100,7 +147,9 @@ PORTED_TEST_MODULES = [ "allmydata.test.test_crypto", "allmydata.test.test_deferredutil", "allmydata.test.test_dictutil", + "allmydata.test.test_dirnode", "allmydata.test.test_download", + "allmydata.test.test_eliotutil", "allmydata.test.test_encode", "allmydata.test.test_encodingutil", "allmydata.test.test_filenode", @@ -110,21 +159,42 @@ PORTED_TEST_MODULES = [ "allmydata.test.test_helper", "allmydata.test.test_humanreadable", "allmydata.test.test_immutable", + "allmydata.test.test_introducer", "allmydata.test.test_iputil", + "allmydata.test.test_json_metadata", "allmydata.test.test_log", "allmydata.test.test_monitor", "allmydata.test.test_netstring", "allmydata.test.test_no_network", + "allmydata.test.test_node", "allmydata.test.test_observer", "allmydata.test.test_pipeline", "allmydata.test.test_python3", + "allmydata.test.test_repairer", + "allmydata.test.test_sftp", "allmydata.test.test_spans", "allmydata.test.test_statistics", + "allmydata.test.test_stats", "allmydata.test.test_storage", + "allmydata.test.test_storage_client", "allmydata.test.test_storage_web", + + # Only partially ported, test_filesystem_with_cli_in_subprocess and + # test_filesystem methods aren't ported yet, should be done once CLI and + # web are ported respectively. + "allmydata.test.test_system", + "allmydata.test.test_time_format", "allmydata.test.test_upload", "allmydata.test.test_uri", "allmydata.test.test_util", - "allmydata.test.test_version", + "allmydata.test.web.test_common", + "allmydata.test.web.test_grid", + "allmydata.test.web.test_introducer", + "allmydata.test.web.test_logs", + "allmydata.test.web.test_private", + "allmydata.test.web.test_root", + "allmydata.test.web.test_status", + "allmydata.test.web.test_util", + "allmydata.test.web.test_webish", ] diff --git a/src/allmydata/util/base32.py b/src/allmydata/util/base32.py index 287d214ea..10e54bd80 100644 --- a/src/allmydata/util/base32.py +++ b/src/allmydata/util/base32.py @@ -133,6 +133,8 @@ def a2b(cs): """ @param cs the base-32 encoded data (as bytes) """ + # Workaround Future newbytes issues by converting to real bytes on Python 2: + cs = backwardscompat_bytes(cs) precondition(could_be_base32_encoded(cs), "cs is required to be possibly base32 encoded data.", cs=cs) precondition(isinstance(cs, bytes), cs) @@ -140,7 +142,9 @@ def a2b(cs): # Add padding back, to make Python's base64 module happy: while (len(cs) * 5) % 8 != 0: cs += b"=" - return base64.b32decode(cs) + # Let newbytes come through and still work on Python 2, where the base64 + # module gets confused by them. + return base64.b32decode(backwardscompat_bytes(cs)) __all__ = ["b2a", "a2b", "b2a_or_none", "BASE32CHAR_3bits", "BASE32CHAR_1bits", "BASE32CHAR", "BASE32STR_anybytes", "could_be_base32_encoded"] diff --git a/src/allmydata/util/configutil.py b/src/allmydata/util/configutil.py index 1a1a93f18..ea64e1704 100644 --- a/src/allmydata/util/configutil.py +++ b/src/allmydata/util/configutil.py @@ -1,7 +1,7 @@ """ Read/write config files. -Configuration is returned as native strings. +Configuration is returned as Unicode strings. Ported to Python 3. 
""" @@ -12,20 +12,18 @@ from __future__ import unicode_literals from future.utils import PY2 if PY2: - # We don't do open(), because we want files to read/write native strs when - # we do "r" or "w". - from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 + from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 -if PY2: - # In theory on Python 2 configparser also works, but then code gets the - # wrong exceptions and they don't get handled. So just use native parser - # for now. - from ConfigParser import SafeConfigParser -else: - from configparser import SafeConfigParser +# On Python 2 we use the backport package; that means we always get unicode +# out. +from configparser import ConfigParser import attr +from twisted.python.runtime import ( + platform, +) + class UnknownConfigError(Exception): """ @@ -36,19 +34,27 @@ class UnknownConfigError(Exception): def get_config(tahoe_cfg): - """Load the config, returning a SafeConfigParser. + """Load the config, returning a ConfigParser. - Configuration is returned as native strings. + Configuration is returned as Unicode strings. """ - config = SafeConfigParser() - with open(tahoe_cfg, "r") as f: - # On Python 2, where we read in bytes, skip any initial Byte Order - # Mark. Since this is an ordinary file, we don't need to handle - # incomplete reads, and can assume seekability. - if PY2 and f.read(3) != b'\xEF\xBB\xBF': - f.seek(0) - config.readfp(f) - return config + # Byte Order Mark is an optional garbage code point you sometimes get at + # the start of UTF-8 encoded files. Especially on Windows. Skip it by using + # utf-8-sig. https://en.wikipedia.org/wiki/Byte_order_mark + with open(tahoe_cfg, "r", encoding="utf-8-sig") as f: + cfg_string = f.read() + return get_config_from_string(cfg_string) + + +def get_config_from_string(tahoe_cfg_string): + """Load the config from a string, return the ConfigParser. + + Configuration is returned as Unicode strings. + """ + parser = ConfigParser(strict=False) + parser.read_string(tahoe_cfg_string) + return parser + def set_config(config, section, option, value): if not config.has_section(section): @@ -57,8 +63,25 @@ def set_config(config, section, option, value): assert config.get(section, option) == value def write_config(tahoe_cfg, config): - with open(tahoe_cfg, "w") as f: - config.write(f) + """ + Write a configuration to a file. + + :param FilePath tahoe_cfg: The path to which to write the config. + + :param ConfigParser config: The configuration to write. + + :return: ``None`` + """ + tmp = tahoe_cfg.temporarySibling() + # FilePath.open can only open files in binary mode which does not work + # with ConfigParser.write. + with open(tmp.path, "wt") as fp: + config.write(fp) + # Windows doesn't have atomic overwrite semantics for moveTo. Thus we end + # up slightly less than atomic. + if platform.isWindows(): + tahoe_cfg.remove() + tmp.moveTo(tahoe_cfg) def validate_config(fname, cfg, valid_config): """ @@ -100,10 +123,34 @@ class ValidConfiguration(object): an item name as bytes and returns True if that section, item pair is valid, False otherwise. 
""" - _static_valid_sections = attr.ib() + _static_valid_sections = attr.ib( + validator=attr.validators.instance_of(dict) + ) _is_valid_section = attr.ib(default=lambda section_name: False) _is_valid_item = attr.ib(default=lambda section_name, item_name: False) + @classmethod + def everything(cls): + """ + Create a validator which considers everything valid. + """ + return cls( + {}, + lambda section_name: True, + lambda section_name, item_name: True, + ) + + @classmethod + def nothing(cls): + """ + Create a validator which considers nothing valid. + """ + return cls( + {}, + lambda section_name: False, + lambda section_name, item_name: False, + ) + def is_valid_section(self, section_name): """ :return: True if the given section name is valid, False otherwise. @@ -134,6 +181,23 @@ class ValidConfiguration(object): ) +def copy_config(old): + """ + Return a brand new ``ConfigParser`` containing the same values as + the given object. + + :param ConfigParser old: The configuration to copy. + + :return ConfigParser: The new object containing the same configuration. + """ + new = ConfigParser() + for section_name in old.sections(): + new.add_section(section_name) + for k, v in old.items(section_name): + new.set(section_name, k, v.replace("%", "%%")) + return new + + def _either(f, g): """ :return: A function which returns True if either f or g returns True. diff --git a/src/allmydata/util/deferredutil.py b/src/allmydata/util/deferredutil.py index 1d13f61e6..ed2a11ee4 100644 --- a/src/allmydata/util/deferredutil.py +++ b/src/allmydata/util/deferredutil.py @@ -15,7 +15,18 @@ if PY2: import time +try: + from typing import ( + Callable, + Any, + ) +except ImportError: + pass + from foolscap.api import eventually +from eliot.twisted import ( + inline_callbacks, +) from twisted.internet import defer, reactor, error from twisted.python.failure import Failure @@ -201,3 +212,22 @@ class WaitForDelayedCallsMixin(PollMixin): d.addErrback(log.err, "error while waiting for delayed calls") d.addBoth(lambda ign: res) return d + +@inline_callbacks +def until( + action, # type: Callable[[], defer.Deferred[Any]] + condition, # type: Callable[[], bool] +): + # type: (...) -> defer.Deferred[None] + """ + Run a Deferred-returning function until a condition is true. + + :param action: The action to run. + :param condition: The predicate signaling stop. + + :return: A Deferred that fires after the condition signals stop. + """ + while True: + yield action() + if condition(): + break diff --git a/src/allmydata/util/dictutil.py b/src/allmydata/util/dictutil.py index 3ace8fca4..5971d26f6 100644 --- a/src/allmydata/util/dictutil.py +++ b/src/allmydata/util/dictutil.py @@ -14,6 +14,7 @@ if PY2: # subclassing dict, so we'd end up exposing Python 3 dict APIs to lots of # code that doesn't support it. from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, list, object, range, str, max, min # noqa: F401 +from six import ensure_str class DictOfSets(dict): @@ -76,3 +77,54 @@ class AuxValueDict(dict): have an auxvalue.""" super(AuxValueDict, self).__setitem__(key, value) self.auxilliary[key] = auxilliary + + +class _TypedKeyDict(dict): + """Dictionary that enforces key type. + + Doesn't override everything, but probably good enough to catch most + problems. + + Subclass and override KEY_TYPE. 
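+
+    A sketch of the intended use (``_BytesOnly`` here is hypothetical;
+    ``BytesKeyDict`` below is a real subclass of this kind)::
+
+        class _BytesOnly(_TypedKeyDict):
+            KEY_TYPE = bytes
+
+        _BytesOnly({b"good": 1})[b"good"]   # fine
+        _BytesOnly({u"bad": 1})             # raises TypeError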
+ """ + + KEY_TYPE = object + + def __init__(self, *args, **kwargs): + dict.__init__(self, *args, **kwargs) + for key in self: + if not isinstance(key, self.KEY_TYPE): + raise TypeError("{} must be of type {}".format( + repr(key), self.KEY_TYPE)) + + +def _make_enforcing_override(K, method_name): + def f(self, key, *args, **kwargs): + if not isinstance(key, self.KEY_TYPE): + raise TypeError("{} must be of type {}".format( + repr(key), self.KEY_TYPE)) + return getattr(dict, method_name)(self, key, *args, **kwargs) + f.__name__ = ensure_str(method_name) + setattr(K, method_name, f) + +for _method_name in ["__setitem__", "__getitem__", "setdefault", "get", + "__delitem__"]: + _make_enforcing_override(_TypedKeyDict, _method_name) +del _method_name + + +if PY2: + # No need for enforcement, can use either bytes or unicode as keys and it's + # fine. + BytesKeyDict = UnicodeKeyDict = dict +else: + class BytesKeyDict(_TypedKeyDict): + """Keys should be bytes.""" + + KEY_TYPE = bytes + + + class UnicodeKeyDict(_TypedKeyDict): + """Keys should be unicode strings.""" + + KEY_TYPE = str diff --git a/src/allmydata/util/eliotutil.py b/src/allmydata/util/eliotutil.py index f6f40945d..9e3cdd3e1 100644 --- a/src/allmydata/util/eliotutil.py +++ b/src/allmydata/util/eliotutil.py @@ -1,6 +1,12 @@ """ Tools aimed at the interaction between Tahoe-LAFS implementation and Eliot. + +Ported to Python 3. """ +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals from __future__ import ( unicode_literals, @@ -18,6 +24,11 @@ __all__ = [ "validateSetMembership", ] +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 +from six import ensure_text + from sys import ( stdout, ) @@ -75,6 +86,9 @@ from twisted.internet.defer import ( ) from twisted.application.service import Service +from .jsonbytes import BytesJSONEncoder + + def validateInstanceOf(t): """ Return an Eliot validator that requires values to be instances of ``t``. @@ -228,7 +242,7 @@ def _stdlib_logging_to_eliot_configuration(stdlib_logger, eliot_logger=None): class _DestinationParser(object): def parse(self, description): - description = description.decode(u"ascii") + description = ensure_text(description) try: kind, args = description.split(u":", 1) @@ -291,7 +305,7 @@ class _DestinationParser(object): rotateLength=rotate_length, maxRotatedFiles=max_rotated_files, ) - return lambda reactor: FileDestination(get_file()) + return lambda reactor: FileDestination(get_file(), BytesJSONEncoder) _parse_destination_description = _DestinationParser().parse diff --git a/src/allmydata/util/encodingutil.py b/src/allmydata/util/encodingutil.py index 17a7a2f38..f13dc5b8e 100644 --- a/src/allmydata/util/encodingutil.py +++ b/src/allmydata/util/encodingutil.py @@ -252,6 +252,16 @@ ESCAPABLE_UNICODE = re.compile(u'([\uD800-\uDBFF][\uDC00-\uDFFF])|' # valid sur ESCAPABLE_8BIT = re.compile( br'[^ !#\x25-\x5B\x5D-\x5F\x61-\x7E]', re.DOTALL) +def quote_output_u(*args, **kwargs): + """ + Like ``quote_output`` but always return ``unicode``. 
+ """ + result = quote_output(*args, **kwargs) + if isinstance(result, unicode): + return result + return result.decode(kwargs.get("encoding", None) or io_encoding) + + def quote_output(s, quotemarks=True, quote_newlines=None, encoding=None): """ Encode either a Unicode string or a UTF-8-encoded bytestring for representation diff --git a/src/allmydata/util/fileutil.py b/src/allmydata/util/fileutil.py index 693cd1d63..e40e06180 100644 --- a/src/allmydata/util/fileutil.py +++ b/src/allmydata/util/fileutil.py @@ -271,11 +271,13 @@ def write_atomically(target, contents, mode="b"): move_into_place(target+".tmp", target) def write(path, data, mode="wb"): + if "b" in mode and isinstance(data, str): + data = data.encode("utf-8") with open(path, mode) as f: f.write(data) -def read(path): - with open(path, "rb") as rf: +def read(path, mode="rb"): + with open(path, mode) as rf: return rf.read() def put_file(path, inf): @@ -309,7 +311,7 @@ def precondition_abspath(path): _getfullpathname = None try: - from nt import _getfullpathname + from nt import _getfullpathname # type: ignore except ImportError: pass diff --git a/src/allmydata/util/i2p_provider.py b/src/allmydata/util/i2p_provider.py index 37789c428..22575b4ca 100644 --- a/src/allmydata/util/i2p_provider.py +++ b/src/allmydata/util/i2p_provider.py @@ -2,11 +2,18 @@ from __future__ import absolute_import, print_function, with_statement import os +from zope.interface import ( + implementer, +) + from twisted.internet.defer import inlineCallbacks, returnValue from twisted.internet.endpoints import clientFromString from twisted.internet.error import ConnectionRefusedError, ConnectError from twisted.application import service +from ..interfaces import ( + IAddressFamily, +) def create(reactor, config): """ @@ -135,6 +142,7 @@ def create_config(reactor, cli_config): returnValue((tahoe_config_i2p, i2p_port, i2p_location)) +@implementer(IAddressFamily) class _Provider(service.MultiService): def __init__(self, config, reactor): service.MultiService.__init__(self) @@ -160,7 +168,14 @@ class _Provider(service.MultiService): (privkeyfile, external_port, escaped_sam_port) return i2p_port - def get_i2p_handler(self): + def get_client_endpoint(self): + """ + Get an ``IStreamClientEndpoint`` which will set up a connection to an I2P + address. + + If I2P is not enabled or the dependencies are not available, return + ``None`` instead. + """ enabled = self._get_i2p_config("enabled", True, boolean=True) if not enabled: return None @@ -188,6 +203,9 @@ class _Provider(service.MultiService): return self._i2p.default(self._reactor, keyfile=keyfile) + # Backwards compatibility alias + get_i2p_handler = get_client_endpoint + def check_dest_config(self): if self._get_i2p_config("dest", False, boolean=True): if not self._txi2p: diff --git a/src/allmydata/util/idlib.py b/src/allmydata/util/idlib.py index 5e44b9d82..eafcbc388 100644 --- a/src/allmydata/util/idlib.py +++ b/src/allmydata/util/idlib.py @@ -1,9 +1,29 @@ +""" +Ported to Python 3. 
+""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals +from future.utils import PY2 +if PY2: + from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 +from six import ensure_text from foolscap import base32 + + def nodeid_b2a(nodeid): - # we display nodeids using the same base32 alphabet that Foolscap uses - return base32.encode(nodeid) + """ + We display nodeids using the same base32 alphabet that Foolscap uses. + + Returns a Unicode string. + """ + return ensure_text(base32.encode(nodeid)) def shortnodeid_b2a(nodeid): + """ + Short version of nodeid_b2a() output, Unicode string. + """ return nodeid_b2a(nodeid)[:8] diff --git a/src/allmydata/util/iputil.py b/src/allmydata/util/iputil.py index bd5ea7e78..fd3e88c7f 100644 --- a/src/allmydata/util/iputil.py +++ b/src/allmydata/util/iputil.py @@ -13,19 +13,19 @@ from future.utils import PY2, native_str if PY2: from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 -import os, re, socket, subprocess, errno -from sys import platform +import os, socket from zope.interface import implementer import attr +from netifaces import ( + interfaces, + ifaddresses, +) + # from Twisted from twisted.python.reflect import requireModule -from twisted.internet import defer, threads, reactor -from twisted.internet.protocol import DatagramProtocol -from twisted.internet.error import CannotListenError -from twisted.python.procutils import which from twisted.python import log from twisted.internet.endpoints import AdoptedStreamServerEndpoint from twisted.internet.interfaces import ( @@ -101,180 +101,21 @@ except ImportError: # since one might be shadowing the other. This hack appeases pyflakes. increase_rlimits = _increase_rlimits + def get_local_addresses_sync(): """ - Return a list of IPv4 addresses (as dotted-quad native strings) that are - currently configured on this host, sorted in descending order of how likely - we think they are to work. + Get locally assigned addresses as dotted-quad native strings. + + :return [str]: A list of IPv4 addresses which are assigned to interfaces + on the local system. """ - return [native_str(a) for a in _synchronously_find_addresses_via_config()] - -def get_local_addresses_async(target="198.41.0.4"): # A.ROOT-SERVERS.NET - """ - Return a Deferred that fires with a list of IPv4 addresses (as dotted-quad - native strings) that are currently configured on this host, sorted in - descending order of how likely we think they are to work. - - @param target: we want to learn an IP address they could try using to - connect to us; The default value is fine, but it might help if you - pass the address of a host that you are actually trying to be - reachable to. - """ - addresses = [] - local_ip = get_local_ip_for(target) - if local_ip is not None: - addresses.append(local_ip) - - if platform == "cygwin": - d = _cygwin_hack_find_addresses() - else: - d = _find_addresses_via_config() - - def _collect(res): - for addr in res: - if addr != "0.0.0.0" and not addr in addresses: - addresses.append(addr) - return addresses - d.addCallback(_collect) - d.addCallback(lambda addresses: [native_str(s) for s in addresses]) - return d - -def get_local_ip_for(target): - """Find out what our IP address is for use by a given target. 
-
-    @return: the IP address as a dotted-quad native string which could be used
-             to connect to us. It might work for them, it might not. If
-             there is no suitable address (perhaps we don't currently have an
-             externally-visible interface), this will return None.
-    """
-
-    try:
-        target_ipaddr = socket.gethostbyname(target)
-    except socket.gaierror:
-        # DNS isn't running, or somehow we encountered an error
-
-        # note: if an interface is configured and up, but nothing is
-        # connected to it, gethostbyname("A.ROOT-SERVERS.NET") will take 20
-        # seconds to raise socket.gaierror . This is synchronous and occurs
-        # for each node being started, so users of
-        # test.common.SystemTestMixin (like test_system) will see something
-        # like 120s of delay, which may be enough to hit the default trial
-        # timeouts. For that reason, get_local_addresses_async() was changed
-        # to default to the numerical ip address for A.ROOT-SERVERS.NET, to
-        # avoid this DNS lookup. This also makes node startup fractionally
-        # faster.
-        return None
-
-    try:
-        udpprot = DatagramProtocol()
-        port = reactor.listenUDP(0, udpprot)
-        try:
-            # connect() will fail if we're offline (e.g. running tests on a
-            # disconnected laptop), which is fine (localip=None), but we must
-            # still do port.stopListening() or we'll get a DirtyReactorError
-            udpprot.transport.connect(target_ipaddr, 7)
-            localip = udpprot.transport.getHost().host
-            return localip
-        finally:
-            d = port.stopListening()
-            d.addErrback(log.err)
-    except (socket.error, CannotListenError):
-        # no route to that host
-        localip = None
-    return native_str(localip)
-
-
-# Wow, I'm really amazed at home much mileage we've gotten out of calling
-# the external route.exe program on windows... It appears to work on all
-# versions so far.
-# ... thus wrote Greg Smith in time immemorial...
-# Also, the Win32 APIs for this are really klunky and error-prone. --Daira
-
-_win32_re = re.compile(br'^\s*\d+\.\d+\.\d+\.\d+\s.+\s(?P<address>\d+\.\d+\.\d+\.\d+)\s+(?P<metric>\d+)\s*$', flags=re.M|re.I|re.S)
-_win32_commands = (('route.exe', ('print',), _win32_re),)
-
-# These work in most Unices.
-_addr_re = re.compile(br'^\s*inet [a-zA-Z]*:?(?P<address>\d+\.\d+\.\d+\.\d+)[\s/].+$', flags=re.M|re.I|re.S)
-_unix_commands = (('/bin/ip', ('addr',), _addr_re),
-                  ('/sbin/ip', ('addr',), _addr_re),
-                  ('/sbin/ifconfig', ('-a',), _addr_re),
-                  ('/usr/sbin/ifconfig', ('-a',), _addr_re),
-                  ('/usr/etc/ifconfig', ('-a',), _addr_re),
-                  ('ifconfig', ('-a',), _addr_re),
-                  ('/sbin/ifconfig', (), _addr_re),
-                 )
-
-
-def _find_addresses_via_config():
-    return threads.deferToThread(_synchronously_find_addresses_via_config)
-
-def _synchronously_find_addresses_via_config():
-    # originally by Greg Smith, hacked by Zooko and then Daira
-
-    # We don't reach here for cygwin.
-    if platform == 'win32':
-        commands = _win32_commands
-    else:
-        commands = _unix_commands
-
-    for (pathtotool, args, regex) in commands:
-        # If pathtotool is a fully qualified path then we just try that.
-        # If it is merely an executable name then we use Twisted's
-        # "which()" utility and try each executable in turn until one
-        # gives us something that resembles a dotted-quad IPv4 address.
-
-        if os.path.isabs(pathtotool):
-            exes_to_try = [pathtotool]
-        else:
-            exes_to_try = which(pathtotool)
-
-        subprocess_error = getattr(
-            subprocess, "SubprocessError", subprocess.CalledProcessError
-        )
-        for exe in exes_to_try:
-            try:
-                addresses = _query(exe, args, regex)
-            except (IOError, OSError, ValueError, subprocess_error):
-                addresses = []
-            if addresses:
-                return addresses
-
-    return []
-
-def _query(path, args, regex):
-    if not os.path.isfile(path):
-        return []
-    env = {native_str('LANG'): native_str('en_US.UTF-8')}
-    TRIES = 5
-    for trial in range(TRIES):
-        try:
-            p = subprocess.Popen([path] + list(args), stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env)
-            (output, err) = p.communicate()
-            break
-        except OSError as e:
-            if e.errno == errno.EINTR and trial < TRIES-1:
-                continue
-            raise
-
-    addresses = []
-    outputsplit = output.split(b'\n')
-    for outline in outputsplit:
-        m = regex.match(outline)
-        if m:
-            addr = m.group('address')
-            if addr not in addresses:
-                addresses.append(addr.decode("utf-8"))
-
-    return addresses
-
-def _cygwin_hack_find_addresses():
-    addresses = []
-    for h in ["localhost", "127.0.0.1",]:
-        addr = get_local_ip_for(h)
-        if addr is not None and addr not in addresses:
-            addresses.append(addr)
-
-    return defer.succeed(addresses)
+    return list(
+        native_str(address[native_str("addr")])
+        for iface_name
+        in interfaces()
+        for address
+        in ifaddresses(iface_name).get(socket.AF_INET, [])
+    )
 
 
 def _foolscapEndpointForPortNumber(portnum):
@@ -382,7 +223,5 @@ def listenOnUnused(tub, portnum=None):
 __all__ = ["allocate_tcp_port",
            "increase_rlimits",
            "get_local_addresses_sync",
-           "get_local_addresses_async",
-           "get_local_ip_for",
            "listenOnUnused",
           ]
diff --git a/src/allmydata/util/jsonbytes.py b/src/allmydata/util/jsonbytes.py
new file mode 100644
index 000000000..406a471a0
--- /dev/null
+++ b/src/allmydata/util/jsonbytes.py
@@ -0,0 +1,51 @@
+"""
+A JSON encoder that can serialize bytes.
+
+Ported to Python 3.
+"""
+
+from __future__ import unicode_literals
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
+
+
+import json
+
+
+class BytesJSONEncoder(json.JSONEncoder):
+    """
+    A JSON encoder that can also encode bytes.
+
+    The bytes are assumed to be UTF-8 encoded Unicode strings.
+ """ + def default(self, o): + if isinstance(o, bytes): + return o.decode("utf-8") + return json.JSONEncoder.default(self, o) + + +def dumps(obj, *args, **kwargs): + """Encode to JSON, supporting bytes as keys or values. + + The bytes are assumed to be UTF-8 encoded Unicode strings. + """ + if isinstance(obj, dict): + new_obj = {} + for k, v in obj.items(): + if isinstance(k, bytes): + k = k.decode("utf-8") + new_obj[k] = v + obj = new_obj + return json.dumps(obj, cls=BytesJSONEncoder, *args, **kwargs) + + +# To make this module drop-in compatible with json module: +loads = json.loads + + +__all__ = ["dumps", "loads"] diff --git a/src/allmydata/util/log.py b/src/allmydata/util/log.py index 11c78a5a2..509deb6a4 100644 --- a/src/allmydata/util/log.py +++ b/src/allmydata/util/log.py @@ -11,6 +11,7 @@ from __future__ import unicode_literals from future.utils import PY2 if PY2: from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 +from six import ensure_str from pyutil import nummedobj @@ -55,6 +56,7 @@ class LogMixin(object): pmsgid = self._parentmsgid if pmsgid is None: pmsgid = self._grandparentmsgid + kwargs = {ensure_str(k): v for (k, v) in kwargs.items()} msgid = log.msg(msg, facility=facility, parent=pmsgid, *args, **kwargs) if self._parentmsgid is None: self._parentmsgid = msgid diff --git a/src/allmydata/util/observer.py b/src/allmydata/util/observer.py index 4ebb598c1..4a39fe014 100644 --- a/src/allmydata/util/observer.py +++ b/src/allmydata/util/observer.py @@ -16,6 +16,9 @@ if PY2: import weakref from twisted.internet import defer from foolscap.api import eventually +from twisted.logger import ( + Logger, +) """The idiom we use is for the observed object to offer a method named 'when_something', which returns a deferred. That deferred will be fired when @@ -61,7 +64,7 @@ class OneShotObserverList(object): def _fire(self, result): for w in self._watchers: - eventually(w.callback, result) + w.callback(result) del self._watchers self.__repr__ = self._fired_repr @@ -97,7 +100,10 @@ class LazyOneShotObserverList(OneShotObserverList): self._fire(self._get_result()) class ObserverList(object): - """A simple class to distribute events to a number of subscribers.""" + """ + Immediately distribute events to a number of subscribers. + """ + _logger = Logger() def __init__(self): self._watchers = [] @@ -109,8 +115,11 @@ class ObserverList(object): self._watchers.remove(observer) def notify(self, *args, **kwargs): - for o in self._watchers: - eventually(o, *args, **kwargs) + for o in self._watchers[:]: + try: + o(*args, **kwargs) + except Exception: + self._logger.failure("While notifying {o!r}", o=o) class EventStreamObserver(object): """A simple class to distribute multiple events to a single subscriber. 
diff --git a/src/allmydata/util/pollmixin.py b/src/allmydata/util/pollmixin.py index 5d1716853..582bafe86 100644 --- a/src/allmydata/util/pollmixin.py +++ b/src/allmydata/util/pollmixin.py @@ -14,6 +14,12 @@ if PY2: from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 import time + +try: + from typing import List +except ImportError: + pass + from twisted.internet import task class TimeoutError(Exception): @@ -23,7 +29,7 @@ class PollComplete(Exception): pass class PollMixin(object): - _poll_should_ignore_these_errors = [] + _poll_should_ignore_these_errors = [] # type: List[Exception] def poll(self, check_f, pollinterval=0.01, timeout=1000): # Return a Deferred, then call check_f periodically until it returns diff --git a/src/allmydata/util/tor_provider.py b/src/allmydata/util/tor_provider.py index d0ed75c3f..7b832735d 100644 --- a/src/allmydata/util/tor_provider.py +++ b/src/allmydata/util/tor_provider.py @@ -2,6 +2,10 @@ from __future__ import absolute_import, print_function, with_statement import os +from zope.interface import ( + implementer, +) + from twisted.internet.defer import inlineCallbacks, returnValue from twisted.internet.endpoints import clientFromString, TCP4ServerEndpoint from twisted.internet.error import ConnectionRefusedError, ConnectError @@ -9,25 +13,11 @@ from twisted.application import service from .observer import OneShotObserverList from .iputil import allocate_tcp_port - - -def create(reactor, config): - """ - Create a new _Provider service (this is an IService so must be - hooked up to a parent or otherwise started). - - If foolscap.connections.tor or txtorcon are not installed, then - Provider.get_tor_handler() will return None. If tahoe.cfg wants - to start an onion service too, then this `create()` method will - throw a nice error (and startService will throw an ugly error). - """ - provider = _Provider(config, reactor) - provider.check_onion_config() - return provider - +from ..interfaces import ( + IAddressFamily, +) def _import_tor(): - # this exists to be overridden by unit tests try: from foolscap.connections import tor return tor @@ -41,6 +31,25 @@ def _import_txtorcon(): except ImportError: # pragma: no cover return None +def create(reactor, config, import_tor=None, import_txtorcon=None): + """ + Create a new _Provider service (this is an IService so must be + hooked up to a parent or otherwise started). + + If foolscap.connections.tor or txtorcon are not installed, then + Provider.get_tor_handler() will return None. If tahoe.cfg wants + to start an onion service too, then this `create()` method will + throw a nice error (and startService will throw an ugly error). 
+ """ + if import_tor is None: + import_tor = _import_tor + if import_txtorcon is None: + import_txtorcon = _import_txtorcon + provider = _Provider(config, reactor, import_tor(), import_txtorcon()) + provider.check_onion_config() + return provider + + def data_directory(private_dir): return os.path.join(private_dir, "tor-statedir") @@ -209,15 +218,16 @@ def create_config(reactor, cli_config): returnValue((tahoe_config_tor, tor_port, tor_location)) +@implementer(IAddressFamily) class _Provider(service.MultiService): - def __init__(self, config, reactor): + def __init__(self, config, reactor, tor, txtorcon): service.MultiService.__init__(self) self._config = config self._tor_launched = None self._onion_ehs = None self._onion_tor_control_proto = None - self._tor = _import_tor() - self._txtorcon = _import_txtorcon() + self._tor = tor + self._txtorcon = txtorcon self._reactor = reactor def _get_tor_config(self, *args, **kwargs): @@ -228,7 +238,13 @@ class _Provider(service.MultiService): ep = TCP4ServerEndpoint(self._reactor, local_port, interface="127.0.0.1") return ep - def get_tor_handler(self): + def get_client_endpoint(self): + """ + Get an ``IStreamClientEndpoint`` which will set up a connection using Tor. + + If Tor is not enabled or the dependencies are not available, return + ``None`` instead. + """ enabled = self._get_tor_config("enabled", True, boolean=True) if not enabled: return None @@ -253,6 +269,9 @@ class _Provider(service.MultiService): return self._tor.default_socks() + # Backwards compatibility alias + get_tor_handler = get_client_endpoint + @inlineCallbacks def _make_control_endpoint(self, reactor, update_status): # this will only be called when tahoe.cfg has "[tor] launch = true" diff --git a/src/allmydata/version_checks.py b/src/allmydata/version_checks.py deleted file mode 100644 index d022055ea..000000000 --- a/src/allmydata/version_checks.py +++ /dev/null @@ -1,334 +0,0 @@ -""" -Produce reports about the versions of Python software in use by Tahoe-LAFS -for debugging and auditing purposes. - -Ported to Python 3. -""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - -__all__ = [ - "PackagingError", - "get_package_versions", - "get_package_versions_string", - "normalized_version", -] - -import os, platform, re, sys, traceback, pkg_resources - -import six - -import distro - -from . import ( - __appname__, - full_version, - branch, -) -from .util import ( - verlib, -) - -if getattr(sys, 'frozen', None): - # "Frozen" python interpreters (i.e., standalone executables - # generated by PyInstaller and other, similar utilities) run - # independently of a traditional setuptools-based packaging - # environment, and so pkg_resources.get_distribution() cannot be - # used in such cases to gather a list of requirements at runtime - # (and because a frozen application is one that has already been - # "installed", an empty list suffices here). - _INSTALL_REQUIRES = [] -else: - _INSTALL_REQUIRES = list( - str(req) - for req - in pkg_resources.get_distribution(__appname__).requires() - ) - -class PackagingError(EnvironmentError): - """ - Raised when there is an error in packaging of Tahoe-LAFS or its - dependencies which makes it impossible to proceed safely. 
- """ - -def get_package_versions(): - return dict([(k, v) for k, (v, l, c) in _vers_and_locs_list]) - -def get_package_versions_string(show_paths=False, debug=False): - res = [] - for p, (v, loc, comment) in _vers_and_locs_list: - info = str(p) + ": " + str(v) - if comment: - info = info + " [%s]" % str(comment) - if show_paths: - info = info + " (%s)" % str(loc) - res.append(info) - - output = "\n".join(res) + "\n" - - if _cross_check_errors: - output += _get_error_string(_cross_check_errors, debug=debug) - - return output - -_distributor_id_cmdline_re = re.compile("(?:Distributor ID:)\s*(.*)", re.I) -_release_cmdline_re = re.compile("(?:Release:)\s*(.*)", re.I) - -_distributor_id_file_re = re.compile("(?:DISTRIB_ID\s*=)\s*(.*)", re.I) -_release_file_re = re.compile("(?:DISTRIB_RELEASE\s*=)\s*(.*)", re.I) - -_distname = None -_version = None - -def normalized_version(verstr, what=None): - try: - suggested = verlib.suggest_normalized_version(verstr) or verstr - return verlib.NormalizedVersion(suggested) - except verlib.IrrationalVersionError: - raise - except Exception: - cls, value, trace = sys.exc_info() - new_exc = PackagingError("could not parse %s due to %s: %s" - % (what or repr(verstr), cls.__name__, value)) - six.reraise(cls, new_exc, trace) - -def _get_error_string(errors, debug=False): - - msg = "\n%s\n" % ("\n".join(errors),) - if debug: - msg += ( - "\n" - "For debugging purposes, the PYTHONPATH was\n" - " %r\n" - "install_requires was\n" - " %r\n" - "sys.path after importing pkg_resources was\n" - " %s\n" - % ( - os.environ.get('PYTHONPATH'), - _INSTALL_REQUIRES, - (os.pathsep+"\n ").join(sys.path), - ) - ) - return msg - -def _cross_check(pkg_resources_vers_and_locs, imported_vers_and_locs_list): - """This function returns a list of errors due to any failed cross-checks.""" - - from ._auto_deps import not_import_versionable - - errors = [] - not_pkg_resourceable = ['python', 'platform', __appname__.lower(), 'openssl'] - - for name, (imp_ver, imp_loc, imp_comment) in imported_vers_and_locs_list: - name = name.lower() - if name not in not_pkg_resourceable: - if name not in pkg_resources_vers_and_locs: - if name == "setuptools" and "distribute" in pkg_resources_vers_and_locs: - pr_ver, pr_loc = pkg_resources_vers_and_locs["distribute"] - if not (os.path.normpath(os.path.realpath(pr_loc)) == os.path.normpath(os.path.realpath(imp_loc)) - and imp_comment == "distribute"): - errors.append("Warning: dependency 'setuptools' found to be version %r of 'distribute' from %r " - "by pkg_resources, but 'import setuptools' gave version %r [%s] from %r. " - "A version mismatch is expected, but a location mismatch is not." - % (pr_ver, pr_loc, imp_ver, imp_comment or 'probably *not* distribute', imp_loc)) - else: - errors.append("Warning: dependency %r (version %r imported from %r) was not found by pkg_resources." - % (name, imp_ver, imp_loc)) - continue - - pr_ver, pr_loc = pkg_resources_vers_and_locs[name] - if imp_ver is None and imp_loc is None: - errors.append("Warning: dependency %r could not be imported. pkg_resources thought it should be possible " - "to import version %r from %r.\nThe exception trace was %r." - % (name, pr_ver, pr_loc, imp_comment)) - continue - - # If the pkg_resources version is identical to the imported version, don't attempt - # to normalize them, since it is unnecessary and may fail (ticket #2499). 
- if imp_ver != 'unknown' and pr_ver == imp_ver: - continue - - try: - pr_normver = normalized_version(pr_ver) - except verlib.IrrationalVersionError: - continue - except Exception as e: - errors.append("Warning: version number %r found for dependency %r by pkg_resources could not be parsed. " - "The version found by import was %r from %r. " - "pkg_resources thought it should be found at %r. " - "The exception was %s: %s" - % (pr_ver, name, imp_ver, imp_loc, pr_loc, e.__class__.__name__, e)) - else: - if imp_ver == 'unknown': - if name not in not_import_versionable: - errors.append("Warning: unexpectedly could not find a version number for dependency %r imported from %r. " - "pkg_resources thought it should be version %r at %r." - % (name, imp_loc, pr_ver, pr_loc)) - else: - try: - imp_normver = normalized_version(imp_ver) - except verlib.IrrationalVersionError: - continue - except Exception as e: - errors.append("Warning: version number %r found for dependency %r (imported from %r) could not be parsed. " - "pkg_resources thought it should be version %r at %r. " - "The exception was %s: %s" - % (imp_ver, name, imp_loc, pr_ver, pr_loc, e.__class__.__name__, e)) - else: - if pr_ver == 'unknown' or (pr_normver != imp_normver): - if not os.path.normpath(os.path.realpath(pr_loc)) == os.path.normpath(os.path.realpath(imp_loc)): - errors.append("Warning: dependency %r found to have version number %r (normalized to %r, from %r) " - "by pkg_resources, but version %r (normalized to %r, from %r) by import." - % (name, pr_ver, str(pr_normver), pr_loc, imp_ver, str(imp_normver), imp_loc)) - - return errors - -def _get_openssl_version(): - try: - from OpenSSL import SSL - return _extract_openssl_version(SSL) - except Exception: - return ("unknown", None, None) - -def _extract_openssl_version(ssl_module): - openssl_version = ssl_module.SSLeay_version(ssl_module.SSLEAY_VERSION) - if openssl_version.startswith('OpenSSL '): - openssl_version = openssl_version[8 :] - - (version, _, comment) = openssl_version.partition(' ') - - try: - openssl_cflags = ssl_module.SSLeay_version(ssl_module.SSLEAY_CFLAGS) - if '-DOPENSSL_NO_HEARTBEATS' in openssl_cflags.split(' '): - comment += ", no heartbeats" - except Exception: - pass - - return (version, None, comment if comment else None) - - -def _get_platform(): - # Our version of platform.platform(), telling us both less and more than the - # Python Standard Library's version does. - # We omit details such as the Linux kernel version number, but we add a - # more detailed and correct rendition of the Linux distribution and - # distribution-version. - if "linux" in platform.system().lower(): - return ( - platform.system() + "-" + - "_".join(distro.linux_distribution()[:2]) + "-" + - platform.machine() + "-" + - "_".join([x for x in platform.architecture() if x]) - ) - else: - return platform.platform() - -def _get_package_versions_and_locations(): - import warnings - from ._auto_deps import package_imports, global_deprecation_messages, deprecation_messages, \ - runtime_warning_messages, warning_imports, ignorable - - def package_dir(srcfile): - return os.path.dirname(os.path.dirname(os.path.normcase(os.path.realpath(srcfile)))) - - # pkg_resources.require returns the distribution that pkg_resources attempted to put - # on sys.path, which can differ from the one that we actually import due to #1258, - # or any other bug that causes sys.path to be set up incorrectly. Therefore we - # must import the packages in order to check their versions and paths. 
- - # This is to suppress all UserWarnings and various DeprecationWarnings and RuntimeWarnings - # (listed in _auto_deps.py). - - warnings.filterwarnings("ignore", category=UserWarning, append=True) - - for msg in global_deprecation_messages + deprecation_messages: - warnings.filterwarnings("ignore", category=DeprecationWarning, message=msg, append=True) - for msg in runtime_warning_messages: - warnings.filterwarnings("ignore", category=RuntimeWarning, message=msg, append=True) - try: - for modulename in warning_imports: - try: - __import__(modulename) - except (ImportError, SyntaxError): - pass - finally: - # Leave suppressions for UserWarnings and global_deprecation_messages active. - for _ in runtime_warning_messages + deprecation_messages: - warnings.filters.pop() - - packages = [] - pkg_resources_vers_and_locs = dict() - - if not hasattr(sys, 'frozen'): - pkg_resources_vers_and_locs = { - p.project_name.lower(): (str(p.version), p.location) - for p - in pkg_resources.require(_INSTALL_REQUIRES) - } - - def get_version(module): - if hasattr(module, '__version__'): - return str(getattr(module, '__version__')) - elif hasattr(module, 'version'): - ver = getattr(module, 'version') - if isinstance(ver, tuple): - return '.'.join(map(str, ver)) - else: - return str(ver) - else: - return 'unknown' - - for pkgname, modulename in [(__appname__, 'allmydata')] + package_imports: - if modulename: - try: - __import__(modulename) - module = sys.modules[modulename] - except (ImportError, SyntaxError): - etype, emsg, etrace = sys.exc_info() - trace_info = (etype, str(emsg), ([None] + traceback.extract_tb(etrace))[-1]) - packages.append( (pkgname, (None, None, trace_info)) ) - else: - comment = None - if pkgname == __appname__: - comment = "%s: %s" % (branch, full_version) - elif pkgname == 'setuptools' and hasattr(module, '_distribute'): - # distribute does not report its version in any module variables - comment = 'distribute' - ver = get_version(module) - loc = package_dir(module.__file__) - if ver == "unknown" and pkgname in pkg_resources_vers_and_locs: - (pr_ver, pr_loc) = pkg_resources_vers_and_locs[pkgname] - if loc == os.path.normcase(os.path.realpath(pr_loc)): - ver = pr_ver - packages.append( (pkgname, (ver, loc, comment)) ) - elif pkgname == 'python': - packages.append( (pkgname, (platform.python_version(), sys.executable, None)) ) - elif pkgname == 'platform': - packages.append( (pkgname, (_get_platform(), None, None)) ) - elif pkgname == 'OpenSSL': - packages.append( (pkgname, _get_openssl_version()) ) - - cross_check_errors = [] - - if len(pkg_resources_vers_and_locs) > 0: - imported_packages = set([p.lower() for (p, _) in packages]) - extra_packages = [] - - for pr_name, (pr_ver, pr_loc) in pkg_resources_vers_and_locs.items(): - if pr_name not in imported_packages and pr_name not in ignorable: - extra_packages.append( (pr_name, (pr_ver, pr_loc, "according to pkg_resources")) ) - - cross_check_errors = _cross_check(pkg_resources_vers_and_locs, packages) - packages += extra_packages - - return packages, cross_check_errors - - -_vers_and_locs_list, _cross_check_errors = _get_package_versions_and_locations() diff --git a/src/allmydata/web/check_results.py b/src/allmydata/web/check_results.py index 7c4723333..54130183b 100644 --- a/src/allmydata/web/check_results.py +++ b/src/allmydata/web/check_results.py @@ -1,6 +1,6 @@ +from future.builtins import str import time -import json from twisted.web import ( http, @@ -31,6 +31,7 @@ from allmydata.interfaces import ( from allmydata.util import ( 
base32, dictutil, + jsonbytes as json, # Supporting dumping bytes ) @@ -200,7 +201,7 @@ class ResultsBase(object): return tags.ul(r) def _html(self, s): - if isinstance(s, (str, unicode)): + if isinstance(s, (bytes, str)): return html.escape(s) assert isinstance(s, (list, tuple)) return [html.escape(w) for w in s] @@ -522,7 +523,7 @@ class DeepCheckResultsRendererElement(Element, ResultsBase, ReloadMixin): summary = cr.get_summary() if summary: summary_text = ": " + summary - summary_text += " [SI: %s]" % cr.get_storage_index_string() + summary_text += " [SI: %s]" % cr.get_storage_index_string().decode("ascii") problems.append({ # Not sure self._join_pathstring(path) is the # right thing to use here. diff --git a/src/allmydata/web/common.py b/src/allmydata/web/common.py index 102e67adc..57118d1d4 100644 --- a/src/allmydata/web/common.py +++ b/src/allmydata/web/common.py @@ -1,17 +1,56 @@ +from past.builtins import unicode +from six import ensure_text, ensure_str import time import json from functools import wraps +from hyperlink import ( + DecodedURL, +) + +from eliot import ( + Message, + start_action, +) +from eliot.twisted import ( + DeferredContext, +) + from twisted.web import ( http, resource, - server, template, ) +from twisted.web.iweb import ( + IRequest, +) +from twisted.web.template import ( + tags, +) +from twisted.web.server import ( + NOT_DONE_YET, +) +from twisted.web.util import ( + DeferredResource, + FailureElement, + redirectTo, +) +from twisted.python.reflect import ( + fullyQualifiedName, +) from twisted.python import log -from nevow import appserver -from nevow.inevow import IRequest +from twisted.python.failure import ( + Failure, +) +from twisted.internet.defer import ( + CancelledError, + maybeDeferred, +) +from twisted.web.resource import ( + IResource, +) + from allmydata import blacklist from allmydata.interfaces import ( EmptyPathnameComponentError, @@ -27,7 +66,6 @@ from allmydata.interfaces import ( SDMF_VERSION, ) from allmydata.mutable.common import UnrecoverableFileError -from allmydata.util.hashutil import timing_safe_compare from allmydata.util.time_format import ( format_delta, format_time, @@ -62,17 +100,19 @@ def get_filenode_metadata(filenode): def boolean_of_arg(arg): # TODO: "" + arg = ensure_text(arg) if arg.lower() not in ("true", "t", "1", "false", "f", "0", "on", "off"): raise WebError("invalid boolean argument: %r" % (arg,), http.BAD_REQUEST) return arg.lower() in ("true", "t", "1", "on") def parse_replace_arg(replace): + replace = ensure_text(replace) if replace.lower() == "only-files": return replace try: return boolean_of_arg(replace) except WebError: - raise WebError("invalid replace= argument: %r" % (replace,), http.BAD_REQUEST) + raise WebError("invalid replace= argument: %r" % (ensure_str(replace),), http.BAD_REQUEST) def get_format(req, default="CHK"): @@ -81,11 +121,11 @@ def get_format(req, default="CHK"): if boolean_of_arg(get_arg(req, "mutable", "false")): return "SDMF" return default - if arg.upper() == "CHK": + if arg.upper() == b"CHK": return "CHK" - elif arg.upper() == "SDMF": + elif arg.upper() == b"SDMF": return "SDMF" - elif arg.upper() == "MDMF": + elif arg.upper() == b"MDMF": return "MDMF" else: raise WebError("Unknown format: %s, I know CHK, SDMF, MDMF" % arg, @@ -117,8 +157,22 @@ def parse_offset_arg(offset): return offset -def get_root(ctx_or_req): - req = IRequest(ctx_or_req) +def get_root(req): + """ + Get a relative path with parent directory segments that refers to the root + location known to the given request. 
This seems a lot like the constant + absolute path **/** but it will behave differently if the Tahoe-LAFS HTTP + server is reverse-proxied and mounted somewhere other than at the root. + + :param twisted.web.iweb.IRequest req: The request to consider. + + :return: A string like ``../../..`` with the correct number of segments to + reach the root. + """ + if not IRequest.providedBy(req): + raise TypeError( + "get_root requires IRequest provider, got {!r}".format(req), + ) depth = len(req.prepath) + len(req.postpath) link = "/".join([".."] * depth) return link @@ -157,28 +211,44 @@ def compute_rate(bytes, seconds): return 1.0 * bytes / seconds def abbreviate_rate(data): - # 21.8kBps, 554.4kBps 4.37MBps + """ + Convert number of bytes/second into human readable strings (unicode). + + Uses metric measures, so 1000 not 1024, e.g. 21.8kBps, 554.4kBps, 4.37MBps. + + :param data: Either ``None`` or integer. + + :return: Unicode string. + """ if data is None: - return "" + return u"" r = float(data) if r > 1000000: - return "%1.2fMBps" % (r/1000000) + return u"%1.2fMBps" % (r/1000000) if r > 1000: - return "%.1fkBps" % (r/1000) - return "%.0fBps" % r + return u"%.1fkBps" % (r/1000) + return u"%.0fBps" % r def abbreviate_size(data): - # 21.8kB, 554.4kB 4.37MB + """ + Convert number of bytes into human readable strings (unicode). + + Uses metric measures, so 1000 not 1024, e.g. 21.8kB, 554.4kB, 4.37MB. + + :param data: Either ``None`` or integer. + + :return: Unicode string. + """ if data is None: - return "" + return u"" r = float(data) if r > 1000000000: - return "%1.2fGB" % (r/1000000000) + return u"%1.2fGB" % (r/1000000000) if r > 1000000: - return "%1.2fMB" % (r/1000000) + return u"%1.2fMB" % (r/1000000) if r > 1000: - return "%.1fkB" % (r/1000) - return "%.0fB" % r + return u"%.1fkB" % (r/1000) + return u"%.0fB" % r def plural(sequence_or_length): if isinstance(sequence_or_length, int): @@ -319,58 +389,13 @@ def humanize_failure(f): return humanize_exception(f.value) -class MyExceptionHandler(appserver.DefaultExceptionHandler, object): - def simple(self, ctx, text, code=http.BAD_REQUEST): - req = IRequest(ctx) - req.setResponseCode(code) - #req.responseHeaders.setRawHeaders("content-encoding", []) - #req.responseHeaders.setRawHeaders("content-disposition", []) - req.setHeader("content-type", "text/plain;charset=utf-8") - if isinstance(text, unicode): - text = text.encode("utf-8") - req.setHeader("content-length", b"%d" % len(text)) - req.write(text) - # TODO: consider putting the requested URL here - req.finishRequest(False) - - def renderHTTP_exception(self, ctx, f): - try: - text, code = humanize_failure(f) - except: - log.msg("exception in humanize_failure") - log.msg("argument was %s" % (f,)) - log.err() - text, code = str(f), None - if code is not None: - return self.simple(ctx, text, code) - if f.check(server.UnsupportedMethod): - # twisted.web.server.Request.render() has support for transforming - # this into an appropriate 501 NOT_IMPLEMENTED or 405 NOT_ALLOWED - # return code, but nevow does not. - req = IRequest(ctx) - method = req.method - return self.simple(ctx, - "I don't know how to treat a %s request." 
% method, - http.NOT_IMPLEMENTED) - req = IRequest(ctx) - accept = req.getHeader("accept") - if not accept: - accept = "*/*" - if "*/*" in accept or "text/*" in accept or "text/html" in accept: - super = appserver.DefaultExceptionHandler - return super.renderHTTP_exception(self, ctx, f) - # use plain text - traceback = f.getTraceback() - return self.simple(ctx, traceback, http.INTERNAL_SERVER_ERROR) - - class NeedOperationHandleError(WebError): pass class SlotsSequenceElement(template.Element): """ - ``SlotsSequenceElement` is a minimal port of nevow's sequence renderer for + ``SlotsSequenceElement`` is a minimal port of Nevow's sequence renderer for twisted.web.template. Tags passed in to be templated will have two renderers available: ``item`` @@ -413,84 +438,254 @@ return tag -class TokenOnlyWebApi(resource.Resource, object): - """ - I provide a rend.Page implementation that only accepts POST calls, - and only if they have a 'token=' arg with the correct - authentication token (see - :meth:`allmydata.client.Client.get_auth_token`). Callers must also - provide the "t=" argument to indicate the return-value (the only - valid value for this is "json") - - Subclasses should override 'post_json' which should process the - API call and return a string which encodes a valid JSON - object. This will only be called if the correct token is present - and valid (during renderHTTP processing). - """ - - def __init__(self, client): - self.client = client - - def post_json(self, req): - return NotImplemented - - def render(self, req): - if req.method != 'POST': - raise server.UnsupportedMethod(('POST',)) - if req.args.get('token', False): - raise WebError("Do not pass 'token' as URL argument", http.BAD_REQUEST) - # not using get_arg() here because we *don't* want the token - # argument to work if you passed it as a GET-style argument - token = None - if req.fields and 'token' in req.fields: - token = req.fields['token'].value.strip() - if not token: - raise WebError("Missing token", http.UNAUTHORIZED) - if not timing_safe_compare(token, self.client.get_auth_token()): - raise WebError("Invalid token", http.UNAUTHORIZED) - - t = get_arg(req, "t", "").strip() - if not t: - raise WebError("Must provide 't=' argument") - if t == u'json': - try: - return self.post_json(req) - except WebError as e: - req.setResponseCode(e.code) - return json.dumps({"error": e.text}) - except Exception as e: - message, code = humanize_exception(e) - req.setResponseCode(500 if code is None else code) - return json.dumps({"error": message}) - else: - raise WebError("'%s' invalid type for 't' arg" % (t,), http.BAD_REQUEST) - - -def exception_to_child(f): +def exception_to_child(getChild): """ Decorate ``getChild`` method with exception handling behavior to render an error page reflecting the exception. """ - @wraps(f) + @wraps(getChild) def g(self, name, req): - try: - return f(self, name, req) - except Exception as e: - description, status = humanize_exception(e) - return resource.ErrorPage(status, "Error", description) + # Bind the method to the instance so it has a better + # fullyQualifiedName later on. This is not necessary on Python 3.
+ bound_getChild = getChild.__get__(self, type(self)) + + action = start_action( + action_type=u"allmydata:web:common-getChild", + uri=req.uri, + method=req.method, + name=name, + handler=fullyQualifiedName(bound_getChild), + ) + with action.context(): + result = DeferredContext(maybeDeferred(bound_getChild, name, req)) + result.addCallbacks( + _getChild_done, + _getChild_failed, + callbackArgs=(self,), + ) + result = result.addActionFinish() + return DeferredResource(result) return g -def render_exception(f): +def _getChild_done(child, parent): + Message.log( + message_type=u"allmydata:web:common-getChild:result", + result=fullyQualifiedName(type(child)), + ) + if child is None: + return resource.NoResource() + return child + + +def _getChild_failed(reason): + text, code = humanize_failure(reason) + return resource.ErrorPage(code, "Error", text) + + +def render_exception(render): """ Decorate a ``render_*`` method with exception handling behavior to render an error page reflecting the exception. """ - @wraps(f) + @wraps(render) def g(self, request): - try: - return f(self, request) - except Exception as e: - description, status = humanize_exception(e) - return resource.ErrorPage(status, "Error", description).render(request) + # Bind the method to the instance so it has a better + # fullyQualifiedName later on. This is not necessary on Python 3. + bound_render = render.__get__(self, type(self)) + + action = start_action( + action_type=u"allmydata:web:common-render", + uri=request.uri, + method=request.method, + handler=fullyQualifiedName(bound_render), + ) + if getattr(request, "dont_apply_extra_processing", False): + with action: + return bound_render(request) + + with action.context(): + result = DeferredContext(maybeDeferred(bound_render, request)) + # Apply `_finish`, all of our result handling logic, to whatever it + # returned. + result.addBoth(_finish, bound_render, request) + d = result.addActionFinish() + + # If the connection is lost then there's no point running our _finish + # logic because it has nowhere to send anything. There may also be no + # point in finishing whatever operation was being performed because + # the client cannot be informed of its result. Also, Twisted Web + # raises exceptions from some Request methods if they're used after + # the connection is lost. + request.notifyFinish().addErrback( + lambda ignored: d.cancel(), + ) + return NOT_DONE_YET + return g + + +def _finish(result, render, request): + """ + Try to finish rendering the response to a request. + + This implements extra convenience functionality not provided by Twisted + Web. Various resources in Tahoe-LAFS made use of this functionality when + it was provided by Nevow. Rather than making that application code do the + more tedious thing itself, we duplicate the functionality here. + + :param result: Something returned by a render method which we can turn + into a response. + + :param render: The original render method which produced the result. + + :param request: The request being responded to. + + :return: ``None`` + """ + if isinstance(result, Failure): + if result.check(CancelledError): + return + Message.log( + message_type=u"allmydata:web:common-render:failure", + message=result.getErrorMessage(), + ) + _finish( + _renderHTTP_exception(request, result), + render, + request, + ) + elif IResource.providedBy(result): + # If result is also using @render_exception then we don't want to + # double-apply the logic. This leads to an attempt to double-finish + # the request.
If it isn't using @render_exception then you should + # fix it so it is. + Message.log( + message_type=u"allmydata:web:common-render:resource", + resource=fullyQualifiedName(type(result)), + ) + result.render(request) + elif isinstance(result, unicode): + Message.log( + message_type=u"allmydata:web:common-render:unicode", + ) + request.write(result.encode("utf-8")) + request.finish() + elif isinstance(result, bytes): + Message.log( + message_type=u"allmydata:web:common-render:bytes", + ) + request.write(result) + request.finish() + elif isinstance(result, DecodedURL): + Message.log( + message_type=u"allmydata:web:common-render:DecodedURL", + ) + _finish(redirectTo(result.to_text().encode("utf-8"), request), render, request) + elif result is None: + Message.log( + message_type=u"allmydata:web:common-render:None", + ) + request.finish() + elif result == NOT_DONE_YET: + Message.log( + message_type=u"allmydata:web:common-render:NOT_DONE_YET", + ) + pass + else: + Message.log( + message_type=u"allmydata:web:common-render:unknown", + ) + log.err("Request for {!r} handled by {!r} returned unusable {!r}".format( + request.uri, + fullyQualifiedName(render), + result, + )) + request.setResponseCode(http.INTERNAL_SERVER_ERROR) + _finish(b"Internal Server Error", render, request) + + +def _renderHTTP_exception(request, failure): + try: + text, code = humanize_failure(failure) + except: + log.msg("exception in humanize_failure") + log.msg("argument was %s" % (failure,)) + log.err() + text = str(failure) + code = None + + if code is not None: + return _renderHTTP_exception_simple(request, text, code) + + accept = request.getHeader("accept") + if not accept: + accept = "*/*" + if "*/*" in accept or "text/*" in accept or "text/html" in accept: + request.setResponseCode(http.INTERNAL_SERVER_ERROR) + return template.renderElement( + request, + tags.html( + tags.head( + tags.title(u"Exception"), + ), + tags.body( + FailureElement(failure), + ), + ), + ) + + # use plain text + traceback = failure.getTraceback() + return _renderHTTP_exception_simple( + request, + traceback, + http.INTERNAL_SERVER_ERROR, + ) + + +def _renderHTTP_exception_simple(request, text, code): + request.setResponseCode(code) + request.setHeader("content-type", "text/plain;charset=utf-8") + if isinstance(text, unicode): + text = text.encode("utf-8") + request.setHeader("content-length", b"%d" % len(text)) + return text + + +def handle_when_done(req, d): + when_done = get_arg(req, "when_done", None) + if when_done: + d.addCallback(lambda res: DecodedURL.from_text(when_done.decode("utf-8"))) + return d + + +def url_for_string(req, url_string): + """ + Construct a universal URL using the given URL string. + + :param IRequest req: The request being served. If ``url_string`` is not + absolute then this is used to determine the net location of this + server and the resulting URL is made to point at it. + + :param bytes url_string: A byte string giving a universal or absolute URL. + + :return DecodedURL: An absolute URL based on this server's net location + and the given URL string.
+ """ + url = DecodedURL.from_text(url_string.decode("utf-8")) + if url.host == b"": + root = req.URLPath() + netloc = root.netloc.split(b":", 1) + if len(netloc) == 1: + host = netloc + port = None + else: + host = netloc[0] + port = int(netloc[1]) + url = url.replace( + scheme=root.scheme.decode("ascii"), + host=host.decode("ascii"), + port=port, + ) + return url diff --git a/src/allmydata/web/common_py3.py b/src/allmydata/web/common_py3.py index 73130cbab..cde3924fd 100644 --- a/src/allmydata/web/common_py3.py +++ b/src/allmydata/web/common_py3.py @@ -4,15 +4,14 @@ Common utilities that are available from Python 3. Can eventually be merged back into allmydata.web.common. """ -from future.utils import PY2 +from past.builtins import unicode -if PY2: - from nevow.inevow import IRequest as INevowRequest -else: - INevowRequest = None +try: + from typing import Optional +except ImportError: + pass from twisted.web import resource, http -from twisted.web.iweb import IRequest from allmydata.util import abbreviate @@ -23,24 +22,26 @@ class WebError(Exception): self.code = code -def get_arg(ctx_or_req, argname, default=None, multiple=False): +def get_arg(req, argname, default=None, multiple=False): """Extract an argument from either the query args (req.args) or the form body fields (req.fields). If multiple=False, this returns a single value (or the default, which defaults to None), and the query args take precedence. If multiple=True, this returns a tuple of arguments (possibly empty), starting with all those in the query args. + + :param TahoeLAFSRequest req: The request to consider. + + :return: Either bytes or tuple of bytes. """ + if isinstance(argname, unicode): + argname = argname.encode("utf-8") + if isinstance(default, unicode): + default = default.encode("utf-8") results = [] - if PY2: - req = INevowRequest(ctx_or_req) - if argname in req.args: - results.extend(req.args[argname]) - if req.fields and argname in req.fields: - results.append(req.fields[argname].value) - else: - req = IRequest(ctx_or_req) - if argname in req.args: - results.extend(req.args[argname]) + if argname in req.args: + results.extend(req.args[argname]) + if req.fields and argname in req.fields: + results.append(req.fields[argname].value) if multiple: return tuple(results) if results: @@ -59,7 +60,7 @@ class MultiFormatResource(resource.Resource, object): format if nothing else is given as the ``formatDefault``. """ formatArgument = "t" - formatDefault = None + formatDefault = None # type: Optional[str] def render(self, req): """ @@ -74,6 +75,9 @@ class MultiFormatResource(resource.Resource, object): :return: The result of the selected renderer. """ t = get_arg(req, self.formatArgument, self.formatDefault) + # It's either bytes or None. + if isinstance(t, bytes): + t = unicode(t, "ascii") renderer = self._get_renderer(t) return renderer(req) @@ -107,16 +111,23 @@ class MultiFormatResource(resource.Resource, object): def abbreviate_time(data): + """ + Convert number of seconds into human readable string. + + :param data: Either ``None`` or integer or float, seconds. + + :return: Unicode string. 
+ """ # 1.23s, 790ms, 132us if data is None: - return "" + return u"" s = float(data) if s >= 10: return abbreviate.abbreviate_time(data) if s >= 1.0: - return "%.2fs" % s + return u"%.2fs" % s if s >= 0.01: - return "%.0fms" % (1000*s) + return u"%.0fms" % (1000*s) if s >= 0.001: - return "%.1fms" % (1000*s) - return "%.0fus" % (1000000*s) + return u"%.1fms" % (1000*s) + return u"%.0fus" % (1000000*s) diff --git a/src/allmydata/web/directory.py b/src/allmydata/web/directory.py index 4d7aa1bd1..981c8ef56 100644 --- a/src/allmydata/web/directory.py +++ b/src/allmydata/web/directory.py @@ -1,6 +1,6 @@ +from past.builtins import unicode -import json -import urllib +from urllib.parse import quote as url_quote from datetime import timedelta from zope.interface import implementer @@ -20,7 +20,7 @@ from twisted.web.template import ( from hyperlink import URL from twisted.python.filepath import FilePath -from allmydata.util import base32 +from allmydata.util import base32, jsonbytes as json from allmydata.util.encodingutil import ( to_bytes, quote_output, @@ -58,6 +58,7 @@ from allmydata.web.common import ( SlotsSequenceElement, exception_to_child, render_exception, + handle_when_done, ) from allmydata.web.filenode import ReplaceMeMixin, \ FileNodeHandler, PlaceHolderNodeHandler @@ -108,12 +109,12 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object): # or no further children) renders "this" page. We also need # to reject "/uri/URI:DIR2:..//", so we look at postpath. name = name.decode('utf8') - if not name and req.postpath != ['']: + if not name and req.postpath != [b'']: return self # Rejecting URIs that contain empty path pieces (for example: # "/uri/URI:DIR2:../foo//new.txt" or "/uri/URI:DIR2:..//") was - # the old nevow behavior and it is encoded in the test suite; + # the old Nevow behavior and it is encoded in the test suite; # we will follow suit. for segment in req.prepath: if not segment: @@ -134,7 +135,7 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object): terminal = (req.prepath + req.postpath)[-1].decode('utf8') == name nonterminal = not terminal #len(req.postpath) > 0 - t = get_arg(req, "t", "").strip() + t = get_arg(req, b"t", b"").strip() if isinstance(node_or_failure, Failure): f = node_or_failure f.trap(NoSuchChildError) @@ -206,6 +207,7 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object): ) return make_handler_for(node, self.client, self.node, name) + @render_exception def render_DELETE(self, req): assert self.parentnode and self.name d = self.parentnode.delete(self.name) @@ -215,7 +217,7 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object): @render_exception def render_GET(self, req): # This is where all of the directory-related ?t=* code goes. - t = get_arg(req, "t", "").strip() + t = unicode(get_arg(req, b"t", b"").strip(), "ascii") # t=info contains variable ophandles, t=rename-form contains the name # of the child being renamed. Neither is allowed an ETag. 
@@ -223,7 +225,7 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object): if not self.node.is_mutable() and t in FIXED_OUTPUT_TYPES: si = self.node.get_storage_index() if si and req.setETag('DIR:%s-%s' % (base32.b2a(si), t or "")): - return "" + return b"" if not t: # render the directory as HTML @@ -253,7 +255,7 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object): @render_exception def render_PUT(self, req): - t = get_arg(req, "t", "").strip() + t = get_arg(req, b"t", b"").strip() replace = parse_replace_arg(get_arg(req, "replace", "true")) if t == "mkdir": @@ -273,7 +275,7 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object): @render_exception def render_POST(self, req): - t = get_arg(req, "t", "").strip() + t = unicode(get_arg(req, b"t", b"").strip(), "ascii") if t == "mkdir": d = self._POST_mkdir(req) @@ -310,13 +312,7 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object): else: raise WebError("POST to a directory with bad t=%s" % t) - when_done = get_arg(req, "when_done", None) - if when_done: - def done(res): - req.redirect(when_done) - return res - d.addCallback(done) - return d + return handle_when_done(req, d) def _POST_mkdir(self, req): name = get_arg(req, "name", "") @@ -402,9 +398,12 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object): d.addBoth(_maybe_got_node) # now we have a placeholder or a filenodehandler, and we can just # delegate to it. We could return the resource back out of - # DirectoryNodeHandler.renderHTTP, and nevow would recurse into it, - # but the addCallback() that handles when_done= would break. - d.addCallback(lambda child: child.render(req)) + # DirectoryNodeHandler.render_POST and it would get rendered but the + # addCallback() that handles when_done= would break. 
+ def render_child(child): + req.dont_apply_extra_processing = True + return child.render(req) + d.addCallback(render_child) return d def _POST_uri(self, req): @@ -523,9 +522,9 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object): d.addCallback(self._maybe_literal, CheckResultsRenderer) return d - def _start_operation(self, monitor, renderer, ctx): - self._operations.add_monitor(ctx, monitor, renderer) - return self._operations.redirect_to(ctx) + def _start_operation(self, monitor, renderer, req): + self._operations.add_monitor(req, monitor, renderer) + return self._operations.redirect_to(req) def _POST_start_deep_check(self, req): # check this directory and everything reachable from it @@ -733,7 +732,7 @@ class DirectoryAsHTML(Element): return "" rocap = self.node.get_readonly_uri() root = get_root(req) - uri_link = "%s/uri/%s/" % (root, urllib.quote(rocap)) + uri_link = "%s/uri/%s/" % (root, url_quote(rocap)) return tag(tags.a("Read-Only Version", href=uri_link)) @renderer @@ -755,10 +754,10 @@ class DirectoryAsHTML(Element): called by the 'children' renderer) """ name = name.encode("utf-8") - nameurl = urllib.quote(name, safe="") # encode any slashes too + nameurl = url_quote(name, safe="") # encode any slashes too root = get_root(req) - here = "{}/uri/{}/".format(root, urllib.quote(self.node.get_uri())) + here = "{}/uri/{}/".format(root, url_quote(self.node.get_uri())) if self.node.is_unknown() or self.node.is_readonly(): unlink = "-" rename = "-" @@ -815,7 +814,7 @@ class DirectoryAsHTML(Element): assert IFilesystemNode.providedBy(target), target target_uri = target.get_uri() or "" - quoted_uri = urllib.quote(target_uri, safe="") # escape slashes too + quoted_uri = url_quote(target_uri, safe="") # escape slashes too if IMutableFileNode.providedBy(target): # to prevent javascript in displayed .html files from stealing a @@ -836,7 +835,7 @@ class DirectoryAsHTML(Element): elif IDirectoryNode.providedBy(target): # directory - uri_link = "%s/uri/%s/" % (root, urllib.quote(target_uri)) + uri_link = "%s/uri/%s/" % (root, url_quote(target_uri)) slots["filename"] = tags.a(name, href=uri_link) if not target.is_mutable(): dirtype = "DIR-IMM" @@ -872,7 +871,7 @@ class DirectoryAsHTML(Element): slots["size"] = "-" # use a directory-relative info link, so we can extract both the # writecap and the readcap - info_link = "%s?t=info" % urllib.quote(name) + info_link = "%s?t=info" % url_quote(name) if info_link: slots["info"] = tags.a("More Info", href=info_link) @@ -889,7 +888,7 @@ class DirectoryAsHTML(Element): # because action="." 
doesn't get us back to the dir page (but # instead /uri itself) root = get_root(req) - here = "{}/uri/{}/".format(root, urllib.quote(self.node.get_uri())) + here = "{}/uri/{}/".format(root, url_quote(self.node.get_uri())) if self.node.is_readonly(): return tags.div("No upload forms: directory is read-only") @@ -1006,7 +1005,7 @@ def _directory_json_metadata(req, dirnode): d = dirnode.list() def _got(children): kids = {} - for name, (childnode, metadata) in children.iteritems(): + for name, (childnode, metadata) in children.items(): assert IFilesystemNode.providedBy(childnode), childnode rw_uri = childnode.get_write_uri() ro_uri = childnode.get_readonly_uri() @@ -1167,13 +1166,13 @@ def _cap_to_link(root, path, cap): if isinstance(cap_obj, (CHKFileURI, WriteableSSKFileURI, ReadonlySSKFileURI)): uri_link = root_url.child( u"file", - u"{}".format(urllib.quote(cap)), - u"{}".format(urllib.quote(path[-1])), + u"{}".format(url_quote(cap)), + u"{}".format(url_quote(path[-1])), ) else: uri_link = root_url.child( u"uri", - u"{}".format(urllib.quote(cap, safe="")), + u"{}".format(url_quote(cap, safe="")), ) return tags.a(cap, href=uri_link.to_text()) else: @@ -1364,7 +1363,7 @@ class ManifestStreamer(dirnode.DeepStats): j = json.dumps(d, ensure_ascii=True) assert "\n" not in j - self.req.write(j+"\n") + self.req.write(j.encode("utf-8")+b"\n") def finish(self): stats = dirnode.DeepStats.get_results(self) @@ -1373,8 +1372,8 @@ class ManifestStreamer(dirnode.DeepStats): } j = json.dumps(d, ensure_ascii=True) assert "\n" not in j - self.req.write(j+"\n") - return "" + self.req.write(j.encode("utf-8")+b"\n") + return b"" @implementer(IPushProducer) class DeepCheckStreamer(dirnode.DeepStats): @@ -1442,7 +1441,7 @@ class DeepCheckStreamer(dirnode.DeepStats): def write_line(self, data): j = json.dumps(data, ensure_ascii=True) assert "\n" not in j - self.req.write(j+"\n") + self.req.write(j.encode("utf-8")+b"\n") def finish(self): stats = dirnode.DeepStats.get_results(self) @@ -1451,8 +1450,8 @@ class DeepCheckStreamer(dirnode.DeepStats): } j = json.dumps(d, ensure_ascii=True) assert "\n" not in j - self.req.write(j+"\n") - return "" + self.req.write(j.encode("utf-8")+b"\n") + return b"" class UnknownNodeHandler(Resource, object): @@ -1465,7 +1464,7 @@ class UnknownNodeHandler(Resource, object): @render_exception def render_GET(self, req): - t = get_arg(req, "t", "").strip() + t = unicode(get_arg(req, "t", "").strip(), "ascii") if t == "info": return MoreInfo(self.node) if t == "json": diff --git a/src/allmydata/web/filenode.py b/src/allmydata/web/filenode.py index 0ecc8cc52..5bd575631 100644 --- a/src/allmydata/web/filenode.py +++ b/src/allmydata/web/filenode.py @@ -1,5 +1,4 @@ - -import json +from past.builtins import unicode, long from twisted.web import http, static from twisted.internet import defer @@ -8,8 +7,6 @@ from twisted.web.resource import ( ErrorPage, ) -from nevow import url - from allmydata.interfaces import ExistingChildError from allmydata.monitor import Monitor from allmydata.immutable.upload import FileHandle @@ -34,8 +31,8 @@ from allmydata.web.common import ( render_exception, should_create_intermediate_directories, text_plain, - MyExceptionHandler, WebError, + handle_when_done, ) from allmydata.web.check_results import ( CheckResultsRenderer, @@ -43,6 +40,8 @@ from allmydata.web.check_results import ( LiteralCheckResultsRenderer, ) from allmydata.web.info import MoreInfo +from allmydata.util import jsonbytes as json + class ReplaceMeMixin(object): def replace_me_with_a_child(self, req, 
client, replace): @@ -119,7 +118,7 @@ class PlaceHolderNodeHandler(Resource, ReplaceMeMixin): @render_exception def render_PUT(self, req): - t = get_arg(req, "t", "").strip() + t = get_arg(req, b"t", b"").strip() replace = parse_replace_arg(get_arg(req, "replace", "true")) assert self.parentnode and self.name @@ -135,9 +134,9 @@ class PlaceHolderNodeHandler(Resource, ReplaceMeMixin): @render_exception def render_POST(self, req): - t = get_arg(req, "t", "").strip() - replace = boolean_of_arg(get_arg(req, "replace", "true")) - if t == "upload": + t = get_arg(req, b"t", b"").strip() + replace = boolean_of_arg(get_arg(req, b"replace", b"true")) + if t == b"upload": # like PUT, but get the file data from an HTML form's input field. # We could get here from POST /uri/mutablefilecap?t=upload, # or POST /uri/path/file?t=upload, or @@ -150,10 +149,7 @@ class PlaceHolderNodeHandler(Resource, ReplaceMeMixin): # placeholder. raise WebError("POST to a file: bad t=%s" % t) - when_done = get_arg(req, "when_done", None) - if when_done: - d.addCallback(lambda res: when_done) - return d + return handle_when_done(req, d) class FileNodeHandler(Resource, ReplaceMeMixin, object): @@ -184,7 +180,7 @@ class FileNodeHandler(Resource, ReplaceMeMixin, object): @render_exception def render_GET(self, req): - t = get_arg(req, "t", "").strip() + t = unicode(get_arg(req, b"t", b"").strip(), "ascii") # t=info contains variable ophandles, so is not allowed an ETag. FIXED_OUTPUT_TYPES = ["", "json", "uri", "readonly-uri"] @@ -242,19 +238,19 @@ class FileNodeHandler(Resource, ReplaceMeMixin, object): @render_exception def render_HEAD(self, req): - t = get_arg(req, "t", "").strip() + t = get_arg(req, b"t", b"").strip() if t: raise WebError("HEAD file: bad t=%s" % t) - filename = get_arg(req, "filename", self.name) or "unknown" + filename = get_arg(req, b"filename", self.name) or "unknown" d = self.node.get_best_readable_version() d.addCallback(lambda dn: FileDownloader(dn, filename)) return d @render_exception def render_PUT(self, req): - t = get_arg(req, "t", "").strip() - replace = parse_replace_arg(get_arg(req, "replace", "true")) - offset = parse_offset_arg(get_arg(req, "offset", None)) + t = get_arg(req, b"t", b"").strip() + replace = parse_replace_arg(get_arg(req, b"replace", b"true")) + offset = parse_offset_arg(get_arg(req, b"offset", None)) if not t: if not replace: @@ -295,11 +291,11 @@ class FileNodeHandler(Resource, ReplaceMeMixin, object): @render_exception def render_POST(self, req): - t = get_arg(req, "t", "").strip() - replace = boolean_of_arg(get_arg(req, "replace", "true")) - if t == "check": + t = get_arg(req, b"t", b"").strip() + replace = boolean_of_arg(get_arg(req, b"replace", b"true")) + if t == b"check": d = self._POST_check(req) - elif t == "upload": + elif t == b"upload": # like PUT, but get the file data from an HTML form's input field # We could get here from POST /uri/mutablefilecap?t=upload, # or POST /uri/path/file?t=upload, or @@ -315,10 +311,7 @@ class FileNodeHandler(Resource, ReplaceMeMixin, object): else: raise WebError("POST to file: bad t=%s" % t) - when_done = get_arg(req, "when_done", None) - if when_done: - d.addCallback(lambda res: url.URL.fromString(when_done)) - return d + return handle_when_done(req, d) def _maybe_literal(self, res, Results_Class): if res: @@ -485,24 +478,13 @@ class FileDownloader(Resource, object): if req.method == "HEAD": return "" - finished = [] - def _request_finished(ign): - finished.append(True) - req.notifyFinish().addBoth(_request_finished) - d = 
self.filenode.read(req, first, size) - def _finished(ign): - if not finished: - req.finish() def _error(f): - lp = log.msg("error during GET", facility="tahoe.webish", failure=f, - level=log.UNUSUAL, umid="xSiF3w") - if finished: - log.msg("but it's too late to tell them", parent=lp, - level=log.UNUSUAL, umid="j1xIbw") - return - req._tahoe_request_had_error = f # for HTTP-style logging + if f.check(defer.CancelledError): + # The HTTP connection was lost and we no longer have anywhere + # to send our result. Let this pass through. + return f if req.startedWriting: # The content-type is already set, and the response code has # already been sent, so we can't provide a clean error @@ -513,15 +495,16 @@ class FileDownloader(Resource, object): # error response be shorter than the intended results. # # We don't have a lot of options, unfortunately. - req.write("problem during download\n") - req.finish() + return b"problem during download\n" else: # We haven't written anything yet, so we can provide a # sensible error message. - eh = MyExceptionHandler() - eh.renderHTTP_exception(req, f) - d.addCallbacks(_finished, _error) - return req.deferred + return f + d.addCallbacks( + lambda ignored: None, + _error, + ) + return d def _file_json_metadata(req, filenode, edge_metadata): diff --git a/src/allmydata/web/introweb.py b/src/allmydata/web/introweb.py index f57a5232a..280d6cc26 100644 --- a/src/allmydata/web/introweb.py +++ b/src/allmydata/web/introweb.py @@ -5,9 +5,7 @@ from twisted.web.template import Element, XMLFile, renderElement, renderer from twisted.python.filepath import FilePath from twisted.web import static import allmydata -import json -from allmydata.version_checks import get_package_versions_string -from allmydata.util import idlib +from allmydata.util import idlib, jsonbytes as json from allmydata.web.common import ( render_time, MultiFormatResource, @@ -28,10 +26,10 @@ class IntroducerRoot(MultiFormatResource): self.introducer_node = introducer_node self.introducer_service = introducer_node.getServiceNamed("introducer") # necessary as a root Resource - self.putChild("", self) + self.putChild(b"", self) static_dir = resource_filename("allmydata.web", "static") for filen in os.listdir(static_dir): - self.putChild(filen, static.File(os.path.join(static_dir, filen))) + self.putChild(filen.encode("utf-8"), static.File(os.path.join(static_dir, filen))) def _create_element(self): """ @@ -68,7 +66,7 @@ class IntroducerRoot(MultiFormatResource): announcement_summary[service_name] += 1 res[u"announcement_summary"] = announcement_summary - return json.dumps(res, indent=1) + b"\n" + return (json.dumps(res, indent=1) + "\n").encode("utf-8") class IntroducerRootElement(Element): @@ -89,7 +87,7 @@ class IntroducerRootElement(Element): self.introducer_service = introducer_service self.node_data_dict = { "my_nodeid": idlib.nodeid_b2a(self.introducer_node.nodeid), - "version": get_package_versions_string(), + "version": allmydata.__full_version__, "import_path": str(allmydata).replace("/", "/ "), # XXX kludge for wrapping "rendered_at": render_time(time.time()), } @@ -105,7 +103,7 @@ class IntroducerRootElement(Element): if ad.service_name not in services: services[ad.service_name] = 0 services[ad.service_name] += 1 - service_names = services.keys() + service_names = list(services.keys()) service_names.sort() return u", ".join(u"{}: {}".format(service_name, services[service_name]) for service_name in service_names) diff --git a/src/allmydata/web/logs.py b/src/allmydata/web/logs.py index 
diff --git a/src/allmydata/web/logs.py b/src/allmydata/web/logs.py
index 0ba8b17e9..6f15a3ca9 100644
--- a/src/allmydata/web/logs.py
+++ b/src/allmydata/web/logs.py
@@ -1,3 +1,6 @@
+"""
+Ported to Python 3.
+"""
 from __future__ import (
     print_function,
     unicode_literals,
@@ -49,7 +52,11 @@ class TokenAuthenticatedWebSocketServerProtocol(WebSocketServerProtocol):
         """
         # probably want a try/except around here? what do we do if
         # transmission fails or anything else bad happens?
-        self.sendMessage(json.dumps(message))
+        encoded = json.dumps(message)
+        if isinstance(encoded, str):
+            # On Python 3 dumps() returns Unicode...
+            encoded = encoded.encode("utf-8")
+        self.sendMessage(encoded)

     def onOpen(self):
         """
diff --git a/src/allmydata/web/operations.py b/src/allmydata/web/operations.py
index 21c2ec7ef..2ba87c5ec 100644
--- a/src/allmydata/web/operations.py
+++ b/src/allmydata/web/operations.py
@@ -1,10 +1,15 @@
 import time
-from nevow import url
+from hyperlink import (
+    DecodedURL,
+)
 from twisted.web.template import (
     renderer,
     tags as T,
 )
+from twisted.python.urlpath import (
+    URLPath,
+)
 from twisted.python.failure import Failure
 from twisted.internet import reactor, defer
 from twisted.web import resource
@@ -14,7 +19,6 @@ from twisted.application import service

 from allmydata.web.common import (
     WebError,
-    get_root,
     get_arg,
     boolean_of_arg,
     exception_to_child,
@@ -84,17 +88,14 @@ class OphandleTable(resource.Resource, service.Service):
         """
         :param allmydata.webish.MyRequest req:
         """
-        ophandle = get_arg(req, "ophandle")
+        ophandle = get_arg(req, "ophandle").decode("utf-8")
         assert ophandle
-        target = get_root(req) + "/operations/" + ophandle
+        here = DecodedURL.from_text(unicode(URLPath.fromRequest(req)))
+        target = here.click(u"/").child(u"operations", ophandle)
         output = get_arg(req, "output")
         if output:
-            target = target + "?output=%s" % output
-
-        # XXX: We have to use nevow.url here because nevow.appserver
-        # is unhappy with anything else; so this gets its own ticket.
-        # https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3314
-        return url.URL.fromString(target)
+            target = target.add(u"output", output.decode("utf-8"))
+        return target

     @exception_to_child
     def getChild(self, name, req):
@@ -151,8 +152,6 @@ class ReloadMixin(object):
     def refresh(self, req, tag):
         if self.monitor.is_finished():
             return ""
-        # dreid suggests ctx.tag(**dict([("http-equiv", "refresh")]))
-        # but I can't tell if he's joking or not
         tag.attributes["http-equiv"] = "refresh"
         tag.attributes["content"] = str(self.REFRESH_TIME)
         return tag
@@ -160,12 +159,12 @@ class ReloadMixin(object):
     @renderer
     def reload(self, req, tag):
         if self.monitor.is_finished():
-            return ""
+            return b""
         # url.gethere would break a proxy, so the correct thing to do is
         # req.path[-1] + queryargs
         ophandle = req.prepath[-1]
-        reload_target = ophandle + "?output=html"
-        cancel_target = ophandle + "?t=cancel"
+        reload_target = ophandle + b"?output=html"
+        cancel_target = ophandle + b"?t=cancel"
         cancel_button = T.form(T.input(type="submit", value="Cancel"),
                                action=cancel_target,
                                method="POST",
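The redirect_to rewrite above is the template for the nevow.url removals throughout this patch: URL construction moves to hyperlink's DecodedURL, which handles quoting and query parameters, instead of string concatenation. A standalone illustration of the three calls used there (host, port, and ophandle are invented for the example):

    from hyperlink import DecodedURL

    here = DecodedURL.from_text(u"http://127.0.0.1:3456/operations/x")
    target = here.click(u"/").child(u"operations", u"abc123")
    target = target.add(u"output", u"html")
    assert target.to_text() == u"http://127.0.0.1:3456/operations/abc123?output=html"

click(u"/") resolves back to the site root, child() appends percent-quoted path segments, and add() appends a query pair, so the result stays well-formed no matter what the ophandle contains.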
diff --git a/src/allmydata/web/private.py b/src/allmydata/web/private.py
index fea058405..405ca75e7 100644
--- a/src/allmydata/web/private.py
+++ b/src/allmydata/web/private.py
@@ -61,7 +61,16 @@ class IToken(ICredentials):
     pass


-@implementer(IToken)
+# Workaround for Shoobx/mypy-zope#26, where without suitable
+# stubs for twisted classes (ICredentials), IToken does not
+# appear to be an Interface.  The proper fix appears to be to
+# create stubs for twisted
+# (https://twistedmatrix.com/trac/ticket/9717).  For now,
+# bypassing the inline decorator syntax works around the issue.
+_itoken_impl = implementer(IToken)
+
+
+@_itoken_impl
 @attr.s
 class Token(object):
     proposed_token = attr.ib(type=bytes)
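The workaround generalizes: implementer(IToken) is an ordinary callable, and binding its result to a name before applying it is semantically identical to the inline decorator, so only mypy-zope's view of the code changes. A self-contained illustration:

    from zope.interface import Interface, implementer

    class IExample(Interface):
        pass

    _example_impl = implementer(IExample)

    @_example_impl   # exactly equivalent to @implementer(IExample)
    class Example(object):
        pass

    assert IExample.implementedBy(Example)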
- self.putChild("private", create_private_tree(client.get_auth_token)) + self.putChild(b"private", create_private_tree(client.get_auth_token)) - self.putChild("file", FileHandler(client)) - self.putChild("named", FileHandler(client)) - self.putChild("status", status.Status(client.get_history())) - self.putChild("statistics", status.Statistics(client.stats_provider)) + self.putChild(b"file", FileHandler(client)) + self.putChild(b"named", FileHandler(client)) + self.putChild(b"status", status.Status(client.get_history())) + self.putChild(b"statistics", status.Statistics(client.stats_provider)) static_dir = resource_filename("allmydata.web", "static") for filen in os.listdir(static_dir): - self.putChild(filen, static.File(os.path.join(static_dir, filen))) + child_path = filen + if PY3: + child_path = filen.encode("utf-8") + self.putChild(child_path, static.File(os.path.join(static_dir, filen))) - self.putChild("report_incident", IncidentReporter()) + self.putChild(b"report_incident", IncidentReporter()) @exception_to_child def getChild(self, path, request): @@ -566,7 +571,7 @@ class RootElement(Element): @renderer def version(self, req, tag): - return tag(get_package_versions_string()) + return tag(allmydata.__full_version__) @renderer def import_path(self, req, tag): diff --git a/src/allmydata/web/statistics.xhtml b/src/allmydata/web/statistics.xhtml index 42376079d..2cc7e2b5a 100644 --- a/src/allmydata/web/statistics.xhtml +++ b/src/allmydata/web/statistics.xhtml @@ -12,8 +12,6 @@

diff --git a/src/allmydata/web/statistics.xhtml b/src/allmydata/web/statistics.xhtml
index 42376079d..2cc7e2b5a 100644
--- a/src/allmydata/web/statistics.xhtml
+++ b/src/allmydata/web/statistics.xhtml
@@ -12,8 +12,6 @@

   <h1>General</h1>

-    <li>Load Average: <t:transparent t:render="load_average" /></li>
-    <li>Peak Load: <t:transparent t:render="peak_load" /></li>
     <li>Files Uploaded (immutable): <t:transparent t:render="uploads" /></li>
     <li>Files Downloaded (immutable): <t:transparent t:render="downloads" /></li>
     <li>Files Published (mutable): <t:transparent t:render="mutable_uploads" /></li>
diff --git a/src/allmydata/web/status.py b/src/allmydata/web/status.py
index 7f6020a99..2002b2fdf 100644
--- a/src/allmydata/web/status.py
+++ b/src/allmydata/web/status.py
@@ -1,8 +1,8 @@
+from past.builtins import long, unicode

 import pprint
 import itertools
 import hashlib
-import json
 from twisted.internet import defer
 from twisted.python.filepath import FilePath
 from twisted.web.resource import Resource
@@ -13,7 +13,7 @@ from twisted.web.template import (
     renderElement,
     tags,
 )
-from allmydata.util import base32, idlib
+from allmydata.util import base32, idlib, jsonbytes as json
 from allmydata.web.common import (
     abbreviate_time,
     abbreviate_rate,
@@ -1297,6 +1297,7 @@ class Status(MultiFormatResource):
         except ValueError:
             raise WebError("no '-' in '{}'".format(path))
         count = int(count_s)
+        stype = unicode(stype, "ascii")
         if stype == "up":
             for s in itertools.chain(h.list_all_upload_statuses(),
                                      h.list_all_helper_statuses()):
@@ -1335,7 +1336,7 @@ class Status(MultiFormatResource):
         active = [s
                   for s in self._get_all_statuses()
                   if s.get_active()]
-        active.sort(lambda a, b: cmp(a.get_started(), b.get_started()))
+        active.sort(key=lambda a: a.get_started())
         active.reverse()
         return active
@@ -1343,7 +1344,7 @@ class Status(MultiFormatResource):
         recent = [s
                   for s in self._get_all_statuses()
                   if not s.get_active()]
-        recent.sort(lambda a, b: cmp(a.get_started(), b.get_started()))
+        recent.sort(key=lambda a: a.get_started())
         recent.reverse()
         return recent
@@ -1373,7 +1374,6 @@ class StatusElement(Element):
         started_s = render_time(op.get_started())
         result["started"] = started_s
-
         si_s = base32.b2a_or_none(op.get_storage_index())
         if si_s is None:
             si_s = "(None)"
@@ -1565,14 +1565,6 @@ class StatisticsElement(Element):
         # Note that `counters` can be empty.
         self._stats = provider.get_stats()

-    @renderer
-    def load_average(self, req, tag):
-        return tag(str(self._stats["stats"].get("load_monitor.avg_load")))
-
-    @renderer
-    def peak_load(self, req, tag):
-        return tag(str(self._stats["stats"].get("load_monitor.max_load")))
-
     @renderer
     def uploads(self, req, tag):
         files = self._stats["counters"].get("uploader.files_uploaded", 0)
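The sort rewrites above are forced by Python 3, which drops both the cmp argument to list.sort and the cmp builtin; the key form also runs on Python 2, so one spelling serves both. Illustrated with a stand-in class (FakeStatus is invented for the example):

    class FakeStatus(object):
        def __init__(self, started):
            self._started = started
        def get_started(self):
            return self._started

    statuses = [FakeStatus(3.0), FakeStatus(1.0), FakeStatus(2.0)]
    # Python 2 only:
    #   statuses.sort(lambda a, b: cmp(a.get_started(), b.get_started()))
    statuses.sort(key=lambda s: s.get_started())
    statuses.reverse()   # newest first, matching the code above
    assert [s.get_started() for s in statuses] == [3.0, 2.0, 1.0]

As a bonus, the key form computes one key per element instead of calling back into Python for every pairwise comparison.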
diff --git a/src/allmydata/web/storage.py b/src/allmydata/web/storage.py
index cf3264dac..82c789d9b 100644
--- a/src/allmydata/web/storage.py
+++ b/src/allmydata/web/storage.py
@@ -1,5 +1,6 @@
+from future.utils import PY2

-import time, json
+import time
 from twisted.python.filepath import FilePath
 from twisted.web.template import (
     Element,
@@ -13,7 +14,7 @@ from allmydata.web.common_py3 import (
     MultiFormatResource
 )
 from allmydata.util.abbreviate import abbreviate_space
-from allmydata.util import time_format, idlib
+from allmydata.util import time_format, idlib, jsonbytes as json


 def remove_prefix(s, prefix):
@@ -317,4 +318,7 @@ class StorageStatus(MultiFormatResource):
             "lease-checker": self._storage.lease_checker.get_state(),
             "lease-checker-progress": self._storage.lease_checker.get_progress(),
         }
-        return json.dumps(d, indent=1) + "\n"
+        result = json.dumps(d, indent=1) + "\n"
+        if PY2:
+            result = result.decode("utf-8")
+        return result.encode("utf-8")
diff --git a/src/allmydata/web/storage_plugins.py b/src/allmydata/web/storage_plugins.py
index 57f636f50..939047c6e 100644
--- a/src/allmydata/web/storage_plugins.py
+++ b/src/allmydata/web/storage_plugins.py
@@ -29,7 +29,8 @@ class StoragePlugins(Resource, object):
         """
         resources = self._client.get_client_storage_plugin_web_resources()
         try:
-            result = resources[segment]
+            # Technically client could be using some other encoding?
+            result = resources[segment.decode("utf-8")]
         except KeyError:
             result = NoResource()
         self.putChild(segment, result)
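The tail of the StorageStatus hunk above is the normalize-then-encode dance this patch uses wherever JSON must come back as bytes for an HTTP body: on Python 2, dumps() yields str (bytes) that must pass through text before re-encoding, while on Python 3 it yields str directly. The same pattern in isolation, using stdlib json where the patch uses its jsonbytes wrapper (future is already a dependency of this port):

    import json
    from future.utils import PY2

    def json_body(obj):
        result = json.dumps(obj, indent=1) + "\n"
        if PY2:
            result = result.decode("utf-8")   # bytes -> unicode
        return result.encode("utf-8")         # text -> bytes, both Pythons

    assert isinstance(json_body({"lease-checker": None}), bytes)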
diff --git a/src/allmydata/web/unlinked.py b/src/allmydata/web/unlinked.py
index f94e32240..e420a0371 100644
--- a/src/allmydata/web/unlinked.py
+++ b/src/allmydata/web/unlinked.py
@@ -1,5 +1,6 @@
 import urllib
+
 from twisted.web import http
 from twisted.internet import defer
 from twisted.python.filepath import FilePath
@@ -10,7 +11,6 @@ from twisted.web.template import (
     renderElement,
     tags,
 )
-from nevow import url
 from allmydata.immutable.upload import FileHandle
 from allmydata.mutable.publish import MutableFileHandle
 from allmydata.web.common import (
@@ -21,6 +21,7 @@ from allmydata.web.common import (
     get_format,
     get_mutable_type,
     render_exception,
+    url_for_string,
 )
 from allmydata.web import status
@@ -66,7 +67,7 @@ def POSTUnlinkedCHK(req, client):
         def _done(upload_results, redir_to):
             if "%(uri)s" in redir_to:
                 redir_to = redir_to.replace("%(uri)s", urllib.quote(upload_results.get_uri()))
-            return url.URL.fromString(redir_to)
+            return url_for_string(req, redir_to)
         d.addCallback(_done, when_done)
     else:
         # return the Upload Results page, which includes the URI
@@ -160,7 +161,6 @@ def POSTUnlinkedCreateDirectory(req, client):
             new_url = "uri/" + urllib.quote(res.get_uri())
             req.setResponseCode(http.SEE_OTHER) # 303
             req.setHeader('location', new_url)
-            req.finish()
             return ''
         d.addCallback(_then_redir)
     else:
@@ -179,7 +179,6 @@ def POSTUnlinkedCreateDirectoryWithChildren(req, client):
             new_url = "uri/" + urllib.quote(res.get_uri())
             req.setResponseCode(http.SEE_OTHER) # 303
             req.setHeader('location', new_url)
-            req.finish()
             return ''
         d.addCallback(_then_redir)
     else:
@@ -198,7 +197,6 @@ def POSTUnlinkedCreateImmutableDirectory(req, client):
             new_url = "uri/" + urllib.quote(res.get_uri())
             req.setResponseCode(http.SEE_OTHER) # 303
             req.setHeader('location', new_url)
-            req.finish()
             return ''
         d.addCallback(_then_redir)
     else:
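The three req.finish() deletions above all fix the same latent problem: these callbacks run under the web layer's render machinery, which writes the returned body and finishes the request itself, so finishing the request here as well would tear it down twice. A schematic of the corrected callback shape, with an invented stand-in for the request object:

    class FakeRequest(object):
        # Stand-in exposing just the methods the callback touches.
        def setResponseCode(self, code):
            self.code = code
        def setHeader(self, name, value):
            self.header = (name, value)

    req = FakeRequest()

    def _then_redir(res):
        req.setResponseCode(303)                   # http.SEE_OTHER
        req.setHeader('location', 'uri/some-cap')
        # Return the body and stop: the machinery that invoked the
        # render calls req.finish() on its own.
        return ''

    assert _then_redir(None) == ''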
diff --git a/src/allmydata/webish.py b/src/allmydata/webish.py
index 432a8cf2f..e90fa573a 100644
--- a/src/allmydata/webish.py
+++ b/src/allmydata/webish.py
@@ -1,47 +1,75 @@
-import re, time
+"""
+Ported to Python 3.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
+from six import ensure_str
+
+import re, time, tempfile
+
+from cgi import (
+    FieldStorage,
+)
+from io import (
+    BytesIO,
+)
+
 from twisted.application import service, strports, internet
-from twisted.web import http, static
+from twisted.web import static
+from twisted.web.http import (
+    parse_qs,
+)
+from twisted.web.server import (
+    Request,
+    Site,
+)
 from twisted.internet import defer
 from twisted.internet.address import (
     IPv4Address,
     IPv6Address,
 )
-from nevow import appserver, inevow
 from allmydata.util import log, fileutil

 from allmydata.web import introweb, root
-from allmydata.web.common import MyExceptionHandler
 from allmydata.web.operations import OphandleTable

 from .web.storage_plugins import (
     StoragePlugins,
 )

-# we must override twisted.web.http.Request.requestReceived with a version
-# that doesn't use cgi.parse_multipart() . Since we actually use Nevow, we
-# override the nevow-specific subclass, nevow.appserver.NevowRequest . This
-# is an exact copy of twisted.web.http.Request (from SVN HEAD on 10-Aug-2007)
-# that modifies the way form arguments are parsed. Note that this sort of
-# surgery may induce a dependency upon a particular version of twisted.web

+class TahoeLAFSRequest(Request, object):
+    """
+    ``TahoeLAFSRequest`` adds several features to a Twisted Web ``Request``
+    that are useful for Tahoe-LAFS.

-parse_qs = http.parse_qs
-class MyRequest(appserver.NevowRequest, object):
+    :ivar NoneType|FieldStorage fields: For POST requests, a structured
+        representation of the contents of the request body.  For anything
+        else, ``None``.
+    """
     fields = None
-    _tahoe_request_had_error = None

     def requestReceived(self, command, path, version):
-        """Called by channel when all data has been received.
-
-        This method is not intended for users.
         """
-        self.content.seek(0,0)
+        Called by channel when all data has been received.
+
+        Override the base implementation to apply certain site-wide policies
+        and to provide less memory-intensive multipart/form-post handling for
+        large file uploads.
+        """
+        self.content.seek(0)
         self.args = {}
         self.stack = []

-        self.setHeader("Referrer-Policy", "no-referrer")
         self.method, self.uri = command, path
         self.clientproto = version
-        x = self.uri.split('?', 1)
+        x = self.uri.split(b'?', 1)

         if len(x) == 1:
             self.path = self.uri
@@ -49,93 +77,42 @@ class TahoeLAFSRequest(Request, object):
             self.path, argstring = x
             self.args = parse_qs(argstring, 1)

-        # Adding security headers. These will be sent for *all* HTTP requests.
-        # See https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/X-Frame-Options
-        self.responseHeaders.setRawHeaders("X-Frame-Options", ["DENY"])
+        if self.method == b'POST':
+            # We use FieldStorage here because it performs better than
+            # cgi.parse_multipart(self.content, pdict) which is what
+            # twisted.web.http.Request uses.

-        # Argument processing.
+            headers = {
+                ensure_str(name.lower()): ensure_str(value[-1])
+                for (name, value)
+                in self.requestHeaders.getAllRawHeaders()
+            }

-##      The original twisted.web.http.Request.requestReceived code parsed the
-##      content and added the form fields it found there to self.args . It
-##      did this with cgi.parse_multipart, which holds the arguments in RAM
-##      and is thus unsuitable for large file uploads. The Nevow subclass
-##      (nevow.appserver.NevowRequest) uses cgi.FieldStorage instead (putting
-##      the results in self.fields), which is much more memory-efficient.
-##      Since we know we're using Nevow, we can anticipate these arguments
-##      appearing in self.fields instead of self.args, and thus skip the
-##      parse-content-into-self.args step.
+            if 'content-length' not in headers:
+                # Python 3's cgi module would really, really like us to set Content-Length.
+                self.content.seek(0, 2)
+                headers['content-length'] = str(self.content.tell())
+                self.content.seek(0)
+
+            self.fields = FieldStorage(self.content, headers, environ={'REQUEST_METHOD': 'POST'})
+            self.content.seek(0)
+
+        self._tahoeLAFSSecurityPolicy()

-##      args = self.args
-##      ctype = self.getHeader('content-type')
-##      if self.method == "POST" and ctype:
-##          mfd = 'multipart/form-data'
-##          key, pdict = cgi.parse_header(ctype)
-##          if key == 'application/x-www-form-urlencoded':
-##              args.update(parse_qs(self.content.read(), 1))
-##          elif key == mfd:
-##              try:
-##                  args.update(cgi.parse_multipart(self.content, pdict))
-##              except KeyError, e:
-##                  if e.args[0] == 'content-disposition':
-##                      # Parse_multipart can't cope with missing
-##                      # content-dispostion headers in multipart/form-data
-##                      # parts, so we catch the exception and tell the client
-##                      # it was a bad request.
-##                      self.channel.transport.write(
-##                          "HTTP/1.1 400 Bad Request\r\n\r\n")
-##                      self.channel.transport.loseConnection()
-##                      return
-##                  raise

         self.processing_started_timestamp = time.time()
         self.process()

-    def _logger(self):
-        # we build up a log string that hides most of the cap, to preserve
-        # user privacy. We retain the query args so we can identify things
-        # like t=json. Then we send it to the flog. We make no attempt to
-        # match apache formatting. TODO: when we move to DSA dirnodes and
-        # shorter caps, consider exposing a few characters of the cap, or
-        # maybe a few characters of its hash.
-        x = self.uri.split("?", 1)
-        if len(x) == 1:
-            # no query args
-            path = self.uri
-            queryargs = ""
-        else:
-            path, queryargs = x
-            # there is a form handler which redirects POST /uri?uri=FOO into
-            # GET /uri/FOO so folks can paste in non-HTTP-prefixed uris. Make
-            # sure we censor these too.
-            if queryargs.startswith("uri="):
-                queryargs = "[uri=CENSORED]"
-            queryargs = "?" + queryargs
-        if path.startswith("/uri"):
-            path = "/uri/[CENSORED].."
-        elif path.startswith("/file"):
-            path = "/file/[CENSORED].."
-        elif path.startswith("/named"):
-            path = "/named/[CENSORED].."
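What makes FieldStorage suitable here is that it parses from the supplied file-like object rather than materializing every field in RAM the way cgi.parse_multipart does, so a multi-gigabyte upload never has to fit in memory. A standalone run of the same call (the form body and boundary are made up for the example):

    from cgi import FieldStorage
    from io import BytesIO

    body = (b"--xyz\r\n"
            b'Content-Disposition: form-data; name="t"\r\n'
            b"\r\n"
            b"upload\r\n"
            b"--xyz--\r\n")
    headers = {
        "content-type": "multipart/form-data; boundary=xyz",
        "content-length": str(len(body)),
    }
    fields = FieldStorage(BytesIO(body), headers,
                          environ={"REQUEST_METHOD": "POST"})
    assert fields["t"].value == "upload"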
-
-        uri = path + queryargs
-
-        error = ""
-        if self._tahoe_request_had_error:
-            error = " [ERROR]"
-
-        log.msg(
-            format=(
-                "web: %(clientip)s %(method)s %(uri)s %(code)s "
-                "%(length)s%(error)s"
-            ),
-            clientip=_get_client_ip(self),
-            method=self.method,
-            uri=uri,
-            code=self.code,
-            length=(self.sentLength or "-"),
-            error=error,
-            facility="tahoe.webish",
-            level=log.OPERATIONAL,
-        )
+    def _tahoeLAFSSecurityPolicy(self):
+        """
+        Set response properties related to Tahoe-LAFS-imposed security policy.
+        This will ensure that all HTTP requests received by the Tahoe-LAFS
+        HTTP server have this policy imposed, regardless of other
+        implementation details.
+        """
+        # See https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/X-Frame-Options
+        self.responseHeaders.setRawHeaders("X-Frame-Options", ["DENY"])
+        # See https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Referrer-Policy
+        self.setHeader("Referrer-Policy", "no-referrer")


 def _get_client_ip(request):
@@ -150,10 +127,75 @@ def _get_client_ip(request):
     return None


+def _logFormatter(logDateTime, request):
+    # we build up a log string that hides most of the cap, to preserve
+    # user privacy. We retain the query args so we can identify things
+    # like t=json. Then we send it to the flog. We make no attempt to
+    # match apache formatting. TODO: when we move to DSA dirnodes and
+    # shorter caps, consider exposing a few characters of the cap, or
+    # maybe a few characters of its hash.
+    x = request.uri.split(b"?", 1)
+    if len(x) == 1:
+        # no query args
+        path = request.uri
+        queryargs = b""
+    else:
+        path, queryargs = x
+        # there is a form handler which redirects POST /uri?uri=FOO into
+        # GET /uri/FOO so folks can paste in non-HTTP-prefixed uris. Make
+        # sure we censor these too.
+        if queryargs.startswith(b"uri="):
+            queryargs = b"uri=[CENSORED]"
+        queryargs = b"?" + queryargs
+    if path.startswith(b"/uri/"):
+        path = b"/uri/[CENSORED]"
+    elif path.startswith(b"/file/"):
+        path = b"/file/[CENSORED]"
+    elif path.startswith(b"/named/"):
+        path = b"/named/[CENSORED]"
+
+    uri = path + queryargs
+
+    template = "web: %(clientip)s %(method)s %(uri)s %(code)s %(length)s"
+    return template % dict(
+        clientip=_get_client_ip(request),
+        method=request.method,
+        uri=uri,
+        code=request.code,
+        length=(request.sentLength or "-"),
+        facility="tahoe.webish",
+        level=log.OPERATIONAL,
+    )
+
+
+class TahoeLAFSSite(Site, object):
+    """
+    The HTTP protocol factory used by Tahoe-LAFS.
+
+    Among the behaviors provided:
+
+    * A configurable temporary directory where large request bodies can be
+      written so they don't stay in memory.
+
+    * A log formatter that writes some access logs but omits capability
+      strings to help keep them secret.
+    """
+    requestFactory = TahoeLAFSRequest
+
+    def __init__(self, tempdir, *args, **kwargs):
+        Site.__init__(self, *args, logFormatter=_logFormatter, **kwargs)
+        self._tempdir = tempdir
+
+    def getContentFile(self, length):
+        if length is None or length >= 1024 * 1024:
+            return tempfile.TemporaryFile(dir=self._tempdir)
+        return BytesIO()
+
+
 class WebishServer(service.MultiService):
     name = "webish"

-    def __init__(self, client, webport, nodeurl_path=None, staticdir=None,
+    def __init__(self, client, webport, tempdir, nodeurl_path=None, staticdir=None,
                  clock=None, now_fn=time.time):
         service.MultiService.__init__(self)
         # the 'data' argument to all render() methods default to the Client
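TahoeLAFSSite drops in anywhere a plain twisted.web.server.Site would go; the tempdir argument is threaded through to getContentFile, the hook consulted when a buffer is allocated for an incoming request body. A minimal wiring sketch (port and directory are invented, and the directory must already exist):

    from twisted.internet import reactor
    from twisted.web.resource import Resource

    root = Resource()
    # Bodies of unknown size or >= 1 MiB spool to files under this
    # directory instead of accumulating in RAM; smaller ones use BytesIO.
    site = TahoeLAFSSite("/tmp/tahoe-request-bodies", root)
    reactor.listenTCP(3456, site)
    reactor.run()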
@@ -163,7 +205,7 @@ class WebishServer(service.MultiService):
         # time in a deterministic manner.

         self.root = root.Root(client, clock, now_fn)
-        self.buildServer(webport, nodeurl_path, staticdir)
+        self.buildServer(webport, tempdir, nodeurl_path, staticdir)

         # If set, clock is a twisted.internet.task.Clock that the tests
         # use to test ophandle expiration.
@@ -173,17 +215,17 @@ class WebishServer(service.MultiService):

         self.root.putChild(b"storage-plugins", StoragePlugins(client))

-    def buildServer(self, webport, nodeurl_path, staticdir):
+    def buildServer(self, webport, tempdir, nodeurl_path, staticdir):
         self.webport = webport
-        self.site = site = appserver.NevowSite(self.root)
-        self.site.requestFactory = MyRequest
-        self.site.remember(MyExceptionHandler(), inevow.ICanHandleException)
+        self.site = TahoeLAFSSite(tempdir, self.root)
         self.staticdir = staticdir # so tests can check
         if staticdir:
             self.root.putChild("static", static.File(staticdir))
         if re.search(r'^\d', webport):
             webport = "tcp:"+webport # twisted warns about bare "0" or "3456"
-        s = strports.service(webport, site)
+        # strports must be native strings.
+        webport = ensure_str(webport)
+        s = strports.service(webport, self.site)
         s.setServiceParent(self)

         self._scheme = None
@@ -253,4 +295,4 @@ class IntroducerWebishServer(WebishServer):
     def __init__(self, introducer, webport, nodeurl_path=None, staticdir=None):
         service.MultiService.__init__(self)
         self.root = introweb.IntroducerRoot(introducer)
-        self.buildServer(webport, nodeurl_path, staticdir)
+        self.buildServer(webport, tempfile.tempdir, nodeurl_path, staticdir)
diff --git a/src/allmydata/windows/fixups.py b/src/allmydata/windows/fixups.py
index e7f045b95..e98aa8a67 100644
--- a/src/allmydata/windows/fixups.py
+++ b/src/allmydata/windows/fixups.py
@@ -217,7 +217,12 @@ def initialize():
     # Instead it "mangles" or escapes them using \x7F as an escape character, which we
     # unescape here.
     def unmangle(s):
-        return re.sub(u'\\x7F[0-9a-fA-F]*\\;', lambda m: unichr(int(m.group(0)[1:-1], 16)), s)
+        return re.sub(
+            u'\\x7F[0-9a-fA-F]*\\;',
+            # type ignored for 'unichr' (Python 2 only)
+            lambda m: unichr(int(m.group(0)[1:-1], 16)),  # type: ignore
+            s,
+        )

     try:
         argv = [unmangle(argv_unicode[i]).encode('utf-8') for i in xrange(0, argc.value)]
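A concrete value makes the escape format clearer: \x7F introduces a hex codepoint, terminated by ';'. Under Python 2 (unichr, and therefore this snippet, is Python 2 only):

    import re

    def unmangle(s):
        return re.sub(
            u'\\x7F[0-9a-fA-F]*\\;',
            lambda m: unichr(int(m.group(0)[1:-1], 16)),
            s,
        )

    # u'\x7Fe9;' escapes U+00E9 (e-acute):
    assert unmangle(u'caf\x7Fe9;') == u'caf\xe9'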
diff --git a/static/tahoe.py b/static/tahoe.py
index cac53bdfa..c18f60e2c 100644
--- a/static/tahoe.py
+++ b/static/tahoe.py
@@ -3,23 +3,19 @@
 # Import this first to suppress deprecation warnings.
 import allmydata

-# nevow requires all these for its voodoo module import time adaptor registrations
-from nevow import accessors, appserver, static, rend, url, util, query, i18n, flat
-from nevow import guard, stan, testutil, context
-from nevow.flat import flatmdom, flatstan, twist
-from formless import webform, processors, annotate, iformless
 from decimal import Decimal
 from xml.dom import minidom

 import allmydata.web

-# junk to appease pyflakes's outrage
-[
-    accessors, appserver, static, rend, url, util, query, i18n, flat, guard, stan, testutil,
-    context, flatmdom, flatstan, twist, webform, processors, annotate, iformless, Decimal,
-    minidom, allmydata,
-]
+# We import these things to give PyInstaller's dependency resolver some hints
+# about what it needs to include.  We don't use them otherwise _here_ but
+# other parts of the codebase do.  pyflakes points out that they are unused
+# unless we use them.  So ... use them.
+Decimal
+minidom
+allmydata

 from allmydata.scripts import runner
-runner.run()
\ No newline at end of file
+runner.run()
diff --git a/tox.ini b/tox.ini
index 97ad3883a..915981e0c 100644
--- a/tox.ini
+++ b/tox.ini
@@ -7,7 +7,7 @@
 twisted = 1

 [tox]
-envlist = codechecks,py27,py36,pypy27
+envlist = typechecks,codechecks,py27,py36,pypy27
 minversion = 2.4

 [testenv]
@@ -77,7 +77,7 @@ setenv = COVERAGE_PROCESS_START=.coveragerc
 commands =
     # NOTE: 'run with "py.test --keep-tempdir -s -v integration/" to debug failures'
-    py.test --coverage -v {posargs:integration}
+    py.test --timeout=1800 --coverage -v {posargs:integration}
     coverage combine
     coverage report
@@ -95,12 +95,16 @@ setenv =
     # .decode(getattr(sys.stdout, "encoding", "utf8"))
     # `TypeError: decode() argument 1 must be string, not None`
     PYTHONIOENCODING=utf_8
+
+    # If no positional arguments are given, try to run the checks on the
+    # entire codebase, including various pieces of supporting code.
+    DEFAULT_FILES=src integration static misc setup.py
 commands =
-    flake8 src static misc setup.py
-    python misc/coding_tools/check-umids.py src
-    python misc/coding_tools/check-debugging.py
-    python misc/coding_tools/find-trailing-spaces.py -r src static misc setup.py
-    python misc/coding_tools/check-miscaptures.py
+    flake8 {posargs:{env:DEFAULT_FILES}}
+    python misc/coding_tools/check-umids.py {posargs:{env:DEFAULT_FILES}}
+    python misc/coding_tools/check-debugging.py {posargs:{env:DEFAULT_FILES}}
+    python misc/coding_tools/find-trailing-spaces.py -r {posargs:{env:DEFAULT_FILES}}
+    python misc/coding_tools/check-miscaptures.py {posargs:{env:DEFAULT_FILES}}

     # If towncrier.check fails, you forgot to add a towncrier news
     # fragment explaining the change in this branch. Create one at
@@ -108,6 +112,16 @@ commands =
     # file. See pyproject.toml for legal values.
     python -m towncrier.check --pyproject towncrier.pyproject.toml

+
+[testenv:typechecks]
+skip_install = True
+deps =
+    mypy
+    git+https://github.com/Shoobx/mypy-zope
+    git+https://github.com/warner/foolscap
+commands = mypy src
+
+
 [testenv:draftnews]
 passenv = TAHOE_LAFS_* PIP_* SUBUNITREPORTER_* USERPROFILE HOMEDRIVE HOMEPATH
 # see comment in [testenv] about "certifi"
@@ -168,14 +182,10 @@ commands =
     git commit -m "update NEWS.txt for release"

 [testenv:deprecations]
-setenv =
-    PYTHONWARNINGS=default::DeprecationWarning
 commands =
     python misc/build_helpers/run-deprecations.py --package allmydata --warnings={env:TAHOE_LAFS_WARNINGS_LOG:_trial_temp/deprecation-warnings.log} trial {env:TAHOE_LAFS_TRIAL_ARGS:--rterrors} {posargs:allmydata}

 [testenv:upcoming-deprecations]
-setenv =
-    PYTHONWARNINGS=default::DeprecationWarning
 deps =
     # Take the base deps as well!
     {[testenv]deps}
@@ -211,6 +221,7 @@ commands =
 deps =
     sphinx
     docutils==0.12
+    recommonmark
 # normal install is not needed for docs, and slows things down
 skip_install = True
 commands =