diff --git a/.circleci/Dockerfile.debian b/.circleci/Dockerfile.debian index 96c54736c..abab1f4fa 100644 --- a/.circleci/Dockerfile.debian +++ b/.circleci/Dockerfile.debian @@ -1,7 +1,7 @@ ARG TAG FROM debian:${TAG} ARG PYTHON_VERSION - +ENV DEBIAN_FRONTEND noninteractive ENV WHEELHOUSE_PATH /tmp/wheelhouse ENV VIRTUALENV_PATH /tmp/venv # This will get updated by the CircleCI checkout step. @@ -18,15 +18,11 @@ RUN apt-get --quiet update && \ libffi-dev \ libssl-dev \ libyaml-dev \ - virtualenv + virtualenv \ + tor # Get the project source. This is better than it seems. CircleCI will # *update* this checkout on each job run, saving us more time per-job. COPY . ${BUILD_SRC_ROOT} RUN "${BUILD_SRC_ROOT}"/.circleci/prepare-image.sh "${WHEELHOUSE_PATH}" "${VIRTUALENV_PATH}" "${BUILD_SRC_ROOT}" "python${PYTHON_VERSION}" - -# Only the integration tests currently need this but it doesn't hurt to always -# have it present and it's simpler than building a whole extra image just for -# the integration tests. 
-RUN ${BUILD_SRC_ROOT}/integration/install-tor.sh diff --git a/.circleci/Dockerfile.centos b/.circleci/Dockerfile.oraclelinux similarity index 93% rename from .circleci/Dockerfile.centos rename to .circleci/Dockerfile.oraclelinux index 9070d71d9..cf4c009d2 100644 --- a/.circleci/Dockerfile.centos +++ b/.circleci/Dockerfile.oraclelinux @@ -1,5 +1,5 @@ ARG TAG -FROM centos:${TAG} +FROM oraclelinux:${TAG} ARG PYTHON_VERSION ENV WHEELHOUSE_PATH /tmp/wheelhouse @@ -13,7 +13,6 @@ RUN yum install --assumeyes \ sudo \ make automake gcc gcc-c++ \ python${PYTHON_VERSION} \ - python${PYTHON_VERSION}-devel \ libffi-devel \ openssl-devel \ libyaml \ diff --git a/.circleci/Dockerfile.ubuntu b/.circleci/Dockerfile.ubuntu index 2fcc60f5a..22689f0c1 100644 --- a/.circleci/Dockerfile.ubuntu +++ b/.circleci/Dockerfile.ubuntu @@ -1,7 +1,7 @@ ARG TAG FROM ubuntu:${TAG} ARG PYTHON_VERSION - +ENV DEBIAN_FRONTEND noninteractive ENV WHEELHOUSE_PATH /tmp/wheelhouse ENV VIRTUALENV_PATH /tmp/venv # This will get updated by the CircleCI checkout step. 
diff --git a/.circleci/circleci.txt b/.circleci/circleci.txt new file mode 100644 index 000000000..c7adf9ec1 --- /dev/null +++ b/.circleci/circleci.txt @@ -0,0 +1,78 @@ +# A master build looks like this: + +# BASH_ENV=/tmp/.bash_env-63d018969ca480003a031e62-0-build +# CI=true +# CIRCLECI=true +# CIRCLE_BRANCH=master +# CIRCLE_BUILD_NUM=76545 +# CIRCLE_BUILD_URL=https://circleci.com/gh/tahoe-lafs/tahoe-lafs/76545 +# CIRCLE_JOB=NixOS 21.11 +# CIRCLE_NODE_INDEX=0 +# CIRCLE_NODE_TOTAL=1 +# CIRCLE_PROJECT_REPONAME=tahoe-lafs +# CIRCLE_PROJECT_USERNAME=tahoe-lafs +# CIRCLE_REPOSITORY_URL=git@github.com:tahoe-lafs/tahoe-lafs.git +# CIRCLE_SHA1=ed0bda2d7456f4a2cd60870072e1fe79864a49a1 +# CIRCLE_SHELL_ENV=/tmp/.bash_env-63d018969ca480003a031e62-0-build +# CIRCLE_USERNAME=alice +# CIRCLE_WORKFLOW_ID=6d9bb71c-be3a-4659-bf27-60954180619b +# CIRCLE_WORKFLOW_JOB_ID=0793c975-7b9f-489f-909b-8349b72d2785 +# CIRCLE_WORKFLOW_WORKSPACE_ID=6d9bb71c-be3a-4659-bf27-60954180619b +# CIRCLE_WORKING_DIRECTORY=~/project + +# A build of an in-repo PR looks like this: + +# BASH_ENV=/tmp/.bash_env-63d1971a0298086d8841287e-0-build +# CI=true +# CIRCLECI=true +# CIRCLE_BRANCH=3946-less-chatty-downloads +# CIRCLE_BUILD_NUM=76612 +# CIRCLE_BUILD_URL=https://circleci.com/gh/tahoe-lafs/tahoe-lafs/76612 +# CIRCLE_JOB=NixOS 21.11 +# CIRCLE_NODE_INDEX=0 +# CIRCLE_NODE_TOTAL=1 +# CIRCLE_PROJECT_REPONAME=tahoe-lafs +# CIRCLE_PROJECT_USERNAME=tahoe-lafs +# CIRCLE_PULL_REQUEST=https://github.com/tahoe-lafs/tahoe-lafs/pull/1251 +# CIRCLE_PULL_REQUESTS=https://github.com/tahoe-lafs/tahoe-lafs/pull/1251 +# CIRCLE_REPOSITORY_URL=git@github.com:tahoe-lafs/tahoe-lafs.git +# CIRCLE_SHA1=921a2083dcefdb5f431cdac195fc9ac510605349 +# CIRCLE_SHELL_ENV=/tmp/.bash_env-63d1971a0298086d8841287e-0-build +# CIRCLE_USERNAME=bob +# CIRCLE_WORKFLOW_ID=5e32c12e-be37-4868-9fa8-6a6929fec2f1 +# CIRCLE_WORKFLOW_JOB_ID=316ca408-81b4-4c96-bbdd-644e4c3e01e5 +# CIRCLE_WORKFLOW_WORKSPACE_ID=5e32c12e-be37-4868-9fa8-6a6929fec2f1 +# 
CIRCLE_WORKING_DIRECTORY=~/project +# CI_PULL_REQUEST=https://github.com/tahoe-lafs/tahoe-lafs/pull/1251 + +# A build of a PR from a fork looks like this: + +# BASH_ENV=/tmp/.bash_env-63d40f7b2e89cd3de10e0db9-0-build +# CI=true +# CIRCLECI=true +# CIRCLE_BRANCH=pull/1252 +# CIRCLE_BUILD_NUM=76678 +# CIRCLE_BUILD_URL=https://circleci.com/gh/tahoe-lafs/tahoe-lafs/76678 +# CIRCLE_JOB=NixOS 21.05 +# CIRCLE_NODE_INDEX=0 +# CIRCLE_NODE_TOTAL=1 +# CIRCLE_PROJECT_REPONAME=tahoe-lafs +# CIRCLE_PROJECT_USERNAME=tahoe-lafs +# CIRCLE_PR_NUMBER=1252 +# CIRCLE_PR_REPONAME=tahoe-lafs +# CIRCLE_PR_USERNAME=carol +# CIRCLE_PULL_REQUEST=https://github.com/tahoe-lafs/tahoe-lafs/pull/1252 +# CIRCLE_PULL_REQUESTS=https://github.com/tahoe-lafs/tahoe-lafs/pull/1252 +# CIRCLE_REPOSITORY_URL=git@github.com:tahoe-lafs/tahoe-lafs.git +# CIRCLE_SHA1=15c7916e0812e6baa2a931cd54b18f3382a8456e +# CIRCLE_SHELL_ENV=/tmp/.bash_env-63d40f7b2e89cd3de10e0db9-0-build +# CIRCLE_USERNAME= +# CIRCLE_WORKFLOW_ID=19c917c8-3a38-4b20-ac10-3265259fa03e +# CIRCLE_WORKFLOW_JOB_ID=58e95215-eccf-4664-a231-1dba7fd2d323 +# CIRCLE_WORKFLOW_WORKSPACE_ID=19c917c8-3a38-4b20-ac10-3265259fa03e +# CIRCLE_WORKING_DIRECTORY=~/project +# CI_PULL_REQUEST=https://github.com/tahoe-lafs/tahoe-lafs/pull/1252 + +# A build of a PR from a fork where the owner has enabled CircleCI looks +# the same as a build of an in-repo PR, except it runs on the owner's +# CircleCI namespace. diff --git a/.circleci/config.yml b/.circleci/config.yml index 2fc8e88e7..d327ecbc7 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -11,188 +11,354 @@ # version: 2.1 +# Every job that pushes a Docker image from Docker Hub must authenticate to +# it. Define a couple yaml anchors that can be used to supply the necessary +# credentials. + +# First is a CircleCI job context which makes Docker Hub credentials available +# in the environment.
+# +# Contexts are managed in the CircleCI web interface: +# +# https://app.circleci.com/settings/organization/github/tahoe-lafs/contexts +dockerhub-context-template: &DOCKERHUB_CONTEXT + context: "dockerhub-auth" + +# Required environment for using the coveralls tool to upload partial coverage +# reports and then finish the process. +coveralls-environment: &COVERALLS_ENVIRONMENT + COVERALLS_REPO_TOKEN: "JPf16rLB7T2yjgATIxFzTsEgMdN1UNq6o" + +# Next is a Docker executor template that gets the credentials from the +# environment and supplies them to the executor. +dockerhub-auth-template: &DOCKERHUB_AUTH + - auth: + username: $DOCKERHUB_USERNAME + password: $DOCKERHUB_PASSWORD + + # A template that can be shared between the two different image-building +# workflows. +.images: &IMAGES + jobs: + - "build-image-debian-11": + <<: *DOCKERHUB_CONTEXT + - "build-image-ubuntu-20-04": + <<: *DOCKERHUB_CONTEXT + - "build-image-ubuntu-22-04": + <<: *DOCKERHUB_CONTEXT + - "build-image-fedora-35": + <<: *DOCKERHUB_CONTEXT + - "build-image-oraclelinux-8": + <<: *DOCKERHUB_CONTEXT + # Restore later as PyPy38 + #- "build-image-pypy27-buster": + # <<: *DOCKERHUB_CONTEXT + +parameters: + # Control whether the image-building workflow runs as part of this pipeline. + # Generally we do not want this to run because we don't need our + # dependencies to move around all the time and because building the image + # takes a couple minutes. + # + # An easy way to trigger a pipeline with this set to true is with the + # rebuild-images.sh tool in this directory. You can also do so via the + # CircleCI web UI. + build-images: + default: false + type: "boolean" + + # Control whether the test-running workflow runs as part of this pipeline. + # Generally we do want this to run because running the tests is the primary + # purpose of this pipeline. 
+ run-tests: + default: true + type: "boolean" + workflows: ci: + when: "<< pipeline.parameters.run-tests >>" jobs: # Start with jobs testing various platforms. - - "debian-9": + - "debian-11": {} - - "debian-10": - requires: - - "debian-9" - "ubuntu-20-04": {} - - "ubuntu-18-04": - requires: - - "ubuntu-20-04" - - "ubuntu-16-04": - requires: - - "ubuntu-20-04" - - "fedora-29": - {} - - "fedora-28": - requires: - - "fedora-29" - - - "centos-8": + - "ubuntu-22-04": {} - - "nixos-19-09": + # Equivalent to RHEL 8; CentOS 8 is dead. + - "oraclelinux-8": {} - - "nixos-21-05": - {} + - "nixos": + name: "<>" + nixpkgs: "nixpkgs-unstable" + matrix: + parameters: + pythonVersion: + - "python39" + - "python310" + - "python311" - # Test against PyPy 2.7 - - "pypy27-buster": - {} - - # Just one Python 3.6 configuration while the port is in-progress. - - "python36": - {} + # Eventually, test against PyPy 3.8 + #- "pypy27-buster": + # {} # Other assorted tasks and configurations - - "lint": - {} - - "codechecks3": + - "codechecks": {} - "pyinstaller": {} - - "deprecations": - {} - "c-locale": {} # Any locale other than C or UTF-8. - "another-locale": {} + - "windows-server-2022": + name: "Windows Server 2022, CPython <>" + matrix: + parameters: + # Run the job for a number of CPython versions. These are the + # two versions installed on the version of the Windows VM image + # we specify (in the executor). This is handy since it means we + # don't have to do any Python installation work. We pin the + # Windows VM image so these shouldn't shuffle around beneath us + # but if we want to update that image or get different versions + # of Python, we probably have to do something here. + pythonVersion: + - "3.9" + - "3.11" + - "integration": + # Run even the slow integration tests here. We need the `--` to + # sneak past tox and get to pytest. + tox-args: "-- --runslow integration" requires: # If the unit test suite doesn't pass, don't bother running the # integration tests. 
- - "debian-9" + - "debian-11" - "typechecks": {} - "docs": {} + - "finish-coverage-report": + requires: + # Referencing the job by "alias" (as CircleCI calls the mapping + # key) instead of the value of its "name" property causes us to + # require every instance of the job from its matrix expansion. So + # this requirement is enough to require every Windows Server 2022 + # job. + - "windows-server-2022" + images: - # Build the Docker images used by the ci jobs. This makes the ci jobs - # faster and takes various spurious failures out of the critical path. - triggers: - # Build once a day - - schedule: - cron: "0 0 * * *" - filters: - branches: - only: - - "master" - - jobs: - # Every job that pushes a Docker image from Docker Hub needs to provide - # credentials. Use this first job to define a yaml anchor that can be - # used to supply a CircleCI job context which makes Docker Hub - # credentials available in the environment. - # - # Contexts are managed in the CircleCI web interface: - # - # https://app.circleci.com/settings/organization/github/tahoe-lafs/contexts - - "build-image-debian-10": &DOCKERHUB_CONTEXT - context: "dockerhub-auth" - - "build-image-debian-9": - <<: *DOCKERHUB_CONTEXT - - "build-image-ubuntu-16-04": - <<: *DOCKERHUB_CONTEXT - - "build-image-ubuntu-18-04": - <<: *DOCKERHUB_CONTEXT - - "build-image-ubuntu-20-04": - <<: *DOCKERHUB_CONTEXT - - "build-image-fedora-28": - <<: *DOCKERHUB_CONTEXT - - "build-image-fedora-29": - <<: *DOCKERHUB_CONTEXT - - "build-image-centos-8": - <<: *DOCKERHUB_CONTEXT - - "build-image-pypy27-buster": - <<: *DOCKERHUB_CONTEXT - - "build-image-python36-ubuntu": - <<: *DOCKERHUB_CONTEXT + <<: *IMAGES + # Build as part of the workflow but only if requested. + when: "<< pipeline.parameters.build-images >>" jobs: - dockerhub-auth-template: - # This isn't a real job. It doesn't get scheduled as part of any - # workflow. 
Instead, it's just a place we can hang a yaml anchor to - # finish the Docker Hub authentication configuration. Workflow jobs using - # the DOCKERHUB_CONTEXT anchor will have access to the environment - # variables used here. These variables will allow the Docker Hub image - # pull to be authenticated and hopefully avoid hitting and rate limits. - docker: &DOCKERHUB_AUTH - - image: "null" - auth: - username: $DOCKERHUB_USERNAME - password: $DOCKERHUB_PASSWORD + finish-coverage-report: + docker: + - <<: *DOCKERHUB_AUTH + image: "python:3-slim" steps: - run: - name: "CircleCI YAML schema conformity" + name: "Indicate completion to coveralls.io" + environment: + <<: *COVERALLS_ENVIRONMENT command: | - # This isn't a real command. We have to have something in this - # space, though, or the CircleCI yaml schema validator gets angry. - # Since this job is never scheduled this step is never run so the - # actual value here is irrelevant. + pip install coveralls==3.3.1 + python -m coveralls --finish - lint: + codechecks: docker: - <<: *DOCKERHUB_AUTH - image: "circleci/python:2" + image: "cimg/python:3.9" steps: - "checkout" - - run: + - run: &INSTALL_TOX name: "Install tox" command: | - pip install --user tox + pip install --user 'tox~=3.0' - run: name: "Static-ish code checks" command: | ~/.local/bin/tox -e codechecks - codechecks3: - docker: - - <<: *DOCKERHUB_AUTH - image: "circleci/python:3" + windows-server-2022: + parameters: + pythonVersion: + description: >- + An argument to pass to the `py` launcher to choose a Python version. + type: "string" + default: "" + + executor: "windows" + environment: + # Tweak Hypothesis to make its behavior more suitable for the CI + # environment. This should improve reproducibility and lessen the + # effects of variable compute resources. + TAHOE_LAFS_HYPOTHESIS_PROFILE: "ci" + + # Tell pip where its download cache lives. This must agree with the + # "save_cache" step below or caching won't really work right. 
+ PIP_CACHE_DIR: "pip-cache" + + # And tell pip where it can find our cached wheelhouse for fast wheel + # installation, even for projects that don't distribute wheels. This + # must also agree with the "save_cache" step below. + PIP_FIND_LINKS: "wheelhouse" steps: - "checkout" - - run: - name: "Install tox" - command: | - pip install --user tox + # If possible, restore a pip download cache to save us from having to + # download all our Python dependencies from PyPI. + - "restore_cache": + keys: + # The download cache and/or the wheelhouse may contain Python + # version-specific binary packages so include the Python version + # in this key, as well as the canonical source of our + # dependencies. + - &CACHE_KEY "pip-packages-v1-<< parameters.pythonVersion >>-{{ checksum \"setup.py\" }}" - - run: - name: "Static-ish code checks" + - "run": + name: "Fix $env:PATH" command: | - ~/.local/bin/tox -e codechecks3 + # The Python this job is parameterized is not necessarily the one + # at the front of $env:PATH. Modify $env:PATH so that it is so we + # can just say "python" in the rest of the steps. Also get the + # related Scripts directory so tools from packages we install are + # also available. + $p = py -<> -c "import sys; print(sys.prefix)" + $q = py -<> -c "import sysconfig; print(sysconfig.get_path('scripts'))" + + New-Item $Profile.CurrentUserAllHosts -Force + # $p gets "python" on PATH and $q gets tools from packages we + # install. Note we carefully construct the string so that + # $env:PATH is not substituted now but $p and $q are. ` is the + # PowerShell string escape character. + Add-Content -Path $Profile.CurrentUserAllHosts -Value "`$env:PATH = `"$p;$q;`$env:PATH`"" + + - "run": + name: "Display tool versions" + command: | + python misc/build_helpers/show-tool-versions.py + + - "run": + # It's faster to install a wheel than a source package.
If we don't + # have a cached wheelhouse then build all of the wheels and dump + # them into a directory where they can become a cached wheelhouse. + # We would have built these wheels during installation anyway so it + # doesn't cost us anything extra and saves us effort next time. + name: "(Maybe) Build Wheels" + command: | + if ((Test-Path .\wheelhouse) -and (Test-Path .\wheelhouse\*)) { + echo "Found populated wheelhouse, skipping wheel building." + } else { + python -m pip install wheel + python -m pip wheel --wheel-dir $env:PIP_FIND_LINKS .[testenv] .[test] + } + + - "save_cache": + paths: + # Make sure this agrees with PIP_CACHE_DIR in the environment. + - "pip-cache" + - "wheelhouse" + key: *CACHE_KEY + + - "run": + name: "Install Dependencies" + environment: + # By this point we should no longer need an index. + PIP_NO_INDEX: "1" + command: | + python -m pip install .[testenv] .[test] + + - "run": + name: "Run Unit Tests" + environment: + # Configure the results location for the subunitv2-file reporter + # from subunitreporter + SUBUNITREPORTER_OUTPUT_PATH: "test-results.subunit2" + + # Try to get prompt output from the reporter to avoid no-output + # timeouts. + PYTHONUNBUFFERED: "1" + + command: | + # Run the test suite under coverage measurement using the + # parameterized version of Python, writing subunitv2-format + # results to the file given in the environment. + python -b -m coverage run -m twisted.trial --reporter=subunitv2-file --rterrors allmydata + + - "run": + name: "Upload Coverage" + environment: + <<: *COVERALLS_ENVIRONMENT + # Mark the data as just one piece of many because we have more + # than one instance of this job (two on Windows now, some on other + # platforms later) which collects and reports coverage. This is + # necessary to cause Coveralls to merge multiple coverage results + # into a single report. Note the merge only happens when we + # "finish" a particular build, as identified by its "build_num" + # (aka "service_number"). 
+ COVERALLS_PARALLEL: "true" + command: | + python -m pip install coveralls==3.3.1 + + # .coveragerc sets parallel = True so we don't have a `.coverage` + # file but a `.coverage.` file (or maybe more than + # one, but probably not). coveralls can't work with these so + # merge them before invoking it. + python -m coverage combine + + # Now coveralls will be able to find the data, so have it do the + # upload. Also, have it strip the system config-specific prefix + # from all of the source paths. + $prefix = python -c "import sysconfig; print(sysconfig.get_path('purelib'))" + python -m coveralls --basedir $prefix + + - "run": + name: "Convert Result Log" + command: | + # subunit2junitxml exits with error if the result stream it is + # converting has test failures in it! So this step might fail. + # Since the step in which we actually _ran_ the tests won't fail + # even if there are test failures, this is a good thing for now. + subunit2junitxml.exe --output-to=test-results.xml test-results.subunit2 + + - "store_test_results": + path: "test-results.xml" + + - "store_artifacts": + path: "_trial_temp/test.log" + + - "store_artifacts": + path: "eliot.log" + + - "store_artifacts": + path: ".coverage" pyinstaller: docker: - <<: *DOCKERHUB_AUTH - image: "circleci/python:2" + image: "cimg/python:3.9" steps: - "checkout" - run: - name: "Install tox" - command: | - pip install --user tox + <<: *INSTALL_TOX - run: name: "Make PyInstaller executable" @@ -207,12 +373,7 @@ jobs: command: | dist/Tahoe-LAFS/tahoe --version - debian-9: &DEBIAN - docker: - - <<: *DOCKERHUB_AUTH - image: "tahoelafsci/debian:9-py2.7" - user: "nobody" - + debian-11: &DEBIAN environment: &UTF_8_ENVIRONMENT # In general, the test suite is not allowed to fail while the job # succeeds. But you can set this to "yes" if you want it to be @@ -224,7 +385,7 @@ jobs: # filenames and argv). LANG: "en_US.UTF-8" # Select a tox environment to run for this job. 
- TAHOE_LAFS_TOX_ENVIRONMENT: "py27" + TAHOE_LAFS_TOX_ENVIRONMENT: "py39" # Additional arguments to pass to tox. TAHOE_LAFS_TOX_ARGS: "" # The path in which test artifacts will be placed. @@ -289,32 +450,28 @@ jobs: name: "Submit coverage results" command: | if [ -n "${UPLOAD_COVERAGE}" ]; then - /tmp/venv/bin/codecov + echo "TODO: Need a new coverage solution, see https://tahoe-lafs.org/trac/tahoe-lafs/ticket/4011" fi - - debian-10: - <<: *DEBIAN docker: - <<: *DOCKERHUB_AUTH - image: "tahoelafsci/debian:10-py2.7" + image: "tahoelafsci/debian:11-py3.9" user: "nobody" - pypy27-buster: - <<: *DEBIAN - docker: - - <<: *DOCKERHUB_AUTH - image: "tahoelafsci/pypy:buster-py2" - user: "nobody" - - environment: - <<: *UTF_8_ENVIRONMENT - # We don't do coverage since it makes PyPy far too slow: - TAHOE_LAFS_TOX_ENVIRONMENT: "pypy27" - # Since we didn't collect it, don't upload it. - UPLOAD_COVERAGE: "" - + # Restore later using PyPy3.8 + # pypy27-buster: + # <<: *DEBIAN + # docker: + # - <<: *DOCKERHUB_AUTH + # image: "tahoelafsci/pypy:buster-py2" + # user: "nobody" + # environment: + # <<: *UTF_8_ENVIRONMENT + # # We don't do coverage since it makes PyPy far too slow: + # TAHOE_LAFS_TOX_ENVIRONMENT: "pypy27" + # # Since we didn't collect it, don't upload it. + # UPLOAD_COVERAGE: "" c-locale: <<: *DEBIAN @@ -332,23 +489,21 @@ jobs: # aka "Latin 1" LANG: "en_US.ISO-8859-1" - - deprecations: - <<: *DEBIAN - - environment: - <<: *UTF_8_ENVIRONMENT - # Select the deprecations tox environments. - TAHOE_LAFS_TOX_ENVIRONMENT: "deprecations,upcoming-deprecations" - # Put the logs somewhere we can report them. - TAHOE_LAFS_WARNINGS_LOG: "/tmp/artifacts/deprecation-warnings.log" - # The deprecations tox environments don't do coverage measurement. - UPLOAD_COVERAGE: "" - - integration: <<: *DEBIAN + parameters: + tox-args: + description: >- + Additional arguments to pass to the tox command. 
+ type: "string" + default: "" + + docker: + - <<: *DOCKERHUB_AUTH + image: "tahoelafsci/debian:11-py3.9" + user: "nobody" + environment: <<: *UTF_8_ENVIRONMENT # Select the integration tests tox environments. @@ -356,60 +511,44 @@ jobs: # Disable artifact collection because py.test can't produce any. ARTIFACTS_OUTPUT_PATH: "" + # Pass on anything we got in our parameters. + TAHOE_LAFS_TOX_ARGS: "<< parameters.tox-args >>" + steps: - "checkout" # DRY, YAML-style. See the debian-9 steps. - run: *SETUP_VIRTUALENV - run: *RUN_TESTS - - ubuntu-16-04: - <<: *DEBIAN - docker: - - <<: *DOCKERHUB_AUTH - image: "tahoelafsci/ubuntu:16.04-py2.7" - user: "nobody" - - - ubuntu-18-04: &UBUNTU_18_04 - <<: *DEBIAN - docker: - - <<: *DOCKERHUB_AUTH - image: "tahoelafsci/ubuntu:18.04-py2.7" - user: "nobody" - - - python36: - <<: *UBUNTU_18_04 - docker: - - <<: *DOCKERHUB_AUTH - image: "tahoelafsci/ubuntu:18.04-py3" - user: "nobody" - - environment: - <<: *UTF_8_ENVIRONMENT - # The default trial args include --rterrors which is incompatible with - # this reporter on Python 3. So drop that and just specify the - # reporter. 
- TAHOE_LAFS_TRIAL_ARGS: "--reporter=subunitv2-file" - TAHOE_LAFS_TOX_ENVIRONMENT: "py36" - - ubuntu-20-04: <<: *DEBIAN docker: - <<: *DOCKERHUB_AUTH - image: "tahoelafsci/ubuntu:20.04" + image: "tahoelafsci/ubuntu:20.04-py3.9" user: "nobody" + environment: + <<: *UTF_8_ENVIRONMENT + TAHOE_LAFS_TOX_ENVIRONMENT: "py39" - - centos-8: &RHEL_DERIV + ubuntu-22-04: + <<: *DEBIAN docker: - <<: *DOCKERHUB_AUTH - image: "tahoelafsci/centos:8-py2" + image: "tahoelafsci/ubuntu:22.04-py3.10" + user: "nobody" + environment: + <<: *UTF_8_ENVIRONMENT + TAHOE_LAFS_TOX_ENVIRONMENT: "py310" + + oraclelinux-8: &RHEL_DERIV + docker: + - <<: *DOCKERHUB_AUTH + image: "tahoelafsci/oraclelinux:8-py3.8" user: "nobody" - environment: *UTF_8_ENVIRONMENT + environment: + <<: *UTF_8_ENVIRONMENT + TAHOE_LAFS_TOX_ENVIRONMENT: "py38" # pip cannot install packages if the working directory is not readable. # We want to run a lot of steps as nobody instead of as root. @@ -425,63 +564,51 @@ jobs: - store_artifacts: *STORE_OTHER_ARTIFACTS - run: *SUBMIT_COVERAGE - - fedora-28: + fedora-35: <<: *RHEL_DERIV docker: - <<: *DOCKERHUB_AUTH - image: "tahoelafsci/fedora:28-py" + image: "tahoelafsci/fedora:35-py3" user: "nobody" + nixos: + parameters: + nixpkgs: + description: >- + Reference the name of a flake-managed nixpkgs input (see `nix flake + metadata` and flake.nix) + type: "string" + pythonVersion: + description: >- + Reference the name of a Python package in nixpkgs to use. + type: "string" - fedora-29: - <<: *RHEL_DERIV - docker: - - <<: *DOCKERHUB_AUTH - image: "tahoelafsci/fedora:29-py" - user: "nobody" - - nixos-19-09: &NIXOS - docker: - # Run in a highly Nix-capable environment. 
- - <<: *DOCKERHUB_AUTH - image: "nixorg/nix:circleci" - - environment: - NIX_PATH: "nixpkgs=https://github.com/NixOS/nixpkgs-channels/archive/nixos-19.09-small.tar.gz" - SOURCE: "nix/" + executor: "nix" steps: - - "checkout" - - "run": - name: "Build and Test" - command: | - # CircleCI build environment looks like it has a zillion and a - # half cores. Don't let Nix autodetect this high core count - # because it blows up memory usage and fails the test run. Pick a - # number of cores that suites the build environment we're paying - # for (the free one!). - # - # Also, let it run more than one job at a time because we have to - # build a couple simple little dependencies that don't take - # advantage of multiple cores and we get a little speedup by doing - # them in parallel. - nix-build --cores 3 --max-jobs 2 "$SOURCE" + - "nix-build": + nixpkgs: "<>" + pythonVersion: "<>" + buildSteps: + - "run": + name: "Unit Test" + command: | + source .circleci/lib.sh - nixos-21-05: - <<: *NIXOS + # Translate the nixpkgs selection into a flake reference we + # can use to override the default nixpkgs input. + NIXPKGS=$(nixpkgs_flake_reference <>) - environment: - # Note this doesn't look more similar to the 19.09 NIX_PATH URL because - # there was some internal shuffling by the NixOS project about how they - # publish stable revisions. 
- NIX_PATH: "nixpkgs=https://github.com/NixOS/nixpkgs/archive/d32b07e6df276d78e3640eb43882b80c9b2b3459.tar.gz" - SOURCE: "nix/py3.nix" + cache_if_able nix run \ + --override-input nixpkgs "$NIXPKGS" \ + .#<>-unittest -- \ + --jobs $UNITTEST_CORES \ + allmydata typechecks: docker: - <<: *DOCKERHUB_AUTH - image: "tahoelafsci/ubuntu:18.04-py3" + image: "tahoelafsci/ubuntu:20.04-py3.9" steps: - "checkout" @@ -493,7 +620,7 @@ jobs: docs: docker: - <<: *DOCKERHUB_AUTH - image: "tahoelafsci/ubuntu:18.04-py3" + image: "tahoelafsci/ubuntu:20.04-py3.9" steps: - "checkout" @@ -511,16 +638,19 @@ jobs: # https://circleci.com/blog/how-to-build-a-docker-image-on-circleci-2-0/ docker: - <<: *DOCKERHUB_AUTH - image: "docker:17.05.0-ce-git" + # CircleCI build images; https://github.com/CircleCI-Public/cimg-base + # for details. + image: "cimg/base:2022.01" environment: - DISTRO: "tahoelafsci/:foo-py2" - TAG: "tahoelafsci/distro:-py2" + DISTRO: "tahoelafsci/:foo-py3.9" + TAG: "tahoelafsci/distro:-py3.9" PYTHON_VERSION: "tahoelafsci/distro:tag-py` forms. + NIX_CONFIG: "experimental-features = nix-command flakes" +commands: + nix-build: + parameters: + nixpkgs: + description: >- + Reference the name of a flake-managed nixpkgs input (see `nix flake + metadata` and flake.nix) + type: "string" + pythonVersion: + description: >- + Reference the name of a Python package in nixpkgs to use. + type: "string" + buildSteps: + description: >- + The build steps to execute after setting up the build environment. + type: "steps" - build-image-pypy27-buster: - <<: *BUILD_IMAGE + steps: + - "run": + # Get cachix for Nix-friendly caching. + name: "Install Basic Dependencies" + command: | + # Get some build environment dependencies and let them float on a + # certain release branch. These aren't involved in the actual + # package build (only in CI environment setup) so the fact that + # they float shouldn't hurt reproducibility. 
+ NIXPKGS="nixpkgs/nixos-23.05" + nix profile install $NIXPKGS#cachix $NIXPKGS#bash $NIXPKGS#jp - environment: - DISTRO: "pypy" - TAG: "buster" - # We only have Python 2 for PyPy right now so there's no support for - # setting up PyPy 3 in the image building toolchain. This value is just - # for constructing the right Docker image tag. - PYTHON_VERSION: "2" + # Activate our cachix cache for "binary substitution". This sets + # up configuration that lets Nix download something from the cache + # instead of building it locally, if possible. + cachix use "${CACHIX_NAME}" + + - "checkout" + + - "run": + # The Nix package doesn't know how to do this part, unfortunately. + name: "Generate version" + command: | + nix-shell \ + -p 'python3.withPackages (ps: [ ps.setuptools ])' \ + --run 'python setup.py update_version' + + - "run": + name: "Build Package" + command: | + source .circleci/lib.sh + NIXPKGS=$(nixpkgs_flake_reference <>) + cache_if_able nix build \ + --verbose \ + --print-build-logs \ + --cores "$DEPENDENCY_CORES" \ + --override-input nixpkgs "$NIXPKGS" \ + .#<>-tahoe-lafs + + - steps: "<>" diff --git a/.circleci/create-virtualenv.sh b/.circleci/create-virtualenv.sh index 810ce5ae2..05ac64490 100755 --- a/.circleci/create-virtualenv.sh +++ b/.circleci/create-virtualenv.sh @@ -46,4 +46,8 @@ export PIP_FIND_LINKS="file://${WHEELHOUSE_PATH}" # setuptools 45 requires Python 3.5 or newer. Even though we upgraded pip # above, it may still not be able to get us a compatible version unless we # explicitly ask for one. -"${PIP}" install --upgrade setuptools==44.0.0 wheel +"${PIP}" install --upgrade setuptools wheel + +# Just about every user of this image wants to use tox from the bootstrap +# virtualenv so go ahead and install it now.
+"${PIP}" install "tox~=4.0" diff --git a/.circleci/lib.sh b/.circleci/lib.sh new file mode 100644 index 000000000..a53c33dce --- /dev/null +++ b/.circleci/lib.sh @@ -0,0 +1,148 @@ +# CircleCI build environment looks like it has a zillion and a half cores. +# Don't let Nix autodetect this high core count because it blows up memory +# usage and fails the test run. Pick a number of cores that suits the build +# environment we're paying for (the free one!). +DEPENDENCY_CORES=3 + +# Once dependencies are built, we can allow some more concurrency for our own +# test suite. +UNITTEST_CORES=8 + +# Run a command, enabling cache writes to cachix if possible. The command is +# accepted as a variable number of positional arguments (like argv). +function cache_if_able() { + # Dump some info about our build environment. + describe_build + + if is_cache_writeable; then + # If the cache is available we'll use it. This lets fork owners set + # up their own caching if they want. + echo "Cachix credentials present; will attempt to write to cache." + + # The `cachix watch-exec ...` does our cache population. When it sees + # something added to the store (I guess) it pushes it to the named + # cache. + cachix watch-exec "${CACHIX_NAME}" -- "$@" + else + if is_cache_required; then + echo "Required credentials (CACHIX_AUTH_TOKEN) are missing." + return 1 + else + echo "Cachix credentials missing; will not attempt cache writes." + "$@" + fi + fi +} + +function is_cache_writeable() { + # We can only *push* to the cache if we have a CACHIX_AUTH_TOKEN. in-repo + # jobs will get this from CircleCI configuration but jobs from forks may + # not. + [ -v CACHIX_AUTH_TOKEN ] +} + +function is_cache_required() { + # If we're building in tahoe-lafs/tahoe-lafs then we must use the cache. + # If we're building anything from a fork then we're allowed to not have + # the credentials. 
+ is_upstream +} + +# Return success if the origin of this build is the tahoe-lafs/tahoe-lafs +# repository itself (and so we expect to have cache credentials available), +# failure otherwise. +# +# See circleci.txt for notes about how this determination is made. +function is_upstream() { + # CIRCLE_PROJECT_USERNAME is set to the org the build is happening for. + # If a PR targets a fork of the repo then this is set to something other + # than "tahoe-lafs". + [ "$CIRCLE_PROJECT_USERNAME" == "tahoe-lafs" ] && + + # CIRCLE_BRANCH is set to the real branch name for in-repo PRs and + # "pull/NNNN" for pull requests from forks. + # + # CIRCLE_PULL_REQUESTS is set to a comma-separated list of the full + # URLs of the PR pages which share an underlying branch, with one of + # them ended with that same "pull/NNNN" for PRs from forks. + ! any_element_endswith "/$CIRCLE_BRANCH" "," "$CIRCLE_PULL_REQUESTS" +} + +# Return success if splitting $3 on $2 results in an array with any element +# that ends with $1, failure otherwise. +function any_element_endswith() { + suffix=$1 + shift + + sep=$1 + shift + + haystack=$1 + shift + + IFS="${sep}" read -r -a elements <<< "$haystack" + for elem in "${elements[@]}"; do + if endswith "$suffix" "$elem"; then + return 0 + fi + done + return 1 +} + +# Return success if $2 ends with $1, failure otherwise. +function endswith() { + suffix=$1 + shift + + haystack=$1 + shift + + case "$haystack" in + *${suffix}) + return 0 + ;; + + *) + return 1 + ;; + esac +} + +function describe_build() { + echo "Building PR for user/org: ${CIRCLE_PROJECT_USERNAME}" + echo "Building branch: ${CIRCLE_BRANCH}" + if is_upstream; then + echo "Upstream build." + else + echo "Non-upstream build." + fi + if is_cache_required; then + echo "Cache is required." + else + echo "Cache not required." + fi + if is_cache_writeable; then + echo "Cache is writeable." + else + echo "Cache not writeable." 
+ fi +} + +# Inspect the flake input metadata for an input of a given name and return the +# revision at which that input is pinned. If the input does not exist then +# return garbage (probably "null"). +read_input_revision() { + input_name=$1 + shift + + nix flake metadata --json | jp --unquoted 'locks.nodes."'"$input_name"'".locked.rev' +} + +# Return a flake reference that refers to a certain revision of nixpkgs. The +# certain revision is the revision to which the specified input is pinned. +nixpkgs_flake_reference() { + input_name=$1 + shift + + echo "github:NixOS/nixpkgs?rev=$(read_input_revision $input_name)" +} diff --git a/.circleci/populate-wheelhouse.sh b/.circleci/populate-wheelhouse.sh index 519a80cac..239c8367b 100755 --- a/.circleci/populate-wheelhouse.sh +++ b/.circleci/populate-wheelhouse.sh @@ -3,18 +3,6 @@ # https://vaneyckt.io/posts/safer_bash_scripts_with_set_euxo_pipefail/ set -euxo pipefail -# Basic Python packages that you just need to have around to do anything, -# practically speaking. -BASIC_DEPS="pip wheel" - -# Python packages we need to support the test infrastructure. *Not* packages -# Tahoe-LAFS itself (implementation or test suite) need. -TEST_DEPS="tox codecov" - -# Python packages we need to generate test reports for CI infrastructure. -# *Not* packages Tahoe-LAFS itself (implement or test suite) need. -REPORTING_DEPS="python-subunit junitxml subunitreporter" - # The filesystem location of the wheelhouse which we'll populate with wheels # for all of our dependencies. WHEELHOUSE_PATH="$1" @@ -41,15 +29,5 @@ export PIP_FIND_LINKS="file://${WHEELHOUSE_PATH}" LANG="en_US.UTF-8" "${PIP}" \ wheel \ --wheel-dir "${WHEELHOUSE_PATH}" \ - "${PROJECT_ROOT}"[test] \ - ${BASIC_DEPS} \ - ${TEST_DEPS} \ - ${REPORTING_DEPS} - -# Not strictly wheelhouse population but ... Note we omit basic deps here. -# They're in the wheelhouse if Tahoe-LAFS wants to drag them in but it will -# have to ask. 
-"${PIP}" \ - install \ - ${TEST_DEPS} \ - ${REPORTING_DEPS} + "${PROJECT_ROOT}"[testenv] \ + "${PROJECT_ROOT}"[test] diff --git a/.circleci/rebuild-images.sh b/.circleci/rebuild-images.sh new file mode 100755 index 000000000..901651905 --- /dev/null +++ b/.circleci/rebuild-images.sh @@ -0,0 +1,20 @@ +#!/usr/bin/env bash + +set -euo pipefail + +# Get your API token here: +# https://app.circleci.com/settings/user/tokens +API_TOKEN=$1 +shift + +# Name the branch you want to trigger the build for +BRANCH=$1 +shift + +curl \ + --verbose \ + --request POST \ + --url https://circleci.com/api/v2/project/gh/tahoe-lafs/tahoe-lafs/pipeline \ + --header "Circle-Token: $API_TOKEN" \ + --header "content-type: application/json" \ + --data '{"branch":"'"$BRANCH"'","parameters":{"build-images":true,"run-tests":false}}' diff --git a/.circleci/run-tests.sh b/.circleci/run-tests.sh index 764651c40..d897cc729 100755 --- a/.circleci/run-tests.sh +++ b/.circleci/run-tests.sh @@ -45,14 +45,15 @@ fi # A prefix for the test command that ensure it will exit after no more than a # certain amount of time. Ideally, we would only enforce a "silent" period -# timeout but there isn't obviously a ready-made tool for that. The test -# suite only takes about 5 - 6 minutes on CircleCI right now. 15 minutes -# seems like a moderately safe window. +# timeout but there isn't obviously a ready-made tool for that. The unit test +# suite only takes about 5 - 6 minutes on CircleCI right now. The integration +# tests are a bit longer than that. 45 minutes seems like a moderately safe +# window. # # This is primarily aimed at catching hangs on the PyPy job which runs for # about 21 minutes and then gets killed by CircleCI in a way that fails the # job and bypasses our "allowed failure" logic. -TIMEOUT="timeout --kill-after 1m 15m" +TIMEOUT="timeout --kill-after 1m 45m" # Run the test suite as a non-root user. 
This is the expected usage some # small areas of the test suite assume non-root privileges (such as unreadable @@ -78,9 +79,10 @@ else alternative="false" fi +WORKDIR=/tmp/tahoe-lafs.tox ${TIMEOUT} ${BOOTSTRAP_VENV}/bin/tox \ -c ${PROJECT_ROOT}/tox.ini \ - --workdir /tmp/tahoe-lafs.tox \ + --workdir "${WORKDIR}" \ -e "${TAHOE_LAFS_TOX_ENVIRONMENT}" \ ${TAHOE_LAFS_TOX_ARGS} || "${alternative}" @@ -92,5 +94,6 @@ if [ -n "${ARTIFACTS}" ]; then # Create a junitxml results area. mkdir -p "$(dirname "${JUNITXML}")" - "${BOOTSTRAP_VENV}"/bin/subunit2junitxml < "${SUBUNIT2}" > "${JUNITXML}" || "${alternative}" + + "${WORKDIR}/${TAHOE_LAFS_TOX_ENVIRONMENT}/bin/subunit2junitxml" < "${SUBUNIT2}" > "${JUNITXML}" || "${alternative}" fi diff --git a/.circleci/setup-virtualenv.sh b/.circleci/setup-virtualenv.sh index feccbbf23..7087c5120 100755 --- a/.circleci/setup-virtualenv.sh +++ b/.circleci/setup-virtualenv.sh @@ -26,12 +26,7 @@ shift || : # Tell pip where it can find any existing wheels. export PIP_FIND_LINKS="file://${WHEELHOUSE_PATH}" - -# It is tempting to also set PIP_NO_INDEX=1 but (a) that will cause problems -# between the time dependencies change and the images are re-built and (b) the -# upcoming-deprecations job wants to install some dependencies from github and -# it's awkward to get that done any earlier than the tox run. So, we don't -# set it. +export PIP_NO_INDEX="1" # Get everything else installed in it, too. "${BOOTSTRAP_VENV}"/bin/tox \ diff --git a/.coveragerc b/.coveragerc index d09554cad..5b41f9ce3 100644 --- a/.coveragerc +++ b/.coveragerc @@ -19,7 +19,7 @@ skip_covered = True source = # It looks like this in the checkout src/ -# It looks like this in the Windows build environment +# It looks like this in the GitHub Actions Windows build environment D:/a/tahoe-lafs/tahoe-lafs/.tox/py*-coverage/Lib/site-packages/ # Although sometimes it looks like this instead. Also it looks like this on macOS. 
.tox/py*-coverage/lib/python*/site-packages/ diff --git a/.github/CONTRIBUTING.rst b/.github/CONTRIBUTING.rst index b59385aa4..fa3d66ffe 100644 --- a/.github/CONTRIBUTING.rst +++ b/.github/CONTRIBUTING.rst @@ -18,3 +18,9 @@ Examples of contributions include: Before authoring or reviewing a patch, please familiarize yourself with the `Coding Standards `_ and the `Contributor Code of Conduct <../docs/CODE_OF_CONDUCT.md>`_. + + +🥳 First Contribution? +====================== + +If you are committing to Tahoe for the very first time, consider adding your name to our contributor list in `CREDITS <../CREDITS>`__ diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 45b2986a3..845d49e63 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -6,6 +6,33 @@ on: - "master" pull_request: +# At the start of each workflow run, GitHub creates a unique +# GITHUB_TOKEN secret to use in the workflow. It is a good idea for +# this GITHUB_TOKEN to have the minimum of permissions. See: +# +# - https://docs.github.com/en/actions/security-guides/automatic-token-authentication +# - https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#permissions +# +permissions: + contents: read + +# Control to what degree jobs in this workflow will run concurrently with +# other instances of themselves. +# +# https://docs.github.com/en/actions/learn-github-actions/workflow-syntax-for-github-actions#concurrency +concurrency: + # We want every revision on master to run the workflow completely. + # "head_ref" is not set for the "push" event but it is set for the + # "pull_request" event. If it is set then it is the name of the branch and + # we can use it to make sure each branch has only one active workflow at a + # time. If it is not set then we can compute a unique string that gives + # every master/push workflow its own group. 
+ group: "${{ github.head_ref || format('{0}-{1}', github.run_number, github.run_attempt) }}" + + # Then, we say that if a new workflow wants to start in the same group as a + # running workflow, the running workflow should be cancelled. + cancel-in-progress: true + env: # Tell Hypothesis which configuration we want it to use. TAHOE_LAFS_HYPOTHESIS_PROFILE: "ci" @@ -17,73 +44,64 @@ jobs: strategy: fail-fast: false matrix: - os: - - windows-latest - - ubuntu-latest - python-version: - - 2.7 - - 3.6 - - 3.7 - - 3.8 - - 3.9 include: - # On macOS don't bother with 3.6-3.8, just to get faster builds. - - os: macos-10.15 - python-version: 2.7 - - os: macos-latest - python-version: 3.9 + - os: macos-12 + python-version: "3.12" + # We only support PyPy on Linux at the moment. + - os: ubuntu-latest + python-version: "pypy-3.8" + - os: ubuntu-latest + python-version: "pypy-3.9" + - os: ubuntu-latest + python-version: "3.12" + - os: windows-latest + python-version: "3.12" steps: # See https://github.com/actions/checkout. A fetch-depth of 0 # fetches all tags and branches. - name: Check out Tahoe-LAFS sources - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: fetch-depth: 0 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} - - # To use pip caching with GitHub Actions in an OS-independent - # manner, we need `pip cache dir` command, which became - # available since pip v20.1+. At the time of writing this, - # GitHub Actions offers pip v20.3.3 for both ubuntu-latest and - # windows-latest, and pip v20.3.1 for macos-latest. 
- - name: Get pip cache directory - id: pip-cache - run: | - echo "::set-output name=dir::$(pip cache dir)" - - # See https://github.com/actions/cache - - name: Use pip cache - uses: actions/cache@v2 - with: - path: ${{ steps.pip-cache.outputs.dir }} - key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }} - restore-keys: | - ${{ runner.os }}-pip- + cache: 'pip' # caching pip dependencies - name: Install Python packages run: | - pip install --upgrade codecov tox tox-gh-actions setuptools + pip install --upgrade tox tox-gh-actions setuptools pip list - name: Display tool versions run: python misc/build_helpers/show-tool-versions.py - name: Run tox for corresponding Python version + if: ${{ !contains(matrix.os, 'windows') }} run: python -m tox + # On Windows, a non-blocking pipe might respond (when emulating Unix-y + # API) with ENOSPC to indicate buffer full. Trial doesn't handle this + # well, so it breaks test runs. To attempt to solve this, we pipe the + # output through passthrough.py that will hopefully be able to do the right + # thing by using Windows APIs. + - name: Run tox for corresponding Python version + if: ${{ contains(matrix.os, 'windows') }} + run: | + pip install twisted pywin32 + python -m tox | python misc/windows-enospc/passthrough.py + - name: Upload eliot.log - uses: actions/upload-artifact@v1 + uses: actions/upload-artifact@v3 with: name: eliot.log path: eliot.log - name: Upload trial log - uses: actions/upload-artifact@v1 + uses: actions/upload-artifact@v3 with: name: test.log path: _trial_temp/test.log @@ -92,25 +110,6 @@ jobs: # Action for this, as of Jan 2021 it does not support Python coverage # files - only lcov files. Therefore, we use coveralls-python, the # coveralls.io-supplied Python reporter, for this. - # - # It is coveralls-python 1.x that has maintained compatibility - # with Python 2, while coveralls-python 3.x is compatible with - # Python 3. Sadly we can't use them both in the same workflow. 
- # - # The two versions of coveralls-python are somewhat mutually - # incompatible. Mixing these two different versions when - # reporting coverage to coveralls.io will lead to grief, since - # they get job IDs in different fashion. If we use both - # versions of coveralls in the same workflow, the finalizing - # step will be able to mark only part of the jobs as done, and - # the other part will be left hanging, never marked as done: it - # does not matter if we make an API call or `coveralls --finish` - # to indicate that CI has finished running. - # - # So we try to use the newer coveralls-python that is available - # via Python 3 (which is present in GitHub Actions tool cache, - # even when we're running Python 2.7 tests) throughout this - # workflow. - name: "Report Coverage to Coveralls" run: | pip3 install --upgrade coveralls==3.0.1 @@ -160,23 +159,23 @@ jobs: fail-fast: false matrix: os: + # 22.04 has some issue with Tor at the moment: + # https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3943 + - ubuntu-20.04 + - macos-12 - windows-latest - - ubuntu-latest python-version: - - 2.7 - - 3.6 - - 3.9 + - "3.11" + force-foolscap: + - false include: - # On macOS don't bother with 3.6, just to get faster builds. - - os: macos-10.15 - python-version: 2.7 - - os: macos-latest - python-version: 3.9 - + - os: ubuntu-20.04 + python-version: "3.12" + force-foolscap: true steps: - name: Install Tor [Ubuntu] - if: matrix.os == 'ubuntu-latest' + if: ${{ contains(matrix.os, 'ubuntu') }} run: sudo apt install tor # TODO: See https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3744. 
@@ -185,38 +184,24 @@ jobs: - name: Install Tor [macOS, ${{ matrix.python-version }} ] if: ${{ contains(matrix.os, 'macos') }} run: | - brew extract --version 0.4.5.8 tor homebrew/cask - brew install tor@0.4.5.8 - brew link --overwrite tor@0.4.5.8 + brew install tor - name: Install Tor [Windows] if: matrix.os == 'windows-latest' - uses: crazy-max/ghaction-chocolatey@v1 + uses: crazy-max/ghaction-chocolatey@v2 with: args: install tor - name: Check out Tahoe-LAFS sources - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: fetch-depth: 0 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} - - - name: Get pip cache directory - id: pip-cache - run: | - echo "::set-output name=dir::$(pip cache dir)" - - - name: Use pip cache - uses: actions/cache@v2 - with: - path: ${{ steps.pip-cache.outputs.dir }} - key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }} - restore-keys: | - ${{ runner.os }}-pip- + cache: 'pip' # caching pip dependencies - name: Install Python packages run: | @@ -226,16 +211,28 @@ jobs: - name: Display tool versions run: python misc/build_helpers/show-tool-versions.py - - name: Run "Python 2 integration tests" - if: ${{ matrix.python-version == '2.7' }} - run: tox -e integration - - name: Run "Python 3 integration tests" - if: ${{ matrix.python-version != '2.7' }} - run: tox -e integration3 + if: "${{ !matrix.force-foolscap }}" + env: + # On macOS this is necessary to ensure unix socket paths for tor + # aren't too long. On Windows tox won't pass it through so it has no + # effect. On Linux it doesn't make a difference one way or another. + TMPDIR: "/tmp" + run: | + tox -e integration + + - name: Run "Python 3 integration tests (force Foolscap)" + if: "${{ matrix.force-foolscap }}" + env: + # On macOS this is necessary to ensure unix socket paths for tor + # aren't too long. 
On Windows tox won't pass it through so it has no + # effect. On Linux it doesn't make a difference one way or another. + TMPDIR: "/tmp" + run: | + tox -e integration -- --force-foolscap integration/ - name: Upload eliot.log in case of failure - uses: actions/upload-artifact@v1 + uses: actions/upload-artifact@v3 if: failure() with: name: integration.eliot.json @@ -247,36 +244,24 @@ jobs: fail-fast: false matrix: os: - - macos-10.15 + - macos-12 - windows-latest - ubuntu-latest python-version: - - 2.7 + - 3.9 steps: - name: Check out Tahoe-LAFS sources - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: fetch-depth: 0 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} - - - name: Get pip cache directory - id: pip-cache - run: | - echo "::set-output name=dir::$(pip cache dir)" - - - name: Use pip cache - uses: actions/cache@v2 - with: - path: ${{ steps.pip-cache.outputs.dir }} - key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }} - restore-keys: | - ${{ runner.os }}-pip- + cache: 'pip' # caching pip dependencies - name: Install Python packages run: | @@ -294,7 +279,7 @@ jobs: run: dist/Tahoe-LAFS/tahoe --version - name: Upload PyInstaller package - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: Tahoe-LAFS-${{ matrix.os }}-Python-${{ matrix.python-version }} path: dist/Tahoe-LAFS-*-*.* diff --git a/.gitignore b/.gitignore index d6a58b88b..0cf688c54 100644 --- a/.gitignore +++ b/.gitignore @@ -29,8 +29,7 @@ zope.interface-*.egg .pc /src/allmydata/test/plugins/dropin.cache -/_trial_temp* -/_test_memory/ +**/_trial_temp* /tmp* /*.patch /dist/ @@ -54,3 +53,5 @@ zope.interface-*.egg # This is the plaintext of the private environment needed for some CircleCI # operations. It's never supposed to be checked in. 
secret-env-plain + +.ruff_cache \ No newline at end of file diff --git a/.readthedocs.yaml b/.readthedocs.yaml new file mode 100644 index 000000000..665b53178 --- /dev/null +++ b/.readthedocs.yaml @@ -0,0 +1,10 @@ +version: 2 + +build: + os: ubuntu-22.04 + tools: + python: "3.10" + +python: + install: + - requirements: docs/requirements.txt diff --git a/.ruff.toml b/.ruff.toml new file mode 100644 index 000000000..2dd6b59b5 --- /dev/null +++ b/.ruff.toml @@ -0,0 +1,18 @@ +select = [ + # Pyflakes checks + "F", + # Prohibit tabs: + "W191", + # No trailing whitespace: + "W291", + "W293", + # Make sure we bind closure variables in a loop (equivalent to pylint + # cell-var-from-loop): + "B023", + # Don't silence exceptions in finally by accident: + "B012", + # Don't use mutable default arguments: + "B006", + # Errors from PyLint: + "PLE", +] \ No newline at end of file diff --git a/CREDITS b/CREDITS index b0923fc35..89e1468aa 100644 --- a/CREDITS +++ b/CREDITS @@ -240,3 +240,27 @@ N: Lukas Pirl E: tahoe@lukas-pirl.de W: http://lukas-pirl.de D: Buildslaves (Debian, Fedora, CentOS; 2016-2021) + +N: Anxhelo Lushka +E: anxhelo1995@gmail.com +D: Web site design and updates + +N: Fon E. Noel +E: fenn25.fn@gmail.com +D: bug-fixes and refactoring + +N: Jehad Baeth +E: jehad@leastauthority.com +D: Documentation improvement + +N: May-Lee Sia +E: mayleesia@gmail.com +D: Community-manager and documentation improvements + +N: Yash Nayani +E: yashaswi.nram@gmail.com +D: Installation Guide improvements + +N: Florian Sesser +E: florian@private.storage +D: OpenMetrics support \ No newline at end of file diff --git a/Dockerfile b/Dockerfile deleted file mode 100644 index 842093fdb..000000000 --- a/Dockerfile +++ /dev/null @@ -1,10 +0,0 @@ -FROM python:2.7 - -ADD . /tahoe-lafs -RUN \ - cd /tahoe-lafs && \ - git pull --depth=100 && \ - pip install . 
&& \ - rm -rf ~/.cache/ - -WORKDIR /root diff --git a/Dockerfile.dev b/Dockerfile.dev deleted file mode 100644 index b0fd24b5e..000000000 --- a/Dockerfile.dev +++ /dev/null @@ -1,25 +0,0 @@ -FROM debian:9 -LABEL maintainer "gordon@leastauthority.com" -RUN apt-get update -RUN DEBIAN_FRONTEND=noninteractive apt-get -yq upgrade -RUN DEBIAN_FRONTEND=noninteractive apt-get -yq install build-essential python-dev libffi-dev libssl-dev python-virtualenv git -RUN \ - git clone https://github.com/tahoe-lafs/tahoe-lafs.git /root/tahoe-lafs; \ - cd /root/tahoe-lafs; \ - virtualenv --python=python2.7 venv; \ - ./venv/bin/pip install --upgrade setuptools; \ - ./venv/bin/pip install --editable .; \ - ./venv/bin/tahoe --version; -RUN \ - cd /root; \ - mkdir /root/.tahoe-client; \ - mkdir /root/.tahoe-introducer; \ - mkdir /root/.tahoe-server; -RUN /root/tahoe-lafs/venv/bin/tahoe create-introducer --location=tcp:introducer:3458 --port=tcp:3458 /root/.tahoe-introducer -RUN /root/tahoe-lafs/venv/bin/tahoe start /root/.tahoe-introducer -RUN /root/tahoe-lafs/venv/bin/tahoe create-node --location=tcp:server:3457 --port=tcp:3457 --introducer=$(cat /root/.tahoe-introducer/private/introducer.furl) /root/.tahoe-server -RUN /root/tahoe-lafs/venv/bin/tahoe create-client --webport=3456 --introducer=$(cat /root/.tahoe-introducer/private/introducer.furl) --basedir=/root/.tahoe-client --shares-needed=1 --shares-happy=1 --shares-total=1 -VOLUME ["/root/.tahoe-client", "/root/.tahoe-server", "/root/.tahoe-introducer"] -EXPOSE 3456 3457 3458 -ENTRYPOINT ["/root/tahoe-lafs/venv/bin/tahoe"] -CMD [] diff --git a/MANIFEST.in b/MANIFEST.in index 121a9778f..6cec1c847 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -4,7 +4,7 @@ include relnotes.txt include Dockerfile include tox.ini .appveyor.yml .travis.yml include .coveragerc -recursive-include src *.xhtml *.js *.png *.css *.svg *.txt +recursive-include src *.xhtml *.js *.png *.css *.svg *.txt *.yaml graft docs graft misc graft static diff --git a/Makefile 
b/Makefile index f7a357588..c02184a36 100644 --- a/Makefile +++ b/Makefile @@ -17,7 +17,7 @@ PYTHON=python export PYTHON PYFLAKES=flake8 export PYFLAKES -VIRTUAL_ENV=./.tox/py27 +VIRTUAL_ENV=./.tox/py37 SOURCES=src/allmydata static misc setup.py APPNAME=tahoe-lafs TEST_SUITE=allmydata @@ -35,7 +35,7 @@ test: .tox/create-venvs.log # Run codechecks first since it takes the least time to report issues early. tox --develop -e codechecks # Run all the test environments in parallel to reduce run-time - tox --develop -p auto -e 'py27,py36,pypy27' + tox --develop -p auto -e 'py37' .PHONY: test-venv-coverage ## Run all tests with coverage collection and reporting. test-venv-coverage: @@ -51,7 +51,7 @@ test-venv-coverage: .PHONY: test-py3-all ## Run all tests under Python 3 test-py3-all: .tox/create-venvs.log - tox --develop -e py36 allmydata + tox --develop -e py37 allmydata # This is necessary only if you want to automatically produce a new # _version.py file from the current git history (without doing a build). @@ -136,37 +136,12 @@ count-lines: # Here is a list of testing tools that can be run with 'python' from a # virtualenv in which Tahoe has been installed. There used to be Makefile # targets for each, but the exact path to a suitable python is now up to the -# developer. But as a hint, after running 'tox', ./.tox/py27/bin/python will +# developer. But as a hint, after running 'tox', ./.tox/py37/bin/python will # probably work. # src/allmydata/test/bench_dirnode.py -# The check-speed and check-grid targets are disabled, since they depend upon -# the pre-located $(TAHOE) executable that was removed when we switched to -# tox. They will eventually be resurrected as dedicated tox environments. - -# The check-speed target uses a pre-established client node to run a canned -# set of performance tests against a test network that is also -# pre-established (probably on a remote machine). 
Provide it with the path to -# a local directory where this client node has been created (and populated -# with the necessary FURLs of the test network). This target will start that -# client with the current code and then run the tests. Afterwards it will -# stop the client. -# -# The 'sleep 5' is in there to give the new client a chance to connect to its -# storageservers, since check_speed.py has no good way of doing that itself. - -##.PHONY: check-speed -##check-speed: .built -## if [ -z '$(TESTCLIENTDIR)' ]; then exit 1; fi -## @echo "stopping any leftover client code" -## -$(TAHOE) stop $(TESTCLIENTDIR) -## $(TAHOE) start $(TESTCLIENTDIR) -## sleep 5 -## $(TAHOE) @src/allmydata/test/check_speed.py $(TESTCLIENTDIR) -## $(TAHOE) stop $(TESTCLIENTDIR) - # The check-grid target also uses a pre-established client node, along with a # long-term directory that contains some well-known files. See the docstring # in src/allmydata/test/check_grid.py to see how to set this up. @@ -195,12 +170,11 @@ test-clean: # Use 'make distclean' instead to delete all generated files. .PHONY: clean clean: - rm -rf build _trial_temp _test_memory .built + rm -rf build _trial_temp .built rm -f `find src *.egg -name '*.so' -or -name '*.pyc'` rm -rf support dist rm -rf `ls -d *.egg | grep -vEe"setuptools-|setuptools_darcs-|darcsver-"` rm -rf *.pyc - rm -f bin/tahoe bin/tahoe.pyscript rm -f *.pkg .PHONY: distclean @@ -250,3 +224,62 @@ src/allmydata/_version.py: .tox/create-venvs.log: tox.ini setup.py tox --notest -p all | tee -a "$(@)" + + +# to make a new release: +# - create a ticket for the release in Trac +# - ensure local copy is up-to-date +# - create a branch like "XXXX.release" from up-to-date master +# - in the branch, run "make release" +# - run "make release-test" +# - perform any other sanity-checks on the release +# - run "make release-upload" +# Note that several commands below hard-code "meejah"; if you are +# someone else please adjust them. 
+release: + @echo "Is checkout clean?" + git diff-files --quiet + git diff-index --quiet --cached HEAD -- + + @echo "Clean docs build area" + rm -rf docs/_build/ + + @echo "Install required build software" + python3 -m pip install --editable .[build] + + @echo "Test README" + python3 setup.py check -r -s + + @echo "Update NEWS" + python3 -m towncrier build --yes --version `python3 misc/build_helpers/update-version.py --no-tag` + git add -u + git commit -m "update NEWS for release" + +# note that this always bumps the "middle" number, e.g. from 1.17.1 -> 1.18.0 +# and produces a tag into the Git repository + @echo "Bump version and create tag" + python3 misc/build_helpers/update-version.py + + @echo "Build and sign wheel" + python3 setup.py bdist_wheel + gpg --pinentry=loopback -u meejah@meejah.ca --armor --detach-sign dist/tahoe_lafs-`git describe | cut -b 12-`-py3-none-any.whl + ls dist/*`git describe | cut -b 12-`* + + @echo "Build and sign source-dist" + python3 setup.py sdist + gpg --pinentry=loopback -u meejah@meejah.ca --armor --detach-sign dist/tahoe-lafs-`git describe | cut -b 12-`.tar.gz + ls dist/*`git describe | cut -b 12-`* + +# basically just a bare-minimum smoke-test that it installs and runs +release-test: + gpg --verify dist/tahoe-lafs-`git describe | cut -b 12-`.tar.gz.asc + gpg --verify dist/tahoe_lafs-`git describe | cut -b 12-`-py3-none-any.whl.asc + virtualenv testmf_venv + testmf_venv/bin/pip install dist/tahoe_lafs-`git describe | cut -b 12-`-py3-none-any.whl + testmf_venv/bin/tahoe --version + rm -rf testmf_venv + +release-upload: + scp dist/*`git describe | cut -b 12-`* meejah@tahoe-lafs.org:/home/source/downloads + git push origin_push tahoe-lafs-`git describe | cut -b 12-` + twine upload dist/tahoe_lafs-`git describe | cut -b 12-`-py3-none-any.whl dist/tahoe_lafs-`git describe | cut -b 12-`-py3-none-any.whl.asc dist/tahoe-lafs-`git describe | cut -b 12-`.tar.gz dist/tahoe-lafs-`git describe | cut -b 12-`.tar.gz.asc diff --git a/NEWS.rst 
b/NEWS.rst index 1cfc726ae..d5dadf06d 100644 --- a/NEWS.rst +++ b/NEWS.rst @@ -5,6 +5,309 @@ User-Visible Changes in Tahoe-LAFS ================================== .. towncrier start line +Release 1.190 (2024-01-04) +'''''''''''''''''''''''''' + +Features +-------- + +- Tahoe-LAFS now includes a new "Grid Manager" specification and implementation adding more options to control which storage servers a client will use for uploads. (`#2916 `_) +- Added support for Python 3.12, and work with Eliot 1.15 (`#3072 `_) +- `tahoe run ...` will now exit when its stdin is closed. + + This facilitates subprocess management, specifically cleanup. + When a parent process is running tahoe and exits without time to do "proper" cleanup at least the stdin descriptor will be closed. + Subsequently "tahoe run" notices this and exits. (`#3921 `_) +- Mutable objects can now be created with a pre-determined "signature key" using the ``tahoe put`` CLI or the HTTP API. This enables deterministic creation of mutable capabilities. This feature must be used with care to preserve the normal security and reliability properties. (`#3962 `_) +- Added support for Python 3.11. (`#3982 `_) +- tahoe run now accepts --allow-stdin-close to mean "keep running if stdin closes" (`#4036 `_) +- The storage server and client now support a new, HTTPS-based protocol. (`#4041 `_) +- Started work on a new end-to-end benchmarking framework. (`#4060 `_) +- Some operations now run in threads, improving the responsiveness of Tahoe nodes. (`#4068 `_) +- Logs are now written in a thread, which should make the application more responsive under load. (`#4804 `_) + + +Bug Fixes +--------- + +- Provide better feedback from plugin configuration errors + + Local errors now print a useful message and exit. + Announcements that only contain invalid / unusable plugins now show a message in the Welcome page. (`#3899 `_) +- Work with (and require) newer versions of pycddl. 
(`#3938 `_) +- Uploading immutables will now better use available bandwidth, which should allow for faster uploads in many cases. (`#3939 `_) +- Downloads of large immutables should now finish much faster. (`#3946 `_) +- Fix incompatibility with transitive dependency charset_normalizer >= 3 when using PyInstaller. (`#3966 `_) +- A bug where Introducer nodes configured to listen on Tor or I2P would not actually do so has been fixed. (`#3999 `_) +- The (still off-by-default) HTTP storage client will now use Tor when Tor-based client-side anonymity was requested. + Previously it would use normal TCP connections and not be anonymous. (`#4029 `_) +- Provide our own copy of attrs' "provides()" validator + + This validator is deprecated and slated for removal; that project's suggestion is to copy the code to our project. (`#4056 `_) +- Fix a race condition with SegmentFetcher (`#4078 `_) + + +Dependency/Installation Changes +------------------------------- + +- tenacity is no longer a dependency. (`#3989 `_) + + +Documentation Changes +--------------------- + +- Several minor errors in the Great Black Swamp proposed specification document have been fixed. (`#3922 `_) +- Document the ``force_foolscap`` configuration options for ``[storage]`` and ``[client]``. (`#4039 `_) + + +Removed Features +---------------- + +- Python 3.7 is no longer supported, and Debian 10 and Ubuntu 18.04 are no longer tested. (`#3964 `_) + + +Other Changes +------------- + +- The integration test suite now includes a set of capability test vectors (``integration/vectors/test_vectors.yaml``) which can be used to verify compatibility between Tahoe-LAFS and other implementations. 
(`#3961 `_) + + +Misc/Other +---------- + +- `#3508 `_, `#3622 `_, `#3783 `_, `#3870 `_, `#3874 `_, `#3880 `_, `#3904 `_, `#3910 `_, `#3914 `_, `#3917 `_, `#3927 `_, `#3928 `_, `#3935 `_, `#3936 `_, `#3937 `_, `#3940 `_, `#3942 `_, `#3944 `_, `#3947 `_, `#3950 `_, `#3952 `_, `#3953 `_, `#3954 `_, `#3956 `_, `#3958 `_, `#3959 `_, `#3960 `_, `#3965 `_, `#3967 `_, `#3968 `_, `#3969 `_, `#3970 `_, `#3971 `_, `#3974 `_, `#3975 `_, `#3976 `_, `#3978 `_, `#3987 `_, `#3988 `_, `#3991 `_, `#3993 `_, `#3994 `_, `#3996 `_, `#3998 `_, `#4000 `_, `#4001 `_, `#4002 `_, `#4003 `_, `#4004 `_, `#4005 `_, `#4006 `_, `#4009 `_, `#4010 `_, `#4012 `_, `#4014 `_, `#4015 `_, `#4016 `_, `#4018 `_, `#4019 `_, `#4020 `_, `#4022 `_, `#4023 `_, `#4024 `_, `#4026 `_, `#4027 `_, `#4028 `_, `#4035 `_, `#4038 `_, `#4040 `_, `#4042 `_, `#4044 `_, `#4046 `_, `#4047 `_, `#4049 `_, `#4050 `_, `#4051 `_, `#4052 `_, `#4055 `_, `#4059 `_, `#4061 `_, `#4062 `_, `#4063 `_, `#4065 `_, `#4066 `_, `#4070 `_, `#4074 `_, `#4075 `_ + + +Release 1.18.0 (2022-10-02) +''''''''''''''''''''''''''' + +Backwards Incompatible Changes +------------------------------ + +- Python 3.6 is no longer supported, as it has reached end-of-life and is no longer receiving security updates. (`#3865 `_) +- Python 3.7 or later is now required; Python 2 is no longer supported. (`#3873 `_) +- Share corruption reports stored on disk are now always encoded in UTF-8. (`#3879 `_) +- Record both the PID and the process creation-time: + + a new kind of pidfile in `running.process` records both + the PID and the creation-time of the process. This facilitates + automatic discovery of a "stale" pidfile that points to a + currently-running process. If the recorded creation-time matches + the creation-time of the running process, then it is a still-running + `tahoe run` process. Otherwise, the file is stale. + + The `twistd.pid` file is no longer present. 
(`#3926 `_) + + +Features +-------- + +- The implementation of SDMF and MDMF (mutables) now requires RSA keys to be exactly 2048 bits, aligning them with the specification. + + Some code existed to allow tests to shorten this and it's + conceptually possible a modified client produced mutables + with different key-sizes. However, the spec says that they + must be 2048 bits. If you happen to have a capability with + a key-size different from 2048 you may use 1.17.1 or earlier + to read the content. (`#3828 `_) +- "make" based release automation (`#3846 `_) + + +Misc/Other +---------- + +- `#3327 `_, `#3526 `_, `#3697 `_, `#3709 `_, `#3786 `_, `#3788 `_, `#3802 `_, `#3816 `_, `#3855 `_, `#3858 `_, `#3859 `_, `#3860 `_, `#3867 `_, `#3868 `_, `#3871 `_, `#3872 `_, `#3875 `_, `#3876 `_, `#3877 `_, `#3881 `_, `#3882 `_, `#3883 `_, `#3889 `_, `#3890 `_, `#3891 `_, `#3893 `_, `#3895 `_, `#3896 `_, `#3898 `_, `#3900 `_, `#3909 `_, `#3913 `_, `#3915 `_, `#3916 `_ + + +Release 1.17.1 (2022-01-07) +''''''''''''''''''''''''''' + +Bug Fixes +--------- + +- Fixed regression on Python 3 causing the JSON version of the Welcome page to sometimes produce a 500 error (`#3852 `_) +- Fixed regression on Python 3 where JSON HTTP POSTs failed to be processed. (`#3854 `_) + + +Misc/Other +---------- + +- `#3848 `_, `#3849 `_, `#3850 `_, `#3856 `_ + + +Release 1.17.0 (2021-12-06) +''''''''''''''''''''''''''' + +Security-related Changes +------------------------ + +- The introducer server no longer writes the sensitive introducer fURL value to its log at startup time. Instead it writes the well-known path of the file from which this value can be read. (`#3819 `_) +- The storage protocol operation ``add_lease`` now safely rejects an attempt to add a 4,294,967,296th lease to an immutable share. + Previously this failed with an error after recording the new lease in the share file, resulting in the share file losing track of a one previous lease. 
(`#3821 `_) +- The storage protocol operation ``readv`` now safely rejects attempts to read negative lengths. + Previously these read requests were satisfied with the complete contents of the share file (including trailing metadata) starting from the specified offset. (`#3822 `_) +- The storage server implementation now respects the ``reserved_space`` configuration value when writing lease information and recording corruption advisories. + Previously, new leases could be created and written to disk even when the storage server had less remaining space than the configured reserve space value. + Now this operation will fail with an exception and the lease will not be created. + Similarly, if there is no space available, corruption advisories will be logged but not written to disk. (`#3823 `_) +- The storage server implementation no longer records corruption advisories about storage indexes for which it holds no shares. (`#3824 `_) +- The lease-checker now uses JSON instead of pickle to serialize its state. + + tahoe will now refuse to run until you either delete all pickle files or + migrate them using the new command:: + + tahoe admin migrate-crawler + + This will migrate all crawler-related pickle files. (`#3825 `_) +- The SFTP server no longer accepts password-based credentials for authentication. + Public/private key-based credentials are now the only supported authentication type. + This removes plaintext password storage from the SFTP credentials file. + It also removes a possible timing side-channel vulnerability which might have allowed attackers to discover an account's plaintext password. (`#3827 `_) +- The storage server now keeps hashes of lease renew and cancel secrets for immutable share files instead of keeping the original secrets. (`#3839 `_) +- The storage server now keeps hashes of lease renew and cancel secrets for mutable share files instead of keeping the original secrets. 
(`#3841 `_) + + +Features +-------- + +- Tahoe-LAFS releases now have just a .tar.gz source release and a (universal) wheel (`#3735 `_) +- tahoe-lafs now provides its statistics also in OpenMetrics format (for Prometheus et. al.) at `/statistics?t=openmetrics`. (`#3786 `_) +- If uploading an immutable hasn't had a write for 30 minutes, the storage server will abort the upload. (`#3807 `_) + + +Bug Fixes +--------- + +- When uploading an immutable, overlapping writes that include conflicting data are rejected. In practice, this likely didn't happen in real-world usage. (`#3801 `_) + + +Dependency/Installation Changes +------------------------------- + +- Tahoe-LAFS now supports running on NixOS 21.05 with Python 3. (`#3808 `_) + + +Documentation Changes +--------------------- + +- The news file for future releases will include a section for changes with a security impact. (`#3815 `_) + + +Removed Features +---------------- + +- The little-used "control port" has been removed from all node types. (`#3814 `_) + + +Other Changes +------------- + +- Tahoe-LAFS no longer runs its Tor integration test suite on Python 2 due to the increased complexity of obtaining compatible versions of necessary dependencies. (`#3837 `_) + + +Misc/Other +---------- + +- `#3525 `_, `#3527 `_, `#3754 `_, `#3758 `_, `#3784 `_, `#3792 `_, `#3793 `_, `#3795 `_, `#3797 `_, `#3798 `_, `#3799 `_, `#3800 `_, `#3805 `_, `#3806 `_, `#3810 `_, `#3812 `_, `#3820 `_, `#3829 `_, `#3830 `_, `#3831 `_, `#3832 `_, `#3833 `_, `#3834 `_, `#3835 `_, `#3836 `_, `#3838 `_, `#3842 `_, `#3843 `_, `#3847 `_ + + +Release 1.16.0 (2021-09-17) +''''''''''''''''''''''''''' + +Backwards Incompatible Changes +------------------------------ + +- The Tahoe command line now always uses UTF-8 to decode its arguments, regardless of locale. (`#3588 `_) +- tahoe backup's --exclude-from has been renamed to --exclude-from-utf-8, and correspondingly requires the file to be UTF-8 encoded. 
(`#3716 `_) + + +Features +-------- + +- Added 'typechecks' environment for tox running mypy and performing static typechecks. (`#3399 `_) +- The NixOS-packaged Tahoe-LAFS now knows its own version. (`#3629 `_) + + +Bug Fixes +--------- + +- Fix regression that broke flogtool results on Python 2. (`#3509 `_) +- Fix a logging regression on Python 2 involving unicode strings. (`#3510 `_) +- Certain implementation-internal weakref KeyErrors are now handled and should no longer cause user-initiated operations to fail. (`#3539 `_) +- SFTP public key auth likely works more consistently, and SFTP in general was previously broken. (`#3584 `_) +- Fixed issue where redirecting old-style URIs (/uri/?uri=...) didn't work. (`#3590 `_) +- ``tahoe invite`` will now read share encoding/placement configuration values from a Tahoe client node configuration file if they are not given on the command line, instead of raising an unhandled exception. (`#3650 `_) +- Fix regression where uploading files with non-ASCII names failed. (`#3738 `_) +- Fixed annoying UnicodeWarning message on Python 2 when running CLI tools. (`#3739 `_) +- Fixed bug where share corruption events were not logged on storage servers running on Windows. (`#3779 `_) + + +Dependency/Installation Changes +------------------------------- + +- Tahoe-LAFS now requires Twisted 19.10.0 or newer. As a result, it now has a transitive dependency on bcrypt. (`#1549 `_) +- Debian 8 support has been replaced with Debian 10 support. (`#3326 `_) +- Tahoe-LAFS no longer depends on Nevow. (`#3433 `_) +- Tahoe-LAFS now requires the `netifaces` Python package and no longer requires the external `ip`, `ifconfig`, or `route.exe` executables. (`#3486 `_) +- The Tahoe-LAFS project no longer commits to maintaining binary packages for all dependencies at . Please use PyPI instead. (`#3497 `_) +- Tahoe-LAFS now uses a forked version of txi2p (named txi2p-tahoe) with Python 3 support. 
(`#3633 `_) +- The Nix package now includes correct version information. (`#3712 `_) +- Use netifaces 0.11.0 wheel package from PyPI.org if you use 64-bit Python 2.7 on Windows. VCPython27 downloads are no longer available at Microsoft's website, which has made building Python 2.7 wheel packages of Python libraries with C extensions (such as netifaces) on Windows difficult. (`#3733 `_) + + +Configuration Changes +--------------------- + +- The ``[client]introducer.furl`` configuration item is now deprecated in favor of the ``private/introducers.yaml`` file. (`#3504 `_) + + +Documentation Changes +--------------------- + +- Documentation now has its own towncrier category. (`#3664 `_) +- `tox -e docs` will treat warnings about docs as errors. (`#3666 `_) +- The visibility of the Tahoe-LAFS logo has been improved for "dark" themed viewing. (`#3677 `_) +- A cheatsheet-style document for contributors was created at CONTRIBUTORS.rst (`#3682 `_) +- Our IRC channel, #tahoe-lafs, has been moved to irc.libera.chat. (`#3721 `_) +- Tahoe-LAFS project is now registered with Libera.Chat IRC network. (`#3726 `_) +- Rewriting the installation guide for Tahoe-LAFS. (`#3747 `_) +- Documentation and installation links in the README have been fixed. (`#3749 `_) +- The Great Black Swamp proposed specification now includes sample interactions to demonstrate expected usage patterns. (`#3764 `_) +- The Great Black Swamp proposed specification now includes a glossary. (`#3765 `_) +- The Great Black Swamp specification now allows parallel upload of immutable share data. (`#3769 `_) +- There is now a specification for the scheme which Tahoe-LAFS storage clients use to derive their lease renewal secrets. (`#3774 `_) +- The Great Black Swamp proposed specification now has a simplified interface for reading data from immutable shares. (`#3777 `_) +- tahoe-dev mailing list is now at tahoe-dev@lists.tahoe-lafs.org. 
(`#3782 `_) +- The Great Black Swamp specification now describes the required authorization scheme. (`#3785 `_) +- The "Great Black Swamp" proposed specification has been expanded to include two lease management APIs. (`#3037 `_) +- The specification section of the Tahoe-LAFS documentation now includes explicit discussion of the security properties of Foolscap "fURLs" on which it depends. (`#3503 `_) +- The README, revised by Viktoriia with feedback from the team, is now more focused on the developer community and provides more information about Tahoe-LAFS, why it's important, and how someone can use it or start contributing to it. (`#3545 `_) +- The "Great Black Swamp" proposed specification has been changed to use ``v=1`` as the URL version identifier. (`#3644 `_) +- You can run `make livehtml` in docs directory to invoke sphinx-autobuild. (`#3663 `_) + + +Removed Features +---------------- + +- Announcements delivered through the introducer system are no longer automatically annotated with copious information about the Tahoe-LAFS software version nor the versions of its dependencies. (`#3518 `_) +- The stats gatherer, broken since at least Tahoe-LAFS 1.13.0, has been removed. The ``[client]stats_gatherer.furl`` configuration item in ``tahoe.cfg`` is no longer allowed. The Tahoe-LAFS project recommends using a third-party metrics aggregation tool instead. (`#3549 `_) +- The deprecated ``tahoe`` start, restart, stop, and daemonize sub-commands have been removed. (`#3550 `_) +- FTP is no longer supported by Tahoe-LAFS. Please use the SFTP support instead. (`#3583 `_) +- Removed support for the Account Server frontend authentication type. 
(`#3652 `_) + + +Other Changes +------------- + +- Refactored test_introducer in web tests to use custom base test cases (`#3757 `_) + + +Misc/Other +---------- + +- `#2928 `_, `#3283 `_, `#3314 `_, `#3384 `_, `#3385 `_, `#3390 `_, `#3404 `_, `#3428 `_, `#3432 `_, `#3434 `_, `#3435 `_, `#3454 `_, `#3459 `_, `#3460 `_, `#3465 `_, `#3466 `_, `#3467 `_, `#3468 `_, `#3470 `_, `#3471 `_, `#3472 `_, `#3473 `_, `#3474 `_, `#3475 `_, `#3477 `_, `#3478 `_, `#3479 `_, `#3481 `_, `#3482 `_, `#3483 `_, `#3485 `_, `#3488 `_, `#3490 `_, `#3491 `_, `#3492 `_, `#3493 `_, `#3496 `_, `#3499 `_, `#3500 `_, `#3501 `_, `#3502 `_, `#3511 `_, `#3513 `_, `#3514 `_, `#3515 `_, `#3517 `_, `#3520 `_, `#3521 `_, `#3522 `_, `#3523 `_, `#3524 `_, `#3528 `_, `#3529 `_, `#3532 `_, `#3533 `_, `#3534 `_, `#3536 `_, `#3537 `_, `#3542 `_, `#3544 `_, `#3546 `_, `#3547 `_, `#3551 `_, `#3552 `_, `#3553 `_, `#3555 `_, `#3557 `_, `#3558 `_, `#3560 `_, `#3563 `_, `#3564 `_, `#3565 `_, `#3566 `_, `#3567 `_, `#3568 `_, `#3572 `_, `#3574 `_, `#3575 `_, `#3576 `_, `#3577 `_, `#3578 `_, `#3579 `_, `#3580 `_, `#3582 `_, `#3587 `_, `#3588 `_, `#3589 `_, `#3591 `_, `#3592 `_, `#3593 `_, `#3594 `_, `#3595 `_, `#3596 `_, `#3599 `_, `#3600 `_, `#3603 `_, `#3605 `_, `#3606 `_, `#3607 `_, `#3608 `_, `#3611 `_, `#3612 `_, `#3613 `_, `#3615 `_, `#3616 `_, `#3617 `_, `#3618 `_, `#3619 `_, `#3620 `_, `#3621 `_, `#3623 `_, `#3624 `_, `#3625 `_, `#3626 `_, `#3628 `_, `#3630 `_, `#3631 `_, `#3632 `_, `#3634 `_, `#3635 `_, `#3637 `_, `#3638 `_, `#3640 `_, `#3642 `_, `#3645 `_, `#3646 `_, `#3647 `_, `#3648 `_, `#3649 `_, `#3651 `_, `#3653 `_, `#3654 `_, `#3655 `_, `#3656 `_, `#3657 `_, `#3658 `_, `#3662 `_, `#3667 `_, `#3669 `_, `#3670 `_, `#3671 `_, `#3672 `_, `#3674 `_, `#3675 `_, `#3676 `_, `#3678 `_, `#3679 `_, `#3681 `_, `#3683 `_, `#3686 `_, `#3687 `_, `#3691 `_, `#3692 `_, `#3699 `_, `#3700 `_, `#3701 `_, `#3702 `_, `#3703 `_, `#3704 `_, `#3705 `_, `#3707 `_, `#3708 `_, `#3709 `_, `#3711 `_, `#3713 `_, `#3714 `_, `#3715 
`_, `#3717 `_, `#3718 `_, `#3722 `_, `#3723 `_, `#3727 `_, `#3728 `_, `#3729 `_, `#3730 `_, `#3731 `_, `#3732 `_, `#3734 `_, `#3735 `_, `#3736 `_, `#3741 `_, `#3743 `_, `#3744 `_, `#3745 `_, `#3746 `_, `#3751 `_, `#3759 `_, `#3760 `_, `#3763 `_, `#3773 `_, `#3781 `_ + + Release 1.15.1 '''''''''''''' diff --git a/README.rst b/README.rst index 705ed11bb..bbf88610d 100644 --- a/README.rst +++ b/README.rst @@ -53,12 +53,11 @@ For more detailed instructions, read `Installing Tahoe-LAFS `__ to learn how to set up your first Tahoe-LAFS node. -🐍 Python 3 Support -------------------- +🐍 Python 2 +----------- -Python 3 support has been introduced starting with Tahoe-LAFS 1.16.0, alongside Python 2. -System administrators are advised to start running Tahoe on Python 3 and should expect Python 2 support to be dropped in a future version. -Please, feel free to file issues if you run into bugs while running Tahoe on Python 3. +Python 3.8 or later is required. +If you are still using Python 2.7, use Tahoe-LAFS version 1.17.1. 🤖 Issues @@ -95,7 +94,14 @@ As a community-driven open source project, Tahoe-LAFS welcomes contributions of - `Patch reviews `__ -Before authoring or reviewing a patch, please familiarize yourself with the `Coding Standard `__ and the `Contributor Code of Conduct `__. +Before authoring or reviewing a patch, please familiarize yourself with the `Coding Standard `__ and the `Contributor Code of Conduct `__. + + +🥳 First Contribution? +---------------------- + +If you are committing to Tahoe for the very first time, it's required that you add your name to our contributor list in `CREDITS `__. Please ensure that this addition has its own commit within your first contribution. + 🤝 Supporters -------------- diff --git a/benchmarks/__init__.py b/benchmarks/__init__.py new file mode 100644 index 000000000..bcefa8d3a --- /dev/null +++ b/benchmarks/__init__.py @@ -0,0 +1,12 @@ +""" +pytest-based end-to-end benchmarks of Tahoe-LAFS. 
+ +Usage: + +$ systemd-run --user --scope pytest benchmark --number-of-nodes=3 + +It's possible to pass --number-of-nodes multiple times. + +The systemd-run makes sure the tests run in their own cgroup so we get CPU +accounting correct. +""" diff --git a/benchmarks/conftest.py b/benchmarks/conftest.py new file mode 100644 index 000000000..972d89b48 --- /dev/null +++ b/benchmarks/conftest.py @@ -0,0 +1,150 @@ +""" +pytest infrastructure for benchmarks. + +The number of nodes is parameterized via a --number-of-nodes CLI option added +to pytest. +""" + +import os +from shutil import which, rmtree +from tempfile import mkdtemp +from contextlib import contextmanager +from time import time + +import pytest +import pytest_twisted + +from twisted.internet import reactor +from twisted.internet.defer import DeferredList, succeed + +from allmydata.util.iputil import allocate_tcp_port + +from integration.grid import Client, create_grid, create_flog_gatherer + + +def pytest_addoption(parser): + parser.addoption( + "--number-of-nodes", + action="append", + default=[], + type=int, + help="list of number_of_nodes to benchmark against", + ) + # Required to be compatible with integration.util code that we indirectly + # depend on, but also might be useful. + parser.addoption( + "--force-foolscap", + action="store_true", + default=False, + dest="force_foolscap", + help=( + "If set, force Foolscap only for the storage protocol. " + + "Otherwise HTTP will be used." + ), + ) + + +def pytest_generate_tests(metafunc): + # Make number_of_nodes accessible as a parameterized fixture: + if "number_of_nodes" in metafunc.fixturenames: + metafunc.parametrize( + "number_of_nodes", + metafunc.config.getoption("number_of_nodes"), + scope="session", + ) + + +def port_allocator(): + port = allocate_tcp_port() + return succeed(port) + + +@pytest.fixture(scope="session") +def grid(request): + """ + Provides a new Grid with a single Introducer and flog-gathering process. 
+ + Notably does _not_ provide storage servers; use the storage_nodes + fixture if your tests need a Grid that can be used for puts / gets. + """ + tmp_path = mkdtemp(prefix="tahoe-benchmark") + request.addfinalizer(lambda: rmtree(tmp_path)) + flog_binary = which("flogtool") + flog_gatherer = pytest_twisted.blockon( + create_flog_gatherer(reactor, request, tmp_path, flog_binary) + ) + g = pytest_twisted.blockon( + create_grid(reactor, request, tmp_path, flog_gatherer, port_allocator) + ) + return g + + +@pytest.fixture(scope="session") +def storage_nodes(grid, number_of_nodes): + nodes_d = [] + for _ in range(number_of_nodes): + nodes_d.append(grid.add_storage_node()) + + nodes_status = pytest_twisted.blockon(DeferredList(nodes_d)) + for ok, value in nodes_status: + assert ok, "Storage node creation failed: {}".format(value) + return grid.storage_servers + + +@pytest.fixture(scope="session") +def client_node(request, grid, storage_nodes, number_of_nodes) -> Client: + """ + Create a grid client node with number of shares matching number of nodes. + """ + client_node = pytest_twisted.blockon( + grid.add_client( + "client_node", + needed=number_of_nodes, + happy=number_of_nodes, + total=number_of_nodes + 3, # Make sure FEC does some work + ) + ) + print(f"Client node pid: {client_node.process.transport.pid}") + return client_node + +def get_cpu_time_for_cgroup(): + """ + Get how many CPU seconds have been used in current cgroup so far. + + Assumes we're running in a v2 cgroup. 
+ """ + with open("/proc/self/cgroup") as f: + cgroup = f.read().strip().split(":")[-1] + assert cgroup.startswith("/") + cgroup = cgroup[1:] + cpu_stat = os.path.join("/sys/fs/cgroup", cgroup, "cpu.stat") + with open(cpu_stat) as f: + for line in f.read().splitlines(): + if line.startswith("usage_usec"): + return int(line.split()[1]) / 1_000_000 + raise ValueError("Failed to find usage_usec") + + +class Benchmarker: + """Keep track of benchmarking results.""" + + @contextmanager + def record(self, capsys: pytest.CaptureFixture[str], name, **parameters): + """Record the timing of running some code, if it succeeds.""" + start_cpu = get_cpu_time_for_cgroup() + start = time() + yield + elapsed = time() - start + end_cpu = get_cpu_time_for_cgroup() + elapsed_cpu = end_cpu - start_cpu + # FOR now we just print the outcome: + parameters = " ".join(f"{k}={v}" for (k, v) in parameters.items()) + with capsys.disabled(): + print( + f"\nBENCHMARK RESULT: {name} {parameters} elapsed={elapsed:.3} (secs) CPU={elapsed_cpu:.3} (secs)\n" + ) + + +@pytest.fixture(scope="session") +def tahoe_benchmarker(): + return Benchmarker() diff --git a/benchmarks/test_cli.py b/benchmarks/test_cli.py new file mode 100644 index 000000000..42b4b45bf --- /dev/null +++ b/benchmarks/test_cli.py @@ -0,0 +1,66 @@ +"""Benchmarks for minimal `tahoe` CLI interactions.""" + +from subprocess import Popen, PIPE + +import pytest + +from integration.util import cli + + +@pytest.fixture(scope="module", autouse=True) +def cli_alias(client_node): + cli(client_node.process, "create-alias", "cli") + + +@pytest.mark.parametrize("file_size", [1000, 100_000, 1_000_000, 10_000_000]) +def test_get_put_files_sequentially( + file_size, + client_node, + tahoe_benchmarker, + number_of_nodes, + capsys, +): + """ + Upload 5 files with ``tahoe put`` and then download them with ``tahoe + get``, measuring the latency of both operations. We do multiple uploads + and downloads to try to reduce noise. 
+ """ + DATA = b"0123456789" * (file_size // 10) + + with tahoe_benchmarker.record( + capsys, "cli-put-5-file-sequentially", file_size=file_size, number_of_nodes=number_of_nodes + ): + for i in range(5): + p = Popen( + [ + "tahoe", + "--node-directory", + client_node.process.node_dir, + "put", + "-", + f"cli:get_put_files_sequentially{i}", + ], + stdin=PIPE, + ) + p.stdin.write(DATA) + p.stdin.write(str(i).encode("ascii")) + p.stdin.close() + assert p.wait() == 0 + + with tahoe_benchmarker.record( + capsys, "cli-get-5-files-sequentially", file_size=file_size, number_of_nodes=number_of_nodes + ): + for i in range(5): + p = Popen( + [ + "tahoe", + "--node-directory", + client_node.process.node_dir, + "get", + f"cli:get_put_files_sequentially{i}", + "-", + ], + stdout=PIPE, + ) + assert p.stdout.read() == DATA + str(i).encode("ascii") + assert p.wait() == 0 diff --git a/default.nix b/default.nix new file mode 100644 index 000000000..62dfc8176 --- /dev/null +++ b/default.nix @@ -0,0 +1,13 @@ +# This is the flake-compat glue code. It loads the flake and gives us its +# outputs. This gives us backwards compatibility with pre-flake consumers. +# All of the real action is in flake.nix. +(import + ( + let lock = builtins.fromJSON (builtins.readFile ./flake.lock); in + fetchTarball { + url = "https://github.com/edolstra/flake-compat/archive/${lock.nodes.flake-compat.locked.rev}.tar.gz"; + sha256 = lock.nodes.flake-compat.locked.narHash; + } + ) + { src = ./.; } +).defaultNix.default diff --git a/docker-compose.yml b/docker-compose.yml deleted file mode 100644 index 1d23be71a..000000000 --- a/docker-compose.yml +++ /dev/null @@ -1,49 +0,0 @@ -version: '2' -services: - client: - build: - context: . 
- dockerfile: ./Dockerfile.dev - volumes: - - ./misc:/root/tahoe-lafs/misc - - ./integration:/root/tahoe-lafs/integration - - ./src:/root/tahoe-lafs/static - - ./setup.cfg:/root/tahoe-lafs/setup.cfg - - ./setup.py:/root/tahoe-lafs/setup.py - ports: - - "127.0.0.1:3456:3456" - depends_on: - - "introducer" - - "server" - entrypoint: /root/tahoe-lafs/venv/bin/tahoe - command: ["run", "/root/.tahoe-client"] - server: - build: - context: . - dockerfile: ./Dockerfile.dev - volumes: - - ./misc:/root/tahoe-lafs/misc - - ./integration:/root/tahoe-lafs/integration - - ./src:/root/tahoe-lafs/static - - ./setup.cfg:/root/tahoe-lafs/setup.cfg - - ./setup.py:/root/tahoe-lafs/setup.py - ports: - - "127.0.0.1:3457:3457" - depends_on: - - "introducer" - entrypoint: /root/tahoe-lafs/venv/bin/tahoe - command: ["run", "/root/.tahoe-server"] - introducer: - build: - context: . - dockerfile: ./Dockerfile.dev - volumes: - - ./misc:/root/tahoe-lafs/misc - - ./integration:/root/tahoe-lafs/integration - - ./src:/root/tahoe-lafs/static - - ./setup.cfg:/root/tahoe-lafs/setup.cfg - - ./setup.py:/root/tahoe-lafs/setup.py - ports: - - "127.0.0.1:3458:3458" - entrypoint: /root/tahoe-lafs/venv/bin/tahoe - command: ["run", "/root/.tahoe-introducer"] diff --git a/docs/Installation/install-tahoe.rst b/docs/Installation/install-tahoe.rst index 2fe47f4a8..8ceca2e01 100644 --- a/docs/Installation/install-tahoe.rst +++ b/docs/Installation/install-tahoe.rst @@ -28,15 +28,15 @@ To install Tahoe-LAFS on Windows: 3. Open the installer by double-clicking it. Select the **Add Python to PATH** check-box, then click **Install Now**. 4. Start PowerShell and enter the following command to verify python installation:: - + python --version 5. Enter the following command to install Tahoe-LAFS:: - + pip install tahoe-lafs 6. Verify installation by checking for the version:: - + tahoe --version If you want to hack on Tahoe's source code, you can install Tahoe in a ``virtualenv`` on your Windows Machine. 
To learn more, see :doc:`install-on-windows`. @@ -56,13 +56,13 @@ If you are working on MacOS or a Linux distribution which does not have Tahoe-LA * **pip**: Most python installations already include `pip`. However, if your installation does not, see `pip installation `_. 2. Install Tahoe-LAFS using pip:: - + pip install tahoe-lafs 3. Verify installation by checking for the version:: - + tahoe --version -If you are looking to hack on the source code or run pre-release code, we recommend you install Tahoe-LAFS on a `virtualenv` instance. To learn more, see :doc:`install-on-linux`. +If you are looking to hack on the source code or run pre-release code, we recommend you install Tahoe-LAFS on a `virtualenv` instance. To learn more, see :doc:`install-on-linux`. You can always write to the `tahoe-dev mailing list `_ or chat on the `Libera.chat IRC `_ if you are not able to get Tahoe-LAFS up and running on your deployment. diff --git a/docs/architecture.rst b/docs/architecture.rst index 4cfabd844..989f2cd87 100644 --- a/docs/architecture.rst +++ b/docs/architecture.rst @@ -57,6 +57,18 @@ The key-value store is implemented by a grid of Tahoe-LAFS storage servers -- user-space processes. Tahoe-LAFS storage clients communicate with the storage servers over TCP. +There are two supported protocols: + +* Foolscap, the only supported protocol in releases before v1.19. +* HTTPS, new in v1.19. + +By default HTTPS is enabled. When HTTPS is enabled on the server, the server +transparently listens for both Foolscap and HTTPS on the same port. When it is +disabled, the server only supports Foolscap. Clients can use either; by default +they will use HTTPS when possible, falling back to Foolscap, but when configured +appropriately they will only use Foolscap. At this time the only limitation of +HTTPS is that I2P is not supported, so any usage of I2P only uses Foolscap. + Storage servers hold data in the form of "shares". Shares are encoded pieces of files. 
There are a configurable number of shares for each file, 10 by default. Normally, each share is stored on a separate server, but in some diff --git a/docs/check_running.py b/docs/check_running.py new file mode 100644 index 000000000..2705f1721 --- /dev/null +++ b/docs/check_running.py @@ -0,0 +1,47 @@ + +import psutil +import filelock + + +def can_spawn_tahoe(pidfile): + """ + Determine if we can spawn a Tahoe-LAFS for the given pidfile. That + pidfile may be deleted if it is stale. + + :param pathlib.Path pidfile: the file to check, that is the Path + to "running.process" in a Tahoe-LAFS configuration directory + + :returns bool: True if we can spawn `tahoe run` here + """ + lockpath = pidfile.parent / (pidfile.name + ".lock") + with filelock.FileLock(lockpath): + try: + with pidfile.open("r") as f: + pid, create_time = f.read().strip().split(" ", 1) + except FileNotFoundError: + return True + + # somewhat interesting: we have a pidfile + pid = int(pid) + create_time = float(create_time) + + try: + proc = psutil.Process(pid) + # most interesting case: there _is_ a process running at the + # recorded PID -- but did it just happen to get that PID, or + # is it the very same one that wrote the file? + if create_time == proc.create_time(): + # _not_ stale! another instance is still running against + # this configuration + return False + + except psutil.NoSuchProcess: + pass + + # the file is stale + pidfile.unlink() + return True + + +from pathlib import Path +print("can spawn?", can_spawn_tahoe(Path("running.process"))) diff --git a/docs/conf.py b/docs/conf.py index af05e5900..79a57e48c 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -12,9 +12,6 @@ # All configuration values have a default; values that are commented out # serve to show the default. -import sys -import os - # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. 
If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. @@ -63,7 +60,7 @@ release = u'1.x' # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. -language = None +language = "en" # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: diff --git a/docs/configuration.rst b/docs/configuration.rst index 93c9aa0f1..7f038192e 100644 --- a/docs/configuration.rst +++ b/docs/configuration.rst @@ -679,6 +679,13 @@ Client Configuration location to prefer their local servers so that they can maintain access to all of their uploads without using the internet. +``force_foolscap = (boolean, optional)`` + + If this is ``True``, the client will only connect to storage servers via + Foolscap, regardless of whether they support HTTPS. If this is ``False``, + the client will prefer HTTPS when it is available on the server. The default + value is ``False``. + In addition, see :doc:`accepting-donations` for a convention for donating to storage server operators. @@ -796,6 +803,13 @@ Storage Server Configuration (i.e. ``BASEDIR/storage``), but it can be placed elsewhere. Relative paths will be interpreted relative to the node's base directory. +``force_foolscap = (boolean, optional)`` + + If this is ``True``, the node will expose the storage server via Foolscap + only, with no support for HTTPS. If this is ``False``, the server will + support both Foolscap and HTTPS on the same port. The default value is + ``False``. + In addition, see :doc:`accepting-donations` for a convention encouraging donations to storage server operators. @@ -980,6 +994,9 @@ the node will not use an Introducer at all. Such "introducerless" clients must be configured with static servers (described below), or they will not be able to upload and download files. + +.. 
_server_list: + Static Server Definitions ========================= diff --git a/docs/donations.rst b/docs/donations.rst index a38e280ac..bc1d55c73 100644 --- a/docs/donations.rst +++ b/docs/donations.rst @@ -73,10 +73,15 @@ key on this list. ~$1020 1DskmM8uCvmvTKjPbeDgfmVsGifZCmxouG -* Aspiration contract (first phase, 2019) - $300k-$350k +* Aspiration contract + $300k-$350k (first phase, 2019) + $800k (second phase, 2020) 1gDXYQNH4kCJ8Dk7kgiztfjNUaA1KJcHv +* OpenCollective development work (2023) + ~$260k + 1KZYr8UU2XjuEdSPzn2pF8eRPZZvffByDf + Historical Donation Addresses ============================= @@ -104,17 +109,17 @@ This document is signed by the Tahoe-LAFS Release-Signing Key (GPG keyid (https://github.com/tahoe-lafs/tahoe-lafs.git) as `docs/donations.rst`. Both actions require access to secrets held closely by Tahoe developers. -signed: Brian Warner, 27-Dec-2018 +signed: Brian Warner, 25-Oct-2023 -----BEGIN PGP SIGNATURE----- -iQEzBAEBCAAdFiEE405i0G0Oac/KQXn/veDTHWhmanoFAlwlrdsACgkQveDTHWhm -anqEqQf/SdxMvI0+YbsZe+Gr/+lNWrNtfxAkjgLUZYRPmElZG6UKkNuPghXfsYRM -71nRbgbn05jrke7AGlulxNplTxYP/5LQVf5K1nvTE7yPI/LBMudIpAbM3wPiLKSD -qecrVZiqiIBPHWScyya91qirTHtJTJj39cs/N9937hD+Pm65paHWHDZhMkhStGH7 -05WtvD0G+fFuAgs04VDBz/XVQlPbngkmdKjIL06jpIAgzC3H9UGFcqe55HKY66jK -W769TiRuGLLS07cOPqg8t2hPpE4wv9Gs02hfg1Jc656scsFuEkh5eMMj/MXcFsED -8vwn16kjJk1fkeg+UofnXsHeHIJalQ== -=/E+V +iQEzBAEBCAAdFiEE405i0G0Oac/KQXn/veDTHWhmanoFAmU5YZMACgkQveDTHWhm +anqt+ggAo2kulNmjrWA5VhqE8i6ckkxQMRVY4y0LAfiI0ho/505ZBZvpoh/Ze31x +ZJj4DczHmZM+m3L+fZyubT4ldagYEojtwkYmxHAQz2DIV4PrdjsUQWyvkNcTBZWu +y5mR5ATk3EYRa19xGEosWK1OzW2kgRbpAbznuWsdxxw9vNENBrolGRsyJqRQHCiV +/4UkrGiOegaJSFMKy2dCyDF3ExD6wT9+fdqC5xDJZjhD+SUDJnD4oWLYLroj//v1 +sy4J+/ElNU9oaC0jDb9fx1ECk+u6B+YiaYlW/MrZNqzKCM/76yZ8sA2+ynsOHGtL +bPFpLJjX6gBwHkMqvkWhsJEojxkFVQ== +=gxlb -----END PGP SIGNATURE----- diff --git a/docs/expenses.rst b/docs/expenses.rst index fbb4293ef..b11acce74 100644 --- a/docs/expenses.rst +++ b/docs/expenses.rst @@ -131,3 
+131,54 @@ developer summit. * acdfc299c35eed3bb27f7463ad8cdfcdcd4dcfd5184f290f87530c2be999de3e 1.41401086 (@$714.16) = $1009.83, plus 0.000133 tx-fee + +Aspiration Contract +------------------- + +In December 2018, we entered into an agreement with a non-profit named +Aspiration (https://aspirationtech.org/) to fund contractors for development +work. They handle payroll, taxes, and oversight, in exchange for an 8% +management fee. The first phase of work will extend through most of 2019. + +* Recipient: Aspiration +* Address: 1gDXYQNH4kCJ8Dk7kgiztfjNUaA1KJcHv + +These txids record the transfers from the primary 1Pxi address to the +Aspiration-specific 1gDXY subaddress. In some cases, leftover funds +were swept back into the main 1Pxi address after the transfers were +complete. + +First phase, transfers performed 28-Dec-2018 - 31-Dec-2018, total 89 +BTC, about $350K. + +* 95c68d488bd92e8c164195370aaa516dff05aa4d8c543d3fb8cfafae2b811e7a + 1.0 BTC plus 0.00002705 tx-fee +* c0a5b8e3a63c56c4365d4c3ded0821bc1170f6351502849168bc34e30a0582d7 + 89.0 BTC plus 0.00000633 tx-fee +* 421cff5f398509aaf48951520738e0e63dfddf1157920c15bdc72c34e24cf1cf + return 0.00005245 BTC to 1Pxi, less 0.00000211 tx-fee + +In November 2020, we funded a second phase of the work: 51.38094 BTC, +about $800K. + +* 7558cbf3b24e8d835809d2d6f01a8ba229190102efdf36280d0639abaa488721 + 1.0 BTC plus 0.00230766 tx-fee +* 9c78ae6bb7db62cbd6be82fd52d50a2f015285b562f05de0ebfb0e5afc6fd285 + 56.0 BTC plus 0.00057400 tx-fee +* fbee4332e8c7ffbc9c1bcaee773f063550e589e58d350d14f6daaa473966c368 + returning 5.61906 BTC to 1Pxi, less 0.00012000 tx-fee + + +Open Collective +--------------- + +In August 2023, we started working with Open Collective to fund a +grant covering development work performed over the last year. + +* Recipient: Open Collective (US) +* Address: 1KZYr8UU2XjuEdSPzn2pF8eRPZZvffByDf + +The first phase transferred 7.5 BTC (about $260K). 
+ +* (txid) + (amount) diff --git a/docs/frontends/FTP-and-SFTP.rst b/docs/frontends/FTP-and-SFTP.rst index 9d4f1dcec..ede719e26 100644 --- a/docs/frontends/FTP-and-SFTP.rst +++ b/docs/frontends/FTP-and-SFTP.rst @@ -47,8 +47,8 @@ servers must be configured with a way to first authenticate a user (confirm that a prospective client has a legitimate claim to whatever authorities we might grant a particular user), and second to decide what directory cap should be used as the root directory for a log-in by the authenticated user. -A username and password can be used; as of Tahoe-LAFS v1.11, RSA or DSA -public key authentication is also supported. +As of Tahoe-LAFS v1.17, +RSA/DSA public key authentication is the only supported mechanism. Tahoe-LAFS provides two mechanisms to perform this user-to-cap mapping. The first (recommended) is a simple flat file with one account per line. @@ -59,20 +59,14 @@ Creating an Account File To use the first form, create a file (for example ``BASEDIR/private/accounts``) in which each non-comment/non-blank line is a space-separated line of -(USERNAME, PASSWORD, ROOTCAP), like so:: +(USERNAME, KEY-TYPE, PUBLIC-KEY, ROOTCAP), like so:: % cat BASEDIR/private/accounts - # This is a password line: username password cap - alice password URI:DIR2:ioej8xmzrwilg772gzj4fhdg7a:wtiizszzz2rgmczv4wl6bqvbv33ag4kvbr6prz3u6w3geixa6m6a - bob sekrit URI:DIR2:6bdmeitystckbl9yqlw7g56f4e:serp5ioqxnh34mlbmzwvkp3odehsyrr7eytt5f64we3k9hhcrcja - # This is a public key line: username keytype pubkey cap # (Tahoe-LAFS v1.11 or later) carol ssh-rsa AAAA... URI:DIR2:ovjy4yhylqlfoqg2vcze36dhde:4d4f47qko2xm5g7osgo2yyidi5m4muyo2vjjy53q4vjju2u55mfa -For public key authentication, the keytype may be either "ssh-rsa" or "ssh-dsa". -To avoid ambiguity between passwords and public key types, a password cannot -start with "ssh-". +The key type may be either "ssh-rsa" or "ssh-dsa". 
Now add an ``accounts.file`` directive to your ``tahoe.cfg`` file, as described in the next sections. diff --git a/docs/frontends/webapi.rst b/docs/frontends/webapi.rst index 77ce11974..baffa412d 100644 --- a/docs/frontends/webapi.rst +++ b/docs/frontends/webapi.rst @@ -446,6 +446,21 @@ Creating a New Directory given, the directory's format is determined by the default mutable file format, as configured on the Tahoe-LAFS node responding to the request. + In addition, an optional "private-key=" argument is supported which, if given, + specifies the underlying signing key to be used when creating the directory. + This value must be a DER-encoded 2048-bit RSA private key in urlsafe base64 + encoding. (To convert an existing PEM-encoded RSA key file into the format + required, the following commands may be used -- assuming a modern UNIX-like + environment with common tools already installed: + ``openssl rsa -in key.pem -outform der | base64 -w 0 -i - | tr '+/' '-_'``) + + Because this key can be used to derive the write capability for the + associated directory, additional care should be taken to ensure that the key is + unique, that it is kept confidential, and that it was derived from an + appropriate (high-entropy) source of randomness. If this argument is omitted + (the default behavior), Tahoe-LAFS will generate an appropriate signing key + using the underlying operating system's source of entropy. + ``POST /uri?t=mkdir-with-children`` Create a new directory, populated with a set of child nodes, and return its @@ -453,7 +468,8 @@ Creating a New Directory any other directory: the returned write-cap is the only reference to it. The format of the directory can be controlled with the format= argument in - the query string, as described above. + the query string and a signing key can be specified with the private-key= + argument, as described above. 
Initial children are provided as the body of the POST form (this is more efficient than doing separate mkdir and set_children operations). If the diff --git a/docs/gpg-setup.rst b/docs/gpg-setup.rst new file mode 100644 index 000000000..cb8cbfd20 --- /dev/null +++ b/docs/gpg-setup.rst @@ -0,0 +1,18 @@ +Preparing to Authenticate Release (Setting up GPG) +-------------------------------------------------- + +In order to keep releases authentic it's required that releases are signed before being +published. This ensures that users of Tahoe are able to verify that the version of Tahoe +they are using is coming from a trusted or at the very least known source. + +The authentication is done using ``GPG``, an implementation of ``OpenPGP``. To be able to complete +the release steps you will have to download the ``GPG`` software and set up a key (identity). + +- `Download `__ and install GPG for your operating system. +- Generate a key pair using ``gpg --gen-key``. *Some questions will be asked to personalize your key configuration.* + +You might take additional steps including: + +- Setting up a revocation certificate (in case you lose your secret key) +- Backing up your key pair +- Uploading your fingerprint to a keyserver such as `openpgp.org `__ diff --git a/docs/index.rst b/docs/index.rst index 54efc38ee..47a9bd0e5 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -40,6 +40,9 @@ preserving your privacy and security. :caption: Tahoe-LAFS in Depth architecture + gpg-setup + servers + managed-grid helper convergence-secret garbage-collection diff --git a/docs/managed-grid.rst b/docs/managed-grid.rst new file mode 100644 index 000000000..1c5acaa20 --- /dev/null +++ b/docs/managed-grid.rst @@ -0,0 +1,342 @@ + + +Managed Grid +============ + +This document explains the "Grid Manager" concept and the +`grid-manager` command. Someone operating a grid may choose to use a +Grid Manager.
Operators of storage-servers and clients will then be +given additional configuration in this case. + + +Overview and Motivation +----------------------- + +In a grid using an Introducer, a client will use any storage-server +the Introducer announces (and the Introducer will announce any +storage-server that connects to it). This means that anyone with the +Introducer fURL can connect storage to the grid. + +Sometimes, this is just what you want! + +For some use-cases, though, you want to have clients only use certain +servers. One case might be a "managed" grid, where some entity runs +the grid; clients of this grid don't want their uploads to go to +"unmanaged" storage if some other client decides to provide storage. + +One way to limit which storage servers a client connects to is via the +"server list" (:ref:`server_list`) (aka "Introducerless" +mode). Clients are given static lists of storage-servers, and connect +only to those. This means manually updating these lists if the storage +servers change, however. + +Another method is for clients to use `[client] peers.preferred=` +configuration option (:ref:`Client Configuration`), which suffers +from a similar disadvantage. + + +Grid Manager +------------ + +A "grid-manager" consists of some data defining a keypair (along with +some other details) and Tahoe sub-commands to manipulate the data and +produce certificates to give to storage-servers. Certificates assert +the statement: "Grid Manager X suggests you use storage-server Y to +upload shares to" (X and Y are public-keys). 
Such a certificate +consists of: + + - the version of the format the certificate conforms to (`1`) + - the public-key of a storage-server + - an expiry timestamp + - a signature of the above + +A client will always use any storage-server for downloads (expired +certificate, or no certificate) because clients check the ciphertext +and re-assembled plaintext against the keys in the capability; +"grid-manager" certificates only control uploads. + +Clients make use of this functionality by configuring one or more Grid Manager public keys. +This tells the client to only upload to storage-servers that have a currently-valid certificate from any of the Grid Managers their client allows. +In case none are configured, the default behavior (of using any storage server) prevails. + + +Grid Manager Data Storage +------------------------- + +The data defining the grid-manager is stored in an arbitrary +directory, which you indicate with the ``--config`` option (in the +future, we may add the ability to store the data directly in a grid, +at which time you may be able to pass a directory-capability to this +option). + +If you don't want to store the configuration on disk at all, you may +use ``--config -`` (the last character is a dash) and write a valid +JSON configuration to stdin. + +All commands require the ``--config`` option and they all behave +similarly for "data from stdin" versus "data from disk". A directory +(and not a file) is used on disk because in that mode, each +certificate issued is also stored alongside the configuration +document; in "stdin / stdout" mode, an issued certificate is only +ever available on stdout. + +The configuration is a JSON document. It is subject to change as Grid +Manager evolves. It contains a version number in the +`grid_manager_config_version` key which will increment whenever the +document schema changes. + + +grid-manager create +``````````````````` + +Create a new grid-manager. 
+ +If you specify ``--config -`` then a new grid-manager configuration is +written to stdout. Otherwise, a new grid-manager is created in the +directory specified by the ``--config`` option. It is an error if the +directory already exists. + + +grid-manager public-identity +```````````````````````````` + +Print out a grid-manager's public key. This key is derived from the +private-key of the grid-manager, so a valid grid-manager config must +be given via ``--config`` + +This public key is what is put in clients' configuration to actually +validate and use grid-manager certificates. + + +grid-manager add +```````````````` + +Takes two args: ``name pubkey``. The ``name`` is an arbitrary local +identifier for the new storage node (also sometimes called "a petname" +or "nickname"). The pubkey is the tahoe-encoded key from a ``node.pubkey`` +file in the storage-server's node directory (minus any +whitespace). For example, if ``~/storage0`` contains a storage-node, +you might do something like this:: + + grid-manager --config ./gm0 add storage0 $(cat ~/storage0/node.pubkey) + +This adds a new storage-server to a Grid Manager's +configuration. (Since it mutates the configuration, if you used +``--config -`` the new configuration will be printed to stdout). The +usefulness of the ``name`` is solely for reference within this Grid +Manager. + + +grid-manager list +````````````````` + +Lists all storage-servers that have previously been added using +``grid-manager add``. + + +grid-manager sign +````````````````` + +Takes two args: ``name expiry_days``. The ``name`` is a nickname used +previously in a ``grid-manager add`` command and ``expiry_days`` is +the number of days in the future when the certificate should expire. + +Note that this mutates the state of the grid-manager if it is on disk, +by adding this certificate to our collection of issued +certificates. 
If you used ``--config -``, the certificate isn't +persisted anywhere except to stdout (so if you wish to keep it +somewhere, that is up to you). + +This command creates a new "version 1" certificate for a +storage-server (identified by its public key). The new certificate is +printed to stdout. If you stored the config on disk, the new +certificate will (also) be in a file named like ``alice.cert.0``. + + +Enrolling a Storage Server: CLI +------------------------------- + + +tahoe admin add-grid-manager-cert +````````````````````````````````` + +- `--filename`: the file to read the cert from +- `--name`: the name of this certificate + +Import a "version 1" storage-certificate produced by a grid-manager A +storage server may have zero or more such certificates installed; for +now just one is sufficient. You will have to re-start your node after +this. Subsequent announcements to the Introducer will include this +certificate. + +.. note:: + + This command will simply edit the `tahoe.cfg` file and direct you + to re-start. In the Future(tm), we should consider (in exarkun's + words): + + "A python program you run as a new process" might not be the + best abstraction to layer on top of the configuration + persistence system, though. It's a nice abstraction for users + (although most users would probably rather have a GUI) but it's + not a great abstraction for automation. So at some point it + may be better if there is CLI -> public API -> configuration + persistence system. 
And maybe "public API" is even a network + API for the storage server so it's equally easy to access from + an agent implemented in essentially any language and maybe if + the API is exposed by the storage node itself then this also + gives you live-configuration-updates, avoiding the need for + node restarts (not that this is the only way to accomplish + this, but I think it's a good way because it avoids the need + for messes like inotify and it supports the notion that the + storage node process is in charge of its own configuration + persistence system, not just one consumer among many ... which + has some nice things going for it ... though how this interacts + exactly with further node management automation might bear + closer scrutiny). + + +Enrolling a Storage Server: Config +---------------------------------- + +You may edit the ``[storage]`` section of the ``tahoe.cfg`` file to +turn on grid-management with ``grid_management = true``. You then must +also provide a ``[grid_management_certificates]`` section in the +config-file which lists ``name = path/to/certificate`` pairs. + +These certificate files are issued by the ``grid-manager sign`` +command; these should be transmitted to the storage server operator +who includes them in the config for the storage server. Relative paths +are based from the node directory. Example:: + + [storage] + grid_management = true + + [grid_management_certificates] + default = example_grid.cert + +This will cause us to give this certificate to any Introducers we +connect to (and subsequently, the Introducer will give the certificate +out to clients). + + +Enrolling a Client: Config +-------------------------- + +You may instruct a Tahoe client to use only storage servers from given +Grid Managers. If there are no such keys, any servers are used +(but see https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3979). 
If +there are one or more keys, the client will only upload to a storage +server that has a valid certificate (from any of the keys). + +To specify public-keys, add a ``[grid_managers]`` section to the +config. This consists of ``name = value`` pairs where ``name`` is an +arbitrary name and ``value`` is a public-key of a Grid +Manager. Example:: + + [grid_managers] + example_grid = pub-v0-vqimc4s5eflwajttsofisp5st566dbq36xnpp4siz57ufdavpvlq + +See also https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3507 which +proposes a command to edit the config. + + +Example Setup of a New Managed Grid +----------------------------------- + +This example creates an actual grid, but it's all just on one machine +with different "node directories" and a separate tahoe process for +each node. Usually of course each storage server would be on a +separate computer. + +Note that we use the ``daemonize`` command in the following but that's +only one way to handle "running a command in the background". You +could instead run commands that start with ``daemonize ...`` in their +own shell/terminal window or via something like ``systemd`` + +We'll store our Grid Manager configuration on disk, in +``./gm0``. To initialize this directory:: + + grid-manager --config ./gm0 create + +(If you already have a grid, you can :ref:`skip ahead `.) + +First of all, create an Introducer. 
Note that we actually have to run +it briefly before it creates the "Introducer fURL" we want for the +next steps:: + + tahoe create-introducer --listen=tcp --port=5555 --location=tcp:localhost:5555 ./introducer + daemonize tahoe -d introducer run + +Next, we attach a couple of storage nodes:: + + tahoe create-node --introducer $(cat introducer/private/introducer.furl) --nickname storage0 --webport 6001 --location tcp:localhost:6003 --port 6003 ./storage0 + tahoe create-node --introducer $(cat introducer/private/introducer.furl) --nickname storage1 --webport 6101 --location tcp:localhost:6103 --port 6103 ./storage1 + daemonize tahoe -d storage0 run + daemonize tahoe -d storage1 run + +.. _skip_ahead: + +We can now tell the Grid Manager about our new storage servers:: + + grid-manager --config ./gm0 add storage0 $(cat storage0/node.pubkey) + grid-manager --config ./gm0 add storage1 $(cat storage1/node.pubkey) + +To produce a new certificate for each node, we do this:: + + grid-manager --config ./gm0 sign storage0 > ./storage0/gridmanager.cert + grid-manager --config ./gm0 sign storage1 > ./storage1/gridmanager.cert + +Now, we want our storage servers to actually announce these +certificates into the grid. We do this by adding some configuration +(in ``tahoe.cfg``):: + + [storage] + grid_management = true + + [grid_manager_certificates] + default = gridmanager.cert + +Add the above bit to each node's ``tahoe.cfg`` and re-start the +storage nodes. (Alternatively, use the ``tahoe add-grid-manager`` +command). + +Now try adding a new storage server ``storage2``. This client can join +the grid just fine, and announce itself to the Introducer as providing +storage:: + + tahoe create-node --introducer $(cat introducer/private/introducer.furl) --nickname storage2 --webport 6301 --location tcp:localhost:6303 --port 6303 ./storage2 + daemonize tahoe -d storage2 run + +At this point any client will upload to any of these three +storage-servers. Make a client "alice" and try! 
+ +:: + + tahoe create-client --introducer $(cat introducer/private/introducer.furl) --nickname alice --webport 6401 --shares-total=3 --shares-needed=2 --shares-happy=3 ./alice + daemonize tahoe -d alice run + tahoe -d alice put README.rst # prints out a read-cap + find storage2/storage/shares # confirm storage2 has a share + +Now we want to make Alice only upload to the storage servers that the +grid-manager has given certificates to (``storage0`` and +``storage1``). We need the grid-manager's public key to put in Alice's +configuration:: + + grid-manager --config ./gm0 public-identity + +Put the key printed out above into Alice's ``tahoe.cfg`` in section +``client``:: + + [grid_managers] + example_name = pub-v0-vqimc4s5eflwajttsofisp5st566dbq36xnpp4siz57ufdavpvlq + + +Now, re-start the "alice" client. Since we made Alice's parameters +require 3 storage servers to be reachable (``--happy=3``), all their +uploads should now fail (so ``tahoe put`` will fail) because they +won't use storage2 and thus can't "achieve happiness". + +A proposal to expose more information about Grid Manager and +certificate status in the Welcome page is discussed in +https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3506 diff --git a/docs/performance.rst b/docs/performance.rst index 6ddeb1fe8..a0487c72c 100644 --- a/docs/performance.rst +++ b/docs/performance.rst @@ -82,8 +82,9 @@ network: A memory footprint: N/K*A -notes: Tahoe-LAFS generates a new RSA keypair for each mutable file that it -publishes to a grid. This takes up to 1 or 2 seconds on a typical desktop PC. +notes: +Tahoe-LAFS generates a new RSA keypair for each mutable file that it publishes to a grid. +This takes around 100 milliseconds on a relatively high-end laptop from 2021. Part of the process of encrypting, encoding, and uploading a mutable file to a Tahoe-LAFS grid requires that the entire file be in memory at once. 
For larger diff --git a/docs/proposed/index.rst b/docs/proposed/index.rst index d01d92d2d..f0bb2f344 100644 --- a/docs/proposed/index.rst +++ b/docs/proposed/index.rst @@ -14,4 +14,3 @@ index only lists the files that are in .rst format. :maxdepth: 2 leasedb - http-storage-node-protocol diff --git a/docs/release-checklist.rst b/docs/release-checklist.rst index da1bbe16f..e02844d67 100644 --- a/docs/release-checklist.rst +++ b/docs/release-checklist.rst @@ -3,9 +3,8 @@ Release Checklist ================= -These instructions were produced while making the 1.15.0 release. They -are based on the original instructions (in old revisions in the file -`docs/how_to_make_a_tahoe-lafs_release.org`). +This release checklist specifies a series of checks that anyone engaged in +releasing a version of Tahoe should follow. Any contributor can do the first part of the release preparation. Only certain contributors can perform other parts. These are the two main @@ -13,9 +12,12 @@ sections of this checklist (and could be done by different people). A final section describes how to announce the release. +This checklist is based on the original instructions (in old revisions in the file +`docs/how_to_make_a_tahoe-lafs_release.org`). + Any Contributor ---------------- +=============== Anyone who can create normal PRs should be able to complete this portion of the release process. @@ -32,13 +34,35 @@ Tuesday if you want to get anything in"). - Create a ticket for the release in Trac - Ticket number needed in next section +- Making first release? See `GPG Setup Instructions `__ to make sure you can sign releases. [One time setup] + +Get a clean checkout +```````````````````` + +The release proccess involves compressing source files and putting them in formats +suitable for distribution such as ``.tar.gz`` and ``zip``. That said, it's neccesary to +the release process begins with a clean checkout to avoid making a release with +previously generated files. 
+ +- Inside the tahoe root dir run ``git clone . ../tahoe-release-x.x.x`` where (x.x.x is the release number such as 1.16.0). + +.. note:: + The above command would create a new directory at the same level as your original clone named ``tahoe-release-x.x.x``. You can name this folder however you want but it would be a good + practice to give it the release name. You MAY also discard this directory once the release + process is complete. + +Get into the release directory and install dependencies by running + +- cd ../tahoe-release-x.x.x (assuming you are still in your original clone) +- python -m venv venv +- ./venv/bin/pip install --editable .[test] Create Branch and Apply Updates ``````````````````````````````` -- Create a branch for release-candidates (e.g. `XXXX.release-1.15.0.rc0`) -- run `tox -e news` to produce a new NEWS.txt file (this does a commit) +- Create a branch for the release/candidate (e.g. ``XXXX.release-1.16.0``) +- run tox -e news to produce a new NEWS.txt file (this does a commit) - create the news for the release - newsfragments/.minor @@ -46,7 +70,7 @@ Create Branch and Apply Updates - manually fix NEWS.txt - - proper title for latest release ("Release 1.15.0" instead of "Release ...post1432") + - proper title for latest release ("Release 1.16.0" instead of "Release ...post1432") - double-check date (maybe release will be in the future) - spot-check the release notes (these come from the newsfragments files though so don't do heavy editing) @@ -54,7 +78,7 @@ Create Branch and Apply Updates - update "relnotes.txt" - - update all mentions of 1.14.0 -> 1.15.0 + - update all mentions of ``1.16.0`` to new and higher release version for example ``1.16.1`` - update "previous release" statement and date - summarize major changes - commit it @@ -63,14 +87,7 @@ Create Branch and Apply Updates - change the value given for `version` from `OLD.post1` to `NEW.post1` -- update "CREDITS" - - - are there any new contributors in this release? 
- one way: git log release-1.14.0.. | grep Author | sort | uniq - commit it - update "docs/known_issues.rst" if appropriate -- update "docs/Installation/install-tahoe.rst" references to the new release - Push the branch to github - Create a (draft) PR; this should trigger CI (note that github doesn't let you create a PR without some changes on the branch so @@ -95,23 +112,33 @@ they will need to evaluate which contributors' signatures they trust. - (all steps above are completed) - sign the release - - git tag -s -u 0xE34E62D06D0E69CFCA4179FFBDE0D31D68666A7A -m "release Tahoe-LAFS-1.15.0rc0" tahoe-lafs-1.15.0rc0 - - (replace the key-id above with your own) + - git tag -s -u 0xE34E62D06D0E69CFCA4179FFBDE0D31D68666A7A -m "release Tahoe-LAFS-1.16.0rc0" tahoe-lafs-1.16.0rc0 + +.. note:: + - Replace the key-id above with your own, which can simply be your email if it's attached to your fingerprint. + - Don't forget to put the correct tag message and name. In this example, the tag message is "release Tahoe-LAFS-1.16.0rc0" and the tag name is ``tahoe-lafs-1.16.0rc0`` - build all code locally + - these should all pass: - - tox -e py27,codechecks,docs,integration + - tox -e py37,codechecks,docs,integration - these can fail (ideally they should not of course): - tox -e deprecations,upcoming-deprecations +- clone to a clean, local checkout (to avoid extra files being included in the release) + + - cd /tmp + - git clone /home/meejah/src/tahoe-lafs + - build tarballs - tox -e tarballs - - confirm it at least exists: - - ls dist/ | grep 1.15.0rc0 + - Confirm that release tarballs exist by running: + + - ls dist/ | grep 1.16.0rc0 - inspect and test the tarballs @@ -120,14 +147,12 @@ they will need to evaluate which contributors' signatures they trust.
- when satisfied, sign the tarballs: - - gpg --pinentry=loopback --armor --detach-sign dist/tahoe_lafs-1.15.0rc0-py2-none-any.whl - - gpg --pinentry=loopback --armor --detach-sign dist/tahoe_lafs-1.15.0rc0.tar.bz2 - - gpg --pinentry=loopback --armor --detach-sign dist/tahoe_lafs-1.15.0rc0.tar.gz - - gpg --pinentry=loopback --armor --detach-sign dist/tahoe_lafs-1.15.0rc0.zip + - gpg --pinentry=loopback --armor -u 0xE34E62D06D0E69CFCA4179FFBDE0D31D68666A7A --detach-sign dist/tahoe_lafs-1.16.0rc0-py2.py3-none-any.whl + - gpg --pinentry=loopback --armor --detach-sign dist/tahoe_lafs-1.16.0rc0.tar.gz Privileged Contributor ------------------------ +====================== Steps in this portion require special access to keys or infrastructure. For example, **access to tahoe-lafs.org** to upload @@ -155,24 +180,32 @@ need to be uploaded to https://tahoe-lafs.org in `~source/downloads` - secure-copy all release artifacts to the download area on the tahoe-lafs.org host machine. `~source/downloads` on there maps to - https://tahoe-lafs.org/downloads/ on the Web. -- scp dist/*1.15.0* username@tahoe-lafs.org:/home/source/downloads + https://tahoe-lafs.org/downloads/ on the Web: + + - scp dist/*1.15.0* username@tahoe-lafs.org:/home/source/downloads + - the following developers have access to do this: - exarkun - meejah - warner +Push the signed tag to the main repository: + +- git push origin tahoe-lafs-1.17.1 + For the actual release, the tarball and signature files need to be uploaded to PyPI as well. -- how to do this? -- (original guide says only `twine upload dist/*`) -- the following developers have access to do this: +- ls dist/*1.19.0* +- twine upload --username __token__ --password `cat SECRET-pypi-tahoe-publish-token` dist/*1.19.0* + +The following developers have access to do this: - warner + - meejah - exarkun (partial?) - - meejah (partial?) 
+ Announcing the Release Candidate ```````````````````````````````` diff --git a/docs/requirements.txt b/docs/requirements.txt new file mode 100644 index 000000000..39c4c20f0 --- /dev/null +++ b/docs/requirements.txt @@ -0,0 +1,4 @@ +sphinx +docutils<0.18 # https://github.com/sphinx-doc/sphinx/issues/9788 +recommonmark +sphinx_rtd_theme diff --git a/docs/running.rst b/docs/running.rst index 406c8200b..263448735 100644 --- a/docs/running.rst +++ b/docs/running.rst @@ -124,6 +124,35 @@ Tahoe-LAFS. .. _magic wormhole: https://magic-wormhole.io/ +Multiple Instances +------------------ + +Running multiple instances against the same configuration directory isn't supported. +This will lead to undefined behavior and could corrupt the configuration or state. + +We attempt to avoid this situation with a "pidfile"-style file in the config directory called ``running.process``. +There may be a parallel file called ``running.process.lock`` in existence. + +The ``.lock`` file exists to make sure only one process modifies ``running.process`` at once. +The lock file is managed by the `lockfile `_ library. +If you wish to make use of ``running.process`` for any reason you should also lock it and follow the semantics of lockfile. + +If ``running.process`` exists then it contains the PID and the creation-time of the process. +When no such file exists, there is no other process running on this configuration. +If there is a ``running.process`` file, it may be a leftover file or it may indicate that another process is running against this config. +To tell the difference, determine if the PID in the file exists currently. +If it does, check the creation-time of the process versus the one in the file. +If these match, there is another process currently running and using this config. +Otherwise, the file is stale -- it should be removed before starting Tahoe-LAFS. + +Some example Python code to check the above situations: + +.. 
literalinclude:: check_running.py + + + + + A note about small grids ------------------------ diff --git a/docs/specifications/dirnodes.rst b/docs/specifications/dirnodes.rst index 88fcd0fa9..c53d28a26 100644 --- a/docs/specifications/dirnodes.rst +++ b/docs/specifications/dirnodes.rst @@ -267,7 +267,7 @@ How well does this design meet the goals? value, so there are no opportunities for staleness 9. monotonicity: VERY: the single point of access also protects against retrograde motion - + Confidentiality leaks in the storage servers @@ -332,8 +332,9 @@ MDMF design rules allow for efficient random-access reads from the middle of the file, which would give the index something useful to point at. The current SDMF design generates a new RSA public/private keypair for each -directory. This takes considerable time and CPU effort, generally one or two -seconds per directory. We have designed (but not yet built) a DSA-based +directory. This takes some time and CPU effort (around 100 milliseconds on a +relatively high-end 2021 laptop) per directory. +We have designed (but not yet built) a DSA-based mutable file scheme which will use shared parameters to reduce the directory-creation effort to a bare minimum (picking a random number instead of generating two random primes). @@ -363,7 +364,7 @@ single child, looking up a single child) would require pulling or pushing a lot of unrelated data, increasing network overhead (and necessitating test-and-set semantics for the modification side, which increases the chances that a user operation will fail, making it more challenging to provide -promises of atomicity to the user). +promises of atomicity to the user). It would also make it much more difficult to enable the delegation ("sharing") of specific directories. Since each aggregate "realm" provides @@ -469,4 +470,3 @@ Preventing delegation between communication parties is just as pointless as asking Bob to forget previously accessed files. 
However, there may be value to configuring the UI to ask Carol to not share files with Bob, or to removing all files from Bob's view at the same time his access is revoked. - diff --git a/docs/proposed/http-storage-node-protocol.rst b/docs/specifications/http-storage-node-protocol.rst similarity index 59% rename from docs/proposed/http-storage-node-protocol.rst rename to docs/specifications/http-storage-node-protocol.rst index 521bf476d..db400fb2b 100644 --- a/docs/proposed/http-storage-node-protocol.rst +++ b/docs/specifications/http-storage-node-protocol.rst @@ -3,7 +3,7 @@ Storage Node Protocol ("Great Black Swamp", "GBS") ================================================== -The target audience for this document is Tahoe-LAFS developers. +The target audience for this document is developers working on Tahoe-LAFS or on an alternate implementation intended to be interoperable. After reading this document, one should expect to understand how Tahoe-LAFS clients interact over the network with Tahoe-LAFS storage nodes. 
@@ -30,15 +30,15 @@ Glossary introducer a Tahoe-LAFS process at a known location configured to re-publish announcements about the location of storage servers - fURL + :ref:`fURLs ` a self-authenticating URL-like string which can be used to locate a remote object using the Foolscap protocol (the storage service is an example of such an object) - NURL + :ref:`NURLs ` a self-authenticating URL-like string almost exactly like a fURL but without being tied to Foolscap swissnum - a short random string which is part of a fURL and which acts as a shared secret to authorize clients to use a storage service + a short random string which is part of a fURL/NURL and which acts as a shared secret to authorize clients to use a storage service lease state associated with a share informing a storage server of the duration of storage desired by a client @@ -64,6 +64,10 @@ Glossary lease renew secret a short secret string which storage servers required to be presented before allowing a particular lease to be renewed +The key words +"MUST", "MUST NOT", "REQUIRED", "SHALL", "SHALL NOT", "SHOULD", "SHOULD NOT", "RECOMMENDED", "MAY", and "OPTIONAL" +in this document are to be interpreted as described in RFC 2119. + Motivation ---------- @@ -119,8 +123,8 @@ An HTTP-based protocol can make use of TLS in largely the same way to provide th Provision of these properties *is* dependant on implementers following Great Black Swamp's rules for x509 certificate validation (rather than the standard "web" rules for validation). -Requirements ------------- +Design Requirements +------------------- Security ~~~~~~~~ @@ -189,6 +193,9 @@ Solutions An HTTP-based protocol, dubbed "Great Black Swamp" (or "GBS"), is described below. This protocol aims to satisfy the above requirements at a lower level of complexity than the current Foolscap-based protocol. +Summary (Non-normative) +~~~~~~~~~~~~~~~~~~~~~~~ + Communication with the storage node will take place using TLS. 
The TLS version and configuration will be dictated by an ongoing understanding of best practices. The storage node will present an x509 certificate during the TLS handshake. @@ -211,15 +218,15 @@ To further clarify, consider this example. Alice operates a storage node. Alice generates a key pair and secures it properly. Alice generates a self-signed storage node certificate with the key pair. -Alice's storage node announces (to an introducer) a fURL containing (among other information) the SPKI hash. +Alice's storage node announces (to an introducer) a NURL containing (among other information) the SPKI hash. Imagine the SPKI hash is ``i5xb...``. -This results in a fURL of ``pb://i5xb...@example.com:443/g3m5...#v=1``. +This results in a NURL of ``pb://i5xb...@example.com:443/g3m5...#v=1``. Bob creates a client node pointed at the same introducer. Bob's client node receives the announcement from Alice's storage node (indirected through the introducer). -Bob's client node recognizes the fURL as referring to an HTTP-dialect server due to the ``v=1`` fragment. -Bob's client node can now perform a TLS handshake with a server at the address in the fURL location hints +Bob's client node recognizes the NURL as referring to an HTTP-dialect server due to the ``v=1`` fragment. +Bob's client node can now perform a TLS handshake with a server at the address in the NURL location hints (``example.com:443`` in this example). Following the above described validation procedures, Bob's client node can determine whether it has reached Alice's storage node or not. @@ -230,17 +237,17 @@ Additionally, by continuing to interact using TLS, Bob's client and Alice's storage node are assured of both **message authentication** and **message confidentiality**. -Bob's client further inspects the fURL for the *swissnum*. +Bob's client further inspects the NURL for the *swissnum*. When Bob's client issues HTTP requests to Alice's storage node it includes the *swissnum* in its requests. 
**Storage authorization** has been achieved. .. note:: Foolscap TubIDs are 20 bytes (SHA1 digest of the certificate). - They are encoded with Base32 for a length of 32 bytes. + They are encoded with `Base32`_ for a length of 32 bytes. SPKI information discussed here is 32 bytes (SHA256 digest). - They would be encoded in Base32 for a length of 52 bytes. - `base64url`_ provides a more compact encoding of the information while remaining URL-compatible. + They would be encoded in `Base32`_ for a length of 52 bytes. + `unpadded base64url`_ provides a more compact encoding of the information while remaining URL-compatible. This would encode the SPKI information for a length of merely 43 bytes. SHA1, the current Foolscap hash function, @@ -266,13 +273,13 @@ Generation of a new certificate allows for certain non-optimal conditions to be * The ``commonName`` of ``newpb_thingy`` may be changed to a more descriptive value. * A ``notValidAfter`` field with a timestamp in the past may be updated. -Storage nodes will announce a new fURL for this new HTTP-based server. -This fURL will be announced alongside their existing Foolscap-based server's fURL. +Storage nodes will announce a new NURL for this new HTTP-based server. +This NURL will be announced alongside their existing Foolscap-based server's fURL. Such an announcement will resemble this:: { - "anonymous-storage-FURL": "pb://...", # The old key - "gbs-anonymous-storage-url": "pb://...#v=1" # The new key + "anonymous-storage-FURL": "pb://...", # The old entry + "anonymous-storage-NURLs": ["pb://...#v=1"] # The new, additional entry } The transition process will proceed in three stages: @@ -312,13 +319,8 @@ The follow sequence of events is likely: #. The client uses the information in its cache to open a Foolscap connection to the storage server. Ideally, -the client would not rely on an update from the introducer to give it the GBS fURL for the updated storage server. 
-Therefore, -when an updated client connects to a storage server using Foolscap, -it should request the server's version information. -If this information indicates that GBS is supported then the client should cache this GBS information. -On subsequent connection attempts, -it should make use of this GBS information. +the client would not rely on an update from the introducer to give it the GBS NURL for the updated storage server. +In practice, we have decided not to implement this functionality. Server Details -------------- @@ -329,15 +331,117 @@ and shares. A particular resource is addressed by the HTTP request path. Details about the interface are encoded in the HTTP message body. +String Encoding +~~~~~~~~~~~~~~~ + +.. _Base32: + +Base32 +!!!!!! + +Where the specification refers to Base32 the meaning is *unpadded* Base32 encoding as specified by `RFC 4648`_ using a *lowercase variation* of the alphabet from Section 6. + +That is, the alphabet is: + +.. list-table:: Base32 Alphabet + :header-rows: 1 + + * - Value + - Encoding + - Value + - Encoding + - Value + - Encoding + - Value + - Encoding + + * - 0 + - a + - 9 + - j + - 18 + - s + - 27 + - 3 + * - 1 + - b + - 10 + - k + - 19 + - t + - 28 + - 4 + * - 2 + - c + - 11 + - l + - 20 + - u + - 29 + - 5 + * - 3 + - d + - 12 + - m + - 21 + - v + - 30 + - 6 + * - 4 + - e + - 13 + - n + - 22 + - w + - 31 + - 7 + * - 5 + - f + - 14 + - o + - 23 + - x + - + - + * - 6 + - g + - 15 + - p + - 24 + - y + - + - + * - 7 + - h + - 16 + - q + - 25 + - z + - + - + * - 8 + - i + - 17 + - r + - 26 + - 2 + - + - + Message Encoding ~~~~~~~~~~~~~~~~ -The preferred encoding for HTTP message bodies is `CBOR`_. -A request may be submitted using an alternate encoding by declaring this in the ``Content-Type`` header. -A request may indicate its preference for an alternate encoding in the response using the ``Accept`` header. -These two headers are used in the typical way for an HTTP application. 
+Clients and servers MUST use the ``Content-Type`` and ``Accept`` header fields as specified in `RFC 9110`_ for message body negotiation. -The only other encoding support for which is currently recommended is JSON. +The encoding for HTTP message bodies SHOULD be `CBOR`_. +Clients submitting requests using this encoding MUST include a ``Content-Type: application/cbor`` request header field. +A request MAY be submitted using an alternate encoding by declaring this in the ``Content-Type`` header field. +A request MAY indicate its preference for an alternate encoding in the response using the ``Accept`` header field. +A request which includes no ``Accept`` header field MUST be interpreted in the same way as a request including a ``Accept: application/cbor`` header field. + +Clients and servers MAY support additional request and response message body encodings. + +Clients and servers SHOULD support ``application/json`` request and response message body encoding. For HTTP messages carrying binary share data, this is expected to be a particularly poor encoding. However, @@ -350,6 +454,24 @@ Because of the simple types used throughout and the equivalence described in `RFC 7049`_ these examples should be representative regardless of which of these two encodings is chosen. +There are two exceptions to this rule. + +1. Sets +!!!!!!! + +For CBOR messages, +any sequence that is semantically a set (i.e. no repeated values allowed, order doesn't matter, and elements are hashable in Python) should be sent as a set. +Tag 6.258 is used to indicate sets in CBOR; +see `the CBOR registry `_ for more details. +The JSON encoding does not support sets. +Sets MUST be represented as arrays in JSON-encoded messages. + +2. Bytes +!!!!!!!! + +The CBOR encoding natively supports a bytes type while the JSON encoding does not. +Bytes MUST be represented as strings giving the `Base64`_ representation of the original bytes value. 
+ HTTP Design ~~~~~~~~~~~ @@ -363,50 +485,96 @@ one branch contains all of the share data; another branch contains all of the lease data; etc. -Authorization is required for all endpoints. -The standard HTTP authorization protocol is used. -The authentication *type* used is ``Tahoe-LAFS``. -The swissnum from the NURL used to locate the storage service is used as the *credentials*. -If credentials are not presented or the swissnum is not associated with a storage service then no storage processing is performed and the request receives an ``UNAUTHORIZED`` response. +Clients and servers MUST use the ``Authorization`` header field, +as specified in `RFC 9110`_, +for authorization of all requests to all endpoints specified here. +The authentication *type* MUST be ``Tahoe-LAFS``. +Clients MUST present the `Base64`_-encoded representation of the swissnum from the NURL used to locate the storage service as the *credentials*. + +If credentials are not presented or the swissnum is not associated with a storage service then the server MUST issue a ``401 UNAUTHORIZED`` response and perform no other processing of the message. + +Requests to certain endpoints MUST include additional secrets in the ``X-Tahoe-Authorization`` headers field. +The endpoints which require these secrets are: + +* ``PUT /storage/v1/lease/:storage_index``: + The secrets included MUST be ``lease-renew-secret`` and ``lease-cancel-secret``. + +* ``POST /storage/v1/immutable/:storage_index``: + The secrets included MUST be ``lease-renew-secret``, ``lease-cancel-secret``, and ``upload-secret``. + +* ``PATCH /storage/v1/immutable/:storage_index/:share_number``: + The secrets included MUST be ``upload-secret``. + +* ``PUT /storage/v1/immutable/:storage_index/:share_number/abort``: + The secrets included MUST be ``upload-secret``. + +* ``POST /storage/v1/mutable/:storage_index/read-test-write``: + The secrets included MUST be ``lease-renew-secret``, ``lease-cancel-secret``, and ``write-enabler``. 
+ +If these secrets are: + +1. Missing. +2. The wrong length. +3. Not the expected kind of secret. +4. They are otherwise unparseable before they are actually semantically used. + +the server MUST respond with ``400 BAD REQUEST`` and perform no other processing of the message. +401 is not used because this isn't an authorization problem, this is a "you sent garbage and should know better" bug. + +If authorization using the secret fails, +then the server MUST send a ``401 UNAUTHORIZED`` response and perform no other processing of the message. + +Encoding +~~~~~~~~ + +* ``storage_index`` MUST be `Base32`_ encoded in URLs. +* ``share_number`` MUST be a decimal representation General ~~~~~~~ -``GET /v1/version`` -!!!!!!!!!!!!!!!!!!! +``GET /storage/v1/version`` +!!!!!!!!!!!!!!!!!!!!!!!!!!! -Retrieve information about the version of the storage server. -Information is returned as an encoded mapping. -For example:: +This endpoint allows clients to retrieve some basic metadata about a storage server from the storage service. +The response MUST validate against this CDDL schema:: - { "http://allmydata.org/tahoe/protocols/storage/v1" : - { "maximum-immutable-share-size": 1234, - "maximum-mutable-share-size": 1235, - "available-space": 123456, - "tolerates-immutable-read-overrun": true, - "delete-mutable-shares-with-zero-length-writev": true, - "fills-holes-with-zero-bytes": true, - "prevents-read-past-end-of-share-data": true, - "gbs-anonymous-storage-url": "pb://...#v=1" - }, - "application-version": "1.13.0" - } + {'http://allmydata.org/tahoe/protocols/storage/v1' => { + 'maximum-immutable-share-size' => uint + 'maximum-mutable-share-size' => uint + 'available-space' => uint + } + 'application-version' => bstr + } -``PUT /v1/lease/:storage_index`` -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +The server SHOULD populate as many fields as possible with accurate information about its behavior. 
+ +For fields which relate to a specific API +the semantics are documented below in the section for that API. +For fields that are more general than a single API the semantics are as follows: + +* available-space: + The server SHOULD use this field to advertise the amount of space that it currently considers unused and is willing to allocate for client requests. + The value is a number of bytes. + + +``PUT /storage/v1/lease/:storage_index`` +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! Either renew or create a new lease on the bucket addressed by ``storage_index``. -The details of the lease are encoded in the request body. + +The renew secret and cancellation secret should be included as ``X-Tahoe-Authorization`` headers. For example:: - {"renew-secret": "abcd", "cancel-secret": "efgh"} + X-Tahoe-Authorization: lease-renew-secret + X-Tahoe-Authorization: lease-cancel-secret -If the ``renew-secret`` value matches an existing lease +If the ``lease-renew-secret`` value matches an existing lease then the expiration time of that lease will be changed to 31 days after the time of this operation. If it does not match an existing lease -then a new lease will be created with this ``renew-secret`` which expires 31 days after the time of this operation. +then a new lease will be created with this ``lease-renew-secret`` which expires 31 days after the time of this operation. -``renew-secret`` and ``cancel-secret`` values must be 32 bytes long. +``lease-renew-secret`` and ``lease-cancel-secret`` values must be 32 bytes long. The server treats them as opaque values. :ref:`Share Leases` gives details about how the Tahoe-LAFS storage client constructs these values. @@ -423,8 +591,10 @@ In these cases the server takes no action and returns ``NOT FOUND``. Discussion `````````` -We considered an alternative where ``renew-secret`` and ``cancel-secret`` are placed in query arguments on the request path. -We chose to put these values into the request body to make the URL simpler. 
+We considered an alternative where ``lease-renew-secret`` and ``lease-cancel-secret`` are placed in query arguments on the request path. +This increases chances of leaking secrets in logs. +Putting the secrets in the body reduces the chances of leaking secrets, +but eventually we chose headers as the least likely information to be logged. Several behaviors here are blindly copied from the Foolscap-based storage server protocol. @@ -441,27 +611,59 @@ Immutable Writing ~~~~~~~ -``POST /v1/immutable/:storage_index`` -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +``POST /storage/v1/immutable/:storage_index`` +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! Initialize an immutable storage index with some buckets. -The buckets may have share data written to them once. -A lease is also created for the shares. +The server MUST allow share data to be written to the buckets at most one time. +The server MAY create a lease for the buckets. Details of the buckets to create are encoded in the request body. +The request body MUST validate against this CDDL schema:: + + { + share-numbers: #6.258([0*256 uint]) + allocated-size: uint + } + For example:: - {"renew-secret": "efgh", "cancel-secret": "ijkl", - "share-numbers": [1, 7, ...], "allocated-size": 12345} + {"share-numbers": [1, 7, ...], "allocated-size": 12345} + +The server SHOULD accept a value for **allocated-size** that is less than or equal to the lesser of the values of the server's version message's **maximum-immutable-share-size** or **available-space** values. + +The request MUST include ``X-Tahoe-Authorization`` HTTP headers that set the various secrets—upload, lease renewal, lease cancellation—that will be later used to authorize various operations. +For example:: + + X-Tahoe-Authorization: lease-renew-secret + X-Tahoe-Authorization: lease-cancel-secret + X-Tahoe-Authorization: upload-secret + +The response body MUST include encoded information about the created buckets. 
+The response body MUST validate against this CDDL schema:: + + { + already-have: #6.258([0*256 uint]) + allocated: #6.258([0*256 uint]) + } -The response body includes encoded information about the created buckets. For example:: {"already-have": [1, ...], "allocated": [7, ...]} +The upload secret is an opaque _byte_ string. + +Handling repeat calls: + +* If the same API call is repeated with the same upload secret, the response is the same and no change is made to server state. + This is necessary to ensure retries work in the face of lost responses from the server. +* If the API call is made with a different upload secret, this implies a new client, perhaps because the old client died. + Or it may happen because the client wants to upload a different share number than a previous client. + New shares will be created, existing shares will be unchanged, regardless of whether the upload secret matches or not. + Discussion `````````` -We considered making this ``POST /v1/immutable`` instead. +We considered making this ``POST /storage/v1/immutable`` instead. The motivation was to keep *storage index* out of the request URL. Request URLs have an elevated chance of being logged by something. We were concerned that having the *storage index* logged may increase some risks. @@ -482,25 +684,53 @@ The response includes ``already-have`` and ``allocated`` for two reasons: This might be because a server has become unavailable and a remaining server needs to store more shares for the upload. It could also just be that the client's preferred servers have changed. -``PATCH /v1/immutable/:storage_index/:share_number`` -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +Regarding upload secrets, +the goal is for uploading and aborting (see next sections) to be authenticated by more than just the storage index. +In the future, we may want to generate them in a way that allows resuming/canceling when the client has issues. +In the short term, they can just be a random byte string. 
+The primary security constraint is that each upload to each server has its own unique upload key, +tied to uploading that particular storage index to this particular server. + +Rejected designs for upload secrets: + +* Upload secret per share number. + In order to make the secret unguessable by attackers, which includes other servers, + it must contain randomness. + Randomness means there is no need to have a secret per share, since adding share-specific content to randomness doesn't actually make the secret any better. + +``PATCH /storage/v1/immutable/:storage_index/:share_number`` +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! Write data for the indicated share. -The share number must belong to the storage index. -The request body is the raw share data (i.e., ``application/octet-stream``). -*Content-Range* requests are encouraged for large transfers to allow partially complete uploads to be resumed. +The share number MUST belong to the storage index. +The request body MUST be the raw share data (i.e., ``application/octet-stream``). +The request MUST include a *Content-Range* header field; +for large transfers this allows partially complete uploads to be resumed. + For example, a 1MiB share can be divided in to eight separate 128KiB chunks. Each chunk can be uploaded in a separate request. Each request can include a *Content-Range* value indicating its placement within the complete share. If any one of these requests fails then at most 128KiB of upload work needs to be retried. -The server must recognize when all of the data has been received and mark the share as complete +The server MUST recognize when all of the data has been received and mark the share as complete (which it can do because it was informed of the size when the storage index was initialized). -* When a chunk that does not complete the share is successfully uploaded the response is ``OK``. - The response body indicates the range of share data that has yet to be uploaded. 
- That is:: +The request MUST include a ``X-Tahoe-Authorization`` header that includes the upload secret:: + + X-Tahoe-Authorization: upload-secret + +Responses: + +* When a chunk that does not complete the share is successfully uploaded the response MUST be ``OK``. + The response body MUST indicate the range of share data that has yet to be uploaded. + The response body MUST validate against this CDDL schema:: + + { + required: [0* {begin: uint, end: uint}] + } + + For example:: { "required": [ { "begin": @@ -511,25 +741,12 @@ The server must recognize when all of the data has been received and mark the sh ] } -* When the chunk that completes the share is successfully uploaded the response is ``CREATED``. +* When the chunk that completes the share is successfully uploaded the response MUST be ``CREATED``. * If the *Content-Range* for a request covers part of the share that has already, and the data does not match already written data, - the response is ``CONFLICT``. - At this point the only thing to do is abort the upload and start from scratch (see below). - -``PUT /v1/immutable/:storage_index/:share_number/abort`` -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - -This cancels an *in-progress* upload. - -The response code: - -* When the upload is still in progress and therefore the abort has succeeded, - the response is ``OK``. - Future uploads can start from scratch with no pre-existing upload state stored on the server. -* If the uploaded has already finished, the response is 405 (Method Not Allowed) - and no change is made. - + the response MUST be ``CONFLICT``. + In this case the client MUST abort the upload. + The client MAY then restart the upload from scratch. Discussion `````````` @@ -549,38 +766,85 @@ From RFC 7231:: PATCH method defined in [RFC5789]). -``POST /v1/immutable/:storage_index/:share_number/corrupt`` -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
+ +``PUT /storage/v1/immutable/:storage_index/:share_number/abort`` +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + +This cancels an *in-progress* upload. + +The request MUST include a ``X-Tahoe-Authorization`` header that includes the upload secret:: + + X-Tahoe-Authorization: upload-secret + +If there is an incomplete upload with a matching upload-secret then the server MUST consider the abort to have succeeded. +In this case the response MUST be ``OK``. +The server MUST respond to all future requests as if the operations related to this upload did not take place. + +If there is no incomplete upload with a matching upload-secret then the server MUST respond with ``Method Not Allowed`` (405). +The server MUST make no client-visible changes to its state in this case. + +``POST /storage/v1/immutable/:storage_index/:share_number/corrupt`` +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! Advise the server the data read from the indicated share was corrupt. -The request body includes an human-meaningful string with details about the corruption. +The request body includes a human-meaningful text string with details about the corruption. It also includes potentially important details about the share. +The request body MUST validate against this CDDL schema:: + + { + reason: tstr .size (1..32765) + } For example:: {"reason": "expected hash abcd, got hash efgh"} -.. share-type, storage-index, and share-number are inferred from the URL +The report pertains to the immutable share with a **storage index** and **share number** given in the request path. +If the identified **storage index** and **share number** are known to the server then the response SHOULD be accepted and made available to server administrators. +In this case the response SHOULD be ``OK``. +If the response is not accepted then the response SHOULD be ``Not Found`` (404). 
+ +Discussion +`````````` + +The seemingly odd length limit on ``reason`` is chosen so that the *encoded* representation of the message is limited to 32768. Reading ~~~~~~~ -``GET /v1/immutable/:storage_index/shares`` -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +``GET /storage/v1/immutable/:storage_index/shares`` +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + +Retrieve a list (semantically, a set) indicating all shares available for the indicated storage index. +The response body MUST validate against this CDDL schema:: + + #6.258([0*256 uint]) -Retrieve a list indicating all shares available for the indicated storage index. For example:: [1, 5] -``GET /v1/immutable/:storage_index/:share_number`` -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +If the **storage index** in the request path is not known to the server then the response MUST include an empty list. + +``GET /storage/v1/immutable/:storage_index/:share_number`` +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! Read a contiguous sequence of bytes from one share in one bucket. -The response body is the raw share data (i.e., ``application/octet-stream``). -The ``Range`` header may be used to request exactly one ``bytes`` range. -Interpretation and response behavior is as specified in RFC 7233 § 4.1. -Multiple ranges in a single request are *not* supported. +The response body MUST be the raw share data (i.e., ``application/octet-stream``). +The ``Range`` header MAY be used to request exactly one ``bytes`` range, +in which case the response code MUST be ``Partial Content`` (206). +Interpretation and response behavior MUST be as specified in RFC 7233 § 4.1. +Multiple ranges in a single request are *not* supported; +open-ended ranges are also not supported. +Clients MUST NOT send requests using these features. + +If the response reads beyond the end of the data, +the response MUST be shorter than the requested range. +It MUST contain all data up to the end of the share and then end. 
+The resulting ``Content-Range`` header MUST be consistent with the returned data. + +If the response to a query is an empty range, +the server MUST send a ``No Content`` (204) response. Discussion `````````` @@ -609,8 +873,8 @@ Mutable Writing ~~~~~~~ -``POST /v1/mutable/:storage_index/read-test-write`` -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +``POST /storage/v1/mutable/:storage_index/read-test-write`` +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! General purpose read-test-and-write operation for mutable storage indexes. A mutable storage index is also called a "slot" @@ -619,16 +883,30 @@ The first write operation on a mutable storage index creates it (that is, there is no separate "create this storage index" operation as there is for the immutable storage index type). -The request body includes the secrets necessary to rewrite to the shares -along with test, read, and write vectors for the operation. +The request MUST include ``X-Tahoe-Authorization`` headers with write enabler and lease secrets:: + + X-Tahoe-Authorization: write-enabler + X-Tahoe-Authorization: lease-cancel-secret + X-Tahoe-Authorization: lease-renew-secret + +The request body MUST include test, read, and write vectors for the operation. +The request body MUST validate against this CDDL schema:: + + { + "test-write-vectors": { + 0*256 share_number : { + "test": [0*30 {"offset": uint, "size": uint, "specimen": bstr}] + "write": [* {"offset": uint, "data": bstr}] + "new-length": uint / null + } + } + "read-vector": [0*30 {"offset": uint, "size": uint}] + } + share_number = uint + For example:: { - "secrets": { - "write-enabler": "abcd", - "lease-renew": "efgh", - "lease-cancel": "ijkl" - }, "test-write-vectors": { 0: { "test": [{ @@ -648,6 +926,14 @@ For example:: The response body contains a boolean indicating whether the tests all succeed (and writes were applied) and a mapping giving read data (pre-write). 
+The response body MUST validate against this CDDL schema:: + + { + "success": bool, + "data": {0*256 share_number: [0* bstr]} + } + share_number = uint + For example:: { @@ -659,28 +945,57 @@ For example:: } } -A test vector or read vector that read beyond the boundaries of existing data will return nothing for any bytes past the end. -As a result, if there is no data at all, an empty bytestring is returned no matter what the offset or length. +A client MAY send a test vector or read vector to bytes beyond the end of existing data. +In this case a server MUST behave as if the test or read vector referred to exactly as much data exists. + +For example, +consider the case where the server has 5 bytes of data for a particular share. +If a client sends a read vector with an ``offset`` of 1 and a ``size`` of 4 then the server MUST respond with all of the data except the first byte. +If a client sends a read vector with the same ``offset`` and a ``size`` of 5 (or any larger value) then the server MUST respond in the same way. + +Similarly, +if there is no data at all, +an empty byte string is returned no matter what the offset or length. Reading ~~~~~~~ -``GET /v1/mutable/:storage_index/shares`` -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +``GET /storage/v1/mutable/:storage_index/shares`` +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + +Retrieve a set indicating all shares available for the indicated storage index. +The response body MUST validate against this CDDL schema:: + + #6.258([0*256 uint]) -Retrieve a list indicating all shares available for the indicated storage index. For example:: [1, 5] -``GET /v1/mutable/:storage_index?share=:s0&share=:sN&offset=:o1&size=:z0&offset=:oN&size=:zN`` -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +``GET /storage/v1/mutable/:storage_index/:share_number`` +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -Read data from the indicated mutable shares. 
-Just like ``GET /v1/mutable/:storage_index``. +Read data from the indicated mutable shares, just like ``GET /storage/v1/immutable/:storage_index``. -``POST /v1/mutable/:storage_index/:share_number/corrupt`` -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +The response body MUST be the raw share data (i.e., ``application/octet-stream``). +The ``Range`` header MAY be used to request exactly one ``bytes`` range, +in which case the response code MUST be ``Partial Content`` (206). +Interpretation and response behavior MUST be as specified in RFC 7233 § 4.1. +Multiple ranges in a single request are *not* supported; +open-ended ranges are also not supported. +Clients MUST NOT send requests using these features. + +If the response reads beyond the end of the data, +the response MUST be shorter than the requested range. +It MUST contain all data up to the end of the share and then end. +The resulting ``Content-Range`` header MUST be consistent with the returned data. + +If the response to a query is an empty range, +the server MUST send a ``No Content`` (204) response. + + +``POST /storage/v1/mutable/:storage_index/:share_number/corrupt`` +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! Advise the server the data read from the indicated share was corrupt. Just like the immutable version. @@ -688,49 +1003,69 @@ Just like the immutable version. Sample Interactions ------------------- +This section contains examples of client/server interactions to help illuminate the above specification. +This section is non-normative. + Immutable Data ~~~~~~~~~~~~~~ 1. 
Create a bucket for storage index ``AAAAAAAAAAAAAAAA`` to hold two immutable shares, discovering that share ``1`` was already uploaded:: - POST /v1/immutable/AAAAAAAAAAAAAAAA - {"renew-secret": "efgh", "cancel-secret": "ijkl", - "share-numbers": [1, 7], "allocated-size": 48} + POST /storage/v1/immutable/AAAAAAAAAAAAAAAA + Authorization: Tahoe-LAFS nurl-swissnum + X-Tahoe-Authorization: lease-renew-secret efgh + X-Tahoe-Authorization: lease-cancel-secret jjkl + X-Tahoe-Authorization: upload-secret xyzf + + {"share-numbers": [1, 7], "allocated-size": 48} 200 OK {"already-have": [1], "allocated": [7]} #. Upload the content for immutable share ``7``:: - PATCH /v1/immutable/AAAAAAAAAAAAAAAA/7 + PATCH /storage/v1/immutable/AAAAAAAAAAAAAAAA/7 + Authorization: Tahoe-LAFS nurl-swissnum Content-Range: bytes 0-15/48 + X-Tahoe-Authorization: upload-secret xyzf 200 OK + { "required": [ {"begin": 16, "end": 48 } ] } - PATCH /v1/immutable/AAAAAAAAAAAAAAAA/7 + PATCH /storage/v1/immutable/AAAAAAAAAAAAAAAA/7 + Authorization: Tahoe-LAFS nurl-swissnum Content-Range: bytes 16-31/48 + X-Tahoe-Authorization: upload-secret xyzf 200 OK + { "required": [ {"begin": 32, "end": 48 } ] } - PATCH /v1/immutable/AAAAAAAAAAAAAAAA/7 + PATCH /storage/v1/immutable/AAAAAAAAAAAAAAAA/7 + Authorization: Tahoe-LAFS nurl-swissnum Content-Range: bytes 32-47/48 + X-Tahoe-Authorization: upload-secret xyzf 201 CREATED #. Download the content of the previously uploaded immutable share ``7``:: - GET /v1/immutable/AAAAAAAAAAAAAAAA?share=7&offset=0&size=48 + GET /storage/v1/immutable/AAAAAAAAAAAAAAAA?share=7 + Authorization: Tahoe-LAFS nurl-swissnum + Range: bytes=0-47 200 OK + Content-Range: bytes 0-47/48 #. 
Renew the lease on all immutable shares in bucket ``AAAAAAAAAAAAAAAA``:: - PUT /v1/lease/AAAAAAAAAAAAAAAA - {"renew-secret": "efgh", "cancel-secret": "ijkl"} + PUT /storage/v1/lease/AAAAAAAAAAAAAAAA + Authorization: Tahoe-LAFS nurl-swissnum + X-Tahoe-Authorization: lease-cancel-secret jjkl + X-Tahoe-Authorization: lease-renew-secret efgh 204 NO CONTENT @@ -742,13 +1077,13 @@ The special test vector of size 1 but empty bytes will only pass if there is no existing share, otherwise it will read a byte which won't match `b""`:: - POST /v1/mutable/BBBBBBBBBBBBBBBB/read-test-write + POST /storage/v1/mutable/BBBBBBBBBBBBBBBB/read-test-write + Authorization: Tahoe-LAFS nurl-swissnum + X-Tahoe-Authorization: write-enabler abcd + X-Tahoe-Authorization: lease-cancel-secret efgh + X-Tahoe-Authorization: lease-renew-secret ijkl + { - "secrets": { - "write-enabler": "abcd", - "lease-renew": "efgh", - "lease-cancel": "ijkl" - }, "test-write-vectors": { 3: { "test": [{ @@ -774,13 +1109,13 @@ otherwise it will read a byte which won't match `b""`:: #. Safely rewrite the contents of a known version of mutable share number ``3`` (or fail):: - POST /v1/mutable/BBBBBBBBBBBBBBBB/read-test-write + POST /storage/v1/mutable/BBBBBBBBBBBBBBBB/read-test-write + Authorization: Tahoe-LAFS nurl-swissnum + X-Tahoe-Authorization: write-enabler abcd + X-Tahoe-Authorization: lease-cancel-secret efgh + X-Tahoe-Authorization: lease-renew-secret ijkl + { - "secrets": { - "write-enabler": "abcd", - "lease-renew": "efgh", - "lease-cancel": "ijkl" - }, "test-write-vectors": { 3: { "test": [{ @@ -806,20 +1141,33 @@ otherwise it will read a byte which won't match `b""`:: #. Download the contents of share number ``3``:: - GET /v1/mutable/BBBBBBBBBBBBBBBB?share=3&offset=0&size=10 + GET /storage/v1/mutable/BBBBBBBBBBBBBBBB?share=3 + Authorization: Tahoe-LAFS nurl-swissnum + Range: bytes=0-16 + + 200 OK + Content-Range: bytes 0-15/16 #. 
Renew the lease on previously uploaded mutable share in slot ``BBBBBBBBBBBBBBBB``:: - PUT /v1/lease/BBBBBBBBBBBBBBBB - {"renew-secret": "efgh", "cancel-secret": "ijkl"} + PUT /storage/v1/lease/BBBBBBBBBBBBBBBB + Authorization: Tahoe-LAFS nurl-swissnum + X-Tahoe-Authorization: lease-cancel-secret efgh + X-Tahoe-Authorization: lease-renew-secret ijkl 204 NO CONTENT +.. _Base64: https://www.rfc-editor.org/rfc/rfc4648#section-4 + +.. _RFC 4648: https://tools.ietf.org/html/rfc4648 + .. _RFC 7469: https://tools.ietf.org/html/rfc7469#section-2.4 .. _RFC 7049: https://tools.ietf.org/html/rfc7049#section-4 +.. _RFC 9110: https://tools.ietf.org/html/rfc9110 + .. _CBOR: http://cbor.io/ .. [#] @@ -864,7 +1212,7 @@ otherwise it will read a byte which won't match `b""`:: spki_encoded = urlsafe_b64encode(spki_sha256) assert spki_encoded == tub_id - Note we use `base64url`_ rather than the Foolscap- and Tahoe-LAFS-preferred Base32. + Note we use `unpadded base64url`_ rather than the Foolscap- and Tahoe-LAFS-preferred Base32. .. [#] https://www.cvedetails.com/cve/CVE-2017-5638/ @@ -875,6 +1223,6 @@ otherwise it will read a byte which won't match `b""`:: .. [#] https://efail.de/ -.. _base64url: https://tools.ietf.org/html/rfc7515#appendix-C +.. _unpadded base64url: https://tools.ietf.org/html/rfc7515#appendix-C .. _attacking SHA1: https://en.wikipedia.org/wiki/SHA-1#Attacks diff --git a/docs/specifications/index.rst b/docs/specifications/index.rst index e813acf07..4f71dc0dc 100644 --- a/docs/specifications/index.rst +++ b/docs/specifications/index.rst @@ -17,3 +17,4 @@ the data formats used by Tahoe. 
lease servers-of-happiness backends/raic + http-storage-node-protocol diff --git a/docs/specifications/url.rst b/docs/specifications/url.rst index 31fb05fad..14c58201c 100644 --- a/docs/specifications/url.rst +++ b/docs/specifications/url.rst @@ -7,6 +7,8 @@ These are not to be confused with the URI-like capabilities Tahoe-LAFS uses to r An attempt is also made to outline the rationale for certain choices about these URLs. The intended audience for this document is Tahoe-LAFS maintainers and other developers interested in interoperating with Tahoe-LAFS or these URLs. +.. _furls: + Background ---------- @@ -31,12 +33,14 @@ The client's use of the swissnum is what allows the server to authorize the clie .. _`swiss number`: http://wiki.erights.org/wiki/Swiss_number +.. _NURLs: + NURLs ----- The authentication and authorization properties of fURLs are a good fit for Tahoe-LAFS' requirements. These are not inherently tied to the Foolscap protocol itself. -In particular they are beneficial to :doc:`../proposed/http-storage-node-protocol` which uses HTTP instead of Foolscap. +In particular they are beneficial to :doc:`http-storage-node-protocol` which uses HTTP instead of Foolscap. It is conceivable they will also be used with WebSockets at some point as well. Continuing to refer to these URLs as fURLs when they are being used for other protocols may cause confusion. @@ -47,27 +51,27 @@ This can be considered to expand to "**N**\ ew URLs" or "Authe\ **N**\ ticating The anticipated use for a **NURL** will still be to establish a TLS connection to a peer. The protocol run over that TLS connection could be Foolscap though it is more likely to be an HTTP-based protocol (such as GBS). +Unlike fURLs, only a single net-loc is included, for consistency with other forms of URLs. +As a result, multiple NURLs may be available for a single server. 
+ Syntax ------ The EBNF for a NURL is as follows:: - nurl = scheme, hash, "@", net-loc-list, "/", swiss-number, [ version1 ] - - scheme = "pb://" + nurl = tcp-nurl | tor-nurl | i2p-nurl + tcp-nurl = "pb://", hash, "@", tcp-loc, "/", swiss-number, [ version1 ] + tor-nurl = "pb+tor://", hash, "@", tcp-loc, "/", swiss-number, [ version1 ] + i2p-nurl = "pb+i2p://", hash, "@", i2p-loc, "/", swiss-number, [ version1 ] hash = unreserved - net-loc-list = net-loc, [ { ",", net-loc } ] - net-loc = tcp-loc | tor-loc | i2p-loc - - tcp-loc = [ "tcp:" ], hostname, [ ":" port ] - tor-loc = "tor:", hostname, [ ":" port ] - i2p-loc = "i2p:", i2p-addr, [ ":" port ] - - i2p-addr = { unreserved }, ".i2p" + tcp-loc = hostname, [ ":" port ] hostname = domain | IPv4address | IPv6address + i2p-loc = i2p-addr, [ ":" port ] + i2p-addr = { unreserved }, ".i2p" + swiss-number = segment version1 = "#v=1" @@ -87,11 +91,13 @@ These differences are separated into distinct versions. Version 0 --------- -A Foolscap fURL is considered the canonical definition of a version 0 NURL. +In theory, a Foolscap fURL with a single netloc is considered the canonical definition of a version 0 NURL. Notably, the hash component is defined as the base32-encoded SHA1 hash of the DER form of an x509v3 certificate. A version 0 NURL is identified by the absence of the ``v=1`` fragment. +In practice, real world fURLs may have more than one netloc, so lack of version fragment will likely just involve dispatching the fURL to a different parser. + Examples ~~~~~~~~ @@ -103,11 +109,8 @@ Version 1 The hash component of a version 1 NURL differs in three ways from the prior version. -1. The hash function used is SHA3-224 instead of SHA1. - The security of SHA1 `continues to be eroded`_. - Contrariwise SHA3 is currently the most recent addition to the SHA family by NIST. 
- The 224 bit instance is chosen to keep the output short and because it offers greater collision resistance than SHA1 was thought to offer even at its inception - (prior to security research showing actual collision resistance is lower). +1. The hash function used is SHA-256, to match RFC 7469. + The security of SHA1 `continues to be eroded`_; Latacora `SHA-2`_. 2. The hash is computed over the certificate's SPKI instead of the whole certificate. This allows certificate re-generation so long as the public key remains the same. This is useful to allow contact information to be updated or extension of validity period. @@ -122,7 +125,7 @@ The hash component of a version 1 NURL differs in three ways from the prior vers *all* certificate fields should be considered within the context of the relationship identified by the SPKI hash. 3. The hash is encoded using urlsafe-base64 (without padding) instead of base32. - This provides a more compact representation and minimizes the usability impacts of switching from a 160 bit hash to a 224 bit hash. + This provides a more compact representation and minimizes the usability impacts of switching from a 160 bit hash to a 256 bit hash. A version 1 NURL is identified by the presence of the ``v=1`` fragment. Though the length of the hash string (38 bytes) could also be used to differentiate it from a version 0 NURL, @@ -140,7 +143,8 @@ Examples * ``pb://azEu8vlRpnEeYm0DySQDeNY3Z2iJXHC_bsbaAw@localhost:47877/64i4aokv4ej#v=1`` .. _`continues to be eroded`: https://en.wikipedia.org/wiki/SHA-1#Cryptanalysis_and_validation -.. _`explored by the web community`: https://www.imperialviolet.org/2011/05/04/pinning.html +.. _`SHA-2`: https://latacora.micro.blog/2018/04/03/cryptographic-right-answers.html +.. _`explored by the web community`: https://www.rfc-editor.org/rfc/rfc7469 .. _Foolscap: https://github.com/warner/foolscap .. [1] ``foolscap.furl.decode_furl`` is taken as the canonical definition of the syntax of a fURL. 
diff --git a/docs/stats.rst b/docs/stats.rst index 50642d816..c7d69e0d2 100644 --- a/docs/stats.rst +++ b/docs/stats.rst @@ -264,3 +264,18 @@ the "tahoe-conf" file for notes about configuration and installing these plugins into a Munin environment. .. _Munin: http://munin-monitoring.org/ + + +Scraping Stats Values in OpenMetrics Format +=========================================== + +Time Series DataBase (TSDB) software like Prometheus_ and VictoriaMetrics_ can +parse statistics from the e.g. http://localhost:3456/statistics?t=openmetrics +URL in OpenMetrics_ format. Software like Grafana_ can then be used to graph +and alert on these numbers. You can find a pre-configured dashboard for +Grafana at https://grafana.com/grafana/dashboards/16894-tahoe-lafs/. + +.. _OpenMetrics: https://openmetrics.io/ +.. _Prometheus: https://prometheus.io/ +.. _VictoriaMetrics: https://victoriametrics.com/ +.. _Grafana: https://grafana.com/ diff --git a/flake.lock b/flake.lock new file mode 100644 index 000000000..b7b74a0e4 --- /dev/null +++ b/flake.lock @@ -0,0 +1,115 @@ +{ + "nodes": { + "flake-compat": { + "flake": false, + "locked": { + "lastModified": 1673956053, + "narHash": "sha256-4gtG9iQuiKITOjNQQeQIpoIB6b16fm+504Ch3sNKLd8=", + "owner": "edolstra", + "repo": "flake-compat", + "rev": "35bb57c0c8d8b62bbfd284272c928ceb64ddbde9", + "type": "github" + }, + "original": { + "owner": "edolstra", + "repo": "flake-compat", + "type": "github" + } + }, + "flake-utils": { + "inputs": { + "systems": "systems" + }, + "locked": { + "lastModified": 1687709756, + "narHash": "sha256-Y5wKlQSkgEK2weWdOu4J3riRd+kV/VCgHsqLNTTWQ/0=", + "owner": "numtide", + "repo": "flake-utils", + "rev": "dbabf0ca0c0c4bce6ea5eaf65af5cb694d2082c7", + "type": "github" + }, + "original": { + "owner": "numtide", + "repo": "flake-utils", + "type": "github" + } + }, + "nixpkgs-22_11": { + "locked": { + "lastModified": 1688392541, + "narHash": "sha256-lHrKvEkCPTUO+7tPfjIcb7Trk6k31rz18vkyqmkeJfY=", + "owner": "NixOS", + 
"repo": "nixpkgs", + "rev": "ea4c80b39be4c09702b0cb3b42eab59e2ba4f24b", + "type": "github" + }, + "original": { + "owner": "NixOS", + "ref": "nixos-22.11", + "repo": "nixpkgs", + "type": "github" + } + }, + "nixpkgs-23_05": { + "locked": { + "lastModified": 1689885880, + "narHash": "sha256-2ikAcvHKkKh8J/eUrwMA+wy1poscC+oL1RkN1V3RmT8=", + "owner": "NixOS", + "repo": "nixpkgs", + "rev": "fa793b06f56896b7d1909e4b69977c7bf842b2f0", + "type": "github" + }, + "original": { + "owner": "NixOS", + "ref": "nixos-23.05", + "repo": "nixpkgs", + "type": "github" + } + }, + "nixpkgs-unstable": { + "locked": { + "lastModified": 1689791806, + "narHash": "sha256-QpXjfiyBFwa7MV/J6nM5FoBreks9O7j9cAZxV22MR8A=", + "owner": "NixOS", + "repo": "nixpkgs", + "rev": "439ba0789ff84dddea64eb2d47a4a0d4887dbb1f", + "type": "github" + }, + "original": { + "owner": "NixOS", + "ref": "pull/244135/head", + "repo": "nixpkgs", + "type": "github" + } + }, + "root": { + "inputs": { + "flake-compat": "flake-compat", + "flake-utils": "flake-utils", + "nixpkgs": [ + "nixpkgs-unstable" + ], + "nixpkgs-22_11": "nixpkgs-22_11", + "nixpkgs-23_05": "nixpkgs-23_05", + "nixpkgs-unstable": "nixpkgs-unstable" + } + }, + "systems": { + "locked": { + "lastModified": 1681028828, + "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", + "owner": "nix-systems", + "repo": "default", + "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e", + "type": "github" + }, + "original": { + "owner": "nix-systems", + "repo": "default", + "type": "github" + } + } + }, + "root": "root", + "version": 7 +} diff --git a/flake.nix b/flake.nix new file mode 100644 index 000000000..bde792db3 --- /dev/null +++ b/flake.nix @@ -0,0 +1,240 @@ +{ + description = "Tahoe-LAFS, free and open decentralized data store"; + + nixConfig = { + # Supply configuration for the build cache updated by our CI system. 
This + # should allow most users to avoid having to build a large number of + # packages (otherwise necessary due to our Python package overrides). + substituters = ["https://tahoe-lafs-opensource.cachix.org"]; + trusted-public-keys = ["tahoe-lafs-opensource.cachix.org-1:eIKCHOPJYceJ2gb74l6e0mayuSdXqiavxYeAio0LFGo="]; + }; + + inputs = { + # A couple possible nixpkgs pins. Ideally these could be selected easily + # from the command line but there seems to be no syntax/support for that. + # However, these at least cause certain revisions to be pinned in our lock + # file where you *can* dig them out - and the CI configuration does. + # + # These are really just examples for the time being since neither of these + # releases contains a package set that is completely compatible with our + # requirements. We could decide in the future that supporting multiple + # releases of NixOS at a time is worthwhile and then pins like these will + # help us test each of those releases. + "nixpkgs-22_11" = { + url = github:NixOS/nixpkgs?ref=nixos-22.11; + }; + "nixpkgs-23_05" = { + url = github:NixOS/nixpkgs?ref=nixos-23.05; + }; + + # We depend on a very new python-cryptography which is not yet available + # from any release branch of nixpkgs. However, it is contained in a PR + # currently up for review. Point our nixpkgs at that for now. + "nixpkgs-unstable" = { + url = github:NixOS/nixpkgs?ref=pull/244135/head; + }; + + # Point the default nixpkgs at one of those. This avoids having getting a + # _third_ package set involved and gives a way to provide what should be a + # working experience by default (that is, if nixpkgs doesn't get + # overridden). + nixpkgs.follows = "nixpkgs-unstable"; + + # Also get flake-utils for simplified multi-system definitions. + flake-utils = { + url = github:numtide/flake-utils; + }; + + # And get a helper that lets us easily continue to provide a default.nix. 
+ flake-compat = { + url = "github:edolstra/flake-compat"; + flake = false; + }; + }; + + outputs = { self, nixpkgs, flake-utils, ... }: + { + # Expose an overlay which adds our version of Tahoe-LAFS to the Python + # package sets we specify, as well as all of the correct versions of its + # dependencies. + # + # We will also use this to define some other outputs since it gives us + # the most succinct way to get a working Tahoe-LAFS package. + overlays.default = import ./nix/overlay.nix; + + } // (flake-utils.lib.eachDefaultSystem (system: let + + # The package set for this system architecture. + pkgs = import nixpkgs { + inherit system; + # And include our Tahoe-LAFS package in that package set. + overlays = [ self.overlays.default ]; + }; + + # pythonVersions :: [string] + # + # The version strings for the Python runtimes we'll work with. + pythonVersions = + let + # Match attribute names that look like a Python derivation - CPython + # or PyPy. We take care to avoid things like "python-foo" and + # "python3Full-unittest" though. We only want things like "pypy38" + # or "python311". + nameMatches = name: null != builtins.match "(python|pypy)3[[:digit:]]{0,2}" name; + + # Sometimes an old version is left in the package set as an error + # saying something like "we remove this". Make sure we whatever we + # found by name evaluates without error, too. + notError = drv: (builtins.tryEval drv).success; + in + # Discover all of the Python runtime derivations by inspecting names + # and filtering out derivations with errors. + builtins.attrNames ( + pkgs.lib.attrsets.filterAttrs + (name: drv: nameMatches name && notError drv) + pkgs + ); + + # defaultPyVersion :: string + # + # An element of pythonVersions which we'll use for the default package. + defaultPyVersion = "python3"; + + # pythons :: [derivation] + # + # Retrieve the actual Python package for each configured version. We + # already applied our overlay to pkgs so our packages will already be + # available. 
+ pythons = builtins.map (pyVer: pkgs.${pyVer}) pythonVersions; + + # packageName :: string -> string + # + # Construct the Tahoe-LAFS package name for the given Python runtime. + packageName = pyVersion: "${pyVersion}-tahoe-lafs"; + + # string -> string + # + # Construct the unit test application name for the given Python runtime. + unitTestName = pyVersion: "${pyVersion}-unittest"; + + # (string -> a) -> (string -> b) -> string -> attrset a b + # + # Make a singleton attribute set from the result of two functions. + singletonOf = f: g: x: { ${f x} = g x; }; + + # [attrset] -> attrset + # + # Merge a list of attrset into a single attrset with overlap preferring + # rightmost values. + mergeAttrs = pkgs.lib.foldr pkgs.lib.mergeAttrs {}; + + # makeRuntimeEnv :: string -> derivation + # + # Create a derivation that includes a Python runtime, Tahoe-LAFS, and + # all of its dependencies. + makeRuntimeEnv = singletonOf packageName makeRuntimeEnv'; + makeRuntimeEnv' = pyVersion: (pkgs.${pyVersion}.withPackages (ps: with ps; + [ tahoe-lafs ] ++ + tahoe-lafs.passthru.extras.i2p ++ + tahoe-lafs.passthru.extras.tor + )).overrideAttrs (old: { + # By default, withPackages gives us a derivation with a fairly generic + # name (like "python-env"). Put our name in there for legibility. + # See the similar override in makeTestEnv. + name = packageName pyVersion; + }); + + # makeTestEnv :: string -> derivation + # + # Create a derivation that includes a Python runtime and all of the + # Tahoe-LAFS dependencies, but not Tahoe-LAFS itself, which we'll get + # from the working directory. + makeTestEnv = pyVersion: (pkgs.${pyVersion}.withPackages (ps: with ps; + [ tahoe-lafs ] ++ + tahoe-lafs.passthru.extras.i2p ++ + tahoe-lafs.passthru.extras.tor ++ + tahoe-lafs.passthru.extras.unittest + )).overrideAttrs (old: { + # See the similar override in makeRuntimeEnv'. + name = packageName pyVersion; + }); + in { + # Include a package set with out overlay on it in our own output. 
This + # is mainly a development/debugging convenience as it will expose all of + # our Python package overrides beneath it. The magic name + # "legacyPackages" is copied from nixpkgs and has special support in the + # nix command line tool. + legacyPackages = pkgs; + + # The flake's package outputs. We'll define one version of the package + # for each version of Python we could find. We'll also point the + # flake's "default" package at the derivation corresponding to the + # default Python version we defined above. The package consists of a + # Python environment with Tahoe-LAFS available to it. + packages = + mergeAttrs ( + [ { default = self.packages.${system}.${packageName defaultPyVersion}; } ] + ++ (builtins.map makeRuntimeEnv pythonVersions) + ++ (builtins.map (singletonOf unitTestName makeTestEnv) pythonVersions) + ); + + # The flake's app outputs. We'll define a version of an app for running + # the test suite for each version of Python we could find. We'll also + # define a version of an app for running the "tahoe" command-line + # entrypoint for each version of Python we could find. + apps = + let + # writeScript :: string -> string -> path + # + # Write a shell program to a file so it can be run later. + # + # We avoid writeShellApplication here because it has ghc as a + # dependency but ghc has Python as a dependency and our Python + # package override triggers a rebuild of ghc and many Haskell + # packages which takes a looong time. + writeScript = name: text: "${pkgs.writeShellScript name text}"; + + # makeTahoeApp :: string -> attrset + # + # A helper function to define the Tahoe-LAFS runtime entrypoint for + # a certain Python runtime. 
+ makeTahoeApp = pyVersion: { + "tahoe-${pyVersion}" = { + type = "app"; + program = + writeScript "tahoe" + '' + ${makeRuntimeEnv' pyVersion}/bin/tahoe "$@" + ''; + }; + }; + + # makeUnitTestsApp :: string -> attrset + # + # A helper function to define the Tahoe-LAFS unit test entrypoint + # for a certain Python runtime. + makeUnitTestsApp = pyVersion: { + "${unitTestName pyVersion}" = { + type = "app"; + program = + let + python = "${makeTestEnv pyVersion}/bin/python"; + in + writeScript "unit-tests" + '' + ${python} setup.py update_version + export TAHOE_LAFS_HYPOTHESIS_PROFILE=ci + export PYTHONPATH=$PWD/src + ${python} -m twisted.trial "$@" + ''; + }; + }; + in + # Merge a default app definition with the rest of the apps. + mergeAttrs ( + [ { default = self.apps.${system}."tahoe-python3"; } ] + ++ (builtins.map makeUnitTestsApp pythonVersions) + ++ (builtins.map makeTahoeApp pythonVersions) + ); + })); +} diff --git a/integration/conftest.py b/integration/conftest.py index 39ff3b42b..313ff36c2 100644 --- a/integration/conftest.py +++ b/integration/conftest.py @@ -1,35 +1,26 @@ """ Ported to Python 3. 
""" -from __future__ import unicode_literals -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 +from __future__ import annotations +import os import sys import shutil +from attr import frozen from time import sleep -from os import mkdir, listdir, environ +from os import mkdir, environ from os.path import join, exists -from tempfile import mkdtemp, mktemp -from functools import partial -from json import loads - -from foolscap.furl import ( - decode_furl, -) +from tempfile import mkdtemp from eliot import ( to_file, log_call, ) +from twisted.python.filepath import FilePath from twisted.python.procutils import which -from twisted.internet.defer import DeferredList +from twisted.internet.defer import DeferredList, succeed from twisted.internet.error import ( ProcessExitedAlready, ProcessTerminated, @@ -37,23 +28,32 @@ from twisted.internet.error import ( import pytest import pytest_twisted +from typing import Mapping from .util import ( - _CollectOutputProtocol, _MagicTextProtocol, _DumpOutputProtocol, _ProcessExitedProtocol, _create_node, - _cleanup_tahoe_process, _tahoe_runner_optional_coverage, await_client_ready, - TahoeProcess, - cli, - _run_node, - generate_ssh_key, block_with_timeout, ) +from .grid import ( + create_flog_gatherer, + create_grid, +) +from allmydata.node import read_config +from allmydata.util.iputil import allocate_tcp_port +# No reason for HTTP requests to take longer than four minutes in the +# integration tests. See allmydata/scripts/common_http.py for usage. 
+os.environ["__TAHOE_CLI_HTTP_TIMEOUT"] = "240" + +# Make Foolscap logging go into Twisted logging, so that integration test logs +# include extra information +# (https://github.com/warner/foolscap/blob/latest-release/doc/logging.rst): +os.environ["FLOGTOTWISTED"] = "1" # pytest customization hooks @@ -66,6 +66,29 @@ def pytest_addoption(parser): "--coverage", action="store_true", dest="coverage", help="Collect coverage statistics", ) + parser.addoption( + "--force-foolscap", action="store_true", default=False, + dest="force_foolscap", + help=("If set, force Foolscap only for the storage protocol. " + + "Otherwise HTTP will be used.") + ) + parser.addoption( + "--runslow", action="store_true", default=False, + dest="runslow", + help="If set, run tests marked as slow.", + ) + +def pytest_collection_modifyitems(session, config, items): + if not config.option.runslow: + # The --runslow option was not given; keep only collected items not + # marked as slow. + items[:] = [ + item + for item + in items + if item.get_closest_marker("slow") is None + ] + @pytest.fixture(autouse=True, scope='session') def eliot_logging(): @@ -89,9 +112,21 @@ def reactor(): return _reactor +@pytest.fixture(scope='session') +@log_call(action_type=u"integration:port_allocator", include_result=False) +def port_allocator(reactor): + # these will appear basically random, which can make especially + # manual debugging harder but we're re-using code instead of + # writing our own...so, win? + def allocate(): + port = allocate_tcp_port() + return succeed(port) + return allocate + + @pytest.fixture(scope='session') @log_call(action_type=u"integration:temp_dir", include_args=[]) -def temp_dir(request): +def temp_dir(request) -> str: """ Invoke like 'py.test --keep-tempdir ...' 
to avoid deleting the temp-dir """ @@ -123,154 +158,48 @@ def flog_binary(): @pytest.fixture(scope='session') @log_call(action_type=u"integration:flog_gatherer", include_args=[]) def flog_gatherer(reactor, temp_dir, flog_binary, request): - out_protocol = _CollectOutputProtocol() - gather_dir = join(temp_dir, 'flog_gather') - reactor.spawnProcess( - out_protocol, - flog_binary, - ( - 'flogtool', 'create-gatherer', - '--location', 'tcp:localhost:3117', - '--port', '3117', - gather_dir, - ) + fg = pytest_twisted.blockon( + create_flog_gatherer(reactor, request, temp_dir, flog_binary) ) - pytest_twisted.blockon(out_protocol.done) - - twistd_protocol = _MagicTextProtocol("Gatherer waiting at") - twistd_process = reactor.spawnProcess( - twistd_protocol, - which('twistd')[0], - ( - 'twistd', '--nodaemon', '--python', - join(gather_dir, 'gatherer.tac'), - ), - path=gather_dir, - ) - pytest_twisted.blockon(twistd_protocol.magic_seen) - - def cleanup(): - _cleanup_tahoe_process(twistd_process, twistd_protocol.exited) - - flog_file = mktemp('.flog_dump') - flog_protocol = _DumpOutputProtocol(open(flog_file, 'w')) - flog_dir = join(temp_dir, 'flog_gather') - flogs = [x for x in listdir(flog_dir) if x.endswith('.flog')] - - print("Dumping {} flogtool logfiles to '{}'".format(len(flogs), flog_file)) - reactor.spawnProcess( - flog_protocol, - flog_binary, - ( - 'flogtool', 'dump', join(temp_dir, 'flog_gather', flogs[0]) - ), - ) - print("Waiting for flogtool to complete") - try: - block_with_timeout(flog_protocol.done, reactor) - except ProcessTerminated as e: - print("flogtool exited unexpectedly: {}".format(str(e))) - print("Flogtool completed") - - request.addfinalizer(cleanup) - - with open(join(gather_dir, 'log_gatherer.furl'), 'r') as f: - furl = f.read().strip() - return furl + return fg @pytest.fixture(scope='session') -@log_call( - action_type=u"integration:introducer", - include_args=["temp_dir", "flog_gatherer"], - include_result=False, -) -def introducer(reactor, 
temp_dir, flog_gatherer, request): - config = ''' -[node] -nickname = introducer0 -web.port = 4560 -log_gatherer.furl = {log_furl} -'''.format(log_furl=flog_gatherer) +@log_call(action_type=u"integration:grid", include_args=[]) +def grid(reactor, request, temp_dir, flog_gatherer, port_allocator): + """ + Provides a new Grid with a single Introducer and flog-gathering process. - intro_dir = join(temp_dir, 'introducer') - print("making introducer", intro_dir) - - if not exists(intro_dir): - mkdir(intro_dir) - done_proto = _ProcessExitedProtocol() - _tahoe_runner_optional_coverage( - done_proto, - reactor, - request, - ( - 'create-introducer', - '--listen=tcp', - '--hostname=localhost', - intro_dir, - ), - ) - pytest_twisted.blockon(done_proto.done) - - # over-write the config file with our stuff - with open(join(intro_dir, 'tahoe.cfg'), 'w') as f: - f.write(config) - - # "tahoe run" is consistent across Linux/macOS/Windows, unlike the old - # "start" command. - protocol = _MagicTextProtocol('introducer running') - transport = _tahoe_runner_optional_coverage( - protocol, - reactor, - request, - ( - 'run', - intro_dir, - ), + Notably does _not_ provide storage servers; use the storage_nodes + fixture if your tests need a Grid that can be used for puts / gets. 
+ """ + g = pytest_twisted.blockon( + create_grid(reactor, request, temp_dir, flog_gatherer, port_allocator) ) - request.addfinalizer(partial(_cleanup_tahoe_process, transport, protocol.exited)) + return g - pytest_twisted.blockon(protocol.magic_seen) - return TahoeProcess(transport, intro_dir) + +@pytest.fixture(scope='session') +def introducer(grid): + return grid.introducer @pytest.fixture(scope='session') @log_call(action_type=u"integration:introducer:furl", include_args=["temp_dir"]) def introducer_furl(introducer, temp_dir): - furl_fname = join(temp_dir, 'introducer', 'private', 'introducer.furl') - while not exists(furl_fname): - print("Don't see {} yet".format(furl_fname)) - sleep(.1) - furl = open(furl_fname, 'r').read() - tubID, location_hints, name = decode_furl(furl) - if not location_hints: - # If there are no location hints then nothing can ever possibly - # connect to it and the only thing that can happen next is something - # will hang or time out. So just give up right now. 
- raise ValueError( - "Introducer ({!r}) fURL has no location hints!".format( - introducer_furl, - ), - ) - return furl + return introducer.furl -@pytest.fixture(scope='session') +@pytest.fixture @log_call( action_type=u"integration:tor:introducer", include_args=["temp_dir", "flog_gatherer"], include_result=False, ) -def tor_introducer(reactor, temp_dir, flog_gatherer, request): - config = ''' -[node] -nickname = introducer_tor -web.port = 4561 -log_gatherer.furl = {log_furl} -'''.format(log_furl=flog_gatherer) - +def tor_introducer(reactor, temp_dir, flog_gatherer, request, tor_network): intro_dir = join(temp_dir, 'introducer_tor') - print("making introducer", intro_dir) + print("making Tor introducer in {}".format(intro_dir)) + print("(this can take tens of seconds to allocate Onion address)") if not exists(intro_dir): mkdir(intro_dir) @@ -281,20 +210,23 @@ log_gatherer.furl = {log_furl} request, ( 'create-introducer', - '--tor-control-port', 'tcp:localhost:8010', + '--tor-control-port', tor_network.client_control_endpoint, + '--hide-ip', '--listen=tor', intro_dir, ), ) pytest_twisted.blockon(done_proto.done) - # over-write the config file with our stuff - with open(join(intro_dir, 'tahoe.cfg'), 'w') as f: - f.write(config) + # adjust a few settings + config = read_config(intro_dir, "tub.port") + config.set_config("node", "nickname", "introducer-tor") + config.set_config("node", "web.port", "4561") + config.set_config("node", "log_gatherer.furl", flog_gatherer.furl) # "tahoe run" is consistent across Linux/macOS/Windows, unlike the old # "start" command. 
- protocol = _MagicTextProtocol('introducer running') + protocol = _MagicTextProtocol('introducer running', "tor_introducer") transport = _tahoe_runner_optional_coverage( protocol, reactor, @@ -313,101 +245,51 @@ log_gatherer.furl = {log_furl} pass request.addfinalizer(cleanup) + print("Waiting for introducer to be ready...") pytest_twisted.blockon(protocol.magic_seen) + print("Introducer ready.") return transport -@pytest.fixture(scope='session') +@pytest.fixture def tor_introducer_furl(tor_introducer, temp_dir): furl_fname = join(temp_dir, 'introducer_tor', 'private', 'introducer.furl') while not exists(furl_fname): print("Don't see {} yet".format(furl_fname)) sleep(.1) furl = open(furl_fname, 'r').read() + print(f"Found Tor introducer furl: {furl} in {furl_fname}") return furl @pytest.fixture(scope='session') @log_call( action_type=u"integration:storage_nodes", - include_args=["temp_dir", "introducer_furl", "flog_gatherer"], + include_args=["grid"], include_result=False, ) -def storage_nodes(reactor, temp_dir, introducer, introducer_furl, flog_gatherer, request): +def storage_nodes(grid): nodes_d = [] # start all 5 nodes in parallel for x in range(5): - name = 'node{}'.format(x) - web_port= 9990 + x - nodes_d.append( - _create_node( - reactor, request, temp_dir, introducer_furl, flog_gatherer, name, - web_port="tcp:{}:interface=localhost".format(web_port), - storage=True, - ) - ) + nodes_d.append(grid.add_storage_node()) + nodes_status = pytest_twisted.blockon(DeferredList(nodes_d)) - nodes = [] - for ok, process in nodes_status: - assert ok, "Storage node creation failed: {}".format(process) - nodes.append(process) - return nodes + for ok, value in nodes_status: + assert ok, "Storage node creation failed: {}".format(value) + return grid.storage_servers @pytest.fixture(scope='session') @log_call(action_type=u"integration:alice", include_args=[], include_result=False) -def alice(reactor, temp_dir, introducer_furl, flog_gatherer, storage_nodes, request): - process 
= pytest_twisted.blockon( - _create_node( - reactor, request, temp_dir, introducer_furl, flog_gatherer, "alice", - web_port="tcp:9980:interface=localhost", - storage=False, - # We're going to kill this ourselves, so no need for finalizer to - # do it: - finalize=False, - ) - ) - await_client_ready(process) - - # 1. Create a new RW directory cap: - cli(process, "create-alias", "test") - rwcap = loads(cli(process, "list-aliases", "--json"))["test"]["readwrite"] - - # 2. Enable SFTP on the node: - host_ssh_key_path = join(process.node_dir, "private", "ssh_host_rsa_key") - accounts_path = join(process.node_dir, "private", "accounts") - with open(join(process.node_dir, "tahoe.cfg"), "a") as f: - f.write("""\ -[sftpd] -enabled = true -port = tcp:8022:interface=127.0.0.1 -host_pubkey_file = {ssh_key_path}.pub -host_privkey_file = {ssh_key_path} -accounts.file = {accounts_path} -""".format(ssh_key_path=host_ssh_key_path, accounts_path=accounts_path)) - generate_ssh_key(host_ssh_key_path) - - # 3. Add a SFTP access file with username/password and SSH key auth. - - # The client SSH key path is typically going to be somewhere else (~/.ssh, - # typically), but for convenience sake for testing we'll put it inside node. - client_ssh_key_path = join(process.node_dir, "private", "ssh_client_rsa_key") - generate_ssh_key(client_ssh_key_path) - # Pub key format is "ssh-rsa ". We want the key. - ssh_public_key = open(client_ssh_key_path + ".pub").read().strip().split()[1] - with open(accounts_path, "w") as f: - f.write("""\ -alice password {rwcap} - -alice2 ssh-rsa {ssh_public_key} {rwcap} -""".format(rwcap=rwcap, ssh_public_key=ssh_public_key)) - - # 4. Restart the node with new SFTP config. 
- process.kill() - pytest_twisted.blockon(_run_node(reactor, process.node_dir, request, None)) - - await_client_ready(process) - return process +def alice(reactor, request, grid, storage_nodes): + """ + :returns grid.Client: the associated instance for Alice + """ + alice = pytest_twisted.blockon(grid.add_client("alice")) + pytest_twisted.blockon(alice.add_sftp(reactor, request)) + print(f"Alice pid: {alice.process.transport.pid}") + return alice @pytest.fixture(scope='session') @@ -420,22 +302,43 @@ def bob(reactor, temp_dir, introducer_furl, flog_gatherer, storage_nodes, reques storage=False, ) ) - await_client_ready(process) + pytest_twisted.blockon(await_client_ready(process)) return process @pytest.fixture(scope='session') @pytest.mark.skipif(sys.platform.startswith('win'), 'Tor tests are unstable on Windows') -def chutney(reactor, temp_dir): +def chutney(reactor, temp_dir: str) -> tuple[str, dict[str, str]]: + """ + Install the Chutney software that is required to run a small local Tor grid. + + (Chutney lacks the normal "python stuff" so we can't just declare + it in Tox or similar dependencies) + """ + # Try to find Chutney already installed in the environment. + try: + import chutney + except ImportError: + # Nope, we'll get our own in a moment. + pass + else: + # We already have one, just use it. + return ( + # from `checkout/lib/chutney/__init__.py` we want to get back to + # `checkout` because that's the parent of the directory with all + # of the network definitions. So, great-grand-parent. + FilePath(chutney.__file__).parent().parent().parent().path, + # There's nothing to add to the environment. + {}, + ) chutney_dir = join(temp_dir, 'chutney') mkdir(chutney_dir) - # TODO: - - # check for 'tor' binary explicitly and emit a "skip" if we can't - # find it + missing = [exe for exe in ["tor", "tor-gencert"] if not which(exe)] + if missing: + pytest.skip(f"Some command-line tools not found: {missing}") # XXX yuck! 
should add a setup.py to chutney so we can at least # "pip install " and/or depend on chutney in "pip @@ -448,17 +351,15 @@ def chutney(reactor, temp_dir): 'git', ( 'git', 'clone', - 'https://git.torproject.org/chutney.git', + 'https://gitlab.torproject.org/tpo/core/chutney.git', chutney_dir, ), env=environ, ) pytest_twisted.blockon(proto.done) - # XXX: Here we reset Chutney to the last revision known to work - # with Python 2, as a workaround for Chutney moving to Python 3. - # When this is no longer necessary, we will have to drop this and - # add '--depth=1' back to the above 'git clone' subprocess. + # XXX: Here we reset Chutney to a specific revision known to work, + # since there are no stability guarantees or releases yet. proto = _DumpOutputProtocol(None) reactor.spawnProcess( proto, @@ -466,94 +367,131 @@ def chutney(reactor, temp_dir): ( 'git', '-C', chutney_dir, 'reset', '--hard', - '99bd06c7554b9113af8c0877b6eca4ceb95dcbaa' + 'c4f6789ad2558dcbfeb7d024c6481d8112bfb6c2' ), env=environ, ) pytest_twisted.blockon(proto.done) - return chutney_dir + return chutney_dir, {"PYTHONPATH": join(chutney_dir, "lib")} + + +@frozen +class ChutneyTorNetwork: + """ + Represents a running Chutney (tor) network. Returned by the + "tor_network" fixture. + """ + dir: FilePath + environ: Mapping[str, str] + client_control_port: int + + @property + def client_control_endpoint(self) -> str: + return "tcp:localhost:{}".format(self.client_control_port) @pytest.fixture(scope='session') @pytest.mark.skipif(sys.platform.startswith('win'), reason='Tor tests are unstable on Windows') def tor_network(reactor, temp_dir, chutney, request): + """ + Build a basic Tor network. - # this is the actual "chutney" script at the root of a chutney checkout - chutney_dir = chutney - chut = join(chutney_dir, 'chutney') + Instantiate the "networks/basic" Chutney configuration for a local + Tor network. 
- # now, as per Chutney's README, we have to create the network - # ./chutney configure networks/basic - # ./chutney start networks/basic + This provides a small, local Tor network that can run v3 Onion + Services. It has 3 authorities, 5 relays and 2 clients. + + The 'chutney' fixture pins a Chutney git qrevision, so things + shouldn't change. This network has two clients which are the only + nodes with valid SocksPort configuration ("008c" and "009c" 9008 + and 9009) + + The control ports start at 8000 (so the ControlPort for the client + nodes are 8008 and 8009). + + :param chutney: The root directory of a Chutney checkout and a dict of + additional environment variables to set so a Python process can use + it. + + :return: None + """ + chutney_root, chutney_env = chutney + basic_network = join(chutney_root, 'networks', 'basic') env = environ.copy() - env.update({"PYTHONPATH": join(chutney_dir, "lib")}) - proto = _DumpOutputProtocol(None) - reactor.spawnProcess( - proto, - sys.executable, - ( - sys.executable, '-m', 'chutney.TorNet', 'configure', - join(chutney_dir, 'networks', 'basic'), - ), - path=join(chutney_dir), - env=env, - ) - pytest_twisted.blockon(proto.done) - - proto = _DumpOutputProtocol(None) - reactor.spawnProcess( - proto, - sys.executable, - ( - sys.executable, '-m', 'chutney.TorNet', 'start', - join(chutney_dir, 'networks', 'basic'), - ), - path=join(chutney_dir), - env=env, - ) - pytest_twisted.blockon(proto.done) - - # print some useful stuff - proto = _CollectOutputProtocol() - reactor.spawnProcess( - proto, - sys.executable, - ( - sys.executable, '-m', 'chutney.TorNet', 'status', - join(chutney_dir, 'networks', 'basic'), - ), - path=join(chutney_dir), - env=env, - ) - try: - pytest_twisted.blockon(proto.done) - except ProcessTerminated: - print("Chutney.TorNet status failed (continuing):") - print(proto.output.getvalue()) - - def cleanup(): - print("Tearing down Chutney Tor network") - proto = _CollectOutputProtocol() + 
env.update(chutney_env) + env.update({ + # default is 60, probably too short for reliable automated use. + "CHUTNEY_START_TIME": "600", + }) + chutney_argv = (sys.executable, '-m', 'chutney.TorNet') + def chutney(argv): + proto = _DumpOutputProtocol(None) reactor.spawnProcess( proto, sys.executable, - ( - sys.executable, '-m', 'chutney.TorNet', 'stop', - join(chutney_dir, 'networks', 'basic'), - ), - path=join(chutney_dir), + chutney_argv + argv, + path=join(chutney_root), env=env, ) + return proto.done + + # now, as per Chutney's README, we have to create the network + pytest_twisted.blockon(chutney(("configure", basic_network))) + + # before we start the network, ensure we will tear down at the end + def cleanup(): + print("Tearing down Chutney Tor network") try: - block_with_timeout(proto.done, reactor) + block_with_timeout(chutney(("stop", basic_network)), reactor) except ProcessTerminated: # If this doesn't exit cleanly, that's fine, that shouldn't fail # the test suite. pass - request.addfinalizer(cleanup) - return chut + pytest_twisted.blockon(chutney(("start", basic_network))) + + # Wait for the nodes to "bootstrap" - ie, form a network among themselves. 
+    # Successful bootstrap is reported with a message something like:
+    #
+    # Everything bootstrapped after 151 sec
+    # Bootstrap finished: 151 seconds
+    # Node status:
+    # test000a : 100, done , Done
+    # test001a : 100, done , Done
+    # test002a : 100, done , Done
+    # test003r : 100, done , Done
+    # test004r : 100, done , Done
+    # test005r : 100, done , Done
+    # test006r : 100, done , Done
+    # test007r : 100, done , Done
+    # test008c : 100, done , Done
+    # test009c : 100, done , Done
+    # Published dir info:
+    # test000a : 100, all nodes , desc md md_cons ns_cons , Dir info cached
+    # test001a : 100, all nodes , desc md md_cons ns_cons , Dir info cached
+    # test002a : 100, all nodes , desc md md_cons ns_cons , Dir info cached
+    # test003r : 100, all nodes , desc md md_cons ns_cons , Dir info cached
+    # test004r : 100, all nodes , desc md md_cons ns_cons , Dir info cached
+    # test005r : 100, all nodes , desc md md_cons ns_cons , Dir info cached
+    # test006r : 100, all nodes , desc md md_cons ns_cons , Dir info cached
+    # test007r : 100, all nodes , desc md md_cons ns_cons , Dir info cached
+    pytest_twisted.blockon(chutney(("wait_for_bootstrap", basic_network)))
+
+    # print some useful stuff
+    try:
+        pytest_twisted.blockon(chutney(("status", basic_network)))
+    except ProcessTerminated:
+        print("Chutney.TorNet status failed (continuing)")
+
+    # the "8008" comes from configuring "networks/basic" in chutney
+    # and then examining "net/nodes/008c/torrc" for ControlPort value
+    return ChutneyTorNetwork(
+        chutney_root,
+        chutney_env,
+        8008,
+    )
diff --git a/integration/grid.py b/integration/grid.py
new file mode 100644
index 000000000..b97c22bf7
--- /dev/null
+++ b/integration/grid.py
@@ -0,0 +1,529 @@
+"""
+Classes which directly represent various kinds of Tahoe processes
+that co-operate to form "a Grid".
+ +These methods and objects are used by conftest.py fixtures but may +also be used as direct helpers for tests that don't want to (or can't) +rely on 'the' global grid as provided by fixtures like 'alice' or +'storage_servers'. +""" + +from os import mkdir, listdir +from os.path import join, exists +from json import loads +from tempfile import mktemp +from time import sleep + +from eliot import ( + log_call, +) + +from foolscap.furl import ( + decode_furl, +) + +from twisted.python.procutils import which +from twisted.internet.defer import ( + inlineCallbacks, + returnValue, + Deferred, +) +from twisted.internet.task import ( + deferLater, +) +from twisted.internet.interfaces import ( + IProcessTransport, + IProcessProtocol, +) +from twisted.internet.error import ProcessTerminated + +from allmydata.util.attrs_provides import ( + provides, +) +from allmydata.node import read_config +from .util import ( + _CollectOutputProtocol, + _MagicTextProtocol, + _DumpOutputProtocol, + _ProcessExitedProtocol, + _run_node, + _cleanup_tahoe_process, + _tahoe_runner_optional_coverage, + TahoeProcess, + await_client_ready, + generate_ssh_key, + cli, + reconfigure, + _create_node, +) + +import attr +import pytest_twisted + + +# currently, we pass a "request" around a bunch but it seems to only +# be for addfinalizer() calls. +# - is "keeping" a request like that okay? What if it's a session-scoped one? +# (i.e. in Grid etc) +# - maybe limit to "a callback to hang your cleanup off of" (instead of request)? + + +@attr.s +class FlogGatherer(object): + """ + Flog Gatherer process. 
+ """ + process = attr.ib( + validator=provides(IProcessTransport) + ) + protocol = attr.ib( + validator=provides(IProcessProtocol) + ) + furl = attr.ib() + + +@inlineCallbacks +def create_flog_gatherer(reactor, request, temp_dir, flog_binary): + out_protocol = _CollectOutputProtocol() + gather_dir = join(temp_dir, 'flog_gather') + reactor.spawnProcess( + out_protocol, + flog_binary, + ( + 'flogtool', 'create-gatherer', + '--location', 'tcp:localhost:3117', + '--port', '3117', + gather_dir, + ) + ) + yield out_protocol.done + + twistd_protocol = _MagicTextProtocol("Gatherer waiting at", "gatherer") + twistd_process = reactor.spawnProcess( + twistd_protocol, + which('twistd')[0], + ( + 'twistd', '--nodaemon', '--python', + join(gather_dir, 'gatherer.tac'), + ), + path=gather_dir, + ) + yield twistd_protocol.magic_seen + + def cleanup(): + _cleanup_tahoe_process(twistd_process, twistd_protocol.exited) + + flog_file = mktemp('.flog_dump') + flog_protocol = _DumpOutputProtocol(open(flog_file, 'w')) + flog_dir = join(temp_dir, 'flog_gather') + flogs = [x for x in listdir(flog_dir) if x.endswith('.flog')] + + print("Dumping {} flogtool logfiles to '{}'".format(len(flogs), flog_file)) + for flog_path in flogs: + reactor.spawnProcess( + flog_protocol, + flog_binary, + ( + 'flogtool', 'dump', join(temp_dir, 'flog_gather', flog_path) + ), + ) + print("Waiting for flogtool to complete") + try: + pytest_twisted.blockon(flog_protocol.done) + except ProcessTerminated as e: + print("flogtool exited unexpectedly: {}".format(str(e))) + print("Flogtool completed") + + request.addfinalizer(cleanup) + + with open(join(gather_dir, 'log_gatherer.furl'), 'r') as f: + furl = f.read().strip() + returnValue( + FlogGatherer( + protocol=twistd_protocol, + process=twistd_process, + furl=furl, + ) + ) + + +@attr.s +class StorageServer(object): + """ + Represents a Tahoe Storage Server + """ + + process = attr.ib( + validator=attr.validators.instance_of(TahoeProcess) + ) + protocol = attr.ib( + 
validator=provides(IProcessProtocol) + ) + + @inlineCallbacks + def restart(self, reactor, request): + """ + re-start our underlying process by issuing a TERM, waiting and + then running again. await_client_ready() will be done as well + + Note that self.process and self.protocol will be new instances + after this. + """ + self.process.transport.signalProcess('TERM') + yield self.protocol.exited + self.process = yield _run_node( + reactor, self.process.node_dir, request, None, + ) + self.protocol = self.process.transport.proto + yield await_client_ready(self.process) + + +@inlineCallbacks +def create_storage_server(reactor, request, temp_dir, introducer, flog_gatherer, name, web_port, + needed=2, happy=3, total=4): + """ + Create a new storage server + """ + node_process = yield _create_node( + reactor, request, temp_dir, introducer.furl, flog_gatherer, + name, web_port, storage=True, needed=needed, happy=happy, total=total, + ) + storage = StorageServer( + process=node_process, + # node_process is a TahoeProcess. its transport is an + # IProcessTransport. in practice, this means it is a + # twisted.internet._baseprocess.BaseProcess. BaseProcess records the + # process protocol as its proto attribute. + protocol=node_process.transport.proto, + ) + returnValue(storage) + + +@attr.s +class Client(object): + """ + Represents a Tahoe client + """ + + process = attr.ib( + validator=attr.validators.instance_of(TahoeProcess) + ) + protocol = attr.ib( + validator=provides(IProcessProtocol) + ) + request = attr.ib() # original request, for addfinalizer() + +## XXX convenience? or confusion? 
+# @property +# def node_dir(self): +# return self.process.node_dir + + @inlineCallbacks + def reconfigure_zfec(self, reactor, zfec_params, convergence=None, max_segment_size=None): + """ + Reconfigure the ZFEC parameters for this node + """ + # XXX this is a stop-gap to keep tests running "as is" + # -> we should fix the tests so that they create a new client + # in the grid with the required parameters, instead of + # re-configuring Alice (or whomever) + + rtn = yield Deferred.fromCoroutine( + reconfigure(reactor, self.request, self.process, zfec_params, convergence, max_segment_size) + ) + return rtn + + @inlineCallbacks + def restart(self, reactor, request, servers=1): + """ + re-start our underlying process by issuing a TERM, waiting and + then running again. + + :param int servers: number of server connections we will wait + for before being 'ready' + + Note that self.process and self.protocol will be new instances + after this. + """ + # XXX similar to above, can we make this return a new instance + # instead of mutating? + self.process.transport.signalProcess('TERM') + yield self.protocol.exited + process = yield _run_node( + reactor, self.process.node_dir, request, None, + ) + self.process = process + self.protocol = self.process.transport.proto + yield await_client_ready(self.process, minimum_number_of_servers=servers) + + @inlineCallbacks + def add_sftp(self, reactor, request): + """ + """ + # if other things need to add or change configuration, further + # refactoring could be useful here (i.e. move reconfigure + # parts to their own functions) + + # XXX why do we need an alias? + # 1. Create a new RW directory cap: + cli(self.process, "create-alias", "test") + rwcap = loads(cli(self.process, "list-aliases", "--json"))["test"]["readwrite"] + + # 2. 
Enable SFTP on the node: + host_ssh_key_path = join(self.process.node_dir, "private", "ssh_host_rsa_key") + sftp_client_key_path = join(self.process.node_dir, "private", "ssh_client_rsa_key") + accounts_path = join(self.process.node_dir, "private", "accounts") + with open(join(self.process.node_dir, "tahoe.cfg"), "a") as f: + f.write( + ("\n\n[sftpd]\n" + "enabled = true\n" + "port = tcp:8022:interface=127.0.0.1\n" + "host_pubkey_file = {ssh_key_path}.pub\n" + "host_privkey_file = {ssh_key_path}\n" + "accounts.file = {accounts_path}\n").format( + ssh_key_path=host_ssh_key_path, + accounts_path=accounts_path, + ) + ) + generate_ssh_key(host_ssh_key_path) + + # 3. Add a SFTP access file with an SSH key for auth. + generate_ssh_key(sftp_client_key_path) + # Pub key format is "ssh-rsa ". We want the key. + with open(sftp_client_key_path + ".pub") as pubkey_file: + ssh_public_key = pubkey_file.read().strip().split()[1] + with open(accounts_path, "w") as f: + f.write( + "alice-key ssh-rsa {ssh_public_key} {rwcap}\n".format( + rwcap=rwcap, + ssh_public_key=ssh_public_key, + ) + ) + + # 4. Restart the node with new SFTP config. + print("restarting for SFTP") + yield self.restart(reactor, request) + print("restart done") + # XXX i think this is broken because we're "waiting for ready" during first bootstrap? or something? 
+
+
+@inlineCallbacks
+def create_client(reactor, request, temp_dir, introducer, flog_gatherer, name, web_port,
+                  needed=2, happy=3, total=4):
+    """
+    Create a new client
+    """
+    from .util import _create_node
+    node_process = yield _create_node(
+        reactor, request, temp_dir, introducer.furl, flog_gatherer,
+        name, web_port, storage=False, needed=needed, happy=happy, total=total,
+    )
+    returnValue(
+        Client(
+            process=node_process,
+            protocol=node_process.transport.proto,
+            request=request,
+        )
+    )
+
+
+@attr.s
+class Introducer(object):
+    """
+    Represents a running introducer
+    """
+
+    process = attr.ib(
+        validator=attr.validators.instance_of(TahoeProcess)
+    )
+    protocol = attr.ib(
+        validator=provides(IProcessProtocol)
+    )
+    furl = attr.ib()
+
+
+def _validate_furl(furl_fname):
+    """
+    Opens and validates a fURL, ensuring location hints.
+    :returns: the furl
+    :raises: ValueError if no location hints
+    """
+    while not exists(furl_fname):
+        print("Don't see {} yet".format(furl_fname))
+        sleep(.1)
+    furl = open(furl_fname, 'r').read()
+    tubID, location_hints, name = decode_furl(furl)
+    if not location_hints:
+        # If there are no location hints then nothing can ever possibly
+        # connect to it and the only thing that can happen next is something
+        # will hang or time out. So just give up right now.
+        raise ValueError(
+            "Introducer ({!r}) fURL has no location hints!".format(
+                furl,
+            ),
+        )
+    return furl
+
+
+@inlineCallbacks
+@log_call(
+    action_type=u"integration:introducer",
+    include_args=["temp_dir", "flog_gatherer"],
+    include_result=False,
+)
+def create_introducer(reactor, request, temp_dir, flog_gatherer, port):
+    """
+    Run a new Introducer and return an Introducer instance.
+ """ + intro_dir = join(temp_dir, 'introducer{}'.format(port)) + + if not exists(intro_dir): + mkdir(intro_dir) + done_proto = _ProcessExitedProtocol() + _tahoe_runner_optional_coverage( + done_proto, + reactor, + request, + ( + 'create-introducer', + '--listen=tcp', + '--hostname=localhost', + intro_dir, + ), + ) + yield done_proto.done + + config = read_config(intro_dir, "tub.port") + config.set_config("node", "nickname", f"introducer-{port}") + config.set_config("node", "web.port", f"{port}") + config.set_config("node", "log_gatherer.furl", flog_gatherer.furl) + + # on windows, "tahoe start" means: run forever in the foreground, + # but on linux it means daemonize. "tahoe run" is consistent + # between platforms. + protocol = _MagicTextProtocol('introducer running', "introducer") + transport = _tahoe_runner_optional_coverage( + protocol, + reactor, + request, + ( + 'run', + intro_dir, + ), + ) + + def clean(): + return _cleanup_tahoe_process(transport, protocol.exited) + request.addfinalizer(clean) + + yield protocol.magic_seen + + furl_fname = join(intro_dir, 'private', 'introducer.furl') + while not exists(furl_fname): + print("Don't see {} yet".format(furl_fname)) + yield deferLater(reactor, .1, lambda: None) + furl = _validate_furl(furl_fname) + + returnValue( + Introducer( + process=TahoeProcess(transport, intro_dir), + protocol=protocol, + furl=furl, + ) + ) + + +@attr.s +class Grid(object): + """ + Represents an entire Tahoe Grid setup + + A Grid includes an Introducer, Flog Gatherer and some number of + Storage Servers. Optionally includes Clients. 
+ """ + + _reactor = attr.ib() + _request = attr.ib() + _temp_dir = attr.ib() + _port_allocator = attr.ib() + introducer = attr.ib() + flog_gatherer = attr.ib() + storage_servers = attr.ib(factory=list) + clients = attr.ib(factory=dict) + + @storage_servers.validator + def check(self, attribute, value): + for server in value: + if not isinstance(server, StorageServer): + raise ValueError( + "storage_servers must be StorageServer" + ) + + @inlineCallbacks + def add_storage_node(self): + """ + Creates a new storage node, returns a StorageServer instance + (which will already be added to our .storage_servers list) + """ + port = yield self._port_allocator() + print("make {}".format(port)) + name = 'node{}'.format(port) + web_port = 'tcp:{}:interface=localhost'.format(port) + server = yield create_storage_server( + self._reactor, + self._request, + self._temp_dir, + self.introducer, + self.flog_gatherer, + name, + web_port, + ) + self.storage_servers.append(server) + returnValue(server) + + @inlineCallbacks + def add_client(self, name, needed=2, happy=3, total=4): + """ + Create a new client node + """ + port = yield self._port_allocator() + web_port = 'tcp:{}:interface=localhost'.format(port) + client = yield create_client( + self._reactor, + self._request, + self._temp_dir, + self.introducer, + self.flog_gatherer, + name, + web_port, + needed=needed, + happy=happy, + total=total, + ) + self.clients[name] = client + yield await_client_ready(client.process) + returnValue(client) + + +# A grid is now forever tied to its original 'request' which is where +# it must hang finalizers off of. The "main" one is a session-level +# fixture so it'll live the life of the tests but it could be +# per-function Grid too. +@inlineCallbacks +def create_grid(reactor, request, temp_dir, flog_gatherer, port_allocator): + """ + Create a new grid. This will have one Introducer but zero + storage-servers or clients; those must be added by a test or + subsequent fixtures. 
+ """ + intro_port = yield port_allocator() + introducer = yield create_introducer(reactor, request, temp_dir, flog_gatherer, intro_port) + grid = Grid( + reactor, + request, + temp_dir, + port_allocator, + introducer, + flog_gatherer, + ) + returnValue(grid) diff --git a/integration/install-tor.sh b/integration/install-tor.sh deleted file mode 100755 index 66fa64cb1..000000000 --- a/integration/install-tor.sh +++ /dev/null @@ -1,794 +0,0 @@ -#!/bin/bash - -# https://vaneyckt.io/posts/safer_bash_scripts_with_set_euxo_pipefail/ -set -euxo pipefail - -CODENAME=$(lsb_release --short --codename) - -if [ "$(id -u)" != "0" ]; then - SUDO="sudo" -else - SUDO="" -fi - -# Script to install Tor -echo "deb http://deb.torproject.org/torproject.org ${CODENAME} main" | ${SUDO} tee -a /etc/apt/sources.list -echo "deb-src http://deb.torproject.org/torproject.org ${CODENAME} main" | ${SUDO} tee -a /etc/apt/sources.list - -# # Install Tor repo signing key -${SUDO} apt-key add - < 2 * 1024 * 1024 + with tempfile.open("wb") as f: + f.write(large_data) + + def set_segment_size(segment_size): + return blockingCallFromThread( + reactor, + lambda: alice.reconfigure_zfec( + reactor, + (1, 1, 1), + None, + max_segment_size=segment_size + ) + ) + + # 1. Upload file 1 with default segment size set to 1MB + set_segment_size(1024 * 1024) + cli(alice.process, "put", str(tempfile), "getput:seg1024kb") + + # 2. Download file 1 with default segment size set to 128KB + set_segment_size(128 * 1024) + assert large_data == check_output( + ["tahoe", "--node-directory", alice.process.node_dir, "get", "getput:seg1024kb", "-"] + ) + + # 3. Upload file 2 with default segment size set to 128KB + cli(alice.process, "put", str(tempfile), "getput:seg128kb") + + # 4. 
Download file 2 with default segment size set to 1MB + set_segment_size(1024 * 1024) + assert large_data == check_output( + ["tahoe", "--node-directory", alice.process.node_dir, "get", "getput:seg128kb", "-"] + ) diff --git a/integration/test_grid_manager.py b/integration/test_grid_manager.py new file mode 100644 index 000000000..437fe7455 --- /dev/null +++ b/integration/test_grid_manager.py @@ -0,0 +1,351 @@ +import sys +import json +from os.path import join + +from cryptography.hazmat.primitives.serialization import ( + Encoding, + PublicFormat, +) + +from twisted.internet.utils import ( + getProcessOutputAndValue, +) +from twisted.internet.defer import ( + inlineCallbacks, + returnValue, +) + +from allmydata.crypto import ed25519 +from allmydata.util import base32 +from allmydata.util import configutil + +from . import util +from .grid import ( + create_grid, +) + +import pytest_twisted + + +@inlineCallbacks +def _run_gm(reactor, request, *args, **kwargs): + """ + Run the grid-manager process, passing all arguments as extra CLI + args. + + :returns: all process output + """ + if request.config.getoption('coverage'): + base_args = ("-b", "-m", "coverage", "run", "-m", "allmydata.cli.grid_manager") + else: + base_args = ("-m", "allmydata.cli.grid_manager") + + output, errput, exit_code = yield getProcessOutputAndValue( + sys.executable, + base_args + args, + reactor=reactor, + **kwargs + ) + if exit_code != 0: + raise util.ProcessFailed( + RuntimeError("Exit code {}".format(exit_code)), + output + errput, + ) + returnValue(output) + + +@pytest_twisted.inlineCallbacks +def test_create_certificate(reactor, request): + """ + The Grid Manager produces a valid, correctly-signed certificate. 
+ """ + gm_config = yield _run_gm(reactor, request, "--config", "-", "create") + privkey_bytes = json.loads(gm_config)['private_key'].encode('ascii') + privkey, pubkey = ed25519.signing_keypair_from_string(privkey_bytes) + + # Note that zara + her key here are arbitrary and don't match any + # "actual" clients in the test-grid; we're just checking that the + # Grid Manager signs this properly. + gm_config = yield _run_gm( + reactor, request, "--config", "-", "add", + "zara", "pub-v0-kzug3ut2m7ziihf3ndpqlquuxeie4foyl36wn54myqc4wmiwe4ga", + stdinBytes=gm_config, + ) + zara_cert_bytes = yield _run_gm( + reactor, request, "--config", "-", "sign", "zara", "1", + stdinBytes=gm_config, + ) + zara_cert = json.loads(zara_cert_bytes) + + # confirm that zara's certificate is made by the Grid Manager + # (.verify returns None on success, raises exception on error) + pubkey.verify( + base32.a2b(zara_cert['signature'].encode('ascii')), + zara_cert['certificate'].encode('ascii'), + ) + + +@pytest_twisted.inlineCallbacks +def test_remove_client(reactor, request): + """ + A Grid Manager can add and successfully remove a client + """ + gm_config = yield _run_gm( + reactor, request, "--config", "-", "create", + ) + + gm_config = yield _run_gm( + reactor, request, "--config", "-", "add", + "zara", "pub-v0-kzug3ut2m7ziihf3ndpqlquuxeie4foyl36wn54myqc4wmiwe4ga", + stdinBytes=gm_config, + ) + gm_config = yield _run_gm( + reactor, request, "--config", "-", "add", + "yakov", "pub-v0-kvxhb3nexybmipkrar2ztfrwp4uxxsmrjzkpzafit3ket4u5yldq", + stdinBytes=gm_config, + ) + assert "zara" in json.loads(gm_config)['storage_servers'] + assert "yakov" in json.loads(gm_config)['storage_servers'] + + gm_config = yield _run_gm( + reactor, request, "--config", "-", "remove", + "zara", + stdinBytes=gm_config, + ) + assert "zara" not in json.loads(gm_config)['storage_servers'] + assert "yakov" in json.loads(gm_config)['storage_servers'] + + +@pytest_twisted.inlineCallbacks +def 
test_remove_last_client(reactor, request):
+    """
+    A Grid Manager can remove all clients
+    """
+    gm_config = yield _run_gm(
+        reactor, request, "--config", "-", "create",
+    )
+
+    gm_config = yield _run_gm(
+        reactor, request, "--config", "-", "add",
+        "zara", "pub-v0-kzug3ut2m7ziihf3ndpqlquuxeie4foyl36wn54myqc4wmiwe4ga",
+        stdinBytes=gm_config,
+    )
+    assert "zara" in json.loads(gm_config)['storage_servers']
+
+    gm_config = yield _run_gm(
+        reactor, request, "--config", "-", "remove",
+        "zara",
+        stdinBytes=gm_config,
+    )
+    # there are no storage servers left at all now
+    assert "storage_servers" not in json.loads(gm_config)
+
+
+@pytest_twisted.inlineCallbacks
+def test_add_remove_client_file(reactor, request, temp_dir):
+    """
+    A Grid Manager can add and successfully remove a client (when
+    keeping data on disk)
+    """
+    gmconfig = join(temp_dir, "gmtest")
+    gmconfig_file = join(temp_dir, "gmtest", "config.json")
+    yield _run_gm(
+        reactor, request, "--config", gmconfig, "create",
+    )
+
+    yield _run_gm(
+        reactor, request, "--config", gmconfig, "add",
+        "zara", "pub-v0-kzug3ut2m7ziihf3ndpqlquuxeie4foyl36wn54myqc4wmiwe4ga",
+    )
+    yield _run_gm(
+        reactor, request, "--config", gmconfig, "add",
+        "yakov", "pub-v0-kvxhb3nexybmipkrar2ztfrwp4uxxsmrjzkpzafit3ket4u5yldq",
+    )
+    assert "zara" in json.load(open(gmconfig_file, "r"))['storage_servers']
+    assert "yakov" in json.load(open(gmconfig_file, "r"))['storage_servers']
+
+    yield _run_gm(
+        reactor, request, "--config", gmconfig, "remove",
+        "zara",
+    )
+    assert "zara" not in json.load(open(gmconfig_file, "r"))['storage_servers']
+    assert "yakov" in json.load(open(gmconfig_file, "r"))['storage_servers']
+
+
+@pytest_twisted.inlineCallbacks
+def _test_reject_storage_server(reactor, request, temp_dir, flog_gatherer, port_allocator):
+    """
+    A client with happiness=2 fails to upload to a Grid when it is
+    using Grid Manager and there is only 1 storage server with a valid
+    certificate.
+ """ + grid = yield create_grid(reactor, request, temp_dir, flog_gatherer, port_allocator) + storage0 = yield grid.add_storage_node() + _ = yield grid.add_storage_node() + + gm_config = yield _run_gm( + reactor, request, "--config", "-", "create", + ) + gm_privkey_bytes = json.loads(gm_config)['private_key'].encode('ascii') + gm_privkey, gm_pubkey = ed25519.signing_keypair_from_string(gm_privkey_bytes) + + # create certificate for the first storage-server + pubkey_fname = join(storage0.process.node_dir, "node.pubkey") + with open(pubkey_fname, 'r') as f: + pubkey_str = f.read().strip() + + gm_config = yield _run_gm( + reactor, request, "--config", "-", "add", + "storage0", pubkey_str, + stdinBytes=gm_config, + ) + assert json.loads(gm_config)['storage_servers'].keys() == {'storage0'} + + print("inserting certificate") + cert = yield _run_gm( + reactor, request, "--config", "-", "sign", "storage0", "1", + stdinBytes=gm_config, + ) + print(cert) + + yield util.run_tahoe( + reactor, request, "--node-directory", storage0.process.node_dir, + "admin", "add-grid-manager-cert", + "--name", "default", + "--filename", "-", + stdin=cert, + ) + + # re-start this storage server + yield storage0.restart(reactor, request) + + # now only one storage-server has the certificate .. 
configure + # diana to have the grid-manager certificate + + diana = yield grid.add_client("diana", needed=2, happy=2, total=2) + + config = configutil.get_config(join(diana.process.node_dir, "tahoe.cfg")) + config.add_section("grid_managers") + config.set("grid_managers", "test", str(ed25519.string_from_verifying_key(gm_pubkey), "ascii")) + with open(join(diana.process.node_dir, "tahoe.cfg"), "w") as f: + config.write(f) + + yield diana.restart(reactor, request, servers=2) + + # try to put something into the grid, which should fail (because + # diana has happy=2 but should only find storage0 to be acceptable + # to upload to) + + try: + yield util.run_tahoe( + reactor, request, "--node-directory", diana.process.node_dir, + "put", "-", + stdin=b"some content\n" * 200, + ) + assert False, "Should get a failure" + except util.ProcessFailed as e: + if b'UploadUnhappinessError' in e.output: + # We're done! We've succeeded. + return + + assert False, "Failed to see one of out of two servers" + + +@pytest_twisted.inlineCallbacks +def _test_accept_storage_server(reactor, request, temp_dir, flog_gatherer, port_allocator): + """ + Successfully upload to a Grid Manager enabled Grid. 
+ """ + grid = yield create_grid(reactor, request, temp_dir, flog_gatherer, port_allocator) + happy0 = yield grid.add_storage_node() + happy1 = yield grid.add_storage_node() + + gm_config = yield _run_gm( + reactor, request, "--config", "-", "create", + ) + gm_privkey_bytes = json.loads(gm_config)['private_key'].encode('ascii') + gm_privkey, gm_pubkey = ed25519.signing_keypair_from_string(gm_privkey_bytes) + + # create certificates for all storage-servers + servers = ( + ("happy0", happy0), + ("happy1", happy1), + ) + for st_name, st in servers: + pubkey_fname = join(st.process.node_dir, "node.pubkey") + with open(pubkey_fname, 'r') as f: + pubkey_str = f.read().strip() + + gm_config = yield _run_gm( + reactor, request, "--config", "-", "add", + st_name, pubkey_str, + stdinBytes=gm_config, + ) + assert json.loads(gm_config)['storage_servers'].keys() == {'happy0', 'happy1'} + + # add the certificates from the grid-manager to the storage servers + print("inserting storage-server certificates") + for st_name, st in servers: + cert = yield _run_gm( + reactor, request, "--config", "-", "sign", st_name, "1", + stdinBytes=gm_config, + ) + + yield util.run_tahoe( + reactor, request, "--node-directory", st.process.node_dir, + "admin", "add-grid-manager-cert", + "--name", "default", + "--filename", "-", + stdin=cert, + ) + + # re-start the storage servers + yield happy0.restart(reactor, request) + yield happy1.restart(reactor, request) + + # configure freya (a client) to have the grid-manager certificate + freya = yield grid.add_client("freya", needed=2, happy=2, total=2) + + config = configutil.get_config(join(freya.process.node_dir, "tahoe.cfg")) + config.add_section("grid_managers") + config.set("grid_managers", "test", str(ed25519.string_from_verifying_key(gm_pubkey), "ascii")) + with open(join(freya.process.node_dir, "tahoe.cfg"), "w") as f: + config.write(f) + + yield freya.restart(reactor, request, servers=2) + + # confirm that Freya will upload to the 
GridManager-enabled Grid + yield util.run_tahoe( + reactor, request, "--node-directory", freya.process.node_dir, + "put", "-", + stdin=b"some content\n" * 200, + ) + + +@pytest_twisted.inlineCallbacks +def test_identity(reactor, request, temp_dir): + """ + Dump public key to CLI + """ + gm_config = join(temp_dir, "test_identity") + yield _run_gm( + reactor, request, "--config", gm_config, "create", + ) + + # ask the CLI for the grid-manager pubkey + pubkey = yield _run_gm( + reactor, request, "--config", gm_config, "public-identity", + ) + alleged_pubkey = ed25519.verifying_key_from_string(pubkey.strip()) + + # load the grid-manager pubkey "ourselves" + with open(join(gm_config, "config.json"), "r") as f: + real_config = json.load(f) + real_privkey, real_pubkey = ed25519.signing_keypair_from_string( + real_config["private_key"].encode("ascii"), + ) + + # confirm the CLI told us the correct thing + alleged_bytes = alleged_pubkey.public_bytes(Encoding.Raw, PublicFormat.Raw) + real_bytes = real_pubkey.public_bytes(Encoding.Raw, PublicFormat.Raw) + assert alleged_bytes == real_bytes, "Keys don't match" diff --git a/integration/test_i2p.py b/integration/test_i2p.py index f0b06f1e2..c99c469fa 100644 --- a/integration/test_i2p.py +++ b/integration/test_i2p.py @@ -2,26 +2,11 @@ Integration tests for I2P support. """ -from __future__ import unicode_literals -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - import sys from os.path import join, exists -from os import mkdir +from os import mkdir, environ from time import sleep - -if PY2: - def which(path): - # This will result in skipping I2P tests on Python 2. Oh well. 
- return None -else: - from shutil import which +from shutil import which from eliot import log_call @@ -38,6 +23,9 @@ from twisted.internet.error import ProcessExitedAlready from allmydata.test.common import ( write_introducer, ) +from allmydata.node import read_config +from allmydata.util.iputil import allocate_tcp_port + if which("docker") is None: pytest.skip('Skipping I2P tests since Docker is unavailable', allow_module_level=True) @@ -50,20 +38,24 @@ if sys.platform.startswith('win'): @pytest.fixture def i2p_network(reactor, temp_dir, request): """Fixture to start up local i2pd.""" - proto = util._MagicTextProtocol("ephemeral keys") + proto = util._MagicTextProtocol("ephemeral keys", "i2pd") reactor.spawnProcess( proto, which("docker"), ( - "docker", "run", "-p", "7656:7656", "purplei2p/i2pd", + "docker", "run", "-p", "7656:7656", "purplei2p/i2pd:release-2.45.1", # Bad URL for reseeds, so it can't talk to other routers. "--reseed.urls", "http://localhost:1/", + # Make sure we see the "ephemeral keys message" + "--log=stdout", + "--loglevel=info" ), + env=environ, ) def cleanup(): try: - proto.transport.signalProcess("KILL") + proto.transport.signalProcess("INT") util.block_with_timeout(proto.exited, reactor) except ProcessExitedAlready: pass @@ -79,13 +71,6 @@ def i2p_network(reactor, temp_dir, request): include_result=False, ) def i2p_introducer(reactor, temp_dir, flog_gatherer, request): - config = ''' -[node] -nickname = introducer_i2p -web.port = 4561 -log_gatherer.furl = {log_furl} -'''.format(log_furl=flog_gatherer) - intro_dir = join(temp_dir, 'introducer_i2p') print("making introducer", intro_dir) @@ -105,12 +90,14 @@ log_gatherer.furl = {log_furl} pytest_twisted.blockon(done_proto.done) # over-write the config file with our stuff - with open(join(intro_dir, 'tahoe.cfg'), 'w') as f: - f.write(config) + config = read_config(intro_dir, "tub.port") + config.set_config("node", "nickname", "introducer_i2p") + config.set_config("node", "web.port", "4563") + 
config.set_config("node", "log_gatherer.furl", flog_gatherer) # "tahoe run" is consistent across Linux/macOS/Windows, unlike the old # "start" command. - protocol = util._MagicTextProtocol('introducer running') + protocol = util._MagicTextProtocol('introducer running', "introducer") transport = util._tahoe_runner_optional_coverage( protocol, reactor, @@ -144,9 +131,12 @@ def i2p_introducer_furl(i2p_introducer, temp_dir): @pytest_twisted.inlineCallbacks +@pytest.mark.skip("I2P tests are not functioning at all, for unknown reasons") def test_i2p_service_storage(reactor, request, temp_dir, flog_gatherer, i2p_network, i2p_introducer_furl): - yield _create_anonymous_node(reactor, 'carol_i2p', 8008, request, temp_dir, flog_gatherer, i2p_network, i2p_introducer_furl) - yield _create_anonymous_node(reactor, 'dave_i2p', 8009, request, temp_dir, flog_gatherer, i2p_network, i2p_introducer_furl) + web_port0 = allocate_tcp_port() + web_port1 = allocate_tcp_port() + yield _create_anonymous_node(reactor, 'carol_i2p', web_port0, request, temp_dir, flog_gatherer, i2p_network, i2p_introducer_furl) + yield _create_anonymous_node(reactor, 'dave_i2p', web_port1, request, temp_dir, flog_gatherer, i2p_network, i2p_introducer_furl) # ensure both nodes are connected to "a grid" by uploading # something via carol, and retrieve it using dave. 
gold_path = join(temp_dir, "gold") @@ -167,7 +157,8 @@ def test_i2p_service_storage(reactor, request, temp_dir, flog_gatherer, i2p_netw sys.executable, '-b', '-m', 'allmydata.scripts.runner', '-d', join(temp_dir, 'carol_i2p'), 'put', gold_path, - ) + ), + env=environ, ) yield proto.done cap = proto.output.getvalue().strip().split()[-1] @@ -181,7 +172,8 @@ def test_i2p_service_storage(reactor, request, temp_dir, flog_gatherer, i2p_netw sys.executable, '-b', '-m', 'allmydata.scripts.runner', '-d', join(temp_dir, 'dave_i2p'), 'get', cap, - ) + ), + env=environ, ) yield proto.done @@ -190,9 +182,8 @@ def test_i2p_service_storage(reactor, request, temp_dir, flog_gatherer, i2p_netw @pytest_twisted.inlineCallbacks -def _create_anonymous_node(reactor, name, control_port, request, temp_dir, flog_gatherer, i2p_network, introducer_furl): +def _create_anonymous_node(reactor, name, web_port, request, temp_dir, flog_gatherer, i2p_network, introducer_furl): node_dir = FilePath(temp_dir).child(name) - web_port = "tcp:{}:interface=localhost".format(control_port + 2000) print("creating", node_dir.path) node_dir.makedirs() @@ -208,7 +199,8 @@ def _create_anonymous_node(reactor, name, control_port, request, temp_dir, flog_ '--hide-ip', '--listen', 'i2p', node_dir.path, - ) + ), + env=environ, ) yield proto.done diff --git a/integration/test_servers_of_happiness.py b/integration/test_servers_of_happiness.py index b9de0c075..8f64696a8 100644 --- a/integration/test_servers_of_happiness.py +++ b/integration/test_servers_of_happiness.py @@ -1,19 +1,10 @@ """ Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 import sys from os.path import join - -from twisted.internet.error import ProcessTerminated +from os import environ from . import util @@ -31,7 +22,7 @@ def test_upload_immutable(reactor, temp_dir, introducer_furl, flog_gatherer, sto happy=7, total=10, ) - util.await_client_ready(edna) + yield util.await_client_ready(edna) node_dir = join(temp_dir, 'edna') @@ -45,13 +36,14 @@ def test_upload_immutable(reactor, temp_dir, introducer_furl, flog_gatherer, sto sys.executable, '-b', '-m', 'allmydata.scripts.runner', '-d', node_dir, 'put', __file__, - ] + ], + env=environ, ) try: yield proto.done assert False, "should raise exception" - except Exception as e: - assert isinstance(e, ProcessTerminated) + except util.ProcessFailed as e: + assert b"UploadUnhappinessError" in e.output output = proto.output.getvalue() assert b"shares could be placed on only" in output diff --git a/integration/test_sftp.py b/integration/test_sftp.py index 6171c7413..8202245ce 100644 --- a/integration/test_sftp.py +++ b/integration/test_sftp.py @@ -10,15 +10,7 @@ These tests use Paramiko, rather than Twisted's Conch, because: 2. Its API is much simpler to use. 
""" -from __future__ import unicode_literals -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - +import os.path from posixpath import join from stat import S_ISDIR @@ -33,7 +25,7 @@ import pytest from .util import generate_ssh_key, run_in_thread -def connect_sftp(connect_args={"username": "alice", "password": "password"}): +def connect_sftp(connect_args): """Create an SFTP client.""" client = SSHClient() client.set_missing_host_key_policy(AutoAddPolicy) @@ -60,24 +52,24 @@ def connect_sftp(connect_args={"username": "alice", "password": "password"}): @run_in_thread def test_bad_account_password_ssh_key(alice, tmpdir): """ - Can't login with unknown username, wrong password, or wrong SSH pub key. + Can't login with unknown username, any password, or wrong SSH pub key. 
""" - # Wrong password, wrong username: - for u, p in [("alice", "wrong"), ("someuser", "password")]: + # Any password, wrong username: + for u, p in [("alice-key", "wrong"), ("someuser", "password")]: with pytest.raises(AuthenticationException): connect_sftp(connect_args={ "username": u, "password": p, }) - another_key = join(str(tmpdir), "ssh_key") + another_key = os.path.join(str(tmpdir), "ssh_key") generate_ssh_key(another_key) - good_key = RSAKey(filename=join(alice.node_dir, "private", "ssh_client_rsa_key")) + good_key = RSAKey(filename=os.path.join(alice.process.node_dir, "private", "ssh_client_rsa_key")) bad_key = RSAKey(filename=another_key) # Wrong key: with pytest.raises(AuthenticationException): connect_sftp(connect_args={ - "username": "alice2", "pkey": bad_key, + "username": "alice-key", "pkey": bad_key, }) # Wrong username: @@ -87,12 +79,22 @@ def test_bad_account_password_ssh_key(alice, tmpdir): }) +def sftp_client_key(client): + """ + :return RSAKey: the RSA client key associated with this grid.Client + """ + # XXX move to Client / grid.py? 
+ return RSAKey( + filename=os.path.join(client.process.node_dir, "private", "ssh_client_rsa_key"), + ) + + @run_in_thread def test_ssh_key_auth(alice): """It's possible to login authenticating with SSH public key.""" - key = RSAKey(filename=join(alice.node_dir, "private", "ssh_client_rsa_key")) + key = sftp_client_key(alice) sftp = connect_sftp(connect_args={ - "username": "alice2", "pkey": key + "username": "alice-key", "pkey": key }) assert sftp.listdir() == [] @@ -100,7 +102,10 @@ def test_ssh_key_auth(alice): @run_in_thread def test_read_write_files(alice): """It's possible to upload and download files.""" - sftp = connect_sftp() + sftp = connect_sftp(connect_args={ + "username": "alice-key", + "pkey": sftp_client_key(alice), + }) with sftp.file("myfile", "wb") as f: f.write(b"abc") f.write(b"def") @@ -117,7 +122,10 @@ def test_directories(alice): It's possible to create, list directories, and create and remove files in them. """ - sftp = connect_sftp() + sftp = connect_sftp(connect_args={ + "username": "alice-key", + "pkey": sftp_client_key(alice), + }) assert sftp.listdir() == [] sftp.mkdir("childdir") @@ -148,7 +156,10 @@ def test_directories(alice): @run_in_thread def test_rename(alice): """Directories and files can be renamed.""" - sftp = connect_sftp() + sftp = connect_sftp(connect_args={ + "username": "alice-key", + "pkey": sftp_client_key(alice), + }) sftp.mkdir("dir") filepath = join("dir", "file") diff --git a/integration/test_streaming_logs.py b/integration/test_streaming_logs.py index 036d30715..efdb23df8 100644 --- a/integration/test_streaming_logs.py +++ b/integration/test_streaming_logs.py @@ -1,16 +1,6 @@ """ Ported to Python 3. 
""" -from __future__ import ( - print_function, - unicode_literals, - absolute_import, - division, -) - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 from six import ensure_text diff --git a/integration/test_tor.py b/integration/test_tor.py index 15d888e36..d114b763a 100644 --- a/integration/test_tor.py +++ b/integration/test_tor.py @@ -1,17 +1,10 @@ """ Ported to Python 3. """ -from __future__ import unicode_literals -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 import sys from os.path import join +from os import environ import pytest import pytest_twisted @@ -25,6 +18,8 @@ from twisted.python.filepath import ( from allmydata.test.common import ( write_introducer, ) +from allmydata.client import read_config +from allmydata.util.deferredutil import async_to_deferred # see "conftest.py" for the fixtures (e.g. 
"tor_network") @@ -35,12 +30,29 @@ from allmydata.test.common import ( if sys.platform.startswith('win'): pytest.skip('Skipping Tor tests on Windows', allow_module_level=True) +@pytest.mark.skipif(sys.version_info[:2] > (3, 11), reason='Chutney still does not support 3.12') @pytest_twisted.inlineCallbacks def test_onion_service_storage(reactor, request, temp_dir, flog_gatherer, tor_network, tor_introducer_furl): - yield _create_anonymous_node(reactor, 'carol', 8008, request, temp_dir, flog_gatherer, tor_network, tor_introducer_furl) - yield _create_anonymous_node(reactor, 'dave', 8009, request, temp_dir, flog_gatherer, tor_network, tor_introducer_furl) - # ensure both nodes are connected to "a grid" by uploading - # something via carol, and retrieve it using dave. + """ + Two nodes and an introducer all configured to use Tahoe. + + The two nodes can talk to the introducer and each other: we upload to one + node, read from the other. + """ + carol = yield _create_anonymous_node(reactor, 'carol', 8100, request, temp_dir, flog_gatherer, tor_network, tor_introducer_furl, 2) + dave = yield _create_anonymous_node(reactor, 'dave', 8101, request, temp_dir, flog_gatherer, tor_network, tor_introducer_furl, 2) + yield util.await_client_ready(carol, minimum_number_of_servers=2, timeout=600) + yield util.await_client_ready(dave, minimum_number_of_servers=2, timeout=600) + yield upload_to_one_download_from_the_other(reactor, temp_dir, carol, dave) + + +@async_to_deferred +async def upload_to_one_download_from_the_other(reactor, temp_dir, upload_to: util.TahoeProcess, download_from: util.TahoeProcess): + """ + Ensure both nodes are connected to "a grid" by uploading something via one + node, and retrieve it using the other. 
+ """ + gold_path = join(temp_dir, "gold") with open(gold_path, "w") as f: f.write( @@ -57,13 +69,14 @@ def test_onion_service_storage(reactor, request, temp_dir, flog_gatherer, tor_ne sys.executable, ( sys.executable, '-b', '-m', 'allmydata.scripts.runner', - '-d', join(temp_dir, 'carol'), + '-d', upload_to.node_dir, 'put', gold_path, - ) + ), + env=environ, ) - yield proto.done + await proto.done cap = proto.output.getvalue().strip().split()[-1] - print("TEH CAP!", cap) + print("capability: {}".format(cap)) proto = util._CollectOutputProtocol(capture_stderr=False) reactor.spawnProcess( @@ -71,74 +84,83 @@ def test_onion_service_storage(reactor, request, temp_dir, flog_gatherer, tor_ne sys.executable, ( sys.executable, '-b', '-m', 'allmydata.scripts.runner', - '-d', join(temp_dir, 'dave'), + '-d', download_from.node_dir, 'get', cap, - ) + ), + env=environ, ) - yield proto.done - - dave_got = proto.output.getvalue().strip() - assert dave_got == open(gold_path, 'rb').read().strip() + await proto.done + download_got = proto.output.getvalue().strip() + assert download_got == open(gold_path, 'rb').read().strip() @pytest_twisted.inlineCallbacks -def _create_anonymous_node(reactor, name, control_port, request, temp_dir, flog_gatherer, tor_network, introducer_furl): +def _create_anonymous_node(reactor, name, web_port, request, temp_dir, flog_gatherer, tor_network, introducer_furl, shares_total: int) -> util.TahoeProcess: node_dir = FilePath(temp_dir).child(name) - web_port = "tcp:{}:interface=localhost".format(control_port + 2000) - - if True: - print("creating", node_dir.path) - node_dir.makedirs() - proto = util._DumpOutputProtocol(None) - reactor.spawnProcess( - proto, - sys.executable, - ( - sys.executable, '-b', '-m', 'allmydata.scripts.runner', - 'create-node', - '--nickname', name, - '--introducer', introducer_furl, - '--hide-ip', - '--tor-control-port', 'tcp:localhost:{}'.format(control_port), - '--listen', 'tor', - node_dir.path, - ) + if node_dir.exists(): + 
raise RuntimeError( + "A node already exists in '{}'".format(node_dir) ) - yield proto.done + print(f"creating {node_dir.path} with introducer {introducer_furl}") + node_dir.makedirs() + proto = util._DumpOutputProtocol(None) + reactor.spawnProcess( + proto, + sys.executable, + ( + sys.executable, '-b', '-m', 'allmydata.scripts.runner', + 'create-node', + '--nickname', name, + '--webport', str(web_port), + '--introducer', introducer_furl, + '--hide-ip', + '--tor-control-port', tor_network.client_control_endpoint, + '--listen', 'tor', + '--shares-needed', '1', + '--shares-happy', '1', + '--shares-total', str(shares_total), + node_dir.path, + ), + env=environ, + ) + yield proto.done # Which services should this client connect to? write_introducer(node_dir, "default", introducer_furl) - with node_dir.child('tahoe.cfg').open('w') as f: - node_config = ''' -[node] -nickname = %(name)s -web.port = %(web_port)s -web.static = public_html -log_gatherer.furl = %(log_furl)s + util.basic_node_configuration(request, flog_gatherer.furl, node_dir.path) -[tor] -control.port = tcp:localhost:%(control_port)d -onion.external_port = 3457 -onion.local_port = %(local_port)d -onion = true -onion.private_key_file = private/tor_onion.privkey - -[client] -shares.needed = 1 -shares.happy = 1 -shares.total = 2 - -''' % { - 'name': name, - 'web_port': web_port, - 'log_furl': flog_gatherer, - 'control_port': control_port, - 'local_port': control_port + 1000, -} - node_config = node_config.encode("utf-8") - f.write(node_config) + config = read_config(node_dir.path, "tub.port") + config.set_config("tor", "onion", "true") + config.set_config("tor", "onion.external_port", "3457") + config.set_config("tor", "control.port", tor_network.client_control_endpoint) + config.set_config("tor", "onion.private_key_file", "private/tor_onion.privkey") print("running") - yield util._run_node(reactor, node_dir.path, request, None) + result = yield util._run_node(reactor, node_dir.path, request, None) print("okay, 
launched") + return result + +@pytest.mark.skipif(sys.version_info[:2] > (3, 11), reason='Chutney still does not support 3.12') +@pytest.mark.skipif(sys.platform.startswith('darwin'), reason='This test has issues on macOS') +@pytest_twisted.inlineCallbacks +def test_anonymous_client(reactor, request, temp_dir, flog_gatherer, tor_network, introducer_furl): + """ + A normal node (normie) and a normal introducer are configured, and one node + (anonymoose) which is configured to be anonymous by talking via Tor. + + Anonymoose should be able to communicate with normie. + + TODO how to ensure that anonymoose is actually using Tor? + """ + normie = yield util._create_node( + reactor, request, temp_dir, introducer_furl, flog_gatherer, "normie", + web_port="tcp:9989:interface=localhost", + storage=True, needed=1, happy=1, total=1, + ) + yield util.await_client_ready(normie) + + anonymoose = yield _create_anonymous_node(reactor, 'anonymoose', 8102, request, temp_dir, flog_gatherer, tor_network, introducer_furl, 1) + yield util.await_client_ready(anonymoose, minimum_number_of_servers=1, timeout=600) + + yield upload_to_one_download_from_the_other(reactor, temp_dir, normie, anonymoose) diff --git a/integration/test_vectors.py b/integration/test_vectors.py new file mode 100644 index 000000000..f53ec1741 --- /dev/null +++ b/integration/test_vectors.py @@ -0,0 +1,120 @@ +""" +Verify certain results against test vectors with well-known results. +""" + +from __future__ import annotations + +from functools import partial +from typing import AsyncGenerator, Iterator +from itertools import starmap, product + +from attrs import evolve + +from pytest import mark +from pytest_twisted import ensureDeferred + +from . import vectors +from .vectors import parameters +from .util import upload +from .grid import Client + +@mark.parametrize('convergence', parameters.CONVERGENCE_SECRETS) +def test_convergence(convergence): + """ + Convergence secrets are 16 bytes. 
+ """ + assert isinstance(convergence, bytes), "Convergence secret must be bytes" + assert len(convergence) == 16, "Convergence secret must by 16 bytes" + + +@mark.slow +@mark.parametrize('case,expected', vectors.capabilities.items()) +@ensureDeferred +async def test_capability(reactor, request, alice, case, expected): + """ + The capability that results from uploading certain well-known data + with certain well-known parameters results in exactly the previously + computed value. + """ + # rewrite alice's config to match params and convergence + await alice.reconfigure_zfec( + reactor, (1, case.params.required, case.params.total), case.convergence, case.segment_size) + + # upload data in the correct format + actual = upload(alice, case.fmt, case.data) + + # compare the resulting cap to the expected result + assert actual == expected + + +@ensureDeferred +async def skiptest_generate(reactor, request, alice): + """ + This is a helper for generating the test vectors. + + You can re-generate the test vectors by fixing the name of the test and + running it. Normally this test doesn't run because it ran once and we + captured its output. Other tests run against that output and we want them + to run against the results produced originally, not a possibly + ever-changing set of outputs. + """ + space = starmap( + # segment_size could be a parameter someday but it's not easy to vary + # using the Python implementation so it isn't one for now. + partial(vectors.Case, segment_size=parameters.SEGMENT_SIZE), + product( + parameters.ZFEC_PARAMS, + parameters.CONVERGENCE_SECRETS, + parameters.OBJECT_DESCRIPTIONS, + parameters.FORMATS, + ), + ) + iterresults = generate(reactor, request, alice, space) + + results = [] + async for result in iterresults: + # Accumulate the new result + results.append(result) + # Then rewrite the whole output file with the new accumulator value. 
+ # This means that if we fail partway through, we will still have + # recorded partial results -- instead of losing them all. + vectors.save_capabilities(results) + +async def generate( + reactor, + request, + alice: Client, + cases: Iterator[vectors.Case], +) -> AsyncGenerator[[vectors.Case, str], None]: + """ + Generate all of the test vectors using the given node. + + :param reactor: The reactor to use to restart the Tahoe-LAFS node when it + needs to be reconfigured. + + :param request: The pytest request object to use to arrange process + cleanup. + + :param format: The name of the encryption/data format to use. + + :param alice: The Tahoe-LAFS node to use to generate the test vectors. + + :param case: The inputs for which to generate a value. + + :return: The capability for the case. + """ + # Share placement doesn't affect the resulting capability. For maximum + # reliability of this generator, be happy if we can put shares anywhere + happy = 1 + for case in cases: + await alice.reconfigure_zfec( + reactor, + (happy, case.params.required, case.params.total), + case.convergence, + case.segment_size + ) + + # Give the format a chance to make an RSA key if it needs it. + case = evolve(case, fmt=case.fmt.customize()) + cap = upload(alice.process, case.fmt, case.data) + yield case, cap diff --git a/integration/test_web.py b/integration/test_web.py index 22f08da82..6ea365017 100644 --- a/integration/test_web.py +++ b/integration/test_web.py @@ -7,48 +7,60 @@ Most of the tests have cursory asserts and encode 'what the WebAPI did at the time of testing' -- not necessarily a cohesive idea of what the WebAPI *should* do in every situation. It's not clear the latter exists anywhere, however. - -Ported to Python 3. 
""" -from __future__ import unicode_literals -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 +from __future__ import annotations import time +from base64 import urlsafe_b64encode from urllib.parse import unquote as url_unquote, quote as url_quote +from cryptography.hazmat.primitives.serialization import load_pem_private_key +from twisted.internet.threads import deferToThread +from twisted.python.filepath import FilePath + import allmydata.uri +from allmydata.crypto.rsa import ( + create_signing_keypair, + der_string_from_signing_key, + PrivateKey, + PublicKey, +) +from allmydata.mutable.common import derive_mutable_keys from allmydata.util import jsonbytes as json from . import util +from .util import run_in_thread import requests import html5lib from bs4 import BeautifulSoup +import pytest_twisted + +DATA_PATH = FilePath(__file__).parent().sibling("src").child("allmydata").child("test").child("data") + + +@run_in_thread def test_index(alice): """ we can download the index file """ - util.web_get(alice, u"") + util.web_get(alice.process, u"") +@run_in_thread def test_index_json(alice): """ we can download the index file as json """ - data = util.web_get(alice, u"", params={u"t": u"json"}) + data = util.web_get(alice.process, u"", params={u"t": u"json"}) # it should be valid json json.loads(data) +@run_in_thread def test_upload_download(alice): """ upload a file, then download it via readcap @@ -57,7 +69,7 @@ def test_upload_download(alice): FILE_CONTENTS = u"some contents" readcap = util.web_post( - alice, u"uri", + alice.process, u"uri", data={ u"t": u"upload", u"format": u"mdmf", @@ -69,7 +81,7 @@ def test_upload_download(alice): readcap = readcap.strip() data = util.web_get( - alice, u"uri", + 
alice.process, u"uri", params={ u"uri": readcap, u"filename": u"boom", @@ -78,6 +90,7 @@ def test_upload_download(alice): assert str(data, "utf-8") == FILE_CONTENTS +@run_in_thread def test_put(alice): """ use PUT to create a file @@ -86,36 +99,38 @@ def test_put(alice): FILE_CONTENTS = b"added via PUT" * 20 resp = requests.put( - util.node_url(alice.node_dir, u"uri"), + util.node_url(alice.process.node_dir, u"uri"), data=FILE_CONTENTS, ) cap = allmydata.uri.from_string(resp.text.strip().encode('ascii')) - cfg = alice.get_config() + cfg = alice.process.get_config() assert isinstance(cap, allmydata.uri.CHKFileURI) assert cap.size == len(FILE_CONTENTS) assert cap.total_shares == int(cfg.get_config("client", "shares.total")) assert cap.needed_shares == int(cfg.get_config("client", "shares.needed")) +@run_in_thread def test_helper_status(storage_nodes): """ successfully GET the /helper_status page """ - url = util.node_url(storage_nodes[0].node_dir, "helper_status") + url = util.node_url(storage_nodes[0].process.node_dir, "helper_status") resp = requests.get(url) assert resp.status_code >= 200 and resp.status_code < 300 dom = BeautifulSoup(resp.content, "html5lib") assert str(dom.h1.string) == u"Helper Status" +@run_in_thread def test_deep_stats(alice): """ create a directory, do deep-stats on it and prove the /operations/ URIs work """ resp = requests.post( - util.node_url(alice.node_dir, "uri"), + util.node_url(alice.process.node_dir, "uri"), params={ "format": "sdmf", "t": "mkdir", @@ -129,7 +144,7 @@ def test_deep_stats(alice): uri = url_unquote(resp.url) assert 'URI:DIR2:' in uri dircap = uri[uri.find("URI:DIR2:"):].rstrip('/') - dircap_uri = util.node_url(alice.node_dir, "uri/{}".format(url_quote(dircap))) + dircap_uri = util.node_url(alice.process.node_dir, "uri/{}".format(url_quote(dircap))) # POST a file into this directory FILE_CONTENTS = u"a file in a directory" @@ -175,7 +190,7 @@ def test_deep_stats(alice): while tries > 0: tries -= 1 resp = requests.get( 
- util.node_url(alice.node_dir, u"operations/something_random"), + util.node_url(alice.process.node_dir, u"operations/something_random"), ) d = json.loads(resp.content) if d['size-literal-files'] == len(FILE_CONTENTS): @@ -186,7 +201,7 @@ def test_deep_stats(alice): time.sleep(.5) -@util.run_in_thread +@run_in_thread def test_status(alice): """ confirm we get something sensible from /status and the various sub-types @@ -200,21 +215,21 @@ def test_status(alice): FILE_CONTENTS = u"all the Important Data of alice\n" * 1200 resp = requests.put( - util.node_url(alice.node_dir, u"uri"), + util.node_url(alice.process.node_dir, u"uri"), data=FILE_CONTENTS, ) cap = resp.text.strip() print("Uploaded data, cap={}".format(cap)) resp = requests.get( - util.node_url(alice.node_dir, u"uri/{}".format(url_quote(cap))), + util.node_url(alice.process.node_dir, u"uri/{}".format(url_quote(cap))), ) print("Downloaded {} bytes of data".format(len(resp.content))) assert str(resp.content, "ascii") == FILE_CONTENTS resp = requests.get( - util.node_url(alice.node_dir, "status"), + util.node_url(alice.process.node_dir, "status"), ) dom = html5lib.parse(resp.content) @@ -228,7 +243,7 @@ def test_status(alice): for href in hrefs: if href == u"/" or not href: continue - resp = requests.get(util.node_url(alice.node_dir, href)) + resp = requests.get(util.node_url(alice.process.node_dir, href)) if href.startswith(u"/status/up"): assert b"File Upload Status" in resp.content if b"Total Size: %d" % (len(FILE_CONTENTS),) in resp.content: @@ -240,7 +255,7 @@ def test_status(alice): # download the specialized event information resp = requests.get( - util.node_url(alice.node_dir, u"{}/event_json".format(href)), + util.node_url(alice.process.node_dir, u"{}/event_json".format(href)), ) js = json.loads(resp.content) # there's usually just one "read" operation, but this can handle many .. 
@@ -252,14 +267,25 @@ def test_status(alice): assert found_download, "Failed to find the file we downloaded in the status-page" -def test_directory_deep_check(alice): +@pytest_twisted.ensureDeferred +async def test_directory_deep_check(reactor, request, alice): """ use deep-check and confirm the result pages work """ + # Make sure the node is configured compatibly with expectations of this + # test. + happy = 3 + required = 2 + total = 4 + await alice.reconfigure_zfec(reactor, (happy, required, total), convergence=None) + await deferToThread(_test_directory_deep_check_blocking, alice) + + +def _test_directory_deep_check_blocking(alice): # create a directory resp = requests.post( - util.node_url(alice.node_dir, u"uri"), + util.node_url(alice.process.node_dir, u"uri"), params={ u"t": u"mkdir", u"redirect_to_result": u"true", @@ -308,12 +334,12 @@ def test_directory_deep_check(alice): print("Uploaded data1, cap={}".format(cap1)) resp = requests.get( - util.node_url(alice.node_dir, u"uri/{}".format(url_quote(cap0))), + util.node_url(alice.process.node_dir, u"uri/{}".format(url_quote(cap0))), params={u"t": u"info"}, ) def check_repair_data(checkdata): - assert checkdata["healthy"] is True + assert checkdata["healthy"] assert checkdata["count-happiness"] == 4 assert checkdata["count-good-share-hosts"] == 4 assert checkdata["count-shares-good"] == 4 @@ -417,6 +443,7 @@ def test_directory_deep_check(alice): assert dom is not None, "Operation never completed" +@run_in_thread def test_storage_info(storage_nodes): """ retrieve and confirm /storage URI for one storage node @@ -424,10 +451,11 @@ def test_storage_info(storage_nodes): storage0 = storage_nodes[0] requests.get( - util.node_url(storage0.node_dir, u"storage"), + util.node_url(storage0.process.node_dir, u"storage"), ) +@run_in_thread def test_storage_info_json(storage_nodes): """ retrieve and confirm /storage?t=json URI for one storage node @@ -435,24 +463,25 @@ def test_storage_info_json(storage_nodes): storage0 = 
storage_nodes[0] resp = requests.get( - util.node_url(storage0.node_dir, u"storage"), + util.node_url(storage0.process.node_dir, u"storage"), params={u"t": u"json"}, ) data = json.loads(resp.content) assert data[u"stats"][u"storage_server.reserved_space"] == 1000000000 +@run_in_thread def test_introducer_info(introducer): """ retrieve and confirm /introducer URI for the introducer """ resp = requests.get( - util.node_url(introducer.node_dir, u""), + util.node_url(introducer.process.node_dir, u""), ) assert b"Introducer" in resp.content resp = requests.get( - util.node_url(introducer.node_dir, u""), + util.node_url(introducer.process.node_dir, u""), params={u"t": u"json"}, ) data = json.loads(resp.content) @@ -460,6 +489,7 @@ def test_introducer_info(introducer): assert "subscription_summary" in data +@run_in_thread def test_mkdir_with_children(alice): """ create a directory using ?t=mkdir-with-children @@ -468,14 +498,14 @@ def test_mkdir_with_children(alice): # create a file to put in our directory FILE_CONTENTS = u"some file contents\n" * 500 resp = requests.put( - util.node_url(alice.node_dir, u"uri"), + util.node_url(alice.process.node_dir, u"uri"), data=FILE_CONTENTS, ) filecap = resp.content.strip() # create a (sub) directory to put in our directory resp = requests.post( - util.node_url(alice.node_dir, u"uri"), + util.node_url(alice.process.node_dir, u"uri"), params={ u"t": u"mkdir", } @@ -518,10 +548,294 @@ def test_mkdir_with_children(alice): # create a new directory with one file and one sub-dir (all-at-once) resp = util.web_post( - alice, u"uri", + alice.process, u"uri", params={u"t": "mkdir-with-children"}, data=json.dumps(meta), ) assert resp.startswith(b"URI:DIR2") cap = allmydata.uri.from_string(resp) assert isinstance(cap, allmydata.uri.DirectoryURI) + + +@run_in_thread +def test_mkdir_with_random_private_key(alice): + """ + Create a new directory with ?t=mkdir&private-key=... using a + randomly-generated RSA private key. 
+ + The writekey and fingerprint derived from the provided RSA key + should match those of the newly-created directory capability. + """ + + privkey, pubkey = create_signing_keypair(2048) + + writekey, _, fingerprint = derive_mutable_keys((pubkey, privkey)) + + # The "private-key" parameter takes a DER-encoded RSA private key + # encoded in URL-safe base64; PEM blocks are not supported. + privkey_der = der_string_from_signing_key(privkey) + privkey_encoded = urlsafe_b64encode(privkey_der).decode("ascii") + + resp = util.web_post( + alice.process, u"uri", + params={ + u"t": "mkdir", + u"private-key": privkey_encoded, + }, + ) + assert resp.startswith(b"URI:DIR2") + + dircap = allmydata.uri.from_string(resp) + assert isinstance(dircap, allmydata.uri.DirectoryURI) + + # DirectoryURI objects lack 'writekey' and 'fingerprint' attributes + # so extract them from the enclosed WriteableSSKFileURI object. + filecap = dircap.get_filenode_cap() + assert isinstance(filecap, allmydata.uri.WriteableSSKFileURI) + + assert (writekey, fingerprint) == (filecap.writekey, filecap.fingerprint) + + +@run_in_thread +def test_mkdir_with_known_private_key(alice): + """ + Create a new directory with ?t=mkdir&private-key=... using a + known-in-advance RSA private key. + + The writekey and fingerprint derived from the provided RSA key + should match those of the newly-created directory capability. + In addition, because the writekey and fingerprint are derived + deterministically, given the same RSA private key, the resultant + directory capability should always be the same. 
+ """ + # Generated with `openssl genrsa -out openssl-rsa-2048-3.txt 2048` + pempath = DATA_PATH.child("openssl-rsa-2048-3.txt") + privkey = load_pem_private_key(pempath.getContent(), password=None) + assert isinstance(privkey, PrivateKey) + pubkey = privkey.public_key() + assert isinstance(pubkey, PublicKey) + + writekey, _, fingerprint = derive_mutable_keys((pubkey, privkey)) + + # The "private-key" parameter takes a DER-encoded RSA private key + # encoded in URL-safe base64; PEM blocks are not supported. + privkey_der = der_string_from_signing_key(privkey) + privkey_encoded = urlsafe_b64encode(privkey_der).decode("ascii") + + resp = util.web_post( + alice.process, u"uri", + params={ + u"t": "mkdir", + u"private-key": privkey_encoded, + }, + ) + assert resp.startswith(b"URI:DIR2") + + dircap = allmydata.uri.from_string(resp) + assert isinstance(dircap, allmydata.uri.DirectoryURI) + + # DirectoryURI objects lack 'writekey' and 'fingerprint' attributes + # so extract them from the enclosed WriteableSSKFileURI object. + filecap = dircap.get_filenode_cap() + assert isinstance(filecap, allmydata.uri.WriteableSSKFileURI) + + assert (writekey, fingerprint) == (filecap.writekey, filecap.fingerprint) + + assert resp == b"URI:DIR2:3oo7j7f7qqxnet2z2lf57ucup4:cpktmsxlqnd5yeekytxjxvff5e6d6fv7py6rftugcndvss7tzd2a" + + +@run_in_thread +def test_mkdir_with_children_and_random_private_key(alice): + """ + Create a new directory with ?t=mkdir-with-children&private-key=... + using a randomly-generated RSA private key. + + The writekey and fingerprint derived from the provided RSA key + should match those of the newly-created directory capability. 
+ """ + + # create a file to put in our directory + FILE_CONTENTS = u"some file contents\n" * 500 + resp = requests.put( + util.node_url(alice.process.node_dir, u"uri"), + data=FILE_CONTENTS, + ) + filecap = resp.content.strip() + + # create a (sub) directory to put in our directory + resp = requests.post( + util.node_url(alice.process.node_dir, u"uri"), + params={ + u"t": u"mkdir", + } + ) + # (we need both the read-write and read-only URIs I guess) + dircap = resp.content + dircap_obj = allmydata.uri.from_string(dircap) + dircap_ro = dircap_obj.get_readonly().to_string() + + # create json information about our directory + meta = { + "a_file": [ + "filenode", { + "ro_uri": filecap, + "metadata": { + "ctime": 1202777696.7564139, + "mtime": 1202777696.7564139, + "tahoe": { + "linkcrtime": 1202777696.7564139, + "linkmotime": 1202777696.7564139 + } + } + } + ], + "some_subdir": [ + "dirnode", { + "rw_uri": dircap, + "ro_uri": dircap_ro, + "metadata": { + "ctime": 1202778102.7589991, + "mtime": 1202778111.2160511, + "tahoe": { + "linkcrtime": 1202777696.7564139, + "linkmotime": 1202777696.7564139 + } + } + } + ] + } + + privkey, pubkey = create_signing_keypair(2048) + + writekey, _, fingerprint = derive_mutable_keys((pubkey, privkey)) + + # The "private-key" parameter takes a DER-encoded RSA private key + # encoded in URL-safe base64; PEM blocks are not supported. 
+ privkey_der = der_string_from_signing_key(privkey) + privkey_encoded = urlsafe_b64encode(privkey_der).decode("ascii") + + # create a new directory with one file and one sub-dir (all-at-once) + # with the supplied RSA private key + resp = util.web_post( + alice.process, u"uri", + params={ + u"t": "mkdir-with-children", + u"private-key": privkey_encoded, + }, + data=json.dumps(meta), + ) + assert resp.startswith(b"URI:DIR2") + + dircap = allmydata.uri.from_string(resp) + assert isinstance(dircap, allmydata.uri.DirectoryURI) + + # DirectoryURI objects lack 'writekey' and 'fingerprint' attributes + # so extract them from the enclosed WriteableSSKFileURI object. + filecap = dircap.get_filenode_cap() + assert isinstance(filecap, allmydata.uri.WriteableSSKFileURI) + + assert (writekey, fingerprint) == (filecap.writekey, filecap.fingerprint) + + +@run_in_thread +def test_mkdir_with_children_and_known_private_key(alice): + """ + Create a new directory with ?t=mkdir-with-children&private-key=... + using a known-in-advance RSA private key. + + + The writekey and fingerprint derived from the provided RSA key + should match those of the newly-created directory capability. + In addition, because the writekey and fingerprint are derived + deterministically, given the same RSA private key, the resultant + directory capability should always be the same. 
+ """ + + # create a file to put in our directory + FILE_CONTENTS = u"some file contents\n" * 500 + resp = requests.put( + util.node_url(alice.process.node_dir, u"uri"), + data=FILE_CONTENTS, + ) + filecap = resp.content.strip() + + # create a (sub) directory to put in our directory + resp = requests.post( + util.node_url(alice.process.node_dir, u"uri"), + params={ + u"t": u"mkdir", + } + ) + # (we need both the read-write and read-only URIs I guess) + dircap = resp.content + dircap_obj = allmydata.uri.from_string(dircap) + dircap_ro = dircap_obj.get_readonly().to_string() + + # create json information about our directory + meta = { + "a_file": [ + "filenode", { + "ro_uri": filecap, + "metadata": { + "ctime": 1202777696.7564139, + "mtime": 1202777696.7564139, + "tahoe": { + "linkcrtime": 1202777696.7564139, + "linkmotime": 1202777696.7564139 + } + } + } + ], + "some_subdir": [ + "dirnode", { + "rw_uri": dircap, + "ro_uri": dircap_ro, + "metadata": { + "ctime": 1202778102.7589991, + "mtime": 1202778111.2160511, + "tahoe": { + "linkcrtime": 1202777696.7564139, + "linkmotime": 1202777696.7564139 + } + } + } + ] + } + + # Generated with `openssl genrsa -out openssl-rsa-2048-4.txt 2048` + pempath = DATA_PATH.child("openssl-rsa-2048-4.txt") + privkey = load_pem_private_key(pempath.getContent(), password=None) + assert isinstance(privkey, PrivateKey) + pubkey = privkey.public_key() + assert isinstance(pubkey, PublicKey) + + writekey, _, fingerprint = derive_mutable_keys((pubkey, privkey)) + + # The "private-key" parameter takes a DER-encoded RSA private key + # encoded in URL-safe base64; PEM blocks are not supported. 
+ privkey_der = der_string_from_signing_key(privkey) + privkey_encoded = urlsafe_b64encode(privkey_der).decode("ascii") + + # create a new directory with one file and one sub-dir (all-at-once) + # with the supplied RSA private key + resp = util.web_post( + alice.process, u"uri", + params={ + u"t": "mkdir-with-children", + u"private-key": privkey_encoded, + }, + data=json.dumps(meta), + ) + assert resp.startswith(b"URI:DIR2") + + dircap = allmydata.uri.from_string(resp) + assert isinstance(dircap, allmydata.uri.DirectoryURI) + + # DirectoryURI objects lack 'writekey' and 'fingerprint' attributes + # so extract them from the enclosed WriteableSSKFileURI object. + filecap = dircap.get_filenode_cap() + assert isinstance(filecap, allmydata.uri.WriteableSSKFileURI) + + assert (writekey, fingerprint) == (filecap.writekey, filecap.fingerprint) + + assert resp == b"URI:DIR2:ppwzpwrd37xi7tpribxyaa25uy:imdws47wwpzfkc5vfllo4ugspb36iit4cqps6ttuhaouc66jb2da" diff --git a/integration/util.py b/integration/util.py index 7c7a1efd2..59be528dc 100644 --- a/integration/util.py +++ b/integration/util.py @@ -1,22 +1,19 @@ """ -Ported to Python 3. +General functionality useful for the implementation of integration tests. 
""" -from __future__ import unicode_literals -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 +from __future__ import annotations +from contextlib import contextmanager +from typing import Any +from typing_extensions import Literal +from tempfile import NamedTemporaryFile import sys import time import json from os import mkdir, environ -from os.path import exists, join +from os.path import exists, join, basename from io import StringIO, BytesIO -from functools import partial from subprocess import check_output from twisted.python.filepath import ( @@ -26,18 +23,30 @@ from twisted.internet.defer import Deferred, succeed from twisted.internet.protocol import ProcessProtocol from twisted.internet.error import ProcessExitedAlready, ProcessDone from twisted.internet.threads import deferToThread +from twisted.internet.interfaces import IProcessTransport, IReactorProcess +from attrs import frozen, evolve import requests +from cryptography.hazmat.primitives.asymmetric import rsa +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives.serialization import ( + Encoding, + PrivateFormat, + NoEncryption, +) + from paramiko.rsakey import RSAKey from boltons.funcutils import wraps +from allmydata.util import base32 from allmydata.util.configutil import ( get_config, set_config, write_config, ) from allmydata import client +from allmydata.interfaces import DEFAULT_IMMUTABLE_MAX_SEGMENT_SIZE import pytest_twisted @@ -61,16 +70,40 @@ class _ProcessExitedProtocol(ProcessProtocol): self.done.callback(None) +class ProcessFailed(Exception): + """ + A subprocess has failed. 
+ + :ivar ProcessTerminated reason: the original reason from .processExited + + :ivar StringIO output: all stdout and stderr collected to this point. + """ + + def __init__(self, reason, output): + self.reason = reason + self.output = output + + def __str__(self): + return ":\n{}".format(self.reason, self.output) + + class _CollectOutputProtocol(ProcessProtocol): """ Internal helper. Collects all output (stdout + stderr) into self.output, and callback's on done with all of it after the process exits (for any reason). """ - def __init__(self, capture_stderr=True): + + def __init__(self, capture_stderr=True, stdin=None): self.done = Deferred() self.output = BytesIO() self.capture_stderr = capture_stderr + self._stdin = stdin + + def connectionMade(self): + if self._stdin is not None: + self.transport.write(self._stdin) + self.transport.closeStdin() def processEnded(self, reason): if not self.done.called: @@ -78,13 +111,12 @@ class _CollectOutputProtocol(ProcessProtocol): def processExited(self, reason): if not isinstance(reason.value, ProcessDone): - self.done.errback(reason) + self.done.errback(ProcessFailed(reason, self.output.getvalue())) def outReceived(self, data): self.output.write(data) def errReceived(self, data): - print("ERR: {!r}".format(data)) if self.capture_stderr: self.output.write(data) @@ -120,8 +152,9 @@ class _MagicTextProtocol(ProcessProtocol): and then .callback()s on self.done and .errback's if the process exits """ - def __init__(self, magic_text): + def __init__(self, magic_text: str, name: str) -> None: self.magic_seen = Deferred() + self.name = f"{name}: " self.exited = Deferred() self._magic_text = magic_text self._output = StringIO() @@ -131,7 +164,8 @@ class _MagicTextProtocol(ProcessProtocol): def outReceived(self, data): data = str(data, sys.stdout.encoding) - sys.stdout.write(data) + for line in data.splitlines(): + sys.stdout.write(self.name + line + "\n") self._output.write(data) if not self.magic_seen.called and self._magic_text in 
self._output.getvalue(): print("Saw '{}' in the logs".format(self._magic_text)) @@ -139,12 +173,39 @@ class _MagicTextProtocol(ProcessProtocol): def errReceived(self, data): data = str(data, sys.stderr.encoding) - sys.stdout.write(data) + for line in data.splitlines(): + sys.stdout.write(self.name + line + "\n") +def _cleanup_process_async(transport: IProcessTransport) -> None: + """ + If the given process transport seems to still be associated with a + running process, send a SIGTERM to that process. + + :param transport: The transport to use. + + :raise: ``ValueError`` if ``allow_missing`` is ``False`` and the transport + has no process. + """ + if transport.pid is None: + # in cases of "restart", we will have registered a finalizer + # that will kill the process -- but already explicitly killed + # it (and then ran again) due to the "restart". So, if the + # process is already killed, our job is done. + print("Process already cleaned up and that's okay.") + return + print("signaling {} with TERM".format(transport.pid)) + try: + transport.signalProcess('TERM') + except ProcessExitedAlready: + # The transport object thought it still had a process but the real OS + # process has already exited. That's fine. We accomplished what we + # wanted to. + pass + def _cleanup_tahoe_process(tahoe_transport, exited): """ - Terminate the given process with a kill signal (SIGKILL on POSIX, + Terminate the given process with a kill signal (SIGTERM on POSIX, TerminateProcess on Windows). :param tahoe_transport: The `IProcessTransport` representing the process. @@ -153,14 +214,24 @@ def _cleanup_tahoe_process(tahoe_transport, exited): :return: After the process has exited. 
""" from twisted.internet import reactor - try: - print("signaling {} with TERM".format(tahoe_transport.pid)) - tahoe_transport.signalProcess('TERM') - print("signaled, blocking on exit") - block_with_timeout(exited, reactor) - print("exited, goodbye") - except ProcessExitedAlready: - pass + _cleanup_process_async(tahoe_transport) + print(f"signaled, blocking on exit {exited}") + block_with_timeout(exited, reactor) + print("exited, goodbye") + + +def run_tahoe(reactor, request, *args, **kwargs): + """ + Helper to run tahoe with optional coverage. + + :returns: a Deferred that fires when the command is done (or a + ProcessFailed exception if it exits non-zero) + """ + stdin = kwargs.get("stdin", None) + protocol = _CollectOutputProtocol(stdin=stdin) + process = _tahoe_runner_optional_coverage(protocol, reactor, request, args) + process.exited = protocol.done + return protocol.done def _tahoe_runner_optional_coverage(proto, reactor, request, other_args): @@ -169,7 +240,7 @@ def _tahoe_runner_optional_coverage(proto, reactor, request, other_args): allmydata.scripts.runner` and `other_args`, optionally inserting a `--coverage` option if the `request` indicates we should. """ - if request.config.getoption('coverage'): + if request.config.getoption('coverage', False): args = [sys.executable, '-b', '-m', 'coverage', 'run', '-m', 'allmydata.scripts.runner', '--coverage'] else: args = [sys.executable, '-b', '-m', 'allmydata.scripts.runner'] @@ -206,14 +277,43 @@ class TahoeProcess(object): ) def kill(self): - """Kill the process, block until it's done.""" + """ + Kill the process, block until it's done. + Does nothing if the process is already stopped (or never started). + """ + print(f"TahoeProcess.kill({self.transport.pid} / {self.node_dir})") _cleanup_tahoe_process(self.transport, self.transport.exited) + def kill_async(self): + """ + Kill the process, return a Deferred that fires when it's done. + Does nothing if the process is already stopped (or never started). 
+ """ + print(f"TahoeProcess.kill_async({self.transport.pid} / {self.node_dir})") + _cleanup_process_async(self.transport) + return self.transport.exited + + def restart_async(self, reactor: IReactorProcess, request: Any) -> Deferred: + """ + Stop and then re-start the associated process. + + :return: A Deferred that fires after the new process is ready to + handle requests. + """ + d = self.kill_async() + d.addCallback(lambda ignored: _run_node(reactor, self.node_dir, request, None)) + def got_new_process(proc): + # Grab the new transport since the one we had before is no longer + # valid after the stop/start cycle. + self._process_transport = proc.transport + d.addCallback(got_new_process) + return d + def __str__(self): return "".format(self._node_dir) -def _run_node(reactor, node_dir, request, magic_text, finalize=True): +def _run_node(reactor, node_dir, request, magic_text): """ Run a tahoe process from its node_dir. @@ -221,7 +321,7 @@ def _run_node(reactor, node_dir, request, magic_text, finalize=True): """ if magic_text is None: magic_text = "client running" - protocol = _MagicTextProtocol(magic_text) + protocol = _MagicTextProtocol(magic_text, basename(node_dir)) # "tahoe run" is consistent across Linux/macOS/Windows, unlike the old # "start" command. 
@@ -237,19 +337,46 @@ def _run_node(reactor, node_dir, request, magic_text, finalize=True): ) transport.exited = protocol.exited - if finalize: - request.addfinalizer(partial(_cleanup_tahoe_process, transport, protocol.exited)) + tahoe_process = TahoeProcess( + transport, + node_dir, + ) - # XXX abusing the Deferred; should use .when_magic_seen() pattern + request.addfinalizer(tahoe_process.kill) - def got_proto(proto): - transport._protocol = proto - return TahoeProcess( - transport, - node_dir, - ) - protocol.magic_seen.addCallback(got_proto) - return protocol.magic_seen + d = protocol.magic_seen + d.addCallback(lambda ignored: tahoe_process) + return d + + +def basic_node_configuration(request, flog_gatherer, node_dir: str): + """ + Setup common configuration options for a node, given a ``pytest`` request + fixture. + """ + config_path = join(node_dir, 'tahoe.cfg') + config = get_config(config_path) + set_config( + config, + u'node', + u'log_gatherer.furl', + flog_gatherer, + ) + force_foolscap = request.config.getoption("force_foolscap") + assert force_foolscap in (True, False) + set_config( + config, + 'storage', + 'force_foolscap', + str(force_foolscap), + ) + set_config( + config, + 'client', + 'force_foolscap', + str(force_foolscap), + ) + write_config(FilePath(config_path), config) def _create_node(reactor, request, temp_dir, introducer_furl, flog_gatherer, name, web_port, @@ -257,8 +384,7 @@ def _create_node(reactor, request, temp_dir, introducer_furl, flog_gatherer, nam magic_text=None, needed=2, happy=3, - total=4, - finalize=True): + total=4): """ Helper to create a single node, run it and return the instance spawnProcess returned (ITransport) @@ -269,7 +395,7 @@ def _create_node(reactor, request, temp_dir, introducer_furl, flog_gatherer, nam if exists(node_dir): created_d = succeed(None) else: - print("creating", node_dir) + print("creating: {}".format(node_dir)) mkdir(node_dir) done_proto = _ProcessExitedProtocol() args = [ @@ -292,21 +418,13 @@ def 
_create_node(reactor, request, temp_dir, introducer_furl, flog_gatherer, nam created_d = done_proto.done def created(_): - config_path = join(node_dir, 'tahoe.cfg') - config = get_config(config_path) - set_config( - config, - u'node', - u'log_gatherer.furl', - flog_gatherer, - ) - write_config(FilePath(config_path), config) + basic_node_configuration(request, flog_gatherer.furl, node_dir) created_d.addCallback(created) d = Deferred() d.callback(None) d.addCallback(lambda _: created_d) - d.addCallback(lambda _: _run_node(reactor, node_dir, request, magic_text, finalize=finalize)) + d.addCallback(lambda _: _run_node(reactor, node_dir, request, magic_text)) return d @@ -357,6 +475,31 @@ class FileShouldVanishException(Exception): ) +def run_in_thread(f): + """Decorator for integration tests that runs code in a thread. + + Because we're using pytest_twisted, tests that rely on the reactor are + expected to return a Deferred and use async APIs so the reactor can run. + + In the case of the integration test suite, it launches nodes in the + background using Twisted APIs. The nodes stdout and stderr is read via + Twisted code. If the reactor doesn't run, reads don't happen, and + eventually the buffers fill up, and the nodes block when they try to flush + logs. + + We can switch to Twisted APIs (treq instead of requests etc.), but + sometimes it's easier or expedient to just have a blocking test. So this + decorator allows you to run the test in a thread, and the reactor can keep + running in the main thread. + + See https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3597 for tracking bug. 
+ """ + @wraps(f) + def test(*args, **kwargs): + return deferToThread(lambda: f(*args, **kwargs)) + return test + + def await_file_contents(path, contents, timeout=15, error_if=None): """ wait up to `timeout` seconds for the file at `path` (any path-like @@ -482,14 +625,16 @@ def web_post(tahoe, uri_fragment, **kwargs): return resp.content -def await_client_ready(tahoe, timeout=10, liveness=60*2): +@run_in_thread +def await_client_ready(tahoe, timeout=10, liveness=60*2, minimum_number_of_servers=1): """ Uses the status API to wait for a client-type node (in `tahoe`, a `TahoeProcess` instance usually from a fixture e.g. `alice`) to be 'ready'. A client is deemed ready if: - it answers `http:///statistics/?t=json/` - - there is at least one storage-server connected + - there is at least one storage-server connected (configurable via + ``minimum_number_of_servers``) - every storage-server has a "last_received_data" and it is within the last `liveness` seconds @@ -505,25 +650,35 @@ def await_client_ready(tahoe, timeout=10, liveness=60*2): print("waiting because '{}'".format(e)) time.sleep(1) continue + servers = js['servers'] - if len(js['servers']) == 0: - print("waiting because no servers at all") + if len(servers) < minimum_number_of_servers: + print(f"waiting because {servers} is fewer than required ({minimum_number_of_servers})") time.sleep(1) continue + + now = time.time() server_times = [ server['last_received_data'] - for server in js['servers'] + for server + in servers + if server['last_received_data'] is not None ] - # if any times are null/None that server has never been - # contacted (so it's down still, probably) - if any(t is None for t in server_times): - print("waiting because at least one server not contacted") - time.sleep(1) - continue + print( + f"Now: {time.ctime(now)}\n" + f"Liveness required: {liveness}\n" + f"Server last-received-data: {[time.ctime(s) for s in server_times]}\n" + f"Server ages: {[now - s for s in server_times]}\n" + ) - # check 
that all times are 'recent enough' - if any([time.time() - t > liveness for t in server_times]): - print("waiting because at least one server too old") + # check that all times are 'recent enough' (it's OK if _some_ servers + # are down, we just want to make sure a sufficient number are up) + alive = [t for t in server_times if now - t <= liveness] + if len(alive) < minimum_number_of_servers: + print( + f"waiting because we found {len(alive)} servers " + f"and want {minimum_number_of_servers}" + ) time.sleep(1) continue @@ -548,26 +703,171 @@ def generate_ssh_key(path): f.write(s.encode("ascii")) -def run_in_thread(f): - """Decorator for integration tests that runs code in a thread. - - Because we're using pytest_twisted, tests that rely on the reactor are - expected to return a Deferred and use async APIs so the reactor can run. - - In the case of the integration test suite, it launches nodes in the - background using Twisted APIs. The nodes stdout and stderr is read via - Twisted code. If the reactor doesn't run, reads don't happen, and - eventually the buffers fill up, and the nodes block when they try to flush - logs. - - We can switch to Twisted APIs (treq instead of requests etc.), but - sometimes it's easier or expedient to just have a blocking test. So this - decorator allows you to run the test in a thread, and the reactor can keep - running in the main thread. - - See https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3597 for tracking bug. +@frozen +class CHK: """ - @wraps(f) - def test(*args, **kwargs): - return deferToThread(lambda: f(*args, **kwargs)) - return test + Represent the CHK encoding sufficiently to run a ``tahoe put`` command + using it. + """ + kind = "chk" + max_shares = 256 + + def customize(self) -> CHK: + # Nothing to do. 
+ return self + + @classmethod + def load(cls, params: None) -> CHK: + assert params is None + return cls() + + def to_json(self) -> None: + return None + + @contextmanager + def to_argv(self) -> None: + yield [] + +@frozen +class SSK: + """ + Represent the SSK encodings (SDMF and MDMF) sufficiently to run a + ``tahoe put`` command using one of them. + """ + kind = "ssk" + + # SDMF and MDMF encode share counts (N and k) into the share itself as an + # unsigned byte. They could have encoded (share count - 1) to fit the + # full range supported by ZFEC into the unsigned byte - but they don't. + # So 256 is inaccessible to those formats and we set the upper bound at + # 255. + max_shares = 255 + + name: Literal["sdmf", "mdmf"] + key: None | bytes + + @classmethod + def load(cls, params: dict) -> SSK: + assert params.keys() == {"format", "mutable", "key"} + return cls(params["format"], params["key"].encode("ascii")) + def customize(self) -> SSK: + """ + Return an SSK with a newly generated random RSA key. + """ + return evolve(self, key=generate_rsa_key()) + + def to_json(self) -> dict[str, str]: + return { + "format": self.name, + "mutable": None, + "key": self.key.decode("ascii"), + } + + @contextmanager + def to_argv(self) -> None: + with NamedTemporaryFile() as f: + f.write(self.key) + f.flush() + yield [f"--format={self.name}", "--mutable", f"--private-key-path={f.name}"] + + +def upload(alice: TahoeProcess, fmt: CHK | SSK, data: bytes) -> str: + """ + Upload the given data to the given node. + + :param alice: The node to upload to. + + :param fmt: The name of the format for the upload. CHK, SDMF, or MDMF. + + :param data: The data to upload. + + :return: The capability for the uploaded data. 
+ """ + + with NamedTemporaryFile() as f: + f.write(data) + f.flush() + with fmt.to_argv() as fmt_argv: + argv = [alice.process, "put"] + fmt_argv + [f.name] + return cli(*argv).decode("utf-8").strip() + + +async def reconfigure(reactor, request, node: TahoeProcess, + params: tuple[int, int, int], + convergence: None | bytes, + max_segment_size: None | int = None) -> None: + """ + Reconfigure a Tahoe-LAFS node with different ZFEC parameters and + convergence secret. + + TODO This appears to have issues on Windows. + + If the current configuration is different from the specified + configuration, the node will be restarted so it takes effect. + + :param reactor: A reactor to use to restart the process. + :param request: The pytest request object to use to arrange process + cleanup. + :param node: The Tahoe-LAFS node to reconfigure. + :param params: The ``happy``, ``needed``, and ``total`` ZFEC encoding + parameters. + :param convergence: If given, the convergence secret. If not given, the + existing convergence secret will be left alone. + + :return: ``None`` after the node configuration has been rewritten, the + node has been restarted, and the node is ready to provide service. 
+ """ + happy, needed, total = params + config = node.get_config() + + changed = False + cur_happy = int(config.get_config("client", "shares.happy")) + cur_needed = int(config.get_config("client", "shares.needed")) + cur_total = int(config.get_config("client", "shares.total")) + + if (happy, needed, total) != (cur_happy, cur_needed, cur_total): + changed = True + config.set_config("client", "shares.happy", str(happy)) + config.set_config("client", "shares.needed", str(needed)) + config.set_config("client", "shares.total", str(total)) + + if convergence is not None: + cur_convergence = config.get_private_config("convergence").encode("ascii") + if base32.a2b(cur_convergence) != convergence: + changed = True + config.write_private_config("convergence", base32.b2a(convergence)) + + if max_segment_size is not None: + cur_segment_size = int(config.get_config("client", "shares._max_immutable_segment_size_for_testing", DEFAULT_IMMUTABLE_MAX_SEGMENT_SIZE)) + if cur_segment_size != max_segment_size: + changed = True + config.set_config( + "client", + "shares._max_immutable_segment_size_for_testing", + str(max_segment_size) + ) + + if changed: + # restart the node + print(f"Restarting {node.node_dir} for ZFEC reconfiguration") + await node.restart_async(reactor, request) + print("Restarted. Waiting for ready state.") + await await_client_ready(node) + print("Ready.") + else: + print("Config unchanged, not restarting.") + + +def generate_rsa_key() -> bytes: + """ + Generate a 2048 bit RSA key suitable for use with SSKs. 
+ """ + return rsa.generate_private_key( + public_exponent=65537, + key_size=2048, + backend=default_backend() + ).private_bytes( + encoding=Encoding.PEM, + format=PrivateFormat.TraditionalOpenSSL, + encryption_algorithm=NoEncryption(), + ) diff --git a/integration/vectors/__init__.py b/integration/vectors/__init__.py new file mode 100644 index 000000000..31c32d0aa --- /dev/null +++ b/integration/vectors/__init__.py @@ -0,0 +1,30 @@ +__all__ = [ + "DATA_PATH", + "CURRENT_VERSION", + "MAX_SHARES", + + "Case", + "Sample", + "SeedParam", + "encode_bytes", + "save_capabilities", + + "capabilities", +] + +from .vectors import ( + DATA_PATH, + CURRENT_VERSION, + + Case, + Sample, + SeedParam, + encode_bytes, + save_capabilities, + + capabilities, +) + +from .parameters import ( + MAX_SHARES, +) diff --git a/integration/vectors/model.py b/integration/vectors/model.py new file mode 100644 index 000000000..8d9c1d006 --- /dev/null +++ b/integration/vectors/model.py @@ -0,0 +1,58 @@ +""" +Simple data type definitions useful in the definition/verification of test +vectors. +""" + +from __future__ import annotations + +from attrs import frozen + +# CHK have a max of 256 shares. SDMF / MDMF have a max of 255 shares! +# Represent max symbolically and resolve it when we know what format we're +# dealing with. +MAX_SHARES = "max" + +@frozen +class Sample: + """ + Some instructions for building a long byte string. + + :ivar seed: Some bytes to repeat some times to produce the string. + :ivar length: The length of the desired byte string. + """ + seed: bytes + length: int + +@frozen +class Param: + """ + Some ZFEC parameters. + """ + required: int + total: int + +@frozen +class SeedParam: + """ + Some ZFEC parameters, almost. + + :ivar required: The number of required shares. + + :ivar total: Either the number of total shares or the constant + ``MAX_SHARES`` to indicate that the total number of shares should be + the maximum number supported by the object format. 
+ """ + required: int + total: int | str + + def realize(self, max_total: int) -> Param: + """ + Create a ``Param`` from this object's values, possibly + substituting the given real value for total if necessary. + + :param max_total: The value to use to replace ``MAX_SHARES`` if + necessary. + """ + if self.total == MAX_SHARES: + return Param(self.required, max_total) + return Param(self.required, self.total) diff --git a/integration/vectors/parameters.py b/integration/vectors/parameters.py new file mode 100644 index 000000000..e1fafcec4 --- /dev/null +++ b/integration/vectors/parameters.py @@ -0,0 +1,93 @@ +""" +Define input parameters for test vector generation. + +:ivar CONVERGENCE_SECRETS: Convergence secrets. + +:ivar SEGMENT_SIZE: The single segment size that the Python implementation + currently supports without a lot of refactoring. + +:ivar OBJECT_DESCRIPTIONS: Small objects with instructions which can be + expanded into a possibly large byte string. These are intended to be used + as plaintext inputs. + +:ivar ZFEC_PARAMS: Input parameters to ZFEC. + +:ivar FORMATS: Encoding/encryption formats. +""" + +from __future__ import annotations + +from hashlib import sha256 + +from .model import MAX_SHARES +from .vectors import Sample, SeedParam +from ..util import CHK, SSK + +def digest(bs: bytes) -> bytes: + """ + Digest bytes to bytes. + """ + return sha256(bs).digest() + + +def hexdigest(bs: bytes) -> str: + """ + Digest bytes to text. + """ + return sha256(bs).hexdigest() + +# Just a couple convergence secrets. The only thing we do with this value is +# feed it into a tagged hash. It certainly makes a difference to the output +# but the hash should destroy any structure in the input so it doesn't seem +# like there's a reason to test a lot of different values. 
+CONVERGENCE_SECRETS: list[bytes] = [ + b"aaaaaaaaaaaaaaaa", + digest(b"Hello world")[:16], +] + +SEGMENT_SIZE: int = 128 * 1024 + +# Exercise at least a handful of different sizes, trying to cover: +# +# 1. Some cases smaller than one "segment" (128k). +# This covers shrinking of some parameters to match data size. +# This includes one case of the smallest possible CHK. +# +# 2. Some cases right on the edges of integer segment multiples. +# Because boundaries are tricky. +# +# 4. Some cases that involve quite a few segments. +# This exercises merkle tree construction more thoroughly. +# +# See ``stretch`` for construction of the actual test data. +OBJECT_DESCRIPTIONS: list[Sample] = [ + # The smallest possible. 55 bytes and smaller are LIT. + Sample(b"a", 56), + Sample(b"a", 1024), + Sample(b"c", 4096), + Sample(digest(b"foo"), SEGMENT_SIZE - 1), + Sample(digest(b"bar"), SEGMENT_SIZE + 1), + Sample(digest(b"baz"), SEGMENT_SIZE * 16 - 1), + Sample(digest(b"quux"), SEGMENT_SIZE * 16 + 1), + Sample(digest(b"bazquux"), SEGMENT_SIZE * 32), + Sample(digest(b"foobar"), SEGMENT_SIZE * 64 - 1), + Sample(digest(b"barbaz"), SEGMENT_SIZE * 64 + 1), +] + +ZFEC_PARAMS: list[SeedParam] = [ + SeedParam(1, 1), + SeedParam(1, 3), + SeedParam(2, 3), + SeedParam(3, 10), + SeedParam(71, 255), + SeedParam(101, MAX_SHARES), +] + +FORMATS: list[CHK | SSK] = [ + CHK(), + + # These start out unaware of a key but various keys will be supplied + # during generation. 
+ SSK(name="sdmf", key=None), + SSK(name="mdmf", key=None), +] diff --git a/integration/vectors/test_vectors.yaml b/integration/vectors/test_vectors.yaml new file mode 100755 index 000000000..718f94f0d --- /dev/null +++ b/integration/vectors/test_vectors.yaml @@ -0,0 +1,18002 @@ +vector: +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:CHK:yzxcoagbetwet65ltjpbqyli3m:6b7inuiha2xdtgqzd55i6aeggutnxzr6qfwpv2ep5xlln6pgef7a:1:1:56 + format: + kind: chk + params: null + sample: + length: 56 + seed: YQ== + zfec: + required: 1 + segmentSize: 131072 + total: 1 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:SSK:fs6ul2fju2fvb2cfx7gt6ngycm:hncpinwszbggrurbvuaaexnftk3j5wfr7473pj2g734mo2isxlbq + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEA1I0X8E9USJxPRJmD6l3cjlyGYi9hXxJFb5km5/J7elPbYPP3 + + DhdHmJcELYP5HxGBmfLavCBvFDO6nVA3TDwCPrI/7KpiY7uHzZkLgsLA8M45NaJE + + eUgACBESZcNioUqYLNHvYKLOqTDV+JwyQ9oWUNONd2jg3LQ+e4oyVwvxEZ41P5cM + + u9wJI8OO/G7FItCL2Ts1OgjmNWEz6KN7MjU/2UsNfa2eK6mlZ3Wi1oprhmfCrWHu + + +hjevqW46Qp/ddCCkBQCHKcV5ZsbBVxq6vqrYClUYa6Y5jzevMK8euuT+tA289sx + + jXpbY3eXaggWdeDIoDquOumCCkVxhoj3dvUKsQIDAQABAoIBAAIAsFSN0sv6WQ7a + + 6XDIYJ8gxQ1gx+iW6fuStFikIsC00JDZy56g3oZUCfCJ2UuPJSr3rFLwdUt570yz + + KEo6GIVRtaN7uYCaED4CLqcVQa8jKkvUkxOXd5Sb4JH/5MqDQurNMZW2Av96G9ID + + Wr/j6qjpTWBuJww9UIdmdnH2hVd2oz12+6Y/6nlrE2iGPDkQMPnkKXRb7xeaXJOq + + l6003hA4JRtzzS1uBb7cRuvyW/oOouBBxoP49a8UUoetgOMNDvVX4/16lRY3K6Vj + + VfserJz2R7QYKcfCJAe54VImGGhvq0Q76kfKsbX0xZ5fGFgS8LyAaZYyR6M3V88+ + + qmUT2WkCgYEA1mI7uL+NEn3zRjjkpqqO1tmKfZVDayQ5bpOtJG44qpmv+eihBuu7 + + S7V2waf46SwZAdUyXYxj+u0Dfnwre53tx7jdrntKNP9o1i8b3pZW13wv/IWq2bcA + + UFAhSlFjw9qj9nVFYHnqhygKGq+EbzkILp2eQUstjoWM4xCo1bRMMN0CgYEA/c/K + + YwVm6nyK6jMAK5zGWstliTPYkkSU79BvdbwXayIVp8CeDYPWpZxtqVQtoNvQwA9C + + 8K2PuHrHFH3a16siXPrto0hoC3oXyyKKmqbeZLpafg1ngQfieVYS0A0qt6cVx+Sz + + 3gy7W7xeHfBSBbDPJR/G4gI88+9GVJdCVAfK2eUCgYB5ZaD56gZBfW7fyeG4ewZt + + 
pTwmBvrpVdbrxdYatguCl4qt0kw09hHWOkioOqzZpO34OrjNfm0zLzl2S2v4ESMP + + oKBvaENKJYNBHeYDMlC0rw8hSLPJmzYjRGzFf7cltc55Bkkl64Ohy0uFdvRgYwQ+ + + GWT/Bkoi1X9FKS7h7LnkRQKBgQDF/jZvEGO8P/NNxwM3AlFpuok2go9LatyURxDr + + 0xKhrDEgb43cFSB4iJKzKMt/VHp/mGgrv/kBfCWYwqTY4NMpnUWLvowLh+7Ps95T + + ziBmi0jUVDiN20y8Qnzid6L/KQRArxPxABWX9lWlHTee4NJ2r1dCL2TFFb7Tdjtz + + ubBwUQKBgQCnEaEgoZ2Rqp3P7TzQjSzPlHlHBMXW28sEdXprdlwicU18fjDj5+og + + iozu9orcAD8AOGBNueKErSWiXhp+MuY4AvJuJZPV3gkMKKYc83HKWN05Xh8rkcZ3 + + KIHCyp3EBdtI+YDWvLISSqvFqCYtBR5v7eU9Ri2gOVljtmgJ8lARAA== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 56 + seed: YQ== + zfec: + required: 1 + segmentSize: 131072 + total: 1 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:MDMF:tn2ekpkmearz7k3bivm3ikuz3i:wcqgst36kymoirczlkok5pqdekt5lgsyfw3oh7ecoro3rj5aiyya + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEogIBAAKCAQEAxMTk4iBj9yi+NBdRamEGlVB+/Vptm6uWdSZIF5CmliQHr0Ev + + KlYaiMxlFR+1RiV3LA7tzUbeJtHUsiaefAxtZFf4gtCKNRj1vC8lhbcH7NKxY9vr + + 4QMhNJ4NxYpC8qqcnNrlwULMw+eYLb2NUhuI2NOPKDjs1dgwypIEej0/m/Z7r+NB + + xO6dKx4NYOnrQBoLNfgN7KlHCruhG4LcZxaMkuUD38brtxkd25/PlQ9tm77ODz9G + + WrU6/G5LSzTgWqrCQ6ww3UXfk9hfNqXob3loaTw2U+2M5gy73UduhHqSkyskNHgt + + WJRUyU9YC4K40IDtiJAlJJ3cShfVNDWCAIjKuQIDAQABAoIBAADRMS16j/UMpMQ8 + + 8o1xYLXJs0qkZP8iq8nJwmk3+bvMONdH+y+pqDY4Ob/oNU2uGybCMHJL9eE4ZDHn + + NNJJZOzn6/Qdye0lhjkQAw+2mQr+kauwqUlHxOFd5KsU2L6plGPsXsw6KvUx/DD4 + + cA1OvaHqOFZ6Qgrd+SSQ4wGKST4sZSwX5ZtMX2+o0eghkK1W5fMgbxSvMOWBOwqk + + uUnYeyFECxvntRhv0clYwvqfNetf4SMFcPVp+pCY7gaZH2rr3tVk3lSHWFI0pz4s + + lLVtINuK2jeg8cfhZhnlWHSVW8C/F3xryNZCnacq4UgeoYwje8swez4/gCi0PyYY + + MbufpP8CgYEA8qLO9guRyFd9S4jgmVmu1TQLN8nX8sDOfkmiT7+V3oDJm/3HNRXV + + nX9UD/n2bpydEvjMCF7Mh4FaQvSFqSLTSBDoAY+4KD4KTecz3JD5yugBWbT7hWpp + + 0jgUOW18w+2HFyLsEYFyW1xpIa1iZT9w/R031nGeH+q1CJcGfYRNXN8CgYEAz5tb + + wAShRkugicR3jO8geYzuDV0JOAsZnTdef0OLrTYoOL6V5TX1YNnSGsv8vTs0sF08 + + 
PGVQqEdZrYtgCJ3wehs30xBoE5+CZdAo8k810x18TYK7Zx5ZF8VjKT4LcW3SPD5I + + 7T0TUeOKZRWgF0V4uRMj2qH+4fX4fjkoSDkYM2cCgYBYaf0yaSrZLxBIGvuExcpQ + + hGNmE9Xt7lYQbLKJjs2Ew7czcXlKncc2WfR+0d37lnQiOqjWj/zFj9wdM88Uv8zv + + oMF5+C3p9Bl7I7mhMO7lAj+jubBBgHJJGQg9mOjy2DX2t1IAWwQZyIXCsNR/Amwg + + v6neKY6uIK+RDr9ds30hTwKBgC8Svv8PDbJuu3wBfEoMfoSRG/kTu19lxO0M/PRG + + UIl52izjqgFK9tR7D1TcI/aUUiIbQek/38YIR6E+FQxfI4PMYCAPfEnWxS5owKAQ + + rdesu96nYe7DxtfI/e8ADoAtspnOVaLVUmgi++JnwOEF85WjbWHJkY2SxEF6nFOj + + /oTJAoGAbhFazz1ZFuMJ8TwKo847lkC3TqWEvMzvoFgOLej6eqiZ93LtZG2+CG84 + + eBDDyAVnNaHUa/HrX62B3Si6WN/vxsI9x191kaRUkiBBaqTg6TkruQwdFHMlFFHH + + Vg7Pqd88N5H5gMl6+i4c/RgXw/vlzFkTgc3TwtUjwAWGQs8eAVw= + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 56 + seed: YQ== + zfec: + required: 1 + segmentSize: 131072 + total: 1 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:CHK:mri732rh3meyh4drikau3a24ba:6tsj5wvcp6szdhmrbu5bea57wduoza64y6nd2lm7aleqpsxjm5la:1:1:1024 + format: + kind: chk + params: null + sample: + length: 1024 + seed: YQ== + zfec: + required: 1 + segmentSize: 131072 + total: 1 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:SSK:kwn44kjzh5s6lqyaeh3d74ziye:fammlnqo37yrfilvn4xwralire36de7ogpusp2uprtirwlpdbtca + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEAvXGjmoIhKgrA08l9upgUZdyMOgOrH2rayM6/7tuaNjWzXfBL + + 048vgjD+PyWUCat9+Y/ZXO7RybLSSJ0z3tK+7177gaBBPffwp1ltlVR0mFEnpzRE + + 0wOzhn8+DBcrOV6819f2TcL1PLjtX0RnbHoVpQaMhacwE9laXSGvvDEliPRSTZGF + + a2p4DmkoBaTILKT9ZYjUdYvWT0VjtpK0aj1kkBtAvJFfnfKw+SXEnuRb49QWlfZy + + wuQdAZKqpHv4lm8DNDXviU8mXqU1N67D2Nn+TIZhZIMIILlinFKvbQDgI74sLoMI + + PxnuSZELZkRF4SaV+oNIjOV1FUaPlhs7mVy98wIDAQABAoIBAAIVmYrDDBUizImR + + 2dFeEhLE1zf7k0X3OGWlhtxSs3aXYjTDd+0wb4HX+RQqU1+68LKCZjWx3NX4rKOi + + DBVw7bThJZTPsOMnWiTOdgJbYOn1WWZ+8wmte1z1Kvw3YLxfKqFdVnnP8LY0ohCX + + C1CT7NZFrxjlDnxdHYxWAceaTUpYms5+vw/XwNUtypjjqeU7yd5lZ8Oi3kRlxgDE + + 
tVJq96pzahruy7Xo+QOrqDd55zTzfm1lXA5ZQNMtSKOumSoJ9+/NTGkz9WYDOzUy + + jBJDzKYP2hE6moUdDOUqK0sWPFmcRvMcVgxlg9DRuNFWzqM12jTzEaFqnBpRUoJ7 + + KuNBBlECgYEAxUo+GfuPB4QuWy3TlvbheSOP8JN/wJm6YgsisncroYdIibR9r0BW + + dsaQmJEPKjsn86qIIT10AtNXlieRg8GTtW+1E3GPDpkJYnqcQZ5D/R2AJPNK7Acd + + JeOAjZOWVoD79D8sC6w98+akURaO7TrTwXEQpSIrl+k0aR/RQCThuycCgYEA9dGs + + AWA4RVU++eFyXiu3fc08V7Asgh2h991FUWckP3/kRM8P9wLBJkBvolbQ6OA+kpKy + + IatQ02fCw77ksathgv5X7dRwBq+ziBX4VSch/zZGXiQTxJdjDEDFvCOzmXmuTVgw + + ZXSXaQkTlOci+aknV4CHZ/uZl0IRa2gX5u6x1lUCgYBE+3+ZUCcjpqkawnxARdRy + + qeeTY8+AhX/w9hnMsvRzhzzqwUxM8b8JysYWQmo+Bu8iONdeYAFnV4RBgVZU7mN6 + + RjPXN5agsQvh/iMSoob9QspioRrqSlZ7v/9cAWXIm1L9hPUeo7wJwvRjUfLpqe8O + + rTz3sGnztNvZggGFXx/6cwKBgQDg2CT1qTYvLNcKpwz+WAxhVF2yc2FyrnodBtbF + + q4r7ThbUXXVj4bAcNeomWjSCHcL+PJIUu+eVRx5d/3idjn4F3HE/CAZkB0g23Kml + + 8cJl9xYMPAGc2z/s0D5NZXZ8llE5S8YQtsFbgMLZe0WBiRiEL/sqwHbvZK4cST07 + + rO8bdQKBgEvcv+EJfffxdmzgRaZQaxxLdvrFH63ArPa/CnTMsltQUHifZD8H5F/Z + + MaMXAN+tbbwcE3uId0UcwsSflJOCoHkI7fly08FYTUCzyWLcTrrntP256SU7bybf + + 1tD2fzeoHns+FePq5qSkmXyw9bKC4WaP7PYEHr8RZ9+z4tmmo1GU + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 1024 + seed: YQ== + zfec: + required: 1 + segmentSize: 131072 + total: 1 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:MDMF:uuln4c3nhnggvyge4trhl3d6pa:gzzewapuozilqyr7jo26dxv4hmip5nalhwsztkxxu3dcrew3k6wq + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEogIBAAKCAQEApavd9AgytDZu/VS2b253N8/2MWMUJJsAV+dXcMl1lXlD0yN9 + + Ljxay3m+ZIc3jJPqj5nDhxFz1Yd9HI0pJP224jJjlVHif9SpXzH9FVAsGIizYDMd + + ozHIZVctpX2ZbcQUbsskiGGnWwrDCrBDruVq+XnPbbGUpTMSFsXAafuKlvBG/zu2 + + e9TyEYPeenCz2+9A/Y4wIcK9z+dbKoof58R3XyQO20v4oD1cb2ohQreE778iEYJj + + gQCjO/EiYz2ATiEOlPrdD+R4NjFxqzfh8SZWrIDyKho7bMavvBM0N7Fa20n5630r + + Czu6jZNVgqK5qS46otsQN9XCUB9F37IVZbIwYQIDAQABAoIBABzHki9QFETPlvDN + + nEKXyUCKER3LtSZVwdXDY6J9cL52WiSty9NyGyCxRbSPc8QpNuxavQdz7fAoUQDa + + 
ec0KARDiyX9ZQfRMZF7b33fqHTrm1mhOAOZGYeZO4mhW/QX/M8B6bB5//lNXt4Ge + + FKfnhTGQ6kqHOIgJKumHUVFn58+n5khCgM6TKTOc6A4wqSfvF8Mt0eAlj26rPwUx + + sil+5uj+KMf/Z2Hb0/KBONLmRY2/dIwjNa41sxQ/DnW0Dae4PSKU0IojV3qz6mVF + + upfeZEVcod3Kn5jbHvkL5VpP2tFkGz+v9RPEQ8Ipt53GzaWMvMzKJ+nKKhp77IYL + + TBGp9SUCgYEAtf+ri1c29XMhUENSTfSKfihxBdlABVfrVCeVb+5TnB9NyGxLdC1W + + bFX9kn1pURR3d/0MJvgr5oLt7pFO3FmJ6cAeBYMadoxu0mKZZldnh9m69+hZ73ih + + im9VXLzKvwzVO7lpC83UMgJXyGs1EVQQ/fpCDlq8wEGhH/zIlrq/IpUCgYEA6Qis + + xm5d9lmhhM7rGWXjzcWDI5Jla1SGe2bNDwTyXMFoG+W7ENXtO5bdgRRaiByrb0mh + + X+9r5PI241MkseQIHL8B1yEgsxExj7aebfujspeAeSA1IznaZOSFWOxG/L0xpYQZ + + G6dlk5YTv/d7nKpZ0AV7z1rvOER+KLhpWsIWT50CgYB8fTG90h5JrkKvQB1gLVE+ + + EjOdKIleHlFd8uWI9qBCPjdaTJkgEpL276rPNPGBAFrnvDM/xKlit1RAxywGFUj8 + + lujyJEdJp612QvNiyY6Loib9w/UglIcKxjTBhnG4VPLWM3DjHhtzSZ75/DsEa1hk + + IXZlkzNg3oNz+djLIVn0MQKBgGUfTZJCHGYlmPB/tgdsboFBn1mVUotTv5PXFU7a + + L63Og7XvZ9CzdGGyYuZ7hZmhD0eYpP1zcNeFwAm/b6H+OQL4Y/0NtBngcShS78b8 + + NpnuImLtdgGWPk4f2DmxNlDAbMXNX/PfHTYBHwrjgvGF/rlFV+ewJzS8jB5xf85R + + gMsFAoGAB1V5eES3Ud9t38YeEcc4zi7amA7oiLZgAlMStROTxBripUuw5aFvzila + + S09EunEasURmykYqdzUoMAsIBlwiFS1Ky1pliR/PDgkX2YVM+S15bisIrPu5DmGF + + bbYgYCgKvkxL0p7hRTeu0+czw29He/T0pNfvr6/+4nAL47oCju8= + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 1024 + seed: YQ== + zfec: + required: 1 + segmentSize: 131072 + total: 1 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:CHK:e5tpbcmalcq6nn2zhd3qvg4chy:emzsvv2xnkhhrj2oatds5hf2cney25awi56ybeq4ofiyehzyakua:1:1:4096 + format: + kind: chk + params: null + sample: + length: 4096 + seed: Yw== + zfec: + required: 1 + segmentSize: 131072 + total: 1 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:SSK:s2ort62uw74mbnvhqxgtclusli:xm4pmnye4mht7hoh5vcpt2zknqdvogujdo2ygmj2fb3chllp2ecq + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEAughxLigyfdYXPUaFIYRop4H33/VxoWOha93f29U3uFfjeij4 + + 
GPTARKNbLaqvANxHkscAPvKSTCJdECqzdg05AowyX0dbDPrBDirDVb2FF3UJ43oL + + cFIrDSpAL/+Yr14mVfvPtxomdw/P3rRocY9PtiLbSLGF8u7eliBiUsITbpQUy8bw + + 2s25/uBlkWXaflolma67NyVS83tugO13PN1W0pBqRRhmoJtWm6Eg6CRJzP/PeKmO + + OhZ+ybKTGWl3Z8Wu6w5TmEDrp4zHRZ3SYyGXrRppGhHhVQY8awBVCQzDf8hoUt3b + + y4ITWSIS3DqiI3n3O7JaPFKMTeFknv7hmgWLhwIDAQABAoIBAC3RtXqeWO1IZTno + + 3BJox9P1/WSyUbXj0Za9M400A0DKmNL2M0EGSzK8n1DsmKTYHGLI08UtPp402oII + + WmIpmJCJkkCIVSMpwZHpM2ozPwwLfg+CeD3GZycbcGrCA8uxnzSPTFY3QSETCmo6 + + Of06RHJCPIs3ehve/ICyYjUq6tlRgKtLSWT/YcV/rF0HcLz2JOa2gmGHPlazLuYR + + 4xKm2xWzLAHTKI1JJi2C6eNla4yIKEuXtTK4qfSFCnL/L9UZuzR/jL6Eq4iRNcKW + + 07nRRWppySr3G7nesoFgNxDvqDB0QpE5oTuLbSSiZxe+55tvWHfgECeqedpyN7Vt + + b/CxNi0CgYEA7t4Ks+xEX+q8KIhEVsK3lncV1o1Rb/GLAmSqCqir5Aux9j6+WING + + emwlv9rGQBgRXxCMDaYD3eBXXFgKCIKcu0OgA4racaEZ9zySSpqChyRJKNOick8c + + WXsnsTv6yQtN5r9iT/QuTpDXawc0v6O3fwAj7U3itjZdeYuakfb84nUCgYEAx2BI + + cWxKvcRYGhn0ClwjLw0ZzLj2vZpwKbu+kJiCFVyxHWgqAo/HtYBDGDAKLi9Xbu2C + + 78Gdv1Xmgq7q9kLr91qH6hbIXr4fzC06R6kTjPmXwhtfKJcgagt8fNztKnf15wwF + + fK0rZQ1SBtp4iYM5yr1L5LdkwAImry/jtOAdfosCgYBa7+F3Of2V3pGfhLEvrpWi + + DNgdhFN6oKRhVt19jVVTTjiEHMLug5uzQ7TjY3CSOhdy76PCFm/mamAX4dSABOmS + + SV6DursLA2AVRdQT3trOhDvt9RQlHIUYc9BaoxEItOsFa9sLwVRXyMCaGHY2dyeG + + jBEhaMNkxzGy8jj4VOVVeQKBgGLsReVpCsiPU/tXZzuArcBZqrRmDZ7TstUyHwJV + + eS2qOQLTPQzaVAedJS0qINL7kFEsrWvSUDewIlgy+8fGOpgXJhqixTYk9Vf0FNeb + + b2TiLkcUF6nnGiEjo9e8MjyBGtRRpaNPtJlF+64E1gu8vX73X2GTEP0n1BPWGhAT + + pu6zAoGBALYVsjIE+zj8c91ybrbNGLZbTg2KKr8fO0/07C2CvWH6HHN+lpyRTF9A + + QTGHviaFWoSLjgjmty9o0BMlvUtB1NNb2t2D1pNX15fWVYh3ALeFO9T7iR762Pdn + + KjfScNMgtEqigVRUEclyOlV+bH5S3LsMK4D7ripJSRvcQfsJ3LO1 + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 4096 + seed: Yw== + zfec: + required: 1 + segmentSize: 131072 + total: 1 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:MDMF:ftamrjwrnnk62rlturhbjwd7uu:b6qgy35eipo6kkzqem45uzxfl7prx25w7nevtqv76akv52y6fg5a + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN 
RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEAmQmtP2klQmq9icuQMvprLk5aXgk2Nv5lPWs+6Qf9R9Tft3YB + + CboH0scSgfb1+dYfuH8FHhuKD341yVBj2uGqhYPZDiGodMoAnGp4ZElvFc4qLDvP + + Fv0jixXMKYykT0cxwvWlQjCu7bQwUABdh32ZugvGcg6Kl2ubE2HUPRfPNSOEQGx9 + + DGHOogph8DNL0dm9xha1FggJLqFN8BnGk0nIvxA58oXG3xV8hsBJF5UV0BZcurFw + + 98tohmKmAUObYCtUl67nQixFQHl2qUuxQo05EMwRUcjRsnH/ywUjzu1sgDs0Ee0k + + OlMKPtT6go6Z+1iw7MEeQ0IS+rrRZmvwFhUCJQIDAQABAoIBAAnheBAUsn/RSHTc + + ccOjgMa37wRh7+ApGKbt2l1NU2sPMXU05z2WMenH9J1I2/ofew4nFVDWUlmhunNJ + + qh4jB9F1q86fxF1AqugZGmDPOkmGysOZsPXjricivHsfm37gMomgi+T0I7cxNRVV + + O19YxVN1GIDws2iyB+HTz9I0oVTli+BIQAaEdOOO7cM1AJhiUilqXcmCXBxoAfoo + + yEfchETAU25JgsirfqK+7QIbpcVB+00U9PKp1AM2ievPtXa6pmnuKZcSMjyEB9cj + + sNMuc/A+DnkmqpHHLOXhQCDl/ETLtcnGxFhp6rTeN0TYf4cEEEi+fFcvRV4dPMqs + + ttF2PsECgYEA0brJPSqQUUyyKWbdLJ7LpvWx0w3j8SRb2mj5Wjzsv2rNCIrCnwZ3 + + E5ABYrvDGG2w8Mvs2Rx0v5jvvms4vB/QmxC/bjqOXaM7DY7+6oezVLhGgsg9sMN4 + + 1neTcCmBKdQTTjzvd3QzFm/j6MkRITL7E6gLIspqRHY4+kCmAQo1R6kCgYEAus0F + + cbqVoSYqM1M+GEI+8OAsTV4+xxVvx19FYgNjcRJ7aFfvTAPMtxOHxzLXcTWPS3rp + + vTBQz5XPXLw4iKC0mbVyLxqBjS7AtU22vGRWiXi1Y9THM/G7P4bHxiaGcoBvX+nu + + VzHCrDVVksT/2V8osreyaFp+tz3i1EoF5NTLRB0CgYACe7u2RbK/w7C4XMdxp8+x + + gmdAoIF6cXvE8klBkEcdXR5gY4Q6bdErIiFiEecVevcFYuTDDVs0iZMNJifd0mKd + + 82zQ2VCmOzCP0ImkLUcqUaREGCri1O2xXGkaguNMo343BvGu8GlKcri4IOlbA0eF + + zA+Vsd/gP6YdEHbmvEA1QQKBgQCkzJ+S+ENtylfMtBHCIR+aLounLhBAXx8gS2LQ + + 16BxbxbEtV9+NpPyqB5PlQEQ9WmX87YmnGuO1+H7NGrDztPGD5fPCplkzuxgh1FB + + 31uhKIcOrfeYUhkaMHQq0m4msjyP57fH4TSX7O2z8Gyvfw5OrVWOTtggHU7ybuPI + + QBub2QKBgDst8HtCy1kYdVFuDyG/CjReI6w3/NIeril7T3MixYGAC3zxfwFvq9aE + + BGMEWmoIg2Vq3NGLE1qHrwCrVvfiHQOJDC1XBiBzH2sijIHO9xnWwWLSsOspMXMj + + fX32WOapE8++CmasDj9ABJlSHbQkUn+iHT4ncBhce52sp3xE0kvf + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 4096 + seed: Yw== + zfec: + required: 1 + segmentSize: 131072 + total: 1 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: 
URI:CHK:ok3slnjd3e56za3iot74audhl4:2mjvrb455yldouoxwzbx4sbsysowipqje6ifa4pmbzqahj5j4mnq:1:1:131071 + format: + kind: chk + params: null + sample: + length: 131071 + seed: LCa0a2j/xo/5m0U8HTBBNBNCLXBkg7+g+YpeiGJm564= + zfec: + required: 1 + segmentSize: 131072 + total: 1 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:SSK:gayk76ptouc275r3kxcb73gpxu:lsdwqdqlno2gkdldk6s4wlvbz6d7qviuotfg77t6fquhkx67go7a + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEA2d5GQ9jkZXW0mqMtvm4qVD5nq5PR8Ni6DPhNk9+qpvtUTz6S + + 7QBFOkYz4Sbuzm7HYk40JWPDpTaBBUi2tB6sp/HUUQetqUeh4BSZz1VwB4pkcOrS + + vr4pLeZyos65U/lB2RMUwcDKEqtbDGMKZjBY2ElYUpQWsPsVB94KH0/nTVFRJFVF + + DrsuTdqwRSGoBXfKuL4MWWJQY7CUJ17ds6ixizAnRVAldHCWQvrqWIOaekLi2LAC + + OFP6w+g+fQKzNj64mH9Z4sOHLHevrU19hITv2HyjvhGVV+YACYWHXFEchiLr8p08 + + CAWktc8xobZrAzeqVBV405TJP6FooROpwidqCwIDAQABAoIBAGBGZXV45GDjdUY6 + + Yq36n7DvjL9YyJauJvUxSpLUbWRxvq1wANxDWNQqDqXpnvDRKkGKPmfhYKTi43vS + + yI3q2jAy4LhX9MlP0rwjTl69Kg7i/ISbeDp67NaQNcs1H9d4V71Vvb/q8kDywP7F + + y2Qh7DjTnHiNYUOfCImb+IwisptKyUeER4b4kqgREhwBkNm3O7EcdDn0OK0zBwTr + + Je8dnJ0tLRwszDUe+cSiAUY5fBM/UMq+Z5IJikk55FkLkvr+oam8Eq0s0oR4z4hm + + NHldRoFu20npfPM9SG7acViLWZIgD3YYXbF9LkBlk9X9ajNtSwE3QMtNHa8zko99 + + M7oKqEECgYEA+h94ts4x03c7cpetfvA+KM6O2NbWUmKo9ZQkO7q0orTo9FEB75nn + + NXZW//f6OZu3TZr+3yGA2F52rbtO97Rd8yIncR1ai6nd9THR9d9AGEmBZBM6tjvf + + dVypg702tUGLU8NvvmFkkyUh9PD1paES33dr7LgbTH2KXxcaDts3Y0ECgYEA3vzJ + + ORflPds+Px9Fb3vO6ZjAoLToGrhiR/gna8vm273Otl/WAXiDLHah+wCdlUgmGEm1 + + KkfckVuU2SGzfBY6YbQwEMAlZt1k0Z5DrYSydX6JWkJMqklJa7jZ+rGO/PsBatOo + + fLy+G+ysFfsqzPgbqEN6ZqvIQeNLNNgABU+11ksCgYBB+9VznFfGqpizNVJev3AW + + gc9rYtmtaDucdZVNcIbAuasO8OPq2pYFI4/1/Ow1EGA+B9qe8I62Bc2XLWe6rwlt + + 35+6Fn1RhOF6EseJ7nhRL+sDhJMjig38PxK1H1B0ZrMjyNYMylKnAs+/d2XGaQS6 + + kR2WmEcTWbcMOOL18lzdQQKBgQCD/sr2Wui+Juu/3bjydy1SJbPQ7YV/W6oBxClB + + rB9p7/9PAYfisv2i8k6MEB834M98DRWKg6NTAA0qQsLGLzo737ecEsGRFHi7hJ23 + + 
NxeUaWTQ4vIS0vL9Kx0NQtHLeqGqJMRVojw+t2heUqFRV4S3o8nuwLz4E53PGBVb + + D4Yp5QKBgQDxVnCHap5zYzzShVi00ygZcT+rBtldcrsj8Kjm4KE6qafB5M6R3gM3 + + bFbG9DSE4hnuP0aJMrLEfmuU7X/nxuzU1JRXIzgFXOS2hoT33LbZBkYZrk/m9AcP + + QYt8oUu1Ad3WLADavwwU2ZeQiHERomK8b5FyfhG0x4cu5zdWiel//g== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 131071 + seed: LCa0a2j/xo/5m0U8HTBBNBNCLXBkg7+g+YpeiGJm564= + zfec: + required: 1 + segmentSize: 131072 + total: 1 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:MDMF:ko4vxyhbyhfzkmucaou6leumvq:ysb7buuikg7nktiisrnm7je7nmdcqr5qxnfkzizhc565o4j2x5nq + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEAneiyaICnfRuLwEd0VV/xNw+mB0KBEIJvVhmfYK43vq6VLy2w + + qPOLPCx+bGv28IRU/0CKkFEN680Ww1Bdfv3DLUHisid3ISUMweEqQW3tKabOHgqZ + + ieOQIEFJ0dLFe6QnoMeiE+9bGdgtXi3bWzhV++1W+2uAdcjFGLpjHLozVstEGP7e + + +So1FQt02jTRp2GGgfOw6o4CKcwGuXmoud2dsl9j1dm2N9Ov7gJdz29I802wjmGI + + DZDSiWtgN90TU5480UENMqgMJ7jgLGQ3yJdQZDeDopQZ72RWpoXhuJWW7hy1lXwa + + vga1vaxpy2XLfVS3fVsQycWAKs87g50Z35HoMQIDAQABAoIBAEiAzmHTKJP18N3R + + MSX+DlgUtEvDClWVPh+PVjFi7K99o3vtA58fa7+uQkHv9IsMh6ZHcRfAT58EJL57 + + COLFCzedd8QLANTUGR5wDyHJokosj5kVjtfUB5n3wDg6CXiyr4tP/igfD3o3WuVS + + MtSYckpg+D4qZuoty+mFsvo+uFHR++gzkeS51+wvk+Psh5kVkTJ9eWD7WdNJbHP3 + + QKtrSWN5zqJYkSI6Jn7Ur1olKzFpT6QkIwHIAmyY9brwBkILdvPBtSTVRLX3sJaT + + XaX5zd3jlmGgvzBovGJc64jlSJWl5lhb0vA09wq6tVUTqlch0yBWsnIHxrZbT2bm + + /J3pRk0CgYEA0LlCapyHjXUD5GJ6wzXiY2EqzTCNhDaJHTpWrLCAa6jkZ2h4du9l + + /u6uiVZSj7lIxy1UD/Skuiz5xWe0j+Oo2P5XnbzAA3mr5lEicqbo0+oP59mxfWJO + + KQ7iiIikcf7s3V8+H07Tvgwc6XAQ9rSAWFxU/0zmSDskTRSFWcxT878CgYEAwaz4 + + 0IwB7NELsJiqEyjotGkeVxw7bN7F6/XjIU+TuMouyH768KqnatYBPvAPXSUNZtX0 + + 2Nqp/WvOGrL6wh7GCX0zj/Ro6OkoVwmDQlNPhsyl+h9yvTXO/SZOhUzlBoU/PkZE + + 1ATL2H/Eo43iE/9EkREZX6ydQwdlHuQaHGiBYA8CgYEAryY3EKWnrlG6YWUuZS+L + + eR+pviP3LTJiXw98ek9mhHFmsUvegtejvIjoF8FDaO3vn4xvFTCTJtPlCP1cbL57 + + CxRry6b/bisk0BHXmWRszp+El2d7ZJ8gvZ2LBU28yRhGBgINbFJGpx0dCdsLsSqI + + 
5R0eClqqh4Rxkukced1XuZsCgYBGy4eYE9WQT0nKn8NrhYSqjdI9XWCLh1Mp0ZPY + + 1VHWNnGrcF8iIf9YmimSbAXxsl2XvZXmvudsbz/DmrD2zHDvfwieEmVW3gOU7TFB + + yVpEmAID0AMNDuI+vwXszBLbs1FO3jjCl4478VhbwL1nOeRCctGnm11Q5x8bj53L + + zJeeGQKBgF82eley8ZEdCutxK1pAuP1FrLf2Xf3k43+sf4XviiONEZqn0GMUUAR5 + + 1L6QOK6DolBs/8eQh0qWAM4Jzp1ab7k/ju0I4rXZtWPH1KChdGSh3GlF17KgWxTZ + + Fp7IgXxx2lqs9WCffy7BHqa26oASZlG83r9bbVgXVtgDyi1QhEQb + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 131071 + seed: LCa0a2j/xo/5m0U8HTBBNBNCLXBkg7+g+YpeiGJm564= + zfec: + required: 1 + segmentSize: 131072 + total: 1 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:CHK:worle55uksa2uqqeebm4yxnihu:vta76jbmejt2pxx4prqa75xawpdtx42cmzzhappeetzjksym37wq:1:1:131073 + format: + kind: chk + params: null + sample: + length: 131073 + seed: /N4rLtula/QIYB+3If6bXDONEO5CnqBPrlURto+/j7k= + zfec: + required: 1 + segmentSize: 131072 + total: 1 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:SSK:p2jakp7rmlimr7kdcyv7pmqk24:scsdwkdc7jmwgvz5cpttafschwome2dv444rkq4ra72wwth65tra + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEA6MkCkLlqIfmB8wLFgZ7c4ValETX48Zf4zJWOAFisLzvSOUJO + + lPHTSZYLA6zqMqZWYrvrIncrm1VkqXkQ00lKJih1GPXGXfTHZub8ZU6dUHySe59I + + r1QBOdwZSLczBVAx6B2LegL14IC4XW6cgq7aJrm1NVCAgTVaE7TgeKBgZtCOfj1D + + 28IvBHxp0et6hJXMtNle5nbpg9VcHWWPcc/rbZeK+tTZkH8WAZXkyCJsDYjidr/A + + hSGdgZVyoxdCB3lRBAdZKaojxbDMCKbT852KzlkAbvQTk9XaANbxaVknsNCzSfRi + + 0hIyFgRGV8D6pYzvPFTFAzLvBHNO4M2oxXDzewIDAQABAoIBAER4o72IHFTD/FpL + + jWbUIKpTfxgx5PjDN9aNgwhNDNCT6wEWpOCgxQJXFQROv1CIps5B8ibgIL5+/q3u + + w2kynJHewprF6ERItMJq3QC7gABls/yS3KFdt0KaoAFIicRdU2CwGA+agVI46oHp + + ADZcUiSj6U42UKYw2D8FrCUvH0v+Kt6I8W4iIeuy6P2TFmp5/M/AdzVxz9tZPeHY + + s74aKp9yBnQJfoJCgLt7RF8gBeGLPyJVPesf5gt5GKHHPryujWOEnOZkesHl6uyW + + iHI+x9k7yrypYwQVIH1wxklUFMzqmKF2qyFcgLF9TIAsDZT+0qJhc80nWeeIQ4Yr + + Z3lL6AkCgYEA8oqywuSnDHkVJHxhH33XF4+VsjtHnPjh2K1yPTBBiEzOdD2WyVgL + + 
ZmXXQmh4/fbc+ioHUK3GKyoqW7XK7wSLwnBDLZ5qGybKquvDFQy+3+jOYfz5Ux/v + + m/efDP6gAyLRc08hfEnnPf9u3scgW1J1klN/jk1YYW48GzRqnYvpQ5cCgYEA9bO4 + + MAx6vMt7ZO8tNZGs/Pe5KfDaDNlyzMTKAzBpr0thHtbY3KhBtpE9eWZBvTeNIORE + + hmmZjiiGRcEQWDfbxiPeTLsUdbK+YmrkBi3PFrsMzcvDsmVsFag0MNQ95uiaDGmW + + xhMzprSQQIPpOlDQo17x19e+EQjVbuWSZF1T+70CgYAPS6nsWokiYzxvGZzyZHg9 + + FyQEonJottVfWcjbjQCE/PsEH6IzvmaxpXiGypnewkO7Tw05DEx0CmuzbuqGWk4K + + DBRgex4L6k1brBSYbj5XVpI1YcPDdz8gIeSY6DHlILv+vp3I/cwkf8hZKkujFrct + + bDCeI8iQFGib+plCqEEkrwKBgQCQYjwqgCrgJvMsLSD0CdcOEMTO0KpSQrYjfsD3 + + fsucJz+7T6XAhV+YWWE2pdCb0LkuSvW5xvRlhYriEsH4FVsg5JMNpCIxyAf47bdx + + qhm30dOEW+l0PAV39JA24YI+3xEnmiTv4PjJTfI2901m53azimez3yPh/r7tnBqa + + v/1KQQKBgQDUdKGfCOGGLV8Mzk41foCzT31r72bO6niNyqlGrx0gn9GHMN5/+4oF + + BJExrdUPi5w6FsbmYXqVUPfmKQER6QBitALbH1ASMCg+scoYZq5ic4J1gU7RWB1r + + F1J+puZLYmg2Yfcy6l8mHjEez3jNCzkZ72xFRmo++UB5VGkYXcYqJA== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 131073 + seed: /N4rLtula/QIYB+3If6bXDONEO5CnqBPrlURto+/j7k= + zfec: + required: 1 + segmentSize: 131072 + total: 1 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:MDMF:rn2dx65oniewqh2lzqfcosqr4y:op6fupeh5cchglxtz2qmxukfvpkltcciv4emt65j56irkloppijq + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEArrgsDZFLXR+LmZPnzNSaqgxy6pv8euDq8iTLTUmF2HWiBc97 + + MH2Tl1V41pSVY5a2bOoRN38LPs2Z0bP6dKDgaKNayV8ykWvhzlJpRMLP+K7XYATB + + qyVE7n34TytxWdHM+gHmlf4laTbQ56Ts8yZ1w4eINMaa/0OAFgbXBJZTElrtYlAA + + 3ljAU6HDDPVoOsXtOsNXTsZbMssadi/5ESEn/EzQw3muAvDxLV6adEXk4/1HiY2j + + 9EFLXUbKFWpfEdw13Goj385ok7/iLNR3ExCNbc6RefNeZYIY4uJGkWAKV68HyJUW + + HCag1gn0uoiEfc/1ichkvnJUytj0iUmK8q186wIDAQABAoIBAERJOv56gOf+gkMd + + 6YTGu0Qm5WsIWB92dJz7AHJOf/9fyllXBCCpk9ubzkSfgduQdfAPc3crMivzk2DA + + ZOzcS5jZ74uwp0Tq0zAeSYJUWZAqVYbzlPXc9Rn5JelbY0vlkaVMxOBumhSLFg7T + + CMDnkTNAb12hGccWjKMPPU5PqlwLgrUkwqEYrnNAXUXg6YTNaulW3BYi/gKcqyJ8 + + zinOFLL3gcQ5wR3qX41YLe1fwGPplC9TFuB9xyoJSMN0PJkhEGm4GAFdL6mjJGwW + + 
1HHmEfSeHkvkAlzrDRVR/7e0xSL/nekJPY8wORLQT+fRfF+bhSCVeB5BKOFBIkIC + + PVdGNTECgYEAt8vEqMi6f+rm+atxOEScOpPyFoU6RDYIgmM/hGjiXp/UCX743fIj + + pujXFMbx9jv4LnnEjsd2wcx4SsncA/x9Uezk70+Y+RzpqzqiG7YNDDIQU6QJHh4p + + Bs4v6XOIdhzeWHaNav5ydb361Sm6Ppk0wxfT49gnxC2PBPd97/10++cCgYEA81uU + + 9gocgdBignouVx99rL9OaQ4orsSvG3NY30C4LNYabDM3qMpYdZY6NjR6J1lDZyFD + + X7LKlK5yKOUfQYs1qFIFelm/IT0ey1pEa/JtRLg23c7bGMqAh27WLHxyC86BwkiV + + Z6srKR/rwWVV6rtersvMzFdREetn7J6ksW4y9l0CgYEArlEfwukmIR5cVJ2qszA3 + + cENTDtLq8TjCF1AkNOP5Fr/Frf/z7ySxdaNOIpGRePVlrEanCrfZRXM0/9G1zz4Q + + abwhYWt/7Xzjjhf9GgUvGMr+uyVP1HXMeXzi6io+Wa+FnidKdxi+3DcECFocHzTV + + Wtdliqo/BQwkohRNKGmeIy8CgYAoNCoQ16zL+Ww82AiN1iMCBfzjODaaYN347/5v + + q5aBucFVvMRmOz+P9YiaaeMAWyvaftFNnxD+rS8o+GlIf8IWk65Z/zenOxy6Rahm + + GP/aSYCu9jyWBOZk+Xeik9CXiL9BJJKiNNIFkkN1iNM/20KSKBMmcwpupnBd0/ur + + YGRE9QKBgQC2bvTY+NYnwB5fVkyswl/Luy2B20Z2RCBWXazFfVQCFGQ+s5cMEUsU + + rYVRM93RDxod6fMgXRPKYs6phgByLVi1bMRqJvFTx9/9/dtkjRlX0S8WETLHEyhY + + VHf/xJdGHph+8wBa8qt7vVoyX44ylAVWgXvmf2PTL6qvLKu6OLbUmg== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 131073 + seed: /N4rLtula/QIYB+3If6bXDONEO5CnqBPrlURto+/j7k= + zfec: + required: 1 + segmentSize: 131072 + total: 1 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:CHK:6kccrgbtmmprqe4jcfi7vf6v74:wpivl2evi25yfl4tzbbj6vp6nzk4vxl6lbongkac7vl3escvopsa:1:1:2097151 + format: + kind: chk + params: null + sample: + length: 2097151 + seed: uqWglk0zIPvAxqkiFARTyFE+okq4/QV3A0gEqWckgJY= + zfec: + required: 1 + segmentSize: 131072 + total: 1 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:SSK:k2opn2p3sjpp66iipfj5x7ylka:dcrrkpbqjs4vv4r6hrfnftza47zkhsrfbmkavwx5gadbphm4gyoq + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEAwoi+YH41KOcNqUaRWqZGXqPyG8Zs6a8fjTu0NzfiiPPXiBhe + + fCd5fHKHYXTr6W/YCqE4c1E9i2JDNtFULRrnLyjRwhnvaeWizeNN6A86aupuNN1c + + Kt0XxDPDEgR8kxCh//kIgOALQs/QxRK8WMr+NfsV5xWLDXzPrfftKWvTmRaGRuPd + + 
iuvVzFfpfFyQXHRZoSr5uxTIEiEjJWTEvWv7Qxam0mQp0FeGGhcPu8g2zDbrG5uq + + 3VEn4X6AtFwQOFkvQV9E8oFtFm9mtI3G8Zx0dVQw3OMM4/2vIrloQ2PwzymEPvx8 + + Vnec7pY5AtWTssnihLLn9ZJCpcVr78gtlTAvJQIDAQABAoIBAEUSlLZiWbHKSJmA + + SwAq5fWYtNCT5fwbiFJ3jofEuhTyr+bM8E+ZHJPJfrRl9ZYPdmBf2lFn2ThyjXcb + + YI4bbVbbYY9P8ihhtyrvuKvbLFzmHHd82csGcffigTMWkL0PPNZMsG5CHv18GJ7B + + Bkto2FaGbsJ8bcE+PeeBjp4UgS9rp1QLnFu5CFVEv9gju+MUDNWxWptBfLxlKAaZ + + Wu1yGuDa3nWY/JhP7vd/Gtum5aDqsPmI+8JCma5EjXgdJkG94bBBFPgKdkOClh+L + + 5PSnN53RwuQwbKdFq1Py3CNz3WcrB2jq+W8EaM4NQX2fSp1SRoVECuk7NqxuM5FD + + q7oH9MkCgYEA078uAEnM2YovbquX602S4vnoZTtfLhqFrisrJwZfaEj9HK6mRJAi + + xsWMK9a3ZmI9v4PnWzYiNjWr9SJ3RP1AmaCuIjncjI1iN+sJfGsGXBLYtVzRuuvy + + hPkSN+kIcM1bJ6vALz1XMPiNnJEbtQOyFLNYuITqFE1oKG8UTZK/+6sCgYEA6zCo + + ao/w462g+cK2kBSrB7J6tJJpGTs+jrN+W3p8le/OKpPI9e+8nwlnJ3TVThhAkeFN + + rUhIRQeG0LHVrIyodzJXLPZNMYbzr5EaGeTdjoFQl3GvfMoN/xX7D87F5SQ8QhWp + + oMvKEEVwtdj5vaEcjurefCCFrpokc/r1FGxGMG8CgYBTxnrjwE8c7nAvwBIeFZX8 + + 2VUZ4DCbJAEp1IiBKyNKNj52P58m10P7EqOPoKb1Cf9WK1C4pKVKf1emZ9l6YRxS + + 6+CZR/07WqC2cPZS1GEywn4c2zlbVAiilYygtEETqBvdiTVDO2ioxl34yOyGZIzr + + Zb2/W07lc263OKN2wY3VewKBgQCNq2i3j+8l5m/iIvT/g0Omxk79uHfQeAxtvxdt + + GTI0yxfgM9dItdlj3zEg5lKa0ScL+LBmofTOiAMgcQ7p+mx8KHm0nsTPAaCGcBxN + + 3rvK3IBkSVnRDJEzx9TMp9wy9AnMMOpV7ovQE1QaZhHBtWvTdwz/rkN4cmdk6ZV5 + + cOMyxwKBgQC/EScZHM1YPWvUatfF9oQrzsYBYTXk14RQNvuW6MAAK8C3E2REnypm + + iZDeMOyYzxmJ6r3sT0YIOmRdsaSWof7Rt7x9aeMmG69KDJL0jUsUhE7aK7W2H2yB + + Ax8MWdGte4MuzMRXpjZh6R3P1ZZ7xI9/O8SeKVnZB5u8G8zxvY1XUQ== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 2097151 + seed: uqWglk0zIPvAxqkiFARTyFE+okq4/QV3A0gEqWckgJY= + zfec: + required: 1 + segmentSize: 131072 + total: 1 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:MDMF:2wd77rwew4gfhioljcisf6jr3y:4nzqf6vq7w3f5pw5yv5reh4isfsxmjpkd5dvc3swrcwm7zz7m24a + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEArEKiDSb3JVVLiePghhefdt0bfKlhe0LHgxxnpBjkRK9SNScI + + 
iwNB+PdJZHJcUSCrGv1l4dpEEhXpykuhb9ckeySFYh93rtS2c9Z3pKYJ74SzkTIh + + T3x2B99H8uIeKEtbs+3wt4N5gwedYIfhxfhIC+AZcMD+BF7JyAermj9p81kqfRgv + + n00kRQpozitb8Vs0upKX+T7n2F+1JDrisJjTT3yDxnzwQ9fCGXa7rBOb80tNbrqV + + M+LpObdZ9Z6TKZbTKoVK5KFz5zSCiIx5usnTJ7MeFKzZQbPArsH+YkcAaSA0ctjH + + yIIeTXuCA+oyO1ep1YLRZ8YDqW3/GBSQ1sbqLwIDAQABAoIBAAEOKXL+LZ5vbtaJ + + 7Fq181allSEG4p+AMbu2ZVYmhuN61XyY4B8F5pYRYcQ5RSKEGjhAnCqtjSQ1OSjx + + nKwUO9d7SkvnGHMcP3/nLPV3xPDQNc9qaLTVdN2oewbcZsXlbErmZsahChII7mhk + + fLgbRMm51j4QGJ5LPF462czInawNebeJxMfuX1ycZsCd5b/hCxhhn82CnA1LeGWQ + + dl+87lyCJvTb9gTfKxzKI6X94N46zYz86f6KgaMDPazZz1cbuRhMJsRpqFWltYEM + + UneIEKEIm/56RtLnlAKZOIAI5AJGpDoMWYzxzlD3PwXyFfP6PigB0SDl/nAGA2Gt + + wdoSzC0CgYEA2ajpn6JHoRnYfgi1gXvkWnU6jZMfYNeGO8kjSvTRok240eVMiMR2 + + l9dOe7qRglNuksW6gCv5XlF4RtFJnugf98PQ7Hq0yGvbdkA9P4+jnwuJZ13G6lyy + + 75cFDXpx34SMSfWMnZvHO7cQuPe6bTSj16LGFiWWVIWjSpJo6++zwOUCgYEAypp8 + + yO1NYBxcuVhVa6hJR6s4ri3AKkrMR+tIFyClBBgSdkGFSK4f0fnyaU3xxWQjAZFu + + ZlrNDj4VMQ0RkIWl6RjCWt2yzv5ZRS88Ft/kG9FpI18N6QpdpKqPSgILTCThYj0r + + Q89pbHTnIEcYj77Yt5P9XMFPq02WKbhFm3JMEYMCgYAgaHQQnrsHk6+WZHmSA/5m + + MZo8RjGf27dS44nruTQVdHkWV4vjuUznItm+tnK/8ug91k8EkoeYsYy1Dqhljq5j + + 17Yd389XICgXHU2BT0PPhIo6582cS50g612HOjoGS9gPkw4S0YUCsSk+QTRy2imj + + C8tutL4Da7p6ef5BUvlyuQKBgQChJoBa0WXaLLUUN8658wFWoDpVUM6o1RXnnp2Y + + x755oywMI9GAHf/xZH4MhJLqDtxJQwQtJcw0p+zzNxHhgmyVG7x7yhuPyX/4J7oD + + 99HYzphyKglGc5hIgeG3XCjgR/V9zmm7Zh7UxaeRZPuEWqjGXFAKlzhnaS8nwAqd + + uiwHQQKBgQDCqKXQa8ivL9IDscqW1UNK3HPf3JU9GNy1OkcIhz9HotoooXB4ZASS + + iV6B4tKev2scNA4KKU5KvY0a893ghsH9yEDeN5MJP/cGCceh9+IhYMbDqmRmIv3W + + 1uWtjpmyRGk/rFBDf6jxWcXKEVXAvXq8IQS8AJ3JaatJzwuasiIDJg== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 2097151 + seed: uqWglk0zIPvAxqkiFARTyFE+okq4/QV3A0gEqWckgJY= + zfec: + required: 1 + segmentSize: 131072 + total: 1 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:CHK:ofgig3loex6pev2eymmvohv7wq:yasbfedqnueaajdcavuba7kxbdqzn7e6zx3y6cbvucgx433vlzcq:1:1:2097153 + format: 
+ kind: chk + params: null + sample: + length: 2097153 + seed: BTBX/ampNfLU+ox7xipBGiaSbgC0kcB8Gy7BkJB4oKI= + zfec: + required: 1 + segmentSize: 131072 + total: 1 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:SSK:eojgidosziluemkir3zkxtkwjq:yvdxyopvbqryaeslumwlc7x2xuw2y2wcqenys4rws5xslaox5nra + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEAo/r4ebWzsWZmdDKCYXv+1Fh1/0xyPWR8do4BgQjDG5vsJFX7 + + gLwo+/v5JuvE3k+aTMDp/VNFtgvSd4t8WT1QTYJvVQakqcIL6mZVVg7ZBc8JCvHm + + D3wtc2my4nqN1VLw2iUduLmFHY9SPMxRVHafVWNaD8MAhDpEhA2Daz442uG73/70 + + 6x2LeOqscoTK7/NREfUbeTK9Xz5mFMnyGSWppM22+mDf9/3nCTTbfq8WLafMcLW/ + + 4X74uJMRE4uYzUzwiC0HyDZnsNAuU4ggjzljTryCK6emynSNHGAX1dkBccvd2Q0w + + g2nNL5rbsonjbaYHZ6BLWRUGbh0PMm9vZcL0CQIDAQABAoIBAAJMkyYKtgalWcek + + iycwgL/LrzjZgqsJcQTNBCih3bFyB9cxFO4GxVjWHTXOXbPjwBU2Kbm1bI9rPkPS + + kvdh2JHDo8m4hn/CUp4yWd0zZ1fRYa4zes8qBa2d9GYAQ6OTboDSfyaPRFZoI2Yq + + k7PYV27QLiu1J1lTQ1FqKpbq+Atak+OEn6n/y+1aEztaF7G8o37W+RDJUUhgv4+P + + SrOVXVbP72XCW39JPUBUPu5Zb4kqmWPZwh32nFZhWgv4dsjC4y97G7fluLhBu1F9 + + rDaX9OC+jQkgOq0znYAYWpaQQXqTJcluCOuk0RZHbT0ZOLdCxUl2Ar0CCZmDLXJH + + +3++gJECgYEAyMrvmQRbCZBJ2934mksJ+i9sYWBoh0E8yPFp7f7MFZM1x+PLGINW + + Q3qKLjzpFThlAHVCu5cHcWnnqCbceE4aaDUjJSTEn8MTDsvTleF138ARl5iZUCGw + + EA3qk95XyLz8QzOAY29i3SGdoCF5w+RciNDMq6eDfvOgtpUakHG63XkCgYEA0RDy + + eRXjpZAr6whs1Z+zDUU8zLDeh7Fpp1xSDElqR/79del8LkoRQNf5e75qRXcw4YSn + + cAxGewQ68d80KGi43j1vT3cyyM4qdA74AslMR8avtr14DZ2B4ZucgOPxEXh+Sn4J + + y6uXA88FezhW56WRo5QhwDcUwWD8cdbq2sffdxECgYEAuDxkTc2OTp8i9eec5M23 + + cOS0mECiiIAWavX/0Uz6K02wiiF8R5iyvSWhxj1DTKI7Iol1WrSAMtMOqN5HaueX + + GUiwX7N16nVOdkQVdy+xPUV5ntR4z+qpJoHuXBEmEN0+xdZqllIsr2PZwoUUueY9 + + 9ZdIRLRmVkTqkL0M674+zFkCgYBInxdrAjSCdVMcIGAV0JSUvoFBJQsXnYPy07nY + + dMQLYELQ65rsRWQ63wrVdi5aFO4Q2FCFNm5hGvYSwJLQmpfk8vgwZVppsFvE1tJF + + vYDWyeiNlMEYEadlI/W4O/WoOO+9lox2iBM8CnR/+fEOJFAzvyf+KeP2zGbEcIBC + + IP250QKBgQCwFmCfg4/EBvU9lQZJkGZOdEig5Y5+pqOSZLkOYxTb8b1pbDF3hfZP + + 
CjfKpC+u6dUVLC/glR+W0csPsAPVSftm0cHNIHVLaHExeeS6cFPJ37Tz4TsOWoxp + + z6hEblbGV9C0pyhAgu1dAJAPRBpzzIZax5K6EPIGqFhEkLUUL9VocA== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 2097153 + seed: BTBX/ampNfLU+ox7xipBGiaSbgC0kcB8Gy7BkJB4oKI= + zfec: + required: 1 + segmentSize: 131072 + total: 1 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:MDMF:axafhf5i3oi43qvlbi5crtcn5y:6y23ckrgvhzeudwqh7tl4lx3th2wogx64c7oxzu6q7mbai3wfuwq + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEAqaKZBqNMZdBBYFHtrwyneRRmT+bj/ZMXJXupA5LgTIkFAgeu + + lOCUknGhdmteXd5KJFpCY+/YUYdo2I61BL1EzfnCGTMyQUy76jsNGVUTewV9yFb7 + + ji34ulQ+va8O32F5b9e9zBFJ/9YEvzlEXUh5Ui54LYcJBSK81Pi4YBuwfMhjcAGF + + YRSGOV6yFNu+yVZ4X3M5BG9dEdSS9D+RWvSQvvf4it50MUUZhB42ZuxYceEQQ3VD + + /yzbeAA7VYkA4x9Ogi1vjaoM81edSpzeEL53y9uq31sJAa2TWJ7+UuvUuOHjHZa8 + + wBo0k/ggvMU5gKYf7Uk1omcLu5bjawUuqLQwmwIDAQABAoIBABJt59Igy5wEFBYy + + 0G/EIC23WaY0n3BdGpan6KTKwDOQb0rZKs5h18JRNgl3gLkR/VwVskDJPecdnvZo + + CqKJLYBzMfMq7LrLa173K2UlKQikgBXT3WCE/hDANlDFRAhgvqC3/5a8Ch4RUlK0 + + FglMZmHXSpcnpIM6UumQW/eVCoeD0GzG4RcUm0VAPbNXDqekyVZdc/AYO7FUVW7r + + ATcye36CT6AqiAP7OCOyk9LqIxJluHHqvci12Ab05gBsfR6ihwNkGH0zYDVuIRV8 + + 6OBxpPsHO6fdp5TzxKEhh5Mu61dMjhjOYE7gp8BiQSTaPtvHxcAZiNuAb6L3TtYS + + dIiqDAECgYEA3WkNFq4ADXjJ+3ClmYwiv+AX/8apD/YOwcozcZlUUmlzywJ4dVrM + + kFpo7BA6iJWeFuow99T/IgyPT935D2miFPW4uTG7JxNdFdL+QAykSggiSs3CH+UO + + TGIPJf4qFZYZjlhfzpyjfiJkL5qdT1wDabPMhVN1eC5RWHplIuzv3WECgYEAxCLh + + hlwB+mlMsnxYlzD/Vbk3L4y5y+bYjuPY61OdRTOnVknnI7WQQwSBPqK0eR4aUIRM + + XLlLCX+vvBpG+I2i/YGzkOtTqk3qwyqapipEjfTCVuWKjGXZnID7r1ESVslUgXMT + + gqjk7aId9FRfAhaXJrJSXwM/agJhF2cWLB/qs3sCgYAFtMRilj9oGXnTIhcSevsW + + SNc1f1AyGhxNQEHNJq92pEMYs0qZc6qb+ciEdPKdPIXjf0udx35/ySUUYNsfW5CS + + y7ZkB3UUT7pxaouk6O5+/fCsTts26TdSHqDXUNKS1dh4w6xMbdsE0CwW0fxF5FAu + + NDUMJpd7bm4oQpdCrCqOYQKBgQC6mfD0km6zXiFBInpqhYw3c3pke798lYjtESsc + + YLWc1BLdTnxghenVSODpxYRsQs6IUgYQpZ0VUWzRSjLBYId0JkS/mJRFz9GNkugF + + 
NOt6HyYR5FrXTkMl2bUaS8hl1y/V5LToN2VuDDHxdZ/abcGqaDdj+8QGSHM0/7eh + + jVAPJwKBgGbuMV/ccKzDXzlYhj9ek20gBfq/kdeSRY/BK1oIWhF0wb4G/S0BQSGQ + + AJs6tBozgrN1zxI1PkJ+J0u9KNtUhBUV9KCVSXv9IxzOtEOacFXmIvJL7hDYKMwe + + 52PBFl9+WDE20km3OYtlFmezu7EvBNxEnEAAXZ5VO/0pxiRCLoq2 + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 2097153 + seed: BTBX/ampNfLU+ox7xipBGiaSbgC0kcB8Gy7BkJB4oKI= + zfec: + required: 1 + segmentSize: 131072 + total: 1 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:CHK:gsxbfakyyvqrv4sqvzftmoq5sy:4hinuje6rhm4myffxq6xwaocimd57c6hfutcbqql7pgd3rdhdnta:1:1:4194304 + format: + kind: chk + params: null + sample: + length: 4194304 + seed: 2NVWOXvtwcAFf0J/g0fDC+ypQbnCU7awZoCxcnhpkVM= + zfec: + required: 1 + segmentSize: 131072 + total: 1 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:SSK:pnpjd2iuvxpslyhy7xzlbnl76q:5wjat7x422zdadbomr45uwbsurpj6ezasplybg6b2zkl3avs2sfq + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpQIBAAKCAQEAmx/L+VzqYjeGT9QCeDb6DlBzSwGM5dZ8hkR+1JeURf5GCdfo + + URGstMbQQWl9RvHNywFYBoxXWD2Xzk/OVAl9cTKtagDGT8MfR0X0AfXQ7CfzZeV5 + + 3ThNsF0X2qd1DamOiSIGm+RP8NaqginLYVqNozMk47iN2CkSnl8r3XpatrIvBDiW + + iY2o2wWkfYCcDAbSb0jVY23y94t6JApqPSKNQ4UA7BI+0oWt3TI7ucA379drHOO3 + + 7d4G3kVwWxO9zCi/4kV4Q90QkBosqTdC2rBR/Y83FMaM2hT1a0iypnz+0wGGHkD8 + + PQ5gmHS2fwmN68vOWk6j6nv0GGB9WukH0Q9ZJwIDAQABAoIBAATz1phHKM3VRfrQ + + eE+25EmGtKzwB2aNxXtSk9YHt4Ul4WiOYG1jf2cPczX+FIYqUXlUuVG5iKRZkGTW + + 1BiHa7OB0bAxBs55xUrfkd2/fQRny3PfsqynD9vnQkwJzUk6tkFB49d1n5O/LBcX + + FyuBhO9xQjvpotBZkgLvuOp2Qzp9QxIP8RMecu/NRtJD8XzWpkeEI1cKGnPl8oNV + + H8Tw7X6YhicYI15REqQvw/KQMmgK9egOGGtAgSZu6Le/PCfDMsWWTXhP9gnjqY2N + + duRcUvoGvu/kaIM2bmp++4DnZgp9mumcAwwbVwYNzA73NrmCsswDk+ZBULRd3yCW + + R9Hrr+kCgYEA2DBW333pWiEzv5lzjmMQD20zZrbrF9trBTxByf80OCLWaSEOPvXr + + zbP8zFAAWOgJFckFsgygHfA5yIcLPLaa0TiWtelXNDNVVbgEVo/MAWKOmuujKh6O + + 3/GKEbln935UnrECDg5G8GY36fyyoNWjezfug6olZ/goiIJQOsNSCOMCgYEAt7C5 + + 
fETBtLILfHVu7IeLkUW72Q6foTzJVTkRx5cfGPLB1TgohqMqDTICURIL783ZGz/i + + Cxk042QRD7S3j3C+moLjXQMceSdWgY4sdy7OX2hscmbNSET42qyYFoUN+tLc8FUH + + jBVIVwZ1JfohDD0DdZdMMXuHp/BElGr1+6Uble0CgYEAiJPJXjoSgRE2uxW7rjmh + + PM21Sm/HB/RjoRQXUAC9QbWolRQABwCf7v2FeKIWBhTZIH017u0Q/rj0GF5QWBPY + + rNK+S8BVijHf+F5fxzvjGwDjrLWvB/30L0BOBLKIHxAdb3/OF4kngdph+p3dT8SI + + GmEUevOz3AInwU3qV6VrnxcCgYEAsNfM2xx+uG2orTuJfOHJtiRCgueXOu2AjzGQ + + Mm0FHUmo3pNgQK6Y73czz8TmBQpSd+96uWCdEEXoPwymo8vRVIOqTIOQR/tdRwEP + + Qfan7CZmMYVTIL52LmB3U0bpfI7A8geKaoyaxl2LLvKuGlArImx0iDb7FO01uQV4 + + p7n+4skCgYEAh9u+E2U4YtqeaSaL0K1TUlNPCrS+iKDbikC3WXv/anEDAik8R9Yq + + PO5kQ2AeNwcuIeCwY6F9h2TAIS0HzZTITW1AcVmoVKekqkGPsxzNV5zofZR/BTZY + + OpNYpBwJmxFwHPNnN2hcco1QVRN663FvaptxS/kgtUwFB2+hLp3Tbn4= + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 4194304 + seed: 2NVWOXvtwcAFf0J/g0fDC+ypQbnCU7awZoCxcnhpkVM= + zfec: + required: 1 + segmentSize: 131072 + total: 1 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:MDMF:ihnqgpid4zygk7utpua7n7beqi:2qppzsf7y4hdbo5fmo3ns66mvshiti3govobsriwysvy4w6dovvq + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEAyGaEmDD0/hTK7WAWTEfXO3Vkgq3vi3UYydqC1610aQ32QOnL + + GVtwlXPM1KyC7nm+MmBUx/f6nhOCLmK3Q0+4uVf6D/58u9I1f7K1hsuvZtuzoP/m + + 5OV2jEBm1R30hZUFZ468b4KuS65WNH+LaW7ylNziALdIuLNmz7WPta/UPpjOQfWj + + vq/XXLenVsHngzL6azBUr8U11vS5ombDLWZec+5Z5WEXPJTZ0ywW4o0VcVAK9D6W + + a+K32XanLNVpCpBhVTNQ4Lk7q1O7OobYoLYJBhdiFIW8jMTknoYQicHjTxmfLTz3 + + zWlCfbBC58KHogd3qVMV3JDRbYOG7I1/vD65+wIDAQABAoIBAATvcWiGHCJ9xJmf + + +iyawFQ4iecl/XZDxf6CoSJKpUlJDL2AhH31YIpttaevL/JLkUGQWcYq90MZW+Vk + + jPrdZcE6x2/JZq0BekvQzOOq9IDl/ECEzNzqQccmduHcwP7hMqbgPwfIAh7fBkR4 + + t6g7EUJVRkOaP/I8iNWotQdWczWvZoMrEgSjIi9OR2yeosrZB9EihCUml18NH2JC + + 8MDBcKmkSfFSfwYoLPwH+EoVwhotBzNl/dofMGCeG1DzObKXZSC0Fi3nXILVX/Lp + + 2wWaWehN7HggHr2SggQm/4jA6RoFAlP/qqeD8CBpJn1eryeXRd+A6qsIvK1C/miT + + aJPVr3ECgYEA0IGp73f4g/sb7kIYQmylo8cTy9LqA5tK/SwIHyc3nYtqRMkcG9Zj + + 
gLpGSvZABbPiwprawwQcxXz9TwMCY6PjUEfhevvo5K/ObQmB4p2cSWflNJayhqh9 + + TYzUgpIypjVlMmhQDRIX9X1hwOxmEt8ueIlDGNlaLb6sFKHhNWeqIIcCgYEA9gwt + + smtU2xVmoXas6Q5VFZV3X/5dfhs6Uko5XVTS2cwhzxDHn54mstbDfEqCU1G1ZDJ/ + + 3EUH2aX+RZjEU3eiAiB6TNq8SfIFqDzJpnmGUR6y2RofhjLA1cZZu6/L2fqZZ9nc + + 875lV7tENrebLu2a59g+l9NoLBbxgtsKAX0ku+0CgYEAhxCHWTU4yZ3fUO6FsnmY + + rsflnfHpXx64a9mbBTstPqOx2g8AY1P0Ls37fNGZVVhaer8/GHbQgGlf2U/Uu2DN + + fhKiED2gdosfx+gRuA9qzu47Pl6kFLCOQq8IdfBoWNxbylRiDqV62a43pXY9BNqH + + ytL3oOAjF2DdLZxTO3oEbX8CgYB3fD6M0Jaqtd/bNViO7QjgrG8GTO52GR7fa3Ak + + JNcoMXuRpOJsX08HtkfEiiJz99AQ0n1JKLTBO10ZyzA8IHKqeb8qp2acuk2I/8wl + + bgqORkwwJgF9GBSRO/vDq4FhX9MznZcxPxrT2fssX0mbJoP9ZwQuktmZ36J1G43m + + XzGBmQKBgEeliDU1sEpkXMYUaBs2mzCEe3u7DqDNOaXjvT9mJQioUNoMFMdd1JPG + + oVJGuGKJAOAWbfeniw7wraIXfSp73jlxWULKKs7kbvMWFXcJ3OclOyKtFs0RycUd + + bTkmeb5G6hZBu3y3HjYYW2MX5E+2TRPHAWm9CvnYaTbcGW8BsyBT + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 4194304 + seed: 2NVWOXvtwcAFf0J/g0fDC+ypQbnCU7awZoCxcnhpkVM= + zfec: + required: 1 + segmentSize: 131072 + total: 1 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:CHK:scwepfpkvhuc3fc5jkpmrodsfi:wui2ixaxyac76mbxtr6diw2ajky75wehk57f2kqgytddnpo4rozq:1:1:8388607 + format: + kind: chk + params: null + sample: + length: 8388607 + seed: w6uP8Tcg6K2QR905Rms8iXTlksL6OD1KOWBxTK7wxPI= + zfec: + required: 1 + segmentSize: 131072 + total: 1 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:SSK:n7a2ymjpmh6yzczjz5dzttejpm:zx5xl74ellbif73polmylwfapy4ptr26vu67x7ecwqrhf34ldqda + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEA7Qad4gnk7mpesxcpiKQfXW7+ybDNH7rdWoHlMsXSaE6r85/4 + + 5cZkBSptDFJ9+H65qlyiuUVOo2gnX30Hd2jRv9Uq/vVwgcGTlG9yTeqtLwNK6fRt + + Z5wIYWdver+rACxUrmg1uwYFcD9fqzkTtRoFTNwUmjBZKDAkJA0OU+HN8ehZ/9FV + + UJdscqxIbC6MDDwTckVMOvpNq2Cc9maInHw8On5VwEOsOaDGYK+TaxsYjmwhxWl1 + + TF17Io6wdauHSLP6ZYjv2KibGZhiiyb1NBKD9GnSuHji7oQyAWKaXGGKTU4F04jx + + 
JdtwhYBysZWHUf7FQKsVIjGiyE/8/f04FBlX5wIDAQABAoIBAGK1QTXbkfuZz6M8 + + b58IXkl+Slv7JYljvAAPnUAGMwgeTyyvf6tM8eVW1D/v8Kb2O2LPnjKSwtt5KgBx + + pJTdUZBWeUfhNb/LuiZ0PQFmzEWKVP3WPWOLDtBlj37qaA+z5nYVTt76dHRY6AH5 + + zJO8aN2nv2qw3MhIOBzNVRyoqiflorQQ7su4pXe6IrvHzmzS43zgahJby4SSmt7W + + OebkYXMsQ2Gv6jc0aJj11CcpCvutO3B85OE/GEZlywWyNKo4sMD8O9x40ByVnT2m + + IydpZOSF/z7P0RZOCKyZdDoEZ+hipc9taog8Drdu1eq+L9tAF3W7McIlLFv+Jqrz + + PrdVuaECgYEA+zG8iWbfT3DdyE/59t4OX3MJMj3SYDTQ4h1ePLc84zbuCy0Z5ajh + + mCnFSPdel+d38bUFHwh5BkFV12qMwXTqMeRIn1s6EDuccujxINETPN77Dl7FRF7k + + EE2xNfJ5Io2L42Izzy3YJVJiQqBnXjMdD1X+Z8hKbQcdjmxOZTu1accCgYEA8Y98 + + 9ODKRCocZ8BfFyHs7D7eY0Q7r/vp8iqE4xHXZGLyDDozphwmMl77Y6a1mjEEzsrx + + 6ckl0LVvq1ESt1wKoGBMxIMlrv1tkfHvgrhu6MeBe2QSDCUWfd823VujytEESyj1 + + xwBelqRcTXoZC62FlGA+uMXOGs84RPWDfeQdoOECgYADxn6X7hTjI8YhkZonLLU4 + + mAkGWUmFKqYND/Xvoa1nmNbBEj92ZTBm0hHmA9nHHLJ/zoGyMrVm86pvn2lYKwKu + + F0lEI+HehpbWX0voe1v3qT5Ku//pBCgXWqOUNP2/GDOHCl3O+lhqTy+s4q5LCyef + + qGI3exorQ1UdY+FVwiz61wKBgQDHJ5XODsa0DEP/Bgtf9whufia7kLXlEbx/e66z + + xzHeAfWtPw72FJ8pSEXakseGqINeOtPX+47B09SNWfokUi4wqzSfj8Cx1R9RBDaD + + f6txH4sRQB/hA3LXtAB33+XagRkZHlwEBbn2WOwAtHmRty46dl8/11VlpRKvR/tw + + /3GuAQKBgBz8ER+tjIMqlVSwLRtgJ+Hi4B/7I94HdOhpEuXynkPxYkOUSKGxSysg + + i572W0X5fIFK6m/ubzdtfYx828y7p8LM/BofvQpy5q8mNL7tOzbnsR6A2O0k3Amb + + t0H49VVOSMlHAYNFWL0CQE98M3bWZctIHIFo7xyGzHpypCQUfI23 + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 8388607 + seed: w6uP8Tcg6K2QR905Rms8iXTlksL6OD1KOWBxTK7wxPI= + zfec: + required: 1 + segmentSize: 131072 + total: 1 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:MDMF:jqzeglflgihje5znycp4axh43e:cju77h6dvd5w3oarj5m543ff5ou4vlrsipfmd7ntfkleq37tltma + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEA9+tyPYSnh4G/0SdL+ZxbCU+UCQn3ZduTfDstJbGfNGdlZLgd + + F5mSDgO0Ius38nmOmMAx07abuUkvdVkDNN5RZKfOolLZlDl+eDh/XBb55AaSV8I/ + + 6asiblwxGA+xOS31pvnhupLSXYKI1xM2Pc+wLbQEgS3MJIazJAakNySCrNJ8Yp8A + + 
MTAHLDdMYJa4kdvQJeJN0GBFRNLdbdQ6VCqnn1rK+ky0khEskH5QkNZV8pjGFAIJ + + 0RMcbysG18eZwvLsyCzhtsST2f3v8/3B9N1Dm49Sf2OV9DvwMxGRamO36f5etPt2 + + xrrg+lTnWKzEoqlINSrQ1tYNOZMGHVoA15BTnQIDAQABAoIBAE+LfDKP0v7P2x88 + + +AwFJlJ09X728yl7y7Ty+bfb50R1nls0FaWCURHtD0ma5e8HIIETPYl70DgharhA + + kJ5QbJYan1qGsaf00Ia7PeXqu0/16dN9kGslTR1SuC/LrSW3ANgL2ei2fgehv80e + + LWukrRbk6QMXkiXwEB9RgDPvI3xWRQQT79Yf35VwUoj2ldG5bDRiwciA2NeNbNNc + + KUdKtN1acgqZMvv94uN7HeBMYKyttlfUM9+EMoTDhGJzGNN1vLcB7R7nUwiCJume + + 3/4nm9+pOJt2ETl1pFY/G+CSAUeIrJU47y664UTfHhydt0YI5Xaco+3burj/9xxE + + qieUK9MCgYEA+Kon6r3VpPd+vkhylgqk3vNleuxB/lZmmXMH/oR8WdU3bQh65q3W + + YcZlRh2Jq4iwChz16aDSelWB/mjDXotPLK2cSvJZL+W2c7tzCF/RT4XiphkELmAB + + l2Pj9W9FEZVbxHBZHRuaeQQsJHGhM6MtnX9TCdSpX0Apd2YbKx71mX8CgYEA/zuq + + I4R8dSVNkliH4it6ABhX3z0t8pTQqhDVd2G+ubQ1QjweJ+VA/xxyQF9XoqCd5Bqd + + XY7BpXRtVVQTEyZ3ecAQTeed4pVlDuJXuwMqAxSVb2EAZ4rLVf4DpfSuSvREqZjb + + fhrW3zbRzhBQF5QL/WferHruii83JDKuAF8iyOMCgYEAkBhCI1Q4Lm1A49ElnW6z + + lYKjxrSLlW/J6pfvBP0O9huJD8S/O1d3CJen7haFxYHiySl5ExYfgcZ1GtDojava + + iIBeNkvzhL7vmGcCRNMJfrSN30RV2O3HXkwDOCFve736PH4CFcz+GaxiTAgQqtSf + + RUoX+3VhZJHQtaDUk4tQNM8CgYB7GtITU4GcFyP1JFJWGlY72YH4oM+ao4CJppjv + + feu6MltF2S1KXN8erR/GQLZKMGI3dUbVq1dncGKTt3uDzxftV2AF02NpuFkH9tAN + + 2ZbX6YOyNv0089LjZSNpVj0C1hKQIrQrfNKK0ywa0e9vj+7AiOr0Ek8fw2o7QV5/ + + u2NRtQKBgQCdjdTxZw1n5jDO62w92iJVi/Q0g+KoUEpeeEXAXmctiDNaCNspsETb + + qqIo2ZmoLA/yptOPa0vfEYg1yTpQJIf8ZY0AjeBHRTw9ECUFeqdakQdV6IX1dEFd + + r3OSCPSREX24x6CQyUhZMAtMn8D9mAVMlUt1Iq6HtXA3EzViTS0PSw== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 8388607 + seed: w6uP8Tcg6K2QR905Rms8iXTlksL6OD1KOWBxTK7wxPI= + zfec: + required: 1 + segmentSize: 131072 + total: 1 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:CHK:5eeprb6lfxclwt7fieskd2euly:ffjgjbrxfl6d2ug2a63iaesxtrqa5pk3yaagfcldqvo7x4ok7ykq:1:1:8388609 + format: + kind: chk + params: null + sample: + length: 8388609 + seed: yPi3JHKKbWaEEG5eZOlM6BHJll0Z3UTdBzz4bPQ7wjg= + zfec: + required: 1 + 
segmentSize: 131072 + total: 1 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:SSK:n6eoyebsflmwvubrh2crtnxxxy:layy5tfeichjjxoeg6jehn6lkxtcxpke4udpakwpmotutsdxajja + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEAm5KsR6Jp8mLWjw6HtwEMd5rnzsyCivFl0kRnZoC31r38+JeR + + iYgM22iJ2rzYVOjZlSBtq/3ts0wZxx5HkFpqNaermhD1XcpPWBX094SvaiCxiRFT + + I6o76DZ8xaDgpbg6qOle7+zIGyR83ARl7s8gNCdHNWS1/eAJ7VYtWukyvAMdOpUu + + i6Mjqy2xgaEeEHBdHQq136NwZDaQiz52EodJE/Fi+hatP9XluKwkv/UXwE3lIcBi + + sEr1/FeG8CwRAk39NKk0qf97vAPvfrzPOQ6WFFV/MBS0To7KsCXBY4Ve2zNVBsOf + + YL4u/52a26X1yH5SO2oblE/cyFCGWiatd3IjWQIDAQABAoIBADmRRdTgIapCrriT + + HN84MR/VH3Ajty6o8w+ipkyE1wJMnV5z37Pvtyo9fb2GYdrRqyoGrO6W8S2GvIc1 + + CjA9dM3T9Kj3G4SQR1oGDfbFj4+K94cL9SLebHqaJwOOa3KHQJWefbX0fXulvdpA + + emOrG3SREEWOtdVy4NmFKRVZ858kSCXGbOZOl8wuaTfzBQCBSgWBM+P90ERMKRgL + + Lw25mqpXBVBuIsS2/EHucMaH8F5V/2k2jC1w0bJQ0rIjM8vITCQuLCl1zb3sHjYh + + alCQyKR/DjhE3NnY5PRedDu+OikqsEMvFxEXIlUWbAgPybe0BgWF25z4VWMROanW + + uEZ8Ev0CgYEAxBmqfy7Q/wRbOdlsFcSBNkDtXHIjJYtS9OvYwJcOj3Yusnw2qMSC + + LZ2teRvbRlbrI+zIpXWCVgiB7iaw2AK/WGohtkmf0oMgl5QR+lxqqwzwze8rKWiT + + gtnEtGNBPLxuPmEt43thL64nK9beN/vdlVEwEsZcpjRgf50bMEXY91cCgYEAyxfq + + sD3X9WTE9jOnJWpzA94z0ccVsUYD5+vSXRBhwtUcPet3KPc9M/PS3XTShaV9QOzI + + z/z8wsFLyN8JzoyOZteIAd6Cx5MKQJ3KO0+F6jv0xyIeaDaqcVzsycw6vT2ITQtq + + 33VGVB1GaXYCNwMBhUx8VjyMcbW4xGGPa4fsfM8CgYEAqZhk6v+rQpIa74oJPz4m + + XayDW8teeC7pfOaoG8/IiOw18KkagJUK3Lace7xKxKeRTw2ObgKVySAsdrHBid++ + + apHHPCaqcV50hoNJlRPuMKbNb9zjoDlQMf9ybmvU1NlGIu7ax/1BjQH54KFAqHxM + + I8IGaIZjRF7SAiv2gqY1wZsCgYAw6cm0OLDSgTqOsVIISOL6g4GnfHNVBq/aI4m+ + + sDtbWUg8AYHpc+JhqM+YVpJ9baYFBQI4VY3qufMupckO3ftN+YrgKF8HAfruJRKX + + xkdSaq5BZ447Oy9Brke5Ml7TRQaWx7EtsGkHySU0MR/HcAnluM4ZVuvcVw/w/C42 + + j739MQKBgQCFbLq7w8IGar1IXBuG9hCvVFUE0zx9ij7u5u40I8F6yW5Rt9QYNiWC + + ctNwMCkvbhiDTvYvd6titc0U4hplRmTpCeQDf5B9gHKQlA9wlk5sfJ5cN7AZsc+0 + + Zl9LnyoMqX/Mug+HcTEMnwQDHQugAkto7jQLYDZezXIX0ppxLp/zlQ== + + -----END RSA 
PRIVATE KEY----- + + ' + mutable: null + sample: + length: 8388609 + seed: yPi3JHKKbWaEEG5eZOlM6BHJll0Z3UTdBzz4bPQ7wjg= + zfec: + required: 1 + segmentSize: 131072 + total: 1 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:MDMF:7sj3wkjglezcfjwsbnjanxlouu:3p7qxclmuc5vz7vbnivohlfwcvwd6kvnnikbdx2afkf3kfxripea + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEAoDSGK++imql5vyeH632kPLk9l4Tzb4tj71be7xXT475INrFo + + tUFmxce6eT9TKQQx06OAdiUfOeaOMPuRL6+qxijFnQ9dkLmpBhOdqW98lOQtr+hf + + t8zMEFkC7PYThOpmc25zeRI6U78PLlpOXY1A8dKgy2JvGqqXlKu1ffULBhyzZay7 + + VxWsArjl3b84vfGyAUeAZkaqHff6rlPrsR54muud18tWBMTfwpBFxUvnD4PI4bLn + + FHR23SlqCdWZ9A7s/M6z6zVgfmqOaRmsHqXjdq5yXKBYhlrF0MrBIi6V3s9kKUZV + + OJGC4GC3j36l4mxplcE1onK6o8sLcjAwbPoHVQIDAQABAoIBAAxohQvaH6oC/l8D + + 3M8siA/7+P1HWuOE1FSxUcsK0cKN9mHmE8oWKrOe6J6DfRlsLb/KpiSAc460gMbi + + dThQTtXSSpwDmKeg+apy0n9RF0Eg+zjosqE1x4hsnIFl/dUJoq3GHEOAWewqnC5m + + 6DLuwdz5B2M5WImkNOFa+0+qLxRl/UPLytzQ2+z/zdHAPaS2spC0MFO2J5z1H3rm + + N7gaVtGPDqDnFyqpN7zuDyOD9+7sXKgV4CL50g7+b9EwKwsjWNaQeePKWlbyNL90 + + 6AkWT59cahVhqzprl2k5GWURkxi3QryfBjGJcBPv7ZkJceuhgrhNg88Hga7fxLCb + + GiksSPECgYEAv5rT+9s9B96Xd0h29pJ99sNgLaus5yhGlwcIeDFCUk8jvMbzGum0 + + n2BCZ+4aT8BBYYx/qMdosZPMISmznMJkOEPPlCP4wHlpG6C9QGZR+uGiw8ZyzBf2 + + /s8CFXtRvlRY3e0TQDMHgKcFGxWn7MGa6K8CR4EGxdbXFmjRzEyBIXMCgYEA1gwn + + sYiRy5ZTUdvI/r4YYedsNrFlv5M1tFiIccS/wnFpDg3vBfYgeI8zO/iGWNP8OLWz + + 8C/lyuTgRQ++HoPahJ0XTywXZg51qZIC4GxbieKCbiSdRztpIb77+waJUN53j4VX + + DOyK75hfxS1uE3NbK0ZbEQDGf6zN2fxEOlqmYhcCgYAEIA4+VusKd1VlgQ7moiLK + + JEy2zwJq+6gBampZRB48bW3Ei7gCNVPpNoZXfH3eh7Igqoi5Fon/gMIdWKuATYMg + + 3vziIKAjbLnBmYVZlJphP2hktKoWENIFjGlsEvqgkWpUZN1MPY0EzRPEEIRMCaMP + + LW1sIrAFpGl/FwSlVGRXVwKBgQCtiDU2DU6GC12JY/JT9LG3zfNBdBjVc/d6OryD + + 38rHTUKqjklWP/CbTR1wZVAl+9bj8wvqkipuj5fy5YxxGNyz3tfi7BAcQWTLEQEc + + CT09UFIGEdEgyt206i1HmkkBMxsjVCr641rQXGxoYyh2xHMJZoS2CDblk6dgLtDx + + rkRuCQKBgQCHK+Dq/DQmCi0yYiWz4gEYmgTPgCDOOEstKD0YbfFjvMesitgQYXd4 + + 
uWHcTMZcycOwlWXZR709Snrhnvy4xuKMlYK5/lHFtnK9n+Tzs4KtsDRXuIkIHJNh + + JYyzXjQUwhLbMHY+JSa3kNVOjMkP8Rq+po8xC/6thsDaGJ0CoIQzYw== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 8388609 + seed: yPi3JHKKbWaEEG5eZOlM6BHJll0Z3UTdBzz4bPQ7wjg= + zfec: + required: 1 + segmentSize: 131072 + total: 1 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:CHK:tkjwggbz6p4wvuipe3gtmgfmsu:cnbcggp4scaxcde6vtfzga7bsuja4qjfbtv23xhaofwhbw5exjrq:1:1:56 + format: + kind: chk + params: null + sample: + length: 56 + seed: YQ== + zfec: + required: 1 + segmentSize: 131072 + total: 1 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:SSK:jlpnxcvtg3ohti5c224dtqsfi4:g3exm5bx4ctu3iuew3ir633j4dieg4p4fygpvpb33cvll6jqj5ka + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEAznrHSzm1uPCplwOOjhgL+PvikNBWyjaPdZEGGBqWLIpr7gsD + + ek/I01cdgJzAd3p2oJpVj+Dew6Wr2LCTlGFWaPLFiCaSlibE7BJHqhQwEDazq24f + + eAPbkxFT9Y6KC7bB5Vg4uROUdKqadhdN8aXOC2QqrosdT4IMzMn0Vxe/GrwanCY8 + + fzCrcp6DqAH2SEjlavcSlCelKgt9C9cHVN3XAAFEsVKAlDnSKE6CVE3UTMTKmqOH + + l84nGMspHD4BUV8juSsseKYz+Y7LElcdslZiPsfiEDcE9Tv28zWFlFG0iV36NIg/ + + XzFutNOioIOhDhWpzUSOENUuMXZG3cDINbAEcwIDAQABAoIBAGVOklfTYdjyo5LH + + mPsYy08Hbxt0TRD8AhlB5YaQDNyfseLinns1iChBVuVSg5Bbkrar4o0sXMALmixA + + PriPpZDqhIaPvl5TeU0GjwjgzNA3tqHG70O4SNR4rQQPQqYKrkmzpmkQNUekqRKF + + zqVgn56xL8vhz6jB+zvDXtIYgZhAjtB5gyI8NEZzzWfD1VNLwGP6Hpi2Y7LvPPxM + + tjCkGN/D1ejbyiAHA23EjAAIeLF3xjzSViZv6i98lGE+hFe//EPDs/h1C+Sso2YW + + u6R9IIPW6hwGw7+wcmb4djEB6IkmrXwMDzRdPqbCLaN2DEeWk6iDBkgbggcqxxVQ + + uque8OECgYEA8AEj9fQCkzJ4R3g2Agj3eD37J+44cs8XEeslh8lZTFHezvQE+5BY + + I0GAX6qsetKJYV8d/SooIyIaDy+LULaxBiipw5WQIAclHhFSTiZ9zskALzm3qUaF + + vJPzpljTCPskVDtXEdkRpfZ/uOXKQWdMHUQm4u38i84zLUxHBdMIt4MCgYEA3D2m + + hesxOjuUMQ9gNhRVauq1gEbZ3V9RcMaq90g9xsAXB6ax2YkFAy7fUWpfdn1LIyKh + + Lt2aCVmvqUupsFLNOonDLmasOEj/RUbOxUg/TWpmVq1AExAF3ZQG3EyB8ig4uEFL + + ZJDgYQ/IWIofPgZ6GY5LGR1GX6pgDbqXCJxD/FECgYEAsYSGbrMu/GUGJga6G8M6 + + 
F4vwqtY+llyqeaxttAOvsw2TOYuv68oWBu254AjDTo1O4+CQs+JskZ/1mmnWJ7sZ + + MK7+8hU75xSh1Z2GPRunTj3JjySnveLVpGfifZWRckEf29WQTzk5HoI2cjI06S9Y + + UwVHpe3VMCsyGz0iAyLWfbUCgYAh0vINJSbFS7shobvj8lF//xXq4na5MddfG5PM + + MHMUYBHpYed0gj+b0ooHhe+tUebFOZ9JhE0Q3I5G0ND5vG26bMfmC2ytpEBYElzV + + HZhjOlEHRMpPYymTcxVupe1bFGRJn/WFN17OaU8akfPkYbhEzn6oF7/kF1VzJlHl + + x6IFUQKBgDVkZseTqRYmO1dZxwEqAEgOZ+4c7RJ7vaYp6x/JZcmDD7UvmH/QAJUG + + J6S3uHv23lcR8rwtAlbHT1jIW+kK2vst8M7W4kRuXafj8nW/mk7mDMXzXU1rD56g + + xhNZqMb2ruLqkEUMRpbklMHlKxlZtk5NS9vslPxFPSDqPfwctu3C + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 56 + seed: YQ== + zfec: + required: 1 + segmentSize: 131072 + total: 1 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:MDMF:l43byqsidupuz4k4kzskysj4hy:fnacy4ixivqrmptbpd5tvybb3uhkha6zxucyj57ovqbiof7dvcqa + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEAnYen24v52vWuFDhJps20/WRqmksmi74optyIDS9EE7fL4YRr + + w1EnmATjh/09h//Yu0LMjEwSwfOgfKK9VK5FmsnWExfSDYV82Gcca87JRNdmvEvo + + fsBMZOFVbJDLvgTNha8Y4cHiAmtom1fTSklkEwml/pVPvKnTPVT2rFTp6S5iOun3 + + W/akECkzPKXuUJ6erpvzF9ksQloyL7d+UQyhUjyL6+sQ8L2RTkoWWhDiTagXiZJT + + tnXvDXugY917TpTYfwaLuscpbzmSc9eaXTBk8cC8ZGSWluUDaC5PVbheSXfYFLoQ + + mizLMZMCz/Pih6kNWiFb60T7l6TCNr1R1YjhsQIDAQABAoIBAB8p6A4pydsEQVTk + + scVa6pQ6WlB9z3lTvC0OcafSEvCnqqDJlpwEIQYU6YJMmfCer5yUIW0b25YdAUHG + + 3Be1hjWR+lS6oKZmIwWYmGnHdc+1oTBc//ibSEGoxkJ8/qFvx8zLj+uRdImv//jD + + ThxjGnYdsYYEucqD+jMm7Mm43rFvWScA1wAFlgKCbylLXRZK6Z3R0sTE47SZrNX4 + + FGCRssorYNybFgw7r/7ewbYyKJpLtp7r5QB/YcQc3/ZZKVOphSqSwpRCTDLTMH8O + + MwhbefL7jz3VJPzu3Solm9W8udhu5A2iXhXUj3IL86N2ZncEBPyh/dRWwKFPxvvr + + Xyy91rECgYEAzyqf53UCCRrKfBaPgLX2bl/lN1cdJWeRTE5CQvnBnE4Qhwfj3nN8 + + 5yM597eA6ROLOlZmFyAyKGK1jtkS6ojZDnlAr7nVq52+kVqCR86REybbbaXjzT2f + + Jb7a9+1pQZ/bb5n7xs7GYH/LX9I3MCPhSRS5apf2G10GPoFiRe6Iv/0CgYEAwqm7 + + Q0hZKxk4D2+mG78HNKgh4WVR2JTRrRR7XaVTXWZNuVhKW8BBEyniFNyn9+AWQEAY + + 
jUx35hyiFz6DF+TlGZ1cPOTyWespY9JLi/dIdACc9K1lWAcniO7z2ksYKycPI7IU + + 4K98qhrbfLJPAx7rF13uKcp68snYQ57g1wWQ9MUCgYA7gcnmyVRpWxm4pR5ZYWtE + + 7yS/TbWgjexNl9kuteEoTcAvmVOaDWBeYF8BSeOsj6GZg0HV+LiPozL1smLdnauD + + nc6361B1+FzKEc6EY9CGSM4U4+bYiI/TXsw1FSv73rhAiWGqDLEs/OhlQNP7bwMC + + ZAKSnM3jtEfb4nxhDBCZ3QKBgQCWZyX02lVq41VZN96T2YjrumxTBkGyoWlP3V9j + + /3Tl2UF8TydEtMqSz+2KSOLOtij7A4r0wXxyIvVqGDaZo5UPsXGu6wYFS5jzM2yD + + fFBSsJaUxdRjq0N0nYtzwkmuLcOYxOM0puIfXBjxw6MguibSKxT03SkZpbKerIb0 + + G6zgMQKBgCU5nTQTh70j3b5mcynwsvOwmOD0YEd0Vgps2VbNObsBB0u8UkcUiZGz + + 4jloIGaqTsf6dsc7hdHdkL2zCiBtbSsur+HXpVUl0HbtAgXhsrCu0uzLHInTvtg6 + + K4E7oqX5emi/cyUG6Emx78GDO9dfYm1dz+gTZnAek9+aQyo66F5i + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 56 + seed: YQ== + zfec: + required: 1 + segmentSize: 131072 + total: 1 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:CHK:nnv4vrtlxmzkurfzvonj22leua:ywcyijrfnwykraku56dq7v3o4ts3xsxqfgmk3kgzwhq2cpest4pa:1:1:1024 + format: + kind: chk + params: null + sample: + length: 1024 + seed: YQ== + zfec: + required: 1 + segmentSize: 131072 + total: 1 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:SSK:rq7lxvy7oorzoamt2runjfgvky:ebenedfynevee34zqofeu4vtv2bgu4dfqhqy34yry5lb6x4fdy4a + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEAp3GpCG+2qiLvwO5aCQrOrjr8Uzt1502l6qc+LwbJ4MDgH3pw + + re9knaikOrghUl/qOo0iVCImZoBItDShEU/HZSXDMMXyqZVHNnpnLNn2EQOgHXdw + + OK6uDNONDJg7rb0zIpr5sF4HoPukO6odIqogsA0S3VUhYPL9YE7MHEj4IL91AhH7 + + u2WdKvxR9ZOZ90Kqsha7iBhMzy3/JtdrXRXP4NRJPTGDSGo88bb33P/i4CN/GQ1W + + Xu9WspqE0Jn5yOO5YOrxvwNJK8l4dBCqMmvGNLucul7UYDsZZNHbixOlrrRexbP9 + + I2Ho9Gsfx1hN05vKNlLZ2Tnuef18XeLgrceTpQIDAQABAoIBACA8Nqsv3IXdA3SM + + PmuSt87dfrGkVxKwRWKLD2LcxvUclJkiyHoHxgI/EtzWEV4rJmveu07goy1lAXol + + zqNHTU746d6kIQ8KNMM1ZdMB5AgK/2Jk2ccjw9Cm0nbsAMM2ExfUp4CPXZ8ditTM + + r166y1+xKKJRDwO7y9EkYlGIr7IX8XY0QIr1FU26QItZXL/Dtz+rInVx+UReL6OV + + 
q6bA/28gWCOkCeoMqlk+LGU0WWAFryoJr+Ob/TjqkjFX4nURFXmVhJwaqoVMZHU1 + + J2O96V89Y7boz7VAuoZc9fxy1IH/q/p1S/DsD69EN1elFs+g4TchWg8AdaYK2xg+ + + nyCS/fkCgYEA6GAbr3Mf4++xg+Iq0kV8lFRrxH0fSMhdLeYJuQjsgCjzkLpuMUX/ + + JbkP4LoUmv5W1HQ+Gq7ZAOmS2VseNjaeWWrZwdoR7s478OTFbQwIEVxOpyzi3MR8 + + WKU6KiTCzs4ijjf3f2oyf7Jl5/aEnJVhiZrTw2iNhk3jnqZA3h0AvE0CgYEAuHef + + JTQi9e3rGr3KWEl0vhbjB7W6isYdh8+xZtwdYHSVdogQuO0TgPooVTkCrJI0KiEl + + xghSqYF4OAh8iLHGtexrTahw6/xkxAx5So3PHev0eackG2ZFVYbLcRxxHL2n+jew + + XAQ1j+T8RdWOVdXIkWTOoKq+lYqwrB7JCUr9gLkCgYEAox7cMGRbTZF0BkVck/Ct + + TB6a6/p9XIUyS8cAqkBmbGzS1ZTZR4OAYUWwrKtTTZ4e69KRyf9VW7ubFzNMWPgs + + Xk6Qf/EJx55EG40sPalFfJJUsCvlMN5I/5004GKf0baIMVd+SJYOzu83dAbr/lMq + + fgMOhky9lDrW/wZr4L9xRb0CgYEAj2XeP0uSSd+Tvgv/ujYQHJ0qC5pH0w1Dc4oO + + /EjsRUkbzzLi3P7fBIpyxB03aPOWvZFbDeD2cXKGA/kE5jZcpJuOpqXkcm6X3pdb + + yosGkNoWCGPX+7y69Ut95wYXICKG7EpSPJXBFYUKXzcuGKfB7NSSk+9njFRuFr8v + + xJuZCXECgYAyyV2Ql8HWz+iF56c7Tar6vjU1iaHHp0iTS+OiZbe3iGbBaHbCpNQ+ + + SiAOcFOhAJjw/0Ez2Gffvmk09TnjuiXBUmYHgZKIWIa8EhAQCklLDUl4YmqcRA2s + + HvvNdvNPEnpyoXKOe3BmANrjkNz5Yk0gtOXDH5ar/JgweQaTxXoflQ== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 1024 + seed: YQ== + zfec: + required: 1 + segmentSize: 131072 + total: 1 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:MDMF:syhziku625dhr3x3c67oa7d754:cbp5qtz6vphepl2tmyo4b55o5x5afrdjjjyt5i5t35k7fp42qw6a + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEA3NxdDuRuM4xF0bmoRI46FoksTeyQwtzCYuL0eEhfbn4PTB0F + + jjcTcDp3Lc+E7FOT1zUU0wp4rhvOZDTpFjrW8n0+iBZzZCgka1L1sEtncPDgYb+/ + + VDtSFIoSKqQLmq0L1hdA5oLgXWEE918kLlNjN5l++W6OGfzB0ZaiWbJfQpOyYWvv + + PN5P9CFxh/aNO4924oivqB1iCRJbP1szGcEuZ1L8QWISZK4oauQA3wO05UGPNrQU + + XIEJDUoEtTSlsuOV+LZlFWkEHD7cwtlG3ZF9u2r+iHf0xcOPqsmqSIpOjPqg7Uic + + Qh9r/g1WwKmlmzgYgg2dWt3QUTv20WYZ6GOmnwIDAQABAoIBAANwH2IMy+sGgHcQ + + IPPsPjA9SLtvzB2/FPw4FIkjde+A4S5p9z0BqM5NmARWX3cpopADi1mbp0mX34xv + + 
cb9prBCQmDQYcV8Plug1cuNLJobmC6rJ3X8WAKRBiqCE9t9HWVCnAljJlGzaW/BD + + 2ArWkSFQ74k6H4EnuOv7yyGCpXlM+YfJJmAXSVM593BuERTM3//Ki5ctMwR9t/CN + + rxzDVrGhbIOKHWWAKfiNUseZVuy5F5QR7772QU+mycAvW2qVLeKs9lYpLekwqlLR + + TWYjE3HVeuhSbpQymHc6TsLiADlSVgGVC4mDeg80Q7vLEyM+BS4vkouJOkZl10ZA + + KdJT2A0CgYEA4suaXxcc8IrNTyE7lS1NCAXMYaI/Qlmszy3KfR2aIFzbJWBRTlli + + U0WWVbvVkcs616IKEb6jMgK3gJK9Sg2+IuT8W/DBr+q68/iTSJcjOhWD/OzqCHpB + + Y5tAkLA9NZf5QUcS0eSaZzWJdn3s30r4kTV/ZtfSbsXR97mlb8vRWaUCgYEA+U0g + + XARhs8QDChYNRjcBnZQz8usHL/LMXbO5h9NNtdfqhjYaBljHjYgu7cgEeaj5TMPr + + rVKgVUmIgMun7mBIAOTsQym10tM6ARf9wIR1+yYxbtDgoeaWSje+InSaG+3hYVe8 + + o/T1QQFQPW2ngQQ3OZ+YRrZLCqVw2qRLVQuiI/MCgYEAo7WKsjdR6YSYHRWFF/LC + + VxcwaA2hEjj/F/Ia52OV3OSKQBmdtyuoYSmrEinrSTllOUA7eoGc9b2mTkYeIzV2 + + WWPnkkpg1aZf2zpEvrJyeDwNsWYmrYXqa4cm/QpqtKQGBYvTVvVoSzYHCyRs9uX8 + + NX1jgI4r1VAwd1xnwiJi9Q0CgYBS43/k2FgbywovqlFTjSpuWD5FgDtth87HQOBo + + 9qqZ0WZapVZV0eLXffYMfTpvsOzixylvAU/py38lQ5FcQoruMS8UzaN0q2JXxsBJ + + 6EDJ9lLtQ2nMqrxBhPMkxZwPuTH8iY1g/islJ+ij4/eTf/FUqWmZ6TZeHc++Am5B + + opKQXQKBgF+86OH5QEMW5MMMKBhL09uzyMnM/sweKLo5fNYL+7hiO2Znb4sz55u2 + + I3xsH9+OMWq+o9Uy4q4QIqgGApbfrujOECe1z+j5mZBRTIpWTtW4W13sjT0Srqbw + + rGIvfueyhQlQqxcKobvVGIdeU8co2tG03Xg9jlFhrq8vMdnxg457 + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 1024 + seed: YQ== + zfec: + required: 1 + segmentSize: 131072 + total: 1 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:CHK:wlszole5m2emf6wbhp3lnlyfxq:irdttvny74gxdrcotzfjslfq7p24kyiigyrm5shhw7zyj3hsiuba:1:1:4096 + format: + kind: chk + params: null + sample: + length: 4096 + seed: Yw== + zfec: + required: 1 + segmentSize: 131072 + total: 1 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:SSK:ad5wtml26s7jjv4gfwlrs2653e:hx7zlqd7fp44i6ky6lfrrwcqxslkg6gd6x7moxt4kxkqvm5d6r5a + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEAtrhCulmdP/BXGRpy31rlHCP6E1vlAqnbaxWG3o3o0VlTxS2a + + 
Bv4x3ac/KhnIfkznrXj5G2o+lYNlWNt5DhKIInA9qHsPlIHeXHh8dtttnP17to8Y + + YtMjGikMyfSF5tTCCYYHxXxFknHZ8Rf8eo+JCuOzFqhO7RYSN3Rs7zYTwMeG7rQX + + PZgrt9PxOYxlGcluBsphr+PAm+ImFg56vk9Xy0fbDMXI9q1Po3W4eo2RTyDdmBOl + + y+v3MVc5L0ai7Tw5wiagQD5nnJDPwLcEEsE8kj6GVa1wGEUG7eERzS+QsoBLlmkW + + GLThyO9/vCp5cxoZ5LBIKUfq9W8D4bH5mauzxwIDAQABAoIBABSh5o8KXX0B06P3 + + CO+ltjyg51/WPvGNDUTLRSkjrHLZ+7ab4/T+29H5TQ2g+bMhVgE6/s4Usi/J4DYn + + b5pM3L3JdNJDNcrW21gPwZDeTZiuRZD4xDqB4vMqSCe/NSY6nL/cjaDEMUr3EN1u + + OvP6pJvr2FDU/Tc9CHOQnFhqqn0DGfrevRyBBD12AnHdfA/hlvquMdgp/u+MDeKg + + 3McZw0oVDqHHb3t/GwYNvEesMKC9xeA08YIMQOMYXyUxZ7VILiqVhUc5F5Kw3l9D + + /SGn+DxYggL2+C/JwwQzbgHRCWulZFe3l8wDI7cOI6ajmjJBQDMx10YYeZsUHmEq + + EkEGOuECgYEA1d+v3L2mRJJrjblh1juJilnSMnsIiuw6W6fGBfR1OCsmaw8MQ3jL + + 7JH90L9kHtchyz71lCAf7V56txXxkHvn7K3zuEn9qBTodH5GSKYJFYW35zWwARsz + + J2LifWzQLYyKg3PTcIm5z9WMCmBovZhNQivhhqRdGeRrZEyVWg9aXLcCgYEA2rWs + + JM0ywx5F4S51vifdM0PP6VbVCYPfXeZ9tDLkdA+Ztz80/IpNUoQGmfmTFXk3YRsK + + bheqltMJcqC7xUm5YMJLGlmLkYRj/5s9Xux01pYNt9TDJowu2It/d09OIPK7+ioU + + +jiparuYULKNsEJp9cSRXyZ8qOKB8PhX+GsEcXECgYAsDynOgq9G/xbzGlaiaJ98 + + Beb8iUYIQIQBL73mqiafzJvcgDwZhkAUWzr7jwIULGOE2FKFEl0hbE5Be17JUg1E + + P82ukGeWAcClhwH5o2LJsUNieTfp8m2GVqOsDQeR6pr6W5kaXPUPcMGpvZS2QjLg + + R+Ps9d1MITdScUhvRixqXwKBgBlQB3Fm8mYUvd+3AdeVQ4uoYIrQCu4D/jke8ROH + + BFvOZmsH/LjxxMs1DpKJiRVmJxutBoMBaDP2jtRed/z4cGUbd5fAH2AjI3O04uB2 + + m3sueL36+O8gMFfNpV4IprE3hrwIXM8s+aapuZI1aCKrPRo9utl5WdouBP3/sCbH + + NAdxAoGBAMTddDwt4yuRavBVlZSn+b+DNd3vV4skOVa+zV7Pdkqf2MLsoksIwQhz + + DT2RwZ40+Sh6JIBaxKERIRTUE0o9zfWwWQ+csXAGUxUo4Pd7FV8svwfKVl9vqAIX + + x1XcKc+/jgT5lvmkpv2TFwke7S0jNvUtpb/j81L2QnYg+9nFUxDw + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 4096 + seed: Yw== + zfec: + required: 1 + segmentSize: 131072 + total: 1 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:MDMF:jfsmvwzuduc5flywq6or2sybg4:uguwgnvjuxfn3ivws4zpt2dai4kzoucp2zr3olid52xeln7u4jqa + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN 
RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEA2K/v01VINZgu6XRXhKx1Q9Hl8zjkve0R5lQmY54p83t6eF7K + + iYqi9X9pFxVmG+3pGwMwmA2ZO1lg6+FWhiSazDk9O1bEdK/fM6N6Pyrk/s1imIf0 + + 4lUkZH1tW3cKuzsaCSXRvPKqMRXqr45ONhhNXpJtxJcsrJXJrQ+37PtqJnbQHVo4 + + UzqCBOm5ytUJp65EtQDMBCdf8up4E5HvpmOzjfEw9SoJ5bREvuF32t4QNiBvkkjZ + + Xv0G2EtXBsO1ujpCQaRftxVCFermNDt+qL6s6pnYsfgpNMBmMgI8YSr9aNQrMDdD + + r50e0MwnUo7uyqQuZkuhC65pdaL0Dc5Fqi4dVQIDAQABAoIBABZ/5kImZ9IQ1EYv + + a2r+UUrSf7MKpE3IUQR+lmHfqXF7z9Kx3Qv9FkCxkyLveOPLh1njsecH+nI8LKEx + + i79wC5bLFr2Tm+CV5nJBNk9az95ZSzSVYWsi9h1tHK7TpIyebWynvaiF9gAUy4Kh + + HyPk0BvSzo0MOXpOL1vF9w4naPVHTfC+7GyaDld2OeZqIkrLJtXBxMnq/C6WTRRc + + yurMaAaQCtp8ZgjKaw8vdud+KMfXDJZqNIxqjDe9PN9Lzh/sR53y0BifEYKeyJ28 + + PavJWKAIUH74aku45FFPjZEIJ/a/JrHEyTeOdcn2a5Gx/dZUN/h9zVtZRgE23wKA + + D+Sy2QECgYEA/0V9P6NRlP/qz6+FfCKAx7xfmlABM6qvtVgZ5kFOACd2diY2Io+y + + nVAJa1no9JGu4ufJsRYaaa2ilE+rekXfpqXBngmlazrRgLkfyy0TkKDxMgP1wxkN + + owHh9Hgr2/l09Hb8La/1ITzqIQJJgtJVf7ObYRxW2fQD8lTUKvz2nEECgYEA2U5B + + rBME13Gxx/Y3fMhGJJGuJv50t/ECdPxUXOzg7WkcNtHXB654YvFmsr7WxsXkwviQ + + k5bnewj5BHQuj/U5B/G3kVubxhbdiPldQaVP+xfaXTLFhUodEshzsA5mlYg76QFA + + /iKUQ9W1gDvSn/xK/ARLIm+GdWrw+JEsRg+QTBUCgYAobE6bJzeiCqyaWscekzAl + + cPUKsKSgE+VjKCJhzfGWIKmnqAFmk67LLoNvVnuHTxKMp/vOaRuhpHdcWQlkgXAb + + KaBxcEGbq2LFqYsZV3gDrRjEvM/MJ0l7iK7JUcZQPT6B/92LNpPwwX6p33zYlIop + + gL2YMS6nsPZ3B2vZqtk6gQKBgQCY/+dvP0jWZB+XOb1hpyTz1Hp4zAnkBNYFBjBj + + 6QiJP8t0sZQjvWzXxT3YtlNESstBl3872zEKSIwD3cV26GKKPF9SAd0QwMKkEWbe + + tIU2tlmx6vB1Y3RK6EXD/K+vsubzrEVVaYVYqZyMOBKZQCqPfHpmOX3DKFOXv6cb + + gRPI7QKBgQCLtj/0Z4oOc/o/HaMKnP6Nsu7vCtbB5iYxtmA0yhBTB3Sf44/1mENE + + LZ4dVfA/ZbZH6M/+6xWnnZJUSXSS07dVO/kjX8obYRGEMXlSFtF2hsw6P1fkJBLU + + 3KPOvN+NxQaCi7zW2xoifSGPvLwzHdMwGWfioVoUVhWXSK671OHFuA== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 4096 + seed: Yw== + zfec: + required: 1 + segmentSize: 131072 + total: 1 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: 
URI:CHK:4ewm23jvdtm2i5xf4gck26wg5y:7cujxxc34mkfkmwhbemqtuixuektknmhhxlmujcekibf5amfwwga:1:1:131071 + format: + kind: chk + params: null + sample: + length: 131071 + seed: LCa0a2j/xo/5m0U8HTBBNBNCLXBkg7+g+YpeiGJm564= + zfec: + required: 1 + segmentSize: 131072 + total: 1 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:SSK:edbq3ekmt5si2lpahrwyh246gi:34snfcth4337nbu3cdgznrsrzmcudje5ot42rft2kqxyad4uyanq + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEogIBAAKCAQEAkFwLHxfDZEKh+ekOTF6YXW3pl2TxEoCMoMegV7GLQJrmj+hD + + fE7WFpWGE3DPsZWfVU+79VlU/o56/Ymx2REZQCFB19UgkGV8FPlGDjsYcXPU08Wz + + peR2heNrx2NKF2Y3v6CV9Tb5mJ58ibfnByNyjswVstEiL5FCRGswKGos8eMewmJ3 + + 0AnC0s8txTEGki/mI52a0QkIlzgJTa0qu0Cgm0XnnK7xi1w6Yli4VIR6VbNrGhwc + + xChrb5cVD440H60LGJvqNrhAoAy+6Lg+0N5tWPuD3ZnyGqNjkCzqKVznvhTyD62B + + 4Ap0Fd5tbcNgqHAqmyMIvOfsZYmz/Jq6ql8ppQIDAQABAoIBACK4IGyf+Hxakj5a + + 0PeJILgHwVCKFHjQtgHNQUEWEFm/Z4hg4io5g7/2wkJWtX0OcT3BaYE+tPRsLCRi + + Q4XjWOFVnlJcjfJslgUtVq4BhIV0yFEOkYBqjB7zbW6M8Lrj+LB73NUXHbyZEXbF + + 5iiPW/QAHY/eQIyUMQ3ngbOWpayfCLJ8M/LZBorPb+JYLLgAygfOP2XUH2+kRbeH + + 2X5gOhywDkGSeoOsBCI8W3f9Yhn9HzP0E/htrZE5hhKim1ZcFcvSW4Zu6jLSMOGp + + oRnpYcyFg2Nx6JIiWd0w+f+JP0Nx9ukEJvHmJc6zYZptxFCdAPW1ATMB2vIao479 + + 1HwchgECgYEAx9mVKo9CA2kqoDO4nEOjV06F6Qc3me1C5KUglXHX0JQzW0Vxt5Yz + + ygqV21jokzksiat1D6cGnOMPqaaz1bQu+bxaBO1LPku19dvDfi5VdCy+BwzUVkmE + + uj6EKqX7MsKkz8DgOPXRJErBIkMFxK5q2qmLaJ6I+QHE6TAxzuvKTeUCgYEAuOtB + + jVNHpNgWAWgmmva49j/LmWy6pBpBt2uuxXSmKlk4PvB7oAzDvn5HPKw0zLZ6MVsb + + /e+YqbLGxfAJeEsVZ4ovppTYOdClVaPlqvnf3tztav/JWA5mi/YsQNIJJRIGwLZf + + n0lyY7Eby2GVH4cCRAsXmDFKnfWoT1X8b39JsMECgYA/e5xooo0jrDqAHS3dZZbz + + Wtwqw8IjwTxoiROqpTka5pjRu2N+H9ZfrbEgtkNa0OSW7sIGsNXm7DHDgFLL5aqu + + ZehqfD5UkZRBfwfAg1NdzgCnGKoyprPkvYsaSRNccnwMCoavUVaYIq7rBUNF0Onc + + f9Lq7sEv6CH2uPp5cmkXCQKBgFbVqm+p9s+y5Qp+FPrZ9tsz8/C0/SQIbGmseGKi + + t1DVmrL7jKIIvHacp+kW2Kh03AaHSSrCs0ak+/CBGoFRiNiZLG0mIi9sCeegUj4q + + 
nnTx+88uFCd0g7UfwYIi30Z4I5GlUlvjSoMD7RBhX3xxkp/PMaI3K1nnvMSclTDq + + bflBAoGABvqvBpYQj+Lv7WWdC218juhICDEnkcEJoZK1ElS8AIFiQ2m3oaKR/+Qh + + ONjJGROx7v5JdaM357QPllCRuQjS3UfU4XOFQ814GJ8TiOzp86AfhT+9h766nQ0Q + + TQo+tOB15TiaBC0QMChvFNibHytFmGtsMGvAfseEEWYnRDDADlc= + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 131071 + seed: LCa0a2j/xo/5m0U8HTBBNBNCLXBkg7+g+YpeiGJm564= + zfec: + required: 1 + segmentSize: 131072 + total: 1 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:MDMF:h6zjlq7mo46y6zqu5w2whkpmcq:ykakuwcliizsy7n4gwqsntjvsxnqznzt62iepifwuy3b3kwyffva + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEogIBAAKCAQEAu0lVdI3gH/PySDIEdhByXGIH4UxlSn1ZZs23TgiEZhxVIeeu + + HA7K5uHfXlstKt7phM+6x8j3hSHPMpq3svwdVqioAr4133F6W/ITVvZEeBKGrmEl + + kd4SI2gU6yvG/iDJmf6Pq9qAq5embshOY8gC737fufoXMkaE9fiOMXpzf8sqF2RL + + w9D0WXBiI1g3QvR3Q65SQTY+s4N96odAAD8p5HgO/LQlNRA5sFFJVLfCSlmx6FY8 + + wIYQnv3VsK+xrdo1KKNOpBBlldYth64nhCpzX31OWDn2nDsoPvZgU23ndXDPcKBK + + ztgZW5rJ9vt5HGbbShKFKBxGQCK0ud/IKOILpQIDAQABAoIBAAm4RlpYc6GM5k3X + + ZLJg66J+RvTrI1WgmEd01TbUS9TF0yhBjyBvJxog7lgGCNvQ+lMVedbdB/WNmeSB + + MZf1LCufcKrFxuN8DvLfJyBMAyUtJva9XXcKzKuwPueum7L8LiJTGw869ZMSOYXF + + 2QWmL3rQ/Zj4EQSfss5WMkEAnyZqZMY1wIZxdnpvQ79iGXy8dJDMACBfJ8pLTeuE + + lyPERg+RfDlvCCP3rmcNyNLcUdA8caaEzKS7bOhXM27ZI9WZ2D3wR2WyohCl9eac + + EKAquDoDwe4EVOv82cphUuFhcTF3xU3ddRADtP7Cz3UtuITc6w8C8TeVB/pCpUp6 + + 9LDQlbkCgYEAxzf0oesOILxQecpnIbylDIk8khnBVeCAmXpYWFhjLu96pf+Lf26F + + IBiWA9jwETPCeztPVZy2KDnt3lh62DmPUW4HQN2SVINVTo3Xe13Ve2DSh4crYjuk + + c+B4Xs8TyXCA74FL19H6EbgSDzf7p7SICG8IHMuxcoiVQHptdOzJiWkCgYEA8Kq/ + + aM1D1tbRIKeICmqZATkwJJG2MOB6FFcBfRICpIGtn90+ztVdjgMfZ2FQJ9Eeg3gj + + ImjrnTig0I9Qlxa5DcgUqW2ZueR+r0Bw6dAp2VzOy7nWHZUyv6RGQGMA5vModGNC + + FvBj8mqD/XOpt5VCw7t8MX1r+GgBZfAPPiiZjN0CgYBMQAKGJu2VYf57XxjyNL4H + + ek+QrALv16nhFI7T4aC0yjxrZNADyk1x53cjqdjY/LKncCABaKXf56w/uiXqtL1C + + MZbdIPFtH4d7NZcQRO389yYdcYMNaj6bi4MG5sNwCnuPMDHTPS81sPpYkNjla5fV + + 
goncW6pjaBuYPkO+yRKqYQKBgCl1oMfTJK6sDxbLBZqVxon5ahvCplpBMYazfmQn + + aCEi3eA+YwWKqDVAwHY0w3Q4iEMpvRO+c2iASuPi7IU6uuJu53BQmzz06gYS2eDN + + pYf2fwGFoCc0fquZByksZQlkNkHmn4oIG4+1XcuZ01D2+6twbvKvopwGfscq1dVl + + dR5ZAoGAMsHeOltLyITJ8Ti9mFmd8HVhN5Z10sCKtoEBagvlVs1e8egy1wh6l0Hd + + wPj+r3UHNfoxiWQWQffKMw6sAhfAuuRGP8gdcglzq7hYeVpbmw8eDn44OQYYC3ql + + lz8eD0KbSm8MlWvyGvASgtquvuDK36uMGEBC2n+nN8Gt1+FPGHA= + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 131071 + seed: LCa0a2j/xo/5m0U8HTBBNBNCLXBkg7+g+YpeiGJm564= + zfec: + required: 1 + segmentSize: 131072 + total: 1 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:CHK:heczpdxphw5frp3ri5sh2hpqei:6wflx6lphy5mhtpfb2abznoa3yk27ynbqbzcecs362miu72vkowq:1:1:131073 + format: + kind: chk + params: null + sample: + length: 131073 + seed: /N4rLtula/QIYB+3If6bXDONEO5CnqBPrlURto+/j7k= + zfec: + required: 1 + segmentSize: 131072 + total: 1 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:SSK:rwikq43zza2vvcy557moizudru:clcrnbchmh4ucl745t3f2cwo5hwhchbvb4pp5vbapqo57nbh3iba + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEAyLPhUE4G+3cl/Qb1SSmciZrbAJ8qFKZutcWqQQM12THlNtzg + + gfl3wTvQsPdjBJQ1DMUTyWb254Mgdy/KJPGI56NY5WcIKMvNUe5QUt6hzH/Cn/uF + + N7z0hZ/590b7dsr83kir52eIjcyy2Bv8dYwUEybWv7NvRW2pboTuqLpg0ww80+vT + + oCVvgvJhet2V5eXmElmzb8HbVEZklGVoUg0fGHEM+hzXiv+Nvepwm4nC2+Yq1cay + + gB9h1sDAgZSERoJrczlfi4pbrj3qgHMZTqGkzKCy3d1zmKYIOub5TmCnzMAa0eHT + + G7i+h5yWRm/k87H+tRdEkenZoPlgk0eSVAsNbwIDAQABAoIBAF5jIjJnD6eRaD8v + + 14k51ZFtT1NihyLBBs3bkO8UOG3Vpkt/4uGdVfF9VO702Q9dN/mycVTFZJaKN2l2 + + AyYOpWjyjCsOomq1NfEzF3lxlDwdVYVxfzwwU/rHuoHNUxOR8QwEtzuTmEe/ndg+ + + iSMq5oH/QP1UwJ6xLP5569dUF5cIlBfGVuyMBfynjHrtQXjUCKM5ZJ4onCnzm9f3 + + amS5B0bMGpWNSnOogrUIk5vlgVJoydRkEWook0yNYxx1/EFanx3rpzE2V2IxHjXQ + + VvKze/lCNCddGWNvgqevsBMeGUsrl9VtYQ0fvWbYdzNrKb7WtYmXE+WkBPm4PM6x + + Hxe7k2ECgYEA4E1gmYaHberFF4ye3tZ7Xdeke7C+g7Rc7JfoSM6MnUC1cnOklipr + + 
yhibWkqGi+mJl/mWB07uPz9c/81LZWVGLVJMI3RikGRfIb70BPXddNIaAXceMd4b + + cLx6rQ4aWLTijQXqu7t3kz3utgX+9wGbVtB44W9ii7TGs7xVERQs8X8CgYEA5RC8 + + ZI+4lMFFSsYe6pLQMauLg7d+hJjcr/sQ96TDnJCMCkcDQ9YVNepfBWbF/TZWU1er + + F8RCLN+HtDrXqaVREf6ESYOccF/rKCavI97QTwD2Oy1PD59Oh7gUo1djjRIPezuL + + 7EnYvcr9DpNMjDeKKiMocAlWSRDXpSHaAx+q/BECgYA/DaFlJws1G/URvKcAb3y4 + + kaEcYD/+GBqzK7TRmraukf0v0lBnIj+wzSAGzsJp3FmgjjndjhOtVeuXwSc7tq92 + + mBbtNI9slbqkauB/8HmzmEhVNx4W2KAQHfvCYB+J5jd1ez9UTMu9aYCMTL0yxJHd + + YrdIcB5ctZHR/tRO+8PykwKBgHIvVooWbp+QfFcazbyG9MtdxQ0iwimc/Z2n3Lxl + + 4LDCCVzyKzl8lVQsAbPymE1x8bRX5kzRo181CjOYhXrmkrQSmKUAu1H1Lob0Safq + + 4RIQ262CF4AlHINhCsClxlVDJH58n3JpGWb6sgy69pSK9w+sOPMoZF/Fyolhh4i5 + + F4XRAoGBAL9lDVciG+f7flylMPFkvKNcdgKMxeTAYdupkmYhBqg1Z1sKpLWzSLHM + + HgHel+wm8sDw2atJ0QCgwgDWzkSerzz4MujYEuntM7EReMbx5ArhqsGo1abAo5iT + + P6pdPFMUBWaw0rVgpEHdcJylb7mIQsorLmN3KaGrFs64/rGVM/4E + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 131073 + seed: /N4rLtula/QIYB+3If6bXDONEO5CnqBPrlURto+/j7k= + zfec: + required: 1 + segmentSize: 131072 + total: 1 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:MDMF:sj2qlco6lwavwmcjtuxohhsn5a:nn2kte7aa2q42gq3fyy5m5ojvm5qvpyeqpvwzswwrti6ushqg2ia + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEA8pEDu3IwxvOnY/TFR/sK/m0X+v6OW4nDv/P0n7KmikC1elek + + NPE+KZZYciD3RvYqcmT/y6av37t6sV8kYDDRBNO6sYcAlByy0qqaBJEdp/q5sGRr + + tmF5zfBtZX1nvbmsy8x7pGrl8p5uhfbUy6RfJZOv5s6uIHJQMkae32LfPjgyGkTb + + Ah5mQgIHBun7NISW43ESNu0XPLatcDezO1gAAjQQCfjsLBcyU/bnNq4zZTj202VL + + Jl9G+JMdPT2qkEduNUyeY3B0Ot7XXT6SXFd/IBkoeO4r/LKnxsn+QaAUPedlTBcx + + lk91EPLLCvH4H4ZHxgOLAp9QZFfOdcx1NWc6iwIDAQABAoIBAArGNDM8RDxiEDpZ + + YfXribZ5ZApLCkm4mdBJ5sC9L7aOX0E66VlMqeUw/2a6XiFxx7rjD5WdJsy6SB3e + + yv/Wy0H6oZ1HENiDWdIPr92qEHYopdzW6Q3l0II8Pq+2XUhJGgrHX2qTMPmQ3fnn + + V9Zfy7YglDydS5C3YyaIioADpUTfJf6hUqmbHEg9qmmHOOYY5rDjXxd8L2gaFMci + + tO52tHBjHGS42DZVY4NYN8cYIAqqgyDuXIUDmI/RCxQst/s5BjUnLvK9FAXsh9pL + + 
XQvCDRU7ENMt/FXUzR34lb9k3OL15Y7+tgDU/7njberBBZmgvejcC5fLN3P5bMMI + + pIgbhfECgYEA/UtYVco6NbKdYq0mI8HQEieLOeO1ecdjO/n6amxHBvtTYPTYmkus + + YvpEQ0+VRcqPQppMgLxZOkXFsM9MsD24wb3CNb3NtgXQyMGFwo4wP8NgR4csoLlM + + cVu//PgupPlAeMlicLPxcvIRnXKQmQrOYit8iWy5VZ2G6bOiqJ65JTMCgYEA9ShV + + UmTZjvqv83tNnA8ViZbeXyc4xJE/qrF4Saxcb5Cihyvh4vc6uBGyXZwA0euybaeC + + Xqbemc8+FjoSu+N8WAbaKdzy96IkgIJJn20k+ZljqGA6a1s4OSoTcIklx5PoOCaE + + 0zdoP5opOE7dsM9jxtvfWTCxi1ZCts/vRJBX5UkCgYAGAoqnBnRZH9LSK4+TG58n + + Px7zka6VpCB7pNPHQKhyxvXUgBq/lnoRoySJgFLnZAYAK48TIuTvGAa3ykNkjyJa + + HnmEMuu1nO+2Q7k7w4nriWQ4bkGl0p+4tNeaVf0tVuirtQOL7wkUlB/M35IEv5fk + + BmofDKBdIq63ztZWL+XutwKBgQCCiJI9h8MrVSGAhCPDt2hhVTpb8ddRGoGK0mnY + + 2HRzVtCjJmNk5PyX65xMKXdqTpQ3vJw256TYwrctQIifEDYx7JwW9DVOU0AaSMUI + + pSWt3NVqXqpcZTqffV7SacP66y8XTrMkf3j7fIr8F0oFDbfztzjKFZpDNY/aJQci + + O7UBOQKBgEFTXq2BFYBWQ4ot0TuvnVF9/8/3nAYFZEWGGu+XRcoq0DsgIwZcb+rk + + LB79AN27BJw6gaskoGKH3muylECFWju3CQ4yDXSLFhZhHH/NN9p8Geu7eluWDNIJ + + Z2wWnZzIgOK6W1QY1ho+x7+BjQDJ+T2NBK3ApbviaKA5dv05dcf9 + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 131073 + seed: /N4rLtula/QIYB+3If6bXDONEO5CnqBPrlURto+/j7k= + zfec: + required: 1 + segmentSize: 131072 + total: 1 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:CHK:mz5siv27pqttsl2f4vcqmxju64:rvxhulxtufho6pdbwj7rifneb6pei5fl4fqptcce7jdkbyrjfaya:1:1:2097151 + format: + kind: chk + params: null + sample: + length: 2097151 + seed: uqWglk0zIPvAxqkiFARTyFE+okq4/QV3A0gEqWckgJY= + zfec: + required: 1 + segmentSize: 131072 + total: 1 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:SSK:74k3o33ahqgozn7iq5vgnnxuau:t6wznkzfsjrbudipdvh3c5xcnpyndge4amgfhajynisohhfhyp3q + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEA7Pj4z8JZKl9I36j9UWEKCdhdrgsFDo6B9Ul2KcZjFTw1shEM + + Li187RthvkMtN/ih6MpBWXx3YabO7wyI6TWWyYvfcTkXnkzs/bGo2ludJHyPucLE + + +YPNTLLYvvya+iwnDBk9y9gSPVTQKwVKN8FnXi3uUHN/s61pMEF4zIA5GhqtVAPn + + 
cKvpa6fWrNay3+qiyOz7HqL5loTUI5UBlyuQedQH0vcqYRDWb0eLZAYMFM7Q6kCK + + wWSwZEcu8Fvl07wCqgYdmhdWIZSU6PEzZjeHR2gsUirZc0/nyvOXXMEOQxGgkbdL + + ifYBFVNJxbZjPBZIIk3RuCPKOsgz4jOuE9gXuQIDAQABAoIBABX/5l5BuIEF7lnQ + + ckJo1BBZhUqDSWMYxcxD/dofY2yoQoMXSnf+NX484yEeH9RcNIoypK7he2/JMNlI + + FgaYhZ9ZnFMCYUS+XYFCShIjgssAj8Z65EoWlPhzrhcxxyCskqw9d6g1LtROZTKc + + cuk9y98WPCA9G/Qa7qYFN5xWPXbeKQRoBaDwnNIWkNTav1wXY5aK/w5IR2iqRXjb + + YemajUleU4Vbn8ge9CLf6iEj9ILtGeG2N0NzmYvWUNvbxN4HxIxd3LJHfLMZyqvu + + yqDuhSNTh7nKP3sYxWGkgkLcfoYYnpz3syhhJeAA4L9WXlvKtv92234bBzS6TbJB + + +DXUAyUCgYEA95419jXoLy0BJ1i0rgr31UwAGs7w78yUw1mUyZeUbj3xxAu/Fv7R + + pcxmxpzMa6CSeDxHY3DWuXqOu5oXV8S8MeFI6EgBlBiLWYoGVP88Ad0iTYo2q3FI + + Thwx6jyMCLntcIHJi7JtYMMJSLD7ctSgqh29iDyD1QuUmz/YZbVkclsCgYEA9P6C + + ru+W6NxlNZN702Ha+c41cN1vo16KAhlc2Li9YI9/Q51hezJMrV2ged8yUfYxFiPG + + Kqtk9y5CkNmt0wNZ4oA/8d3WatZL90cf+p0G/yGWb4BVNFao2Y/pMJtVFZV/T9A+ + + 7zDuyPPHjaG0swiQ7NX5eravsq47v+SXFBocUnsCgYA1tlPuPHNJCHIfntZSin6H + + /hxntEv/OFlsppnnwMGpyDYRWJry2gOP+26v1oNhNUuQWUMDBw8M3NDpUNuPZlWM + + XFn8SOJOxaQ0oAQPm+3gWZ9/QmPpfIE6sFMDhG671djzdrPJYcLoImZ5JirlFcpk + + HF9olffi1sg9hPPj3B0V0QKBgQCGveOJ6uOIto5DZRXZMByK/0qNBHx90WT9uo1B + + 9HjTPpizyz7tzsA1KSU1Yff+8/QTRSGcHh+tgpfBqrbbMyCgXgDNOUDQCYRGP6vq + + 3aoXb5WZRW+XFYJQBcIupX+qG0qlztaOHs91Xf4Ge0Uyoidy2kwXnZoMH59k7ofY + + 2nNxOQKBgB38kUiRxm2LGo4pbfuGhgiVf4aDRUDT4nwlaRV8waIu2qeaWuETaf1k + + bayNxEOYFNraatkRVF3AGnOJUHLNwj90HugILF6uXDc6FhlgQj4R3peui4iaDYvH + + OPLuU0F+oJIKhUnRB0InZ2Vbaa4hCKgvEftqNPR2EkT2YkHgqAao + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 2097151 + seed: uqWglk0zIPvAxqkiFARTyFE+okq4/QV3A0gEqWckgJY= + zfec: + required: 1 + segmentSize: 131072 + total: 1 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:MDMF:5jvanzz7djyl3ritv6cey66izq:ognbst5tvfuow72k2lx4zlxbjiwpz2sjbtien4kvfl3ksiybhnha + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEAtJv82wDGagqVX1ivYTR7Il5iF3N5NnlBp6aMbCRRk3wcebLT + + 
VpTYwlMfo8LHn/b6trMcyP2l5vczc0EfkHYE2zy2f31SRRZ/LClZqtb1mu7lV5Nt + + h1qLc8jGt396bvAQtQnCM8FavIQW5MZUxpMwizWuvNoMEcjOva8PXr0umvq1wROd + + b9ZdTSJMxD+uROl7ksX4F7zOS4e8Q/+s6lyumiZcSF7ZSF3BifO0d1tzRR/4/jv+ + + EtB4zMf60b4m7DwClMKzht2dYmh3Km1rWdVgxZEKncWY9iNJ9Ohul8TpdxGrTWQp + + bdYdjRS7mNiryzU6hHF4gwzaAEjQ3tXytehvcQIDAQABAoIBAAlttUMRMZdNyyN8 + + 0HCjwHCrlBv2TS2KiGKb34VlVfuvK7HJijfhMfmr3jrhC1ih/s+n3q8gpfCk46lq + + JmwwhRHIg+lFURSs10gmaaxxNJKS7L0bXXxTitrC1s2zvxM1WO68JJVc5dA+mXVc + + lznKOFGkJH87NAyy1EjzVe5ggPANMETFU847/4mNE2BYINy2eGX14AK+2ghkQYVX + + YFfO1OGZiOoxbEK4O4Jqd7KezCUlNGMygxveajO/mUdrRfD/DxTdo1FM69USWSTG + + ZV+VS3cKc+zxaL+lJVoYyVNJmPi+n9RR/+OtS8HZXqcwszGHQQbPRXWdah6mNiWK + + Vpf+ODsCgYEAzA0MDFWu8QVeI3yn1o4lKiZTM55gRIBxpFwVe7WCk13mQCp3XuMy + + Cm1WP47fMmTTGCOsNO0A5U6ZNf4J1yNbRfITCLaKkFw4mOmq9NqPz3DY1dfHKwUp + + bjVB32rGrS1Qs7Gtbq6P37zXUGLdrfL0qs80wqztkqJKOvv+il64x1sCgYEA4pcj + + yQO4mAEsz1h3RUrnV83OOooaTAd6i6Vfcdlo6VEnyl42uqbrmkzmzpRrYBrmWaTp + + ctDMy3j1tmRO1Dhpm+uFDjGlmIOeQcLwFI1kx/7kvqWdRPSVP5mRskF+WNiFV0ER + + dFptBVjLT1lRBP70rzCQgIY9loM9fdGBam0E6iMCgYB0xc6sTGimM90wz8i5J3Wr + + Tm107+DFsv/WAICm4DQOo8D93Y+ctMZRY0rlapzemQaZHOkTDMLjd3yEgpIdFXXJ + + bIRqCxT3El+tWqPkJiQAoeLlVev7+aNBF6dP9SontvQlMbw/yBQ8BTTvIvUb9BsC + + mTvnYNFAhjGW2dlMVHLIWwKBgBKypiFQTUs9zZTOmAj/xVdZhEsQWlsrwtEDNH0Q + + k7etGrt4SsvcOlThQ6qIVNP5ZEjBcwImeL/Rm3URke+xOAXFyZUCQ8fyFH0YuPb5 + + M/fM8NNKl0+5XxeAdKVhAiwSse4hUG9phtWKHjzOAgGHiGlseIAik7J34fsf7q35 + + kQ5BAoGBAKqKtCu0OndLo1ZI3voVGiQrbn+OZ/7Hhjs/Um9UuOlVopv8rrzqRpAx + + LnC9rU/zEVG/TOjkno0Kj7/Af49z9mxjtRzxP2J/yMujOPv6Eer30z5NjtLNyJ2u + + QYIiihHEUULkdZ88FIHfx5YImHX4E9lEeFG13JaJnMYbmvViXtyk + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 2097151 + seed: uqWglk0zIPvAxqkiFARTyFE+okq4/QV3A0gEqWckgJY= + zfec: + required: 1 + segmentSize: 131072 + total: 1 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:CHK:shbt5viqjzuewblgt6qeijry6a:je3omw53tmmluz6fvqupx4uh4jaejc3fcvfjt56rfzokzhgdczvq:1:1:2097153 + format: + 
kind: chk + params: null + sample: + length: 2097153 + seed: BTBX/ampNfLU+ox7xipBGiaSbgC0kcB8Gy7BkJB4oKI= + zfec: + required: 1 + segmentSize: 131072 + total: 1 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:SSK:kibmgpkvunbi3wmq65ohjuxfoa:y7gfq6oydjcaeui74rjoupf5xst7hpyjuqyvcop6yhk7xxmpnaza + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEogIBAAKCAQEAsbpi7SI2V3v47iSl/b84TmUZKH6fJIaznI6bDCRiJce+rOCA + + LATvooZutPlbkI1QI1RxCmiZ0CPjemUpXwUIuLclBUIfZDBXnjZeNM+W5a2C5Yt4 + + CzAHQl/Hhr7Xtra5csf69dJkSOwVV63TpoN7r5TrCoLrReKlgqHp5iYndaHIsP2S + + lfUaX92Z74xtXEKxi/ZaOZHsMh5DpxtL6YSZjHkXXctmrwmwcAvs5Rc/qppOydqh + + H85l5H+PlSAJjP75roQXD8T1xlIFNFb5TUy9+fP9/NMqUmoES9eV+cCjj9Sk3S/S + + WhSSFXoG1DOBFpR6iQtuJD9qRjL2JkDD3PjiDQIDAQABAoIBABMkz7+Zwg851hY2 + + wd97f7HoD2Xuf69kSAgKz1YnPCA0LAx8kSnMrVBNGTMqset34UQw9g0oN7s1Bm16 + + ZJKs3OPirFzs4qs8zs9GrW6UVr1uK22U0I0p8vo6DWis+VjfxUmBE33zl+RH88OS + + QHxM6N+Ak7G56ORJ9ciErshg3zq7EKFjUrBMcpq7L6m+yzy1PBqZ1BcnaUI/o3WA + + W5pMTZFODnpo8MTWtmFQQ3ZUUnFfuBx2iwhVEP7Mu7BQJ0NG7C7GLsIBL0bNosk/ + + +7YbhKWyvvAiRvu46JKRIgv/Dn/iOjZqDnoM2UOBNWLbq6g4gBR/MRdgH1pM0m7C + + 4g8oNzkCgYEA5+bJ6Yf9HBBSROGoZxMppo92voPSZ0ykJSVRzOIu/8wFqQHlNXhw + + /udWfHCIqwmqcj7yhl7wYdSLsperEy/ZMnhRb2+KVNyAZBlRlWmZ0kX1lahYJ2RX + + 5s9+fEr+bU54ibHZy43d3+W7//NW09soMHyaLuhnGuhEy4/lRVo7xHkCgYEAxDJv + + yLooFWeasa9usQTEeyGQLuh5Fm3TfEXUluA+pBegTqNJ7BCegYl7tI5+btojZyXb + + 8m6LFdPj7OiTA4fHX8c+FgRpZkh/4HtbX+pKbKkp5IycMG7N9Fg2TOkdDfoJl4Yr + + NhLap4xV6A0X807fiTG4saL905AT6MOojbsznTUCgYBv+3tfIQLxrVP83Tcz5wYC + + 315I62EL7u+I3Heex05IyZ2mGjsz0eBGxzF1T+Y/KaC8IHd+uZO8uiVnbWP4FO/+ + + Nimk9SjIh94b+Dn0O5VC+/N2fF9tTkBAPcxnetNXtz/vxglVCUGuH8Lj+v7fuQG0 + + QEc4BZPcY3LtFaRyE/uuiQKBgG9ptNDn5ZtCGjaMyO79JhZGGPqKSTjTZSVNAkwr + + S2cjg4UkdPX4+gnVaMo/oMySU0hf12b0H0dl7Ci8ab+3eyCIpFkcaD4NLZDsfBcb + + lOffqEqBDrDyO0JmVW+XcUhelNPW/PLYAhLjPmVoChHA2G+wLJGzXTCmwKeNdEoH + + 5GeBAoGAUEAdLLTRoEUXSUnQ5Hk9OGgHdc44QDZjVfbN4sc08mc8HEXHcMDIwpMx + + 
LNWapDRO2uixnzkX5Z93spC23CerGRcH6co1fRqO4N8iRgJCvCZiluKamKlpzkBp + + HAMlqkVaySOcnAB1hkM6T/MzUu8PV55f0LkkbBbqF/hOk7003W4= + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 2097153 + seed: BTBX/ampNfLU+ox7xipBGiaSbgC0kcB8Gy7BkJB4oKI= + zfec: + required: 1 + segmentSize: 131072 + total: 1 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:MDMF:7itcabaosxi4au4pdhtyjepqnu:emhaqs7urtgtsastqh3kzforde3y4kqm4j4yjhmbnoox4iqqrrqa + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEAv8HiUgJ/Iyw7fYq2drT3Ct0M5XCZPK+5tprVC9wP3jKcwo+3 + + gPQ/lOFvyJah8tP3Qdkth978KRU1Y+L7cG5wWFcH3r3KoNB0tUMsI0HaLRjbpTyi + + mU3MuLWlXJOtOw53BOpwFcFWLdXZhODNm8IxEvPKTZYeJstzhUmew5AujZkpnEEg + + S2gF9JFqwB2LGae5NmS1EbKXPHVSAUvGhuMBvWnXD1+LNKhLO78VFE2C8rj+zCNu + + khbF8XCsf0TN7A1rxuQ0xXlUDuAiiS7Hzx1doncQuGSCApKxBzAEKXvprl0UgCcg + + sRmVzYbPLWoFVkbn8dJgwwyzDXK/I15/aIioNQIDAQABAoIBABDpdKBu/++GMyj7 + + VuRZSYB3xm9l4t3rUaG9PhTxr6SVKiYurqx83i6vQ0CZqGbWMvRnxxA4plypNjA9 + + EJf15YqlAliuvHQ6blCeQAJMCIX5r0V/d4e1yNxxiMgFbj3LJMwWMRR6HLOmLKz/ + + dqLKGbHmNm1pU/dv8hxLRelRigmK1VJtvyplgYvJ64vaEz4Z9Gh1zFIHpdeiXSKH + + 3RRCCXVwUhwr8VikpsSjPeSfRj4BjyqlZr8ibEkvhr89hsNUxjrrZZ/stZX5x3Ea + + 8QqRUrfegEn0a+hq93JXN3Gn7psmrL0Xy+0/IrW619MjgCP/s9U/VIAEhVcc29B1 + + MQj1OQECgYEA86wnP5rqlw2z8rEoXwHKPkrk4gVQpQ0zVwBJQUE+myTHMaaMuArU + + 8zpcoG1TROJ5FJVERPQLvVttAOxa2iK04pAwtaCiCmN3CPqwv9j5DaLVcZlNqpe4 + + msFKDCahqgPq41/+yakb9RG7l8TZn1ZkfgRI1wIncphtlxCTPrUXCSUCgYEAyXVe + + XnfyAj5oN34k3nz41Ee0DYUIIVd26wUKVUFt75DwQ2qucHU1Vz1ajGXoON55y45d + + avd1W1aZMs24Qg3to0UoC6n2wX1gWyf8m3ibIbTQkArcePPZNQqbqJqICJvcq+jb + + 8A2zk2h1Zp+DrLWpBUHnW12jpDdFKOaiyyWdHdECgYEAsJCN4Ajg85N6UOEN38ns + + QjcCosQ3K2HlUaVjb2VXeBOuQsvsK2+t3pDrjVOqgr+X/NIsJcqwtwUIdyLMskNz + + zresk+9RezWXi2obqOgPj1HuV+I95N8LZReqECPuAMPV7+wfMwDWwT2YMODy0AJJ + + zwZLwYBOFTteLZhVGZselgECgYEArmRnmKeEW+TiGoecKu1MCZc4iiuK6jHow3HN + + jBfjrups0i9bagZMcoSuCbN93xzXmhpXS+2DLdo9K/lhc+zSte97xv0OmliKPN7U + + 
kVFKGVeI4+hDCoEsmfng3YdIEwu5bydYnOl/di+K0ZdsSOnIssBmInVg3xrpR4q/ + + idO5usECgYAeKvPyn3i8jPHyAZGcpw6MRpNEr7NjdrsrXE8lC/uKEHdHBONh6kLW + + +B3Tq4wBeUjQkRjeBCOIH0fivDjDGQqOKDfV6ROqJfqqsMCEc5Z2hMQWl+wNHf6c + + Xbqb9Vbs+foYJImZUixnhsBUFTFItBrgrjLPIHyhXW4htQ6Y/6W3Dw== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 2097153 + seed: BTBX/ampNfLU+ox7xipBGiaSbgC0kcB8Gy7BkJB4oKI= + zfec: + required: 1 + segmentSize: 131072 + total: 1 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:CHK:acgfc4hxhztews6tdk5i5dmzxu:qehbpfrrj6nw32tdhkgncqaqenvlm6sdiskvjq4zasc7g77pzqta:1:1:4194304 + format: + kind: chk + params: null + sample: + length: 4194304 + seed: 2NVWOXvtwcAFf0J/g0fDC+ypQbnCU7awZoCxcnhpkVM= + zfec: + required: 1 + segmentSize: 131072 + total: 1 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:SSK:fcxs6wbchl76nfcmlc4wufptya:gtizcwbv67zb5g4ezygowytwf53zlrf544vgsglfmpre24fxdzva + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEA4PxKNABaQ7xlzJS4kc12HHplV34jhtKN5QBoo/RSVzB90lnj + + GXAa1wMFDwTaruBznTbM/QEZvMtdgCG5SaQqBZJoXjkBcm7RKeD0OWDDjOIWfD9e + + xnbqOjko6XBj0ACOwSwJ+sc9d39Tnd8v0OOSKaI6wPrmXMzIxwqEBUBi8iJsWwjn + + Kn94dZBHzRZMuNr/FSsXN6t3KEhClEQAG2JbYrmbg4efC2pvQArMi+xw/0MH0CGd + + qDBALfB/kU93PN2ZEb+D7iGic/3pphLwmlDVGI8ZcS8bTcDnNj73TPabshEpRJTQ + + kQmy+ErVHyJFndHXQ2XdfNNbHptiEx5WmFRJ4wIDAQABAoIBACQTix8r3/gqRBVE + + 0xRyIpqDbS+qkYN4zM79PJ+ZuasIeAyHDwQ7toS8E7oU+FoAB29HY8xoD5qh7jQc + + dEEg5VTFEB5CZtR/fOO0Z4UHL/mDIWw6nyBqM2SIWOKXJod/0g7wrbL8SC4as9ZF + + /RKyWHQmSDnnTDwc4aRlBRwbIc1F4bWnbmdoM8M4jbw4ORs9Vj+CzGqkNDKaFe+M + + CYmyZFrc6d3N1bpKR6HG4zS4nwwIwHJxdwV3A8Nd8oABZ4ZI21FlL49uicdQR2WH + + iZhYfCjqayZWDIKQNxphaXMRuZX5lQPxlJY/3g/e6kEIPUc5R0QLw+KeEvU6gCQW + + gV3q0rECgYEA4mxMFUbDghRr5rOCjTt+kC0WDsJtLWHMLb1o7Gk1eCz8puhRBhAq + + 5vxAAPtx722HAoJ7/VkNpawsZ8+O+njjwt13UqPfM/04cz9LAsX7Nl7Iz13HX0z/ + + eI1mjqvZ+I+TPUbhv/aU103BXpGfUzDFAJOgfF3Tr9OFOgD9y9k41nMCgYEA/l/r + + 
w9/D06OmkAo4CP17cs3LzJTZG1x4HSF7Cri52BEhRj69ePkKEB8oWhhCg+RUVb6y + + qPBBHhnVyB+n2qmRacL2C8zvScbSjRb9adbVYNk1t3CfsKb9bIinEyYhyBnV9Osi + + bIPz3KKArI6g0UjTJCDz+G5J93geLJ64Tf9actECgYEAl9FzpmR/XO4id1rv85Dr + + yPJiMt1M5TwI8rZo7vOQZZcMhUGKal1W1vBWXhI7EAZJm3YweuxGSUrLr4OtY+bB + + GPz0MBYu6CYmvqe2vRJQ4eDmFpzTvOPc/FEbbhhum8pxOIoZfmRw4niBas6LnPU7 + + cqqJ1jn3YZKbZwwZIKDzCl8CgYANJXHu1pKtTmjeStjohgkqPr6InSy3QEko4EEW + + pcNdCk2Y7scmCGDfwmx4c4aEgHlnUQq0tm33G0i0Jgarw1WzjJLguz+mpXLePdDs + + 9mBit/cRuu1V2NuCD3HrCG54g/VNyhUmXI9u2Ksjv99J0aSv2lAhh7mk431TuwQ1 + + a7wF8QKBgEKYyNDgiDmOdUaKLnwZYi0UfAHZ9m4MzgaereIScyHNxl1FiRAkkte2 + + X/Crl2MX4eyLxjFiafvb34l7wAsw3I1G39WvNYDS9tI98iryizB/4c3YDWf7NeJZ + + C5iTznHcOf8+8bGuKjjEVUp/RZ1/v4L1huCqLYCKak1jFNoJyGxr + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 4194304 + seed: 2NVWOXvtwcAFf0J/g0fDC+ypQbnCU7awZoCxcnhpkVM= + zfec: + required: 1 + segmentSize: 131072 + total: 1 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:MDMF:srwpg74natztaqqh2rm4p3skji:dvxhwilixw57pkcto5aviohrba7gbwyscnlsdvjaqhgvmcy2jezq + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEAr0Vb2oFiCV5paQeKQD9USqx9ZKHux2CWCjzUTOCGpgr/uXLk + + 67/XL0H+69Z3CNBk9mNRpOR4bDWlps9QL6axSz9Of8E6PSO3YYEKktyzSsMhQn4M + + AaJvN4RBgmOGcN1idUvslv7wbNtlvMfvqEUCSaylrgErcNsudn326/kkE2ARWfJ3 + + ulvZ6h8QA0iDKgnQsuOq0k4jQT+4XLe6RzXIubNZJM5AwBCToCZFUKQ9wuqKtbGk + + hj55NgvsKUj/EbBEaxDzjVoP+hdSAR6XoCXkCKHzAzdJNyLZgUXy5CajeOHFDsm+ + + gQ2ZjRhKVtAz75QWNy7eas/Na+I3zsJMzfVbIQIDAQABAoIBAAFdkQ03KqryhJqN + + dRGyPJyhUo/FEDMW86Ghntt4zgEUqbCJmTLPVDnuxxw8wpbREtGgQqD+KQRaIvp/ + + 99ALOdXhfiHSK6Xmyuq0TT9e1KRtjUCInVzU5bjBFnE8Mm5bgdpylzsHl5rC1ycn + + nUcfvy27mIXYxfxzhLLmdn+Y+bjkkyXJBqfG87C31Lzyc+08kYZQh7/Rx8wuXiqJ + + 31CbD/fzAiKWKWeKopRs9nmvls6C2xMdKqM/r1XI3Dyy4mDn9xzRg5/uSAZYOmeo + + vw1wvTYkPQHYdPZJ3pxQ7GMhY04Xc4sNtxsBlIA1I7MANw0jNVEZd2yMrt4luxG6 + + gFwUkL0CgYEA52BooMM5xybo0uKSNa85RYWGOepZQxPV3hz4dczIpuiPeekQiPD3 + + 
etWw7+nL9aygm7HQyc/QxpcfUGCPZoiwRbaL+94wQ7PMleOtC0ixxeLWQOS/UQ8X + + QklSaBEiUayDiZJGknQdjWlbhLyGGfzIU6pDh5TiS3fJuV0lA8E36PUCgYEAwexq + + UugfChyoM3ZFIs8CIjH5rLrmfFZG9RQQu5xKKjWYpyfe+y94TLIepKv2U6b7oeZ3 + + 0BnAF5UB6Bc2/MrX9UmI9yB7a8cloeP1eVzH5g1WISJfVKon+Do7eRTlGUb0Pykn + + C8Rk6LB5LFEWjsMTZc6AMpo1f4gUtV27fdJS/f0CgYEAlBmMvyJXMFeCfcHS7pP7 + + J7nhAd80RZBDu8l1bAmpgdSoSdNZ5x2+exyfBeHz0IwvvZji2Nqxevwuagd0op/p + + nKXNEmnVIPDMikDSeb+NMuoQVDdXEm6DZ8WA/uXAvuCazYsYqxOx+tsuXldByw6X + + t53rXbR56O6C66hoUe/ydqUCgYAFbUJEc656r/adChBBOx3KKy/bf5d3n0p5DUiy + + l1sT91AATYNV8CwjqVBmN1G7YY7lJvfvYOkZP9g/0HZ/eIW2nYoxsD0D9Ry+fQyf + + itMlQvZIExgr3F8l+Ss05jrLDEtFgTdQgvx37ohVjydcc2UVkkPQJrScjwhVUvwu + + NzaPWQKBgQCr1nEFMNY1EV1cucmhZlp5QOUhybUH0fqPfRjRp3sFuyu3BUOeTmum + + lkKoA+Rs6Yrs2U3OsEQxOl7qV5Ab350wiUhzqOZsCVEK4UkAWweeY2IlsyBybbg7 + + lSCo+6eJWyStRracQofwUwsXklCZlBcLAL3HVyH2R/9UuplVU/Wmvg== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 4194304 + seed: 2NVWOXvtwcAFf0J/g0fDC+ypQbnCU7awZoCxcnhpkVM= + zfec: + required: 1 + segmentSize: 131072 + total: 1 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:CHK:n2642ofjfr5qljptdwjbwjz6fq:uwtjrewrev42yvez7br2574xu5vleallwnyddjaxqquvouv3dyra:1:1:8388607 + format: + kind: chk + params: null + sample: + length: 8388607 + seed: w6uP8Tcg6K2QR905Rms8iXTlksL6OD1KOWBxTK7wxPI= + zfec: + required: 1 + segmentSize: 131072 + total: 1 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:SSK:m36gxdp2beqzfnymsb7q4lgmcu:2gqyjo4z7f6z3kqbbtfasnmfxdledtkxr5vs6hrksaodejunrdra + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEArJF3xtkkEXvoJbJy0WIuatA3Ou1SLV815+BAtobFPBhFO4IM + + f+wF+3TLsLgo1kvWz3OBYhiElx8W+ZBifi0iGQfAXQJNOerRhGFgdBONN5cXCR28 + + QUvvmmM8gB1BWfJRcoMgRCsGrfEJE7r4OQ1E1iPwjOr36cNvssiIT8LDeb2yQ7hq + + gzN6wDpFxt8aXR/weo0vjcw86A1MKby1zn36ufgiQAj42f/8F4bXHh8wGs7IBKBz + + 8ZkgwCaI8UbksX8YCQD/i8MCJCrlQMaIefxlhR/r1CVB0hWII9jwirbT7vviaI3g + + 
7p6/s5Xgf3eB08ZTvfD8Kizenoh58DoZ6eQ8ewIDAQABAoIBAAE7ImsZNnAnfZ7z + + OG108VbSuGojsj/fm39VcrC6omKM28WSZmttMBe8nA1dKvFoZhZhwQ0FlsCLOLs8 + + A+/Ze5JJI16mew2MNsVmem0pjIrWeZQXkbW3iHSF+7MAQmyVVW5nMA48blZET2fE + + ICwsA48xf3BJ5s3UlpHka1CCV6mhJUoDhbQjnIOy80sOVExDlPuyh1OBXp0hsNvz + + 9HNykDeT1uioE/E3HEQ2Nsmo5YNXoQD1oBnk2aOZv9loFFGGVXLJXrYo9fJ/OmcW + + 3IVz+xIIaVomeEkM6CH/3yp3J8n3Zlv9lYVthpoR3PGdU58wFPD/77BUthLakPMl + + cNRMg8ECgYEA1zz4Pc9ldZepCZy3ezemUv48p589BX5ehbbpqd+csraacBLq+RGu + + IlnVfmqrXD2XYnFzWHxExUXuwWYJEDXQzrOxsgsHXuQNv1nEipSZhvpBvH+etoFC + + tROEmaa+9MA5DJe58OHW8U48oXQac72bNvB7jF0bF9Z3xGphaaEARLsCgYEAzT/N + + vtJMKAjTf4PknxhmCk3DPSqwAb988v+oYw3Pqwzq+45Sg5eOGImum9mu34Gwxecp + + PawdlHRBY8vli1KHJfKRkATh6Rd+8s1maiW6Mpcy2d8qCUBKX5Kne6sdyk6xIkPa + + gJutO1CPLUKFuKdxaFNpj2pIlo7D4TbHc+sQS0ECgYB7x3NXUIMfmiU7AuY9tSYw + + ikblet0D4MWJDkTYTWF3IS41j5uTuwgydwkhF0UO2djKY0YbN/PwoyQIEp7ZtKkt + + hgeFxXPqrSn+xigSLh0Qk7DkL1xdxn5PVjcmic89P6JPTJ5BGg+bXAvgKb3gm8S8 + + VpYmhZDEJ7FewnLc5RsbawKBgHaMXKzT0HLrLiWfq4QM1psq8RK6PjC0RlogOkUE + + LCdS7cJgIN6qwcMAex6/a5bi1JRqANMDP46IW2Bl225OO4s6gMLbXxR/oq5g3r9+ + + jP49gHyAvknbnVl4Xk46tpksPHlbEbBounThAeGVY6EU7ZbhXr4cGFMFoLPLLQaS + + BbZBAoGBAKKMG+MRnM9d8AlvTWeJF6nzY7H3PXQHFgs1ISINqE5v1MUJ7o+i/OWl + + rW6q9BKhgdxoQKEmaOSvw/ecS3bxoDyAieXBkTjbcfvETGnUv6JFCmu/tolHCX6M + + PHUyyYABqJHohWyLm2AvQTKcSt5zK2GCFXtZ9fMZQlHZUAmiNRpm + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 8388607 + seed: w6uP8Tcg6K2QR905Rms8iXTlksL6OD1KOWBxTK7wxPI= + zfec: + required: 1 + segmentSize: 131072 + total: 1 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:MDMF:mo34r2meiiy46vq7uatrgadt5u:5zscxdgegy6z73ij5zqwq43mlqmko576fxeyhuqc5rvauq5md43q + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEAzD877AvjU9kwf7WSCxcTCtMR7gc2Q2F6GL4y0/YnnGOiq72u + + Zf9mVTfdfn8TcQT5h2dOYWyypEAxnIK3To/myFJ+1/34c0ddPBTHmexUpr2czzx8 + + wioJdG9upkW4tabxQU2xfQAYcs8sEVmZMcTILfGdN0vcSSKKpLx953rX09jxuaSc + + 
sbDOwqKvt3wBGDcRZRuJuoCjEBsKKQMhWDSzzuZBwF1xn/oqr2NagJ8s7oTpj+1c + + srfTXwx4oXdZmeip5xMeI/nZzuIYNlSd7q6NSAK6W0CRpiK0glK0ixOR849YmCGc + + 8Cnn2AAw4ABmLOXeBHy/mUHO2Kdfn+QrLBbymQIDAQABAoIBAASoa4Qq/ZHyXyKx + + IBsXJAodkqgwQL/2zOCcE8+AKq/sbhpcZB9eMfX/D718Wz6QVmV3XmhRNjNnOwpp + + SRIRAjpuy9w+pegbyEf1ZwY/B2uEYaYF6IggCnZTOpLZqQhMKJkskv/kB5VfXU9U + + UQP7dh+QG0hWd01cAjfsDDNXepe8Qs391Tzxf4/qlHoZrpYt+B1xItk254YrrPjZ + + kjcR5JTonutaPi26x6ULLS+VLeSGdFAFf22xW7eDPIX22aSsYJ7nzAWodRPGqBcN + + l1mgcHgpSFnP/TbWm6+FaM0eIiTad1I9ESpQdB1Gvw2uCNqa8sN2hLgDztkWH8HT + + +PkOSaECgYEA8PxHZENANGqVmH10WltFAbzmDMagGeLC+omaMOekdBNI0cSTlSTR + + HEeELWPCfRRo6En2VCYuiJGdi4vss6r5ppI6vu66Q68eMjB6+sozIA8CsarH8N9/ + + aDBG+Fy56/2vrK41ywS5L0qTMbw1lmRuGgta9eCtsYZvjGUyXtzgeZECgYEA2Pj5 + + 8yurZUZBhbQLGiL6qR2Cs8Z1HQpeGlopFN7o5EH7arTy1f3CF7McYrkyefnwAzCs + + JcNl+ufgVtx33Bkm4yNxirU3WlzYbtC014ERvdLbUdrKspcxmn1zSwrkHbg4gpWu + + qLtNl1OZGmpVFxIj27YcdhXWuaMRyKV3PQZFpIkCgYAtzycbDhWkYSZyyFZX3sWt + + YOUyRIempA6AZavj5ATE8+2BwqZzUX5Wq9mabz5HXJvcnEKxGFj8KQITxtOGC9hN + + K7rzFJpfx2gsDj8ycUFqtK/Eajx7s2Caw6KaD7Zf/+dnIe6j2xAAx2JXr/lXz1uG + + o+X0m3MpLe8CdzIuCjq5oQKBgQC13AIMyxaO/VMgESeZEaaNpzmNG5O/8pereNSk + + NK5528Ay0VYU/Ov5V1w8d0QSruZ4lgxEXsIUitQjmgkwxzgr++JIQ9oQeG/EelSJ + + qRpIw/qmYj+xbz7ZYbsINCm9q1JaScGqlcvUQfK7DFMj0kWR+9NhOq7OzBq01dPa + + p24qGQKBgA0qtgklc9qaDCEhNT66KHDgRTrrBRNf36IAWD5mEfb+mpVMBYBiq1ej + + 1UIae8DbgJTzLNHxlBtFEpCB8TM8xtVfoW7XGWlQaIRK8Nr1VvmhmsCQVnhK4ZKd + + p3IMoWvoug6cZpr8Hs6c7j/u5zdwbbYA7/mDRer6LoqmA2pYYnpw + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 8388607 + seed: w6uP8Tcg6K2QR905Rms8iXTlksL6OD1KOWBxTK7wxPI= + zfec: + required: 1 + segmentSize: 131072 + total: 1 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:CHK:nktoeeuydph4acj2fux4axyaj4:g3fvjwanenwsgdcn3oxnau5gtbdmzbnbevlrzrb5qe4yukuwhejq:1:1:8388609 + format: + kind: chk + params: null + sample: + length: 8388609 + seed: yPi3JHKKbWaEEG5eZOlM6BHJll0Z3UTdBzz4bPQ7wjg= + zfec: + required: 1 + 
segmentSize: 131072 + total: 1 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:SSK:bmqbu7cppakl53iwet7rwxdkc4:hc53hqy2gli66tvjvnl2imgg25zjj4lylooxmwjghaxrq46ch3ya + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEogIBAAKCAQEAq12cz/r3Jv8sUPbOvXlldAhifcEj72eE81sugnCZnGS67pvm + + d8PoyNgMlFXbTKXTsxeDVcoOdUcq3q5dxhhbKwQVt9XscZJvV56zgDTG789tcmmC + + NqlvCnoXVXThrj7RU82qS8V5vez1ovaVo6tEnZnXHTJ/JdrLl4yfP21W6nz0YkC8 + + cMqSZPCYczfq78S46njEulLYaEPxV82Rl4ffm3B24l1Pl5SmGsSsVe4SOBR7WFse + + JpZB13p7iBzuMT2SIQdQAkIE4ILDCo+s86TSVFDRfJpbFX1nD1dntGSUmtvtIjEf + + 5uiw56hwZ6Xb9tiSIOLUrPHMWZLCZUYNfqRevwIDAQABAoIBABeK003BWS361XBE + + D+/42wa/ViSXoaO0sY+rQa764CebsRCxy0491F+vSr8gMnILwBM1Ej55dVYIUmvo + + QYrC8tdshr1MPuD1cKV1cIyW85OjiBI1S4XN7irezhDX7188Uw6zzQb+2LROdwqN + + 3M3w7ArIxURGGTCup9SopYIVt+Cb0yRM2xU0Nnuhk2CPemNwcfzulxzLOoaNT4eV + + wxbhJH9xiutZY7+gk5CFesouMgy5KvBCLGLw8osMgdBhX93yNQULknidIA4Ko9jQ + + 4+4sesH79TmQWg/z1LSV1XOKepOoZzQv9RY060LRQAAzdUV9C4iCAnKAWiw3mzPz + + PZ9rj/kCgYEAu8NbvINqFTuQ3iy+xLmDygNYZ5scetnZsxMX2JOGb7DNof0rH1AK + + YjGku2xxJHta6S7uXqVhItMf4n17eUpb01+Id+oeYgUb4BjTfBW1sThv+Wl4zKvu + + iQhrTl3AO0K6UrPYoTMx2pcXAfEsy49YL6zYajnPwwXDK3AQijOmBNcCgYEA6aS1 + + j9r42626zK6ZtPpy4OcjbuytMZceyvz6dJnUKRYm9ykdWBmVHxKJZ+a3VqihaYS0 + + /kECd6/HAQaPMgR280eDfhVHZouKi442Jh4GiC5TT5gGzAEuoUgP4aB1AQpd1D9t + + DwZ0iAuLtRQYNN8j4+FsNrcCV6xxrS1LpWwy0FkCgYB5bspYrCEipEh3+DZUoqpi + + LzGwp/eOWHBcSV/luNt8RrtnJYYLFUfx46tnb6Xo80KDhs+xNIIS9LotT/xYIEgs + + 9x8adra5rBYwI747BQtiF18LzjPLIvL9ew1zPFzDts8sB5Z2AtceSRMfNWxEJmvh + + QYchhEwjFAn6gNqhlu+rNwKBgAwoJ9JOYHh9t6SCyTijd2rAXBWfdvuHk3CYbSe4 + + AVQJ9QkTOJWm7x1ox4GCfbOinpNw9kHsfAZiPQaOotDFbrMF24+p58csJ49PXP3q + + vghD8M3JaUEgJp75sunYgX4GXg93JWOMwG97uk83tnK50ZI/3nOSXirVrCyImNEI + + qjQxAoGAM8Or1b0Ko1SQft619DRfKH6pHmGcP19ymidhcAkjfvwhJbrEkoGtkh/2 + + TBowy9QLaHMnKIPnkVXGNWij5RidLxUqrg8GzdYvrKC3cDOZidr8GNwIKQIaynh2 + + /TM4NRU628hVBPL+8v81u4EvNvNBTgxuRpn2CcI+nVO0arCHg6I= + + -----END RSA PRIVATE 
KEY----- + + ' + mutable: null + sample: + length: 8388609 + seed: yPi3JHKKbWaEEG5eZOlM6BHJll0Z3UTdBzz4bPQ7wjg= + zfec: + required: 1 + segmentSize: 131072 + total: 1 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:MDMF:jg47nspwliweckiaov4ajbts6u:yegxrkzkdb4umh4rayql5rwybftrtnghh4g324cehqc333ryscyq + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEAo6KN758FqGA+L30E9U8AVEDWqCu0k0dzFt4Y6Njr6GfYLN3a + + PeeJcuFQe4NaohPfderNWlDSZ2rRDM0Ftq/MQjHlBCkjY/PolqQDARbXrP7rmDsX + + zyhG0sawySrgld/sy93UZVCprF3Lc9/CVNN0iphImfpOPNtH2FsN8ZCqi2fyuL4D + + dxzVn7VfPOXKWx3OE6p8ME/0UpwEwAB/W8GT1CvAV43spMxcQIAmANpXKb0agIpG + + 18WoEI+kgyIS+evPQBC5pXRAmoeRgyPVnNZAjhFNntDvP7ZotLJ2ZLAsu+Z8xy/R + + TWe1xGydtj4+4m4lMT79VJH3KiGtggEzr1ETMQIDAQABAoIBAETFMW+uXntYD2p3 + + 2VibXiEKquw8igSHt7e5mbBqUiL3WaPpjSoNH/f295MhLjsFrRlql+lIJFUwUDFY + + DPmtQmPjgkNQYr5EKND+lwCjL/tVm3/7/dKjM7irpmq1KXPzixpW3UfDMbvuI25M + + cOijgcwpmgGUb8MlyTkFc4O9b99sIRrR7c+Zf/02tPPo1gCXIyEBKfVoUBDajt5l + + ZceA7hIcFjlUUkdR9zQbEw3QaJQ7Ge0AZBQtqz7t1UwvhLn/+cMD7MzQi7TQpDQT + + wO778XL5Wm1TgZPivT8t8bvvo/hwtZdPAXOt/gvXJNqXdD2kDjJhWxXI//YSrDRi + + 5VPVgu0CgYEA1siNix3V46t8B/nfBcs5kiX2/pMTWqQePcWjVQpOGNz4PT6e76UY + + 23+urd96KuUrn3d6LdJBrsfEr4AKJGDBpETKZnUgZMsgCQ0mnZIExxXYBZ/OpeYw + + qG8bvB1/Yldtq0Y+14XxgC8/XJY1C1Plyl8Yh17NeCA7htuir1Vep3MCgYEAwwlI + + ycE+P87Q87+hmO3xHnRD2EScT6+jVtIr+hSQrbHoqPDjNNgHnSNbRzbSvkeG5IS0 + + l5IpwcDLSiGn2+u0AiaP96rtyk4fiYhKX3jis9vanMi4VKORuwMJuIECMET1Aw/l + + NgUlRlxnYqmQjOBn2ArcSS/UcrRAyfbI57jTycsCgYEAgZjRjzeZb55xYH6sy1os + + irrNph4od3C/rpYqT43AQdBTGOFIFWGQ9iC8zb0ige91uurklfFgII35Z8viUsDv + + FqdLWTcjLK5DzjJZMoqAx3+usPYUQpX6lic0nPVPf48xZT8le/YeGjJoEP2xU/xz + + kwB+VHAnmmwYfu7X4uOoEXECgYBz4N5IUPJFQwHO8Lp4fFbYO0fcBNfCWJ55hSHv + + 0awsJxoO1iCIUxoi+NDQvPf1adXxjA8oRwVcQsoF3302IxKufG7pPbtOiaAfPMTD + + eLVpG2UF5hPu5cg+Do4F+1BrkWzpRtZuhBwjc99RNWHW8bWBHOLI8QwOop4j4OZ5 + + Fs1uhQKBgCKDVJojSw3IOoDC66KAN+XQfZ+Q/n5pEElbaUzoOCTg1q+/4mX0HZ30 + + 
9AcIaiXmWAjMLmOcihMMm37yapIIplhF81Mkb6cYS3i7boFCa8Ioi/D/ryxEt4HU + + kOZqm/l1lQixd99wjf9i8ozjxFMop4WQdNCNzW9ujyrdBrPtKtYx + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 8388609 + seed: yPi3JHKKbWaEEG5eZOlM6BHJll0Z3UTdBzz4bPQ7wjg= + zfec: + required: 1 + segmentSize: 131072 + total: 1 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:CHK:rl4bzmselnuezmapjlzssnqg2e:p7kvin2fnemochuxsmh6ot75qpbfhrscbxi5i74bhqdhzcy6i5eq:1:3:56 + format: + kind: chk + params: null + sample: + length: 56 + seed: YQ== + zfec: + required: 1 + segmentSize: 131072 + total: 3 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:SSK:uuwofnazzi2ehgwkbzwflclpt4:75jxdbmcilhxb75i6jakaqfne34gu3csvfhem2iqkkkzvcqnnwsa + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpQIBAAKCAQEAvZZfYCB6QoKGf+YkZVpLZrQw23Hhl1UboV4nXgKLzfCljmGu + + tBTspcr6eWSm2kmZQuYVG6jZkHePpnOhTGAtbPI1EvH855zUVccBhkgtLok9WSoZ + + Nq9Z0TOdgDfvtB1MKeUq02EuiwgAoAU9j0cs/uL9upZn2wlrahwXcrZcmoCstppV + + P/Zi1E0kQ1WvwJlQAuK7i8UsqwOSw45jNOkTlN0nnnA5UnLbPuvg0czEEu7EaqYu + + CAYMl/ZyX9V1U8hKw6lmtYWKpGBoR5kJowBGfYhZM9Yr9czuYxbWi7g4ZcrgDusJ + + uwq8TeVpAne1CYhZZOh4XNrSzehmDnkcqhYHrQIDAQABAoIBAAFj11osksjnDRZl + + +RFXKqNbodoSCS3jXVr/BjnduemuIICdPbsrRhrnFJQMRV7nWDzR1AjKYaH6Bm8Z + + fO7C68JXOkVjyc9m6nWgimXSJKapMe5z7RBmE7oBb1+vyU2gQ10xRXGcTkuNqPeD + + YlKpGm7Z+jNChAtqk8OI5jEcniwxzxRVo2ujm34LLahbQbw412yuohwhkK0m3kJ9 + + /nIaDuNhKaSqrAS+n8gflIvpFY9yf+k9xWnPjWhW9nLie86Y45Cn46Mw0OU6AHSo + + qoSorW0BUZZhRPIDsW1B1YINHXOREtxvqpMEeOat68Q1+UQCYJrgMQYgrpMUbKzd + + 5JoRZKECgYEA7/nFVS1F0a1U2ITvmF7p66WT5unNUFppm8jk8xPQXcC8SX+Tl3hc + + Bch502NgKIchDO0C65/EgQwY7Cou742iweagQhEDdojbBxmQdQXLzkJlyzxCh6YG + + b8hWzfc/TlkhgJToqvMvF9mPNkJjo3vdbqFQfee9vPUsuJcH4Y/H370CgYEAyj8/ + + J1nNACfxy5eNM6FPriRFFcHXdrR5PvqG1yvAAgyTZyKwlE99pWF5a7zwfFye6GDF + + RlwFOY5U+rP0ePG65Ho6zTlxyG4oC9mF9JuAnKEIIflAWudG7OqHJr462BrEURYZ + + RizLb6DrI37zlORvL1uLPr/b+okVUJuih5PIDrECgYEAhrpZ+ZozSqbfrbfktE0F + + 
U5FgWhIFfQllpVrCf14ua5RboYAIos+mCnElRHLUd6x198XRD+xg7HqYO27rbv67 + + 09ThQHZA1Xm8Tl4h5jFc3O4WLGYmi/XAQ13crkITvq73yjLP9boWRHOWncXkHtLZ + + 3NSgVi+XLNERTIkumYqZkpkCgYEAk89ff4nw+jE3VR1A1EALtPDLENineRjzF+UP + + EUjnPlgkjpbayLnD0U+I5wWiGLG0bY7z/rUYGHV+g+9rN80rUvpF6WEXWG2xlN94 + + OEpB17cU84dv0j//JP1OozEaXoBJhB2LgS8Ry1anIz0QFnxRCiJ0hPrBcbwoOM5W + + HZIS/zECgYEAnyX2uJ5Awnef2hAQH0gXNfnHjcIy03UJOLjOFCH/z2wJEPXZKzYD + + HXimoiodVWyjAKu7HzAwCxfp6PcC5l1llevJFjIOLyG+MrJ1JOKi+EtV1ETZt0BS + + PKkxMCV+ib0PtLyC457p9q3XrtuaAuEH9uJNkv6+e9eg6eS/cvPWaFE= + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 56 + seed: YQ== + zfec: + required: 1 + segmentSize: 131072 + total: 3 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:MDMF:tznn56mtthkyxbrmiv3q2tjlja:7upsseycyxoyeoals5xnhczxb3iopgugggwievjibkc5duezlb2a + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEAtbFsdwrCBiI+8Hh+s2HB6/6N2wVgjm7Nivejr7FzjmK8+UgN + + DVycRdnygdzoEybhzXImonLw3bJh55D3BRK9xlIsXFLDfWDPAyr9NZW9316GhB5s + + EGnvvM4Vr2+XW0X0fymcOKunwaNTQ00ps3fPqyepW/FLUuSrX8wJNuEOR4U4/vEo + + 52bmxq48oYeWwd7P0gMR2q0eHydqA1sL93S27NwMR2RaRV4s2gxTZyrrG493gJhJ + + g4xAsXWiieUboDccYrZRO99zOAB2vyYb32gUh0vs6xlHvSFQpX9IEftdL/Vlrbtq + + 4y+pSPsZiJwCwJPJQqMLU7OksDCEZzJDFV04MwIDAQABAoIBAFrFjJg+dieFVWdi + + 832f0beaoXkyCwatoZ+TT6IXZ9FTT/DER80MnwAgvhCV8hWbX8T5igavoNlJZLNB + + T7+nmMrrQ8FOEd9iDZoaEI2ERWtCOLbp0fgzTLPJS7ktaXMOlHMxMRx42aMaex8M + + /k/shAIQmwJVntmHZ3zBHTtfHXip5EbdX/L36TAfCBjbd0ArsFP6YP9F6vP+VWok + + HES6wtixAX+ff6MGmU69PYlthJYWXrhS2/7xGdY6Ezdrm0GEvn9GviQqnS+6guSA + + Rkq79eFG61mq+Oqd0xEe1wO5ul10xngH70NJ5xQS37mio53Ijcmgv4SH7GsIy+86 + + 2VspYW0CgYEAxizeJnFH7IjugaXoPOUxz3nXm/Cq+ijmwz8pZu9KGQDm8Jf/WYYP + + 49uNeTlfEPf/qVHLkzNVKrHSqaxubwrOONf8/I6wqe+QGHO4k5elll2bI3wFFdTc + + tPT6tJaq5MTdySfuUrbAx4WC59m+2Z/M4N2HxTXBPvO+AAqZiRLsgx0CgYEA6rVh + + xCOIKLRN1fduW4C+FuKAr5umFnOiCRya5gDwD6WY1jPAaSbfQVc8INAVV1NExQiE + + 
oMAmEOrt3HuYxEoRSl/5AAXWoVN7Qk6b20jtOshEBKkqk19Ew1rXBkfzWfVq9+4c + + 4hrK5hRpC/MIi5tR8AlTM2eDvu3/WJR6uqCL948CgYAQaqUYgCfaI1nGqay8Zqwi + + qNBAncY8JOlA4VmXqlj0C0wWQDEqBF4KnSRyF1uVt0WZjCoWDpmOiN6PqbYYQsfk + + k5fkgBmIak0AiY2PxG82LpjsbpipP1HtN6IRFa4gd2J8CG/IsFT18kxu0m3p7z/0 + + nMVjg9l3Uo+5xycC4OtwzQKBgQCktyBijvEaZ9cMJzZanxJIazMWiqxXq1T3Ag0v + + B09yG6wT/4O0B+S8LWV0PbQMcdKcWGsDiXXtf4HorxC6CKTzxkCwJGjJFRY2pYY5 + + sYdTLoKVpsbLYBuY4eJvdQUyh8pHLuM0RstIBuDl0uyXVSx+wXyTYb0SvGHsH1+2 + + I7+2vwKBgQDBAbeBgU7reZHXa+LOK1McFVazYXCyi/CSjDyQOtaIRJpfoU2dHQfy + + 0SpcXb6mWC7gAf92sT4kDXJyts8jNWkvgKhifhtmd7TGSqsdNdhdc8yB+Kl8SmjP + + AOLOXs2hS6NMCDlmQDBlEyh5/NHhUVyN9FCEt+6l0qjE9gS3FyQAjg== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 56 + seed: YQ== + zfec: + required: 1 + segmentSize: 131072 + total: 3 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:CHK:ycvogzi6wllnq2bkx3t6zdtwju:um42l4yen7jiwdfgirvedtty3tt3xuhjiyxzqoourvughtxjar3q:1:3:1024 + format: + kind: chk + params: null + sample: + length: 1024 + seed: YQ== + zfec: + required: 1 + segmentSize: 131072 + total: 3 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:SSK:i5hhjthb4gsivkzvch3megrely:hkqc6eypopec6otxa63muh5olpihrsnj3ssnq7whrwvc4fssoojq + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEAryhP4uLqtk4R6BJnXYLo+rwmWCcdec5O7Cg1ath3Gvt9RmMb + + vVVKwTdC0ZDyqx0+Dsr74oUNXA1oZDr38/3Xk1OLJmCcb78V3patv3yJxOiz+jvb + + wjKtvyg14rYIBoEu3/bgUPasphmEbjSmUEbBjfihNr8e0bi3OQjNNlVQgLUyI3FO + + 3PX8awmTkyRSubAFfRxOjecxfqj214DQB9j8F6LrsgGSna6IaNTRBI/UwtOiNOPm + + sczhdoctVRZOVzKlwpjickL15gwfdldPsuSLm1nkqXfoW9n0zV9V/QwyXJCdhasT + + mIoDN6ZPHunL2XDRN2IFjholYyW4cWj4VYVnfwIDAQABAoIBAA73K0gv6iz0Y6xH + + 8kP3nO9bZw1OHkMbgPvFfbbo0thf13bNnf+hy3bRwWhFca50G6rI5heXFaqhTKOP + + tELJFAO29iMrywHzOiugBS1gtya3WTVOqvqfOOAlz+DUe8AOhpJFNipEwUCZ2opN + + /k3KldwK+79BOiFiHmmFmn8DcBLnA/iI08hKJRRUOtn51kVe+MPAZkETXQthMq+X + + 
NgJ0a6kfRDV+7+aQZfhdV5raCB7nK8LZsm0NzGLvtasX944XxFH15kKXaLkKsvvQ + + zV7QNeiQMfshxlLfCbcgA9NVsByYdpJOfchx2WD+L/ZEpIM72wVfCIzpqkXJTPGi + + +Rqil70CgYEA7fgz0pOe6mGElk2TSQBjcYjGLrCGttyKtl9OCmsFgOlYadufN1eq + + Ahn8wQtuEkQRbf8t+0bMCZ8/XU5jlIUjO0bIDgZ4LDPOckV45e19fnqkCeEnlPnp + + NAaM0SewY98gsvjHuVl1r9m96pVzlGiyC4XFG3T0XLSzg7EpIIKey6UCgYEAvG3F + + BqXU6o+BgamohqPHDR7qY41Lt6f7XZ/o0WCkHJZq/2/JFycwmHgRU3esj91zbHcc + + 14TeoCsIPIZcSdqA3rwtWolLrSuUvvmDfmKCkEgAJRsHi0oayYBcHE4lW8UHWcVT + + FYjc4JkcK7viZXBRIMEhwLPk+DsSjX5CO3ZKDVMCgYBpt/l11IUmBRq9F0uWg+ip + + 2KSKu3utoz6wlJh8Al2YjpHrvVj3Yiex9U+Xh3dn//tqTZJk7mfY4nlo/1k38wna + + 3LAlovQiVwWhOIHkS+STmvJjPTazdW8H4N0QUjyHsem5+NHp4vdonyhDHhAR340x + + l0Ug1I123gRePgdSXRUkzQKBgH2DCMiC0a5kZLl/zzfQBBjjTPF+/r6Y8EDO8X/2 + + RZqdPyxiw6neeuo0oCXfA1zY/7dyKA4O/VPnFhdq0DKJj2nOIs+5wGTbMLt87G/V + + Im8E5sPQm1fWxr0N+U0JaK0WMu1DGTKw9Z/NnQwsnINBK2kL/HWl3pDSmGsTfP6q + + rmztAoGBAI39m3K8RjybR/46YG4+4eOOFeo81amjSR1Pf8o/SsqpRpqqCzcmGWBO + + hRbA04KNjgludOcMIV0LYAWqy9LKfaHKcbhdh9Q0PL+VO/fJsh/NljkusRxHxTXj + + /bIeG6yEqFtg/MYe+zeYUZvgGsZP7+7Swbp1ScexuyZes9H37v2D + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 1024 + seed: YQ== + zfec: + required: 1 + segmentSize: 131072 + total: 3 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:MDMF:n3gsik4lgjkekhb2hnvkhiieya:35q6b6riggvqzsmyso2hldyb7hklnrmu6farokcj6d4nvmufqzja + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEAhaXFN/J9blEqNROOLhzzOY/yy5wAQgNmBtULXPLgYffFop+K + + bfZcedR3VTGArQAz2eOm4KfP7BNSd1udZA4OEIvub98O90osuj3B9cFGE9+P4jv4 + + 1GfynZbUnBNq3s71P1TqXSTEJ8DD1L5HFY9coRXSmh1+TiEwhaGDKsBWcMMRjizm + + AjeH5hzQLhY17NLjP1215Mmw4T+gXUYWKfSR40hfXOEx42uz0W2nlMHR8vcDoz8l + + UbV2xCBmMa8m+vl+UwK2rLhjWvkZZP2o5hpk14kHMYoLSF0U2aQPgP+7rLCYYlhN + + w+ClF4B9Jixb+Zl0FhLZeJ/bzXZHdY6AKPYhDwIDAQABAoIBAAOZgpWDrqov0yBe + + gvgDUppsd0sm9qt7K559KChcPaaV2wO1eAUx21gdXLYgNl7dKwha2oNMMcxM0Zii + + 
ZeZXKed7MAm2VdEGSpcc6Qz6pC25L0cWJhkT0FPl8fBe5i7THzA8qW/8eLC4v0Lg + + ecn/Ca0oXnrv1sI5oEobyWHM5WcgZ/EFWaixuz2g2O12pb23XSXeoREg3Hq6UoqH + + 6w6CfeRI3rXyN/E6LOywHv+gDll6rBKilBBOrkwSwoTca3QbZEf7iW7FC42mI+Qt + + g2T+T6MYgK81hDcu6zpMY67D6ZS5qZfIaDduZKeCabt53BTzJDxSKqeMVjCb4iqA + + RDzGEmECgYEAu+5FThTnzfP8K4Nwuc7lbUQuU/1Abpw+bc7vQ9QWE43sl++YIpp/ + + CJLBiGuhW7RQ1fjDk16M8phBIyeViLkHCkbxsUkmurlQIHq2KOGAvcr6Jxnhb5mM + + crsk/QBW3GS96x+z0q7ZKNM9jVSqVhtmZ0nMzZBPv1VhNKxTEMRuAS8CgYEAtg4h + + 8QQ96+FW+PrcUwuQ88KC2xAVBt4J7sa8sQ/QLJHSXWnQoSSy1CmpJVAVUpAB1Dw9 + + J5ypVcYNuu/DFkoK9Ill06wP+vSc1RhjW/ozaZgzWIIUpC+54G0DMj3Pz4BUL8tO + + 12RqpgZfbI8rBXwURuldtsTCC46TR/SzxkJVJiECgYBnf6DhkLfdABsH59qkKiLG + + W39cOCRNBnWHSikRZPNHj6kWQBi8LfP6R8CYHhZ+h77hKKClP7RGQr3U248J/kS3 + + Tzz6kzvmJ/rN+Gbr+s1JOUktUZ6LNLhZ02FaiN5NgJnrrMj/JdZpGnVSqacpxutN + + xSIqr+iLijz/okwY9uVSdwKBgEzR7dSLm61a6p4pDKsmKEYTf6/8O0MokjxlM93q + + 9Ea6SXANZHF60NLhuXP7NOQfzAXIXW3Hl1SQO97zqPhQygqhp4wIAL4+Vac9oT+A + + dg1Koe/pA9i8IszmcwDSQEwotF1uhpgw0Se5bK6cQuUPlGbPtjGXGOJTiSZFxU4V + + U2TBAoGBAIYIgp8UZtxpIfj+u0EJqtQGJFGgw5UgqOQLozQ+OmWxE0Z4jaCgxhj5 + + TW2b2rNesGN3FlaFrXPW7QXw6caZ3/C5/KWcmpwS9gESCJWP3iEbJ4q5Avh6mxAo + + 6KnU5ietcfrDZtjb0TqLo786IHndR3aljbqjZzWvZS7yncfIkpVg + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 1024 + seed: YQ== + zfec: + required: 1 + segmentSize: 131072 + total: 3 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:CHK:3yjtuv5h2g45g2cncg6hka3euu:bqqhp6u73ldawial7rtrfomx2qkmboyvqve6ywo2jpvfbu7zptga:1:3:4096 + format: + kind: chk + params: null + sample: + length: 4096 + seed: Yw== + zfec: + required: 1 + segmentSize: 131072 + total: 3 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:SSK:qucnax3vhjild5ifkxf6uxl66i:26feb7whvq6cm4lxyajiwk4hv3cry5ongo7d4h4vjpgskvgmy3ma + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEAwa1PAiv93FgzQ/4f7aI2pEWAyJkqtC4cP0aE0ifKCZxZ/M8F + + 
xobPnNw1A3aWrAdOC/4iew0y56SRdejGPrrno7d8eDwwc4qV0LqWs/dGs0yb3/KN + + MW5tXzi/VVoo06eQX17YdyQllKqf2ebpkopDYZ23sOXOY1krUFWclGGSlmrpP8J0 + + arLxeDZiWnzE0jZuZqPliQmCozUzPSBbi3cSaiw+hjYsEdt1ooGQcfBfKOwCx+HW + + jjyjck5rss2jVogEDgfo+/HgUwkoZ/h15YvMSt0PBh0Suh7R79P+WXl7m0SvM5qr + + w/xSJKpi+RIcsLOkU89BW+2DiGM3ZeOghnlm+wIDAQABAoIBAFEiBt0EDL6HfEJZ + + bIqhz34VV5OxBkCgqFihc/aNkIdiJhhPqT23L7WoUdT3krrR/JHtjgg6ST7co8rf + + Dl0s8uiUbuH3ZNyiC4x6/bK6PbXSu+GevCMe/VZMcWqR8FRp94LcOpX+YHfc2kXw + + A5zNqthzt4W1XzYjHo/yrTtDfKLhvwEp7NciJL2cQ9DZeQAOTbASjVd80Gps1ZLT + + XGgUpbwm0AzfpjK/6I4qtw2XlhlSBesj3k5PPHqNB6H5ZtRPUGFKeKk4vRl3TRdt + + XdKWbGtawWe9He4fpGc2BMbNE9EScs9q4qZ6Xd+90bgGN3pa8O7xSqdnHtlujOEr + + WwAevaECgYEAyWycuheE2nU08iCxicgiEzJd9L6HLhBvyeYoZuhk/NsSElav+7Mz + + pzWohc8sv50lKo9C159onkK6u7dNqTSZezoDUn2GqrazsYgRP7LPB0jcF+vQUK/D + + E2gC2ZrTutdmaCr/Ddl78x6v5WLlKrVEvnJBP7pd/XmjuVvNQyDrQFkCgYEA9idS + + bitt1aqndeKAtYhquVL3xJKwqASKEbWfxRYXFX7fRqbrmCGT6fg4rs2rn9YAB9wH + + ZnQ2e0tZtG4liewD3Omf5nvekkvSHJFKscm9NS+XLcv3s4UJuCnDpDlmJyFRXSct + + UuVAYcOImpZ+Qtvy+sA1IJBeNl0Zgo4fbUWvl3MCgYA8PuI5vuMbvEbTzPeNMHEQ + + sNXtaDdijcQB7XdUIFpkTtn+5jLI4/alIqV/MFJAFa6SJjtl5uYRv/++Obteyr7F + + Xrqzp5vp36+rf/k4xjCqCx7ZgMzT9V4xpcCEeYyuq9KTgZi7+brbIuiVgZjtxz4C + + gIYHm6SVNhbEUDL6yxPSMQKBgQDqkfxOelpXlCGzCB7pX098vaDZFYT9CB5e0/qm + + APAMjvPMy0KVneHrw5yYj+wuC+vJkZcHvlUw11Rryc9CCMSBn6y+ImqudUyL5rUM + + iZgh9/EUNlwdGflyI3KJrB05ytlTcQMTDN52i7RAxIsbwahh5gp6trjhC4VE0ZUH + + N7ImGwKBgQCM1L+omSvZLd3HyvCEot+EV+bzUsoExDK6+k6a7JgPN3nH9ReHDH9I + + mTRuonZ9zNdeulse7E/7UXlHH0YtgC+XeMQAjiiyV29HuycYt9NCdF1xTrV1Ao3f + + 2Hc2JrmHgccuZP9y/xYTgXU4qD0sOrOpDD230dkkt2xFujfapl2PoA== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 4096 + seed: Yw== + zfec: + required: 1 + segmentSize: 131072 + total: 3 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:MDMF:vspwhm74zz2ffbihu37grhn72a:len3xv7moahbuptdlfzy6otoguaybmsfroeaonep7koan6mcxzfa + format: + kind: ssk + params: + format: mdmf + key: 
'-----BEGIN RSA PRIVATE KEY----- + + MIIEogIBAAKCAQEAtyEYblHULHgUI3qeiIBkJGYKj8bmbIKBz9VdtYWbyp0Elvx0 + + cE9QcLFK25fSCACviocH7Z4yNXPd1yamDPVhFUjcI0J1sj4UfgR27iMYS6rJvHMx + + Z/KwSKz7TW3eDPVJbHl4Y2MstlQBhkTbzzve4XCK33E9d9hMj9OTfgG75XP9wdOl + + Y0tBLoQIPRi9DzVqPMg05oV75B5o/GOWCSzLCTdGXpdDsbFsQPO6o/nNDUvXwVM3 + + 8yxEIZW1oXw3+dmbmKnZi2/RjHvRixzW9JjwW6kg04P22ehjYVRyqAATMoaTDvNq + + S7q1JPe6tTpBGqnhoFjQH3hHsHylhAyXMOf6pwIDAQABAoIBABjHnaqv3+n0haUX + + XoRR+zsBo8Q4wc1FC5O352o8ngwYmxpjJs5brSLSmrKEJKN4lEhGZUg988VP3GDU + + lfuC6JQcu8z5nTt1MwiqSf2HOi5i+dFKNRE/waLT5V1g1H8kYb8P4L5yGQbC70Hf + + 525vR4Vx2RjLFeo1loaPtpGCYo51nOJ9UKkaQZ4sKVgNPL1K/WVDnzmaVklz5R2L + + i2dV/4irCO8HP1ZUagV9JHVpGUT1HJDzBGaVF/3zmFa6hrOrW9KOVuhc/KdybLU9 + + Zhk66KM6fKVgtGL8TG49O99nhcT+jcp+xyzKq1fN6V+dmMFS4PtVXCdLMRfaV/5S + + NuoLhgECgYEA4zPbX1M3hXsH219AvASkDnaQKio3AY18T58XMt2+laPPEIhFDaiY + + J53or7IOfBMphwX2+V0LqL32b+x4cbMTnXN4tUomLVZnvJKA4+1Z3qtTf6xv5yif + + kZ1w2GFcG9rvqCdUnvandkQo63FLWsRKXG4zYXibXof5izSbtMaM8ucCgYEAzlcs + + QqTzcCujrjA3Pr7Tj7qkdJZjCPOce3k5MRZfzqpE8f72OkMwHw3DR2DXm9vP0yev + + OrrN2u0LhK/3fkZAhcTTkUwlH4WqMLGmIUGlUqMyLvSUEW777qzcupbpGYG6K+KF + + 63Ie54KpmAaly7HWRhWsM74XcY64dHlopnpzgkECgYBhWq0bcZsO1SMOuwgQCKUL + + lX48sw4S2j90FqVoJGAv2ps1aE6+hYl9IEq+TjuqqsNWmhWz0EzYp11bpCYQAj3b + + b8k/VWB6eNXGlbgo8mFZ6mvC/26LzHpjeOULstw3C1853HCEFQi4wogOKuOxJv7+ + + EDJwB6/7l6Q0I/y8P3/R7QKBgArb6ZdkSO95THbpUK77qfShdPAxzep1r6GL4qej + + rs0YhuJZcanlSU4JEmLaRN6N9eT97KnhlN3VpcqI3DSIC/M+RYgbAsUi6q7/Wmfb + + pZCwx/5SnsxAFAAelss3D/NosVyH4lZeviOe67/1cZpDtKwhjdt3QJKYWTq//PLt + + 9NBBAoGAQ/Y0z+5u7MhB3tW7o/CxZWdlFWDJ/qB/fUMMcULP3bT9Q1JaJs/Bq/SJ + + MhgeCsc3dR1jmZvgvxsosuvNHUvBhpksoGYtJvOERBN5DpQk36bDg1MP0pye/WYX + + +hVtQVrSOU5JvQ5fWu2zQLDTHLEXPaR4F1Kca8ljgFfW/Ay/T4E= + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 4096 + seed: Yw== + zfec: + required: 1 + segmentSize: 131072 + total: 3 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: 
URI:CHK:annnqmu72p7iels5loqwlxt2zq:s7wv3jfi2hlpcvp4dnloc4eex6vre42kwiel46achaie5n5uodqa:1:3:131071 + format: + kind: chk + params: null + sample: + length: 131071 + seed: LCa0a2j/xo/5m0U8HTBBNBNCLXBkg7+g+YpeiGJm564= + zfec: + required: 1 + segmentSize: 131072 + total: 3 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:SSK:isonfapi4rlbrp46a43pqvzxjq:xitths6mep4ltlbktkwgzegtoaz6efeko3d6pvjgccqtirhzzgiq + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEAttp4OhdOj4JqoNbsQ7aoF5SUk0aIkziEJK0M7DRCqtgU2c5e + + e60BpRd144CXkfc3uzAgEIOTt1F4MtXAWkjki5Ie90pF8L1/+SjOdPii8IA5BHis + + C+ydELjbQY85z2GVPvCwB4VR0aT2GsCbtDaeshZsUn+AH3BPsUUP+fXJFsIGtkpr + + WpC5pd/AQlrlYWj3tw4a6j/Fh2Jofez1ShDoHH1GQ2IP04ekWLza0lrzxNqQuP1n + + hPrGhr1+kS4sOLM9LYmYRrmIcIUSF0P75EdFbmrSKddENTk9PcUSauOYxpvdk2vt + + HFuWBp5i27YUxqcEDKmolqoKhbh5HDmdjslOWwIDAQABAoIBAAGOFLz/EGlNWqAe + + M8l/oX5R19GeJUXbPS8dVEwjRaMzoznBn4a9ueiFgo3PZ3qTok2yjb3rizNhO1HI + + r5IU/JyTPuArftzFP8lb/NAnLSY5G5hbeQXA8ApXCBuj9CyR+orxJmNp/CrO6ajg + + lR0Qjy5Eh+H2Y53g0/dV9wTRKkdJ63El2MVjlTgEKdytMWK8S2RyjbcOKGSMgvvq + + vgw2k9M7dx/KHVxGbqgVHefSkfUFCQne1wEPJS+zk9eLRcJEXP1MY4JgMbpMnH5B + + m9BVhPvCocxfjCDoU4CZW9mQmsUx/J+De3icdIOuZsJgMMBJ9uV9e58x0lStdscL + + gSHlccECgYEA1HPUIQ0jat2cJe+SxIwUWqpbjQQN4Gd1R7QtC6UqH4WXDDCe4wfD + + K3OpaU45BNm2VFNlozfTIMyXRBKM1SnBi3040TWr0TEzasWcneuCbhdP0VUWS//R + + L2rRpThtJeV24aXWP64XMZcorMiTSPBqq6f0OksExzSkqDrhVckD+30CgYEA3FV3 + + 281DsFgqPhJMFJ4NNJgNbVA0mSnsHiQ10tnlioOgjRhRWGgI8Z2GSA0ZxDxyS5kX + + +j4U2u0+UsiHoa5tv5/CUgutg+HEH0PTvJ69f8kyLCinBD+b7FPHvLJBAzN54/dV + + 1mvZ7FWfdgYlCNxWcSQ6bpjDLy7gAkEP7lNJKLcCgYEAwbB8FEnrIVG7O2bIswJW + + yDYKU2z/zbfk16Nvce95kNV1WTq2kJsSF3pSWFxlZYOrVAPYZM7PYFbGDdyvouN3 + + vdlDRJEe/RBTJSPWXq9I8V+1eE6PjmhC6W0EhxIDiIpEMQLFarcoFCEQhz4x8Uym + + o6ry4XZle8wF1g4gQ9qJE+0CgYEAjfdX8j0w9wfnt/TsJoCr+45ZYGzEZ0fWxpkI + + QSJ6vyQOp7radv8ZfCzGX9hpGMLl1gX/qBKmN2WTuZ1RnwCX4FdcyqaRl585UffP + + 
DwKtERAfDsrmylr96WkWEmQ8dYaObC9qlG0LjjahN1fANxRZci8ooyg040rttSYc + + 0K/DPMMCgYB0RI5t8wbyIo1H82Wl/cht43SVXT37rP1lkz7kYISaCfU251jiHWON + + BWECvFup+k5IL4Kx988CxZt7zJcAbgx8CI5ShvEJ1Pn5sl4flwjoblzxY/mpyNYB + + efYc8zrCkd6ILhCrKtbxijt1C6ETh8ufOjoLKWFISLkizKwi0kYILQ== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 131071 + seed: LCa0a2j/xo/5m0U8HTBBNBNCLXBkg7+g+YpeiGJm564= + zfec: + required: 1 + segmentSize: 131072 + total: 3 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:MDMF:77ev4xscd75db247gnsymtav34:ssizcjz5yooirldyp5n67sq4ec3h7adkjzb5i7nf2idlgum6ve2q + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEogIBAAKCAQEAooIJdI38ekWIkn6i+Xn5Aht38L967mmHX7N5/PZdYJl33Oob + + ML4xSK61X7MVlvnDnMxGbIQ2fFdbBnpVAMEb4z4poKB91pYF8HyoogV9XP+mJbCl + + mPzYpbEqcPt+ROyIOgSshE9c6ID6hdG489oqDkOoavTvnEcfEmCF89Cm5PsEE4DJ + + nL5V1CVZQfPBmPcS01KaDvNE065CjRyomx8dav73zpVguKmGLpxba3d2GnlTYI7z + + MNaxZcGl/seDX7KiGeI2kUC8pWb3ezyvPHNbwJOAPNKPkw1oVkEZICwnv4znEnXJ + + ZlH9qqmPIWmn+3PgWRqovpYhEEWh2YOTZaXtYwIDAQABAoIBAAy5g/CwIYHtfz6B + + U1Ts5muc0lfJQTX5O2lp+KEEetan4wkq/PVkdHV4K1Q9W0ZsHj4L3MPTLR+wCLz0 + + HUMm29PVDUC6RcA6FRL59TZimai2N+tMRAXmXoxy1/Dq2xNIi3jc3tP+vDapyslp + + skLIkBx30xs051ePAMbZRK6NEFRuQexOYnUXXJPqPb5pkTYxaYmgER21L+CfalYc + + sqnMhE+Es7YPueHv+fVr/cw8Sdauyzx4PkH1K9iummDGLIA772jerX5SA2oBXwTQ + + gV89YMdeVCyOa4LpD64OSG+NKda/H5ZcBi1lmJMDqyegTOALpTB1NC9bM20PpHZb + + ILkMQKECgYEA3xvNtJybLTLZ1bTVcSVhuuftlnBYqVLHue3DZpDH/6L6wSgUbtmH + + tWpPTzOKEvL1Yc2IFhnl4j9/d2TFs91Z/MOZB7XFsoOySe3kq+8X8r7XlsqCEdbo + + /yFxN/espG+GDdT5/k3UwGuAZek0811po1+EXkkjqKK0Ayhrtaa1z/MCgYEAunck + + zSg4wmYwT0EgQw+HuKWwmtYUO/9QgsruVFtuCr1XFHCwEtR65qnln1aratJYMoeg + + +eFw7VJE47hTuhIp1kTFe0zHy2xyOme8kw3Uu5kRMMGnaAjWveQMRHC0H6CpXJEY + + hqnz0gs3jZe/rjKxISZdibyQ2G3yp+u66EveONECgYAlvl9jcaby04p0k94UAR3y + + b7AK5kCpjH6LXsTSwiWDgr/nE2+5fQVvVGfMX0y5fe6zOAEQtBrm1pUqzpp/ni/O + + Cg0Gd+LVG2B7D0rDJ3SmtVBliybL+8548uBjdnv3aYKFLoWIVwRp9QXIt9YkYaYw + + 
ZEVRahAlRDkt25W1KlnRrwKBgEIq9Uw38axKRupY2fHyGN9VLI5FWXjQ6OkAygH1 + + T99PwQ7nzhNggxxHPcyFHN8TNWj46A7ECSauAvQr/MoSl/YJAWr3nA57tS00kp48 + + Ujkf4BHFJnqsaEeKHTBMLh7rDC5k7qcauALZKCV+q+5M3r48twShfWTP30PnSrQT + + +A+BAoGAcLzVVavz+whB7FuH3wcbfofHW0pI09PB88qHHbm99voYwi1XmiNl0r2A + + a54LNTlifPhPfI1OIIV83XvyRitgl3bCdww9CyqD35mXSEKhjp2UaKYbz73gxddF + + tkA/rbjwYc0rvnZumDEgeMpSBvxHKNiMCHuwvHbkeDZu2hCBe8s= + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 131071 + seed: LCa0a2j/xo/5m0U8HTBBNBNCLXBkg7+g+YpeiGJm564= + zfec: + required: 1 + segmentSize: 131072 + total: 3 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:CHK:rhkln7unkktot72mit5dmuqbdy:ilo6u6hugipdimyrzrvlam47xsmp3ur2lwnrtbecmvocb2664zxq:1:3:131073 + format: + kind: chk + params: null + sample: + length: 131073 + seed: /N4rLtula/QIYB+3If6bXDONEO5CnqBPrlURto+/j7k= + zfec: + required: 1 + segmentSize: 131072 + total: 3 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:SSK:6lnma7gxkxs4gz5wuitsqs7loy:uugb5btz6wxdrtmtqf5uc4i7vnukhj4wwezi7ygwlkguee6ponka + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEAvsUlCU9RU0cOaxNW1B8P+ADl+e++2dVV4YDfwTQ/tfAV4EeV + + ajvOZ2qRveGeXPFoFiRs4f1jA78AhHDu9YXUlq0ID99yCZUGAa4S9ea6eSYybScr + + 6ZgvAxJo+FAkR6DKvrjGEK9ioav9NitwVlJMhmyrQbLQz2kHu9TYa+WGdxP+0R9L + + MGgpjkl5JClHMK9YQFWbC8OyJqtua7fj0nvE40yYxw2v78wyad+dxpAieoROu6ys + + J/FLcYcX5/ijp+ipEPW1izlWNtGsKLfZkulO1OHaTllztLcY9RKcS0sIZAEGWEIk + + aU41rUVAgnjz6yNu/NojE9Hb3Yogywb7fHpvuQIDAQABAoIBABw+hgA93RWOVKUn + + xNg9DRkz0NjTVRddTYzIjthFCxW9yQ9eqdGDr0iCb7eEOvUZzMZSeEhxQoKgecwi + + CE8TlGkGj0YCWBjxFmWTRz1e/sbHD/o3LXo1TOiQhjgVbXmpZbsdEj4QKXM0SpYF + + kmlFYA0a87QTbHT44Os/VReMcP8aFlGhs48JafEMxAeHRk47deugSML/DS2OlywL + + 4FNrAwA6AZZrgE9+8qXHoR6J46k8tvRI04Cx5ADmbcgoOrLBnfheUVturJOZGxKx + + fEftj7TqTtHY+YtMawSVEd4tC6V2XP+vqIVduYw4q0gy/gPWKquKC8NE0BgpgXui + + suZvRC0CgYEA2KwFvZ39x7+ALjuqWd//jnfA/RWGh01632PCatgrlmr+XMHQQLZl + + 
xrpN4bQCur9yOtF6jkF70npK5n/LnbtjWZ6CQK69jXnShUvdkgZJxk0iIDT1o+5R + + 3r8hi0qWxolhhyLgpIoKuCIU8+KHRnRud8Pw+oW2aMWuDPYVSr12sNcCgYEA4WWN + + 7Q+h/86Oe0L1Bjv7UgaunvvCDpNVAGk1IFj3+FnVFf2PXNW+ECsXBviXNvGzfNDm + + UoWRZPI1+ZBU1vzlidxY3ns7l7Xa6NBVBEKfEWwSeQ881S0pF0Cf7MxIP1xMUts8 + + tx5koWaCf0+my5kouTqHFSelAQmfgpYetowJge8CgYEAz5+HoLvUg6Qt4B+sjZLo + + AE0g0WPfFahZJdciZd/fZLQCKkBOnrQpstSz7KPiObFadKJnHgoB7R7ixx2OsAbw + + nOAXUIQhf4BNCw43s8Xyy+L94H7fI8crDJd6PU+sS3M50ZTKTuE4hFmkWk+n8QuB + + D6LjOC0JLjy/HAxzOrtzEOECgYEA18YTl1T23d/M0L1pub0kPAM/md0jijaLEiil + + fkENqge9kR665sGMAQhvM/I5OJUsIZoOVAOgC8Y/25jLT0CtMUvrG5lXlEW4ulXu + + fXSVuOT/zjrDHsTr6GGqd9OcemOOgWd1+Uu0RDrRRLVo1NHbhW89MAhS0up2dFno + + LxNiaqsCgYAFitD9jsQuSigjltdWGBnaQA/cyS9Y5VzU5y59B4nXW4JOqfnLmMps + + o/WGAO7CKBvnz+7UOCOd/1ZdsA1ny70ZbLsgaf0Ku3+ehS5XNy/ow0LBFqBEw0aT + + LYdUwudveoV5ZN1SOOR19Zzc9+30WHRP6Nd/UtfX2m+EWF/A51AuRA== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 131073 + seed: /N4rLtula/QIYB+3If6bXDONEO5CnqBPrlURto+/j7k= + zfec: + required: 1 + segmentSize: 131072 + total: 3 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:MDMF:l3xpgqv43m3df7pr462evj2mei:xeixqrar643gyxwyyz4ccxjte4oznzm2ka347klrwr6kwaktzjgq + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEA1HnRxcfB+NKQ+Kw0dgtWgRUXgHcmKmQ0cu/44QlykP0BrFJ0 + + B+jmpHwDROh8jG8tmVyW1caWlAZpdIUvqHibCB6CJhQ4ymKGkC3BBTlAzUSUTxEf + + 8vlx3UbPvvMZVjZrfTxfkcdScEybizqoQVcrxeA9iU8Ud4ef/dn6xT/rgtoByDQi + + E9PWRwByAqnF6fwsuKzQOzMDB6vMPPjMgo0lqJCgXHXZ2XY7ApHmQ547pucm6GkL + + bs+Ox2eGDeizTbmy+JgfqriJC0pb6cMJC65gBrmG3qL4P32EHfQFkZy8vazMYbNu + + 7pPM2CozaJ6XHGCOQFNxACFLep2V8DiNb+1LAwIDAQABAoIBAGG01zMlBdzfL406 + + 2zEBS7k2MsV/hQxvYfMMyRzq1EU6I1/T1smgXxd6c6JnaobFxWlFu5L4rFvLiwjr + + ChxlwZz9MopCOE9Q+WIpuB4n0tXR2IV3cYKxFJxVqMi9T4RmqA1CCwylZRKBF/Hx + + elf5twZadNHEjLveoUMBzyCPoURdF3jZuCjAJCX54NrD+x53hK8gKv4OOjp/k/DN + + inW1wk3p4ZEzy0/A7S5EsP+BlwRkkT2yvdEaqCsuOeOEwIuz6ZCK/EZp57hsEBi+ + + 
mbqh4Gdvj/MxOiWOa4MwThw9eiYmSGwBpqwwdK6qGUNj8LrpTREYAIVw9lwrN0cz + + 8iQgYTkCgYEA7L4Rb1Iq9QcDDnMBWFUDdVASILfePCFwbmPSXeq4I4d3V/hRlYdx + + 4nNxva+Qkl+xZZovOlcFJyLu3xG4YYqALuVDnpjVXIKbHXrsJyYRaPTNc9iLjz3V + + cGQJVT3/uIQRkjjvws37FiumntYwQAkhEVqVfemE9an2D2sW3zrSW/UCgYEA5cJs + + SVF4P96iNb6gKfHPvMwTmrmXqRpSc3avkM6WBWthx5RJIPp6jEw06rpXfclILKPt + + a8V3TOlZpYjjC0M1QgShPtN3zUPN3L9T5qs7x1ysppn/NNOLGgHMkrXDRcoAW7xW + + 4rBqobvbY5jIgAut2WH1A7MrYo5F7YnNOY9N6BcCgYAzjx168hk67gEDZ5aWZ0Vu + + ija4e3LiA6JZ2FGbdKAP1NPwC7uw5iOuXtgZqJ/C0SZwa0j32rXblScS2+gdDi3m + + iLXWV5C4KhWgMQI3cHoAMriAD1wtoRjX9mF1+B+2TsUI5G+LLJMPfAg4tYsilxpl + + jiXamz4CxrY5G2iKy2O+9QKBgQCnQvjOD085P/xan+G9Z1pSGUcUVpP/+TeY8wgw + + pRQ18cyHHH54UaCxTjEfyHQ1EDlItjX7RQ/qn94xUgvngQ/edbxlHlGSzw+o6mhL + + /tBP/Dl8N5PAg1g6oKCrFUOJJNtJ7TxbXw7hmv7F7M3Z2abAID3caazl3Kkvmigb + + BSY0FQKBgQCAM7Q1W9SjcZ3b5M8f/POYzgb3N5QU9RzvOJbXcs1jEB5LDdOkKorL + + mx/6lLXrKN+mkdJxXLO1ruNzlfrns8gL7WeRUqQ9h3OU0Ru2Bxe/VFja0+ZR0V+u + + 4TiKtcxIl9EijEsaGkE7mRAmlOnyvOBuEFmEkVA8aaCNkzJ4v1AGfw== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 131073 + seed: /N4rLtula/QIYB+3If6bXDONEO5CnqBPrlURto+/j7k= + zfec: + required: 1 + segmentSize: 131072 + total: 3 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:CHK:7yjcwwt6454lbv3pni5mjofsxe:y5nwpzwmvpvr3gqxnykjixprpxw3w6qyqmszf7ijyxnm3wl6f5oa:1:3:2097151 + format: + kind: chk + params: null + sample: + length: 2097151 + seed: uqWglk0zIPvAxqkiFARTyFE+okq4/QV3A0gEqWckgJY= + zfec: + required: 1 + segmentSize: 131072 + total: 3 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:SSK:4zprwdvwwgfc3ai3fumd5zr4oq:e24f6pm5p65dacq6hpv6vtcospblpahmjrd6uabuhg2zbahfkkpq + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEAzrq9a2BWiiPkWXUt6BMAdSHLqm7l7QFCjYeTEGJzFIvR1G4g + + LxSdWfLuVX75U8M7A57Km73BfTzHLkQEtv7HKvI9dFH+Q5u1DhrdHD1yaDOosqFI + + 9v/xk9ih5ngCUhOZ6oXVXnOnOpUYeXg//NTHNIAZcKS+YLExeFHpKbngDRvlsk7S + + 
XFTixFWO6xCsHVCNiq7cUe+1x0kG6I1gqZGWFG0NkTxycZjUjC67YNXzD7HGwOfn + + b1DCgOq+OB/u8EIHcDnw/N8ijELxbWC51Dr7zPF5j2ndOK3qtww6jod9Dihc+kcg + + ka4N7D8/KOPOzkD5OF5oZ9SW2Dfnl6SiasEHhwIDAQABAoIBABNGyGjdx9QDusQ4 + + r/om43EIoLQYuSnbZzhJPwZVF8P/saYsSqX5Nx7vDxg7ycXsu3D/+oaBMCycYTpI + + L2Rc53dytRZGmv304/IXwSxj8moS/xUBAwu9G+qcVaRm5lh+6Wg76IRxJlPJGUoi + + U10g/h5AH58oTXQ4sZM1mBdC9Mhj+Wm9dPIsra4v2x05y1qxhHNqHGe+n7JLrDNZ + + c1jZ7I+8lxBWP9Z8vnE+XxmOVENjXndSVlQsHRo2Lqujk5I6I7EcGapJH+xv1UD1 + + mdKewzqZb1MwaK5SMmRVt7q3QJ1MmyJm0Gk4MRWkGzbpj73zpKT3eeHTHxsw0P72 + + yLEKnAkCgYEA/zxyPOrUAk3AYIXKt/BbSt5evDg/S/9FOMwLiDrRgiI2neB3NFfW + + 0qOSKKhvpJM/2s6rCpHAbqI3cG50E+XUMqQKt2gSwwpxS23iky2qtetFdcPv+c+1 + + DJ+wAODXAsBpMvhuS45NEvZwdR0D9oiKrnz82mBrRiFokukuBmc1qbMCgYEAz1kh + + IbZOJu4+qY2+k9iir5gjuk9EcJzvTJ+PGI5d/gJVp2jUUPaAqM46R+eirhhBas6j + + WCxX7g89rU7rGm5aUCh7BDHxWsqtwp5k24eUwv8tgVFqOe17CXf/722jRJ4LOKCI + + 6R6qvSKU/BssmtZugasUhbjH02ctU+/IGmiIWN0CgYA/gxcaOYUQHbDlU+Wh59mP + + w83nIEf/7UGYZI3qFFjV/RWCK7z99W2rdLCGFYPSfCHDnPHK64HrBcqt245e9S3c + + fB1+jhM9HXgbwPhEj3SPWEAskdlBXII85e+yCED4mlCTMmafvoVHVrOdMN8vlcKM + + sOVqoduP0/hltkiRp1UfKQKBgQCvplOY2XUvKZhPzlHpsRVwJzPs+oWB2JAnmut/ + + 4+rf6V5iKT3jME8hsUJR83oufUG9lzts0HPUqXiYPkiP5XgAe6pqjVxmi1fTjJbo + + Gdz9q2oDKNMSK4mVJeDcFUbANBpRaD1TLrV477jCMMsCiDCpDCKgfT5aagdOGcys + + Am2uyQKBgEAVR90u1U6oevjeyFCMP6EzqiaJFRQQyE8BAtHNHlDXOZsULZxpIvy9 + + Y2Ib8QctoHfaurErSX32rSrXyXqK9mKnNUk3/DXicHhw7zPe9SrOL/8H51iBRouO + + uV3hW4DnDIJE42SShQcL5WNdFBrzXG/OAnyDnjmfO8o3ir1YJccJ + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 2097151 + seed: uqWglk0zIPvAxqkiFARTyFE+okq4/QV3A0gEqWckgJY= + zfec: + required: 1 + segmentSize: 131072 + total: 3 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:MDMF:gmqhs227cyrfb7bkvjz5nkhxlm:hqbl4c72w75tn5w6dqiha57cwyfgijagxyhab53h2vccgqnox6ia + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEogIBAAKCAQEApFyk3JDYamuKCyq4piIPAGKW8xUEjZ7CSH34nTGucEHBmTsZ + + 
xzvMbnnIMCIDB3NIQeTTIRaSMnmdEbtDjiA5sJ54lkCSkHOaEHGXKVivI/NNTcnB + + rC/Xp7DESLWQjn64rw4UD8X3Bw5mFnj/i3dEQc0CSnwRAAgoWRU0LkXWIZaIPIaV + + JMX5cL4bgakkb7PgyQ16CIWKA6XdzAJA3Ef4TozRACeB6YcZuC1p12CSoO5kGvOt + + TLYd3OIvOgkU+TNoaWxf8k5ajPF6cXKDmDCDjwTShK0lhCRXqKqas2EwcZcp5Wxm + + Jz/tYerlcr2ZLQW3fJ2I33hzd77RxFfOwYiE6wIDAQABAoIBAE4oL80io4aXdKEy + + w3Ncpr2MJDObPvsJD2HhZSN6yHRhEGqJDA0NhnzSNDuPMNmOHEIZSbxmO0b9RY90 + + +P7QnB48fSMVuZwvHIfNPBBRN5dkztG6qvnyFh6LlArvK7pW0AOOMkP94yXb1vfA + + uePb5v1TO0+oB314Y5dY2eNXGigF5Hmp5rZIKwI67ab8AywapYHYOzvrYMj16Ur8 + + emmxx2KEXnLbABWPDUT/3T/4+h+T7pGOfZOl1bJdtNARGGXKIUYal/n2FFIan0/f + + pzoSL2m7dGyUFYBiPFWO3LYyVKkHpvXhicNjXhQ4L75gjAD08fnjXl01UB5qD70u + + lqU5VAECgYEAtlFX5quPBPAf+HgkwDZl0zOy4e9P+lIXKth+FqiHunSnf4uVbkMc + + tqxBNTr+Z9NvF6boRMZCIIza2QTjlCG+yfo7c/+/r7o6weaO4E3dhlb3D3ZxRVDy + + 8LxRDXJPHPiDXeMSJMt3apL9PVQxXxqFy8iobuG4zq+byOOm3xHR8msCgYEA5smU + + 3j+GQ8Uk/SK9HssEXwkRJY2BsEDlhnyFd7rtzrBMjSpdSC+js51xfmEfFf+wJDjs + + bg+LE/NOnQkqj0aP3cAnxRqxguYLDoue31/NTxHqF8Osh73CVaycbtGOlgZVSKlV + + wbIopIZCf1qT5Wqmq2iaHU4fexYwyOAt3YzRV4ECgYA8n4+7BBDqc24uEMUnpO72 + + 65nvxsOxWNqbRKGopyF6vo9zudZWc7p4g46rRJKTs5qdIsLZG5OhfzTGNPn3p8Dp + + KGvcho4WwLYJA8E+lKW0pfZBDgFcKy6dHgFVl0z3NSt5bKf71CxBI99RJU3FcexL + + ds2LEUOCdqI2inxScHp/QQKBgCAlykffH2vHRWzBbwigDP7T+4B4oq0TjSVbqRfR + + gRi+dBth4FaS1EHL16hcDQF6eWXCTCTUo3Hm/XdgdH46vQWNo7yQCFQMiVPAXSQO + + a0HtY+dliV3rL+vRPIUvSaZaQXz6oYh2sbShQxgMXPejEI5l5rnYTQhPQgJpP9pR + + qEOBAoGAWq8qBylHt6iCe/Y+mp3MkGbFXlOu01Nz1gHucIoAQdZYrIRUQShHGBhi + + SdXiBD08R2GwJw5LvvhIAf6U9EW0joklODKtl948OANOZMwfBjWDTIUeuWmHzXz0 + + YU1eZrwemS4Phq6UH9WGffDhW9UgKI7lysZjQDX9FBEIsCpz7kA= + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 2097151 + seed: uqWglk0zIPvAxqkiFARTyFE+okq4/QV3A0gEqWckgJY= + zfec: + required: 1 + segmentSize: 131072 + total: 3 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:CHK:r7wva6tisq6m5zszgr7seu77cm:oqlgdw3hi72qtahpsi3h3yryxpqdshagvt6xnobsppkwp7ic4cia:1:3:2097153 + format: + 
kind: chk + params: null + sample: + length: 2097153 + seed: BTBX/ampNfLU+ox7xipBGiaSbgC0kcB8Gy7BkJB4oKI= + zfec: + required: 1 + segmentSize: 131072 + total: 3 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:SSK:oq67w7mdvze6t4nr4yxgdvs5ky:wvz2swuip6lw4fk5wmtcfchrdai4vclctok6abjyrnnblenlm75q + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEAuVhfMuw4NVMyvfH43pOdmBQ54Nk24hgAl4ho2iKN5cHjncRh + + 6E1Jx+DraeQXeonpI7YwPG8rwBbvJbksSQAvGZldCrnHBXahcef99uaj5ozx0poU + + Jd5EN+3v0VJhKdF/BZ5h6okxG/aBX/9mzjg4amNBNpgCcdudU9NeukSfTKUAONfb + + 2RBkWLmJfv1ypoEVUUSWa5x09qDL45PkGWcnPgKM045pzbCBowPSAymU2XWU/Vso + + ELkYP12JLOgRY51I4rVwAXWa3DUOlE+xqRO4IzndvGceQfc/gL+qAoF5HvpIsOS2 + + tF6w7nXs9Ykv7qYPU7NknyMGhtbU7dTuPnf46wIDAQABAoIBAACIHF3NB6w/fNnG + + o+wfiEgzZqcaeGnVn8rPfV2C091g3QJK2e0Mq7WDE6nOGbMkSiLsFt9Vgk6ewjjd + + ex8KfRgZtIafWc7ONFBbVgMrTjUPGnJ8NqqVJKT0bcev8M9p5m9hsO43TojCAEUC + + Oo1E5ASKHFnHvdoNwznGVXM7VymlHkRvTRIv92BoaOYVWeB6HCaRqdlCpYLht4FO + + AUnaG7JVdyFG39xXV4FAmSpJGjZLSNgwNnl64CyoilyrcTLAkRgLzD2DnnWQ9V2j + + 6S4dumacT5NhNdZ7ISnZaTPjFi4tSUaBUFBB/d9gLCvk/XTGaIYLf3BtDksY6G6o + + e5fYr6ECgYEAv2jY+i7UsczKZFsYQOt4xDhOHN/kQayXPHLlxFm5Sw5fIQZ0BrmZ + + t9MmP4dFEbBkOGI1b2iwO182cJrIn76mWAPiopEAnE5MLZTFeQdKK3ITYYJitRiK + + e4HbOIrR8BxTMthjW878oR8LUQIn3C64HZCM9yVxyGx7x6oeoG7Id98CgYEA9+Ol + + rp3tT/6bqepdFAHb4zyewdHajc3P15TAYU4eImSGMZlaMyARjRaIlAD5nzdeykKP + + CA2vdgVOjsCv9tOVG5w/WUUS/PC+wjm3ZuSit84jghhEHVOunIkA997H9DnY2jcF + + r+ew3O1P/ebnqH7Amf8JLDeoF8RLWhxWpIMM0HUCgYBM5blt4UyP1b9ly/cVdcRB + + yIERNp2ECOuFXH+Uf5tiXPa41NfeL8hiwpB2K0kDT0MkJ8hh5sQORjUfzf9Vtgks + + CPuO5gRBx27xTPh8pAIXLDA/F1vCd4aDEetZbuPiu+5s2eQo6SIzNL6eH+iVm6ta + + LU2EqOVqaLLdxymguIEPLwKBgQCAfyz/WzYM5Xpjle0x1dTZ8i6JYfLc1vcKVT10 + + Mz5DrcAyLcAoCFOQw0GBFxBOjDFCv0XNcuqlTxLtxMxyMjN04IWmDLxPCayYmbqM + + R7BhfyXA5jtIyHwXAJ3T31PfMa1LUIJOMNfpbcqtXuhu22WTbjSfCyrDRymYSGBR + + Xu1hkQKBgFFuDFlGPsREXJyvgNiMgYdTIyLQRq7nFr8g+kYw03xtgjpcw9j4WYQD + + 
3EkQcurkbHTY/6qUf1oh6vgrY07W40VocEHPuimSfZHzyeXbbUuI2kXyGoJeu6cc + + ibZ+MhbfpLh9ynjyNWreSn9BoWQG9beNcw//7Yah6beMRe0P0Yd1 + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 2097153 + seed: BTBX/ampNfLU+ox7xipBGiaSbgC0kcB8Gy7BkJB4oKI= + zfec: + required: 1 + segmentSize: 131072 + total: 3 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:MDMF:7jicgfnv34e5aim6u4agso7gsu:w53hrgczlrz36vuxdwum234dzgfra55yqsmqmx6ighnfarzex3ya + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEAj6EQEncNJm+aeBHyxTcfVnki3Oh+MLhMHYXSrabWXYDln7aX + + Fl5sSGVWDOU77INbWexekm7jJArTRGHEafOBiYF35P72MVQJv7ae05tS9DhM/tVF + + 6rBYb9BURjMeNoAls5BDNX8QwctMZGTF6NTtx2q1mXj6I+acxzZagxcunZDQZIQ/ + + YqQLLqlBxtY65UlRrpMchzVfqFv9ghYXKGxlC0OzoLVERuTYWMKjsd7YCAZkkjFu + + SPBsFTN02ng/9ap6H85RwpuomYdtGjgM1CVRGllbPoY1m/8Hk/MZ049SNeByg6FD + + YP830YZf3Wwru3xo+WAS3YWIYrKc9TOZHTYcoQIDAQABAoIBAA7aR9heUaX5tjPC + + yLlRN8nlm/p+hlzRFM9GwUsY13kdimFc7JHbxinUZN96LcYJrWmGp8mlHZMNatyF + + mbNKgvOZHQYOwZ8cmCd9oeizBWiLkUFsWkm93l9YX/v5xEmRhenkOgxZrVNvtmDO + + tP1HVlF1Z6h3xLJFYk1i1whQXjndPKMvlDV+nXdvZx9Y9x6GOKG6eHyeQ1lYUmrS + + jUei8O0ohR5uBXNVETDpZFLExOhpElmejowrPvvNc8A1goHkHpx20ddZGKW97aIV + + wbgPiWA0W/nyWChPwBHHfzv1QXY5n7akiP96KqknvoAQUUhEWYbVl3Ra/4YtjImB + + p0lfLHsCgYEAtr1XIHTJpQB/jaqKrNCh6ieixDDazjAnlWEaY6Fn3HFRHZehPzfL + + Ylnu5Md68PqDbb8ERrstAgbJ8lBZQboPBsq93Bqj0R1+BrsYGWuUXj+gFjj1Z3sr + + Kissmrc43oIOGcBvEuCIkK1QgVyB6o3Y6oud2iB1S7YyPcLLBZPehMcCgYEAyTXL + + yA17mcirXDuRkFqSuT2ApVRq+ddOf6TYxOcP5RZFEuSG01D4XSJ7iOsELUSGE7O+ + + 57N4MCdDdP3TRGOWCt8n2vP9jvCHT7Gz5LVuxzw9oPoe4ahh+YsdmkWWUWxgGhXk + + gifqThihSTQFRM0nH/MDpDNy2BGp2iFFVbS6G1cCgYEAlyDPxY/QlB1tYAQC3BlP + + Tw+olQiybIN2uRutb2g1NSKiKw8T0+yYz6YA5EP1cQY9W632I2j5OAvVSAkbSDhP + + 5RYXHskJYhA6AecJbzyBX9DO3JIOop5CfIVoRivxZFO6xaFYOwxm5P/w7ItNBmZ5 + + VsBQs+zUFOGBe4J11Q8NoFECgYBcb9Z6xZbvA32WFde91Z5qc5LSYYHz3bI2eekM + + LIrk1+JL16kJE73GK71NHYsBsOVXz8/4aj7hAGjBKosQdB/ORs7sjAME1AOV3TGj + + 
9KY76bT2a3IcgVrhZcPnx+hS59MOqNgd43CFCFOwabGx9f3vc5lMqkYsdZDuoTJV + + OX32aQKBgQCr3mKRSwKzmsBEnoyfebbW0WKi8E7A/LeQYUsHY7zcKy9XNPewgqi3 + + f4cgFzPF0InVVI9h2k14VW27Yf6wqvpDaPYmdZiX45b4186Z/gSnjMgLthkEo8nF + + HFeIFIz6LwA/o4f6cmHIL5+r0GuOSQ1RyQPJaC2NGFqRLP0xjw5HSw== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 2097153 + seed: BTBX/ampNfLU+ox7xipBGiaSbgC0kcB8Gy7BkJB4oKI= + zfec: + required: 1 + segmentSize: 131072 + total: 3 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:CHK:joatus45ult6nt44xj2awpeu6m:jr5gh7rt44ppdqwwwq7v4feibbygqfbdwxv2siqwijkbnhvdej6a:1:3:4194304 + format: + kind: chk + params: null + sample: + length: 4194304 + seed: 2NVWOXvtwcAFf0J/g0fDC+ypQbnCU7awZoCxcnhpkVM= + zfec: + required: 1 + segmentSize: 131072 + total: 3 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:SSK:zvtntitwrkhj4cgrby3ffhlhxi:vn7utwydkd6e6pwf7iato5o4yplgpvhclc3hbt7zgy5irbdrmvka + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpQIBAAKCAQEAyK3peTMxSvg10hxjiJ7bUENuiH+c8rXiPG0C+aQGCnlzn0IM + + gOSrSX0FtKBdlMgAGdlLJB2R702xhXX9xbED4RnB27lEFCEVCMZjlRcxl1NBivNT + + gSMIkO9+YRHsSQeIOoZ9QVlfCSk6TfSMyRnYAERZjbITtJxW08P0M4UY75plTVrh + + r8tcubELsYGCy7DjyM9oX5gUY0KEqWAIGrxyCS7TSHsBHCor4f6hug7t1pdJ5RWN + + Yn76apmjnXnhZj9LA8AWhqUqM+cS3+GJ9F8AaIZHOF182vDC9u6t4dR41+OqcawD + + CYCJH6wZDYyUxf5LWKJauG0VH8w+EV+7xlgjeQIDAQABAoIBAADFimxI4hujsLFK + + IqfX1IgOelJUC0pzoJcS5DwJgWxw4xztqBamynHR5T+4jiOQUU/IIh3Vb0Y4SkJ1 + + HbCw6Y7oBnLN15EsP2R1PtTH7gzi6RGbtep9M/86rIW7B+mP+dofwkOKEG98lRjP + + o+ryCn6VxLJiyOic4UiXLE02ac4iDBrI2A5S751PzXPPKl6rriVF3i45IrWV4bXD + + iNFOQeq9T4jRrpdhEGlks4u20Xdrk2qZrfYmjlktKoypBwVFkIT/7phuL8sibkpN + + psfnrUhldGFIiF4YE62Wd/2axCqiJ9xX2eNandSlSH/rugwM3haGDDCjlneX7D+Q + + tc8jCjECgYEA1rZB+QWGt5z2f77byK8oJntpGdGxQEihzKvV47+7/5nNfLPmR4BZ + + bHmvus2LpkZ3OYmZ3Ay6hbReJjR8ZGgEknuOxqUPp5Sxo4EdrSYrOmtdgc2ktGwH + + imPGrKuwB+oDlKKpxKrr9f731ggF268NY55ZwrJdeVahW8D32S7rIlcCgYEA70Td + + 
ImY2fDnD7d2zxG/I7RiPfzlDV29cWuPk+t35HBbEnY9IHh4ye+UlUCcQTWp+tL/1 + + BtdF+xQvk++D6F8opaxOKvELH8GFQe0L52ougNXOle/jzeRNAg7xk2w7BdtDI4Yo + + nKMlqDHbu7QGwXFww8ZWF1N+OxgCaO3ohUlbZq8CgYEAv4Q4mn/kZ3k5oj7C2mHq + + RVEFMYOKQFXJBMAtfAV1EovE77uj5xlEKm7sYYqgSwNFq2vicpZj9YkqBZgBcKob + + kfFmLCflK8yFGtu7dcu6+VP1RygABvLpUvamqzRFQvnokbb6CTOQX486z42+c/LT + + 1YzUcccZe3bbXPVl3jJsh+cCgYEA6f9B+KtXq8PejplcfscIDIARjk2VQ0RAYQ8x + + V/qP7l2B6ck/sVy86JfgFvQtKFj3E5QLcKZF5VgHc7kxGqc9nFDXnX1g8KyUwzWt + + h6M7WXo/8DjMZAZbHaE5toCJdJ/LmElTHGUdpdEk4PweAz8LFhu5BFT+RZKkgLPy + + y69DOTcCgYEAkyRNcRcKTUAJF9ETcURZUydYxmVp61zG870+xkNib88wR4hMrUvh + + lCFlZsUVUuuCeoqHWZaMmwwmgfoj/xMc6r7h6b57Rfe1tTQhvhy4yVC3+Q8nR3qH + + fMB2mBedtoJpcuNnohg4TVmLWS+iVPwq6LLYXmqWs8T0gkehCx/0AFY= + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 4194304 + seed: 2NVWOXvtwcAFf0J/g0fDC+ypQbnCU7awZoCxcnhpkVM= + zfec: + required: 1 + segmentSize: 131072 + total: 3 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:MDMF:zg7tbgey5lcomfxvkbxcy7bdbi:fvkr6u34kfm6lnojyz3bdrpdvvpyzhtusee3lx32blskntogde2q + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEogIBAAKCAQEAjrJ3kuOX436Eyzbgusynmu0jgJDkonYVq0aYKRZJS+zSIRR7 + + GJTPJb0VzkxMgKsf4BrVrsbybsZLB+I/JDzKhmrKaTVj3pw9Rz190vqif/JZqyY4 + + g4K7El3P7C+9FhgF4/WfhOTpVmG4Qk7XBjzWOQcBcwlkB5fcSx0dfK8ou4t+pTjw + + pohbW1l/ewQFqj5QgMOvvyxSQJOpQ6fNGv2T5s/7EAur6wVkp/IR0eB7QHMOUwRs + + yQo3SoM9Nb6AL5ai1woc5DgjFYr3q1Zr8FbLk2n4otUQ8Mn9CcFgitZx9ylG2rSn + + gf5FJf57uSLrc1jnOnv3FPe7Bxm9wa8ltI7RMwIDAQABAoIBAAyYLnSUMN/SPVwG + + pzwYrw+KyZZ7c127CJUtR5j5j67E8EdQj7+VXSXqc/Ly1tFU1gN8hHEG7yo2xzPv + + 1Jal5Owk0pU8y87ZWsRsZkWv4fxjKMxLaDNQ329mDa491ZvbGYrLe/C85LLyltKh + + ACdCFfHcYlTdNp5y8u12qUqyPQKca5oJoMXnkDUISKO2t8QY4yZy7oOu3cljiE0G + + NOQvkyU8y6RzL6X3/mWGR2dNNvFMSx+Z2MkNCN22WI1cq76l8S3TwTI6Kb2HweET + + 8brtEG90BQ5mYvyzxXgRhIuzcnIekRoHJT1FgHwNq9EGPAWLjH4FG3wINUUdr+t6 + + haoJIWkCgYEAuJC25X216BEtCXfofu5giyENG2qtWZ8C/kw3MLr/qB2MR1P5WKHB + + 
k3JeIwFVKu6A8o5obFsz8ZN8fUnEq5PcUCLTWKQw3swGjOLdDcU+ANUeZhC02cUH + + 6EqhlqccqwdOeqrjzDIh+d5OT2NECUDIZyxteZAqOxNMg+3ZFmEE3PkCgYEAxe1U + + aLJdWSoiyrr7ismFmUByJ3bYYu5q9fDqfLIUBH2FHIoUymLARqM0kcpH6obZjskF + + 8AXW3DYW/CkfEZOw8rS0JdEsZV/inWdOIdHfnfr0822QCIKvw5gF5tMN2xosYaHR + + N3SvgLSc4xJGB1OEAS2gKzqGerYPQcBKQ/flBosCgYANXCxhIGBylAu2i7+AsLC8 + + YYAZY/d4bVJCJjI4jNDE0p686w85ozvn+HdoAUiw+uLKrBRTA6cW7Z4tU5Gw+dsQ + + 0fSKjhgbiJlQyXtG6+g5FzREHyF4QhL4da6MwTwKBVVg+83Rki9zbuwsQvtB0Dax + + gT3LduwXqqX3RthYDl3TwQKBgGc2FmEuOc1oUpJDJS6/XaKH379Ckx4r060Cf1Sd + + DCE3TzWNr7/F9RwguSYZRJ1AyqxRmX4LnXph3mSKEQB6crhtkM9zn2IRuTt5hl3O + + cnyQRDG4fXZip8MoQFOY1U3e/SvAVThE6cwE4xbqDYh+fYSbxT0lnMDatWQFIPUG + + jPB3AoGAUgp8W9DPRavRwo90Ijmqs486/0YOB5MKfb/tFu34W1KTCtG1R30JA9oh + + GP4M6PRNYq2+CoWu3vzdVNVRmFV2TZVy/TJD/pF/jcSBOurnlTiafdi5CM+GU7I7 + + VM3I2EaE+TPEB6ovFqbtkhb14SPPYVt5BwK9EREC0n4kMRvth/E= + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 4194304 + seed: 2NVWOXvtwcAFf0J/g0fDC+ypQbnCU7awZoCxcnhpkVM= + zfec: + required: 1 + segmentSize: 131072 + total: 3 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:CHK:mz7pzh24u6flkfmfvvokdwmriu:5mzo57de2ywsdqkdvm7557gcikkpjqbmj65mk235t2xyoejtpbta:1:3:8388607 + format: + kind: chk + params: null + sample: + length: 8388607 + seed: w6uP8Tcg6K2QR905Rms8iXTlksL6OD1KOWBxTK7wxPI= + zfec: + required: 1 + segmentSize: 131072 + total: 3 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:SSK:yrwzlwbgia4maufleime3qnzym:ymybiqtqxpjuo7rwkvmxqov2baz3ikpyes565u3b2azlga6k3axq + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEogIBAAKCAQEArKRDPNNLKhOIsALx23rZMi3gZVwZHDzlNuem1GlsnuCmGjmN + + kTFruy6QdNiecKO134SCK4XFB1pLsOrHlmyiWg7sulwYLAmMWticGZ41aZxANQsK + + em1AEoqqrtWDKKqQ3vrGdsLYrVFdgXk+KVavoy5RynZTURXeqh1AYiUujTP0EtxJ + + SxHyQeXgtz0SAMi7RnyY6ohdYOlhAj+6hQMBRkOOdXpMyt718QIBnVM38d9VCJX8 + + tH6fDIB8rxj/xYEu6V0jdxomYRtzhAK+C8a/JnJEJ4L0N0Dt9GgB826DQzMa5JS9 + + 
GXtyucnOmsmFjs+qZh3YJf/BQSZeCKYsAjcZcwIDAQABAoIBAAOFvyN0vNyzv6m2 + + XatDPemSsGM7tlLHq/ZAqBGUbGrEyE7SVsucF7J0LZlcuXAwlEDmahDWRAy1hcQr + + KNMDuL+f/Kouv9rXf7LI0aKKLP/QeCo85UQAik/2iVX+NZS0/4tf0CsofZWdCoyI + + 5eufq6oSLglfUa2JWzIwXO8ZCsfxu0UfdY+dp+/PWUJT04P1pkGxrWmOjXyCKSd9 + + fRvX0UGQBc6OsJZy8SWWoqU+0KiVdS58CIV1WdiRYdad3lNPs0IFbX4wQ6KaX0Pm + + w3Y8Spm1gM/UxKZ4pHuA831Go57bMLr/3aZPeT8wvTaG1KMh2qK0mIyYd8Nvd4NS + + dQeJDjkCgYEAuB9FGZWCPQBtbq9xDdBHm3585IJRUo5WzhEMcnbbqfG7TMYeNP+V + + sngt8B79om4ZdAHQnlmWwNd0AHQpGvi+V4OsO8VEpbtiN99nSf5O0PompWRm+lP/ + + wgXgVG9CVniFodocm4ouwy4sBrzvsOVBBtCBpq9++4iS9xX6FSMPWYsCgYEA8Amo + + u4x8XI6qbrXO6w4LNrB+vjjOV840cgQQCSoWaShyCqC/E5ZolEIFEQmnQXSwpB/n + + y+XTrPTWazEBhKhhed0Wj+SNAkCSrl1Zv5yPemjmuOUThiR7msDJSFLZhT4HwbEL + + O0anGk+Zfdo0e4y2unOFCvoOujBQuUeeJFairLkCgYApbwABzd9NEveNXPW4AhLb + + Lw+z7I+YYGewX63JZG3yRG+9yyepDYsGan0L+C1jjBs2O+JSgB6ortNv9rP5Wdib + + oQn1OWNFWHG12RRJVm0uIdzogzuZQaXgZ6of1hm7k8uerJKbffEgAxftPD9EM5L6 + + kZlbhUqyF/3alJt+fjFKGQKBgBd/2pUB2+rzzJuqEOfSKCbigIX+6bSO63N+ElPT + + Wv5Qh1mjAxHX18Ur8XMJjuZJlkF7HiZICcEU9yjnU065bVGQ43SS23ss9y861F59 + + 5U4Glw/i3VZ0m+U6mnoKImF5ASllO8RB1nos8MnxYtH1pK38QToh4O85a6235TOJ + + h8OBAoGAW3c51UXU4Ix/QkOUmCreh0QYruqMhdQSGIzPTVeBHm8nUVDqEnnbABNw + + 9dPUFRvkagDCHBM0J/G1SISKNYUStIBVnwOCVc6wQopC7K+SacN8Us7oG4tZwFOe + + C04YAg1FeYDYLuSfEDFxsooV9OV2Hrkc4dQ/27X1WwzgJe/9s1g= + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 8388607 + seed: w6uP8Tcg6K2QR905Rms8iXTlksL6OD1KOWBxTK7wxPI= + zfec: + required: 1 + segmentSize: 131072 + total: 3 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:MDMF:2fneabhgwzyxbv2toidi2mafga:ultt4xt6hlsliy63zbvdzgp4jzu4fez67lfbg7ogrpzarsharh2a + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEAkq2D5r8pQqQYCgRMs3aMkVJKrkLY/EFnNLqM4xXj7aDMwv51 + + CqMNw3A17DsL6jCVAFqCK7prPGpfBhFG3sg/n7oRbrUDn26Ow0IxP/6JAAe2+uc3 + + YnF44NnKU5+8hNpyy8GJmtabnNx/+0oaKnrr0R556u//NKoW+1sBB5lM9V8B3GoC + + 
bozR/kosFoxzjCYSKROg+V8nQRDW/aqqDndD19Mk5x56cND4++/mqWx49nTsAlVb + + 1RiygnfeP13BrMiUMP998a1iZtnpQyoAMVSqObFh3eJogmpL/lKh80V8PzeerR0K + + /iryh/zRxXGyj7HWE2qa9JNKo8qm2ijFoPYGEQIDAQABAoIBABaVXbIo66blkgf0 + + yoR1M8ZqN7Jl+3e3cDcHpAqQw8PCFtTNDeNB4KwfOkYxfy+jJEXmKZWnAbzPGeta + + vKEvjtA+e1Gi0Dh7csKwjdmvDJPzHCK8d+QR2Y0xKzw9425HHhauKUwsT9BSiOf9 + + bFLvEyDjAt1/7zjCGYDA7cMoQNekeficFlaB1I7XsTH2w6w5SlYmSLl8rtkwn1N1 + + kaHgZp+YnotGg7D87J1WOsGR1grnK4E4EjVodCdEmFaSOkpVeCI22W6ZCrKdBNSf + + h0V3vyEfwrGd+FH5YJME51cD62ifioNnNQjzKvQLlxXCprUe9JFbhEGdRRR3GF8S + + /WZbJ9sCgYEAvHNMZ6mo+K8gul5VyJw+UNg3fhOK9vTKwkaF/x1GAyNQ/pjkup/e + + f6087aJ9Xn4avuv4h/yhbbQSPreCr6lSIX6fGDpxNXcUWH5eKLk/bsOcmneUh9Hj + + kvPJ5k/20APSoAsnN+pxiWmlSyoRo21DAtC7piULchyzEsY0a10DLScCgYEAx0EN + + 5tCBYnuKgqm9ooB0PgANWGwkYEBP9lhPtNAIThANI0bHI6yM5p8wzr1exLMN0EXH + + KB0m8c0lw1/1iQMGWYmRljmteORCwdTb+txiZNS6bUY+mzbRx6g5cPzR923EyjGG + + lu66pL8JcfJQR5BlkqwVJTbM2S5uSTFjP0z0JgcCgYB2lzArxA67gKnd3mOpfPmS + + Mp6pTm8S/fVi0LKeWrOmYeEkdt7pupVwT3qaKLkwb7cxEpoyKX8E5F7e7Ojm1m0C + + +wXvX0fC148MKWnjwr/yWlMAuePUnPbTkWsq7oNpYB557MrfWz/bs4n7hRvYdnfH + + G8gaxBEx3HGsjOKL9dp73QKBgQCh966uNkVGYbg4+HOvGY3kLDSs8NMs7npRfH3m + + M8jcc83KJCmSRRwQB80r8OGNMSOEboQyhmf23FTbGTDFHBFYTSxsGhx6Dcp6N2ZN + + 6EGPRyD33MbdctVZ39Q5lTm0UKVL2rBWFl7ftm6eEmPRmH4ImRtjMcWYsVZy5tOP + + TCWWlwKBgGMiL5S2zJG2SSo4iDqZW9jbnxnOAbAoMh1X6CDlVRaj5XYe+FqXFEQ4 + + aK4VpalIw8WCzgTJPdJLJi/KJiWywco4lvX47tCs99PwYnP1hT541XRKjcel1pCN + + GlE/aY2p8fIYY/yDDSOLkssn2Zkye/mc1jteKlXD38T6/ecimkm8 + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 8388607 + seed: w6uP8Tcg6K2QR905Rms8iXTlksL6OD1KOWBxTK7wxPI= + zfec: + required: 1 + segmentSize: 131072 + total: 3 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:CHK:54sq6lrqwqlpxicg747gdls6ri:2aaxyrdytn7r74my36ek434hlbhe6glrgr2ic5vvpc5bbdxmvo6a:1:3:8388609 + format: + kind: chk + params: null + sample: + length: 8388609 + seed: yPi3JHKKbWaEEG5eZOlM6BHJll0Z3UTdBzz4bPQ7wjg= + zfec: + required: 1 + 
segmentSize: 131072 + total: 3 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:SSK:an3ntbx3uwl5pzmgz33s36tfoa:5eaytstw6x77wsov5jgvxt5vhcneykoe4myucajylkgznt76vecq + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEAsXGGPkzcQLANVVzczL+QBOxxyJv5zk4UiSmqIdJavHiGYDeI + + vp31Od0jdn1eTpbyixEDOrSKHRYwsDRavQ2mRA2NOJkWe0jYPIzFV9tvFBqjnUcJ + + NaOB7mKwFMHKUsIjepeCaFey98Nqvfm4Ba4s0MmvA/DPpoZEGyjK2AoUdgzlzfFx + + sC+XO4aIFKRcDxJX2f6lAOsKnUvFSgGw9qmEnUFI9kcw31Oo4Nt1Tkfix6ioEv88 + + y0PiJlMDIzbvOJO53dUCrMiAnv3CzpCIx5USKaIzoPu7fHSUmMIsetvKHoRHjs2E + + WqpwBsERKvOxte9SFUDiV3my5ssa5rq6DEZ5yQIDAQABAoIBACUVxenFZI78fHzn + + wJnmO3Jb/FfiCW6NsQsNsyoIbcBQLD11vdWg67yhNCUyhIBGWgComJUvYGI93gUl + + nAVBEgvNDUPT8vfnPJJDFYeMLAX2n1VioFEekCxDYeukqOVs/79CZRXrplLT+74i + + r8w4H9OvIy3eqXdzPk6y0toeGTKmm7WTWheMXTfiKpsn3noJBj1+OyjzfkEEV0Rt + + k2MDwupzLgFg9N2i+CEutoobxgQf2SUys6SM3Nf//+mtBd3pciQWusOKW7rzmX5G + + kO9GWTkG8oJko2OECRZExBpPdw+3/lb8HXnqLC42Zc3fECSeWLo0evilT/k9jjjw + + I2TC+BUCgYEA7/P66VQaCuskhT/9NDIVB1Mwhpqxvy0SO3yyASEIkCXGk1m58RsA + + bPV79oo8bD7nUkbQBS+ChTPpwaiPubg0pyX+TTAkAYNtlmP9bQl7LYR0tWDjIuol + + Zxa+HbAvmNclzQlGuk+bgevT2Hq/0/E4OZP+XcauaQfAmfi2FXOhuC8CgYEAvU9f + + tXFAbYbNNYM19LW7DXTVxQAk7+vn0b63ltpysqxVYGRxxReEtiHIgIapBO/aYABo + + ZMJg6gX2zGjcfBOzWKpnIYx4jP0cUrvNHm/by3/2WNOAzE2dYpUZ7+UzvujtI9Y0 + + JrCpB3q0B2885LVchDTU5XjYu+PVo5dFIwAH94cCgYEA0UsCwL/dk0Z0bVFZ/kvs + + sZ3rBo0pmnGqpH5oGLoCaRC5+s5ZdCa0IVWhkXITr+rSA57GVK+S7bJRItxuuHQm + + YOCvxg3GaheD35hJdPC/Iv2UepwOoeaPRzK8EtMZQPvv+b4slddX8WOMPRcb+LY+ + + 72HZjVv9xpi/cs1PrLhWB3sCgYAUpRJP7DDVgOziGBQLQsJKXmJtoG1myLg4NG87 + + AUme2JJa97k8gCsV3atK8OR/yFRtQb4gtt3wx4O5mPnqgg997N9gVjxTS8sJ7rcY + + yaQTljncR/x0y0YNmSsB8WHqQOaTkOmRCpT0XtpBMU1Xt7uGI2jQOZSRMPB8baO5 + + hGhaAwKBgQC5GJv3sLa+21PIlXp6UuIGMFp39iwfAKchhnhXP6rPkC2pP2WB11xa + + W9MuEko++3e7c7+adAoPFdsjf+a15CjogEURZwskiRpBYyBugM/y90sWM7ddOsJL + + 3GD4Y0aRJ1+N3R+l0MAv8JMAo86g2Xl5J423ZfXxnosG9JvtvPGndg== + + -----END RSA 
PRIVATE KEY----- + + ' + mutable: null + sample: + length: 8388609 + seed: yPi3JHKKbWaEEG5eZOlM6BHJll0Z3UTdBzz4bPQ7wjg= + zfec: + required: 1 + segmentSize: 131072 + total: 3 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:MDMF:5hiosdrt4b4h3x5fx7v7cxmyqy:jkudr2qefit3qow4xs4eldwclbiv7m32w2cvu3igrm42zcak7noq + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEAsU9bSCK68rb90LSqN0IBMqtLfLkVWlUdKXD4J6tTtFiFWuoB + + Z6v0kpepHIIKVfWQc74ms5wFX/M5TzPhjuzf+IXMMBC2d9Y4p0iBwIj2S0RIPxH6 + + d4iapNToglT8eyoF0I7r4S6iV7t3Sr6q4DqtPlQmgCSvKbDuNYuP7eygEyR94Q2+ + + 3DcfwZ59W1u21humiftuoy46PSDMR5/RFY/0KfMaIeGnqO0IeWnqdSsJlyhy9jrq + + Sa27VycC6p5FnpzqkTYlY9WsZn6AXvrAbVX7wmNT+3E7LfO05GIBNDKCK10Ruxiw + + lppkD0oGdei+cOLTS+L/MYS2xpTZcPHQybQopwIDAQABAoIBACxz30g+AM2iCA8/ + + hB83apJ39IRv6IUNqrJ9kpFreCBSQxiwayrBJx9ra2fsyEeVuaHy7cQA3S4Zjegn + + 8yhAhcRKUw9H2V9A81IpMPKCw/DJzS0Wxksakd66TBKE2QnN1shbVJLPfL7vDnDM + + TY3K2C00rrRYvht/MrF07GAzf0xc4kt4dZ87/o0tsryZogpJRzQvwb7T8fiMNMyE + + wYYQCA5Tw8/iYKTjWMgevzi6qy/AiLBTT+KAcbaQpPF6IeRoCfr8BsTRz/Zvp3gb + + UUOWC7tdOKNtE2M19ado9l7JgoXNQ3p6bIydDFQSEg4bQxElvtz7SyukLnM3N3PF + + VZIm5wkCgYEA3wgwDFsIAm5f9n8r3SR2sEJgPb3iF2/x++DxI9SJRodM1qT6RAXD + + rNUqLSXlUUaQvUcDvIcwlP0hpETuBOjQwgCd8QEWLPRllYcWgCkBtoe0E5GyUEGw + + j3sDZ4EdPGQUaJpWFR7ZKEGNVjFDcRq3cEGQUkJWP1uxrVBQU7ohtVkCgYEAy4T9 + + nvCfrmUmhKSawXWTVSDVhMoBNAyt+KCOaIevHscCj97PfuJNhLCOrpYQuShMGtpF + + 5DqohfqBVosXRmGFqjxJbpsQQaQCL7zKy0e2kojgoIdSkmNxKoQup2oew6SWuUoM + + /08QaILHjd7t795xIFPimcFgVtD8Uy5OaA7jDf8CgYEAu9KyXAVhZmK7T+Pi9bYa + + ee88C2LYfzJYD+1sRedbv9h7fhurYxOTqP5PKXxLdTm+9JdUbzVOVXojFaqy49GI + + 1IgeenKW0T70OYttCHsAJU58+Snuh6X6YaqPwF+8VjpV8Y1fxyOWb09dDmQoTpzY + + NKISPyP8bBj1NWZ4bzpF3ukCgYEAmJDcs458lfab1mmy3X3vcayIg+AO4N70d+Kc + + fv3gKHlVkVadQ+gP5n2YqIY0iSkNTD/+juXuOWmeFat1SjyHQCOrhK/Xku2I+hJU + + D+l1kwnrIkvveW/0gMPQWY4y+8ThfItnjOjPSxlm0RKiWePt+CcKQur09n/5971J + + 57XpPi8CgYAK/ypz0j0gYUgwvYr+URad1OtaF2rgsYNt5XwoJjgPNIBNH0i0K971 + + 
gIiprx1i9ew6xWyuKp3E3dtClZ28m1txgOrcMuP6L3rEXU3lA6E3ftDY+l4wKYK6 + + O9zVS5GSIqjhUgEJQU+M6zo31L2hKcOYY95zO8/JyWfT+fgBC6mW/w== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 8388609 + seed: yPi3JHKKbWaEEG5eZOlM6BHJll0Z3UTdBzz4bPQ7wjg= + zfec: + required: 1 + segmentSize: 131072 + total: 3 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:CHK:vvh7fppprucnsblhp2nq7ixyze:ck2nmw5uynyyhbr3s7h5ciffgzw766bt3e5n3qx7r4njjzqzkn4a:1:3:56 + format: + kind: chk + params: null + sample: + length: 56 + seed: YQ== + zfec: + required: 1 + segmentSize: 131072 + total: 3 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:SSK:p6slmwqqxjwjrnch7c2myu3j5i:kcvumf4itbihkvpkg46pwcdcajyr3wuglmiyw4gupqshdlnqp5ha + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEoQIBAAKCAQEA2wplwURV6PZJmwtqdq54eKXN3yhOiCaDEhdGyfEK9qUtrOAj + + d36v7ejCM1pxunmjqM0P/zMAkpuo2ezmTNri6OVwITtFrKYqhvzsPxGmysJlByhD + + 2V2Qjhngbc++q9ly8ArD0UAaq8p/c6M41YbIkafbtzrUUYMdx6kH5OP0T3fppNO5 + + F5kwnxDXEIwSCuXuyF5CZajk6RO/iMB/IeYhiGN8Hr8oZ1pgknLO57OlbnAL2XD6 + + a6qd4RnSCWSzWHRyK0N1/AiuvINBBN5b+l8tdz0BnrPYL8EVhA5XZ3Nlz+9y4Z6v + + p7vfiheAqTUUDcSBR3pf5G/AWkhIH4FSg+wa8QIDAQABAoIBAFl2Oq8gCPKYPOHd + + XMNSaRPlrFr7rG+BQ0FNTnVGRNMODcSw2uuAS7ygt0igJRkje0uDTYhOvWojt2gi + + kMFNGSZEJ3L7MW7dgzsU7CyqOfRQR1EQCf4qb9MKEJbpJZgsvPv7eZTqWLpXf4ys + + WpcjcKHE7EE+/t637Z7Rk87Rp8QYlVS9Eu+bXFl7ZF15k37uoPA4Or4RZD0Btxnj + + +QoSi3UacdvU0bkmoD+H/XBFnyEPNPKnWtxV2lQmp7zLQ+2jvUa1TKyhQyFuLb9U + + QhqGikpWnVB9YTskw5Z57YJ3w8pdnUbZokpblT1MFOc0qUNlstjXFZ3+q7s/ZNam + + QYiXwc0CgYEA6A7yRw67Yy/UWScJYqA+c54RuxmQhalk2jockY6Ur04EHA5TRBzb + + VKhvX0Po6fu6NNg290BT+jFCZNsgHiE1sIL6M0rxPJEDsNUe580tL4C+tncsqTc3 + + wdULPGYh0IYgZ9mPWuuqX8TAels+P73FFNzH29uRDNpDIYUAE+AcDKsCgYEA8aOh + + PFrYqE76tkYvtUZFgjI8R5Q045eAG4eET2BsJNRuP2aAK/bJ5ngwlo2kHlLtBi93 + + Ogp/k5wXi3JQ2y1vhjGQAu3cqIP9vVVYDZhQzx6WKy5P1tVcKw2r/KyDDiqqGcjJ + + A9efnVp/S/nESeHjOfiUhMUzpsCExa5vdDC0/tMCgYBMQ204EQ1gYX2l9wBMm2Eq + + 
2g31sUcfxjXQyjxNUdBndHpBRivzPJCQV/KSGl1XWFUvvMcDpu5yUPIC90is3jko + + 00KqzLxPLVFLMh9ACtwIuoTyrmPNEMqQNxXEOcRvJUVNG+DS/pQ1eRHQpF/mztUQ + + MCa2iIg48xoQ0AbggUx2lQJ/Uh5JrTkyaABvM4Kms/QtqxFnauvzDWVvI+vqCw+5 + + sMqArQsog8ha1PgDiyaXn7aO1otK+W6X7JIfbkRrNhE61WACkPxFAP/aO33FbtlU + + nQ7H+eTDPT9FE1ySFkyKPUZCiICzz5p3pAIdQLShAHrDve+8iWJ7KzBB7uxY9COZ + + XwKBgQCRCG2U7oHU+R6eznmLIUscowU039DUNBxe/loZDOpx/fN1aZPJDlaII+Sl + + zkcOqXR9ILw/NDTlfpeptYJ+ahBgP6DjX5saA2T/uf8wgr/B+Aox0j9iBwfUahqn + + YVukY/sfdXp1xdWsllk0wc2HcoW+L90K7qHi0ihnR7oK4s9e9Q== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 56 + seed: YQ== + zfec: + required: 1 + segmentSize: 131072 + total: 3 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:MDMF:frye2ctqdffyf437do4uq4ngpi:lloe5ajvtkpmyflag7phiacafx4pfwt2dnriayshfj5hvno5ltmq + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEAuwzavy62iNdYkMdbRNLSzYURMLaw3ipteYaD93gk9JfkDt3F + + W1Fz65n0IQN4fRol8po48mzPpi2C3Uq1N9FJrs62fx5pdMTzMolXWJbNY4xAXJ6s + + z6ciaze0VpfH5TMd4ruRZQ9hLPcGT0zODgNnGxS0l67+VVUhmeNXGqNKUwnYtUEh + + 0RMym/Mi/LmGXNk1hBtWhC6R4IhKhZgSVuD4yr0FEAlxUOsCmritJROknA8boSzu + + UFrUrrFMpo2VBcwD7kfm02tejDyXfe9aLn9kLIrbSG3YmTAQM/0NkVsu4PUr4iPg + + ULPeyUzoDhYFOnEAZf8aXRKQL529SRN4YTayIQIDAQABAoIBAAJTqlvy+Y/Rt/cT + + F8pPIhKu61QTDbexyOtYVkdrjfAh/JMHxLb6WCoP3/bSK3tI6jxumTNA0cN0MPrO + + PVtcpeFADoqdxvuOIKVaCoVeWN86ZSmRyr4JivbQ+lQSbsjl3iMOKMScUJ3l08UE + + RFLtzWhLlWSGp4DJvpGCv7hj3B6UzRIpPiisYUecyoiA8+YxueCRwE/GjdEBtAqm + + bnucpTpEgCE6idbD/8/zzHPk8Y9wLfivGi9T3M/kRWbhv2x9qrmaSFTxp2eARISx + + ohWmvlvl1XTF8ghvOifsIGOGARXk89AldWbohjB/zs60x3+Ein99WJTHSvPKBAN4 + + 54ftTAECgYEA6M/nz+42MLOSrQnotL8L6EqAI6vB9h/Rr01eRmSy/zB3/o9/PpJZ + + 7gYLpdF+PQxU+ZpgYL5qv0gPppLncWe9M1ET2gqImr2nS6KOpQ3Kban8OdGtSoL9 + + qoSdrQPpOM9awK4y+Heb2O+VsVykcJAOzH/wOz1sDH6GNe9Yldw4QYECgYEAza4k + + Eo9m39btQBS7z3Ne5zwT+XNusviF60LA8B9bepx5f3uKkcuSE5NOdjD/AYtBbEFW + + 
6tqyGbee90emW2/ei8KlqkUpTwYH+9+59QTQR2cZUFYMDsqsXQCxKJxk+VG6ikJg + + /leB+M+/H+w3MD6qG2VcfUTmlE8NEjoebSGVgKECgYEA3IG25WmRaBVdcom1ICTn + + aU/PCHoxDyZaG3jjNzc/lpbYwII3mhNSHDEbrSW8NKROg89lQ5x3TM87C6GOlwoT + + 2NwNOnLJqg7ButCv3MMwHShonnbrdGyXSH+tPGc86bL0GRWlb1MSiKl8Fe5STc8U + + RTtUVTe69CaOhd06AU8A9YECgYBaTC2CCHr9onoeO/wII3pywilyxn6/C+SfWHsj + + 8GBVAAVHNpGrWFgVSAKWWQRbRSu/vx/Nk53FNJwRq98ZHY/yg83/ZsWv79HpfltB + + eo+GCXlPj7dTdx8c5YThc2fRHVRsBqBWiUyCU5JxDV9dSuWbiXCFfo5MZjgy3Fkn + + SCs+gQKBgQC2d3WE8uxl7F5LVyM0+ckCPzBQX1cdSaslQ/PmzIMkjmpm2nUVnhGF + + Ww6CDF6GOxLaRYloV1o8txTaKAKbrOZAUTer8RqK4ha5KVSWe+eW5MazbtBPGF0Y + + d24lR0kEUqH0i6qQodmnGsFHf4l/INvbQ2VlBtR1yzrBm7fki9WzJw== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 56 + seed: YQ== + zfec: + required: 1 + segmentSize: 131072 + total: 3 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:CHK:pbfcdvxhbikxd2hcc43oel3v3e:y5txpmiptmoz36ionmki3p6krmdbiqasw2v3wdq4ia5lmrhh33lq:1:3:1024 + format: + kind: chk + params: null + sample: + length: 1024 + seed: YQ== + zfec: + required: 1 + segmentSize: 131072 + total: 3 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:SSK:xol3f5smh63b63cg5zcelc34zu:646qwuwu2vkllhe4rcblvp2sdwc4hlffoh7p4gaxmirmtzuvdiiq + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEA0O7WP82q3/+vIKJl8tvzSgqFP2N9NpcIJ37Qp1qgVJMZoipl + + ulRnnmf+q5rNBnIMBtSiKwE8KDStxVhsQRloCb3CTFR4RMYIsMyxgKb5KJWLN7oF + + zezgtxzU+czBUhuXy3hsW7W0wLgrjJXsZboey2exrufDk3VfZ/8mMOVB2BRO8Eiu + + e2idLhWvM9hSxLz65clRaWJR3MbBFvXTR16e3urb0eBpRTKqm6a42YPR79eYZRJD + + XGHpqYk8K5AMaS8Iha+37c6soPsyNorFTRBU/CKsDb00v7NHutkuNVcTk+pY2vok + + 1ymla4hgZBfF9BFl6BKj+UMSrxwaPOdYAFR0/QIDAQABAoIBACakIX5b4pA6iNBr + + kJjduo11TCcI1rHXtYOedecZwQDbUtiV6EoRjbdzituAjg403goOXe6/s/lIouHW + + hHD2yrHQhWSaE8M+cAGI3gRJ/VdW7xBPqH6pIndjTvVjO9bccRLJ0xqUDNm+xhki + + aj33MfZTF37ecOKvCMXiX9UXtXmJlHHry5fMyaQp13lyEU25IHCVbfgHeeQrT3Hn + + 
jS2fJi9Dg7cDf14lM9oMW3T4rH83NabRkz9a392RSnEKubYMWL/aHcUiy16Iur5A + + MaWxofqcFvF6jxJK1giJaooMCRtWD6EqcWLkgc/ZTdztafswj0eIVXum8frEDxr1 + + G3kGsLECgYEA6Yi6QkzUZaOoELRPQA9YVr0+sx/kcsLA+8t0+9qpUCuWr2p5XCPD + + YH1vFUD1iOCHTVq84bXPxTsuS4xQFopHeyhMk2A9pB3/HCsVkIuoEUpGCAQwsmMa + + mVfqtX2iLxCy6/KBG/W61kiZq0PzpfdCTZjuTMqK+tq5xmSZl/pj0zECgYEA5QhB + + Epc8LnBOsgNVarkvj6wJfsKGjyFDQZd0KtJfIMkkc5HjSU+yx/dh4BGDC2UJ48UD + + +aXkdhXYXvSVvhW7nrNmOU3cCy6JT08X4BmzwdFcs0uu72FADoPkQLS80KFJ+xP4 + + NQ2OdE85wX3/Ac8aSSIazzkPK9/Ec8TImclUk40CgYEAo/zC7jONIiIdrj0vOUiN + + O7t//8BxZrSjVyyzZPdS1V0GXv9hYPYsB+GM01veDtO7rvH8mHJXB6RbCenpgypu + + r2jI/OQj5M67iUgnyGyJBDsnmhF3MIyu7ObzhaZG8M3FFjIfv0Z6gGZSohUBTpWm + + FV9CVuITXbuhoFKcGEBXQMECgYEA0b63aUEbGiQ7zYaECRLC3b5di7q15ApAP6dl + + +XljKPFL8pLeJVtZjQuelMc0zZCgd/kLZOtpyELFPmCbadMZWYNN0JjfNVZO5VS3 + + tsGS/6KuVHyxHgRf12st85wRdrbeu5NUMbHSje2oJO0wRgXWOreC+dd1b2aj0Kmc + + VEourFkCgYAsQfMB51KrrMyC49sUsYm9weiDBGQJuTATCxX1mSC84XRMiEzf1pIh + + csVTLMXlAGSZP4C1zqvWaQkETCjS3AfEVKnXI095dYp4bADi6QglwYV5GFvpJnMQ + + lQwWUMJEup11XmTe3KxKxjPj5MFRXBo7lZg8SJudRpgvE5IHQRfjXg== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 1024 + seed: YQ== + zfec: + required: 1 + segmentSize: 131072 + total: 3 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:MDMF:po3s5pnang2csh2auurmksfhwi:q7ltmms4zwlmtmjw4o4dy2p72fae3fzzvzn7u3pwvcnqpqzwo33q + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEAuteEAZr6K+1C2iJbXOS/KguaPV5jI5r8lmZwenO7XiEnnj4f + + iDaXFF0Sg80UhsKmdj8IRPbRIkUMHzui4/g1xUXHJcosuIPxqamru5dDGj7DcRke + + 4TeOfKWLWNwrf5oL9I9NJZ6hicEE2Zg/13dZeSDS6Zyyn6NQb5Txj3H5ntHMlcxv + + UWGCESsA3A+co/AwHtin7Up+29i7NvqUXgn0paBpE9b8ZjQ00QZVksEGTLH0KwHA + + cRDBnHrY0WPZdYS6swgsmto+gtEt8z6J6w4K++eWphQdeRQhQNfdIqNDfoa0G7Sq + + R0uRaTo3TbHbzVWLPBEzS+J7ELggWja4uR1h0QIDAQABAoIBACsNrUPnb70+g1ib + + cR0TMr+gA88fWE3kkU6g1UtKLsMudaAfpYlwNtkA51rKn2+8G7qEpMWrcB5q9bOe + + 
vNa2I8HM5epdz3dHJCEZ0VI9NT+vdb6ycKyp7iHnzZfQyA8zsoyMltTT3FpPSWxQ + + iml+fXYNa7xcGMbzTX4gvpb0xvXi8gcy5vWEmXaUBeQJrO/5Q2elYxW8ovyXvI+B + + u6+Ul+LCBzqUQsf7xspwydSPDX2lJbUeJXJUsCqC0+1PdRbNkXs7T6sjZ3kvTI5G + + ftGl9tXdLkSfJa/yvtUoKvhWOgyn4JZoc8nuNetQhrQToTkAqYn487dcmVr91HWf + + FS9oRP8CgYEAyC9oX5OUmBHw30gTXn/0doC+SPbuFkgLvE10xzSnSjGhuqegkjHl + + IPOT/wL7dipYfmN2CBttRYfyaelgGnefhCFSBkJYufIZeUWQhbe0WG9TPSmEp0t3 + + rBV2EvLPc+G/Qm63vrtHYlF4SxyUFd4RykETwVJ6LScPEj3hSQGXZ5sCgYEA7u+z + + 8eYaTNC+uttNFB03UDHkylV92daFF2dVykqdcFgk7NiyYOBppSgVsxaGW1epOLbK + + j1QAHmRPGCKnRltzV4+jb7+hxohHhakx8YRgPb6c6HZ1iW8ZBBN8+oRLsE3cSEhe + + cyoiwDoAWzviNRhIKR9cUEs6rKoHLoPiUaTvsQMCgYBpHNuFNAzWPLVpyILDIBTR + + FJDV5zLk6DehTFqBLxiYUK9HPzWFDkXto3iWco5vYZTN6JPVdfFOjS+whSY9P4q2 + + 6ngTaUsFeCYAE5LrY6aCuRHQD7jjzzCrXyl8kZp0kpjG3TQGJng5G+Y6KmtngA9/ + + T+R7oj8c7mFvhqaAmyFQ+QKBgQCM/NYyY7ObJgWVXrfxqXetE3PMTHvxYVqxP6Fo + + t3SFCQ3oz8kZzvGnqap8PUtUdLp+o6WMw2U6ibf+JtyLcITz4ubulqYP7vQ9E2RL + + /e+IH4SYyuV6Dhs1w4YYkJ3Uz2yvHjzVOcS9prv1GbXV3Jkf4shm/K0Hm2CXeuy9 + + flSNHwKBgQCR0jQz/koDWvM79/3w/NjFEPKfeEmdEFEFkOIQQiUf1y/qP6XJ++xn + + BEvFjp09YGjDAZdxrve4AAGgB1KbeBv3w2tKxg9Pu1452YFic8zgqzlQ9Dj1bsxq + + oXB+6DwwyyO6v7MGMZaUTcLGhlzulAFW6eNugvKC7ZZUHzEHdSKDYA== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 1024 + seed: YQ== + zfec: + required: 1 + segmentSize: 131072 + total: 3 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:CHK:vkdc7iae4pgkdomhbbb554kv6a:g5xkatxv2entfssfh5eqgexb5j3qs6jvohofjqo5p5erlxlx2dga:1:3:4096 + format: + kind: chk + params: null + sample: + length: 4096 + seed: Yw== + zfec: + required: 1 + segmentSize: 131072 + total: 3 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:SSK:c2vvrwbmunt6tov3q5eptobzjy:ppk7ttf2hkwd5ff5rzbqlfjox7ygijqzguzz3egjy5quph5lmrma + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEA34lcTzYiozXhilksLDEew9TFTRd21etDcUnHEJNt92MdX6eW + + 
QKH4V3u8l1cgAVIa983CuLeQcR/Qwr+93aQ2YO0GiC5rYGch9aJ/ybTSjN8MF2QP + + aHjc3etSS5WoF33OjD7wd/D84AkOmXA6On7oTcfteyeTtCql1ssnrIukgw2qy6Hs + + w67kOYeKu/omlmmKHX2YAwFjR+j0lBNS3lV1S+pEz97kNc5mBhyrx+XFWXxYvtIa + + DDaObv6BsTuNbegqLM1fvhDUbcAq8ymnr/IttlFGcGzu1w+1wo6SZQeO23/Mrb0y + + +PJASekB6CDr0tIrvU+0gjG/SroDE6oR/UcC/QIDAQABAoIBAAHE6qWeP8TuuSPC + + rYS5OM1pyP8uTdyTxmgs1ZxnrLZvouMuBnrpehE0ygFt1gBP3/KRJT1LOqOI+G22 + + 92MIy/z7zN7vQYToT0HMV1phCeX47lwRbyvJAedOiv6zjW26+BNur7GLVAnXGILI + + +06ZalZFqR2d6qxBrPSK9BWIbDUBtv9u1fe7eOsjLkZGjYBsOxrTtiMcDILRvzHr + + gWzEMSdfCEiNdmaz4j+JjYjqkFiE6rKDv72XvFWS2NUSmhcLG5lKKqeUk55aAEWz + + lR5xamqdyyD0z7RSJfg9xLn33uFybhzW1smVcjpKP4ylBmxLNFGNQbXPiPH0ja1s + + jlCsU8ECgYEA6jDz0v0oLjUbVceQUZpFaa1orQEH5OCC2Kmn8xPR5APF9GwM0bgI + + phPTOp/3BM9KNMdJbwAHJHDuaKwTZvIYyjFijWQYP3iKRZi0uYG0w28m9aA/4eJE + + ZHoo0LEQuyxp058FLkrZXScaxv5O4piMKtEg1HTq+JX/brI4OB/zx+ECgYEA9Fpn + + hh+j2q6+hhCuDPbxYIUyf7OOx6lCEs0zlPWXfyM7VeArn5ys8us6jQHIUu6/rhf8 + + bP0k1sOvqCMyVfCh5iC+/3zAEmb2UOUQx96ED5k8gU3DNJJbI+HSGt4p1b+D9C/C + + twjkawoXTd5upxouAuscCmnw+Jubt5TOUkY7Lp0CgYEAgUAnMYk6xdXVklAj3IWy + + TZLBNMpe2vj1/jIUWVnU+20BsdZ4dL6HN3G1oKNsp6DoKZzbcIGpb3lMe0SNKMHw + + 4JbE95gIse8LEUIobEGjzEDqVaHt3/MLIBEzuYof2821UnBvYY85y+mrI6xzSSg8 + + I91rqxYkILJYWXXPBVrNJsECgYAVcy4tRu/CTZ6p9CLjPnY369lf/molOsVzExJZ + + HCn9XiFiS3ho3X8NH/sWz7Y/GXg4FyDwjFREig8ManKLusDri6pYkSHnO6SZu1H6 + + yZy8Jc5651GgdsyLXNJty1zOx64UrHCiUqSChPNAwari/lhVpz/h5iTiHf7QYb6u + + 2D1vvQKBgQDUHknI6sHa87DWtHEz17b6HNeEHlP5XBI+THia35LtrI5NUxphFTMp + + HSwnZmDufBVd2Y8f+0+vIuyZKvZbp+YloaTfonm12kNSQWkfwKL2cF1SteKcl8Aj + + HVrd38q2YwG+QV4AHmmc4xU9LySlNECFbT7sJFkZlEDtU4qVCqk6mQ== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 4096 + seed: Yw== + zfec: + required: 1 + segmentSize: 131072 + total: 3 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:MDMF:5zoan46gn22bdnxt3z5a7dahyy:o7wkn2sawrdpukt6gdoh6wu6xuxcu4k556mycqtfdhhhoqwfx4ka + format: + kind: ssk + params: + format: mdmf + key: 
'-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEAw01Lm2+OJNohSz0WJZPv3bY4ntD0NDLz3HN6LpNW/YG2+QeE + + 372TJhEORe/V9yNMzxuntGHI27EtsBNy2Ux1IX6ZoJyTn8WmwTxmAM4cnWWVhaFu + + rQgPXwhR/DGfYrsMgg25m0t1y8eB2n7cZHkLRDDePEZdk+54ZbEwrC9hQYis+xt5 + + CyLgpnz40/4ubnwIcifM+QZKiYEVz+tmlFyZPJ3wEdjNAvthQ2/MH50SCxn6JdYU + + OhX/IvJfRgO3PPk6Av0NhN6pJ3ifQscfwjzDtdiEbeiNUxsZggQ3H+taIoABtZ6m + + oaghx0k10mkgcPrcg2kw+qLCZEYoSCyLHsxpxwIDAQABAoIBAD/JNHrpNd8iYQJe + + SqfuR8a9V4PDUibkR1JGYu7oT16PqY9vHb4nf/JMWsGLwfGsFU+FREI9N6lNFlNu + + HrIK7yyH2SwkR3DE0KBHFjeIGb9saKfS4D9iJQcQRBqeqGRKHB0z1115iVkLaYVP + + rrKf+AaHAWZlQvXoSmlINFHgTZ7lrvcBM9iJ2ix3Q5C3rV+wdNrC1j4RRmv818A0 + + 5nVSuCt3rH3j+xwCXkpxzMa4AJo3+RKy41/2uIrV+DETUOnY1uLeHXAOVj+Jycp1 + + leo8pyue6+G2nyK9xCgnSBXTDEk1Vn1pGzY83deMpnkdY/wcAYgBt1dbA+59xp4m + + 2OMHd9UCgYEA08JxY0V9DN7XqBe7UmACuMD7YBxjkmdkFDmJVCyIu1WnL6IDQ1CY + + W57wpnoUMMNDsbVrK2D2YAL5A5yHxcrD4xS6QYhCV0DoH8MGghrIs9yhRH34h5wg + + wSj8HxO0mU3J09oGvFOBlFeEJd8hpuj90hJ9EUOvFwB2LtjOOvozMpsCgYEA7Bqm + + BTjtxtXRuQ03WM7XmSX/XZJvWEOQAv393LnZDTVJVjHKmzl25vF2/l0Kiqak1Jdu + + kw9ipuLmT673v47/0LlTU/iYP/ZEgvbpkZgve9nWLnlRN8gIwJS+P9mDbyJloacL + + qeVhEPsUxSeWnRCOEsJbEU7PmznJwPjl5fZwskUCgYEAr1Tku1RSyPBN0VDs+bSj + + LEQlHpwC2bqfg5tsGHTTNYEi726OkxLNQ7ci/ERCKWnTx/U1afJbrH1pntLhHCTZ + + 8lA8M3xVqZcFWx8IaXsxyLKaGHLQ77+W4zhDIJwZQYHF5ZI1V4Mw3BlmQlEwtNlf + + J6vFQCExfLMWJ429m9mDwJcCgYAVvApGdswkvrA0ucu7iCb+uSm94moPlQCf1ePV + + uuIJPjuHDMRa77pLXjUXC1eaFeccjugl74ekV0TeVvwFjVNtUnIiS8MwOCpsZ/Bu + + b7UHULFPy+k3Gln76HVvCUX5KBB1BhyjwjLiTkrGL3PE964seenKORgRcQtqkT8Q + + oVQUhQKBgQCGXXKYWdX/mGxXNEnaSxqpQKOmQwnldsPggnRA+u3M5pDIImzt3nbR + + KNWavPy6v/v3GLHSCkMvb+oor7qk56YiHpfmL2pjkNAiwZwcdUky05sfHvhs+9d4 + + qLxi7Wt2LH6aidBK4feMC8ReOlu31JCKWjcZ8RwNNPEXbxjtOv1Eeg== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 4096 + seed: Yw== + zfec: + required: 1 + segmentSize: 131072 + total: 3 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: 
URI:CHK:zvla2jyu5fqlb2s63zftyfjnla:yqspaexhew55cvv7w2m6czezhzkqnyk5fea4gvnplc5za4xlvbva:1:3:131071 + format: + kind: chk + params: null + sample: + length: 131071 + seed: LCa0a2j/xo/5m0U8HTBBNBNCLXBkg7+g+YpeiGJm564= + zfec: + required: 1 + segmentSize: 131072 + total: 3 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:SSK:5c4mykwk6eqqcbh3slodybxfee:uzkht3h5m6ianotz2cgy74zxirzuaxtkxpcy3ghbafoime45n3hq + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpQIBAAKCAQEAq5KchP+y3p8pRzcw8rMZd1Bii5k3b4TNm/2jqy/3jUt7JY6T + + pNmSzPnTQVe0iz2Seyg/2qynbX5GcgQ3Yon/XSXQSxkKmVMSq2emWu61G3k0y/fs + + KpblK/YLGUiXvaGZQLeB0jlyaaAWMKrFAFcJ0Fg74zS+8rfeOQxGMtedKJGVBcin + + Q7OgC8JjLBPKlFgossBzZjZRguTCd+6gzz+1j6aHSgPVYjjNQ+g0U//zUdIImSgt + + tSXBDiOmvRWG1oxdtY5xqoY3/H2HKJUNK++Q0D7BLV6bQrYWBusf9JxJbBQH6ite + + Vc5RMW4b/tsPJHvbhtozZO/ByJ0mAgy1QFaVxwIDAQABAoIBADCBDLW+0fME3PcQ + + n8plHpRwCcP8Z0MkMLpiTMRnFZ2A0sot0gifJ9TB5drJsDVTDVe6675m5BhcxA6U + + qZG2gJZ1S7sHU8s/xH3nmgyIAnRHYkktiDsMGLLCZqXZs2g9SKWWm2FysykwREWy + + 6WmssY4Qe7HCZh8ZIv5OvYO/F+NxOSHPcpX/ubbypIlIu6GhhTkqfpw9a4v7IGxk + + 4i7J7rhqC3+jq5iU3oeWGwUXxN/ZVwAmx5V1iWgz8Q+0Khc55od1BgpLU49hxrIP + + qmY6cbiuKo7KlgyuqLBuOrrClJDc546sGnbvudQf6ENRjDhY4p+udwXUSBuCLCYx + + P4Ud/i0CgYEAtlnYKLLuPIq+jwMxefwJtWhoV9qX3sQw2716+HGCbemdFoPLBXia + + 9VGZ4dbx6V+pa2KE56x4s9fe1cpFOVfaoY8NR2hOgy3n5OoqyGv8xnTGqdFGDDDp + + mC9a2fKZ5QCtdDCosMthnT2nADr30NT0RyHVAA729zrTsxC/uTDNYDsCgYEA8N5Z + + ymG7j8dzWvSTLy/FAv6hF/ovPh2/4df33EvNjLPgzhgC1qpuOH7Yw1+CbCMOG5Kf + + YXjEZBBwa4G8mF2NuTriY/ndezhysFGoWagmd8UDm66DnhQMUoYJJR2smJ17XcRc + + mjk9Cx0kvGw2bg7LSq9wZjM85AIandVV/1nfc+UCgYEAitKpWoqmDlc+LZgrwYdc + + SMwcq82R0xkfbRq6lIut1UmFuw9Ir3ia1+pwsVs3PgkC7OrK7akDFz9fuPjNbJNy + + sY47eMJzCzEWmtKfEYgMn3VljQDyR/Ow0pgynTwxZwL2Cj/FHRsozFGUYvuBkG2f + + LswV4X6DC8KwSmGU5ELAB+MCgYEAww895dApxXD+8QyZWSA1SoyMRs+bjJEpACsW + + lXdpuWU+S4hUXCVe5y+KOQXSp+HnndqqaZQUbviFIfrJkRZKHFQcXFxPyWbYMgOe + + 
8yRiKqIInv2/preTlwzmwQD78geujSvk1hw+XA643kI//fbLGOtkec9Ko1c02NaY + + MxdEByUCgYEAhy17HFcyGvj0QnwgoTxWB8AGqivfq7ZkCrBZ8a4jtNtNbYBiCH2i + + 9v3uomcjTPhZqu7W/wloYhsf03b4y/V4ABQHpoiGUq6NNAQoSu7/V96jW7K3HOT6 + + b7YMYzPnRAkYmruFnL5Q371e82JeQl7aK6hkxglVF/AeXKpEKDkMw58= + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 131071 + seed: LCa0a2j/xo/5m0U8HTBBNBNCLXBkg7+g+YpeiGJm564= + zfec: + required: 1 + segmentSize: 131072 + total: 3 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:MDMF:sjzzvmll3obdgezri6tldxtdsm:5rcvg2egftpcrsli4giqg6txgz36xdackev2yhqdubbhncw5hisq + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEogIBAAKCAQEAjw57luJrvanJrEOiDtCSd09DWTIrDWKDTTuRHlabqUyu7i+Y + + O1+WUn7yWdcryJ+hQoH91gmexQ61krA7RqjkjFBRthSKeVhlEA7gFfFVtBTxHsqW + + nUwyKzlfIH3OYZjppugjyaHX0uFEVFbFPYa7iQHvUbWzzHwlBFbM/fdM7qfwV2cN + + IgohXyTeeM0JHj/FO2LlE7wbbzC+nxDrdTmzbLy1I3a1gWiQeFJKojuY0ONtlEkv + + hD0TS8lp3VQ/L6zFDdAmlBk/GnmiwBetGRJ8CniXIgUQbCYUg0G9NTPBSxKUvykN + + zKWXzTMRt/xkhL4UVH5XcBc4fEOzxYbi3akk/QIDAQABAoIBAACtU57WyhKPA7da + + toOK2+Wtnxum3evLvQtjDQj8+SO+WfSYv8zomUWDuFWRyXYqcY06vdjUMgT17jAu + + aCakv/Nkl//yefqbstGsbPcKi04804U9zlUuNndrLHV8vCwH4sxJcqFzktAYNhxx + + Yf9AB6DYHzYq2MYsRWVs4Vvp9wuwfaDvp/rJ5sndknid2i21ZLukrNXccmdhcOHq + + rDqv5GyyWgDtkxoOFLausInih4Y1IF83MtsqNdsXJ0USFv3jMyyHdX4nLutZvXWr + + e2c+08Em/cMYvnnRRnL1NDHz6NepQqHatLI8UdoZ673VmWXS95pJUHsn/wqlKpXD + + FBSHzHECgYEAxriXuwvlCom/eIALAfjVweHM5HDe/1OFN5E0dZuxbke59oqp2/rx + + zjrUVxAPh9EIwjVi/av5b5kf7TXJNfDb9vrKLZmxn1uohlqxSKmZeEmZPrlD04T6 + + EeNVH4HUdQPoVZM8xzUEKxuLlL2wgmfVZiayxg9Eh+9o57cbQmugPy0CgYEAuEp4 + + B82k8CvOq2IOTam6Nw/72uTxrvi0gBOsHuQ1iEHocB3zF7M7IMdnlHfAp+/4B9Qi + + n7JWs1Zn/L9O7MIHwDguYWs8kSGNWeUrmv2HE80U33yrTnA97pejy7480X9AWjfZ + + eSlKeAMvl/jC5ABjIUu4C/dfP3DDn9ktfbDonxECgYBdPjo70v36rt2/zdzcZQTv + + v2Kjge6wwWDNzP1ffdmIVHGGpFPFW39gdCw0Wd3frY69ic1UGACng6L+a/FotQaR + + YeXB9c7pZlmyCRYMcUAIuAgG7WlM86VfBVtouEOXUGkQ1lB7bH3zOC6LcWJEHjJ8 + + 
hwX1dy6B1i1UJI/O369GVQKBgF0Jy3s1CLXvh9DiGhJ/Q3GU8jNaLeGF0apvRA/8 + + akI18+DWXelzJMCxqss7gdnGvlNFd41j/X1ge/MPqgrBEiSquE+aUeN7Kt9dYRxI + + Li+C86y4Rcu9wqZ3i8vKaO3i7lwdof8XJ29bPGXewpVU1Lb0gSAZbM6cYUaL3/l6 + + nJXhAoGAZhDKzgaBYkOpsTDOsWFneBDM1+FjNDT0T8vGXBTod+8I06gVlcUq2i9Q + + ufLTXRPLqQWhnDldp0V2YI6wivyQ4ab2sRch4f8iY+N5aAPexrAd6wYxpakEGQ5W + + sg4iw5714ZTy6ZcvPxoa1IFBkOaLhfl05/Zw6jq35Y88gweZD8w= + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 131071 + seed: LCa0a2j/xo/5m0U8HTBBNBNCLXBkg7+g+YpeiGJm564= + zfec: + required: 1 + segmentSize: 131072 + total: 3 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:CHK:3idf2xqm5wnw2q3nbva5vmpaiq:7nmkh5q55omjloezibvyzvveq4gqeaivei5ypfm5vfuugwiz6hpa:1:3:131073 + format: + kind: chk + params: null + sample: + length: 131073 + seed: /N4rLtula/QIYB+3If6bXDONEO5CnqBPrlURto+/j7k= + zfec: + required: 1 + segmentSize: 131072 + total: 3 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:SSK:2cwnw732f6pd42kjvbin4zv5nq:fzrpbc322pmjemwlhwn4mv7qlfmnxey2wxmcmuksyudcjbomxjaq + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEogIBAAKCAQEA2C1mTlS/6c3YrjFVNax9bCyDp8WR+EVMdHQBNMrIxOsTkpXo + + 3fvN90vWuxcZNOY+j/XxLIsK551JdzC3PFUPICwBvBWhRNiC3P3AiIk2q9Ju/8fV + + zm3Aj/fmOMk4Yy9cTTmXgk9YEnpLnhJiLni8au3hZpVHnQmfTMKv6OrwKP9DopGg + + p31pfZgoUi1Yfc/wpOVpMyJUNMAgrUOm2s5nnoMcNsA/pxVdmS/Zob9q/78Xh4VA + + 5uYCBVj3V+XKBvNg8xlmkFDp3CrsgjhG10Nz5BpB0ucdXfnGaId26IGHoGrlpzLL + + C1G3bX0y7EHHjBV86G59pVdb4zV3TSxW0mMPTQIDAQABAoIBACVn1agAKiT+pVue + + 1auv6RPqr+071oIrG4ua9wp3fD32nzBiGCUxCPadfM5qtMXegTzPxad7d6uUH74s + + A8jAvxlGBBbTd1A+VoZ+se3uMDOS+fnwTiKmAwfmUUPKLaOb8lC6gmjd6dNoreTw + + MWTxJ0kpWDMz6WxW2eWiWmXnIR82msDgdVUnhdUOvLdD8HTC2HV+Vn9CFL68k+tq + + Iat+G1BEeFp1nDkLbnH2zDMLW8GUt6z24HwjFbHbVkgKa6t92Cg8l+pV1sNu4jt7 + + aU2+AoKYRnWSSLedBkNjSw+nasK3Tg1f5/6xq//hiCMPrN9OPL3fAkN5QLjRW+EV + + c9HbIYECgYEA8XWQQU+6k5/rrkNJh+UoeeH4GkgFUSlzq5H4b5iQdQwSzY/FrZbW + + 
1R/S1YkVqq0fjmZjCjTLItBpnbfCt3lhMQ1RTBi3fs6spMjY09aGdJeFJn/BGK+7 + + M/o0p386LOI5VHJtSKdfI97tctOTJ2jvrL8mVJ+oqaA3PJzHtz7nQ8kCgYEA5TIU + + d1N/Z4GlNnf77mEishSs7Ln6J/VTcNzZrzsF5fd9NMKoEPcO9OSFa+ZJKFOyMwkl + + haPY6WOv3b+LpW16/OvntHPbfirk0ruAwItdLva8F6vOYDOrEezMY4bXVAM58p/S + + qYWXfk0mXr1kq+Y3Jk4KHwh+HFE2pqHqPybJSWUCgYB4wOmWsA/H2jdcXAw+6QyX + + /7k7M39tOoS9be/Hp42+633PzbH3gTMJPLQM1FTAmXnpliy7ovFgBMh89rRrW0mO + + 5XEd1FKYGTXf5w4Ayw1M66XMPiHMfb3qXZvNlGP8pFo1cFBVAFclMkyfm03BbMpI + + IwRBV+NaiWR2bJW4f73aeQKBgB0o2/W6h9ERa4Wcik7vyMxDWSTdHHiM+8q2LnPe + + 3Ic/j4xw35UY+awqjtcFe3VIALoBheaUy/oVlsBtqESpwyX+lYId42UP7ADrnhvh + + Hz/kYFXao+0VZcRoDjDzbN3hczPtJY4v0vBcvG79RZuhNI8JCrycBf9wTbWxj+3s + + 71sJAoGABKJgNFvvI5DzXXkV8V7arT2jv08AlPvf3HqJ0V6Fw+VV1sfcXEGkRVKr + + VleFaNqo9zt9atAvSNUTidx4AWL/xlUzhR97lzdRQyhbJYa3T6mU1JOPD/zHJAnQ + + xiQGOW4ll8c+ht5wCademva3C6k+R9/pk11W6JjeTISMZ8W5ICs= + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 131073 + seed: /N4rLtula/QIYB+3If6bXDONEO5CnqBPrlURto+/j7k= + zfec: + required: 1 + segmentSize: 131072 + total: 3 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:MDMF:75ls7trk6lfklxbrush6ex24hu:tho6cve73atgkhgpvjd7446w6c4wlta5bszbfv6qhbs5ykjltwvq + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEogIBAAKCAQEAo3aCyqOOeNyiAhFyMMUjsC8yiJHP1N4AQtNJnaj6fG2p9oFH + + LLVqXIj+9X8wFQvU1APqfp0ZhdyJnCIOmiZCWGZxAdBAKMupEP7KcARD8r3pSeiK + + MeDYJx23rejrtKYjJeJAeocIj8x9vcCNXpimlN1uOLkHJk7FEvxn0qmCblfqGKPc + + xFBsBpBIPKVK9SZ4vOPa02sKz+CgoCLxRSjyLj9XgH5+FxU/O2LeOnrq0TVttvsW + + UXILD0P2IsIZcQgXszyPoCkdqFZ+GgzJA09ut/inlDtB2Z0kvNfrxaqOtTlClrm7 + + 7zYclePeL2mYP607xbqjQzfQAe0hrlLR1L9QQwIDAQABAoIBABSgpRL0PnYzBWtf + + Dzq8o0By8Zy5UHGWIuX2lWtvV6NcZV565DlDRWzwSW2VGMNhzwAtjv8I98pkJlC3 + + yCxHWAehbxuhpM8LxZskQ/XhxpSlxtWG95V2wGAjSGVfp2A7DOQVdTnhEMFbYUy7 + + ipDIO6aSe3vDTEssaoMhqz+yw10FctG1HANZLxStQl+SpOm+iNBUuSE/m4uz5OFy + + ENoaDxgWyar9OF2ZvLZc+hg/QaQs7pAP+z//QyGNVsq588aBA3Tq1J4tfTDBlyKl + + 
364z8g2RN5Ap2hW+j1C2uT0pUD8wjkaRPkdGtsieoutCX2PaCRcoQaHHZ39t7LIM + + YjcdT7ECgYEA4/R5aPJLWd3soy8YpMzRQ8EtzDK0yOeNe6RTFKfESkBReJuGBbH6 + + VS9nETazREJapY7y8tekppNSsN6oMCWtJX9EjcLELL4ms7o3KC5IeMI+TwUpGhov + + CZTiem5b40mXimteB2oREcIFA2EucTua/CRDqvmiuhLhcyZejk97jlECgYEAt5LW + + BaDblWaAeo1gRATyumg0vX3MTwXOR4iyz9yJylISMsSLPbSRUk8qLrinBdFftYwz + + GrDEXYJ26Wak6b1JAQMD9GY4hnbrHFcLB8JTofV7c9Aem1hKOP+KhuJFd/aibBBH + + CZKQUvUCi1E7fQA9iQX6qvDmwDxgvG+J783GbFMCgYAk2GI7bVZymyVhpv4jvRti + + CTp+0/9Wrd63inMHVqqqmcTRasn556+fzz6okJ/fO55tPjLUv7hUWGG4RvUGe0CG + + XBDXnRCabs3QpRu/OePq6PKrURk4p9zMfq0wvt/JWB7Pd9VF+4XwydyHlFCuasT9 + + Vls9qoX774tTUnNcK0q8UQKBgCscedTCjS8N7nhZgVUYEGUEmfYyd+vLAkG8cbnt + + IhL4qTtw+v5XzJUW8GIejWMJY7/AGDRZdRQ80m5H48zc3is1qRUZeIbjoJ18N6Pv + + 2DI982skYju7RVsTcFXzB7t/mW9ldzlhSTGiRqGvRxg5GTp3xAGnJ5nX1CQM0ckW + + e1XvAoGAfdS4gbE6hbLmXupDiOeT8/W2FL36QCtdXMf6TfI/n8CEsAiuKsxmU1PO + + dh5f3uzGlGSuk/aFxeDLcpKkG5PkaRS6Ltpy3+39dSy0qhSiZQt3cddW9lFj/YAv + + XConXKubaYCYzR4XfkFJTbbS3oHv3263xIaZlplH0cgb5EBO5v0= + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 131073 + seed: /N4rLtula/QIYB+3If6bXDONEO5CnqBPrlURto+/j7k= + zfec: + required: 1 + segmentSize: 131072 + total: 3 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:CHK:o7xdokumtcukutzgmmpfme4nnm:nn3s5gau5dychy6wlodwriuit4wyavfze6bo4icywcbhdb4ccxla:1:3:2097151 + format: + kind: chk + params: null + sample: + length: 2097151 + seed: uqWglk0zIPvAxqkiFARTyFE+okq4/QV3A0gEqWckgJY= + zfec: + required: 1 + segmentSize: 131072 + total: 3 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:SSK:saskbmnnhte42buehoshph4s5e:sgy7cr3nt57e6rap33epd7nx7u4zczj27mwcix4kfiuyyh6fhg6q + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEogIBAAKCAQEAwn8K/miyxzuWz3uOVkrgIDi+EAvbgh8KVfA3QBrwR22RONhl + + t5eTvdFj5piqIFfGzcxcAizKI2XNVH7iKHWoJJFGFegL404HgW5Oz11zPIahv7qJ + + a2KtCMaZF4WkKeirmcIryLjROV/j1nCsnatkYDc3BYUpdnGNHoOzDccoX/Aun7AQ + + 
IM/FavvJPTb5vPhWpely3yZluzO+rqyqRoZBJIJ/16vp+8AEAzLKoHiHvvXdG/FL + + 9XJr1NQ1Qai2orM5ibnda4cvCgSBH5QSRN7ZSkM4avQMB2ZA3iJuiOQJMOwC+7Rj + + fMpzXMkx+eVj8TDaeu29VHTdkts2j9UR+etj6QIDAQABAoIBAAeyYlt+BjEnNQkh + + 8RiOHv93b2IQLhAgrVaISo9xYXfaKKiQu7m/uFuHKUZrXTQpdRcY0r1NS3SKJ02E + + Nev29//2dckRJUNKB7cCCAFhx9kp1MXTGnQS9BkITu+k3MHB0OSlT/lCAxmbp1cJ + + n3Mf4LmEBdvkkKb8yGJgQNo3OuxM8coddIMZp9pLJ/Yin6AaKgzaM1SX5HUgVQz0 + + vfY1P38vkZRk1/CN0nitzUm8/PVvUncjF1jnaHglYvy7SA6XuR1e1rNWbNkJ6Dhi + + WZmRGZSUOG8F5WL+3Ib6sr1996TEX++8Lp7IdE4W1Wxw+kMr9xpySb8INg4hgcm0 + + nzwrKT8CgYEA3avzL8w/de2nZogn3tZBVxEgt3/Wx/ZBkMHdQwI3kiceG9HH3L/d + + D9KdWVJP0IDXrqC0ymm3sxnKLz/tct6l+JR2p5pI6uPqcl+HAQRFIGV8WLodH9Hn + + lifonyLFUmUDE7/rlMEKfwOxJGMFq1y3aiOBe7v9izxxnu8JNZg4rNcCgYEA4J29 + + HEpID4gWhQdsNezOYI96fM/EYhkqOm67E9z9CBShqZETvoYwJClzJQyW53ghcuPp + + MqEGcYHaFmFzuNindwp9SwWxvJOX7zvGZNEwPOX7I3q2/9zUinMuPVdXtGeFkIMW + + 5m1E9IqIewebyiKkJwKLYTMyEbL+pl0wpd2vnT8CgYA9D/HEd+n/TUDwwI7jFngX + + SNOPWLrMiGxVOOH/ZGv0aawkk6wPhhaaFjVb9o2f7O38364NmAOPZYpJa724B9cG + + W7c3wgtWEQRzDxd8UzXLj8kqE9KUAlleBo5Qz941LTgkx5hYeLiwdk7krBZStw6b + + QT6Y3BcitLrDwiryRYVPLwKBgGNUnmrCVre3oO3XaH04adO935cOcnRHWKtaiJSy + + J5vJM+y+4ZJh2SxEwEzkEl/ueixKqbfgCe9sUzuOgRR/ix9TnjDtJbqVMp1zO7sd + + 300vDy6TeBYSXFOVuB8cXwbCuQg9UIU6UUIreUufA8ASLbGqqGSltUCqfX6ou3i+ + + XokVAoGAC1g8mfAZ2jAqhm0+ZDnETP7TduBqGMTNBtrNyF5L7SpYjfm5Z3WdoTQe + + OhwKIGVf0gtfQoIllKNtDymxpM4f5rmB0AvnSY4Nx3OriAVfXqXrYA0Mk2OfI3qa + + IPDclojMdyMF5Ch4Bf4WQvbE/sPbXFrFnlt1FXUIv6KkUYzToN8= + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 2097151 + seed: uqWglk0zIPvAxqkiFARTyFE+okq4/QV3A0gEqWckgJY= + zfec: + required: 1 + segmentSize: 131072 + total: 3 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:MDMF:drwxx3brbk5snzvxum7edrm5mu:io3zsepo4wtx5zvsgabwopzrwe7ym6qymgfpt7bdzayyj73orqfa + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEAyiq2CDqbIfT1f7icI+VESHA6R/j5AthU06WgxAvb7xHe8XHn + + 
z6lN8YvL77zK8Jg+B36rZbw2hIhH2tQVWUx7EbXWgSFUJXuYT1DSgM4TDhKQRwwn + + iGqcLjShxhy4bn1Bgr+JCJHdPZhhszn+prKCPK9X2AtLGbBo3FEIbbJTxJFNlEhD + + hP+mUzHaLjCkDE2CaBABKexSYE8aLFxbOuE7IaRwRbJaxx2Wm9rMH8BXM63Dabve + + 6eqjiRen6taW1IIhFI4Cfcno+Lp+cU8AR7c2Rryxe/G4SzaXCGNYmFumrzx7SQv3 + + RndTofKUfsnixPwZ0dsCi3jI5xLVAxt2KoGvDQIDAQABAoIBAAPiNGR1OtUlWnq8 + + IUIzbFRrI83wEQqtweRUBsbvzf/n0VOJq+XpJoFuVfKzxKF74MEruAlCZP3eFgdI + + Zj5IVbhvfbHnHPaWmeIVtzExZrSJ2NSvB4M7eV1JGNs/J3Tshl6bR7a9szVIXCQc + + iCJzRJPAtNxVGuYIUgSYiSPgLIc+FSo0Z4gp4fp8zHDK7ZBFF2eKQAJFA1n1H3GB + + qpBWa5ig0IIAJGjaphQqPD14pWSDAIxGPeESGnzJbiSKau+5gEcwTwuMSREdoGlK + + FOQOFKCw37ILB75F/h7KzJjaFzC6LtiZZuZTavq78HDhJVSILW/nepJV455XQZso + + UgLRhe8CgYEAzmJ/1YEa9zt5xigz5q89h6BLEIf7kI5609dUBUleVvLjFq4Ys32a + + fi8LnijUl12EcST/C6ni4tgiw3Sq/l3lZM68H+26pLnK8BVdHMF6cFwR2Xo22lUK + + xmbD/nsfmIohTJVMpMQC8ClQgOUpnQJvzf3plvlol38lGwOC9rbj0f8CgYEA+sSg + + +uzi1TLjGYa+x+WnRRVAF8Oe0yAXA7wY0MrnKQsQPBcmuoj3abi5mv/+JfHgXzfm + + QJeun0uHcDHjyAQ2tDvAna6sc0DuCopE7Nuvn9IiDoL0dtIttK0qsflmf42WVo/4 + + pDz03rpuTFaRbMv6eZAzRNUgkkZ1FcUsWR3qpvMCgYAbJUGDJ5QQaLY/phINiYci + + S6cT6Y7hGJx3OJ9Igrnx3ciYtxVwplinuDBjASPVNOuyphcVxaaeB6eq5bGH+3ms + + pLSBzpb6C6Xxph21Jo2gMbv3SufkF8NvDR1CX5dsTN7MX+bQ1Sc9x3FbQskSabui + + 8H7E6NEk/Ag5YWDcannUqQKBgQDtavx+lYitEWCx6kD2QRf88AGefjcA7IDdqFhW + + VcRFt5PHUKP6N3MHRT104qlcg4RKokH9JZ7OclPohVODK3ofafMTVy0ucWrtz7sy + + BUxhpDFaS+HoHVXomYqytc21NfgAPI7L8Gpl9Vw4Kj3FI9og/cWMhbwwwURZODSk + + qw5ewQKBgQDIOuJpbnGn18SSI0zat3hz6pFpKsyvk9AcMyLiVD6QJCwbp+fUXe7E + + 29Zck+c0djHBEBWivHbO75dHqWp9ERW0riwxSu7qUIgW73yxKJK4tI9nXa/yznlw + + MULnyIHM/0c07fKut3vRS79Jm+6P+bjUh6KMCIBbZJO1nE+96c7S2w== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 2097151 + seed: uqWglk0zIPvAxqkiFARTyFE+okq4/QV3A0gEqWckgJY= + zfec: + required: 1 + segmentSize: 131072 + total: 3 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:CHK:vws2xrl2nch2hsptqb36yqqu3e:fhujsv5rkhyiqstrktjvfy5235c7gcwekyd3gux3bv2glsgstjga:1:3:2097153 + format: 
+ kind: chk + params: null + sample: + length: 2097153 + seed: BTBX/ampNfLU+ox7xipBGiaSbgC0kcB8Gy7BkJB4oKI= + zfec: + required: 1 + segmentSize: 131072 + total: 3 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:SSK:62g6yna52dvu3dd2ccmtx3uvhi:3jrudt5drwfjwqbh6yfph77rfgmtpxflnijmdqzp4mkrijq2ksnq + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEArbOweiMtfKQfeHVWoAU+zw25AZs/mD6OuXicSRxSZjnCapUR + + tekXCsB2fM3CjP38HV7ofiFKoYUSHoD9VJ3mXyFmOynosWWjkVyeXc2o7t8z8rsj + + gWKuHwyFMuPZo8EEEx8dWSTMMprQkqV4tfd3xZelgNMBc0TLc0hvntzlT4g+CzT5 + + 5H0VKvRHCDVtZnkw3hNmWuJNiSBkBRBBfoyJLa0HizR/o0kQBI1gkXBmSva+++Zk + + 5pe07cCyd5cYVA+JD1Fvb0AcyszctyOcIFlpW8Ysthjd/UhXny3G9zHNuNqB84Xy + + 1ykwnMIaZq0/r0GfS8BF1VlVqYJlYhE4lF/RBwIDAQABAoIBABoKDGQa3uhG8ELO + + EEjX0HUYoQnZHJz1j87FAmTBXqbddMQmiaukACTH+ls2OzqInqFGh4LU+cuh17gD + + 7TYgn5bWOm2XGD9ztaQGZuU3/eGlSzPRkv6D7QdRiKw61PcD6dj1+p/Q8N2LMMYz + + ERfyO52+4Hwh5Z9Cil9DVhxSD/wueIM5zGWm6vOqH3mBhNK31D9/QMiWUdVFb8UC + + uESDRYcEt3R/r/AN8hPMptyQRoFXJLiZLt5ca9j4j4E3bNq0JQ2qfdn0nTjpNEQ6 + + zgacohbJ5ute31dBOf8Kj4VEw0zQDzm/vNoCKhj5WFjG378ydL9iMMxe9mrl+/oM + + c1llBF0CgYEAxOn0k2pDXC/lqW1BR4vfrzxH0xJHdl0ro5qmxwg5wgf/pxbbA+od + + 3XhrW77Qj4kz+N2t56tMgr+YaVAK/iyWA6c+icG4HLEirKJWA6wU6ZCPHCpY5e4q + + w3tEUH2WnwlrGo8AGdzd+8LaF4GyyP09XqHtcamyErhmmZ1loPhcO/0CgYEA4dKx + + lwyvdbYsMzkVJ2XskJ6XE1it9KPgtmXkMCCvW8U+7v7QecNBhm64o47pMDwR6IiQ + + IkBatSxV86qQsw4boURIbnQ7bSZAnK1DNie4WED+jc8tt4MERT6r3JkaIbubi0Ck + + lFzG5dDXACRovNBQ43k9xrktREY9h8isBltlNlMCgYBOlfwo1PDbGrZyXor97cGw + + osMbZqEkiNyAp5jFt++tExohagqwTj/rAkL+U3HSxvP57yaXXZLkX2iJJwusEskv + + 3hAkVC6RLNRkx0jCoGucJzgmCnR+FwX0C/7gjK6O++hFqiplJ/NjpYj6dqWOdxqF + + 6OPlR88sj3FK/zju/A97VQKBgB2kxdE5RhMirdyvgppgY9R8LQLKIlO563amG3VB + + 5SMb2m4PHxjMy940zKIT0YKWcBdhTeJhJkcgIcxRuJr4oCHkT8nIEkD6w4KNsAP8 + + 5NMY/RFqf+rWFQpt9quHoYmKEhoOi0w6fZWPe5m2LdWTVvr1YGmkx09uFQetDP/s + + oXWnAoGBAKbOfQ3zJXUQSrBlWJ+mQbS5agy/bT0DRdkaHuYmXoFZ0nokQ6SeUeTp + + 
YcLDmXiXYzXmsS+I48myk578dACfRUjOxlifK/Clzq7ntc+FfGx2cp2vHBIbOiow + + iZswZxsmQpafxMj+NUe2GfxDmtw/Lwt9BbpMqSsBYAsnpaZcWWWK + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 2097153 + seed: BTBX/ampNfLU+ox7xipBGiaSbgC0kcB8Gy7BkJB4oKI= + zfec: + required: 1 + segmentSize: 131072 + total: 3 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:MDMF:6ge2un5djzopz4fgfmi34eajbm:cqgc464puzka2edig5ofmwvciu2urojqsrdnt5jen4zvsdnkfuja + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEAwzTPdh1mTkZYtu3zkzFo6oHV9KW+xbdnCKojoA3Vlktcy2br + + KJZBkjATEcO/S4YRGRdsn57efyKpMZTMigTt1DyNHyoTYULNV1X6Pnpujz3zHgkb + + FCPwUTp12C0XhM2JtOY9Y5JO1dkECFzd3GLMHCGSP/svLa0Xwh9t54wrdkvl2IKr + + 5VvGrb/ir6E6nFi6kL3LomJ6iuTHiLx5MX7CYQieXdy99OCUbcXVOUO75ZIPG9hW + + /a2cXIrKdgMVh646WV3QMgC6W+zdAHUvB63UyWCRqC2eUicu0v9VLnTo4+7WfOt+ + + o+eUtZ3s/wuGhEmJ/DyFGZwrUrdEVsqM4biyJQIDAQABAoIBACkNVaXy592FSMnr + + v+JQLU7IEE1bgAPHnrkBQu25ixYI8lJqagEGnHKYfqIpRvUklDrxJKxq9kLJcMiX + + EO7ju3p7Y3hO2nWFXXbFA5QZHmAseJDz/EhfiH4kq7zTOtN4gEHVe8qRbdfmREVX + + 9maPNnqiCsY+1nymHs1525yq60b8r2q/mYyXCon6lyIUtuzl9fkHQdTvEBnS1zaZ + + P2f7NHtA50qgFWeRiQWa68TNzv153toKDwo8sapJ/Rw5IdTOibttC75rxNq7t/3R + + 51tzyw6+9ypoLLzRFWFo4bveI9zMxXLx1+2hy3vnt3fAKAL5pEHS439sDJ/1aq5z + + jcNXXW8CgYEA/Z6Toz0rXojny/Gg6ya0JMhJX9KTtA3oLkQmieEjF+xVmO/TlvO1 + + /+thSmAcj0Pd7mmt8hueNss+VtiUMoRiveoYDfbp/WFnqbUnGLCW46Nj0FSBFsMe + + 8vlvkzHejA0Fp60T+bivnLseYHX/QbaPC+bUZYRhYY1Qm37iY5NT4PsCgYEAxQnf + + WgJBDAY/9qIkgvRi+U3g+yobyJ35yLUvBiDOTDXyv6xwOHFnDAg/1SjmdyMYOksB + + dtRgNxq1Uuk4kxbA/Hokf5F/AlNcF2BVQ0+qmRimi1fUCMoXp/PIl6a4hph4j/hM + + 7fmz0d6iElX1AkcEA7Um8LR4gBY3iLP619UEj18CgYAIa7t7MAzAlssbempdZGuW + + zQ+inttIny2WW6zr5w3DPZWZ/lyIJo9kb+xLC+Xm29oCkH+2CjS2nQj02TwScVLV + + +2/RBuG+B/3pJJqntzVLWaF2yVd/6fqdFqsduAornEMTzitbn0Y1bgEUMtbG18jo + + HEHxHPQeyRJkF1Js+/dNAQKBgBNnc31zt2AtxWLOePYEhzKx/rP9Y5sQI6cmYKkj + + 1e0favaBTtPgJxvCPDcLvhaBeENVW6GOLKOAl9bAbPffR8YVaT6+31klSG5s6Dim + + 
wdAt40jZr2HmNQovMdPtcUKgBU94TmspKhJC8IcJvAUrZTPQRTNzMmK6zWFDCDL1 + + IWvVAoGBAIxc0Zx74SEjrXDFfALloWGR2wisDA6XXwqdWzThVNwRrTgr6u8V9icW + + iR5gFi4Akp4pmbYVUIKKgEOmXbOdtp3wkcstFFi128uECJEuveQeK7WqEbJZEFSR + + BtHC/5tsHyVRg/eA/yD0sU38mzoeoU4zpMJDy3SJnHrSMOaVkuk5 + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 2097153 + seed: BTBX/ampNfLU+ox7xipBGiaSbgC0kcB8Gy7BkJB4oKI= + zfec: + required: 1 + segmentSize: 131072 + total: 3 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:CHK:jvi4htauhe3qxdruqqxptzg4hu:r5gy5gjgwoqxeffc2g5gsgm274i3z7jo4d5klipof72zw6dmcmcq:1:3:4194304 + format: + kind: chk + params: null + sample: + length: 4194304 + seed: 2NVWOXvtwcAFf0J/g0fDC+ypQbnCU7awZoCxcnhpkVM= + zfec: + required: 1 + segmentSize: 131072 + total: 3 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:SSK:m7vqkjrvlu2kidmivmjwfbmtxy:4t35eoohvrpaomfflyl2pjxvpjb624ytpop7p33zqshtmxz25rta + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEAlsm5+iJWoZQm5tDEpkgBP5OnGXNlk7p9jgc+z7GGDnvDBzSZ + + i1tLXjQJnQc0aMfJo4tRn48NbdhTWkFYSRfvq1oolO4xGEXxOgxmcIxK2FDyiexw + + UMI7Tiq5IUsCju8vI5AkYnLyPuPZLS9g2CYkIFXGeE2C2ptCXtBWMz8wH4ErzlAz + + Wq+B/erS4JYLw3Le+QCXjZhvDX9P8o+8+cKNb0+gJQeX2pa7E5JBC2V9t9ayMrnC + + q8M2K6pNUpl2prJ9VeDdoY0RBgA2ZcalViN0BYZWuBek83QmfB1Qk7/I5LATPTg8 + + SssoUGalhu+TDLZBVdeF/JwBDzqW7A+kc1NJiwIDAQABAoIBAAkiWzO91MWg9eJR + + jzgLcJfrV9oA1YxnZaeu5K0sMdS6xouvMgXxF9WzDUoH37LhN2PC0sT5o3SeAB6d + + ir2Sx5/3rDGpZCv2QLClg6cZuIb2EFsuiXc67ODFUcWkh5klABQFbU7Ra79HiiZk + + kldFqDaVO5qaB206roCT2kTsdPvTfXT1LBCKa6Hr/LWeoXFDNt5xxB6y+VaFD9LZ + + O+ykSIcxYSX6AYtglnSir8/EP+pmAM7XieWj3MeEgcFBlRjoT5vUaJgybAB9bR8s + + Tx9t9jQ/fICIDTWwZbpxp2fut438MFutKqnP9S72QSuhMCdosV8T05EL2lYUInzV + + Xi4gxdECgYEA1K1tlFXeUsiZVF46pC2+paPJwO7KmtfyVX/iKi3GyYfEQpR6ISth + + 75PuwUXLww473ydqHpRyYShZ8uZWy86/D+dsnul+mt25qNAnMLKuddabVy+2NX2s + + 3kUdqRE7DMH77RKxsfOKgmMkwbPz4uXasnblQ2GLu6aJbbxP2dLQ2+cCgYEAtYDs + + 
32RZv+Mw0yMXwd5mMLbNLYOmfdVxvnvlPMid7j1QRsdrD2vgdgsbie5ZpyWMLF0u + + 1ezmqjxdCfZtVS1+jMLtMF1TtELAMbicFVk7HDgYN6HhYzmTeY4XZNxFkvMhnsJZ + + xFAeiV2fxQIpfES5M9W2YLpPokURE3UklBs5kL0CgYAaMrbh4+X8GpvQqb7dhIkM + + jG2I56Fri5hdceBhQ7xODPxfGz0kItzwjy+E/V0JTRKQ/aDz3WNtlnPmGPuuJWyh + + v+dAeBDRcOiy49lABXK6L1J5XfY7Bp0p0CfEMMwuWSL4ZCohepegUigv+EPdumTD + + QSQitbxpxCz/qIfJlE+IFwKBgCwWX/M3XfGVTvPKT2gBDJOCo74Nf3CLWzCoyZsF + + JA+NhyVaJTA+xOwHcK4FXnOSVEUmcUz3WWQ6e2MDH7WT8mxgoNqhoMZlfGfXbtpk + + rU4Cdid1Q9klUCQzlo0iUCgMtLrqfIGJ8JDvU/K3vrn3u4DSxZUjTFqfKjGuv67W + + GhqFAoGBAJEWr/O/5rWmYdEzllLPZQ9e2jgHXRMrlu2LozHqylEyJgmiZOvqEBV3 + + pj6hGSwX9Jp0cdqBaJEoFWgQh/ZYPnK9gydWYy/lGfIUXwIEfHJ2bG6tZiMCLw8q + + 0YuPnzqa1EW/R0vZW6J1BQHJBo6bS/Xj6NxTTGQh3S0dblebt8Op + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 4194304 + seed: 2NVWOXvtwcAFf0J/g0fDC+ypQbnCU7awZoCxcnhpkVM= + zfec: + required: 1 + segmentSize: 131072 + total: 3 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:MDMF:zlfyvqls7elrevid5czlhls7gq:bt3bw33shbxg3suboavalydnot6zm3ftie6br72swiiwmwhn4zua + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEA8XlSFU/GAVeRwcn+hm/CmqMu9fmkYi+Zll0SBiQjY7vCtd9I + + 8eZiOv4J84wHhvNKOho35oAvZqCrAWfZ5vEM9fLW33Sr+TzmeXeKkl13WWlP8u/J + + oXTwYM4oiqa7F3OHwzGrbrAVF09ByG+9ukwZhhBf2LHNx7/H851WMJ1TQ9CES9r7 + + w+wRwmxlFuhA9pqhImo4upleKq7PfymzEovIWEVeuaCNrfoU9I6f1HXKcVcmLauE + + 7A7Jtu89sdZC0jeXLv4q1YuR1j2GaZJQv5MuH8kIpQyGhV0N6rBZ3FZAlc6+SREA + + muUveyC6lUDCkznoOeaDYZmMu++l9th5k0AJsQIDAQABAoIBAAEJDSIMQoQU9QoI + + rK+04Pe6xWPGmz7Uh2sOoRono4M09ePDvlNTMo6gMji6G/onJuVS4XR6jjl5bOJH + + qLaFyBFx5hv1KxuZeD+DFLQF9JIMkowvHQU1NCamG6RkjJ7QHv/mQZ7q4FxGObj7 + + Sav30ZAyl8adFI3Ls7bGsOzqb8X1p8magM+yerypB2PktePm7P1Jy+x10kAdc/vv + + +h6KR32NPTo6QNNoIPUGsn+JDFdP+cq3zHVCZRSZhwsLAaVpF9Gx1qZjaVs9K8zl + + yFUomMfdlWFoGHbBlqx3GW2DH7Rxzt/qVLoU/ShKiZTu7mWeHSiSUVyrHlzVkptD + + O4Tw5tUCgYEA/jXZa1W3mY2mq72xwCasHlyj7fjme0QmVSbmzVI3CdbF8jDP/8OY + + 
ZpFi1hc5ub3wnEYBrCPyxtUXBgPwbuHrk8j8e8TFVoPIUTIGkvZcc7o3LycRwIAf + + Q6HXcVUzT+PVzLxIcoqkKDeyqCZthqSmpvzKAVl2XoksjBRAfT2v0GsCgYEA8yyE + + X/aYSZ3BH3RzvUsWsfOFamimZTxws2JRbBzkqMuJpYEWp7v12RCU/XTD6+5UORok + + rP/Q/z43SkqM1HhHP7hPt/ge1ZrcHmDsGdqWcRfM163UoM2e4QLnlREcXYBPM2Vr + + 9KJHMAEz4bgmv4gTANeNiVUjJvP1t6TKSoZgJVMCgYAzAd8UWGi0mOWehDuMULYs + + iW4jK9QjW7NNVrbs79g3Uy74v66cpUSJIBby2kos6N3EnY9sWPI3zz4FaPjvZsl8 + + J9Hxi7QE/gBNunnzNxep6O11uqMnOw4K5ghypyPand6ibA0lXog9wZ9Jehxz7cm6 + + q/JkfuzvXxrfKJkgCCak7QKBgA/1cvaNS29BYCQ9Uz8wB1xEXBQgrBLmxYqwQCG1 + + P7hoKy9mamM1ravCL9T2bck1Cef5dEC6RTALGDvS6q0i+6IN6YVsTjG8iQehWr1T + + oB3p7vKUoOiwteWUeDhLOC5WtlvsIwqZ/8wBuDLvD/Pv7TdX7hz+LmFnD1AvC2ua + + qAKrAoGBAJyIAekG9AN3qgHNBdGx/g8DWaviWbINlL8A/7PV11OEXZ9NxhQfxYoD + + fSzPe0tpaHMmS5g5G9+9tgkzrPIy4oD1EPwV/vGOwX+r3lL6RaNZTdpa5YT/yL+D + + UF2ApoYl77aXQk3OmocC6J3nq7Z7WDjWAcHCrPJahFPnWMzKWdHg + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 4194304 + seed: 2NVWOXvtwcAFf0J/g0fDC+ypQbnCU7awZoCxcnhpkVM= + zfec: + required: 1 + segmentSize: 131072 + total: 3 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:CHK:5anac46dpfsjumutzsoovvtlpe:yuc3gqe7m6s6xp7k3uodcg2g6k2bac32gt5amu7gdx7k43qnudxa:1:3:8388607 + format: + kind: chk + params: null + sample: + length: 8388607 + seed: w6uP8Tcg6K2QR905Rms8iXTlksL6OD1KOWBxTK7wxPI= + zfec: + required: 1 + segmentSize: 131072 + total: 3 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:SSK:dhq6qvx4nkx22cugbnilxv463q:hzssbukng6i6a3zkco6dftck77in7go7gsjf2q7trdisy6htwusa + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEAwVi6IPmkilRG5OvCNOPDKwHWrNKki412A//p643I2qdy1WqR + + N405gGIlMmWpYnpEY5gpD0F+TKNjLEjpIWY1GLbzHsBOkgQR/6KcIBaNjBJ5GZqH + + AwqHNjnHwmyOj+GIY0lLkqviXUJWf5Vbp7ngeWXqJ8i7eX8Q0PK+deWt7wSwQHpG + + mc2UHjmn8zbp+5zBkvlVsxBPdTB2r9Yj8dShoUGZw3hDpfWEkUhVTYS8awHjSDBc + + BPzFXzH+LeK7RooBXvhg16Ar7a3YQh3bBPmtj/yuvuFNSwrItjQRCRiXoBARXy9f + + 
QytfelzIiQcfPLAmZ79bwEPWHFvRUGYuhhj+TQIDAQABAoIBAB2qBkye4K7UhVgE + + D/TbaQtFKfG0E8jReGTes74PL9zUShsSUZtrUIIxDLHxxQ414h5BrzMHAmCxxLp7 + + qUEVlFnpX9289ZETjMti4H9P1oHCJ9BU1BgUWnBoZwyeaTUMDkfla/Hh60YRsoG6 + + oahXLWiSyV03QARBCYx1YeFmzsvX8PLJmD6mwYoO0e1mABE5ase59vC4GT+X82LR + + Rwm6gb2JE8Ko83lleAKnWOJ1ZnBXjh1V13TYDgsX3JDTYK+PrEe2/YDFm3LFf4o4 + + oWZAMnG4kc70e/MxEO4iZ4S4+VWB+8x4htSn+1VLnwXSIbVqbazcgtL7whbvMfY5 + + 0WVU1WUCgYEA00wfFvlPBXpvYsYI6Hw3atiSGtAh8RrkVVIgKJTF5Q0frK9v5EjC + + tSUkXj/Y0ssq/oIUZavKmK4ALD24rg3HV1ewe8Ih/M5MnBie/fPp7Fa1b+7HQ8m7 + + 5n9GP4IAvH5DASxTQadQ4yneLudKWnbG/fzFZm8xDJjltoPjnpvNXncCgYEA6kBk + + LC7+aYyjSpgJfAFbTrRgL6QlVmQ/RgjI9Afn0mWSJWcDm6t9RTJ27OZapaCadyvm + + OLsGkIRWKP5l2BacBR1FeHpPw9MaZb7QXt1WT+dL/i6gi2j+U0BxRvIjy5bVGc+b + + y80fjwvA91az8i4DWpeT2KlhEyBJmnHGumdzZlsCgYEAzouHjI6R9znyrewFgzUB + + evlPANTZiPUPpHOOKf0b4UZN4yDvUIjrg+VVwqfIzG17jqQbSjN+7HaShqyi3cls + + Re3a/28KiDQlYSUULgyDatprq4oO0S3e9ncNdUEgdSE7YGcyz2e9wwEHRnQjE4Eu + + DdNMJ1Cj8rt3OU19cGq+ewsCgYEApIJOFz11jBipgLRfTMgDILXKKwsC8bX7Parj + + vYVjx71vMncy8HsxwYvcOyjXFiRA9lpNFyA5TvqxK57lVSkjru/Mnvx+0g6KJlQo + + L8cPW5QbKUoDk4RLv5mtM97PRqYJyFOlnS3T8PiXLtykCPtJfbCfsvPY6b6uEhm/ + + L5+BSqsCgYBsQScpNkJMelNcQXA8XB3d2jD3HN5s8qBFi4rPaCsrL+/Gq+qu78x7 + + zcv+7hi/1R0G0dnPLeZzSUjvV1snksxnAjl8BpQ2zfkBxRPRsfVuNNgoDBPHZCT+ + + Aag+rzAJ8Taa+D8ES3pkoDJ2QiroClMmaCZbpy5SUMt0QGldlMAnXA== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 8388607 + seed: w6uP8Tcg6K2QR905Rms8iXTlksL6OD1KOWBxTK7wxPI= + zfec: + required: 1 + segmentSize: 131072 + total: 3 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:MDMF:3b4gpaaarpdku2s24mhbrzfmyy:gpvx3x6yrifjb3yrzxkzekewe7hvuyjrnqz3t4jh3bvby3bwsuyq + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEAw6dMaIBLZvPlxEoB+56rygwktOgnaWOp+NS/v2vBMyb1Ih06 + + qT2JIXlYUHzPqAn/a9XCde3Fl+srlXenNoH3WUWzKvvbfXf4JutygPoJfiAUsZCb + + w/MtxO/2Tp0U4KPDML/Lx/uE8e79OHwByUvwdgM+DED4vbqFAhRJGGU6v2oy9oLs + + 
No5y20W479nGhOF8jnxet0FiWwX7ONZtFimqBB77zg5rpBSf7Yo7JTGaBZtL41W7 + + YsyEklthyBS+H2DqQXiMONYyBgTwuSRBUYbBstgm0/gNGRY+aIJnnVtq/PedB9a5 + + 5m23qm7q9tXui0UaIGp+wo4/aG5W8D4xd9yy0QIDAQABAoIBAAmQf135bZY0FJae + + po+qH0xCgTXdxnVys4+wOMpvBlQNkryu8Jvu3+oEwFI4877LdFLNcZLpw3fMfYYy + + QfiabGDPFTXj94Qf7f/bES8oaffiMhjHELJIzEM9Fs2bhLaBkuxqZ9gYHdTk4biV + + 6VFxlqjyOiGHuJEv0co6+yLH6hIK1U7WJgsKMkdKKGQzEQt8mAqz/PaGZN9t+KO4 + + ovUSDyoCzyh4xiwK0lzhnoI8051Wg1s3x37mwimqbT9v3zU2IDWPaPE9is1xG461 + + z+ViexDRQSz8G2Cxm5LfEhc89wi/k9vgUb9nB5f2n8eQP2FQvNIOcA6VqI/bkhNh + + ja92AQUCgYEA8V+pcG040UqLXPZHszQPRbj4LtkDBwBqhX1HqzlQMHYHiiLg/1r4 + + Woe2hPL2nPxnCLxxBN+sDz73wpmOBN5fk3Lhz69vd4GvgX/5sChXFQy3o/P3xGCI + + rcepn35M4AJrL780C5gy/WK522/4zp33sPSBV8lOcAaSdkND1NUSALUCgYEAz4Jl + + oQH9K1hRtRoTbKB069aSneXtD2/f9D+9m6u+6D1eSZfYc3jLiIXZ1VTIeXT81Oww + + /IkjsHuD1LraLJyw63AcKPAK0BfJwhnLIWBKoHWZJoEMvY9Xqq3wnef0+Jfa8kBp + + LaiZ77fRhyGzPQICOmIBtuIZYiYjZi7gG3ibJy0CgYEA18yAL4znDG9KM/3YUsaL + + lPlvomrRAxSDJ/++8L2YDQupZ/4RDRxnCIFnVGvowqgC8lOP9ByJt0PDvU8OIxox + + dyFx8/3UeZMPt4cUVENsv9wT31iCvybTbBMjev4veuOOsyyOOoODqvj2U9NDLm8b + + ATFI5pSLNSsbDPLMlV897jUCgYEAyp4Qjf5bLg+2+JbVkKO8huuljfgMWZ5rlxsG + + ERLJ/gquHj3eZCH22v+Xi+6VMcNBfMaDrpJZ/uEcAIPStOzq83ksheydIkOYBacZ + + 6SUUuUkambY4sn9copPk9sqfMH1WlGTATozqgl+Cf+gwE8n6UvePpPtwvZ1vwz7S + + JQDGvqECgYB99QTs804jni1La1QdCdbumonHWEfMrM60csGkrGOjLiyR0xUwQyyr + + cokRNpIAp+kNNSgM5QRK7WKXjdKrrmPndnV+FUr4GN+WDJKs8lumCKHveE7Cn0YG + + I4hG3f+HgjdN6HXLN1wgCgP8yufvRwAbsIYkm9Ok2hRrfndxErdxHg== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 8388607 + seed: w6uP8Tcg6K2QR905Rms8iXTlksL6OD1KOWBxTK7wxPI= + zfec: + required: 1 + segmentSize: 131072 + total: 3 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:CHK:fb6g3fkfbtlwzheujzvc2dn7dq:644je55ri6q5halshuxfesjz2afhqtebetuzqqbfecp6yhqwbb5q:1:3:8388609 + format: + kind: chk + params: null + sample: + length: 8388609 + seed: yPi3JHKKbWaEEG5eZOlM6BHJll0Z3UTdBzz4bPQ7wjg= + zfec: + required: 1 + 
segmentSize: 131072 + total: 3 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:SSK:nrfalxtfzw2mju7hd5xuvvqvhu:neuq5ftw4keuikytduj7ql5xmo3pzp4qpdsn2zt3magaivpzyrja + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEogIBAAKCAQEAiPrrD/TaDA1eRnPu7gUE2shwo5Knp/+WbuH9TdEkY0HUa+G8 + + j1ohiQsILqaE+EdJRhmpHb7RE9ZtoosSTss43tlfvyK+XevfWy8XzUExXU8OhEEA + + +OYCvGPF1Bhmp1AXJ5HHLzieTiuY7dXUfsbz34D3AqMPRHUx/mN84D07rdFT4pDM + + At1y+vMb2Gjjb/iJP52YWlcWl6D+381JsYASccx3lWuSOMz64gtfaNCzyE3TpQ9S + + 47OCIf36VD8B5AW6E4qgenZ6tV8Auc+QKHmzFkxfTMeGOZOGNL7F0xxoKp8FlNZq + + XFkN9syy6+gTdP4ziWtzKl1Ry0sqcU230MCY5QIDAQABAoIBAAdIdAwOpDNE2LDd + + RCRinu30/0wrJX/groJpwJqNFqayXtV3lJt4mtTbAc3dK6+5tpMkFSJQPXSVD5I3 + + W7tVwcnTe+xBMCb1PhRujhDrOPExnV95x0/0hsu2cFPFElwt/XUsoo8Hrx8P4Vsv + + 5dLxyBCnnjqFKfAlXQmeB4sypkQpC6YpiaI80hkNB9a2aXZr/vc0IW5NUTPKe1h9 + + Hya4OkFs0GMuapsvE2oAhpESU7jBuAvA3t+mtFvStKzimYRS+mDKMHp68LWsGs15 + + KHTiBSCWIuWx5vs+r87UmD/4SjDE0ANiIx1bXxitS+NU+R1vfOS5xFDwKheeO/Xn + + BjIYKNcCgYEAv5JRvXmS5jY8dfY1SfzOcJYx4uGeLx96+8gUOpRNvSprZM9zliTb + + RCJu1ums+nozq3DDzzF6er7bCMHxQ501GH5LiY9tXQEkgYvqn7oFLs867N9KuEnI + + OyXTH0QKk0Pnmc5UaYOSMWtEshV0sS5wtTg7xFnDK55EZ3SNgk72m48CgYEAtwx1 + + SpOLGpJOCY/mE9wwt3xEd4r1/iBXJvvq5Z28+C7yJm0o94P4SDW3TquFrBMcjywJ + + MzXMcS3w1qODA9HkAw4+XzLIhgVHD8u0+DdRp8FDXq1Ym79BiVJC42LD8YyuU7M/ + + g8SAJtrGTK8nyfuSIm3v8jT2Dj3A0GwFATnDmksCgYAX330ONqNGywV30cnMQZPc + + Ves7kdAroSmrTMCwmCCj7TBa7LtDv64PbJcRcydaQ3ZC7BeKr2jK+RPEoJ6XRXUD + + a2Gwb846I9VPy4behsj0j2CRejYOhytLq6gGom0K8xBei2bbi0jhnbN+2cuj9NyY + + yLwx+Nmoit2NYunrjjmPIwKBgBY+cVJqs5C7Driiv/bR3yms9DUCsfn7vBuEqXrV + + vEz8h3ib80qAwv8jZ+8rcMcEW4gaddO/SeTHDGlI3XbtXqPwayvuY+fFZGlK++bd + + 8hJMrf8nWYkzqKcjU/WF3wHPcq/BLIq6qkgOdeKDtnYZGB0O9wWb4frBDllFhyYq + + +tYZAoGAHwFRUIo8mJXHw1HoHrYcCkfd541hl6IAj4YYMY62SyVOYxlaDUUVxEMG + + KX+wyRsKO4nthVDnkzCRXkbAXAdCatt54PGRRxg9wwi3xwZ5lMVa806AAwnXjKke + + TZ0ofqZaGVwqE+zo1gVwqM81Vs2QsKSP47yK2/k8xS09dHWp5CU= + + -----END RSA PRIVATE 
KEY----- + + ' + mutable: null + sample: + length: 8388609 + seed: yPi3JHKKbWaEEG5eZOlM6BHJll0Z3UTdBzz4bPQ7wjg= + zfec: + required: 1 + segmentSize: 131072 + total: 3 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:MDMF:sg2kk2aboiazxpzii6niebisru:ddxs4jaogxyavypn5lxfqiuymic7ghlbiesozaq54ubfiqpjyyaa + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEAxz5hpr1q3n6XotU59zDx5oXx7hZil0g4WOX6xHhi82scQ2dE + + Kfz66yEpuGBktOq7Y4xhXC1w8BXY+rt02f1t3tQpmZ77mAZWbUmR+y9nFU0MWs+e + + SHKTSKynUT/AWy86PNdQd9bkvhIynGq2WUwHCpE9ZjilkZOZTrgjBs2IavmkkH++ + + DVje/KBSnzDebi79/0kBa+O90BiBIGNkfNMy8nsJJn3NaVwbsm4cVgVNjns0adDi + + ar8fxlzyKYlwP/LvODp1u38I+/9sxStKtzlVYiCD48ILrwUtu5YBSAs89xnnRIrx + + VJztUMUfXCY8czyThxEj7QGuac2HH/o1EWWDnQIDAQABAoIBAAWoxnJLgA0Y0GrO + + HqrgR4ayOEh4d01vvjX8Nk69UqYyXSem978vkdr64qn7hvDCmBcARkWtfr87B6I7 + + j9qfQa8qHJJh+yh6yZqqE8zsMEPhH+A2dHr/UiKm0MvUgTy1zzQqM31r66xgbPKX + + b12C9NI+hN1x73X+mMStI0WOsfjD2KgJBqm4Bl2b/jBEnr17DOVGAHIvfSyjrObk + + hChdjgBQaiW5L6qv97f6B6CTqdk9Z3sFIDQp3oD9veim10nK+6c5akQQ37MFERdO + + k4ariqAunYP5NcjUeZWEu870eZFoQR3DB7ucza0Zt+FAvw99f6ijvX7nteVUpZhC + + aqLrbCECgYEA76SYmfHhhBHv2XRI/NwY3/ACcW3xxPDpSLilT4NuFywDUrODDVzj + + //s+/b8PCUW5kXvR0MXGKXh8ADNBcbhym3nhcC3XGqCo+M2n78ms0yRF9DzWDjn5 + + yt0TQsuEalHMk/sL0gz2RUarcmyt0RrfM7WQJQHdZ+Vx4EaQjp5+S80CgYEA1Nfe + + PlcxwbJ2MO6PLWaoZAO83wrQBes9CSgUk8mB8CEtCvA3sC6gEl8ihZSD7RuLjzSZ + + nVQZQdLmHbWKxSnhFRK1gfIqnFaxr0HAtrDkRt6V6OSAV6rHziNvYIgtXFKsw29u + + 04222C+XB+lDtGgIxt+LkQRq69CKSFCTQ++PZxECgYAOfqosRZEaZ+tV/86aXMW0 + + ZdQAAGJrQxcZKvH0yUJTbHoW+nymxkOULCI3PuMt8GW1AwRB2HSP9ZWqfW8r7bgg + + 51JXcq5cEfOmeOn7evtVGhCRIUzhN2iAeLa9h4nO1HvHR5wDbH1I22lrVl99El8F + + xameU2qM6jflFN+RgMyq0QKBgHymM4zU6dnjVx6PB6DyJxnzqnABWBSvUJ6FL4/h + + ikyEUWm/hw2SMMKxnnkWojCBWjky9+fQsb3/8i5h/HQ9c4kw3MXOei/3AbZ+zorv + + i7EJeEfdUmCFLuDFldu1xML11CHcp84Th4qSTGQgszr7VnCJyKXULX4PMnzpW2WE + + 7bnRAoGBAOMSiSaRliUYjKBF2ggI7tfVEjslmKfg6MPb8Yuvt5Q6yJ9U0qDqghIw + + 
tFgDh3H9OhFiJSx4SoP9ouKE93iQ3F1XPZa06uJ5sWVxnSL0tVDhqXHcEM0JCEB8 + + 2KGer2nGo5KcgBbwxILp7v/rZdjXJIvCz/ktqUneyW5HQtPEffnI + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 8388609 + seed: yPi3JHKKbWaEEG5eZOlM6BHJll0Z3UTdBzz4bPQ7wjg= + zfec: + required: 1 + segmentSize: 131072 + total: 3 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:CHK:r3orpfw7tqaxbgxc62hpmhihye:meyeb5lt7i4iyewahb6lxzohn6jxrqgi6b73zv4gxzirykpnyd7a:2:3:56 + format: + kind: chk + params: null + sample: + length: 56 + seed: YQ== + zfec: + required: 2 + segmentSize: 131072 + total: 3 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:SSK:zyx4vx7xud3ibmvazav7xf5omi:biaeawjobvdp7v26ed4re6bfc2mwis22aoolyz6n7ciptaqktpua + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEAuIn3ExvxrpPLR3mwsmSlgk6/qESjM+a4KTrWAwDF6W9CFl0e + + +jdauS+LT4yWOwXQ9jemGfwUN6/zNqZKwHl2IfK1nxdLmi2yDs3OTWPSNQ/BV1gU + + FTpRio2ZCJJ1prONHEvuABzY1un7CCmN8jKd+VhCPXMZ4AS/RI3nmREP/Hhj2nkf + + 7PMDP1RCvk4zl7YY+IqKCk7IixK/4DH9FaV4DRiQbk0W2DeYRtaGjf62AFaKSVY4 + + 0PnVtt450CWGIl152YyJWdP83efIxT6MbEF9rayPZqxfGgXswFSAAKxtTY/EFllg + + OJomAqQYTZLo8Qh3HsM6YsTbrbdU9bbZoNnnkwIDAQABAoIBABkeK54okziC72FW + + 2UvUJ98DtFHv/b17wvIBtc2KK7tpTw0s4aGHKqHC10By+fkmAoTw8CU19X6FmXeF + + IkRD2WYnhhHAdvmRDLJieu53uXigILMweZNQxnaIbXMIPwamBn1P1yD2zDZRz2m+ + + AORy1U5yH+9YnQYBP2jSXoVWwt6ckolyduyhqJbyFtvzduK+RCXLtYUYSz/g2qTz + + TZLMeL7GC9ahigclXmEOhxrrC6b7+rNNMidCWR+S9JwSXZUUOld4+4jMDjC7f2+Z + + 2dQO3LcwTi6DJzuEIE+NAw0P9D/43VVjw0LXsQ+1b/Z0Y60SbTs6qa23KtvrnMyk + + 7tZDxbECgYEA3gRy7JugmPf8Ir/7ourB39ks9gY8Lrgx1OFd1jKDPicmm1V0y34I + + YcXqIVgwkui/C/PXigYselySyIun/hRI0l970W5Koo0Gizy1J6vgAJKsRpip8Amc + + iXKdjOZlJqBo89Mey3QIjUuq95bdwKM/w61TLb0us/tCNN9F1QRk42kCgYEA1Mj0 + + 8SfE2UlERW8qS9nG00UUVxXI82CPXGrcYgW2zT0eM9TlK62eYDC2vmH3xZKQ1b3R + + GfBzIxFSX2WtE10hnST3CuwhwGvxH/sLgnxYeWvJZvtLsHLgG+HgKZlTkbcnj3ub + + 6F5HdijrpiBf+gJwHoIE1aPeelYtqiOcuLeWn5sCgYEAhsEoiBhFt9L8xJLGRzI6 + + 
DoYg1gseyDSgeld3vyTVqAnXUvzhcQnESKP54ddHVEPUgYq1Tl9E69f4d6TciEkD + + kjzGSG2q+1KhoC1uvu+BfJeJ3SeYLcuHqZ1Zp0XIK1O9oBCKZm69KhW9ZZ26Zswv + + TbOMAv0Ktc9Rdgn2tr5+BdECgYEAij7BvQg8gXtzirUNwtgLsGmaLHYv58edfMrE + + wj66JKAHxl8UQYt8cTxVDl4yD0AJL4UynGq3M1pmrSovB3yjgShqBMOjrhOzRjbh + + pHZLOSAJawnrhAkuh476B6zhObPIVRVXFuJiBWfSqk0wbgs1cuzAXVkpC0yAQKEA + + ipZkmu8CgYABb10dBN247Pr/knhFFQkXm8Zt/iYIo1aZfNEziZpi0CEewYY39ZBM + + 6OHKFNjCCsXSmxWDhWvLC8CHIk9ExdI+M5F2kdM9E3TSvqtjVL2e6qqTSTWW4M2H + + AsqoAnHhTsnxjfu2TMFPmdeCfbbbZHoeF3AgUTyWT6TaV5g6n5bw1A== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 56 + seed: YQ== + zfec: + required: 2 + segmentSize: 131072 + total: 3 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:MDMF:xyhviiiiqqoljw7poxi4uqun3m:rdzfyy4nplb47auypcxjdvxklg4wyseiitzsclj5aeogss674aeq + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpQIBAAKCAQEA0aImdZWIU3aUzKiTP/h9QMteuUvRtjuHxs5ysQQQfHQaoyHk + + rR5glq7Wb/9FrTOI99ENDR1SxrI6Eqsd7FvWeASY6fTULi7gSIH2bAs0Fwc5cZnE + + Vga2hMnTUchzGQCewrskGFG+JkBdLP1YiixBj/QI4GXOgVOkZQhtdjQ63vVWy/Hu + + kmZHy0PbHrl9n6e0Q/FCdqho1D7BJ3+3CbuSnUcHDuM/+kk/cYuSXHL0BZLV3vSc + + orEgF29LDRu7dZRMHT7ziP/e+LjK5LpL3XaaNkNfs5h8aXDHRIEjuOlKaVSHvArG + + RiHZ/4CiHObaQSHUJ0BgEFXwCPsKw5BU3woMOQIDAQABAoIBAARXlyyfYsPACCAN + + Np4KyiGaXa5V7knXt4wBZjYLYd5e82WEzNfsXL1Pm/ROUEd18roXkvRv8qMCzuSF + + CyB9VX3i3O+mF2R3Y2C8NtoqAgWC3cTuTpOYXbkvPK/k8ydeuvI6sfFXkaoxx34T + + GTZsIhWhYno9a2Fn5AixmD6eIWTRVVPyPotXdO5krHnFkgq6ZWvjzZEIg0JuV1yf + + 23S6yhqlohLRPGyU0uw8fW+O5gQCBp16KbsCwYI3sKxadfPZfl8Pw7I9DDsWSg20 + + sORw+9RWs8l6egCXa++3qYn832UMeUPWOarEUVO6u9RwYnVpVP5CCPgCEprV6eRU + + mDKGnRUCgYEA1X/3H0bnVQK2zayV0DjvZGkMDLgAMMxf+0low85T+DWx8zo11n13 + + S7n2CUOIqCh/C4EFw1m72ZygxRO2haK7q4JvO/a19KQhyhpzXrdS5cbcvyfIJwtc + + rTjX7UbX7r7SWCH0jI/1/ckWDHWs3fc6MJiw0YBMXW01LOSfH/XCj+UCgYEA+10l + + +2GvmryjypY1Svn7AaXkT9j95ZkRSO5AQZa6bczjAsiYv3BGODmsIiWGK2cOggHM + + 
XHtULBaD0nFaQalmHo3OB1/GafpDkbrQZbraLxIEy6VVS5sXA6qq2Isf3VrFo/gJ + + K53F9ti+7yLUi68NcTAAKFCXZjwaoo/OjH3s/cUCgYEAgSeeeX9NJnIz4AxNvN8U + + guvBbFhLVTntvnhUNk+1IGxrMDbApvbTmi3vFv+Rxhhpcq4krF62cxh7cX1RZ1pg + + qYqIe//tZwd7oWWK8Xt5XKOGmuUYAfavo+LFTTcUHcu2N7ai1/2m1FY3TmZJoyWS + + QB++p54zlDkid/v9/zmO77kCgYEAkmoZm6m0/e7vgSupczjVKoqUyKXejoRwewi8 + + SPghM5/qg06RGsGtRUbiqyksU8+9taCShzQXPW8H7ea06hZgM1/qKIVzL3vlK9ej + + V/5U5KIcRPrTCi0WZL5esa+oKembwfzSaqOGElkCLo0dPRgEPm/1R4ZaCeTsptAZ + + QeB/0PkCgYEAu/Z0j5XOD2jpzUIIiXOkV6mj2XoGEZu7KPSYWVpZooJ8rkc+Eur0 + + 3y1KkBtCbfk4AtTtPNcBMIr+SCZhrI++3hbpvA6uBAyWdMHTeNwCDsBzP8i77opG + + OlssRXcBgtxaG0lbrYO5KpyxCSv57wwBmTX/VnRbxSEVnXXU+vpXkHM= + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 56 + seed: YQ== + zfec: + required: 2 + segmentSize: 131072 + total: 3 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:CHK:jdm6ytmx2i3ya2bllsdzxurjtu:iqr6tyysaseegzfurhuywy3mbbwkbbsov5bt3fo6oazpyv7olvda:2:3:1024 + format: + kind: chk + params: null + sample: + length: 1024 + seed: YQ== + zfec: + required: 2 + segmentSize: 131072 + total: 3 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:SSK:5bjpib44tzcjlbiysarvufdaim:iitswphabeqczmg6ntnjeuxc5rfca33lorjsauils4bl6c7p5tva + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEA30QPR+SkpkThBIubgnvyvrEpBWFE14xSJKwE5x9RZqvrRdb6 + + Kf4YygB4Hp/Mhdw9E94EP8zfxLkYE9njeAzxPsnhckRZz1xmxY+jQTZ9kVs78OJR + + fqsa/HYKP/5yfOKxMFpUVJ0fJKzj6JVRQAnvJVun3YXA0I8eRkmDz/+aByvWQ99X + + xRzsvr5xsFmtAa/Du6AZBLhlP5k06CnqFsHQb9a7HZnyC1OyfMCyCQSQf/N0VH1X + + iQ3KLyWufJhq2UGl403XaxNi3Or59brp+R6wIQzIUn9rzXe/ftjkrqp+TTT9FyMB + + nUbO4HSU4of0uzXeYINZDkiN9Q17mpx5FSvzHwIDAQABAoIBAF5vIyd9hkbti1+o + + zTX7x0jxFjC4W63wJC5utAQuMvgCb5kyvM1WNJX8bNJHNPLJnOvyVEnIFj3XLF/5 + + IUV98+xi54C1eGdE5hNaFetXaPU6abgRgfbZ2KhAJUW8EiDQobGaA2Fms+2HUz7l + + KWC00voyMmZ4VH3iiyOfpKktq7CsB8gPIV8a+BJqCrRjRnI4n/+/8qfd+Wa655QC + + 
H9TjuGGeTKn9GwofeXHbwION+ygPOXQpez2YudigZBHK0wNQeJYkvDNAS6NdfUdY + + OtDOiQwhLQYOa3XeLGkcxLh9Al0c0cDEWLToNqPfVKTfsqKIiNail7ij+7EJ9rOB + + q0suCgECgYEA61GSBETDtsJLxoxMgGgdxyW2Wc/GdFBi/f3cHy+BSRHROshP+H+n + + LdT9H4U86PK1QmQZts9djBKh63AYhKgyRyMqSvFxpqywbznnkt3qygPr83VIXXmY + + 2Yj8SKwjgslq2xNl6kTPKdBv/r+Es1nhYvSZf5UTbgdQ5nlB2CvCp38CgYEA8uNQ + + an6u2OtcmsDrwOdNZ2o/3uddDtAkRuI8LFzD13rGDYS0ZE/OeIbDxaW0mty3t2l0 + + QbjW0GJ1wUFoutcdJmRBOj604Xb/NYDkF2kiqtYIlnDyVP4nIi44+0TIfTs+zXE/ + + s0xXnwpZB0jfuQxDT3g4U7fARx0K5kaWQxxzhGECgYEAr4q9K3QEr/xHPMkCdLO2 + + qw4F9v+ZYsFo52KN57Gvd0vUUk6F4bGQjA8b+HyTUI9mCi3URNxyQ4DOy2xmzecP + + AqRH83ojtxuRzpdameP0N1kvlgFCx4BjNrwKv0eygekxTlYtK9LC28WDFn3WR1jg + + WspvC46w3N6WSifgp5sVbY8CgYA9Zjcy5JlgnobHXBN8rTwE83f36ja9AuLYxGH/ + + uOeM9i1Qx4YugXopP7AHq67vIvKSO+c2ofozrWAlHVrTOIPW66sNhUKGaGV1agK+ + + 5EXuN7LuDNlFoQXVfyfKZQXlmm9y0bkPozHXM290Bvj/N1lgonxitWW21GGn+poL + + lwqgoQKBgQCiXSbwbmUtTSmgDUNdHJ7tkwk0+jP56Yv9feH9FUWVJE8UF5InBB62 + + 8KckguXC3OcVmXgoPhMFgMwHJE0Mqx6FdhQPCM8oEMYYg+sM9kTHiykFfTmdt2hJ + + L5A9omWNxiWRKgIerKOvO3ljmYsSXWp8VQJjvCm6DlznDg5SDSQrUg== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 1024 + seed: YQ== + zfec: + required: 2 + segmentSize: 131072 + total: 3 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:MDMF:pxo4i23yfsjuv4ntm3xb2rwp3u:3vqfgh6u6k3qh457kru5nxe4enc2zkmnb6jmkm6xj4x5qshby6ya + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEogIBAAKCAQEAxr4IL8ukM5awbx8SFDOqHXyWEEm72ceADkaBYiwJIemJESPx + + 9kd3Vvc3Lu5clWbBeyMvys6LAjcMz7xWlvDZ4/rUDMRVzY406P8c0dSYGdnoJBLD + + jr6CDz0JfiInMKGsBaVUejnKoW4l2PMCj1tbl4v2egLKSW+XzNCfFU7kFJHfOK6R + + MrRq/HVMt8eIaHQt92Gziukx/AgoktQhGxmxUJwd3ETBuVYFAqmPBT8uuf7bcAfV + + JCfjrxxH4wAwpWXuUSu62Q1yOw7FqVQ+k86IUNFVoJQMlJHQscXTu9EBAboAqgM7 + + A4rcgVMqo4QkqvhX0/trklqyU2kwslODo+pv+wIDAQABAoIBABJCYlPV1K5isVoR + + k40kmVlf2WpXWTlvJq4+abkLIlmpxaHmrgkk/sC63NM51hVp3UEtdet0TM7gH5QJ + + 
vIuInFQ29tAnd69M2e1FhwXQ5O/hwSi99UFCR9u+OAbifLxElkk8gTBA6q4ohbSo + + JaY7IVqhOCd0kIDtDkLPMKvPVY19bG1FKiDx0BSBm+O393i+ht4vIahZEPhxPyWS + + hF01XWqUyeG2EnMzmAOmdaTUck9hbu32It9IWD+Lxv7mN4L5CIeRKVRTEF4/nV0+ + + SpMCg5U4ZcVkR7Lm1k3Fl5qUZe+W5j0sb6z1r+M1jKgFwTmDMKrmU804qABDw9LK + + PlfXe+ECgYEAyqny0/ff2y0sq6TDGq1+tE1hgu47fI/arQhkNR6GmFJH+KZfQPSU + + Uc1TF9XQnEweLr1/U4HZE7bGGVUfbhAALmvZVKT5E8gfYOoPkm1fDQoavFJW4+26 + + ZSPu4gl0d5Z/AmBPk4BJRdHsRDDDKvB0BdKiqKpSkMTgnNklznVPeVsCgYEA+wvg + + l10zUPML3sX4EcduNfRR5kXCAtLlU/cnB5lEZNpEDesiBMVvXxmYw9D9z9LUiX2w + + dTcruHv3mgsESyytThh7idyBv8Nw8bP7BSOP1TI090MjztPYByOjK9LqPLSD0xhN + + 2sVgH0DN+THvD2aV4uCEWLgKEC9bD0dttC+2BeECgYBDsEmTdInHCaqO1aP4iBP2 + + opW8BlfF/cIa4t+dQknQHEM/kEnmRwo23C4xms9nNKEsGUyqlobrZ7N4iI7L0vpM + + hub6t3MdoUyhsOtsi60gjMxrM9EjpaYI29yQkHne61wWbhaF/GX8tOWFzQeSkucd + + fsGnNeQHyEoA+SIAd/wIWwKBgFdVf3FI1ARSOQvr1OvidB3C/Abet6qh0XPPZD2J + + fTiUkd5BsVj1klQEJJfiiZmV36hhGFT+t2/7eFyXfovkY/nqHHgORPkANbdwBGB6 + + SZxCVhi6u6dFHT8Gj8o8Go65wa5bIyJ7TYAx3DXXwDGcX4JI1uHCTIXq44PCNpDb + + lDghAoGAYdL8Fl337G/8wO6e1cIbxLWYWdtQvr8ibsQJCYznbQkligQ7+Cb4Ch1F + + ceMke2mC511kxIdPbpECf8eP6JSqtAd/zrBASe60N+QSvaMcQwRqtoVnduERfjDH + + thD/UyoA81pRl/hGX2ddp7tT/Fu9A8/6FLJzd7g/k5ZgjkdjfpA= + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 1024 + seed: YQ== + zfec: + required: 2 + segmentSize: 131072 + total: 3 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:CHK:7wz5dhkyob3gmkydzgbptk5qku:cl6ovq5a3km7rpfhyb3putg656sp57lhnp7aexbocsbbahdxnfia:2:3:4096 + format: + kind: chk + params: null + sample: + length: 4096 + seed: Yw== + zfec: + required: 2 + segmentSize: 131072 + total: 3 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:SSK:dq7opblcokohsb2hhcuej3keke:25zjdhlue2o56qcu3teoqltk2blks2qm2n66rmd2qcwfujbvo4ca + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEAqF3wcmVUimtXuQl8xGtHK4q0zCOrUBkBVIMakmXC+jlLBnKA + + 
pyNjX8S5JP+nrQy2ZfsNONSGgNS3HoReEe/U/y+hb3hXHTyXpYrrSEgbKRgTxREQ + + uoPW8ljJWyPhe+h78v8EooMuz+5E8pGuROQba9It3b84rg7TKRSAEHS+qN5v1JTa + + B5SkfPvrFj2BrKefGupn+ac+lX63B4a6KxQtVrnhHZ+HZJ91AU8sNXnIRLVc2jdT + + VLfUKHjwpxEXWnpZdv4ypUfrjj6oUfn6K3y/vPM/BR68NadLUx6+EYX9Y4ZBaCLA + + c2pNzT1gmVS5KYvNipt8SEZu1wjj88/8UxSoMQIDAQABAoIBAAD+kwu97PlR/Sc8 + + Ntwwd6+6ZpXOanXd5pvcQrUq0Yi++2I1nWxp9q2LYNORiKjqlQ42T3juimqANyJV + + 7Pn6pRL/x0U8XE6xe6zQN1Jf41HYu6R+P2TL9WJM43LncUQVLAbpT4VH3z1+U7Js + + vq8sphEli+dOIz8l78SDmOZf+3tNrejZqHPfN57WzZ2iI6XdylU047fk9l6XQvcQ + + II9o6LfgB6pXeAT4yhMQwK8LGuUTkwQBH8D29cAe1Vqt2uztJ4NfiEd+n6EsH15m + + MyAuZDVWp++T4m6w6TJbcFBL2LioMkRb6zsdb6XK2QsTDDyEqSGfBxNPIpsRJTqP + + mEjrOuUCgYEAxvyWv3vRyptCOZuh9e/6Pt4eYoAk4gLVFuxKNR+WXjSarPYAVtt/ + + 0u5FbccjQQcZRTej8rTVKHbRA1jT56mPzutKFlftpFuoUGoeDQ0Y7g2nVVXnHHIR + + KeBgifVwUIJQRWfsiVt2Gq5ja++ccGoJHPcnCHokqZ5UIAuuGNEIR70CgYEA2Jtv + + DOnIJZ/4jU/IDnYeR+9Fbg6GioB/s9e3j4wJDNZXuQ/ju1l3kBfkYLVaXzgCNgFj + + UECedrt3OUxWP6cp2dXM67rQ9uw04Fnl1IG/Uxp4icxvjqEGI7IkxCFFAXnRdA4y + + oONgNn8jCLhuD9PqUEXAKGiHkLr+FygDye0tn4UCgYATVn7L8xuLRhVkhdRykzTN + + oUZwqiVrdX0B8kqv6PbzBse1YV7dEg3VEOTca0royejRyjt7nclNWmarnZlSXS8l + + m8Yib78fhuzPi9CJ0ikHEXqel2+TWx6B5FVdcuXMXS2x4QyiuKm8pA/zcGDSp+tk + + zjwT3dLsTP+98YSk0sOsPQKBgQCRpKGcyyy6r7+ONNDNeHqP38CNadLpGdHD+Q4B + + xSTors65Lofvlw6fopD6vbYQRDaoXXKLqYdjSlW1/zAXCK1JPUrWTfznqpc7Kvcw + + VjVxCWF3NjDkdD0Oj1/NSJl/jotZP5qnN3uf6QiDeo72sYThiKTWBsLwe+sRYuR5 + + R7LfiQKBgEG5Zdbp6tVMxI6CPdDpVnMujkoGHHdNwVEX57I2a4Est9dXp37EzSi5 + + /HUwzsS7WR1VmSxpffg/05yFBW1wpPW6Eydd0tip6V6u3XL8CJDgxLUIg5GMWFOk + + tvWYmVQgapm+yt4MNe1VVui8FL5V3jxJrlBJoVherf4GYF3N57Qo + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 4096 + seed: Yw== + zfec: + required: 2 + segmentSize: 131072 + total: 3 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:MDMF:tvewutqp6vuesamnwdvshuhqiy:43e5ze3qcoatis7rfqugjf2s5oty73qr7bcfgschuesycfsi472a + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN 
RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEAv7qz/SkzzSobYo18DO2SgJN2j3JO2XWCWFHBK9bO6R/SzFmP + + OY+Gx7WFU/jJD0Xzs+bS92HcUgcnF+vC6gAAqcRaqiyDIuAWumrQquDcUyShKwcJ + + QmWBX0Y566teaguVf3RKlLA/s/Oe0b0CVV0mFmr7W4aPjszTXTH54whHAuO9hdha + + vRZzqXWKsgH6KGp4RFMh84zwF2Cq34YSKSg2+cOl8at4L7lW3nbQJF6GKZke4hd/ + + FtSAM1U/ynJQcIFfVODmTVLnj5xXpLtT4j0Em+EpDVVuk8vQKOX4k0L4Wkc8JZ5I + + sICrCqCEDdmXxOGRsFKWEnePXfl0QdgMDhoCbQIDAQABAoIBABEcX6mAJAQzNfwe + + 0LYzUkf7oRuejHnsz6QD29UGl92x/jVIPYzaCcxzSexbaiTc+H8S4mYQi5dIgHfV + + OHEmFz5kweCpFfpRUcXzosl1RZnSouLnqa8NcbUn283JXqThLBzoAICc6s/WYKiq + + 5nU+2cIrbGECineollPIEjCYUa9orc9Lzi2k+SSvfXUd8bWJunCmNn9fOenSYmzu + + dzabXlDvQnaA7C3ym/2NsRl5w6q0px4lPqnL8MJowNsBYRkd7pgCRSQqd3eBCeHD + + qbOT1SXyjCklnse5bjEP+rMUcMRDktFCBw2WTa5GZcYgzoWtDL8WCZ39jJQTImti + + qiU3tPMCgYEA7NY+pUBRrtkdebwHiv3zvD7pKzGvUxxyY7Uc0rtm8zIvty2XNU/A + + +3Yle+7ffv63Z0ub/0yTTFw4kSl/jE8iQwqySSmDKZ3EhC7dWbbzOjHIvtVvcCaR + + 8EbX/jpMpASX10iYonJXAwtrtn5vm5frKKjm3sxGNORBK1M+OllbOosCgYEAzz4d + + quPcYInY99yYp1g992vuBj2bSkD/9u/lbCwn1Z7gVFJDBVVec7FAXm+3Kc/zi9s+ + + Leco7DBkz1IHcVGchF5lGaUQrOXV3mHZEbp4mXfNHLkwMYjFCwZ/70ktXm++AmlU + + 6rFzSVtQyLj6EUBZZ84G0Yq6eQGqZydo7WjxbecCgYB7hASp5FB1UtAXg+OfLnBm + + FZ0/JKteOfDCZVtB3/CCFwNhkgpRCGYJ/wTvjJXMwoTd/0W9MK+FXHc35Z+aik7B + + DhwLIfZAxwINOe/A8TQKfppGREPZBpSH7jqJYNhFlgumgDryRZVxhgxH4crNJ77B + + tsypF3np7by7Hq/OeHmmnwKBgQCAy1CyuINn96NAfvbb4Uo5bvjxJe5RWk35ECPb + + cyGab+9oV+tQ8DoP2lNvnSwOry7jdvCQpH1ZM8Yi1g7MDPUhimx4YI4ZdYjReKvn + + iaSTc9GkDS73SdFzRanScv7gFr/WTdG5PWixaS+uXs8CU6R8j5zLMtUqiK93BhX+ + + nV6VdwKBgQDBeauKIaw6AaWFY5ALiKmWylQp50hF5S/dFocCzVm+isnmIhogjwu4 + + cYG7zKtAOU+6a5UhZLw4j2Nx/YFjExHpY31qEap7CAZ1u8VD/MqVzohweh908Yd3 + + X2X+o+t2k+sxaRCNJZwYgpxpThULs17PoFfgKS7hYUrbwk1ev4SLiQ== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 4096 + seed: Yw== + zfec: + required: 2 + segmentSize: 131072 + total: 3 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: 
URI:CHK:aagw6esdm2msphvgnr3rlzg43e:zqxtrlee6hh3vtfg6ihtssjqvr27tpvi6gkmngnhiguttjz7yyja:2:3:131071 + format: + kind: chk + params: null + sample: + length: 131071 + seed: LCa0a2j/xo/5m0U8HTBBNBNCLXBkg7+g+YpeiGJm564= + zfec: + required: 2 + segmentSize: 131072 + total: 3 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:SSK:4r4hejfttcshuxaatgims3x5ge:25nruf367dyhnsykeid2owcdrbtbg24yuvynqqswvbs675ieaa7a + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEogIBAAKCAQEArW/N7PGvnQp/v+hiC9FjnwBTA2sTB7SRXQsta3Up3vBHyQFk + + RCcpGL4/oQA0tXH0QkG/nIyjT7VwUHWUuTs7HuKIVhfg42p5aWrwW2lWbZpCed3Y + + +AJCp3/pqSd+7SDV+2Tc15lKLsx4bmN7+hnVpMkTDj5bnpTFPkjL60SL/jEduOvT + + i/K7hJoIiqkccaA/+ldlRwVq9GTom7RG+AP6l8/tbOUyV6heYCFOTe5jPEPhcF9p + + Knsvwu03rF2HRjKqb4TcdqGjZqJbcMnPIlJRbUDwHOwKRpx+2SqeQFe/jrF61uZi + + J6EJdliKPIrGU2GEEz+PCwQwvUXrZ78+lfKU1QIDAQABAoIBAC//ZhTx2fDzC+G3 + + VRMCMri5JUAn7M9QGH6QblRzy0+oXaVyHiaCU+xGEmPjI7MnQzrm6TlQ7o1LoDaU + + I8GTc8wbUzHIT/N/7vPOpZAWudWBQG5rh/Uy4vFqA+dNR+ImGTIGeturz4zeB2I9 + + Y/0WEzsUbpKdjUt4zpFrIgwNH1UMGsyjkdgHOBFW4j+aia6cV5z1Yr/3vuZ61Jad + + Jf6Mz5+zyLNqKB9iIM4HBwJnohvrW3n62o8Fxu10ZiManu6DG7X4xCUrMk34mq7S + + F/M5CR5a5ZN6ZFO+uGlI5Vn9kKXylYDyJqezAfS6eho/Nm2J6KNm2lvr1URI8nFQ + + T/Jn+e0CgYEA9GcoSa/Nt2Ulzzalmyi8Q7TfcJCMmPVCsMYe8J55ncYmBwyyUy7u + + TKd/osu+nWsuTxtSQ1bbWMrKoMghMi+mfc2cDGIjvfanmr4jYxpiiUy8UZdOGkRo + + QBovn5jWNk5yhC6PksUp2GR9ykezD1KWSEK6RrlzrqfYTF6F3nv/qNcCgYEAtaqY + + 3NizH1tF7uh54bqgke0fxVMN8egv0uSlxb968l9B7LXkQkP6nQ9s/jcSnlDUpu9g + + ulu+IXpeIaIDMff10gdKDEfSjNKHwBIgLe+BD3xkGYbuGSMeffO1f6apCNAvceoi + + DfxgyEHL3HGnweOdJXdoMf4AkBV7TWRzlMSDXjMCgYBeCzvDbvSXt0IfRAXheIFJ + + BFZeOCEB0o5A+1t4d2KQxWhomggcXhiwQluoxwGoDVAafIbhBpEMz6u8xoWPjCpi + + ijWbxj++nyTLNJLlVYfJEU/9jV0uWlhLIkhk/yieaP1Dw67XaSq666BDr+dE5CCT + + 2alYAZB0Cn3+lPiqLciorQKBgFcOb761ofEO3k6E3ZOMydHkXmtDR9V7PR/FLqO7 + + BQINIBx9detDhF+rusGARs3TUnTFFgd4W37TkzRu3TUe7JA/qf6ElKOjaCZlCUK6 + + 
GZEAPT/2Zzdomv4kwf9wMGTmzzW5y6QEI6UT0svLM2c42l/P/wCmBkMF6FbIIpNS + + MNn9AoGAN5Q51YWtiagbvyAzvanQbUPumSaU4e2Pj02sUj18XiHR0H05lzcQPa00 + + R3NRqz0/V77zVH78+hiUwaMq+cS98a9en2IYNbIQRzxGKQx+1f9+0XlpmJUe93k4 + + wC7hG8l53bXeMj5NXC2W2RtzDRcfnEiwcIYUkfmF0DWMKlVdLAU= + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 131071 + seed: LCa0a2j/xo/5m0U8HTBBNBNCLXBkg7+g+YpeiGJm564= + zfec: + required: 2 + segmentSize: 131072 + total: 3 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:MDMF:s4hyspsx3kyont4eaad5jecjzy:43ak77lwoa3ent4nmpsb222hgb5zcwpcmhq6ct376qn2747u6mla + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEAoV5+u55DxsXqpuB98wwyL/XrkYXYsqX0kXcJ+uF5XMFkSd9+ + + YneCtRJJHeQvilh69d1RZcisuVpXxKFa+IiTZChXWih8CTtRQQArp3XlSllYlM6N + + bSQ6h3GOU+M+VCo78AJJBLQ5MIJRJFzJVOSbtA/b1qV/tZ7NrXkA0c7wQz/bwwO+ + + hlXdAuwPlcR8vyDkO0ZapQBlq2Mp872l6S97vjNX4mmWmJoMRuHwSciyntjdn0Xv + + QIMy/tmwrM1K2yxjjLoXU6anMTNrxdGG8t2N8j5hKprX0UWsiEAVQ4otCzJvmYwi + + hxCccJiLchsnSVRhO56KTUO6H9PmqilpKIdPgwIDAQABAoIBAEY61Ig+JHw9hdbz + + 7Azb2XnTGx9986YooOywNKk5+TI7vrSB7sTXA41ftG+scF5TDMy1cigMstOGdJ1Q + + pkF1W0RjZEUKSpVP+hiChP1AS7bUdL9qt9Vwx4JMEygCRg2mRei6beH8t8kbZkof + + kcX/Kp6uqjxcUd0PDK+7cnZdjGaPkuAaiMFe+xpw1OQBVz+g7AMMMfWzSpPdU5+z + + 2mNnoPzcePbFojXbv4YQb7IExJxp44cS407IMrM008s2SN5XpNEsM62JdCbvCQuC + + LQJXvlccg6GzOv68QFtiShAWhskhsAcDGFDZLJyLPCTTUsav5MJsheoD8gCo1ogi + + 1yNVFqECgYEAy+ZMATNtYyRuP7w8sW8FbK0uHRwtA1XaSfde5Edv9dy8bVn9iDik + + G8pnbYWoUOz9ol5PWG5A44WiuAoO1/wLBsbNXyBM015SN/zwh0YHV7G6I5cs+018 + + 2RiyPECdQjYNPvO+JaGJ8KLLwh71rn5Jijs1azQNHWOoHqz7hSjC2iECgYEAypol + + 8eS5bo+Tet9UuNirpozlBbjZxad3mhlNbbkn3yCdlo16lTv+BdTT9iUYwqdvm7r4 + + f1qLpNSDD1fSQtkFwLKVJMl2eHt/kFxZ6i3Qp47y+x/0CXRrwUcBrOKXRDisY5uo + + KahCoRJnkcoFXbSTUd8feJZKdgu3d7j0Qd713SMCgYEAvihJOdV8brnLGCW1dMTV + + ikT4fK2KTVIEAndxR/RXtjPmhxUmHaS1aDWbv8im8NIUuRi7Zv1sBsTavEilD0k/ + + /1Hoto6pF3cftpduurnUnzRhJFAY40Wg6dbeYtLf1qASOmOXMgE1Y/ZvkNrOxa2B + + 
aClP6Gri0EXgxLsO12DsWAECgYAhBcMNjFRVGv3U1zX98wL3YJurtRd5yfQKn/ko + + 2zcOfUhyU5kZXe/nj2sFAcLpZ1Ufsvfx+hYsxZ6fD5dr4ee4IuOAXX374VVHeGYH + + b3RE+13LZMfoCpvNov846K3zJrGigqqEL7K8gj1zW9RIE7i3bYC5rPVyDDLZRsI5 + + QlgctQKBgCPvT5jiJXsM62rhhO/7HxrtDO0o/CijpZiXQkPaRuchNdNh+KaleHg/ + + HxYQwPLnDaqxKt+FBwgq0Mu3cRFuAN1snsjx6dvpFtIQtZi/IbwSUf2zEGfVwsCn + + jafG20PQo9VmtwInm90Kqbz+qTXTTCTzwAFGHiq6tPoSggTp8AYv + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 131071 + seed: LCa0a2j/xo/5m0U8HTBBNBNCLXBkg7+g+YpeiGJm564= + zfec: + required: 2 + segmentSize: 131072 + total: 3 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:CHK:e6uhixvbm3g5amrzdzd3mnmsqm:4n5aafrb64sqpfnhcdpdfrk5gh2qznprjky6q6jybuuqy2q6z6mq:2:3:131073 + format: + kind: chk + params: null + sample: + length: 131073 + seed: /N4rLtula/QIYB+3If6bXDONEO5CnqBPrlURto+/j7k= + zfec: + required: 2 + segmentSize: 131072 + total: 3 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:SSK:32tsxhwsbircqp6wt3z37xh4ga:f3iewkiw2njala45sk5dmmjsl7po57q44kutdflgfudorwxox5yq + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEArrji79TMBXxEgTKfQOBjShN0/1TrT9AGpPdxIt7VEUP/pTdP + + r25PjSpO90YXJsQYExyf5Z/rp88gHzC8hfJ7JBFsRi6FNG02303zhAzRydNn6AW0 + + X1+bfjH3ELdf+skvC8mcT6jZP4mRZIQOSAC6e426FeLrjB2qbHVj37pXyKSi/OvP + + Hs9esqG74KyISQ22xD9X9aPE5eK+1S0V2Rqbh4NnQg1qoA0iUiAbRJyga8TIpRuN + + KFcprOLd3POeIExviCPjVosH7Ta1SvDH5nZ0qsV76tiZGBzads84wFzg6RMdF84d + + EmNhb+5crWs0ImtXzdSWDF6+ql56c4wYPUANNwIDAQABAoIBAEwKNum9qNUyUfYQ + + e/KWNWAFu8Nrx8VCecHN1rUgWYZcG6RhwBJPZdu/8AH5xRWf/gJDUOt0f/DWWdp5 + + MXLyJtl5o6+fi2VXqqvglvx/P8YgdXYrFWb0iw2O3UGvLNxONmVg5uBcUcAvNNGU + + D0sS1hXzhmsECRM/ze3J4R97T70IAXVoW/1IBFKVRVS5YHEYDy8IHRm2TnhXGMu4 + + tp51j//PQwR6TB6vh5eBGqBQKSY+Iygb3r2Doy6ef/nFy+NmL1vX94IbfueGNb9/ + + mNB9obqiqikzHV9573x4oahvinJQixuWkmPzoVDKtUsHlO1xXyulZjVmPuJyl5lZ + + NCUGFiECgYEAyHt5brRA/EWoFSZ5/YdGZa4B/p94qj87iWCilUkxrIDTYtM2iMiv + + 
oGDp6waCiF+QqiDAFY6Ggq1n4wyMiYKwfD9ggGhHLcdtnaRaeiYoOBNPNAMvLixd + + saw0g+GOn9P0QpHWanqIFF3UBhGTNLoWBKI02FAhPvbruMTpM8Cvng8CgYEA3xs8 + + MBPH3ggBENqg7jgye2fZ+IKaQWIxFKa8kIvHjv5dasY2nBxM6xcZoIyz2XwQIfq3 + + o2OU3nt9N+GQ36Y5m46igYTGkAlXTqMuq2ITlq+DTmLwLPLTXRRGPuLsdTtLJOAa + + Sh0Ec5JqtFVlXNVmtsWMlrPaXty9ik2/S69mRlkCgYEAsT7g9CvvDFo1KUXUMn5n + + kbvOzaOF1daDt4g1FZEZlq5qtQORQktTYpJsHLqrqw/6YT8FM8nHSD8xCr5sfaKK + + j76kfcIzs9iOJAJLb5TOmA0SSCTMkKDu0QczgqlnJA0K9dPj4k2kg4UUz6y4HbSr + + hLs3x0rIqdc6PifxGS0w1qcCgYBdWwkZWP2WA6VmhwU2CR/ekXscyJGBcHP3Hzni + + BgtP41H1ntE1C4aIDJd1ncqX45jgjweOf9nIKsYfvuwfGXAbjlijd4qatL3qss+R + + eS2XLQP1peK3/DfDR/uIzu2AtHniCUAW6QN21Lp/kQgkC0u6iPkmCkYC0b0iBRxZ + + sCBMqQKBgFysV4/bJM86H6lq6Z72bJPoFCrkvdEDvWPyGcRAsdzp23AsxzZyXwe6 + + jOclUXpeQZ92YTk6G0+lijFZTclVLVyF3OuT7lJn7iAt1nWP4CW1UIDn3AsYiB8v + + TnXZ588sEdQUC1ntog+ikIZNF5am2fpC7uBLUtvHCzgbf6Zg9sWB + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 131073 + seed: /N4rLtula/QIYB+3If6bXDONEO5CnqBPrlURto+/j7k= + zfec: + required: 2 + segmentSize: 131072 + total: 3 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:MDMF:chrm7iotsma3yt3dw3sc6tr4cy:dmhl2yrc3bg4vjdyuc2xvcdqflt6zz36mpchvc4bulhu4c7dru2q + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEAz8YPORhy3pEpkHDKkUXz3UJXmGEQ+88Xd18ingrmzACZ52f5 + + 6R6ZCw2F7cAVgrWbvPZ81jClKgmatIaFqvN490NUVGm/BgyLd4kDVUxqVu8Qb8MK + + dQJS06N/xV8ZrJQLFp3mgccH+dY0OIZPYMwRhchACbPWlgDngIwXkySe+O89b1Rq + + JfCjj+t4VMYYLDg4es55CFum8VUQmK9Gno2hZYGoJUumIlS6bhQ0OUq44Rp0lKqy + + 4pB0e1EV0MnowzXb604xFUTaWN7kI/2Y5DvjgiwKMNWsdy/yECz5gvuesr9zw1OP + + VuXnAPt9W9p5L2Uq0xHjMd7vE0ns6CRlUUHeSQIDAQABAoIBADGDa7fJv4ASHFNX + + SbK2dp+s8mZ2BT/Y0WkJUxzSEL+fSg3nDZt0BvknExo+CvI/+JnerI+3fnim2sb4 + + As0jIdnc4fEO1S6qGSdWj6SJZhMK/AICOxD4yYe1YBrHJ71kd0L/xIF7ToeHeCDb + + QvnYj7lvp/EX+gR7uS99UxXl5XuBS1eZkw1wc6DW+Auz3G211ge5iewdG7IceQqK + + clhpeLMbi6JOieeAIwQFzjLuWr05h8oRSU7f1pjBsy1oXSc1jqbFSPqD7LMv7gQK + + 
qtdz8yLr6zewUlVdljoY/Iuq8J+pOXnOLwtN4/aeRxHGNVd5/fKKZy4GhtM9eCIh + + +wVkE60CgYEA/dOEl8KGyD3jpEvIUziMpe2MKO2rWtGKmzM3atXAMyoQamL2hA/P + + xEkNC419qtA0Su5OP260vJlORG3DULchOv3PxirUOnRAgQ/WjLVNqmjypqDch1qt + + 56KqA2KMdqkRAzGh2rVbcS2RAGJAsz9qK+RK2GDf/mPvoAglGBJu2RUCgYEA0Y2T + + ukrd9AjBIQk1wsuQVXi5K411RSy1cjZKDMoWjkntsuEqmw/u1JztYzwDcpxe7oUG + + Af7VBMlfqC4/ip3+xc/H7g5VxYfKeRfWb4Kof/hBcnw1WdkpB1jkOyD0crqGVNFQ + + yNRr5deCii+Ptw0CNI2gfVumKV3rI3r8ecu5lWUCgYEAzy33+ztPcmB5z9//alYt + + Dz6++aVNCXshrJ54bF5+XATIk12lo5OYXbnWyQg7e9In7MVo3wPIyGxF4zrIgriH + + ijQjhLfutKuw/udjk0RlWWQCM5n3hw+wBPLT3KFyz2QeO8ImP3NqaQ3tXifUcbqg + + OaVEpiskieseazxasrRRACUCgYAUtpfZnDjgPFVKvKxRuuljaXF0OqFS2x58UZ10 + + Mi2Ulv4l5lfdx+9lmLVmT7JPi+FeVkvHura3qGgKKbgiLYfXabhrcYNM6SU8XvzV + + POcG7zUY6eokHXEOxtyYc6N2C7XngGEp2MVpjKncmuLrxx9mrnEkswBzqlLpyOhX + + 6alIeQKBgCDaVMa2HCfRQJ5p3fBT9aBv0huV8LsyurN635B0rF6qUhLA/bhYgDfc + + ADjsEsgz38LahiGqE52YtrfuoJwX25MdFtrDjXiGVpIwcByqDu0lLK8ctox1OufO + + xRSKmBf2aI6Ruhs50y/Hq6Q8m5Nshtkq3Kmqd0oMEuRn3pynTtHO + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 131073 + seed: /N4rLtula/QIYB+3If6bXDONEO5CnqBPrlURto+/j7k= + zfec: + required: 2 + segmentSize: 131072 + total: 3 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:CHK:abfmzuiczjwt6e67f3uoyg5i7q:rk7ie3afnp3xgvnxu5e2vioq476xqwslqxgyx3i5xxevsjhkcvba:2:3:2097151 + format: + kind: chk + params: null + sample: + length: 2097151 + seed: uqWglk0zIPvAxqkiFARTyFE+okq4/QV3A0gEqWckgJY= + zfec: + required: 2 + segmentSize: 131072 + total: 3 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:SSK:fqulzqs2c3wfj72rw5qxe6a5bi:3uld5nrzgjvcmsjvesheeuowyhnmhmz2fdlqzvrkzyck3g7se4ma + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpQIBAAKCAQEAraetHF9xlUFy1ZkIoBHJ+5WhRkta5xo1WeN4/1UdPkqzbhUc + + n7ujkO8D6XuSuhzDLoTUjzYvO1BVkIegZilF/seP5EJSkGlbhwjsiLmWx+U5Ca2g + + 0Yt+1dhgx2uVapn5sxlarlSkexbjx26+fmiibDrasOu84R8YvMKybia+H+KX3+Yy + + 
z55rqjqn2B5iIHVtt/IOhHSU/8AIBwig315Dd+PfseAiocZhTEjloWpj6M9ctIHP + + Npp+WoytoS83NmqduZOh4cLRMBNxddAsXj/XFrEforfGWavcxsUpgKapDecUmZHh + + meWvE8A3w83l6PlDmFQ3kmDoD5o1PXCB6kLTqQIDAQABAoIBAAHw6+md+pHy+LRW + + UxUOtD9SHgZ79iaqKzuiKGZR/XmXnKM+NNZFL7VfZg6yLI2IFz3Ftbu/aaw2KLSO + + 6a63df2rhELgFsdxM2GL7uPdBvMDIsMu1StlqWowUn8E9mhORoIpf9MXo1HWpRiQ + + oaOd5AEHo4UqSa+j/E/V+fk3s3KwxpP9Gsgh/83xfb5U2D2WyXGFdZiEADwWu7u8 + + Pee1mTsi5W1o8lCoRI5LIVdIdLfNMUpLlXx1eAKhv9N6IcJYn84yTbGKJSm6OzO7 + + lR+KpEZcT9fovahhw4IvYyC4kN8IwsnISIeAOl1LDEPnSC8ItRP8Jse7AuYuox4C + + ki7LEnkCgYEA11TWaXl2ltiQqMP+UnhgjLixC903Yv4pVRPHMNZG6azylMNhPDZr + + HLdwvO1Lc/3WqVsFssHGYJFtcNMM2gbcNB6k9Y0KwzdDe+g780Nj5oqpmiYC39as + + 7cwXl99wQYOOzwVSjaGAZJKaKJKCGF6NhAz7QNh8oH+xaKPyJSxh6m0CgYEAznPO + + Oia3vGx5hRBa8VLZ6g6JLcL5sp8Ql+DZE2xh0UJ4arTqF8XcKoONGG0K48KPHAQW + + NF4VRAoKtEZrjP/BqOJtjXV5CPj9NBVWI9x4XLbsh8ZIyk15lZgz5TSFrO5ENIWs + + YKk8RujLB2S1OgeWbJwBmakb8AOUDmiztBVUCK0CgYEAr8g18IyTXrkT/nFhH/nc + + 94OeJE1Gda1+GFG4/gkugnwI26BTtE/ISP0HL3OXcOz7W+1OTYsaYqLVcJEZoLKQ + + +Is7pqio7IwkrvX6Wq/c0crIgWoeVpRtPwKpD/X7McAvyJhTuALrSS7UYeKYCUTG + + ydG/GkSgGHWlYgLUHbyJglECgYEAsF5IOG9pGYQF0EInnu+rkAN492oQjKLMtyLz + + 717wtac2XdpN/Z8fNgaKG+rTmb1VKpbnLTeOrUBy4o0iRiMbmx5MfsNzcdHb5Ymw + + vBQVkwcGS/t9pa3IB58t/kn/RLuL8t6bYzxQbTdkct164Kcov4IK7+2DG2jDLAgQ + + NPDfiEUCgYEAhlf3Y70hQaGmH8q4sQ1Zzv1KDk/k8vxcTEoqB4rTjFuV7ueqCtHt + + Oa5VB+JofexDmfs9TueIwTjFsFaCPw6PSqaish7ZTnzOwr8mzHsyvp56c3c5sQsk + + +ILbxcckY65gmD2ZKru3gW1oOpOpk24n6szu3UXuiWp8+AXg6sTsoz4= + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 2097151 + seed: uqWglk0zIPvAxqkiFARTyFE+okq4/QV3A0gEqWckgJY= + zfec: + required: 2 + segmentSize: 131072 + total: 3 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:MDMF:wif4gkouxqe6bdypqiqjmkzy5m:lhv5p6hp2ok6iio5rar6uu3n5u37f4k465o5hqxzgsaxly44bowa + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEAuwTMW8VXBzjm++6G+QDQQBS9NRUNiKNN8Ni+TFmSH+mIYt5c + + 
uV+zZnZyvSiHdvcQo3VrM7lko5qWORmfmt+pLhO/BbTl8G1u0+RNnEi/++1f3Lji + + UyL+U5N45+HhQpIIrIPqp9JfbJwxNMFZG20JGs+HQgCJs7APgp4EQgccGlizJw7u + + 7DfJJb1z52y5t7rESKlFPh0hhYR6/3/nHa1i/kuigSDtE6HZHd3cx+BfVBE9zUqy + + gkU0sdiDK8OzIIwlFkOf9wdkl27HwfRYSkbqx4ke0yejsdEtnmqwknd4d2uDnWBY + + TdcXn9+3kGknnt31GVz9AMvjlNHhL+SBq+wBMwIDAQABAoIBAAD3+5UsjLtTyhd5 + + Dunv/07PFe996DDfPwwR3Arrh85lr+ZwRB/dGmbs/Eq8YEiVTB0LCE89T1uug30x + + Z0Luu+4kiNznsWYjqblfRCsn/Hc047IrCW6rXMPldNzgH7f7TE2xQGJmvzr2k02P + + hzfzLnbhV5GThEU1iitLIQ77yHdb7UvvlJ2F0CNpsc6bWN3udjg6iUPeQ9HXQeCH + + YakYdqZ2iGaISE8Tk+GngrMY8UEDrJe5loUzR6k9IG1NwLbIbbf/vE01aMMIvYqZ + + VTeJXcxZNet8KEv+i8rL5hXgTKg8cJPJ6ex9HO1+aZ3J9TDS4zFl6Vr4YFAOW1q5 + + xO3D09UCgYEAzXZw93h/CZ87rBnP0Y5LZWFZ/+iXZ07+Q7N1GM8IWA960EkeEb4/ + + ImgPg+YD/YoOi4SWq8Ckv54JgpMUr5HLdo2KTmB2TVjEgPCsPOIxO0SbVhkI/kyu + + FfdkiPfDzFuBmtdNLDRSqy5wX89OAgVN4N/fkO/Nf7rHgnluQoNLynUCgYEA6QT7 + + TM1vhCM6UGUuXC7NJfSstMkaNAmE2FOdbcbWgWXmc2pAFE2Al8ypq/EJsVWKVDeX + + G70pDe3iV76mPFOUGhjIthdb3J1eJrP29QBkuCGLd4rCcbJlhr1v/f6DI303zYCZ + + cXZ1loch5psTFaA1F0TQxiAg16XZ6YXX5sWUmAcCgYBlnrUU0PYULjt3TXTp8nT7 + + +YBoAAQSRpGfrny1/n/j/hQCPIewwuW7ALjbxcInfkbfXn6fCDLzyxhtCo3qoDN/ + + uVW0miUo8ESQeXjWzBEJfU9O8Cbwj8BygN+qltCynHenu+Ehged5XwiZepDckv8H + + v/J1XwXGrPzMXX7ZStMLmQKBgQDdbMJv5PeHFQKgysUXC9Idszc6Q68Gq9T0y9/Z + + JQ1IwNAP9HMX193OYckJfm67eJGOHZUV4tZUSiy/PIcy5Cjj85Eml2PPbCq/lFuj + + zM/ouNeSrOTArckUFIeLUILFAoQ4X29wBiUO+TIZtFqaPja0+ct5uaX7xbog0fKr + + dC0TmQKBgQDI46RUW9S72Rtp9DPSyfqcAR9vpdS2uUKlLqM0h7gyAjGe+QARst3p + + eeL1WFMMZKbcEgqjJOL549WKSt6P0LsmHBgKsYX2ZeKxC9FOqdthuwWLUwMoJwA2 + + z+U+YG+nKqIWQJh3wcUEOE6f4/FbumdaTmgmb/vvnJZDJhaBSMvMlw== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 2097151 + seed: uqWglk0zIPvAxqkiFARTyFE+okq4/QV3A0gEqWckgJY= + zfec: + required: 2 + segmentSize: 131072 + total: 3 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:CHK:34bzejdzpqinmuzdmqenkidf74:pcecbl4ulygiadgdagonbgqmpb4lomjz4v4vqyssr56reyib4q2q:2:3:2097153 + format: 
+ kind: chk + params: null + sample: + length: 2097153 + seed: BTBX/ampNfLU+ox7xipBGiaSbgC0kcB8Gy7BkJB4oKI= + zfec: + required: 2 + segmentSize: 131072 + total: 3 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:SSK:opwonr3pv7h3vcqn2on4ucve2y:necbkc6fuo4ahphse3nyfrcsrgjywctzxnwzt67qxzwewpklmkzq + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEAzDTLbYcIX95OIeuVqrdLTNNVsE4d6/aQkVUAHpl6b7w34/5/ + + jvACFGBSmiJ70J66fDpAHrnTu+WxZQTS2xpi4CIla03wkEiZ+BrlFekODpot8Qtd + + CqhGD30Xv3KYIG2qo6tHQShGG+9GY9/hswSzfjaxMeIJIjy41XzTPdrCFCVzrLqD + + y2vpEbDYhsuujSiBMlOO9c2l8R1DAb6s3ci4X9X8ITp3NT7a5KlA6/Bp1obEiqfP + + ljOYNZUWC/wyBUsVXi1g1iF9wK9EbVcWAgYmEKL8d0wwBNkGGpyMGD4clxPS4QzS + + NEyh5iJN+sCymNWBkyJj0AWQT3G9a3I3G51ubQIDAQABAoIBAAWbAWaNSV6QVKa7 + + t80K4QdH2ddQHaQnjYpfwfQVFHZSvVoF12yODBCRIFNY1PtCEC5uzunJAhXrVTZH + + rp4TGFm8tjg+2Hatd4SHAHjcf+VIuDAgtrofKmUscuVveNuTBxcdEYSpXVtQ8ya0 + + s5Zdb6vsRmrvIH8PGafKmGXfRmqVHAmT0fmV+aOrk6C/Q3sC5JqsyjVkhHUj+OSI + + fmgSzFAizPPEKMVBwbBDHGkWYdvPp+szWrIWPareT6JtnCNL9mDFZVjmVSIdz/y0 + + phHU0D0sGT0lJGdKJTRv+6PVlL/y/6BRZ8b9zVPAHrIpTRY64ILa5Z6OW7PYlhQP + + P9a/14ECgYEA2ec1RSbVj68ttDqNFwqTFA+1wf5sYtlzEaCMUhTsv15xdHWshJTx + + ibGyWHBeVsoPI1pxNzTcGJZCrS5J2BvmdngGyYXf2XowaHGFtXkg/Zmwn5GHvC3X + + aHM71vXU2P6Ye7xvPWQZipK6IWGudAFKvkfvyeWjELlpxgaj/9ZaR40CgYEA7+iL + + /x5nf/+5dV0NLk9xw25oK22r2WlI2qVyQairiQYSjUgR7U1D7t3BKjKZe1P4vD75 + + RzSrEHgxLo9A/7S6MKdJJ0fDjt3E/8UVbde/J1QI7icHimeBndsBo6hJBVWi0Xoi + + VbekLOCBlSY8C7b+6uRBebDl8q3BHHFICk20GmECgYBF9zIokQ6TgykGrKIu2stc + + 7qpqrrm4h5+l8kn79RILZFTDkyEgtP5VOwRL11DDRz/TFzAxDLz6/AxOtQUq6dJ3 + + CZUMUfsNRmmSr5jCKzGHnDiVE9JkfseilxWIsQh14FGvsVJ6gNCeqPwwyb+NKfkI + + 3epFhoF0VkR7PBiehgIY5QKBgQDMWUl+KljAt4MyS+thSfw+GjoS29zoWHzc+NYE + + xXYvRgPhYcUbW5gEy9Cwb986JIGXXxCYLW2UnrxNy2nzJO7/aE6wbblOZOpbbnVd + + VcsV5cehi48pvhay7gxMaZihOZtxUNYUK1Nlgmn+ME4vMFWcoIaA8EQ93PDDmF5j + + oGJLoQKBgCOlFH7EUlugO878go0qGU0E/t948pBGGM214H0iNkxGEgA9YhOfHVWk + + 
cNpJQcFULt1ue7yDapBypWD0Xh97bc+nuTV0u5mR6jt+E0qm91T74czMCdVKvAzN + + CfYOaXmNMWOJxdT/e+VJr9KDeF5VDnUl2EX/sGJXQETZQRZ5RFNf + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 2097153 + seed: BTBX/ampNfLU+ox7xipBGiaSbgC0kcB8Gy7BkJB4oKI= + zfec: + required: 2 + segmentSize: 131072 + total: 3 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:MDMF:cwkenndny6nlpubc3mddn4jdh4:op3xzbo4xhsmqwgsjfi4oqztbb23grvho7mlmzgzaq2qudpucota + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpQIBAAKCAQEAs6M2GcOwvLQXv7fPJMCmLUSi/ulwirddTqaAIKVQ/sxEovqb + + 7gx/yhUX6p+K1OaQj/Y/le9oaJR0W0bD42ElSjCopcyDgg482S56w5vUJ1zGAjgD + + PMGBB5vSpkuEsuuuoH6sDJEW1fLDxraVt55vK9fwAHeGYnvGkTfwi470ArIj/EkA + + JIYKOsfZ25rJkvaQcJ3iHJlNGZyB/mmFXpl56exkPeHdgAelgmHOgb02uIzoyMZo + + KK+uzzwkCoLtWQutRIowiYUH1HN8PvRhgEQ7k6HbIYA/jxT+AuqrNCW1TDCqLJjD + + Q8KTVt5TxLTYb94+3Ndk08czGjhmit1gNcjgpwIDAQABAoIBACNytxfRdnxeW3th + + Jba+b2xqaXG9FhDBi1+cWpdWmAuXuomgw4lvnP3/OJd7gTVvBCLseHK5ahSNCwMC + + DWC+yFGCFZ2WJHNTJO3EjsQv6WcVFxvT+suP8crTFHftWhPGj1Crfn8CWIvCmqCJ + + YjT4Rj7UH0+wRmwDudTpQYYAoSUwtg2XJv0H0CUL0+S+Slu7mL0ryHhYngOK9qiS + + GZ6Sf+ztYWTW0HTHun9jVzobInL4DxpZJdGJi2+j2WLdBQpUkEnmtIu+8EPHU7+o + + 7hpqMw1s4jHmPcrFBVaxfXhHH9FhXyhdWXe5BgX+0jbeaxZU/DRV3M1MxTvhYTDI + + VYKozvUCgYEAw8W3d9m/t+qOLX0nfo04S6kUb0/QaGvxBa206SzED+L4zUqI1j3A + + oQ1Aub5kRMnzuJoXa6kZa9JSgbaMEXe3sDhxlv/yMOuwDDM78xtWXdYkAkQa2tx2 + + 4O8jkf1+15p9+mKkY4JW1Pnu92kPZwyUqnoeJ/bX/vcSnYJRwkc50SUCgYEA6ubH + + wm042jqwU4b1FmMHSxa6CuchWbQsScwpkM8fgdocDeorKsySeIYTt5axRQeehYVU + + 4rs9/vLUzyPOAPWYoCZLhg9b6GxwRKAhbdfS4thJF0DblgbhDvWIwrGY5g62HEf1 + + yGae9+q4bN0Dsrdx3y9YFzMcYqIKsb3CUJo8PtsCgYEAt6KkoBVuknO//cdh3oFV + + BxOIiYkScoCdyrfP9ND67/P1cYuyo1O1dtxZlGGU6DmPFd/kjCZIJC1bGzVCWbg0 + + Y2XulrdqVJ0fu7HrT/SapNaTXFTJ4/XcxM1MTkq8Sj0uYklY7cZ68LeoggbYXc8d + + PHPkCZSvswfLPFfbnSL2hskCgYEAvRg8lJpCGwMFsKfColvjoiHQcDhxk3nD8UBl + + 8Ymaznha/ySTzWdTPayJMNAhMfWZOdkEZWTf2l12zK0BB6qtS7aoM2onzWmF0uip + + 
IHiN7ki4RfzTB+nPwLANgNVgxUnwdcHD7KgXrnGINzKP6I1eIJFHM53Uat4RB9Y/ + + F42hk+MCgYEAn31DL0OuzeNEaR/Lb4nOJFJWwCGO6XN/oAujp8bqkuRpkS8WzgWI + + fkJK7eI3TI0c8k9hF4hpt8y6Nh7aG3FZdXTHMBS9PRHaLgpHaIFCihibSz8a6kAT + + DIGjDMXH71CluU5SquvYcb1OY0ruKEMlDv4POOnUQiy8T4NPb9K9AnM= + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 2097153 + seed: BTBX/ampNfLU+ox7xipBGiaSbgC0kcB8Gy7BkJB4oKI= + zfec: + required: 2 + segmentSize: 131072 + total: 3 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:CHK:7egadlquaxmh52avu4kcwamo5a:svlhtfwkizf4z7adzqc4f3ijpkhhpoocvemtftgwatzbhxhxsyxa:2:3:4194304 + format: + kind: chk + params: null + sample: + length: 4194304 + seed: 2NVWOXvtwcAFf0J/g0fDC+ypQbnCU7awZoCxcnhpkVM= + zfec: + required: 2 + segmentSize: 131072 + total: 3 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:SSK:yfkgi475avdtdiruhn65b72vai:v5psft4tzbi7bdvht75xdp7sxosfnlpr2hrloo2e5zkutvox6kea + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEAltENlm6LjBC6ZIcNJlDZpnYREFWSKsK2G/FxzXRmgB7wGILu + + gg5bAK2RIv5/qj2iuPc1pvc0XxAEiwzarWW/Slgbi0p55Jnj1SU2IO/Go3iArELV + + pvPT3pUHSTx3+mRYcJyWgeofvC7yzcMoR9Khb04/KV+/zkgfugivS9i8phjt/LBP + + hzQ9SGRLR8CDTBXGnXJd7Ym3Fu6pbnlS9s6VJGjGoymqyjJts5Icu10BGNrf8cUi + + Bkfl855JZJ8cwpyoK9HT8tirHGlhuT+/EFmt0dhsTrUcFZwCqai44L+KWdrTckyA + + qrlrVG10WEjRba4Z5dDFla57E6dyhRtXTZ6NAwIDAQABAoIBACjevi/mBSsP3XMg + + pg+cGV9i33zts46i9XbdF1n2EVDnEWmTEc9s1Hx6jLpO/YnE6jP1yjRVCXw5ewGz + + mg8jY5NiDRTSOfYZPgSk8OY8FDh4j2YfNobnzKKlADR4jorsZosd5CuQpsj4cBQS + + rvfHvLfNHJC5weDE6tQfRmHnejgIXvYlL0XVZokLzQPpLmvqjA1ueuQp29pUjUQt + + hxcsYwtpxf+g/LGhRxBl3HPeKID44bdsN8Zz/hzkqE9F24vKFNcuvjMnK8mM6oin + + tu3fFLrYhXHzQIjQjtK5UNMFxbKZe1Ya48njuEBwGxraYP8WL3MjTqPyyjDPb4Wt + + tFZLUwECgYEAwzQHBOE9Q95aosur5sXawVMdZggKK0HNx8uoOjgEsX27gL63emSj + + QcCv5ccPX3fbuYKbUA6W4vQ44z5/0EqJ9sJ44631FigwD3U3+ZZzeBuzGwszgHHP + + Byj/CoiQfCV2lZdwhwWxyHRn6IKvAuUnfiaVzVHE3EJrZTkx/qD2El0CgYEAxcn8 + + 
PZL8VMQuPbT0+z1z3x8NWlZwUwl5y5mqpkji4XaFWoN7AYp9Sa0MARO/J2g918jo + + kDMGR3tveMX5x9Uqq2WbDU0dQTmErb7skB62qZNcNA2k28/V3e8/ijAwi1EAaIcM + + 9PaQmZcFnEglKuE3D9m2j+poQ2+KABOtd/uJ5t8CgYEAkoW2ExKi8xOvgu1Qnku7 + + dUvXEGROhcPCHAuhvfmYhEY1fWEqxgNOjCd/oQF3Z5jHZItF26Tn23moTeL2+7lH + + r+Kv7W8BPd1yndfF6WHmUKyyF0WkJfDHjr9WGWkC0z0nswfWnnNGzImcCWo2xfyO + + VWHPJiwPkamFhZiWD2Rw8L0CgYAXPQgZ4+8ptnMIZP5zlmDK0kcrWgSQfQiGV2Op + + bd7aRqacX95P7AmUYnSKm9tVsfWyKLTKXHRcabBLLFeQlwcQZDu3cFwDkdJ453m7 + + 5R/pBJtMsl2wRdcG4FlCzy6k77twjI2FKoMKyKesGP3k79kcT6QXfJ8LbUt1ftpe + + wnsNWQKBgAyP1Ehw6s89JJMrVQ4AZwm0xsBBdLQGcFKxuSz1zfujc9GT56nY+sbr + + 2PbSIwCfNirtErxPvznuciUwoHJVLUiPryQDR9g4+fq7+Y3TkeJXhzJM2GMM+se/ + + JRDDbe/EfL8E498RrE0BvyzLnPsj1FWHSRUpqNFsbI6Qm+lcS//2 + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 4194304 + seed: 2NVWOXvtwcAFf0J/g0fDC+ypQbnCU7awZoCxcnhpkVM= + zfec: + required: 2 + segmentSize: 131072 + total: 3 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:MDMF:qv5ksusf4pk762lpi5rn5l5eqq:pbsbizy6dyjzjhjr3acia2hbfqlim7zp3o7ljxlzw4vh6x7smk3q + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEogIBAAKCAQEA2iBFBQhED9S2ED4ZeUbf2wLzriDl4rFDVEecZdabAKqh58zq + + EPS6fqT0pUAH0zQV3RhUOEXLENcro6ilO6gpcDminsEsm7jgJNCvAfRF1JCTgMQI + + eLuDE7FvA+ZCdvHQRyok3RIn41qi35qVftx3TzQjsJX7qKDhnrfhuKmFIbMYeQhv + + 5PZ6AFWWvdwFQ/3fpqC2yRP/dS+7Bub9JbXm1t2qVaFEzFj7mwSTnhP8SMT0I7rE + + WqZqRfBx6DFzlBshzovCHT/QhEk7d/QyviH20cqPqeQjNAImsyfFKjnxmIBM5F7D + + eXsfQSbFlkGP7JAqfW990KoBh/9RxmBd/5vYEQIDAQABAoIBAAiyz73PIhO1ilsk + + dtSYyHWN7RTNGA3Nvt8eCfUftUe2BkXdrJnngIZrYpwybP11rseF4FnsIph11DYv + + FAPIhXqFud/12ScOnNWrAsejq6M57r/sUWArLiN7aG9x38WpiAJGgnjUcAXHiAY9 + + vmd0OEfOzvuMR6BmZgjz0UsRa483589VqSk4y1v7v2XjqJ6ubWPlXvvhU05dwMEh + + UmHHGi08/MLdsDKaKUMj8Z+sq0TLu+hDyew+fYEcFb3lNwwkNgPxiShJOp0LkhGs + + Lx6nt7f8FBTYU0x4vcrvMPNGz3cap9pJrmHuxhHrxSC56lQE7jYBiixgXBiFKflS + + 2nKagYcCgYEA5TzUGcMPQdNiCGOGdzSWsmrQNhPvA95MK7ChRI3aCzi8/f230oX4 + + 
uJVJ3o6CJ7rX00iplZbASWD2EmKF4Vr2ko34Z107Un+0HdiFrkYPcjl6iG+JL16T + + Lotx458Taa3mbHwGWtH82z7evWZtTG37RqXDQrIocRwJrkNPEjcpuS8CgYEA85da + + PqKeI3r13pykjZYI49XR4shpMFF7xFdLG3eJMKgNxLL2z4vvuicP5oP1QhG4wKqD + + Ie4gCxcSe+4wseBGMOiPVBtSITrICkDVVuM/r8W70ae6luVx4c38tll2fF3IKeC5 + + 533+roN/OUL5h4d4UG30/g3AvxupYggaxVjIsr8CgYANQ4fCNdccJ+70LU4Kd7CA + + gk2p011xC9u8a2vpW4vSOmY1DAkm1Tme9IRhrD07r0PtpbaqQR6/IC0cwzab43eA + + 41YMJQjZrSnu0Chr/QHHyiuc2VdGtmItv0PHt9yXsMg0Xri/aIcI6IpayyJn2bVA + + UTcLFOPiJ40n2B0rIKX5YQKBgGMFRrEph+FibapVwOqxb+G2HMD0uRXkOczBs41x + + 1ToLRrWMDpqmBwiEMomBYOS/sXvYlL/pPetkMKZiWDcmtUHSd9k31fYeIA1S96Z/ + + cHcyiTwb09TdZqLlCnLSAUFjGigz6z54UFx+pewQFsGKR1VirXHNA2psgzmPk9pf + + Ug6fAoGAfqx8mQe5FKrT9gvO1TXIwFZopjd4PGhCF/QZx/1YhVh7wkpocnXJQRYB + + LIwKrkybc6c4pVziJUKWop26kM3Hs4G8tZIWy4D2GQqpzTvrabvU8SKOKcoUssof + + pMF5GV25DLc99P/xQG66qK2+5kwqU8oeaf8mElO3Um7YAPLpzYM= + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 4194304 + seed: 2NVWOXvtwcAFf0J/g0fDC+ypQbnCU7awZoCxcnhpkVM= + zfec: + required: 2 + segmentSize: 131072 + total: 3 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:CHK:ej56vl65kawat62wsxuvbcd2wy:tv65cgghaddq2p7dyvimmff24l34wsx6mhzd3dyprfvrdo52qtma:2:3:8388607 + format: + kind: chk + params: null + sample: + length: 8388607 + seed: w6uP8Tcg6K2QR905Rms8iXTlksL6OD1KOWBxTK7wxPI= + zfec: + required: 2 + segmentSize: 131072 + total: 3 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:SSK:6h7uw7l7w7kyvwnfbvdez5iqii:doonvzgovuzwr5nzwltigr4s5qnsmopyeh6a47dgi63nn3rdzhaq + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEAsugmXmLZaZ+2aa+sz48xKVllHO5++98qvP9YdGMUa6SdPETB + + lzIIrtvff8Krl8KjaeRy0TW7MpSw/QO/BK8aDWPdAmLhSn6gayt4Zy/r3pCgM2Ed + + a0lkTL+d/+J/tjgGf4u3h+SPktv4plhqfU8ZTHi9PMFzkIdz63thUC+RSSrVEP1b + + KHzIXU4Q2nKVG3TCTQp/yW6ytv5OBqCu6Mci9N9UF6fNecoSKpLCYl2iNYLHp5Z8 + + 1WPwLLdKoCWwDGT1ufkyLb6nroYB6V38IHgb7FKFGXOzcHBPd3STCY/oFiaTvmgn + + 
oknsBEnunVa2kmbyOfy26NOZ+qdqmpPGVuoxMwIDAQABAoIBAFAtZwiUvz+tUmgh + + 0U4Bq7QOuphRL/p75KDnxIIAZ0Xoc4jvfVzfkPGgWxTcLt9n3KlXtrcYn+jGp1z0 + + oVYdjQzkLMdlffbPMeBljmOcH9ZSNWFhS/hpXzhgBZSIMtj8WbkuadVOcqOLzm7q + + H+tBmCJj19cTEVH7ylEFrbJsZu2FMipm+Eirn7bALbUkPlNlzmcjUT4Je6/OOny3 + + rjMEOhQwnfhcxTL6IRDeu7lJq4l79+1+7HlRGazt4QrH9fjAWNJaWZyDJEZ9wmge + + TQKueupXiqWC8w+IL/3w+8arPYG05iSxCtdb7ehzF7FVAKQ9sw80YahsbClQHNTH + + XAwZl/0CgYEAwHwVf8AXyOIDgGAozn8IcmICdlXBf3Jr84LcDlAgtNtbx85fZGT6 + + r5aMwpEByg2RyhDCzx4S2A2+FGhRbvjUFYV5/Nak/5hN1leTVAItnzWGTSkcJuVp + + 0VC295BhRKBjdmZVBc3zUIwGKanD03NcKJ99nxXGm7bmmJRrMO9bxLcCgYEA7fEX + + ef+UDJeHRxI4oH5lAEGwydwJ0kIRB0DfuPKdhXFEY4ihW+rppY3A/NvEUZAil08T + + f8MUJEaIF8MeaOCSvY1M+z0ynykEAkFxkSfTnCsJqMh0p/2aNfrWVDm/60lbsGzd + + nT0xMwP7RDn7VYCQpzFSlbgOTwJ/LEFiNmTEE2UCgYAo7LjtdnwYG+W+r7M9ZEj5 + + eNkpK8Z+QGevWI1NBcBOc60p6Djj8YxTNOEspQQKX6Q1oCarPqunABT/5cYaoBEH + + ml97YG+oYEt8XRZX8Dae+RRa53iy1GgRNuYP8MSdgLRlAhDlsQoggAT3ar7WAFsB + + 0Bc3cbvOc67HlhbMSrfqNQKBgQClIQ4z4oUyf+6oCiM2bsFVfkFctdIzExqSOBmL + + VwSu2T6m/OlOyya/eDMYyMPj/u2iqIRVxGK0EibcptLx4fi0h92G9p+tCV/42MYi + + AMvAs7WOZx9efoeJMr2P0kw4075IICVkvFTqnLbCUKL2YbUB8x7nPMbhWlA7vFyW + + dzQEaQKBgQCrN7DDrxZapHLMnyL7Pjgm7st0CZxMxbcY/dkmw/7yo4X4tUiTyYoU + + oQqtcEzYKFrgQ6OEtq8i/9yVmtgE8k70XVJqpUGM9VR97MRcdzEKypGBnUKOamcs + + Z28avBZ3ygIN5L48UK+vLMayI4m4RmUtMtU0jFYkhIEcuhTWEyGQcA== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 8388607 + seed: w6uP8Tcg6K2QR905Rms8iXTlksL6OD1KOWBxTK7wxPI= + zfec: + required: 2 + segmentSize: 131072 + total: 3 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:MDMF:e6pfkqn4wpggq2lnvc2josmkpm:hcfjov4qzb7fkiucisoes4edaymbp7qzlmrn6rqqbt6tsov6ojaa + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEoAIBAAKCAQEA4AffDXMatwajd6NXgtaCi9gCsfH/r4+0zwHKTdwl120zd/yS + + 5HZYFhO6TbCAuipADxUbKgaRTf7NAo9NY+T2Wke8UFfds+tNfKKlg8uGIKC4IsHS + + E0ay3GVl4nCr7y/Y5Rg9ECmIE0ROM8b+iDKVfgD4ztigPQPv8r3l/XAzU1yx6bVf + + 
n2Sz3QzCHlkjbmS7UpcEWifUG+8sgO8Ee0zBsOjdFyaOZMiaTUPlNvosXvKQUGoh + + +aBrEung6ZW9ahWKeBaqpGh5D+T/pqWIzXBX//W4gMS0v7+GAl8h/JyAz4yW4zac + + ox3br5okjgEffJbClMI8mU3ltxDTWbKNeps4tQIDAQABAoH/cKOEgofKwjtLGdwG + + kZ2SBZlM5is0PqNLjoAOyakvprMcdsnQNemTzhgdJhcqod0uoUIf+JOd6OJYMV8A + + QRUCNc/bla5OygLN8txmW7j4tyXOk6so9Igpru+Vk42lLea8Jry/9vKhnMUo1bsT + + GkLm5uFNE0VFS7Fl810+IS7/esTmSkv+HNNxFUHjBDEx6ntLH4JYvZ3Z6Brfum/N + + Mq7KBj26v+ShSrTn7uC1cQlxKTyRdudkpRX8qiNqPrL98b/KrEGaCBsyODx0JgA4 + + Q6mCt6zdniHrRAXhamFdO3f8MEx3VVYKVy0YyFsWjo4DNhWVKYQHil143CoEwXku + + JVlBAoGBAOdJUfNvYyawktf8LmunH9WAjpqDtCm8/XJomwAD1ozf3P522+yg/Dxv + + 2bwW4b+9iI/hXWuXJ3RoAVRfSy7TVg7xN6WmnZoNzv4tRGCGAOqg9+VYy57IC8Ya + + 7HIgcBqbcVvj1TMIZitRDl2t1sSbzdunFJlbHKfq86n26D7RRERxAoGBAPf4E99M + + NiY3TaaI3UxyY7xFf2GrVlwMENllj8RpbbVByQVJPxR4Hl7dj4aJcd9PFnAfaDNy + + SIqgbjcbibsNvpY/daFB/vtXUS+XKU3TLWOGHZ+25LKVOZFJhZfpSqcZHjCl/Tlg + + m9Ou6Bd1MOTcnUWfECIi+8zVpTd3sJQti0qFAoGAQHrpdQPF0cCCf+KXkn26W0yG + + 9T7omIZO5nmRVPS8+PNkajD66UKMb9EDE/QRJeKSUwKSh+9RGZvxWvNiQ4C5ylqn + + l/AWmh9laOl32a0iTkdoNTGHOxIsbiONbdfrSQ+zD9o50wtxaHwllCpl6NRDFQzE + + qmiDWbEgE295miG/dZECgYASV9egfRLEYPLtjtJQBWY7VyjFINeSl5HngwvPi70B + + 24vzSCfSa9BTVDB501EJI+CVCr26kImtN5DvoqndnHasxqT8+NTT4vGug5AaobSJ + + 2DH4zp68Vy2bAcVQJ4HOOp1xG9ZPmEXustGYaqLjSy6XJ90ZqVzXGjbOk5wMWhIj + + wQKBgHgsuqxEqTLQIqn0I4E/VAKdBuYaGngSUB0frVKn1N9O3HtJhG50L5yhWUUf + + UvHCgaP06ft5YLVUbTkelhgCbG+VzsFAjRFhesiHp+q65gCZeQJN1Q3JReeFjaw1 + + ECQqk2PgtN/3t0VVrmuyLBcw9RETnN7f2mHayo0uU68AU1pk + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 8388607 + seed: w6uP8Tcg6K2QR905Rms8iXTlksL6OD1KOWBxTK7wxPI= + zfec: + required: 2 + segmentSize: 131072 + total: 3 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:CHK:ufshkmuewwql2xsyiecbjhh7x4:qoywwygjqrmifowob3mvwkhfskk6geomyw5e3qlgqzui3kymu6nq:2:3:8388609 + format: + kind: chk + params: null + sample: + length: 8388609 + seed: yPi3JHKKbWaEEG5eZOlM6BHJll0Z3UTdBzz4bPQ7wjg= + zfec: + required: 2 + segmentSize: 
131072 + total: 3 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:SSK:zily66v5xodir4wz5conizmonq:6zksu33nods7hita33ju4wbvlnqyxp6qkl6mmtqyl7bh6gk46isa + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEAttP9lzqc+4EZYh3s6IcBlfOpaL7svgQRLuXBYEwd0GNz4FfN + + v6L68CgaD7pI6vgUZBOnYm7c3z8zBdWibpH3V0qBufWnfFdtYU+20ed3wWWDoV8L + + z60tj4GNEHxwggLIclX3IalHySFyOTY4K6lmYptxtFBksULuYqFIZyX724O17Hf/ + + H9XGx+D6ffpUyR2D+hZpXGy5ly06pN4XJsdNkgmv4YfboUN2Tlm6Ldg8b6ni9P+d + + BhjdyBqg6E7RFKG7Z1ROT+GzeN0Q61PNKPJ8/5fCYPoZEeHWWfZFNO/c/fAo7nsn + + 9Ei/HxUju+PZmEU0jcsHXIvL9LQY/2b59S1dnwIDAQABAoIBAEQ90HP4Lsw5nc3f + + uaP5cIAWGO++BAPQ5NEKdSmKf75ewMvGOkgDf4LQlRm1wK3jt0i7hUjadJrnrhXJ + + bf2zgg0VBGLy7Hce8vbVmDm1GiAX0hATuAbmbxEXnB3BNQVyIHt81ue7lc3fLBFq + + yYCSlGLN/pz9PPhlMTGjXbESnnWKi4NVtX/CZUUEymgfiynfdXCf7s6wlYUp0WiN + + LIHBRNwW4FMCvp9Du4W4BvJufWp9v4yCoIpL2SyGbYE/P1dMg+8ZLVHdisSN4+N6 + + V5+xVjcx/5IX+D9FR5wXR3LXZx/eMa+dsEvAPmQAJTYUAVKzMNOXoixCSsmyV9qj + + zRB7MFECgYEA8fAAPiq1W2miLyTR/bKMHLIbdrEfMpGTrSVPTw7PdYkiDbNzpTJH + + uUwxwhHFerrlPAnxsGbbVZG7iT408a9gBqL3fOCP2095f3wg3Hxu+iWtEXLuaGJw + + hTOKgN9mw9LtHh9WT9JBeQgHU3V199TYNdd7a435jxf/+77eUN+pYZ0CgYEAwXRy + + +Q5+02YChU42JAuCnOITnCLifK9U38A/YaaBtMYTpVd32GSefCv1emA1KvnTjddA + + m1br82oCz1US5YsZsfdmdnB7mA0AGiM4GTHfUBw+N2gol24lCY2+wsi07U4z+3K+ + + HmGcAP8FLXR7/slX/FwYlcJXz1qxYIh5rWnnhWsCgYEAj18PdcevY3WM4+0o9/O3 + + 7kVp2wOJnlkAr4m9nvcC3/8dDAt9C7dpI5jQn9YSNfHNaK/n5wZ9Eg9jmCgiDdtE + + x4oJqZoWBfvp3y969c5Toa90CTQXrgov7e+mM0qwRnmXhNNDPdg2bnfgh4fDGdOr + + MPT6MbmX20F4tAHfEwQIB00CgYEAiAedLNnvfjC1xwzG7zOUxUIHLfwtrCURlkA1 + + kTGm9PlvKQ1HPUcLVh8G/uUVncGL66oXSOOnCENb9HRK1FOqXsSrLM9NaQ6DKt3m + + /XhfIZKqgQVhvZF6w6wDHi5JYrBhxwbY/r3+F4k7F8pXwkHL96y+sNe2LR0Fqu5s + + OO9GGD8CgYBKoK1W1QcPbScWEehb0BFhDfew1dARf0M9BQDo59n1IP/fJLAj+irm + + dT0Qq38vskUzH2nlohVbOiLRnKIqx0AJKYCpfJNe3jYkzTDenQ2hONwhK+0I4OWx + + MHe2ZVX046Ou4SvrFWoDJ1xKCeF7sjw22SEaF41OWyzyarhsK5pDVA== + + -----END RSA PRIVATE KEY----- + 
+ ' + mutable: null + sample: + length: 8388609 + seed: yPi3JHKKbWaEEG5eZOlM6BHJll0Z3UTdBzz4bPQ7wjg= + zfec: + required: 2 + segmentSize: 131072 + total: 3 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:MDMF:nk5g5nxt743nwaoqk46dl2iy5e:jpwpdslpyhgafsbny34auvuyh65h7hvz3fioq64efhijdz6aij5q + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpQIBAAKCAQEA6bFJtYwt7TfdVJpjgXe/PTdvk7MVxPiS8qFSmtFILf3Fvof1 + + hmCdLayS4QcL5EulLTtotvmFPb4rG/EHEwY4VWEwulw0FXcoi7gPhTYwbyf/MPBU + + IBI5crgXrkVRBY2dqVKlJnds6EjZ33jgs3sVx8q3eKUmLencGKhalUWGiGbEtB9z + + YgpZetlWdXObPyU+KSKB6kv3RhHsBTS42WxQho85wjMiT58y/duC5IvDcI+ibD2A + + Iknql5DnN7TdanjpufGYh3x0nUhOUlK9whs+U0ip4P+YooWcB+ySlgsHy2lSOYvO + + fgWFWt1Wm94fFyUoRwA5ZUSU22y8lkVpVHVj6wIDAQABAoIBAAgd8fsjU6kMqrG4 + + lxeKdZMGbOgKwrPPv9LmSuc5Dl0X68QSwYgZrPvxeuc5W8QMjeqEnhN/lo2EIHKH + + rFY7KL5rx3SBpVwlgmav+Eyz4CTMP9nkTxe6FP1owDHREzUH1opou3ca6NEnTqIA + + zH/kw5Hn9vgddwuwjBuIespP2udmkWo5UXxD+3irKP7lG46OBYF/QHQLrzklXeR8 + + fWyUgTxKD43D1RtyyYRhV/7hNTrEgNgmzEMmFVTpqfBMxOu3dmBuC3XfORm87rPu + + lxh1glQ6YlES5zYyl/tSNa1CjCP54k7z6Z7mIvXKb6LCrZYw3gjCayqbec1n/OMq + + trB+xwECgYEA+Eyq+Frp9KwiGwmAZwIEhRicm9e92fO/v3vQfR4Mz4u3rFSzTzQM + + P/LXRx4YqDs04WJHOy0+oO1tiDoaCQbNQLqKy2xZBukGiUX+K7koMLieC48eeZEF + + TFUhbuTDMQWjIMlFGmWDG/L3yU3HGPazUkJSetf3+Q40XkVhPUvX+GsCgYEA8PCm + + jwSP8p+PkikseJ2q0aKjS1RUZDqZqF+WeepQpGQSnMeTOEqh34fm6epggBbVHk7w + + 7+efTuAx+GRtcgF8anGT34WGb27TsfuAzV8jsqb+mnttjcG2f2RGGP5KpJfjGeo8 + + EU9hZsCfLZQBd519BPTrrnePNvlw8Ok1iQ2pIoECgYEA2KBC9YST4uQeqUoD3Vq0 + + SM5tK8Xwm+t92fiSr+X8tUInT9Fh0vMM0On0CdbnGjb1bsGIdceGgW5DhntyZXeq + + sRNOriVsEoxRKIiJNOpIdyFKubj2lIcCgVMwZQhuhyFs7djLUjlIRqUWq2kRD+WE + + E3tLbGNps79BzxFmwcyestsCgYEAmb1U6klE+NHrsJ3pLIWeq+mVPMnwl4v05EUq + + JVzoXB0m6zdFr1Of+pwjMftF3DW1g4Nnpg0r0A6qlA6w72AXXWxfqO7wm0Yiep06 + + 0ND2XFbGexhrDVsf8iWvvN72DhSE6tJVxc3bHs+mQlUAoqyxS2pkwIy1q6R69p44 + + dN1soQECgYEAj7BSQZHlzSfYK6SgZTiJ2qDYxsIkcAjCicijT+5h7FJd2U8pBTaV + + 
1/A6UbqwmoFlCXHuGUIOpI69V9nL4BYrzBL53RMkpx2RSvSwUeHW89wsgZh+EB+v + + sgq76XdP55PL+2oJNJEsB4vdxQbp40kNOfmo/bEIF/vurNYQklKb9FI= + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 8388609 + seed: yPi3JHKKbWaEEG5eZOlM6BHJll0Z3UTdBzz4bPQ7wjg= + zfec: + required: 2 + segmentSize: 131072 + total: 3 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:CHK:sxrylthoxskesmlhrfuxnifkdu:kizgaeiazgpjgsffkotumbu2dtxziezw73ybwo5pfzleuckqaiwq:2:3:56 + format: + kind: chk + params: null + sample: + length: 56 + seed: YQ== + zfec: + required: 2 + segmentSize: 131072 + total: 3 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:SSK:2tvuk7uigxcngmcxx3nt53a2my:ssk6vy6plxp6lp7odlqhybbjxs3tsxe6j2ehogpf5b3ml7wgkevq + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEAwHgjo5YUy7LGiFX6GhFgnv3iB+yzyB6jjIrCjJe39OTI3v77 + + DWqgLyKSUApAaSuZ2bMqIVcEvTIrBuWczfEhPm45ZqpvR57UqNnAe2z2CDIlTNS2 + + Mg7UWNaQigrvrM9wOnNoANXb0Gsm6n1KkY3JTszsAH/P8vmRXvm+rZsjeLwAqkO7 + + R2eUYme0hZUldOHHZSlmz9FAge6ajvw/lIIVg8gbk56EHBlpJq4BTK48REm/hzVu + + xQV0JGeak3zl7GWALoJXtkS81NctORsg47EiTkTxGEgXS3PXqDImEKoLrX9Sfl/f + + kjvpxOsnTwqBTu2fBhUJREoXMbJbioPFwwPIpwIDAQABAoIBACFiVVAxHo9MhZot + + S5HM9NTvFY8pU+/AvL6KbP9k65ALRPpFAPfNSFaUqQtAE/cKDIgRxxt8VAKbGpJ6 + + Lk4cZpdFGCjCJEYoexuElZnzBuPaCtU+ShH5t4RnRy/igLsZSg6haOdIMPYAOAJR + + VCdWEBZefgsCIGg1OK1gJV5IfAkbSXbgUKQrt9FNu5Nbt5rRuuLg2CbB77UAXsiA + + tCynDL4kgx4sgDmU496tU1Wt60flwb2Ka5SwnQwkVDM5LLaubMyKsL9clFow1Oyf + + TwSsERNPxrYzkWUFhYxQmNUZggzdixiHdKsqYCnUzPJ8s5DV2iKSqQPVW5DR7FFN + + MC8ZM4kCgYEA+hOBpsfBTt48yBCPAUkeiLwcF0oFrQ2Q4wRZ4ldtaGlD5J16Jk/4 + + HIx9BPvfsm2Q7EKZC8CfiMEtBVhjZZmJTWbZz5dH4SNZwkaxL/JiaJ3acWylZmj/ + + 3BUj50Ysh50OCfgosXLUE3vALQQRGfQrYxV8SRo1UegeK7tX2pBZzO8CgYEAxQdM + + GHlKaUz1OgPgDIj3goXhm/c0qB5Nh5k9LSNsz9kfds1vEZNPBEES7pQAdKa4EVaw + + 7C+k4YpcWDAz3Nzt43bJu0CXi6wmAKEh9ocwk4f5Cms1s5uG8XpNcf9wcH806xu3 + + NxJgaMDjB7slDAW/DFVhPGum6E1z3fFYlpI+L8kCgYEAhzGgd++p299dcLMy/Hjx + + 
Hu7DKPwFkYax+2jQxwKIzVeLMr7H2IqHEbgJpnYcezOsk211m9ro5F+63RbptXWJ + + uuSNgCLC4z3fOp5JECizdudPvt4DlRfSqsJrBI71Z+NKQa19ImF3sYjHXg7CyAsu + + oYRuCn82sC8SkIXZevlq8tUCgYEArxphIo8I9rSSbFDtWbaQYcuiSf5VKeRketJR + + cEA/gCkysV66CyCj5OAAd0/JZ+KTS7WD3yQooNlaYHXWYb9nG/SCLIynIlaIH58U + + lAhpv3PkfMHzJABg2VMcaOffgdtLqHclSShnzjE+k6xarGie9dMba5sw5tuO0fyg + + ApFN+yECgYBG0t1+2RdeRK6z/hCoiX7kr+vWeki5uoJ2dpt1OfrgBv6ti0qtQFXZ + + rCcE6TzeFy/z54TG2kGwA4CesO5DtaTO6pXCHuMnP6KSm7pCCjVv2p3+7ZjxrRdG + + t0FDgTvyfy6hC2ecVKfdwmJtnarH5D8f41hUSzAnctzaVGJ88GJBCQ== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 56 + seed: YQ== + zfec: + required: 2 + segmentSize: 131072 + total: 3 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:MDMF:zy6rnxe2nwn2v6a2whohnb6qcq:gisq3xdaad4fzcle3plmfosulcioxsahzvc64aaftsjdetlkfvoq + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEAxZUYtjBWRWXbZxD/kkUFjn1ZzD4xpFq2Ti0mp5z5OJYISpRN + + ssUP1Xb0YnPY9w1QID1/ZG99xvMyTxDAZ3/fZbHpsnkdi4MRmGS6BCpa55VTRSyu + + VKqt++uTyanxpMVX54pCjfkWsf7Lqw2n7SZ3DUyVd+FL5gka9Z1GwxjUJl0kTaDY + + to/EQtEImLqmPZ+Rhx+FWatN7dr8dfiJugGaXlkEspkCxXHKbqYKVpWD/6bkAVGm + + 6+ILTVwT0jFWIiY6ERAJ/74AUEudGKKbgQCvdowrlfboyPL9SXumYPGiWCn2nM5J + + tLp1px8P1WOWYrSL7SQHiNwLhwdeGt5YKOfeXQIDAQABAoIBABv9JbHDUalFh0nH + + oOiTwfiAHc0ev1IArqQO5dOnGy/OoxCLhyEspLRQxEhBEGpY1rGmfIoZ+BeLgmQs + + Y5EVzmvdwtTvLsYBVGgB1s75wARfxRq+vFhOkFRoN/iAjDRS50OrtIdfkn028puB + + 1Pi1cvZtk5vWjLWisxC5jZlcBkuDtNk7tUeADjzUa09krQUHNoZ0pXb3T/39shrV + + gbD+oz2H7K/rKUHhj8FGt0DPZUPqQjnIk1nnugIdPqWmwM29JA1jjOKw/DLam5O6 + + Mx8VwSBc0YeAr7jsx9xBvqHiV6esSmRcydQn5wKMNCXnbAatyfkBxbyPNRIp/R57 + + yZyFYdECgYEA8hqLCZpfCnx3fSUi86+kx96BfBXm5R4U7wfVii3mTBj+Uex1J0gP + + 89KaOEDYAkAQkgZ6+0RqXpoBx9rRSH+X5pDjN2l5Hs9CY4ZCqeymt21aw1D+KV3R + + laqF9fiJz/8YaLunOgu1WosiPrBupIEjO8k3rogbw2tV4Zt8ve5zJRECgYEA0Oxc + + SJ0oFMveC8NzSXllNJACPUEgTC4qMExOUWWd2LT1tmI9YxUD+5shTsWRjNMzBL9w + + 
wZq9TBFz2gPlpFN3HuE3Psx1J3BYPHqFiFvm3V/erZ5mB0Tlx5TEkTI6Z4rZlr2c + + OOvEEoqwTDO25V+AS4i10XLUp/Loe976zMylNI0CgYBrlqgbCGMcAdwH3Sz/JhsQ + + Ry07u2/0eb3Ly6t10Jf7UVATkAUwA7IzJHAsd4SG23mBqyeT6f9rMv1/lxpSIYGb + + kN+ojFKrAmf6Wnvdj7E26n3fNmr8bxjobfNCL8TujeqHAH18Kh/ZsOLzAOzqZgkG + + VJFOGmZcHaL4s2Rn80NwQQKBgQDN0k0H7Gt0MXPLOv30wHeH1Oef2O0sn75IXqQ5 + + ZFahC4WV7Cp11lpaIXYq2FCP3/E/GCrJUNx0eC0d9wDhZqjP7ygx4dL4y6Dh1AKB + + V6iVJsGFYas6NhH5EQKl2EnZf9zkuF+TZBGCAse0Cq6AQhluUHxunyYJXzDR99Y0 + + tNd8cQKBgGhyn0VLhxicircIRSq1USwAmq1447wQ9JUFlhVlGDnScG0K/8fTKqA+ + + O5nP8tMyJoTRrTxofYQLl9PwENP3wCXHQNM8jUsM9g5nHJrc60cf9OZooL7o+y8/ + + Ntkn/f1DLOpYsnIHg+FLPBomF0jX6fZU0uxymF7XJU17ZW6ALVrz + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 56 + seed: YQ== + zfec: + required: 2 + segmentSize: 131072 + total: 3 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:CHK:ld3thziutpaqv25nbtoqebhtru:ri2obsvzl2etyuv3qnchn2wvw5mh5rjuvbjaqkqbvyprn7xwamoq:2:3:1024 + format: + kind: chk + params: null + sample: + length: 1024 + seed: YQ== + zfec: + required: 2 + segmentSize: 131072 + total: 3 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:SSK:dgbwqgau4tc3ndunfr4nvp77xi:vpob3vkn4ogz7vbkdspr2kge5mezvgf2sb6ghpjtboafjih3patq + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEAvE+dyJ0fXNy4h7KR9nekUSjgkbKUektsFAbSPJdyPI5Oidyu + + CPCCc3p2dtyWu7IvB7psDEqaZpSc/brUxGxtW7AJWybtIK1MPCZ5ZxnOYUDkgaLn + + gnkukWTB9+ugXSD2oDqK90DKCGQ4PykUynehYJpgS3B86hdS5MzFnrTcchzU/9SL + + Oii6oykEtrp+u3Gx/Nzx+n62AWlcvwcYlVN1wwHBRBSx1qt4lTUh1crqR/cLJnLu + + nNFuNC9rh7ucDDcml0HAERddsEwXxgzNLt5yPmGSlTTP0q01mlBHfP4FMpijPQP2 + + KQUULouYCebDEhWa3A93XU80230BQ8oX0bGmqQIDAQABAoIBAAvCuSr3TScijc/D + + wkPvUu7Sq7vNuGIu5bAWgPjRyIupo5QOmTvrsWn+4vkna66LQU6tQOQ/oIb5jxh1 + + m6Ys02OfieYMd1DMIe+7w2dCAFaok9zYzLakVNk1vrt6FsjaLyzwmw84F6YQhEbF + + jQvcDtMWsR7lBpgkHsQ1Wb5As2fY86walvgkbcM1pJSwan1HLDUGNjTGyXQZn1A6 + + 
HBDDpV8aHjoObUwUtmcYZKw7KIs3Ft681x2ykUUPcfVPDqCYFCqM9M54QAMg14QL + + 0hw5Tl/nEnPuiWRQgiCmsmAGuvYJvHvkA0QzjBo8B7S9wC9VfTaAHX33+IZ9e6NQ + + 7mXgCrUCgYEA5gLa9HZuKSC/vJuwj5yIp0BrRKyACOl7YYej4+/TjGiMPqqHDU0c + + MGTi+W11tAnZIwr2xbgg+rBgxpWwrlCbwYb1oayhHQfbsMrGc/RHSl48sjrDzgQG + + OrbBU3uWIHRRsikislaaCls/LUFfVddEkdoA3PqWE0cg0VRInPe3oqMCgYEA0ZaS + + MjUpd9Nh+hcZPyLPLh/toNvZYmu8e1HRZKCDl982kYmQL1t2e80S0D3P8NshN10d + + GWkuW6VJe649+FMshBwMXLlVQsLHrQozwz3BFil7eudbmJ1PU2GZZYsgB3h+sy4+ + + Qr7KVtU6qGe9+KOZ5eulJruyaGpEKWF9vrnM8kMCgYAtyIG2yWASFa+0pjTV0S2u + + RPdVGxT9MSRa/HnV5CXyu9i2nJD3R9MFmv9G8M/N/2vWOtd18bm2zKbmwGMDv43R + + TsDT5p3HPoovPZ2U9Rm/ptRkEahp+IkY5MnEiUQPv7eHRALhBrXwu8rugiWs24WN + + lpw3YDXBLpZMtH8jp3dJCwKBgQCAg+2db8+/tBRt/9/xQOz9gYJ6kpSXryxiCed1 + + 5p8Kb0rMryeEgncCrtsMafqp3BRgGG6ReFd+xrlqZ4uES6wOTgyeht5rE3jQ+GKJ + + I8LUThdzY45c5IkRvdUL2OWI7y/xuzdeQhNcb1+KiCKK5famb3pTZ+Cb+h1Vqnwq + + iU/MdQKBgQDVlggsXCqDvj9BPvdyMy+PpKKnYlixAt6xdrvLfMCImhvkYvJvmM2A + + WK1SaZqQ50ETTaxMyBF22NOpm133yadWSpBWIA59MBi4PAbVkItAKO7MDbvSXNOr + + Eqxj2sI5gwHV99+NqMcW1psLcFDb5E/N4SZQmuOKblpaEJTlZ80L3Q== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 1024 + seed: YQ== + zfec: + required: 2 + segmentSize: 131072 + total: 3 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:MDMF:6jynferbnzteludtwg2vd7thvu:oek57a2425optdwaggj3zxaffx6i7kzlbk6sbvje2lphuxdkoakq + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEA4110ZTjzqHn9RkYJDatXxPP/GuqSc7RFisqXYr7BnJ7fQZo1 + + W9Qij0tIMi8wgAkb+r+oeLaTMQqZtnBbnsGoWEk68kHFSglMInkTq/MZDgwPFbKe + + gaMdGQU1k0IAALu1Du4RPUWPUTsuxxh8u5dPWelT+SH99F7vxv6WZ4pk0N43hfrc + + Khfha+oTgBK0rVq5DmmZ2Fd0EHw0PCFFR/8BLrmyZz3jvuk1K7StW9hqXWqNILJk + + RL3FEy0rkYqxZuOXQ49inJE+o24hNYSB01H+IVZRUDB6s4/ZZBRHhrj1Mr75KzpF + + y4gfd70N1vVi/zOVj8iMMCvEBdXqDUdPYZTSFwIDAQABAoIBAC742jilTPV0CmbP + + xkQML4xRilUhvBLqXemgFCmC1lYInoAbn0Vy7JblCyvPAvqYpy6lFOWndn5Nvdbq + + 
nIsOYDypGGP/QYabqB6BHBbMmNMFm8I0TjnjHHpUUK61FnIQVYYZmfcqHUM/clkQ + + johk6LBfG4mfQ6uOR4Q5iZInjc92jYi2XAsDMaGWefVsiTFGpUUjlYCZ39QWQv8k + + JoxsXWLM+qyxYs+A1xyqRa+/mJLpuUV2uQ6H7rDtRlLiHuz8euOibUQDDa/ovXhW + + nId9SEatxYs4E34JBfc1ibqasboBVtrWhTel1OmF7QnNYZKXBYl3Zf6t3oD64m0s + + lkCOcfUCgYEA+qkiX25dgXAqyDrivkme/u+PnKXhxiA+2wdrY93j++dhi2mbIhxp + + 4KwySBQoCkHydawkdceMikS1UKSakRlJl8EuIntv370gT6v+22QxpWb/M9GYkxZL + + Vju5/8Dm8QQ1W2LmmaGjO0ef/+J3UCTNGncAQ+ZTNQSmy2Mi7uppg80CgYEA6DVJ + + xH1/neG1mlzg9GX8g2Ckts9WtcbBK8Nifoavo2ZwxUEAptNomdURkSx5vFaupWkK + + LsoDpg2mEfZzF/QQ8sqvqZGsC4bvajlv7YGJWFwkOwTasSYVQK70wcfy7aPeIG5U + + LpXbFixMFgsPbbIH3VdUfiLN4bzkS2iPiYomEXMCgYEAyijOyBjC4ToNxx927/GA + + givDr5s51Aj9qLj7K7gxv2CFk2LA82nnGoTGqMtY360AV1dWsIcYGgwAD+IxpwS2 + + DeaHxte3CsQF4zvceCT+xV+kQ66vVzGL4SiagmKZ35h9UA8b3Jw4gf7qU/3aLJEB + + um5vkFOamBAAVdjGu9ni76ECgYBDJJVt8XPjLQ6b0dtiD9NSEbHPAmjqKsxUYSyr + + tTo4HzjgcIlFs799K7Tmq1uP7+iT/6loGhWwACZS71YcSQBVk/HzMCH1O1Ei//Sz + + Uk9qc0ounpq1unNOvsga+DvwJv/llMFWrxIoeSqO/Se66k2H3OabimjJqxrACz3l + + 4UJcrQKBgQDk98mfTuvDdcUPQPK+eYK/RNp0W6jNsLht3s2823K2K2+R5f7a4ug2 + + SjW9v4cT+IVDR5Nz1XZiYWNuTv679v682o7c2SoalWne6RQzickG2s8h6k5D2qSj + + LyRqPtyr/3ty3pRT5ki10yvLvNbPusfrUMHYvRv8slJt+1NsGo7svQ== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 1024 + seed: YQ== + zfec: + required: 2 + segmentSize: 131072 + total: 3 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:CHK:pz3mga5pvqd4bpsz7pbyna2qtq:w3ptt7xwwgywod3u2rpgx7iggvm2kpwl7d2ryr3u7mcaxgqotlaq:2:3:4096 + format: + kind: chk + params: null + sample: + length: 4096 + seed: Yw== + zfec: + required: 2 + segmentSize: 131072 + total: 3 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:SSK:gzcs2in25goucrttjikx67zehu:sbnsmodidyov64f53aqpdkunrpx6siqw6v23h7vs375ftre3i5hq + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEAsy9CIECvk/doQ4KfrSYIwRNZxr2JmOvfdTA7fJr91c7znNob + + 
42FTrDmG41G/Kxfx7S3hF6YBbmT+Nu/LDs9aSO3mPoKjiLldKL4rqp3VXLGp8pnH + + EDq8oYLSu/mRaR0yjtrVqUsj8nhtwxGg4kMbORdNe9J4XtCCxQvMy4h89qiLp8uj + + 5fLGIFOkfYlaUQbw6pFxT0THVGHyRx5Mzzrdivdjeo8RFJsqJoZdrQyEEAXKkeAZ + + +njTxR1jrUQAx+RxtZygV5X0urUvSt1cVI7fAhbfID2MfszqHa8JAXu4DxnkBCNY + + ukf8Y7tt0VmfM4zWb7wmubQ3FzKmp1vZ3QprGwIDAQABAoIBABSlKkpLCa/Tvrig + + kUNC8ZlFYH+skPEEpE99Si1WMk7zNFBrNPFi4mAilK8WWR9e9+nq8ldmMh4FFuE/ + + ibbgHzft6SxkovD64ofyOVfELbQraDhijXQKQHefeiZcX+uriIq7HgkOdkrWo1bp + + bg8DylwumifdHS9XeOm5LVR6GmPU4D9I73OuxyiQZGY9OBAoEHa/5QbnqK5Olqa/ + + 3xdA0zsrHHQpJk2C86hR7+qshKBSdj8Z8HvsnMjUrHST/JEv4ptCCi7MdvXtlQB/ + + b79Cs6Zp7AGZH08X5fSnb2BgEB2+c6Sz3gRWBnNcNHXI0mty02G8MX++fTwXBJm3 + + 7bHPrEECgYEA1GkxFGYgHtYL4wyBINP40fwbQRVjp7gQdKP4y6/SCNaIojUTfbGn + + U+GSDtEUchrFqeCEd7bJwZPvTI5mz4LJVtBiikznqk7YPZc1IYIRl/qR0tLjPaE0 + + vLPNbe0hUWaJCfuEGy0J4vOKjXTqNud5qen28dqHp7alw4+jbP3GV2ECgYEA1/SN + + ycHbk0aiT60e7d9ecwz2R9z2ZQLDcIFlPz7Oa89OUqMrae9V2B/yO2Dabc5catGx + + 5Yg7Z5QGahJ5ers95XWy3ed3v9A7GDZwc41HTqdRbwsaM3b/YDcU5lD02HFaUqeT + + LjiEn9mJ4JlH+LbCOESORqxAoZjjYjQC5U2+H/sCgYBKjf1/FnVxvVmAwRPVzPEJ + + 6z45suM+rDmCZ0ddXwIOvhZJMO39cUy1AXi9oJ9XiZQVk0uLpWndeypEKbtmXJaE + + 1TGxL1slCPWXcKpib3/zYyyp3gGK0TlsfoO9cL0AEEhLa6+rxjwxH6BjFEVdLhQj + + Eo7txvFUaaR3JsSK7ewfIQKBgAq9K4vb6wpg2dNyfXZAxFaeT2T1dP9C6useVCWX + + /vXXgkKTwKXs8+zicc5IG7SYLXpWYS1T3/hfoQ4HSykyRHqzpqhoSUktlrK2ilME + + tIYRxffqqmviwAJN2uk1H2fgAyjXEnea8eVtEPEtTintFK1to1GaYUBn9O6+PWKf + + whOfAoGBAKSFBWcsn3qk42K7ZFdurqj8FbipiYNZYXU6AdOWxWqIMzZjEi9VBPNI + + rCun8bXlBFUNtJqLE17n0qzpcr7GJGcy6pqFcE+U1Q3j8uxJEqG/z5ZG4phRxVd9 + + gWP2U/qIZivdIymYn07e8hlpTEZgJcWjkVJ4AdB/PPqfa/m8t8Xx + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 4096 + seed: Yw== + zfec: + required: 2 + segmentSize: 131072 + total: 3 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:MDMF:uvvcinwvc2gxig3hpi2os6ajva:yphtwtymxs2t7kslcwgdjmx3xu7wqzknff2m5jte4meilabbmhpa + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN 
RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEA8mW7LoF4muvoZ0pfWJW3wsY3VWXLF/4RgeJvrBQy8k7ht5Lp + + 8qM0z9SJ4EgryFgXtG9e9n4AN2dPnbsEpozcxrhYLrQZPG1F9FwcppKPZqqGoBYw + + D1qeUHTqrOvtG5xHmoTCD2kySrzn2JA4pbAXvVn5EH077LgayDX9jaZGofUsx92d + + THw9argVEYhVGv2NtAGvMLgyXBSaH6SZe5X5r5YduLE7d/maZ0e6i7tktjLAyZmu + + eDw5QE6AVfy8/2bf4hCbMACnuhMSu2Mb29zgIn1iOgr9sgxRcnaKRiTl5AD+Dc+q + + MqekwTQP9KfgV+JM/x2vMxL7IO2YZbdc6f5Q6wIDAQABAoIBAEY45pNAetoWwcs6 + + poiZRxUsK1eYF9ApkJTaLpPhfijoZUezTgc29NPItPC+t8BglO123kH2msVyLoR5 + + a418fXEsco+FKVJyLbPvA3XWO6j4eeviwaWRERAp7tqNtrErAytmjnm8dg9kzp3U + + mjSV4Sq/6AG45iVb6JZb3cqtgwTj2+7JJIzk7MsG1NsL4CH3bQTkcfK1XHX85WD7 + + vkmN1D5CigOJSxeyltPpMibCpuZmyUbauVYN0X/0hPCWeEy0u5cnyDL6eKqfnF5U + + OhWnrghTqWcVStXbDwlXp8g0kO3hwu3GIMrcJPjQjCJn7Nfq+c/IiFEuiwdQSPof + + NdmBYgECgYEA9epQwTVK3JUYgFDmmR+PjYTAR1TNL5KOqt8tqsUXEcZWNYa4Xyfw + + 6EJdvSRaZCJJd6luO82QsqhQu8t8/pD6vM6qs5gNwnQ27bYHdy23gmP1iXyHTW1i + + a7nq709lDardbJsriSvk/YMCH24SFRgyozW/3vrZl4Nii9daPlxXDIECgYEA/FZ7 + + 2jPP7EF4bRy98/7HkpXfgNeV6dntjR6h7Zb4Lija1IIS+7459Ttgni7fKNpF5T5t + + TKHj3cgnjg1hBCcba4nK7sGvX9noBhcfEbzD8BGDUd35PWY2W4/zR3mkx/ueCtxP + + vapwWf7vuOEj6+GZlKk3hL3/PrL9ym1MpIs8l2sCgYBRmf5QDpIX3jWyJqZOe2WU + + TU/Mm7w2pAhJdSNfPmVoVYs32cuGb+eF+rfGUrDX93Svi35zw8PXNPkNR/njM1Kw + + oleMntE6DHxJpxSVHIt/bhIFHFh9feWh36Cw5oSe42r0Zg0tSG6FHRrwOQMxEsWC + + 2QwhPtZDa6qgwsZEWTndgQKBgQCs8cU+/uXRoemctudFtGgqAkhF8PwRY8iZQNZw + + lVkRofShU2kZWv40IncM00klobvn64pTzFz1Yzog9PB6PSdg4/bO/rZo9ls82Vn0 + + +TA9eHNNh9pMB6LXzGhLo4aZfc2K2gZZEtigBcddKglJoLx3FCc19lZbLagdth2i + + ZL+pcwKBgQDxoY2CkBAuOcyY29WmyBWUEO13odEzC3uUuvLu4mqxRYXvEIR6OMy7 + + BgPMOFSgqsA+qUTzahSAcXgNauS9oks9F6uAfDNVVuBNok1eBK2XwaoepxRFL1TN + + YBqj+On0RnnKTqRBNSFjmtopTDDtZUzu4qzpLzBgDP8Zn+dLSrgoMg== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 4096 + seed: Yw== + zfec: + required: 2 + segmentSize: 131072 + total: 3 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: 
URI:CHK:rsh7ysvsgbwh5g3xuj6z7zky3y:ykg5rhzpzqkvdy53qqyghfbcusibynfylggbhypo3iibsrrzvlkq:2:3:131071 + format: + kind: chk + params: null + sample: + length: 131071 + seed: LCa0a2j/xo/5m0U8HTBBNBNCLXBkg7+g+YpeiGJm564= + zfec: + required: 2 + segmentSize: 131072 + total: 3 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:SSK:qidwnh4eshzips6trdeog3tczi:wqxqxiacoq4zlm34kw5y36u6n5qnwusscm54fqu32cwk4v2df5uq + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEAtbn+aJL00xu3hzUW76iOnI1++uKgSaLljxTqKEyuMdLq5ba7 + + zavatvLxnDiscV7FKTRQOh9c2NEnANA0CDI9MdrCDFM/SQvvU2X2s71F1T6HrgWg + + 6bII/Tzu3YdbOatvk1nbSKWnDTuBQMqdLKojT+58lYe6/n7kgo7atmxSOuRG8bZN + + sLBE0Lab2EI7jYyO2fKr3PmXxfCMqFvIVA7AjTfATs0ekFIMO5Xus2w/Sgz+plCU + + 6hC1S9hGMMLWtWGTOIwwlG5hDknJ7xY44F4o9M5S2fDHkdXChGDSpsd9dQotFMZx + + Gc7iCNHBf40yYZPKrdZqaTnonDUtsyZsoSPj5wIDAQABAoIBADWxsv3rDfOiaOPG + + R/Sf9SNEm5Q9iea3/uP75gPqRD3seANPrsXiVUlhFwp1pF4LBm5aSqohwik+Ayw2 + + WGljjrlATb2ei0BmCly17+LDtfJ/+07r0tO6CvXoHxvNdqLfiKQdFLGuYGGEh3hB + + ZZdg3fYsHRuBczrm+1WoJ+9mqhVEBM6fx+JI/KDHkDu1aKWEeP2Cl7Eol/ZglY6E + + NEodDfhGOdAaXKCAYR7IHq7LBqHLCK6moWXrM/CQs6E4Fh5mal5n7jeRzutg7ZPD + + /tWJTWRPWVd2oBYnKeGkgWJ4ADGddtLKbaumxZ1jPu19fKIoU5Sw2xn10vVk+/KQ + + YKP7SSECgYEAuR7MpUtt4/jsoJrvf0idHvRVieBTxYVQ5nBfsHghDWtag4X2OkGM + + dX3QU/q4KMbsHB11xg8dPp9VJEH6wnqYLp+TAjTwetZE05SzcyacfaOZCLZSAhew + + P1T/uIvcg4OYuJyYvaciIi7q4frBANGIC8u5JQHgbyDZCayeMf/YRPUCgYEA+06K + + ztAsqIbuMSKztzTzQ6ljGxANMKjmJDHTwqLZ+UnknP+R0ZpGBFK5Gt46EjfNkEgH + + 6liyE8nM2NMuXGFUkTgCzCAjpnsTR9Wh02bWGH8ZgUHr/Apq5IeN59QJxWkKHlJn + + jCEgHtMRmOl6kbTJMpMoAkJ6kPi+N+o8dczD2+sCgYEAh5Nb807by1NaEYGHF2QZ + + 1jrBjrmhAI7TogD4w6gnJMnTv3FT1HR/JukesvJy/0I4V5rnz0bwdxV/6I791IKu + + g67QnpQg7wWP4JkOF65We9ld0bidNPUeWjOpGQItXI/7QHFHl9YYtIpB8YCQ60WJ + + aoIoNUc7lIetDF3Eef/S5yUCgYEAwmbSuAOP2Fpwne/zSBEc8cVx1fiHy5GMXolw + + /4rMxawkvlJxccw+x49ag+9OytMCIM+n19/++ZHM9hn/LhVYvvGuMEvYaCujEZmw + + 
EoHlspN3nmbpb1J7uAcofiKn4F9OJYCne14Qo+exIDHU0CwizA3MEFtuxwC03TpE + + xPe+tzsCgYA2lWPwtLMK2u7mstYsYZs+g2KHlZzKf7mBtZzmmL4IkhvU38wuKMuw + + xIlmsOIHjkXsfPT0tBKD4mAngPVPN7Z9sgJGCuWqxXoXxQltlBSH3WzQEGvipt/T + + i7pJItBHrPl1Mw5m2baRRIPQMREtRqwHR8VRlXy4vCVg9Wky1RGCHg== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 131071 + seed: LCa0a2j/xo/5m0U8HTBBNBNCLXBkg7+g+YpeiGJm564= + zfec: + required: 2 + segmentSize: 131072 + total: 3 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:MDMF:qxw7uvecwrhqibsdufbzs72voe:6goxtexixkeemwwpf6xu5is2723hkpzvxtoy3d7772h77jn63ewq + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEAyGa9FcNFgZ+FZa375YVsROi+KaCNNfZDjSW4xOhvrAO2ch2Z + + cY0mQ9iAcDMIE1NtJs95HqLGbBzkRF3+XxLN03aqRaTai+wAao2xZXiLstZJV2tc + + GIFmoKx1pGlhHpk3sysQMJTIgg49HsGxf6KW9TFLMkeNA5kvE0tSJghQ7G3SSKm5 + + HTq5ZFF6Hh/i2zy3G1hzwvB/kzGmQb0Aj33C9IbjbtiI8wz79+TBecEMILwUHg5r + + NdwP+9+i/ZoMbQ1RW5Vzfvcb7w+/yp0GtvZO/6yUihI6qM5ZZ9q83O575/HY8R6t + + LKgSw7A81WVL13vwDHZtcOwlGg7ElAITsRRgRwIDAQABAoIBAFTWjajnbIb2IfKM + + R84AgHfhshOMWQulsC5ScFjH7/K59aR03G9ianchcipNqFcKI/Tgs21hSrAdQROi + + WRwlqUYi/+2Q30aKCBkT2CxVboqsxgrAtBHZwjk2GW2bgRaDlb1/dxCiBApu/bgG + + Ft8qVU8C6csk2FFUKcY4xlO8hIm6KpnYoHJXmtxPctJTYmRS0zuvfV95trfooFdk + + z/0rHorBq0xMEQR4CiJgrNyvdVPXnlljpmjvFOPCBwbGjY5MfmnYfS7dKYdmyKTF + + 4yndMKCtEwC917ncUo9NZ3dwsaG+uxaK0F8gx3STP0daE1Ct8hSiydCgdNDl97E+ + + uJJUbzECgYEA5DpKwzgXb4xrTq/03DhzX2X4uSIh9/kxHW17BwmOQ++q+lJtM+63 + + ND8tp9w6LiUTe9dWPFebBBrQAmsVpTEw9/rLcinFEJexTto2SjDH7dt7PXgt9dxs + + ki6GoxgRq/AjL8i11gWlz5ydH3/hQnfbQx92Ddcwt9nc4Q9oiSkGWZECgYEA4Mma + + rhAPxqY2kq7jq82HYLywXknqL52pe/GAXn65PTjVREBllKtgY+woZsXzw8L30jer + + jsX920uqpMunrgn7wrvrxXbjtlPFp5JqDK1f2uE3X3o5MLMxD2M4PLHgXAkFh+7u + + NYXJqaz/tiDniiTaf8EFnDkZbFyX15z0k7K08FcCgYEAjv7O9P3iASwz17t7abec + + 4frcGfL+4YWqdkuwN7qO/pXdxLV8YnuBIiUrj+72LQ9h48gJ6gjhwXKjPcCmcTge + + /GCQs9jj9f91QniKZ3Wk7q0DzIHOGiufgv/Pr8RW3im5gij5dT1YpHn2IFRZaPH7 + + 
2VSO/SEFD0xbjk+/KaEgr2ECgYAcuAUoGes6EQBF60wxJfgW1uSdl0nxPW5q5Gbn + + K7+U3873gla4ENEm0wQyZTYIm783v18OxaLyQo+RsGdC6AmfTo0H1HGxWLCXATDF + + X90wRLfjXeUyoKIy+hU0Q/GLMKfhPxh2BBrIr86XwUpzrtOvoMSLugvSeV348Rea + + SymszwKBgQDeESijXcYtztiM4LJZtW+lFRbuUDk96VivUNAfWgb76uonN00wzigB + + xQrq4VIkmMv4EAHs4WDzwiR112BAPiY6Sg4YoCaA2VEH0Lj7vlGv92CZ8YNyjNpt + + +zAhdRunSIgpgw0ovhCe9tko6PX8Ui4zdvd94GxQ8FWmRovm5LOBqQ== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 131071 + seed: LCa0a2j/xo/5m0U8HTBBNBNCLXBkg7+g+YpeiGJm564= + zfec: + required: 2 + segmentSize: 131072 + total: 3 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:CHK:7xqejfqfej3u3zp2qymqd2iqeq:po4t2tzkh4d34ku6gdhlocadfwdyweiyckyqz57zdi7ndt4ikj3q:2:3:131073 + format: + kind: chk + params: null + sample: + length: 131073 + seed: /N4rLtula/QIYB+3If6bXDONEO5CnqBPrlURto+/j7k= + zfec: + required: 2 + segmentSize: 131072 + total: 3 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:SSK:nofb42x37o3rgbt665fas5kzie:k3s6wi5ohmsqytd7lsbsagknyg3elwphviakrrsazcsb4l5qgyda + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEAtxoNXh81HfkF0TLH3nkvVnHZP3E3SHFzlYTVt/+0hxjqd7H1 + + ML5/AzYtDjsFamK/GM9PJw1whNyqqt5lFwJvFku+qk9XxGpj1JtHgirNnGdhs6Rp + + KCQE3Wx7gQAI04Z+tHmqqYdZsEefsC0n35K1d0P5n3zToi/sNeW8bAXONuRvqNxP + + ocROo60hKIu4NvnnZ4mAxLyNge4DEj9MAOAURqBW7ZoyEjaZ/RJ2XZvXhm3+oKd9 + + FyYcUeRzsSLTE6H5cDsiVi9if+juc8xhd6yA+0Kd6Nyl6FkxKo24tCGQp5s0dDdp + + ZmlVWSu9EzXLL2ibYC3lKRWKTpln8NiczWf5LQIDAQABAoIBAAYvKZNrv2oHPqSN + + sAV6F1i2mK1VYBYgytQae/NufgTwGP848fyW+og7vLLV2H1631RxsA00HYBHSbZi + + s4xe5yycG1D6RA8cvslwAy7IzlABh+G+5FRYPxfRcaxuOV4XlVD4KQT3ztYu3Rxg + + sg+Rj3J7R8OUvjsknjhFzaLiYVAmQTDJDuUofboApGD7iZLwvNRBHM2LjLyjzoDU + + v81XItqMUix/PjeDKc3u42/R/+5P6CV+j5iVqjG7DODKih+jm/lMxzKPfGZM4XZ/ + + 8DIPmoJDxZtpDutB6C2rEH6UE2oQreWG4vYLCuQkittaTzAaLabMRHfQF3E1HgtE + + XkoU9SsCgYEAty2tYRxDGiLviTyPfuLTgHBRxayZBVivzdHGph9aZf32oUsTlmLS + + 
TiU3IcsMt4x3Pv0Rm1V+Oj3capC3QdGyL8hQ/qmDMC88+tKd1gbxTTliDKULoIQp + + EvN2nrHKi40YjxaxKU5kVD0bIZu8x1VQnk24vnJRZIQu/vlck9gLd6cCgYEA/+SS + + uF7U5OA6ZdRJL2KSGkm+WGxaFoJZbY8z0YeqjU0YbChnpV7aSNM1UvytzU48jO+p + + artSjltckOw2QsQFOnyhgkxPyy0w+ykzyMt8Kacg8ZVkJxFmXf1Z0KsDWFurplMV + + MzTf8eE0ZvjP+2te2uYr6Vmoa0OdNfoJ/mHCIwsCgYEAjzzBmfFOq25sHsVjdBYM + + yx+JYejAU4TxHCGQk7BqsNxxcdjSPUOTLhY90UgE7raBPJkJnoywwvxCknYNRwOh + + sWmTpD+LXS9jIMN3NriBEiDwAfFBcUhHEhGdTSS7vHodnS5iZGlvXMvXnmU4riqR + + euhNsWaVLOOMGEeH0/gZp3sCgYEAq0X5MjA+/KZcT/XjujSWp8O+BH8ZWUGLy7ny + + rAbLD+KPOy1cGiK/pcjAQzheuDDqdEahNZAFtMTP0yxXMR70hO4QSB79tXcc9q7g + + O0B/bX2wniIos8GAq948NF+SUJyi6iNn6Cs2zTW4FkfpJVX7WjZ/I6PgB1NtMUiX + + Uc1q3HsCgYBdqsVcc7xtNsoa6VLWs0UT4ddnesOGNPh2OE+wD7GAZubT8A6Ndktw + + 8P17MAMpFjhW9OunA9Vnat41iHdqgZtyKUjuW87O+1pRp4l9MvKal7Pf6pfQJ3c0 + + FyhWqljYymTn/LFep1x8OzMC/YUxclpLu4gUbHu5bfqdd2mZsxW+Eg== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 131073 + seed: /N4rLtula/QIYB+3If6bXDONEO5CnqBPrlURto+/j7k= + zfec: + required: 2 + segmentSize: 131072 + total: 3 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:MDMF:h4khks5ufaa6ka5lyr4nz4hetu:g3mjwxhi6jswmwppy3upio73yth3jfx6toguraonmjysc2boat6q + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpQIBAAKCAQEAwSglyEQntPe1VGZwA8UR04afb3YU14LM+yUtPmF86CFqtkYF + + jERO70XwmKJDI9qPskII5G02KMr3L6L8fXvBAgq6YBdJlqNGpxnS6YlzrPIgxLJE + + 2dcxoezU+NQTA/3YSCGukmqBqrLyHOtzlpyU0zo9W3crsUbdM9yCfy8N7+98vLiB + + tGAtjDDMsg7rU+L5nKJRwNSV/eEw85lcmj+6Xj/7JZOLisTI4MAspt3yG3YmapQi + + xpDif7Hgz6d/zKnkys/BAw2e7/iFJyK9hbPDtobtJkUK8UAs0d4fbcrQZKg9CsM8 + + VDgp8bR9oBqfhbDHQfNtZV+82M3qewbgL0BWrQIDAQABAoIBACB0QfDlvrQx4KZH + + Ne/0NzwOxRAhy3uwbweNpg3yrF2Ga9snZbw9J/QdEMFcliJakUVWwg67aNuuypyW + + 6oyc8/+HVOxbTVKBqZffB2iU3zpCTo4uE9J0TVMTK2+Jlo5XovTvr9jLC3Fmcra4 + + Ouol8f2RrgiFu/Ij4Xvaw5RiEBntnRJxUlvU1ik40uDOc0usbkqHTMS3drBSDU8a + + 09317Sta5iS1EDcDDAg2qOwXScy5pzVAhUapN/2HSvckbKJkjL1KuaAq3Jk5y/Pm + + 
FaHls3slBPsrfeT/MW61PcSgraz3xk/hw1ajeB+swxqQfkhwgAFe3RxJbOwi9uva + + gWdoKRMCgYEA3mDRx3GqV7B1oml6tuPoYwONKmXSIDh4HLDX/qxHtBLHIInNp7vc + + T5ntId4EXvz3DY1PcRqpsMpGLpkMdoeqzE0eS8a5BAK1I15uSZA1sQAjWfu9o72l + + +wjjaqAuBTm2k8E0XJ1osJyvGG3q0zb9lvBmhblV8d+Xst2iEATfXZsCgYEA3lxP + + g0lADCRcCrt8/SlSVxuECrORryRCKISjYInOzVdcsEJO6vqHFcdrJ0UJv3oPvR3b + + 185qXrBJrxCM7frpPTQ0rIjXYgwprTtyVW8nxCLpn1IWNH89MNiHSj9Ag3pTroa2 + + Gbig19hgl6YutX2b00sCT1hI9Db5Thaxs4mChVcCgYEAg2u+rlrLa9VaP+iMYEei + + j9mKdNMF8orM2U/d5qFUIuSyD9XA128bjWOPk+NMvAJN0xF/MH2saVGxVlqW1fnp + + g6HT1L6VmvwqpsNo9EqooHlPax9ufLVYwVoIZHxTljz8XKfi1RUlyLJgfFSBYd/u + + 0GQ0grT8SNx2H3wCCeuHQh8CgYEAuZoVqIEI29mxlie/AVVvbFQEWCZg0O8T5dwo + + vtjobE+ih2EhnFN3U/97enDO3SuWXYXBzhV2hgjhyCWpbK8F5ldgLC+gkC+UzgsT + + uSop6DY4CQssi681NUNXUesP/26o0MGS2E9aui/bGFnXHRh2a9xtVitb4bTNTZf1 + + xeVes8sCgYEAsILzo7kk+SgV6ZqyubbN52t/q/0xyZoNFRhlNtlJyhbCE7YwUY8B + + qmbXrbuJeiuDgopMZ2OHUWh4aLMpBxbYGdAlCz0vPS7a8XamQXizQSD6GZK6+AFX + + tcrIMtrx++XsrD6RkikqaQmgPq320rMJDFETZOzxXMq5Hr9NgKL9Nyo= + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 131073 + seed: /N4rLtula/QIYB+3If6bXDONEO5CnqBPrlURto+/j7k= + zfec: + required: 2 + segmentSize: 131072 + total: 3 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:CHK:kzhg5zp4hcs75eboack4kqpcdm:g2cwnhpfwxrv3c4lrmpjyj327a274oa5qt4isu4avjixbu7vblrq:2:3:2097151 + format: + kind: chk + params: null + sample: + length: 2097151 + seed: uqWglk0zIPvAxqkiFARTyFE+okq4/QV3A0gEqWckgJY= + zfec: + required: 2 + segmentSize: 131072 + total: 3 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:SSK:smbhexz3nnaz27sp4ycnn53qxq:v63l2vvp3z2brgul3vhlbudiybaoxo6fa6qe7n6cqrmbkbtq3pjq + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEAnluBaJX/nthl3VTvSPm6f5LZ7ZoP1pXr8dmUZYn8cyyAmnLR + + 5TLc61DWPVjD9A5fiBeF8MTgU7h/RZeb02MaWbXtriqarpvvPqOgsb6P5J8viZ6J + + YocarZ2ZJoBdR5kJ1sNbSf5YAABEZX8KbZWU+vhGyx+rRpjcsM+31ZLxSEg4kmX6 + + 
W1YyVBfwtkYLhRwaC9s53ZuJUfQa4H5tynbgrYwh21vxw2C8NQgwPLh9mNkCAJar + + x++YD4ipkEidwlHGSaDQ/miYN/fXNbwOBVk/VOsPEyVrPLQa/dfsguOZV5ugIOJK + + fu9bdAQQtstfGeryekVWvvq6fi3bmetXwjiYuQIDAQABAoIBABZ5XBtQtm9/vK01 + + waP0tTAn0j/zTm4g5tRzEal7dNWPqkzBIOLLXikTVuRr9ZtscshotjyeZEvdckqZ + + IqdUeEflFu9R4pQHU2PrawHuzpMeuGtqkYrnK7UaGcMqEpL3uDq/jPQqYajWYN6a + + sgstYHBhzgJD41XomeGKCUgJS677P8X3+g+i0MxHx8fgnVLxURm6QCXeLFzkgqIq + + yt92rNkTNa9wyDdYcvAcx4b5RLOxXrkkklOVc/oJlteM/X07itbuYDZBQDOYpZPP + + EkTckkBSbcoPy6nGfcHDintuIWxKe3jbWRcRl1uCN9JNx9rn89DQzEAfeFMorROs + + 2DSDI7UCgYEAz/eEfIZ7EJ8VZ4CV+4oK6s6+UEKuoTW4kN3tk4BoBuUD/kaeAkMv + + q943rKeDtcOYOS2FxSu8XKX8s7Ad6IJV/l7RMtjRy0ihTsklGUnQYx9ptkBvWYnR + + M6SwLfOS4etG/+4VomUx/YGAsffZjInQbAqiZKhVeX7qFr7YSiBfkTsCgYEAwu65 + + vwTOWT9pazDHER8UC7tM6uxOCEGkHZMcGFlDftLaB30dEVL6pCeXW1J3XrFVFe6x + + IdZdPuTyywtpWfJrlMPGsjHE+R1x3AB1NaLEeVGIjh+Y0Zt8nDNwdK+4u8Kl8B5a + + p22ySmcPpoOPFv75q1MTyrs/C5VaRuyU8SKYXpsCgYEAkGZbzp8N2jerhAdrnJF4 + + DRvqVw5F9Ne5RJVj/bP+BzODN05PLmD6O8r7O13A/TdHfgQWyxYYHvh940JZMfU1 + + wn6RoU2dNhpDLtJJeSqgkALiwtIwvqoL4WDrl6x1g3p6/P+SdATx1gTSmD/xBT03 + + w50KrvuXBdpSreJrieS6lrsCgYBJSTUeIrFtjlCU0xbUUgnYS0ekvsirg/ougEM8 + + yDp+8Mi1rg0CmV7P3m6iD8P/Hs5tW3rOzOfroGnDenvWLDTUDjKiheGXAsHuw2FN + + k+8n6UZcoHZ0v28+znwF8paSSKDYQKE2dyBjppGUubtPGvdEuQwk2Pbf5Pu21HU+ + + nxIH5wKBgQCJbSa6TDgwwZmqOCfMHkBLXH+up9o30VcREGHu8uZyybq5q3va03LB + + Ket2oM88ot04aF0wAOIJqo35FB51Jxgi9GNq1LE1U0TlB0o12dz+yjFGWy0A/o1Q + + aCBedc455WFm1R/BeP5bjaKLM8suSiVUIrHGTTKhtobITgvrmIZOLw== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 2097151 + seed: uqWglk0zIPvAxqkiFARTyFE+okq4/QV3A0gEqWckgJY= + zfec: + required: 2 + segmentSize: 131072 + total: 3 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:MDMF:cplunzl5zzvjzx3hurccgxjaya:ew2dgbe6r2ivhbdn5nmueb3tsr7dziypwpxzqmzowwexv73n4aca + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEA5rZ0EoSYGWTYYnRw8LBLb7XKPgyB8kENRSRc8C4zkQjGlxay + + 
rBEdxXypt5KoQeBXEyfwoALfuuMA7lP11Tn6TnO00AXvMEuvqwspkdIptBddnkGG + + 0hfW6liNrH/cVbwOkz3cdxhf7jL3hTTw4DeJdGepV/ey2K1qHvbd4ckgwjwj2hy2 + + 6PWzVVIXJe1m7crNmB6DG374rYlRgh6QnXBP09zOi7eo1nxGVPbu/jfIwpRGC6W5 + + tZCr+ShQenXofqbZ9po547ErafUaeRsiWMSMyzIT0uLvoKv3RAS93bkfqM1USLR5 + + 7f4Bp2yQcNjlUUnarZ4M+Q+PWTQOZLeX9mLrnQIDAQABAoIBAEdxj58eZVVTx7gx + + U7oM9cdJla/CQslIgLn9CTStMfXDMHAgLMMg58W8lXfN2AHSXVSGxTpfuXWPjz2+ + + TT2y3wLFTOQwOkIL5gHDCqPn31cv9yMnKn9Lt5dJRdH5pDr+acsJ2IgeybIjIUgk + + PUVJnWypHyUpBL6ZcOfWzZ36IQVUcME0PMjykTav39K/uHXWF7Zk1wy8FFdtsh+7 + + gpq6hsKzZG68vbwySml1V+0utsiNArOTiudzUEY3NHrrjAYgKgFxX2C8tSUzWTEd + + Z2Yl/AMnfL2/jtQjjWoNGaG3WZdkPIBfrgvF45uy0QJ/Rm3K25E+EyQNkz2CdFi3 + + vMKui0cCgYEA7qtx0wbCw7wPML86BIp2y2tIo8kmGeKn2NiCsCZl1bW4lkfuRmdN + + BYTHqTUaB4PaHijsXrLcxuGqjGQ7/OUqwmEz2BOZZT0ehFAkkSeA/418ZoqwbH7K + + AylmDTtZ9JF0hXDVF7Wwwx3RG4O0OYROie6Y8RXvdKE0DFkcoiywzN8CgYEA93cZ + + Q9m6+OrbA+KsThqC/vimTGPPWUVATF/yPgwSBT5pwq+JEjIhmmpaVTMs1E7h3/ob + + mHegt+g51fJwc9M0h6DcjGJ0ek7Qlv/TrQtHCzljqQporuUgqIfaU+82Mxxemwdg + + JqfwT21KCRBvMNRtQwILRCM/S7SEGzVmzobrGwMCgYEAtkJIfu1XyF+RfhlZ9ePD + + Sh5Yb3MJXJUgtlDIpDn+ZFAMcP7nL+5s+/zk+AtsIDcJVyTLNJoETQBB3EojIUHk + + AGJ4U9bLumsNJd+JvStcsErcp/XbOk8sd3Oi0hHz5Pc68zgyEpQWMzpO2GMgOxgV + + XfHN67Vjkj4UYCWg3xufvCMCgYA7nmDi3NjT0VkUlY6nfnGi1erSqpUwz6NPAyqM + + UkIhK0k1ky61yIgZ+JdswViCicKXQF1XnTKGPBd6+N6ouPCF4HZiB/JB6S0Nw/KO + + VRI3nQrqlcxknmUA1UH/SLlJFQOh2+QJTBp0OENG7cOsAvGT3DE0qD0+ku3k1DfB + + d/W6WwKBgQDS8pkBm+pZ0EbZvpYTy804jUtErk2Dp5M6m+KEFE9NF06ncb8voRnQ + + sYg4P6NuweJnNlQSUIneNwQ3liuZTdLz3WDt99+DLlcToB6+jL6ppXs1h0dYpkHW + + WEQ2KDzRt+BEcdPe/sbkF80jhkZRPos5gMmy5rgcDiQ83aA4RYPoNA== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 2097151 + seed: uqWglk0zIPvAxqkiFARTyFE+okq4/QV3A0gEqWckgJY= + zfec: + required: 2 + segmentSize: 131072 + total: 3 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:CHK:6sf35smixwvpapf2zw7sllxet4:ks5zd3hkd7ppwobhnymezckszexpychngnkipxfvmlcng5fgjbea:2:3:2097153 + format: 
+ kind: chk + params: null + sample: + length: 2097153 + seed: BTBX/ampNfLU+ox7xipBGiaSbgC0kcB8Gy7BkJB4oKI= + zfec: + required: 2 + segmentSize: 131072 + total: 3 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:SSK:lbjianktlydwc7qn5oco6sp2xi:mqnj4jq72xxuwrff2f4kyuv5xvu5yzzkhlgxumm44n3uwvxeelsq + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEogIBAAKCAQEAuLx+Rysw67iIOHq3IvRgaYAn1OT6xugUvhXPc5gvPAS599bS + + rhPVP71CazMQHzCPTx8KcDskNnNb08xoeae5DfNVCs/E7PBtgIy08Xrdv/HEZ4xm + + ksJBLydt2jGvvFTbCHmNuywWguHHtUig1IrYZVCYMWUABGkiZ/j4UqL377jjY1BI + + Fb75Fud/3hiASG07BdL8hzDTLqmhdXDwQAeMklhaB4Qw1A+Ubb1it/SoOI2CUzhZ + + /fqAJcQe3YGbVtJm58VWy4dw/AzcGitya7MxXHXrjitWqnj+CVM0myf3vbvaX+8y + + YQlv5pATfhJbCmh0vj03oiaSZcj9IS2fooV9UwIDAQABAoIBABCL4YuiVLloR9s3 + + MpwQ42nPrsGk2MlkFCeKcJBb+y8XBUkrlqc844bX/tD3O+RvRwbBMwAma/Hslzb7 + + QghTe4HCX8WeIndOeaBf+fz/EkmU8BCORMm0WH5Ou8olVSY7O3sg2A8BvepvKqIU + + JUOkRAmfFGKoNz4t5IUHicZtDmQMNmrsb/jiEslKqcroE+7R4bQHAQtdLCwPgQSW + + AHh+Ft1wh8lGMHXi1EIiahVqTF7EifNJ3Aoy/O9LybXEMYwy+fDN9WtCNa7Xl5Ab + + K8QXgtTLg2kcAWfsBy5aM9vghN19bDV8OPgXYhFqK9AbFz7nGBO9VhLwVHU0NbpD + + VSm88sECgYEA3a4QFEE+2qn2Z30ar2HMrEAh6/w3KSi2SKMfLvnPl2M0B/YqLaTi + + YddrdlDcKJGUEx2GKNmJFUqzFX++BD5RE1olNjw6RW6D4RDpCwoxE089WN6gFiCA + + wdG5RXflHhF4HUfz3lrewelbP6VM1q6Rs2tA0+Zn+KTjPSIGTl9mAdECgYEA1VY6 + + O3K4p2TETW3n+T7+CcLITnSMp8n0hWOAX5ASetShbq99ACDQ0mzcE0/KWyV+2sNx + + JxvwBBgtsGsGsdsqGNyRHXQw9LsY4ZSF1Toda+236OwaMDs7WVVIuFutlD4tKTX3 + + u3UDEsjGWXXohZSE5ajtIjd/IOHtCT5YGaeDEeMCgYArlW5Z3R4TdbkZTbJyauMH + + trA0qmjZ8cQs8c1OuhTDaeCv9AkE4lcT73uUTn+Khly7iWF4JJTcF8yv3GaqhOoB + + yQZp7Ft0jS7mkCGRZxaQ+lJQZ6zHzOojsS0g6Fqml76q2xuqSuli7JNhJwm9Z6MD + + yIF9Z95nN1vqCAd/Xyg6EQKBgG1wsqbUj02wL9PY0evXGNNBDSjSOWXKAJp9FNnx + + OsmwUrBJbkKmkvmfxrZRdGmVrqHjKST6/AHdtXKPNPwAhnQCkp8dgA/L+1OdsZpV + + GcrIRFRE6ppbiHKngYqx3TXzP4+ok9GikVUNklNKXWJJcnOuWRf4iEsCG2tmhCOE + + /4QpAoGAEfdRRaGKv4w0qhxS+RdEaTrDYzEW3xnB56ky+E8UJEy+YgbGlt9Mz1aT + + 
Kr8SBUp84Qx6EDa+BA60VnkL51E575uNY1/xICufxwM1WTWKnplLHmExSzzYv8me + + hm/J9LA/rmqRTZFd1bqk+M7qAO6bDzKD7WNhAt+kbdaFOlkO+9k= + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 2097153 + seed: BTBX/ampNfLU+ox7xipBGiaSbgC0kcB8Gy7BkJB4oKI= + zfec: + required: 2 + segmentSize: 131072 + total: 3 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:MDMF:khtxspsd2whqf6n3s52i7osu4i:w4z3xljy7asjjjev5zicdzf6xu6xvvr3vkyghu6w7bjitsthx3pq + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEogIBAAKCAQEAtlZxFsekdTXUZFqXRPIXWmfAv4LevgoECO6qfY6ZN4XxMx5J + + ET5EOkkqBE86YqD2nZukuSzaY1I1PA4fHkwiVbdXw8CCEah7foRaxaiW/a3D6AOO + + L2JydZls+CYbimMxmbGFQF449dRtEMzQ5wSmPw+bfHulD6zHYLWgV94qSUWR5pry + + LpVtH1owGid+xQqJncxt1G/WYoIPDfEwHzLhTlwHYCEGLDEHk8UPNC3m0N8Skevc + + MPCPk+/68L7m79SBN806eeH1jkuMIOMYMPX8+5PzT1Z2wE7IgvjzXQ5bNL8fAk+t + + h2wvcClECFXTup9qWyFgu430AOX0eu8ITV1B/QIDAQABAoIBAB4YAT05wQ5pTn8r + + pnjGHg2ZPyo8jse9vnG89l2XqfkMfcUqk/OpG7ik967TZrb9iwZzOEopuXeYC1o4 + + mHE3LpmIE4+m17DTZmJ4tMSXsSf4RHOoFpEChhKbumzwWS5LddXAg1Ye1vbX0xJp + + Q3dFgKy6xjZS7+i44wU2pNqru73w2ct6ELmNp+2hZIaQQmOwRwXQeQFLi+Yyt2In + + g8tGnKaGTGHkNCxWIyMLc5KFgxPpxtFu7grJdkfsGQ40ykMPHQjSepAa7tDAvW1w + + rQX9MVkqvKFQUZCcSi2LUwWXZWnaMId5FPvWHSdy8SS3KWyD55Pg5K0MA1Jvp+iG + + I9LlFdECgYEA/Xt8lCn5mscVw3mF9s7c59hyIT3NMj6JSWKNbYVRlpcic1M+aOjJ + + 88TEZxukiZd+J12sd41J2krsvVWkfANMwzdIqnaxG+1/l9pqfFgo9skikirUd8RD + + SJSoUDqKzB5yRkD1eyk8vEBxioUbeXi+Km22A0wJ+BcY6/J9uufvhZUCgYEAuCYP + + Xn0RRbSZPr+8N6WlPiDdB/hJULTlsbV/hf5TzBRMeqOD6DerBASeikBwJN+cbOKr + + 69XBWy+DhO7CgV1GBuxaTo7RTl0nToDnYrWjOy4fHapNOGRQ07TEbdx1vKo4S1N2 + + FwipDdQiQw2For8xFwj4F0ZLR5pxtDaNPtb/4MkCgYBOS51QWqLJpyLWzSuO75iW + + WGnwUJmYIm7fZvyOTrbD0A0JGDZXy0fN7wJHYudwxIVn/WwvRUoBjlEPrmtvDsng + + JqxgUucj3DkkG4f2vnhwufHeujIEiG/L9HcEyQBkSic8AgaRM0yaTUGE6tZwr9X1 + + XwvwesU9h0zgXHdviwKV/QKBgFoLUkyTv1RkYOLMAo77Une0vh/diowKSJ7C7x5o + + JDWQX21Ac4mjXt5SG+viYnPFW8nqdMKW/TtHWnov/bAgGdPc0rPDJhm5dzTt1zbv + + 
NmgDv3dUBPpkIxnCNKK7wF6GpYw/vWi59WArsK00+XmBH9Hxss4+syTKIntKiXqG + + ywvxAoGAILJ2oWur4lXGipG+Mk9dceCSUNzF1+8eHBL6wcC+iNeEJx9vj6T7Sgv9 + + MlQYizRaU7AmTOa3jFqbDv/tO23apagwjQtIJVMj8DWQbp5p+HM5dU7kFwGQRscW + + iMVPpVMHqS4F4L7j3oQ07Td7xsLob0mM0FxVlgK1pVUTkcpQoRE= + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 2097153 + seed: BTBX/ampNfLU+ox7xipBGiaSbgC0kcB8Gy7BkJB4oKI= + zfec: + required: 2 + segmentSize: 131072 + total: 3 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:CHK:yjow3adqfodddphsctxenji5ya:cgqbbbawmgeeacol6umzjofyxzqeegjnl2s3utqjchh7vabkmeja:2:3:4194304 + format: + kind: chk + params: null + sample: + length: 4194304 + seed: 2NVWOXvtwcAFf0J/g0fDC+ypQbnCU7awZoCxcnhpkVM= + zfec: + required: 2 + segmentSize: 131072 + total: 3 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:SSK:ymk5gokxdb3hzszs2nuhn6sqvu:kigmqblfazd6wpwi52p2t4lzb563naw7hrm5r2dtfmxheedra2qq + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEArVHLgrQL5qjt1rxV30gfK7+AFv0fYYTcddOFF1g+03o751vD + + PTkhlPNGkK5w1gveH5nTq1FnZyH3qLQtWyxvY1pnDARDs2xuTbIQbbojeLjzgk5P + + 96nBujrI+TatjbDI4vuW+3tRQ/WJiE8dEV49r0F147kBgNC3al01TsFQZP7DR6Yw + + Cqkp2WYbfEj6NtrsVJIHykzDifzZ22pcm3SWFYk3N1VeWIBw1OSRh/0GtnoqiWdo + + yztfATT1d14uubxHIXL44hz+M8EnOZ0hwevJoFHwnUWzWP1LjrHxPhUwteTg/MCg + + nzJcVHWDVnxpq8G4+WTlQrstvTIqB0L37pA8aQIDAQABAoIBAANBotXlKsa+Acn5 + + ETv4D5iI0+VFWDjtgB7j8dTgdD27nDMv8YPNoP3lcZFNAGhlIgVB+fd2uL9NT2k8 + + tBAfI5tK8DNfmSNeiYFYM8rRUSf21vcUdZgs+QDWEz9AxxNUcPx4HFFunbZDKhRa + + OCztXQdVRDEadsFu1SNgdkdGhot/M0vQrl/0He9DomctxnvnnBKXgZo1PLHpsmdx + + s9pWv3rHT/k62FQ/d3TDfmSxc43K3OZRfyER5D70eOlpomJJdJURUNOTmTBpPlLk + + F8mzaB85ojihNtCcQjQvEskktQhk+Ejbik6YAXv9+jSf4ncchymG6LYX+S/G7yL/ + + ScmmJgkCgYEAyaMVMCAT+lLom01LjJoMPxXFFZV6CVTZGuDwxhkt5URZjXWLIL51 + + ZdhdBhoBd8hGm0Lkb9yZ8kXwpt7FhZLjt1up0yKMRhCw7r+zbhb5o4AZ4+JwzGcw + + DSyp+DzinJgun0Xup0eUIo2ov4TaPYfz+OUxjj9yUtnWI7KOlziupOcCgYEA3Aw/ + + 
C7RJZJm1AtsZFI6mfI3A2ObA7+KW2/yIJhfXtRLbpt9H6CIxotj8+mlw52C9U241 + + VLVQWHVC8t1eWeMaN6afCLu8FNxJLBzS1GvFjjlbXrplBric1yuSX82aogZJltoz + + TvicIWICOYGkBl8O1j51rmbMQwfs88W3Zwermi8CgYEAwcEq9/6rE8ydZbZFlYrl + + n60Mn+vtw4+7uz9RPhot5vPh1bOQiFtbtgzNfrJ4nKBfcIw7tF3XtF2OnNrOFMeM + + d8HmE1NMVXtueUzOX0hGg9zxg/AwkcnJ+67ieP4Qh4cYrcXmSOnYJ8fV0osXpy6/ + + uniKQPUopwJZ6h2HNTqrXxsCgYACsVusJv6m7oKakFfUOpKq/4kWnmxKAznZY1O/ + + M5d+LcbmWeElZBW7anBeGCA7lKF8feLFMJrVGkpBcpgO/Yp6l91mW/XHQ5LZqVij + + JNZ8EROfKyTFWkkBERVverKjvPP1lqH+G2i9t9dTINUDBvLFiGokQjnJsDUkHo1K + + A3wEHQKBgF/7DAw9FaDTbdBS+FhWErCYHHZF1ZQxUmcb38VuSCvf8fkmnEpN0hOV + + niDZyim8rB+ggGf1symleU3LqU6et1oaP22ORbiAG9uliHaKsDU5xC5YHY/LGI4/ + + xQe/x0dlLCDV+ZIdmNdXeG9AiUntkxkCzS+xLZDXuQxDgWn2YN+2 + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 4194304 + seed: 2NVWOXvtwcAFf0J/g0fDC+ypQbnCU7awZoCxcnhpkVM= + zfec: + required: 2 + segmentSize: 131072 + total: 3 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:MDMF:6rqx6odplnpm3q6mlfvyymo3ku:3t3xbyqbuz6ykrfnjmdccldcobpsx3giwhz4nsyeeiswb5tlf6lq + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEogIBAAKCAQEAqzmFksZy6jN3ReJ5UycelXqDDZkZZ2F2S/wp1LNMo6Tepcw7 + + g8X9/kNE0+5RTY61VFHSibeYwyV+LCJMAhIH4eDKz9puD1zSCgRF62/qofsqgu0p + + v4zvAoccEOfp1zIXIJ5YqvgsOLR3JLxlCQmbXOrhasa7OE6qzpedghx9FocQyPMH + + erzwWXknLxnoF8guH6EAsnBCPOP+Y8CSCgmnWgCE0XgF6cBFKGh/cWicM9lqZS6W + + 4sPRYYqXDzyYArYqHCSJ8iqxzx2/O1rIgo8Di0xZ8IC41V/Har/y4oBZjMDkaHo4 + + ov/gCsVr80Tu0maK5/NaUT2XGZXqra4FG7fqNwIDAQABAoIBAAjmuaHyvSCdwlKY + + vnPrMbTVpKB9WAu+zlaO6mHLXG2ZcZWu810bWuPv/VEDL6jXhWe3xTkxmThz54ZF + + 1iu5Yj2E4SZDFbuouKaaqEPgEpOPKhuaVrRFkFtSSMw8MjTkvr0MXlGtCyd7gkIf + + pST+IdyHvWY+pJb9x/Vrfl24O2yDTT6TfyhyMXqUIgCo+1Ntn25V4lFrn92AJPRU + + lfrBLf0dCMSiOJHfdyllN2UQkD/juqos6QBlRPi8BVAq97a79EvYMBD92Z7T1Hkk + + 8BKggbHmmoMlvJfPWdSrncMujLnPcEia/9fIxUSc5rJHR6o/l3hsLEwg0SLxS/Lq + + 9YlkMnECgYEAzBhyN9KtPRiNFhkU36SKCfYgkTm55A5RNi/vnRYuRBnZ2ReORy4G + + 
/16yBxTy6RpxbddYC6XN9Ame5NJX2sv55LUWGVWheJDWpCrTT4dGHbkb2enKFPZ5 + + 9d2UPior+xO3F/sP0uNC8LDKrWgB7Ji86Ed8xitO/LovvTUfRtBMI68CgYEA1sUJ + + IlBsIJk9eWUkhHl7sObBFNScuVEPx6QIr9+1KTiPbI8VnHZxSVR3hONpzdCZAsw8 + + WDILeii71C/MU+QIKN1dV81Oy1aqCq+kwrnes5jpKWjAbYD6SJIYqcPNbPKZ++mG + + UM0kUqgv2Dpp3Co9YWecBOjpBdcQTSnkvmsmW/kCgYA2V8QBzRzHicP3QFJogf0n + + Tdu6D27JpG5HSVg5sXA8Pc3dmgIOPdkrIeGxNQjAvIO7RX1yDIHcGruuHbu6zFkL + + ZpQtxrkpyxb7u1Nsd45Z17Hswe1Gy6IJrygLrVrsjYFQ5059TnnCcLBmn6zzfG/A + + QVidw2ZSsJiJfp2HU2sSjQKBgC5J508TAEsCXCKG7xjySft1sJW5wVGbrAf+TbUC + + RTxuKVNff2vqhz4jy2LD1PD8DY5x0Gu91YVttBXme2Z1VmDgXRbodBwVQK7u7lbd + + 0qboxRAcuKShUNBFVLV6MxNRMmj+CuntXO/HuhAjft9p5zLQLutL+7U7hhLrfZag + + 53KZAoGACJa7CP/07PoJg90wbxAqLxxQhxTNB226nSFhhQI04CqTMJGJm7i0okhR + + qjTG/lfoWNB5K7y0nLhS91cOaLKw2SM7Z9Ws/fMdC7WtNTsbHyjqXPFyrwMsnjKe + + eih8ic/0hKnIkV1SOjU+rgi4fVeFISzZ2qtjuc+CYRbXPKayMhI= + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 4194304 + seed: 2NVWOXvtwcAFf0J/g0fDC+ypQbnCU7awZoCxcnhpkVM= + zfec: + required: 2 + segmentSize: 131072 + total: 3 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:CHK:dnmjqjku5vyrkeaulae4bt6juu:d47airwaqedeji2omacwsp6yfqp5mpurj776gmzvq43c6dovtdca:2:3:8388607 + format: + kind: chk + params: null + sample: + length: 8388607 + seed: w6uP8Tcg6K2QR905Rms8iXTlksL6OD1KOWBxTK7wxPI= + zfec: + required: 2 + segmentSize: 131072 + total: 3 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:SSK:cqstfgln3ty443es7iodypwzky:bbaqquydugrdq7yuoxb44oathv6x342yp6o2j3eofdqac6rycdfa + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEAy8EGzJwnWVqWP6vX67QnnsvA1zli+zGdsr3x7TQOjyQDo0cb + + 2q8WgMqjZxIr+74v11UVbv2T/1rncbdQo3EVmkGM7ZdHIL+kbQMRGVQyZMIHIqpf + + FNA0mGKkLPUUVm7KL9NhC1fs7FZSs8n18Xm7SBpZj5DgihoFvJ1I9ulhUhLrme9l + + EEeYUl+MFZ8g959r2dzXoszOllains88+DzAM+X0dg/muYVwQVmSl80GIy+DNyjz + + Y05f+vmcje1LhdwGxndRJ4QHfgO0O7NdFLeD130ZnuQMo8Et+PHQRvWbJyMDTfTg + + 
zWqJVxELEVR0ylZThQA+Ubs6xlU50kliJXSICwIDAQABAoIBAAPKeEPqS9kfvwKO + + 7JotZJH8cdQPSfDT1X6ehhfHC4D3nJG3IE+LSRHls0XsUqErwrFw8HsmK1C+8jtt + + Zl9Zb/DKHhh1jhSP5uaYU1hzS+oP/239BCkw+SmJ/EqnLKpcWVVWdiHDOwc9UUCY + + a1kDeMvjTEidhhqhAqZQZCvOinRVGUcIm5jYGDfeB7WS762NEwA6r5Dlm9S+HkYr + + iAoR0bOkQuGDDL9W/7itJLizj8lK3Xuy8XUI1yHVfKCRDqpYES7mCAYWsGj+EPxh + + 3OwVyRvQvjtdyDpshSHDLuP8ggIEocQzkW21XpIbWsuojBLak/I2EHYd2lSB6nit + + kxdNwwECgYEA72Q78fCx0yIJ4vhcqUkWUxdnw5hLQB7J3+29OUVVUT6byabMZLPU + + SJ6Yy8hAy0OR5GR1giplB74KWDceX3wH7OULwuaEj2ImiIqXplkVAFe7mh28Rwh5 + + jv5phhggBfvjZLub/LGkoGkBFu3E1ZBa5gwy7L10gE27HWRYgfheIUsCgYEA2ePY + + IPAPmT9CS0eIv4XJgXYZ3NdzjAOHFpyJOPhbg6WEJNayTrZiRTys40GzmSwKffPI + + nVEQOHbAR/eOjvIpBT4adgOuvkycylIIQ7DZRShOtwxrdg/a9bLxaHNDvPL8wqXx + + QMdYZZxWTsPSTXiwzBBiYerZmaSj19xdielxvEECgYEAj//dRzWf4f7xr4PySSpb + + sXO8yR1M9q8OhBK/5jlcjth4YZ5iCJlbsqskAkDdKOfmVFpRjRDvYO7hzhqpvIoh + + QlCs+Hotdwp1X2Duw/OF/ITJpnUIkjn41RkYZL8SVEcmi6uGs0QwYQWI0EAKTOTe + + qM7huyJjd+JKEe4Qh23dQW0CgYEAkKMepT62HBRR/YbOz9QPn1C2elLK8PamhewD + + az3yAcGtpoaedoG7Whqc6X6DqfoCPPnHAib9jX3Gxf8fMuStNj2zcwOey9QvgF5T + + /hs3HyFSn1AvRX/g6ZiPh7Z8EMF75/of29B4bXsKD98Niz/CnLODm1w6djNET9aI + + gTPlvAECgYBSUMCKSAZvVLMQghSrmVUFas74O6/9eOz2/T3YyG0hgr2RehxZ5iBx + + c8GPRbhAvd48ba8DS/hqTeFRX7+H8RT+hroM+dv1NT4blEuGJDClo+ioC4cJZTI8 + + thRcwEKwbjeyXjg0JZ/z6oAH77Fhg44r8nC0zDwonRkQegf+iJVaNQ== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 8388607 + seed: w6uP8Tcg6K2QR905Rms8iXTlksL6OD1KOWBxTK7wxPI= + zfec: + required: 2 + segmentSize: 131072 + total: 3 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:MDMF:nyipqmlsbqj5k2vi2jbi7m2ymy:dsdawym74j7fgry32phhntgqsyc2k7wgf6hbs737dsrrriu6px3a + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEAuzWxE9RCah2cgZfL7Ki09SgQ6djyb/xW4AuE3a7LjthXWmFp + + TL5LBTkGD6G1KYBw4BKgrj4xl8qpMheIvLP1rV2wCcfx8z+jr77YLrh4VoPINNND + + BX2+GlwSSYU8R6pWFnh7BR/RSMNQ9sXtTDRxnJ6xshIOAukOvREFRZGaO/sjLlL9 + + 
rg8R3oPvlSXtn7fdAXnxwoNnMeX9XEHoxM67MJ3kmBWb74kdVYcD3CimBqUQqkDs + + ZFWmlhVGhtvB38NDlBH3G8KG1ebN9icONcrRTYB8y50FieochydTSBqD7jx2I8ca + + LMl98BfCVsGav3yeBXetiWC/6FdyvWOjass/cwIDAQABAoIBAAzSVhDmGjBbW9My + + WsiYG2CpAFOLxLrvvOF2WIC4Tn+3iHALuOMFK20tpSEf8aDoh5KJJBEa+FmNiz3/ + + h6Fo79wSTRK2a3c99g980iCNCMzgFK+tgmsXXBRBFw2K/wBnhaLfWImWzsYdfmeQ + + UbrE4r0Xz1LDUstXO+euCT7lBHu0DuLrAFaA0Js8aklPm+eeOXyU+j/jqCOZVWRh + + dODy0KpHS2PcTCNzimKnfzC3DNsCedkIhMvqgIoXUkr/xqQ0UmZIVO06yPCfarZ2 + + wbXFip6A3gsux6O5899NKpJ+C3Mr/h6LbNb2p12YGe5xJ78kFR5IkSlkirDzNpJh + + ENybGnECgYEAxQdb6ELI0VyeCGIcJRP1ujtN4+FaslKssvzKU8yglb68ubi1zrLM + + nSGtQG+VexfaqLspch8JpVsY/Mp56d0Els8J1fMjvgbwuoojSKpY3s0Xu65rf7Zk + + /Mw8Ro9lEtCPPnWMYfWNJ6xYQPAzU4pn900BtznycuY6EHkClE2R7CMCgYEA8z38 + + LHX1ry4kNlJxFt68gw9DlzKyBAzLyNXD/lir/JpKeOpmVQ0S3E1orCugkRtfchXp + + UVogQwO6E9HhXYBdn9+19Y1cITeoD9XmuymsYq+pxWrT4ctUjaZiLHsTfsq5F7Xl + + 4xy3Bzqf2YuQ+f560GRaRmOjVpcuJpqFqVrELHECgYEAjHa8nQ7PoAKJX6yiKATc + + 0FHrK6TDRhIOsOPrUma1rUv3u+flJWDu4q7ZlvB1/vV4m4Yi/AsIk2womj+3PnSl + + Cua7Ol5GgvjrsfE9Sla3WM+aNeEZHkloIZlw91TPV+R72qlu1X97jGcf29vim5I2 + + oGWz7W5QXH2ps4ixwAy1FUUCgYEA8tlE5sLSipa/srh2jgXNIfBgZBlKH78Cyj2a + + E1tGQslsZvJnPqzx0p86TQK1qYoxrb5wljcsFJwo8FbP8UESuGZqzYDXpZZipYTC + + esRtho2pKx+v8TPG9DFUvOIYIbOWPjTuEuR9W6tNIq40DVPkHCDE/JfH4NDJU+Nc + + ZSvoxhECgYA1yJCfzoM9MAv4yDu8bLfqS8hUopUHrH+IREbSLverA+zQmh5+5hcF + + W/jOQdK/ow2wmCGSBo3TQla0Y6Zm0mA97QQOyo/N/U2aWWnV4FHa91uhI05vljAP + + /SQ8XCPJD4OYbGIi6lbdwVzSpZVAvPjiQ3nWEAiU5X9tRzBbHv9Gvg== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 8388607 + seed: w6uP8Tcg6K2QR905Rms8iXTlksL6OD1KOWBxTK7wxPI= + zfec: + required: 2 + segmentSize: 131072 + total: 3 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:CHK:bfjedwcwjehjzpwjsgwkfk7rja:jpwea2sgz4hfohqab642yj4cmrh64w6nfo7lv3mtacfzicurqkva:2:3:8388609 + format: + kind: chk + params: null + sample: + length: 8388609 + seed: yPi3JHKKbWaEEG5eZOlM6BHJll0Z3UTdBzz4bPQ7wjg= + zfec: + required: 2 + 
segmentSize: 131072 + total: 3 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:SSK:cq5nwf6ie35evzc5v2wkaucyti:us5gy5xdddvyhdg5ysritrzcphfquycmpemoviqbfou7heowq35q + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEogIBAAKCAQEAt8fnhPTCcY+kGOiWih61BQKwwcz3MysYGYWkvIvoH9BqiT/3 + + MoSzJsx+qcEatHIHAuxqH8r0nAODgQ4Bl+4xGlU/+Px+l1KmR2qDl8ThwLFQDhYK + + kUtlb1kLgED7NrLLrplw1ElI+wdHwActeOfpDAZlxbZGohyq0L4n5ovS5DxjNEaR + + aFA36mDDYmnSkqSIdEGepVD5iBx5F34JckySpuLVBTEytSLKCp1S4I/uEG19KigT + + 5oeDlgp/xXlkhdF3wM4z8zJjCC9Gla1KATJa/5Js5oX9xMHi4ZNpfZENSR5zDw4w + + c/GLMxc5m/zNK6+lmBiUoEixt7kNaDxnf/XkXQIDAQABAoIBABdw9tmpW4zfGLv5 + + nMwAxy+crH2HvocnCcOlnYHUKZc3PwODJm1p6iz6e/R8lkKqYbUQgSfNfB5TP2iM + + aA0gO2Cju41vbVkxWFa3IhJPcUkiBLdLPe1S690EQ1iIUVKkgyDh3veg4l9sig0X + + DUiE8h+PyFbr3T4LwIjwHEhGkO9+AZujL8/2ikzcqFOwv7AoITQYVM6c1amvkoom + + Ag/p42iyL8ywof1oblSxBUO8kDTFxhbazaVQARwWQHIiX1AGdg5za6sq5OF0QPRd + + ib/hoUNJeRehVFUMoNSYCNdYNHYLHboTFnemNZvPLSBGJ+A8iBJAoPPgO9LSEyyU + + diFMkGkCgYEAv8uKz28ps1feQfDHF920ONHxIK1JnvOTMJzCWUOcXZv/9hVlt0JB + + 7fLB/AJt5GkdbuHx6dLkju6sZGPxgENtgyq07bf448x3ouJ3Xt2RlfFHbo7AFK0e + + cVUkfskr/ZpfCMi0tg8GYvXF68MJj/tWx8/4yMz/zSWNFAlzao6LH9UCgYEA9U2P + + bBjKVEudYXf5AMOJeEIwnVxnmVhUFL1i8Fnulm06HvQxCzFEadvtvwO+1J/JjGYH + + L5vMGjhLvR8YkoCFCv+puFI4zH8cVCvrF09o5+D0E5Xrt8pKB9Uvv2HKbXC674Zi + + SAwpoaaavJCsvHnvAJ+/OuS+KBbZV0ye14rPfmkCgYBMLV3u0eowL2A5tJZ/JjGk + + t84b+nfZSElX74tJxQ7gJ0vcw9bomMpy5g6iN5zKMe3c0qUxB/B7zNRv8zpChYWD + + qXy/Rmj2oYmLCoP7C+n9Mh37DXvBOplyzix2pxRv39aLOJx+Cy2wNInuAENWCrAH + + INVhe/rF0npcUPykgAVGGQKBgDUy5ejWk4Kmh3Is96aPwY+AI1TtRlZ+TnXVANEJ + + X/HlrFYsNTqtK54doTjs0gUAxlAZjHNpwWDqVpqkVMro7nGNMryTsFfBNV6Xy7tZ + + cHHhWm2o9N7+EwIR3PIPfjwv14q8xTHE2X6CSEqewad6djfXbTyTgR3mnqoNJuGt + + 7AQJAoGAYtfnWnH7kqQbesu1b0adJZZsIZhestP6CVxk1EmsPjO9RUBjl90f9arb + + H5MnbPNIkOKgLLojpyholHKlBRAeO+B7gh6zF+PuBeNp9oADegovCZOhOlcG7jn7 + + VOaPPT/jxE2ah9k5mdZJQNt1tCLbAB9Mw1Emb5Ku1lhewvWqWog= + + -----END RSA PRIVATE 
KEY----- + + ' + mutable: null + sample: + length: 8388609 + seed: yPi3JHKKbWaEEG5eZOlM6BHJll0Z3UTdBzz4bPQ7wjg= + zfec: + required: 2 + segmentSize: 131072 + total: 3 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:MDMF:eo7strdhhosajkhzp7x5lqukue:fmqoufvqqrgeg5nxxzvvypblc2c7r7tqx2xn5l4gzw7uyuitqhyq + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEA52AqcL8qR3Pk9UWi2qJWey4+VnwOLBN0MA/3tN/VxWszA+D6 + + kCV5Gr1eepPAHYELRdepXeyGEg6x80gFlJ8r+wrVGf6RA3FffB4NmbA3ZLjqStQ/ + + LYptnXoJT27VqboqLXgXUZemGuuW/jN+NfMJhmi6PDpKLhCH8smVer/w2hsVT6Y0 + + ltVxkmvQMLA7DHqrgaqR2CADWYloyx1R1n7uSf93TK7Y8R5U3FihsXimfQ2o/DnM + + o5nB0u9QddlplGFeDksVbYSR5TSp79OrLi6MTF+2o2A8PoM9gXy+0lhl09At+V19 + + +r/VzrjkRXzNSz7HrSguoU2Ys7aY2fbwhlyPCwIDAQABAoIBAB5t+tbuMADKtgt9 + + 7OT6jGDR8aKNLo35waz1L1DrTqGSr4MXP8zdE3gtUfN8DWcJSt1qIh0MrOCgwc2V + + pMo+7tlVkODve8yV81wPAHSR2jnVygEtDeFkPyCtZUkRnL8gO/N+mPktoSERkpT8 + + WmEnsBKBy9snmAxGjKKlLr1rPfa+cMDADzUC3vFwO1f+3RRM7PrMT23GsBuV8cbK + + uiTXWMMjNNbndNVVDFG9T8TLIAC6BKzzkXSWfT6oON1nkn4UZW7WVAuIikoBNb9Q + + +nkYMkwJ2BmCaMnKjMshBU3MuZFWrzF+T6LbhpeKRfm3DGpDh42peU+qQi/rFI18 + + ZxMUhs0CgYEA729qc5i5/6ENSPqThf0RMsV6FO1g8ygn/2ZZMy6Sz3Jxp8pv9t8a + + z2YcmxBiLLYj4mVSEk9zobL64Y28W0eFP3xd9MqVe/l+HuRFOrR/iJaz9T9HrFyR + + cYeekAL8z0VmQ6kA0Qd89ILC9h2UNN+RszYP7m0XuLI/PJ5CvvmJTLUCgYEA92IC + + OUzkdF/GA4dyor1T1z4nSKKg1LFzY9Zg0gtkYLR6Sud+TFAkc1hvbv3J3gxBTF6n + + V9/UoVc4TMf5Qk67/Lu+KdVGZxVBVuilUg3brvXG4ON5dBncpuTH1GecRsMTkqP0 + + v0BIG+Z4djzJr/go4VLpsJ+94ulEQplc9tJDhL8CgYBkFuslD38RQT3QeA8bP8Lk + + unBiNykD/JFbzmkTYDC2z1x7i8BqLrGCaWkj2SFxF2LAzSIVzWjE+5CsoRdQAQHO + + nCqaneUHQjBasYnPFI0LiBQKPT20661RDCRYhycvbg9l0UwqFTtC6zacs5i00ZCS + + ndLjFG+KIdkVegLk2mNu0QKBgB7vB34oykxvCXC5iDEnYYuBvyHLDDdsdRRf4z2A + + pS2eg8hICDf8sYIm5dBINezpNWUaVOydFZaTNHwNaXLMK5+fzlimzaXoN4Jplvqa + + twS6wQKwDyjgbwIDi6VYy2bhz9m/XMRpglrSx+9pDINPkbUTTBuE7haouptlWAWZ + + J047AoGBAK7a1AGzFE18nbWjTKeTQI1GWVb1A+8nUQMkyntaRBbTbKp0w9ECngQD + + 
9HMeoTvl58e2VAjkQq+cLxMRgAb1bvNN2BCcO15RtUq1XrVbSIxYPwoZgIbKSwc7 + + LIRZ508gOcjxs64Ajkn8r6mmk+nYyjhuoRUFplDW2pc+A46X9Xq2 + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 8388609 + seed: yPi3JHKKbWaEEG5eZOlM6BHJll0Z3UTdBzz4bPQ7wjg= + zfec: + required: 2 + segmentSize: 131072 + total: 3 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:CHK:hah7mxwfpqemm7icdh3hwsa5fa:6epvxt2uxh42obpnfn4wkrplqml7voh7aqpnqnapu7ffcyn2hk3q:3:10:56 + format: + kind: chk + params: null + sample: + length: 56 + seed: YQ== + zfec: + required: 3 + segmentSize: 131072 + total: 10 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:SSK:zphopulx653rikxed247gbiryu:on4kfty6f4pskitmrm6zbbqufh7fcnqoklpozwm4mlhxnmc6guoa + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEA14mWsmxQm2A5nZu2dQ7rdAaZgb9ohE1EcCIjycMMU3wH36UL + + DhhwebvfmqkQEaiwomYzisZZER3IECSwqbLegy6tAoqrj+aQLszjcSWVi25FzzgN + + IGMtej2PgXfoJO7G2rYLvo44rCYUmBFU2+jKcV+1UF7r3wCTzhxzp1o2aQFBPBg9 + + moMvlX6nPshjoXUTVWjlomiKefoO1pT2FR8cKSfgKSeuyNGXR7RzrdP7Tj2hNhf6 + + AsLbBY8RVsqD/Xec/pOm0RtlHdgXmbcSON0hL9TczYH7YLU+4E7tGR5gXFUj2zy9 + + zwEk7vJlMgYGs5OdoZhNcr2T3BNnOLnuc6DOQwIDAQABAoIBAANtReqZC529JZfL + + +UWlqPiOx8hU6DM3sJUbkKG1poHCQCPL/imA+HK+VzoPxrbpKVOFER111q/bkUBH + + PQ8Jes49cM5x3Wi8fq/L+arlUn5LFjcvGqGg0r43IS0foynfgkN7dk/cnlwB+jSr + + ybK3YgjHjbWss0khn5aV9iHn69K2467H0QNG3Ppufx3Gshxjv2p5MdWX18oVy4F5 + + tZEj+o4nTegU2NPX0BNuroP4rVUke0pX7T2M8tYWaZsA+tl3KqCZafO3A6lpr8r+ + + imVvLpOFjnTJ1K7Ruhs6A8kVpd2VJsNXBE0l/r+I53gm3HWNkEK3pZLGZK1I0MD9 + + v2QJx7ECgYEA5f7B9OgcEhDXvsvm5pOwoaPDxopOWaHt5Os3olT4FBfCrMf/C6Gw + + 5KEI/lc8WcKdTX+suBBkD/+M/6ua2VxHCnatWpXBRo1WHEgoGFGlvFcQ+OGPlm2d + + w2LPtFov7YYBDNhW0B6ETS3Pe9vGr2spr0V6j8fhP3BirvmKwap2gPECgYEA7+hZ + + 4DboBwIWI4iQPJqr19b/5B9t6c4HRpfLWizjurltDENpPkHXTjpWvcnxYEpgyROB + + 7PSavd1yMYmmf1EY68MNRkmnITMRDIPQyc7mop/O3xwyS8BPZ6+8nnOfn0bLmdNT + + kkkVEM4cEZFSwUUFoqQHaHtDsgwzRBcGXHmTAnMCgYEAtbWbA6VGWDeaXJG4Mb/J + + 
s0sxZ/DpigNXcp8r60L6ZNWI5v1z0XrDyT45XskJU1lg8lPG3/2DMOiUO4MW6lfv + + gKLWv1TFyLntqJaRpvUK3kxjil6bFRwxoqa0tybx6tUOi1l47SDPIjLpVFAFH56o + + 5mMcO/CNU5O1Q8zABdZpneECgYEA2mTeTGovVxHjLX3IMCNthBNI51ZlLI5NuUm9 + + 6N0sgnMCfkNvryko4yHgjO0lOs76xJFpmVgi9ex9Y/M3Cne9BAKQNwgdiO9/+bCV + + hOFAu5JXNGvqrWLn5i/ouSXwjYJZHjNuxKCa+K1oh+WPPDmlI6XGyKpNueu5T6bW + + N6DE31kCgYB2l7HnVZoOtjgsu1S4PIok8o5yiJhImn4b7fXfWbT5k0gnBqpsL/0C + + uMk4uSMDC1zWRcQJSAZt/ulYvXSttth411Ljx4rzOsh8990/Br7xSngbVqUaj7t8 + + F9DHEhPnutBBMj4ky9BvqY6FSHRylmDnbx6ByalFkoKzHr2kIWzkNw== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 56 + seed: YQ== + zfec: + required: 3 + segmentSize: 131072 + total: 10 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:MDMF:mnsqs7br4xbc6ghtpfl5ewzjzm:jlens2myp4td3bcjogvs5gl5cbvnxl3fycfwqyi5qqxp6ddk7etq + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEAi5KYNdtP7AeY7JsnuEUiz/VridJtZnGytDRMoVNgpVUZ7+uR + + H9RCE0FiwFBBk4hLw6sXCXMHyBNDlE5omYzuX7El9+/Nfwc6oW5APwb79cVcMYMU + + mMXSsTeyQ29kvLt8WaoOGWdF1SurdDajd3AQndcbNQmYy4WMupLvGE4ZCaogUGYF + + qxFqGwHLJvIigHDxERWC0PKQ6Z8PlxeQLC6GLVJDZCJRwNHfGXxkGN1Gue+JDh6b + + xmUi8/JftJwrU2ayamn64xnwnfodzT6bH6wcxn29pjy4BdZeKA+z6WA7jJ7JHLp7 + + nTEfjaPy48Ski45+O+if5ThvjFSah3YXU+PPDQIDAQABAoIBACzflFVYbgEuTiHg + + HmyVucQPnSQCBg9WRcS/PdXuVxfA3SZwX8fSd+316zh2dSboPqeppa3xkFJosyUG + + 8oVPtMIKU/E7ZZ/OJLELH9fDuJVDf0kh4ijeDUfR5tvcgBBX3Pp8/Kx5Mg//ys+B + + 05uOaaE9q+8o5zmj9eN0Yy+2yED9OntDYiBg0aYuB3FpbKEkXPkt502NHoIM4wO0 + + retY0PPZsMfkIxYy5LC8buJhnXDApBWYXdQYbs9tcXAfUM/Vwbl2ds0Amb/PTnoA + + gf3PNPGR0B+jNR5kjbGUJlGIdxRKKHbAb1y99qTAU3XreaCS+1C6BnTAYI3HbtNL + + 1v5e7xcCgYEAvXV3a1I5/PJ2X/vgLsDb5rhitMNyvxhepObYvMBpl9/GS1zmkxrK + + aBgC/4YaLEHaIH1gJO9wBplPHtuDZRgr9Eb6X81qBqzhpWE3hWBwi/+cF5blbHlB + + IQem6zOraUMMJiIKUCbShgqht5DLgHupEett9N54A8+OxaWHh4MH4bMCgYEAvJfI + + SL2QJmFg8RtsRb1e/CxgvXNjNNRHNrYCEeBJuSuZShYUz7JRnaJVpaMafHId/PV1 + + 
nZj/OG8FlRZMORYsDZOU1EktwdrfhXX/Jrp5QjIGw3yws5kNcl1rvmR55OKuJxDQ + + YJEHLx1EgXYWKnhv1WzD/T1tYU4TvwprMJP1rD8CgYEAk3FmdYwxesxLGZnQtzH6 + + MQ1QK/NrSpKxnU3WYNaxlrNdA+uRuewAl5AQTUHU/pplIiHQgA4jNc98Bry4/iUY + + l+vhEEuxdu52URledxs9m4ZauPUDKS8YY5cr7SFyBeJbAxY8xnHgJtcBUfWKmjwi + + sMJy+T1lUznll6Wh2vE7YgcCgYBqr6p7i9EKBThj7NF5OkGLgkdPpQDQF+4ZQyk0 + + l57dA4753Df1rriA5h5xTy1ijOPt/6WDe9OVRyjvR+fiu2o8W+prlOIvsfOUekXW + + 0NJb4hT1bYpAbyquMa8Ly6cxFhLSwq4+koxv2KyyV+z+JZeOMrNEhQVlcFe3UNuG + + ZY0q3QKBgHPo5ViFrazcRe8YvW3e+8EVGhOiI6iaBHwWtv/G2h3Sb2ePjf1FPdnv + + dW199YvKZ5BU0wUjGvnGT/EIY/V6KK98n0SodFm+yI/tI8uREVjyLkA1BwWspuZe + + SYapkc7QNeQdyIq+Y9ZV/ks7Kq6XgjWXlivXE6URhBCr9xcn4jKb + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 56 + seed: YQ== + zfec: + required: 3 + segmentSize: 131072 + total: 10 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:CHK:dx7tvyr2fc4u7lxjc6kehq2svq:tiy4qh2g6lqejxcaym3rr7ymkdkinn4qised6kgxloj7sptsqu4a:3:10:1024 + format: + kind: chk + params: null + sample: + length: 1024 + seed: YQ== + zfec: + required: 3 + segmentSize: 131072 + total: 10 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:SSK:fo2s7nih4drxob5yzbnmv3v5lu:cbyn47mjceyddva5v3fwwfhmezre57nsrm5q37pxqyg6lt76vmsq + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEogIBAAKCAQEAnZs6x26D5CRQj7bukVe0NoSa86YeTbow12aYJ50262ASxppU + + 2i0am5SA+XcTLinpy0bVuSN0b9kXqkSnAPWZ4fwbFCsgXsLvNwTiGBo+Bkgwlarl + + 8VVMz+FJLYpGxviWNlctkZ3MFEkHUs5SRnXLAE6fz2qhzdXWW/y2D3h34pqG4ETF + + 1KbU/X7OSmLzgo5hw6OsyxZYZYp/oyesie/y21TeE+6LyClb3O4mI80+vco6A9vp + + xncn1MJHDLpfY6z1sLOfC7NQ4N7J2BUvabJ6HSJVEOL4LbfoBJwo+/tKDQV48MfU + + C9NPhbV//G6T2JEg1sHcMNQuOSwQVe+hEmY8WwIDAQABAoIBAAlAGuSEjzCfXZn6 + + 8vrVdh6lDLIPuIzBEb6FFQduMxvb2SLXk4UZlJ3uyt1rzTAcKs6+tCrqhPWYNBQ8 + + 4r0HZCY6kPjhyWhPlv7GRlAVjsuLmJVHISdXnFkc5xnJdMRODJeP6TPvERMTJ18r + + /F+u6IVSGA+7p1ePeKG09yl4uO5YtHVHEynjrRbfzk+2c/6ePkWSeCpVDndCQ6eU + + 
me64LjZEPMFMRlGc6GhfqcT+0GPvyWIBocHZd23oAL8cJiMdXykWhDlCX6ZkkcHr + + N29MILURnGBYFOuozrYPe8zrZTGUhWpWGjZQLSYleyBM5BjCX0WR6cFBrhcdFLW7 + + Fgt8ORECgYEAu16bOPlKglvktH4ro4Ren1JpCUCW7SE8bV7hOG26MIWs9UACMYPK + + GdbK2Zkj9rVD77IQ5zWNNmmobe5jTqNpyOxMETsV6uRiGvhfzt1AqQOd2YR8vUKd + + ujdHaaU6owcWFOVDDZrBsgftMLcHmxFQDCAtH+ohfidA/OkVRbgs+OcCgYEA11XE + + wB1JfWDo/dIPUwCHUmE1AkJBEo87mp16F8e37nBXmQ1usD3h498ACRbgrNbdSqM0 + + gJ0PIKLysM781PqE7gGAn6WeudlLZ6/b2YHh31HDD2THio0+MBVKR4Rdv1rHxe9B + + nYz1A6gpxbYpFt1MrZRLRKGqa/F1J/bqfx6rbm0CgYBiUhS14urcWQg8RnDzz0Qv + + 6ni/qCsKqAQjiEQ67iljyOGnmD0Oao+k23d6k8excBEEOLZx/UHqqar+dLebzlh2 + + XLjV2eF4bvukF21/Cc8iYYl1WPZ0Af7udo98un14iwFlWaDEBM9bcplelMzi7ETK + + +B91vdBxeHu7uzu0aB8BRQKBgGUqsIMpv0seaphFRlnSl8EGVmc3RWc4z+H2NlRR + + yoJFWYJYozY9/JCYRmX+z5OkZtcYEiSSpXbJ14dl17cf86/2GL3oi8f45MpT/tAT + + i1DmEuR6jpzzetIQTpOHBpxORCkkHQmuHbaYHPf8exV45vtt/mbCJVUNXeNmyAjt + + GdGJAoGAL2huWld1IJOColg2ye15MQ1f9Pyu+9aaghglEtGrvnd3VgcH2cdVbsHj + + 5Sb2tSgT4m1N0XjT33nYCyTnwLS0txPqDU71KlTLcCfH3iPO50ppWbU6qTi2yUFj + + +MmT2Wy5/VgrsU1Z9hw7+FbXNeRnPNns4l43PE1TlZ3uQaBSF0U= + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 1024 + seed: YQ== + zfec: + required: 3 + segmentSize: 131072 + total: 10 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:MDMF:6nqjbopsod7vfscqp45ndtfhna:ef7jrzptougwgmcofxtu3jrjgvfoib23tms67m5ssugrmp4ddqua + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEAuyDM4HLs7yvKQRNJvNj7dNcszbLrH7da2Qzj+OF58jDB2Wqq + + 9ZQQcLowoltdfRZmPIADAYy16w05CGa1YeFI62ukt7KdZZ0sbGipQxaQShZzeDjk + + 6sSFyN/iQ+v9ZX0GwSSLCwULZ3JRbXPYO5fRSa1WAd+OuLQ7vC6zyPglC/IyKEXG + + i1tBPe5GKboT/1iKUjB7CRybFtERRQh0wZ6L5ChnLjpGJHEkz/Z2WZcSwsSvdcb2 + + gRTkquIk8kPMU4BFZofuFEuqVZVe27sOeklsPgeCiFedifX+S49UMuil5zVjr+VL + + kG2/ZI2IRU3JPA62RSlNrvtSaM9Ltb6TBnpZawIDAQABAoIBAByL/0VEXbSKOSNz + + +6fwGsLzKoDCM+VVlZSOx150mKO5CsbjY2uYaBOntntQ4Ie7JDiZjMaD6U1k6ded + + 
bOae4Clt9VBtfV5+5kPZiZd++InHjVurTkd/AyvRn7/IihJyoCup1VOxNCasBWkN + + pvtHCKSqk/gwMHfT+FhCcFHkgn5PnsfPbZRDm7jCIA2gaqmSISL4gHjJginqKiSX + + HtE4E+GO5NEkzIiNQl3kf+vcEBOTYgj3YeAKZhqO9XDGLDbZ1XHmWV8y9n2RWsjk + + ySZuMuvOYUQrp8kmnAKe1oRGrmRLexLwHM++raXYUCHEauGOirRd+e/yFHlWTA1/ + + tuqjfXkCgYEAxV/pHECWeSJaZSyY4ygO2DhYlPPw1YXZWyZnp4ptodyRj0SPHoqE + + ucDjjnQOIvgXRB2ohTrEuf0CTkJcnWtrgaWXbzHxMbLX5xp6IXeZyyU7Ly+JEzY/ + + DIZTrdbmL5vfq3pfTHIt3ea1bHG/Q6CX4ehjnV5NmJrbBpq4ibWHbIMCgYEA8rXC + + Kx4ASPmvIMQIyRrMR1g3BURiRBBdJ6O7WTFDBfUiJjnzqSo2OViZbepXMxV0h4xe + + gMoRaZAxEWc5G8UkH+tL2WpneEA2w9o9EjFX3aUz5e3IKVIys1AIIdof6r6hoFV/ + + /oQMherBDS6mxmyS6GnzPvp8UmuCz3DD9HSpmvkCgYBotxPkC1hJ+DHhT6HlkpEd + + ofdNP4bcoeDJfTytJMI5h94qFoOf/nmgW3ffUi9V2i3t05Ze6OkKi/M3NfoRArbM + + 19/Z/LMsXOgzElcNfni30I7v39ZnvPYCXRn0Nvl09MvcHFaHJmSzP/2tBUQmSwOJ + + tVN0YF3mwvHFNT0GwqqQpQKBgQCr5Lx6qwnKpUM58nzCaT9KPBjjmxX5XJmNLHHQ + + boooWv9vkVWXdnTm0m//n5tYa5aXNXvsvK/uUpfd2nxgxZObI5sZhTl4ugnPVe4w + + x0+Sg6Eo8+nyEewkgMbxqrk2GQMBOeynhkAUTDmjq2mkWFsHTZpf/Sk1ej0vy46M + + wF8qiQKBgA78rBWa8R/AB02hKh3CrIyoMwl9WwGAWRvJUuxtg8tXehMQIX8c7Wtl + + TYNLuIgitz7X4kPIxhUkFGH5kX9t2hdFaK+y5cTLayZ4Grf4poLVLFQm8rm/vApZ + + 3wP28XLiz49qZzx1LYE/n9k5V/bpQqSib/RtP0lz6Y7mN9RpRZKd + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 1024 + seed: YQ== + zfec: + required: 3 + segmentSize: 131072 + total: 10 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:CHK:yo75evk4cte3b7rdw72zxvl5ye:ex6h7ff7nclucjtsqwgwu33qgmb67t4ezbrki4zbgurwn2ct6bbq:3:10:4096 + format: + kind: chk + params: null + sample: + length: 4096 + seed: Yw== + zfec: + required: 3 + segmentSize: 131072 + total: 10 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:SSK:x2ypcho2ngz2yydnvdfg7zmuam:rnym6enrwa3tqqrhmgyo2wqdhmxfjn7wpzjrkzjtqibazembi62q + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEAwfmkPrdspB9pzUy2t5MhIH04ydpX+d0D3pHDWyQO5bcyu0eL + + 
prmWkEvPlcFW+ze0WTsp7bBnvl/ZNm/4ATxhq41cB96l6uROwL2jVOC0sJYRHZFZ + + 8BplNO6frfwsvGCVmml9cLj587VP1k6tk4eHglg1pK1rVNg5o/FzkmieGGDW/ynM + + 5FfsrCeWbFh33pr4NqrHHnR+VDADFwhYzHbMlWLOS6zujSoV1Cbum1/BIBxPijIc + + MxdsqJKWyCWA0mSbMXsV7a4altzIWax8fEbx8oxCiBPTyNFhQNW5sB0NcwQhpMf2 + + mHEuuTlMVw+RVYNBtk2vrZS9pOi2SdfFInghGwIDAQABAoIBAB/RcYHWIwHYTfRz + + /nl7WWY+aYJ23efUZg5Bbmr0rbrnA5vZHw0Qs/lkbh8nBpERG0oTHIwVhoGhsFGF + + KV3D3VOUzav+zMw3JLyUqYZCkRvG6fTQgxERtgM/lz16DY6IRfynttYMNE1SiE+O + + OrEQp1ztV5NKh74e9R7cJt81E6XKHPYAR1T78RqRiz8eTrfkECI9Ad73043NwRXQ + + 273Ph9X0EXKZh9RQYNuLXlvaTfMTU22JpPf42kL6/GlZSY7Ex9ldaQya1V1TG8zr + + qmcTX94hg+TMXmt6xx/imxt/DA3JOqTpIrA14acmI757uUX6FmGHI/0ATQBL1Cr6 + + 8oY43bECgYEA1lVDh5cr7aGRIcomfrOHQeUQgP4EaVpOfF8VigcTk3GqOxfSKTND + + BqDUiOO14ETjs/qV72poFXdruRNZDEBT4w9MwZV/J9K0eUtNTnh55X5BDLIG90nK + + yAfcdh2PvVeDBeA4hoLaEsumgBy3bWr2WfY1O79sPXvoRWJT10+tnesCgYEA5685 + + XDtNGlz8tSUJpBXmaAAoJQzbvRjOtlH65BujTFBWiW5XXH3kURUlCOYShyM2Age1 + + S01e22WnpgSlE+4DbrV0MVB3dBYLif13B3+iCOPZ77m2uO4kqB870K5wINBMWuf8 + + /mv69nPhCS8Mqz99JOGNl2vLtwQzrkfNhldmTZECgYBwTlUIIyodZd9KOUZadW+W + + E5TGQlPFcFBX0urSXErho1lzhVPVysqAGp3C7K5MSUyW7eLKhJLtTJnhbEXoqXxL + + KaUqek8aasmuFMr5Jx+YJMOpB0+nG79peNUH/w1mRQied5KmyMHDv3oK/wEOEFHt + + aZkTKYZp4Rcf5BnSZCmw6QKBgCXXKLc3uFAl/+BWPEzghtFVtTjX8Mvh0WFV4nR/ + + TxyXwoqPyxUAOtpDadkaOsx3o8qRF7tE18ldwRQMjinDJixe1qt3SQtczmWrUFWZ + + Mw3gqSfOXVm3C6Wp9EsRMp8pZk8ytM+ZM1QteQPW+2q84+OyMz4YDR3HQemlMJxQ + + ihUBAoGBAJnLqJG0ckNRnuaD0SM93Stvx5xOu3g2uzy0zJYjzjIQfu2gNNx/KnuF + + iOOJID+Y5HgvqxXjIuRzxyH51rYeojVcouSIxpkbShDkURDSS6l+pfEtRcH1G1GM + + CR9cLUiNJqUgtyLxrFljyKKdfhpQ1o7o7DBhJ2C5xwr8WEko+L5m + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 4096 + seed: Yw== + zfec: + required: 3 + segmentSize: 131072 + total: 10 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:MDMF:thtsw4tgtfmq4m3ajpgxuodvum:64bbbja2hsofykzx2imjzvtys33xhinh3kuyg274bkyuknwtobha + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN 
RSA PRIVATE KEY----- + + MIIEogIBAAKCAQEA5PUmN2ICIhg9Oj1fJgL8KS15MQuycqxSPnRGryj70RgoY/x6 + + 8MguJ8Ofz8JvXBVEOzMLT4kYihyce6fVPTzNv54/ln4Iw334UDEVmsY97hNf8/tl + + EuksNVofSTmUOQxPrJVeM0ezNGFJTVEPTPPjhPjFlA+ANox3h3x/W5GI1zXJopiI + + llMmpo9Q+UhcP5R250bchMJ+wxK79bwm1HH/1tfICLCeXnoX3JT+ndeptbfks2j9 + + /aQEz1x6YLLkLJFWVZBrCTwbQ1GMmKHe/7fwis3yUknn4MtzwzeN2a+KbNPCa44W + + /77Ono+sGlZ+JfdOmAr+eGgO2Dn1sMUhZWDqQwIDAQABAoIBAAsQabNdchrxruvE + + kXeFx2e6AdRD63CtMSBBgDTwtxKIp1MFnW9LTSewxWVF0RnTEUQHGHHUfzIVZd53 + + 4s8dxBeRbyM3nfbMfJZreM7M66s4lnd025KJYBCH9WEVfjsvhB4j7bRur5NFbERn + + OWUPmBwR5YJdKWX5bcFHW+Qx6Tn1EQoYnOS+e4gYbf94tK7l0J1tqJ9h2LOo6yMe + + PQctpdwRKAOVneENboh1pdxIHYXwziayEjuAo8QaPkF2LxroJASgRG3o2aDg7BUo + + EXFfvhxBzFNe9zyCVrBp7PfH740R2CyT5vbf7W4d1vhN1uqhRoFLkVWW12DEAvD0 + + 5No/hNECgYEA+0wtYGe7c7p1rFJHw88ngj1t9gWRY4H1JcNZ9o9/7ZOFQRHG3d+s + + HdQOmNJ7wJlhIjp/NQQT5TiffFSWAaPlD1oUJ63N8UupyEGlQsONnGoA66vOkXVh + + +Pb3MhD1kH5HDG5uMp/BEIJMVDJnf/O+8McEEbvNj0Jxf3ovM0oGohsCgYEA6T30 + + QGc5/ZDXmDZDbq4kaBL9WiqaGj5RLMluYfLDlsW0pxOO24U/A/wP3qVzNNjlqoFN + + TApi/x1TUFwaQJ3pjiKBRTO36+i8yW0Kyd7JnMws7ai4uNYlkD0gUUynmaiYvEJ5 + + QA7sgBuDhK0aI+hSoGCU5gPNv+ErWE6v+VTLmvkCgYArS22J7XU7NAWwAaEBmEAL + + TUATodPxm+M7dVObig+VQ9QyaLilYzLJFM7K/4B4pzQ37HIcFS7EUCQSDJSnhbAi + + G/fa+jO//bQrnzu0q/JK32x3Letx3hJaDVp7UrasBUWCW8g6ipF9oaU64FA6mCju + + XKtTztJUezMIrmlRYdCQvwKBgCoZQZ3iQ+hNnWxe1vsCOZYDX3FH4Tq9Zr9zuBW7 + + 0KvFEZ9ae12KBl68v0yLhmjSgVmuLvp7oXS0oVYO2boyBnbeKYEJHbhZ8MFWiiz3 + + pmJDxBQ9cOID3RHUxqGF+XZVpQPN5761MuDIlot7Bw3WIBvMcvO1Wgy5Iq60vTR+ + + pqVZAoGAbZurdaGhKu9DyJroKTRRv4Sk81vsI4hNgZuyE8FW35vfJf3XaWVIVo5P + + aOtH2mudq2vws0C16/W5fq+vwLa1/xY2X7Ni4ubRF915BHo2zTwR3K5dUZKG8QxP + + hHOaZsh6CgxL2cLCiUGIWNpKkcOz69GnZZoY0KJS0KazKTDGdMk= + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 4096 + seed: Yw== + zfec: + required: 3 + segmentSize: 131072 + total: 10 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: 
URI:CHK:4gokef54smahrbfr4kq3jhc4zq:owpwwfp5gof2vhly5u6jdnbsfuwwwhqkazpsbeg3nldxv5pse2iq:3:10:131071 + format: + kind: chk + params: null + sample: + length: 131071 + seed: LCa0a2j/xo/5m0U8HTBBNBNCLXBkg7+g+YpeiGJm564= + zfec: + required: 3 + segmentSize: 131072 + total: 10 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:SSK:eb4hwxibn33xlwshgqgpridd7e:7ca4kdczkgf5dowmqrazebqydxt2cmb2biui53d3etgy3rlcwkiq + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEAxxM8zLQ6lSfHQ5U876H/kkiJ4+xxuvH8HNaT0EhF144PQyLi + + pYDMOgv9h9nN8Da9w4wviZLj5kM3kVhPF1ElPq3JNEW80o3J3w/5hTX/B0OpU7t9 + + SXNcKGOftT9tYx0+38LzH6bbqQnYAB4KFfLyus1truXeJMzIu/3pLoINNT0My3ZZ + + 3Fjr/+Pwcgxq59o4atIJlzLILQo3UwpofAwBhD1X0aNiHc/BYxjMdAY0uJABvwxk + + 7JfC2XckIVdTCtbOz4b6m/HoGU71/vdg6gRTmXy4m/18b+yuEE74omovDXa1Exkg + + 1DroYe9s9zQUzLZABn86xUZwR/c+vt+qK8ab9wIDAQABAoIBAExNj9bIV9H2wrYh + + PA9/cMWBfzS42mi0upTVHCfPo9F4lln9w5B7Gww+r0kETx585ORQVaIuBqMp7WEM + + z5fY1uU82CtsdXDgvtj8Nv/7j8oZgYviB6YBDPhAIyVl78f3HDPI9cYSfww+BSga + + W3RJQAcgmSNZ4PkK8v+3VUqpt2VJWUHOHp4CETT3aL7JZpyjt4IlUb6gROey4XAg + + KubV/WTjrHFbS9RyFiFD+ZpD9eXnGand1yUemMDy5N9gAM6MAdYHjNFaO/fq9vGK + + L6Rr/Xf1vmDMJ849kCEG4rOpLwMkol9v6L/eO+xssuvPRHHW/BjCbFTtkPAD99vS + + tW80Co0CgYEA1KavoQqX+ApzTahmLON85F2RkZVMAFmNa9PYzbXrr3pmrFJg0ac6 + + V+vPYBTlaJ10bDLYEsD+rpYqiHUwXsm99yHBxfhRHdvQFe5CfjLVQqh5p96PMta3 + + Eraiy80cS9287VTaVV4IVJarbn3BootOyK/VGij5YNR3COrrr2kDgiUCgYEA76gV + + AIOEuGPteM19cUYRzJh/Q3TlDDhc7jK3n/N5Nivtfris33AorN9lEtlQ3i/Gk/aY + + pNUNFS9K0PW/8u3M6yoGDeh+w1wLb1RHUTamZvOJiTSknb5vNZWTqFx/d16WvhDX + + wP4QF5YKVVRdOGnrm2XdWlsbwE4IlMWna4NSVOsCgYEAvQx/AOVhGzN3NGfshiWr + + x33jxxB6Y6k5j83jZWZA5F0l4DbQOjK4LKfIUbviAzJP6Uz+SRXolR+NKok8elhS + + GN2a3jwXKTtc79JErNrWOw96MCItHl5CnVFew15StKOprTiNbe1N7J2SRIVqWu4M + + GWAwTLR2l33rYTMwWl46rz0CgYBX7oX2MEtMFG4XOt5h52G0feeD6qn3t95xD27M + + Y1sAA1Iagsv7F331H+pH5jCDtWfY9ku/fuRT94wt611IVvQu/LZH+Bw6tdUEPhoE + + 
tFaNw6GdFBGqRysqr/0DcxzZwXzxs+BV0WI6JTUZZeDmSAbId7Gl63PdNUR0wajS + + C9bzjQKBgQCxJZxGJT0MUrX8BzB/tRcdPWpFhnQtDENMdeunArN27KbL04dSmrZZ + + 1V+H8GY1bBooSyULm+H2fouuavnHN4VHp6DI2JJ9wcxNsY8l8NOrrwh9y3sexWFJ + + 1I626BJlwuFT3Vd/0IUpJjPwQSVj1On2cAMTfsd3y0B56H2GiAXcjA== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 131071 + seed: LCa0a2j/xo/5m0U8HTBBNBNCLXBkg7+g+YpeiGJm564= + zfec: + required: 3 + segmentSize: 131072 + total: 10 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:MDMF:5spgmqz7se34bzntgo3wgnqb34:zbzy5bsn6kkh6dz7leo4x3cfoiuhnotjp3o3n32nj43rkpoee6ea + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEAwi+iQ/lFevdNoN9SEG0Ie3i85CHwJAp+ZcVxyybkesM2bDzq + + 1QQIfD/LZ3Wk3BLxJNviF3HPVAEBjA+EK/vI1hsw912YAmClkt3/YQOIVHZ9DKHk + + iNejKGC8ViL8YHABszBlnPshVB1k/oXwBO0izlwP+9hUGXbHjzuksYkq0FLlGAJD + + bNPvAunUeC6NrSsA0lgPDoVdMop1dktN0tOhLxEauDOQu3YpVHLHhPVRpTQ8c2S/ + + C4JsvoQzT8BWuilfCqjkua2Ul0oq6mTSV5nVE5GFxcxghV0+5M5csQtiFm0wmxTp + + ZQFGGdw8bGMbF7Ghn+k9wfPnJroSGODuTGyUqwIDAQABAoIBAEH5wiVbG2awgGEA + + jxbCnMeqmW7fMwJjyFcWkteFgspM6gAzYEv4f1OLrzWbDGSzUNgHlxUFF36AiwCF + + ww/Yj39jJKte0sc4A/lW0K4q75ZW3Zy9onJ15VrSJxsS7vFrDMDPWC7SShwUkpxB + + cG+UDCfVsp6L/OLb7uh0yLuDEZdOgnkDP4PD5v6bhccxm4VJK6LmU1+ozeFVoyR8 + + Azp/kCPwBjfs0qZ+pfyR+0uQlgcOqW/JcOTeGk0E0BKHxYws+eEZS/FpPEeKYUNs + + tgLZAcuqqhniijsWpvkhcItD8wn+Bl/6ieVMDdo4qx1weXjlTx/CVKZOcsBNVULK + + PTDUQuUCgYEAyVVq3ac/Z30nvDJbrvg396VqY6SDQF2QSVbV995ZKRofxJwpeWET + + QNoIBgoVIVxbk8bOs1ojAIFyPkqtIuunkTlsaVRfdopx/choA6mzRHed4YQOV4za + + Uq7xwmmOCS/vJA2xNl+BXnDd+K2dERtLl8iW+zzBbZ4wJN2vvvpKn10CgYEA9ulk + + PDQzTpa6SiOO1VxlhN81X7HFY+k/FJe6Ro9tVnnOCGJPJcum+4yicBATKmhrwLQk + + H+mmqxj2VBu7WLCgNpEEaQPmVLivE5I74wo7zAM9952r5Cw+cf+xtfNNYL324o8j + + VBp3FEV8U41uy1nsmCyf4LUMBCyFVTca25NCK6cCgYAowDpGLQD/YGy3gfXev20M + + mhWjn3vVflqjDYl3hzDCyf/eGsGmSMjN2pO/LTFDtF7w1U+nK7pj8s993j2XEN20 + + 3kucMjC0XKdf971d6G5ZkGCLceA5RlA2ZiSW9iiCoYok4QSafdBAnlW/bNyaxsyR + + 
J0+wAIciOd+CxsA4xo5uHQKBgASjR6W12UzdmewwlMs/LAz94FPG1A1XYT7yxqXy + + pbwdF5iiuBfepmlNL/Po6WM/iN6aw57x1ZabJm1YBAHbd3bu7GVIlHf87BTzBzrx + + g0QGv5A6HvNvPVEI236ubkKl7tA8ng5DXP89euNa4bziGIaXN/2RiQM/DtYV7eQ3 + + 9OM7AoGBAKE1BocDrOsN55olO9cnw4nQeBzXvlstK9rZ5AYa3FKnNs/sY2euzlBl + + m402fQ91RfSF3pBbUX5cXVdbNt/g16bN7bJ3tI4A4XSo71Pr6+nmiaAt2WDH2IWy + + X5cw5+CXCLok9331H0W8Im3yiuTU73WPxGWJ7voDpoxpBGj9X6it + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 131071 + seed: LCa0a2j/xo/5m0U8HTBBNBNCLXBkg7+g+YpeiGJm564= + zfec: + required: 3 + segmentSize: 131072 + total: 10 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:CHK:7vfgl5cv4nlzqx35z4uthjv36y:nnueftbzxfz6u5yjxwwofaxzzft7xss5wzfh66rrcwv2zwrm63sa:3:10:131073 + format: + kind: chk + params: null + sample: + length: 131073 + seed: /N4rLtula/QIYB+3If6bXDONEO5CnqBPrlURto+/j7k= + zfec: + required: 3 + segmentSize: 131072 + total: 10 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:SSK:f5xydhnzscumihbschybuyocvi:y3wz7t7pyj7id3xyzdut5xwsvpmzxxbotdy7b2azois7fxfuylna + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEogIBAAKCAQEAofW4kcitiw1sFYCYY0wIJXe0zMw35+vVTFDpF3kH6dTKcemq + + oHylBFmDa/qGwg79GA9TfpUVJ16EgPKEU3mBdls1MyWrqHDfy8JsQhHPS9fUD+td + + cLJFKypM89gtEXCt16zWe+36pL13rffT73hkoeaqGI8vE+xZazd4ZnFWd0bTGpl+ + + ZYmKMSbGNY6MAQfWPCOeQ4EK+ReIPm1dKuFAC2H3g6+BGNCArnLSSCF6QgofRvXx + + xy/taEULnHHRf8G9ELRIKpT7MQRediBwRU4C6dizNTmVKigOEKIQ5uxOCjS9gjX4 + + ng2ng3pErVaOB9jkjROOPrtF6uCOQY1cT0zTMwIDAQABAoIBAELu5o7VNSd07hi6 + + 0v+igfFeFenXcjlWRQnrnFE3kzYnW10VeQ8nRBlWlxIucLfNcvqZBuQW362sCa2y + + zE4lNoQ/8G4JYPZVY5/1Y0Ew1A9fjIPhvPWgryZGLpRN4F5HR4kNJH0GHmIr7USH + + 2d4rTsd8KQrKTeX5dQDy5T7NEzNqeBtsb0Imo+5hsZdCI4NMZJ1wYnx6OE8uYMOu + + L5MMl3uIkHQwWiRuilods5bW5a5kBmzeE5X0hsGi+czYVXcD6NvS1ColRi8Hpw59 + + ZKeGJdaX5eCOGFpUIkM70e3wIboCPN0msyEsWPNfgeuhAeJJ/l7BZK1avr2+mhVs + + E5LyzAkCgYEAuFIs0imReqm3/f8ahvlun7VPNVqeOjr0Qp/O5LpDv4V5x2RLG6fR + + 
MBUUM4Lf1Rknn7EHYfYZzl0FheNooQcIMO+wwZ5P7dsTDPMOpcjH4r+88xxa4q75 + + fiHOmfmlcXn36GKCo6km+PjmA9t72Yd/ylRFkx9KMJtNiIpI3UNABrsCgYEA4PFp + + iuCwLtL2MesiMSWjNFhJ3766g7ckQDBeKEavXt0ZrteEXAUXuywqpGrrhiLG4bT1 + + LbXfkoVfuvUk3JBSmTVLuCRtUGdNeAleB1Q/HC7WO1++yKHm1yFveeDN7+fJQ+y+ + + pdoYgZ58NtjBQEEdshtaz2fKwxCyb/WyI2uMaekCgYAju4KG55oVXouVyPu6iOaC + + PaLyY/PitAUgWVzBiL6ThWu7VN0eqmTqXlvBNLDx3eOJmMcmnZAZKn1knFZvSS60 + + VfM9RdSW9u51hzUivI7LjYIy2x9fbK5fXmxv+y6wlgWSXm6XDbbJc28b9lPHMvZ6 + + IeYvBFTcoW7hdnVzt5LU+wKBgGCMHZHHKLegQp1gX9eaYPdZobOQKHvaQovudqtw + + 01qzKY/a3uukH/BtX4wcfCShjp1XzxgkhOZdqp4TFBQ7OciakHpj4Ctve1e3JY2d + + wky1aawoRznUC8Fwj2lPbPS4lrE5zwZemsAfpw7fb+rFSBqnFQ4KbYPWCdB2M1Ry + + l9zpAoGAKRl7T81Ji0P1T66Mi0i0vm+l32QZuSd5pUyBZtsycFcuVz1F+lDIWhMA + + fuai8L6uzFNd/ErekSIQXL02buGHqp19QpbxRlzNYY4OB4OMz6pZkWtP6H97hXxm + + lGTDX1qzm6DfQpJjw39B3neYZC7cwAImcDyvr1kb/tlOmePvv5U= + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 131073 + seed: /N4rLtula/QIYB+3If6bXDONEO5CnqBPrlURto+/j7k= + zfec: + required: 3 + segmentSize: 131072 + total: 10 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:MDMF:2hqvwmzuxctume2eereg7xj6vm:qucnebkcgyqkipwxlxmt4k5nagvufpmhahuvfvmncuihp2llobzq + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEApvVF2hYDR6x/aIKoeIrLgvEerWkVwxeTUYP1rTP3faTPXZaJ + + RyDqHMkwSVlkTIdhorRROoxWgrD8GhI9Uc443ac1iFj7MKE61S2xnKNplhnh1sQQ + + OmjkCn0z9cIqCdaqDCtLwKnLKEKqbm30JoCi/nGc2Sh6FGvJ5Wa2gViqI8bbBlIy + + RzO1NcoJjEVmxlAqLgH79XDp2n1TOJvm0V8WsOiSNQREcMcbLTcl+cwTbpsKkE2d + + gmkzej9Vf/PMxHrGxGqYOUPsYwX2gNNiEWx6pMN+INUgJXuMOlwdgvEDxiCz3Noq + + WsJpTsJKmr3rPrPvKuhlDx7EsZQ9taxTyB2OXwIDAQABAoIBAFI/9t+PhKIkqsez + + xodL6SJi4vgPEvd/f8Xiun9PYJd3P+kdJhfycSMpQi6AaVcCQulC59luFZhg1HGL + + lsXcUEtx+n9nRqgYZcFrt1oxbuzRZ17ETDJaRi2crKJfuxIJvNAt7C3H+BunbArn + + BCaLrMCo+9pHhIzW5SmsRjDGm1rv6l7gMMb6cJ0n+K1Vc3dy1yTDLqXcpEuC2OpY + + bdekHv3S2mCwDD2jSpKBhX/fpWrKEoS34G9IL78tYSpUAxAJqPaHJGf6a/GGJlVO + + 
Wol5ZnfVduocG0RYfB/Kv5Mihwt/d7XuSDivefP0myr7w+m3XsOm00/BUy2eVKmP + + yrUDLcECgYEAyGsJwXMkNYw/hIRU3f4gekJrwHkWvsJsDkxluyWfjLn4XQGiylI7 + + /pix1qj9LSFi5TDLIMzZ7UlJgENNbcYy8jFx9AepSMHFhUvARWncOG7Uj1uHLOA0 + + ySD6V8dCnqaCUImf8chKiWr+joIdoFBx9xS3PirHCRQZaKjYkGABa08CgYEA1UKx + + uf04DS9E60ZXQf/Yqw4PZOpFgPce63j9mkggebnmrOcNMub+4BUdW91FinqT+IkW + + Qt1sI9TYtucBg2tZF5LkIRWnvR1TeSgY3qZsFh1qv4Q2zVmTs+ONUv/e96tnvyY0 + + s3tdeznWvWpqakE/62hnjdLIVcQRHmlLDAwJp/ECgYASPEL/+gUCZkdlPFEofbXg + + yehZ8+qQ4snIJ0VeWNcCi+1AMSTpub/Bs40C1g9rKs1/wwfIbTsq7u8kH3uNEGqU + + RNF0fbn2Z8McFL9i0XX7IIJwpMhQ2fmTj0+X6wZxvv6+azdFXY8Cn9yXhNlDO+6S + + p6zgmC3R8qU5M5u4zzNx2wKBgE5ggp0OWUlPNA8b/Pm+o8zKEBJQn1a0e+KixuGq + + 3HSgRA0LpagtiUKlv/KBMgug3T0cdNgCNLo+gZ9G6yF3lHi7fahDIzC31HPUrr81 + + fsfp68+TMejqoQQd/1SfwTxY/Hod+oR0NHkTWr6mm5GNhYZpCpXu/721n20D2ZcF + + 3Y6hAoGBAIcx/SZKMhXCGSx0/i6hS7DqXw2rlMsO0/jFPq8jyWpjES/BV31cbkqX + + i5QiyVspb2k5cE/NveUrrsMP3HHQpY9gJUr+GineG+aWr8Bruh2nCrar7ppimhYR + + 6ek5FVvZPBvVVGrhIhX6XQ7RNnRSr0UkF9grNkPn6QQNWcj8f145 + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 131073 + seed: /N4rLtula/QIYB+3If6bXDONEO5CnqBPrlURto+/j7k= + zfec: + required: 3 + segmentSize: 131072 + total: 10 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:CHK:cy7fmjsldeakhfd4psbmghmqyi:6a6uvyai4jkzz6hj5yjugm6uie5etvymcudgiwwjh47apz636zwa:3:10:2097151 + format: + kind: chk + params: null + sample: + length: 2097151 + seed: uqWglk0zIPvAxqkiFARTyFE+okq4/QV3A0gEqWckgJY= + zfec: + required: 3 + segmentSize: 131072 + total: 10 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:SSK:zxil2jxwwoe6le5k73zleft6oy:gamewch4gcjlcb3xb6jzqqa6a6xumhsagoeakdf4ranxvqrq3hiq + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEA1ga6vTinyDMFCVRuC+g+QEjCE4RqjfDLT2Jnq6F0Ld09to0h + + wIk55Pl7BCUanPbvA2C4Iy6uH6Zd8ixtrzfcI0neGUkfwn2k8PJ7nYO1DJ4E7IgB + + COly2AABlm4C943YRx5774qz5cKgYa+1+OtYZv/inbD8f97uHnfD8pql99TNKEZA + + 
y2yE1sZ93KSdTHidOXcj+EFNwIpBtnw1OSwx8rQl5L0zUcYRjd2J/709MRmYLaXJ + + F//mZQdyETFGuD4VOfvhcZiUueNnDxkCjM2YBNU6BJXOWeJsJ8ypgubzzuhOR+pj + + ERMo7tBnqZQYYcYe4Q4zhWF1IevGmwtWXAU4pQIDAQABAoIBAAWYad9AZ9w9iKca + + Si8WOQ5pU/kDpwHH+SniNjiiQSiqqI5NuUOHjlOQFU29XbuJOpICrfSSp+tu6Blt + + OFpN/aBG1r9EV364Q8OVIioOZ+8bS+nNJLt/IKSSAJ0x0zvjvTxD5LADKTnbcT0+ + + 14m0pNeog1v35RDkLTUT/lhftoOBxEUm/6XQOpfhp3RcUypuf2po7mbpFHvcVpZe + + DCFhp8/DWho+Oq6yBMsPavP0yqQEgFulbigvMUhq1a3xqnOO6T6qq/A0Ae5u8y4Y + + ItK6gzqhXkrtkyvawAbCwZfre45NXaVFcHFXT6DqHRKQE0GtAs0sI3HuRm78e6Cx + + 1ef3a68CgYEA4krJaRWuujRee0XgMPv+lmFYLC/bs+uiMITl0zkt6HCNj23eNdmO + + GP6J96V6tknKzdCoDkicf4zZqscoCCqm9WU+u/Bfra0pWY9mXVYSnyo46m98cX3r + + dIqcxq7uet8aMzZXn0z03PgT+ytOS2GMucpMBb/WnklS6hF8lfFCTTcCgYEA8h+2 + + dgVlyjDmXUf79WVI7J1Of8NEIGYUERaZGAzwX5oTRPSQc7iN2pRwZWBZbR2LlrJ8 + + 23jV/Gx57C9V3eblhYRjR4wYTKkFak98YKyZnckzMCaFMWaiBYz0aLADsVn3CB2A + + yte9NA5L8Co3OlWo7OPGR+MwhGptYCUPiBtUtwMCgYA/n3tFWl1H6RVvX1QLMa6A + + pVnfAo2o5mUxcwwS+Q6ZPZvvaZqCVWqISHiN8i6wNcsZVsMJUQz/J6DDTT9KHIPY + + luCugoTEFd18Wr8TGvIdYgeikjnQxvB+UcKGcgSG81cwcuTr2v01a2Jiyeg3dXPV + + gLUjIK68zizLtqLqnWxgvQKBgCiWXFXIbdnI/LTiXkAyrFjNvdz49LChq/d5XEyF + + zr2X7GcAwD5Fz7G2dGjqD9OUwlOOtBNuXCCmZoHLJY+/JvaMzL+volsncjrx/B2Q + + kWe71JLbwjQXyk035biu2M+gDyMTHwXhyFuzkdM+oGds+JZNUG24jeeEl7UoQURF + + oJvVAoGBAJvblfRFyShYaeIj/mtQhBgdJ1rHLmKcQNlgERnH4M0uwFOxLqSkgZWa + + uRCOGQRlIW1+lNMqYFNSeq+1Qp3DT3Y5E/9Rvp8aPqNjN4oCCFmaNNHBgUkgqDS9 + + EPG2GwgfWxGicfJ+ntis05EMbNxSY5MdoO3kJzRxcoZvlzrDEK+P + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 2097151 + seed: uqWglk0zIPvAxqkiFARTyFE+okq4/QV3A0gEqWckgJY= + zfec: + required: 3 + segmentSize: 131072 + total: 10 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:MDMF:drsoyrobruplipvl2zet24hvhi:zvyy5uqu52rzcae6erus2ytl3dugnsf7n7im725vxutz3imfi6qq + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpQIBAAKCAQEAr4dAM7CUJMARGjCtRYqUue6rdSHxf7rn7zqNJ0k/5DMs3dO2 + + 
cSdEuzPmPclqEXoE/XUTDszfZxkNIzKpcL084WTuzcecchxPAfuD95ZlWdRIK/2f + + Q4OTeIycpBycdEEfG2jbt/zNbUoBfRpQTTq1T8DiK2An12CEAljdGaLgBHgn9/2F + + 7xMYEDIcjtY9gID8Kkqd4PQmh06renaK5Q6QtwmVoztj/U2O2bdhAtUmejt7bGNX + + 3b2xewZNBtM2e2iJLZ2TgK7b1XIx07/Fw05RHH7bnTpRwIeAR4e5ECGegUKVucaB + + BM84CvbVpOasZqcssIcflEsI4yGUgozHlIgk0wIDAQABAoIBACfNZpySBPXUa8xh + + j1j+lL0Yxt53xPhu3JsdztZCwO8xP5JJqMw92FMO8L3AB4JRBgKnYpvvjxUk1BrQ + + KSX8c2q05YXaJrqlerD7ZLBm9TKKdZcsGspHctBaKkb4ie2+upwPigtNkxOePXot + + 1lm831JnbaHiWwZ2x1h06CYhDeVVLZuU5Yyp33l7X/hUlE2YaxOtL7ODZKUs2qvo + + 643v2x9FzZB6g44n8HWrJzxx6rtZt8qj/G7h3YVvQkT6D1LjVRD/IBVQ5+Un99k4 + + IxWuMZ5u2jKitpLlxWMuMKYQvw+BWpp7s9nTp/NkPKNfknptWIgsMY5yxwDnPA4g + + TkSfa+ECgYEA3MUmD0djOeR/YiXNt9ETkSHjL6ADPhdTEtH7BeRoRrAQ7FJXOiJB + + RiaIJBasJrBz8npqyfyFfYBJSe9lJDz2zSGqitmJ6YyWnnEv4gr7XDERGhkfK9Lv + + F9hCMQLS6KdoO7jBG9thNBpmEheSlOvaT+LQynQ8bUOdNMXk2LPuR7ECgYEAy4nl + + CRp1YdenVXO4qttKVXJXWh8OJ82dyNgaX1krHsCnph/P7AlB81VKZ85Arqo690YB + + Lu5BNJgsWcmUIt5CMNXWl4DDqneT6gwagRpEmiBSlEV5ByJagkrLKKX6Mis5Cuu9 + + x5nbZ/09pX+3UVfD1i4DEChErGgzCDicNmKLWcMCgYEAlcCpk32iIjgL7GCmTcTl + + 1/G7sKeC65BYypBjDVklHqX5pMQp5QYtbs9eU9SJS+kvjVBatc60IjBuBlf8LHuq + + EfV/QJZVhXXXCXzPtS4r2RpzdleKHGkFxA/uvl4jAKvl+XTWkPXb1sL9b9JLnPbr + + bHr3lA0KnDdcINsH47MRs9ECgYEAtbY1OQxbEW/jX2HB0x+V3HUJUVb6X0StghqU + + aN2Fpp3ezmwGR7b4HxLdK5Gyo30syYfBFLH2msrkhYB2dS6yL0EppPZ7ORwqfMAz + + hWD7MBJ9RwxDAcCEx1+YwoBzvwhhk8NlGebdP5iRycgc1E0jdHp9l5YrwTQBo2xO + + 4irWN9sCgYEAgFSvJKzRr2lpiAAjFs2iDEv01MDAcGRmMTBephSGBhNA4UjZZom/ + + W5gbQnHOhk9AY2qHi+z8Dzg1e1gU6pbVpNui3lSbng8sFbqDmpbhGyxXtve2eZyH + + mODJTFY/6VgevSdbgXo31ci60X7rYeGj7njmVGlGF8M9gn7jatE6ASg= + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 2097151 + seed: uqWglk0zIPvAxqkiFARTyFE+okq4/QV3A0gEqWckgJY= + zfec: + required: 3 + segmentSize: 131072 + total: 10 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:CHK:ptsofqwylmkvzmuvrw5n34j3ma:ky2fs7xrlke64w6kfmhzsuilzxbhfrwwzkxih4rykpbxrr3bxhiq:3:10:2097153 + 
format: + kind: chk + params: null + sample: + length: 2097153 + seed: BTBX/ampNfLU+ox7xipBGiaSbgC0kcB8Gy7BkJB4oKI= + zfec: + required: 3 + segmentSize: 131072 + total: 10 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:SSK:c6l2mn3jlibs5zzoy2bpamqhru:migiw43ipkn5n2poogkq5n427lfhkbtrlwplue6mjh2lxorkkxtq + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEogIBAAKCAQEAsud691onrSR8mmRebEYMCb9vV9fBoPjt9ARGVUHAd4jSapGF + + bEa/lSUnSK/8pmEMmflmbDQGw7pG5bY33K23wPqG8LP5z5TqK9junZaBIJPoJV6S + + jaNJ8DVh+9AQ9f8bdt3dgAkMdRLEKCqKj0KHBBCphvoJ4qROy43ljaMcS5+MoEi9 + + q0cUKawtXGvwvitSfoLhYqlOij95W0hwD0kCW0tE1u7iaAwcQC0vZ4b0uEUNPq+Z + + UbbIQtl3ptlWjl+lUubhE5VFCnK77Keinw84HD5Yjom6uTsddorr4vhuu8iCNdP9 + + A1SVt8Aot4oKt6UeNUcAM0PDK0M+3Y+XHkMceQIDAQABAoIBABD8qz+Yyxsk04L6 + + ZD/OH97+ExWpahx9fmSU0lPOkDaZYndVdXB8QD0qX7JGaYwnu2FUXb4I65qCkbBG + + jsPQp9m2QAFTaXUlG94JdVC3xW+BM8H2mp5BwqfA/dqB3VZqQGKXOuypD0p/e2of + + 7fOf2r+PUHV9QNqJBOVNhh4efnWMmj/Z8dJSHnUVIz7iF5m0UFcS7ewKRRPu7rST + + hE2tQv9ezvua66F6iYWN4Lfw0rAOyUA7CFm0VKDoEB3PcyRgD1G38XBpk2EHwKJv + + mKu0pf+AfzeNvpfoRh+z+40hdlgI1OVfIm8RJuhaA3D58Nlsf1iLS7mmbkEJjcna + + kKBnkeECgYEAyXN0Zmar93SfLx97ZcPcsRI3OTQSeSBzS+W/imv3NXOOmVawyeqm + + mFvScTJKo0f4hMWdAZxReepNywrrn5PzTlnQndnJv1n8oqhUtgLLyi0N2YJR8wxm + + YCGxn1t1z9DVBSGTAlx/2Am2DrhQjeXYsI+uGlkRXScQJil+9ki4cBECgYEA41kW + + Yg5gWoqsXSKrJDWvYNQcJCLbSOIUEBsXS60bp2F0rpxLFgPs93A+I9HCSH7qL+Dt + + dgQWgAry6Z0bzZdsXNLrXkqHIFaOXTisBxodvMWRH6dbJLyFVt1QYw8LF2UBIRAD + + 5SiKN8/rxRCuyxpf7CNvzyKlrMmn6JrfhNmwTekCgYAOwrjqr+c10IPBbisaf8lx + + 6AXH2TrpSSlpjEIGoHaSog72yVVW1iyyyTeYN7kkUaeyAtDIR23o4vQkRn6RSMPx + + H7+bcVPJA4zxVigu1fGctMRpBZV/m478yDs9k/QD8CdLovQkniZ36+49EeBFJWxF + + M6HsKE6PZsdWJIA7B4UMIQKBgGIItGIsGNhyG8k9fdbrX2i9jjT24uAWvNgFFpKH + + XvlaSNpSgv5HSxOXzvPbK4/fSlTDBSJyuNEV55FdMfQBa7TLLrtGH+aN7G2+Vk/p + + rxELkHy5yc+Zi1XdsSBGCF5aK5Z6NXPHe3J9sgkUHItwIBTPYxNKuW48tq7Suber + + tx6hAoGAMpcvnPUjv1LLf10MWHoXxYlPvk1kL9INWQIH+dlOdp/3IKglf2YRN5Id + + 
bhCrwyIEEcp5iyNxgxrKYDJwEJblJV4M3vFEIkoXOPFNni1mazfBb1tnuWCty7Me + + gXy3wcxH5qIGhmdiJdubUyZ2Spz37spBASoV82HzeeW+x/pVtVw= + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 2097153 + seed: BTBX/ampNfLU+ox7xipBGiaSbgC0kcB8Gy7BkJB4oKI= + zfec: + required: 3 + segmentSize: 131072 + total: 10 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:MDMF:immegpvupjky66rdfjm4d6wef4:6j6gmacjaguy375eqqxrigvknrb23lngyzhdv47ynty2iihpqmiq + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEogIBAAKCAQEApP6Z4uiEAWfE/DO2Q0GeO8AlR5WgZpEd+/hF3epJmGYyTvRQ + + Gx2ZG6szk/btuLkI9fStqs3paQFW8aJG5I4HJjM0w2N86KIDgtexHL/zqf8ogCV5 + + w8LWGUPwyiokbUXFdo/td7pbzbW1AYNOLohpWtMkhalxtTIWm739TTPDGemWGilT + + 5rHOT3NbNmPjGPG645CYadJNShR2F+B0AWNZnxANcBD8IgMWvIl8IrlMkH1mUKux + + TRf4LneD1X438tQ2hCsLPcgxXJePyzww8pXujXVrv6Y5LsVoaxEy+qGeLMIcz6qT + + EgilvtiIO5GVO3mDR1TboVAk1sCc9DMPtAzV8wIDAQABAoIBAAT44zxSU4ATV31e + + NZTrSlB0pur0WGQe5W9tePWKFPOxyLxWYn+esbmCvEguPdW+RcXbvMwT7n/KmYso + + n8hNe2usSV/GBMKh90cfJug95KLv3JGYD4ZVvcv/HyeIg5aDbsL27WoZRKD7Y9wK + + z/VZCQvCpywcAiA6xTGmVRbZg3ypX/FdANVoh9ajiulwolhBss+f7OKrqJ9pnGF4 + + cqYBWju0fmZM4KE5nXE6ab3ea9oan7nXWIz+TTWxFJkLvg5/Kabffspokxc0j3nM + + DuMKJFTKpJqazitMoFsxGfaketjwwNZbgwleFOFrxYxtP257rPHF/UFqflIpnh/S + + 7ZF/zYECgYEAtiLFm2E4Naav/4agcOB+HHnFE9xT8ObIlEM+jTKwbYP6Ib9Xecj9 + + PnxLXmlC4KTgnkHtCPGYdUD4GeoOhDTI0VACgQv+HzY70DDbtN6UNak46AYcKnX/ + + LPFJhOWj7/EQ1md/BzzmHIudW1k9o8LZ1KIF/uy5pUi0i2R9yP5Y8UECgYEA5+g7 + + H8Z/RIODQR3esb+j1CntJyYdoEy2U8GZuFg503hyleiOGG9JKSeBAQq2nqnVUWUQ + + AiPo/vmk91GpoQvJycjQT+C6tgSzzUvjLs/SnzxAe0RwbAjn+m4OCy66WBxgSopY + + NtE7IbKxZJhXVOy/5vjr9UfeeLrGKoVU6uIdRjMCgYATy7W0jJ2CX0qTuDsp6Yxr + + ZeTAotrQvRSh4KkkyZSZYpXGIzjLuMelifbbHQ+ywNjU+o9bwH50iAovLtxDDEWj + + UlHjWr1VAR0BJL5Ma0CqkGjp9vgKuWZxqQv3kMn/ozDUTM1mqPzNr3L74bgsW1o3 + + nSCPs4T97OgKmnJ9bP+XwQKBgC5DY9gY7zapzbtlzBFFm9ctbgQLVImwBAd9bb8a + + yp5nPuSs+fvh54RwPwoIKxpH4yhTsvfaVhbXkpNMFTztbxn0F6p3uIerNHtWEkI6 + + 
b1gY2vw8UPkcZbrNzbtpXP9K2eLE6og1AUjdrwnUYkes2zOmoNvTtIv9Jp9A7gnV + + heWFAoGAAd+pZlqlxP+/6TR7rwPRRScufTKaRGJxsEcr+f60gEkHYbIL0yH7Aati + + gKsp4umaY4bP+hGfDYYNyiibguR0LAguGZSfx1DDaJUxIgSYoTHoWQWYibcyySUN + + uaRuiTWmWRsaAvWPEte9UmZ7i3kQMziCnPqh7feGbNKrc1+uf8E= + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 2097153 + seed: BTBX/ampNfLU+ox7xipBGiaSbgC0kcB8Gy7BkJB4oKI= + zfec: + required: 3 + segmentSize: 131072 + total: 10 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:CHK:72amudbwylfpsdjqfzjifywpey:lebiq43z33o6fznzhkr6hppzil25ngracqgvm3s54v4h2nchphsa:3:10:4194304 + format: + kind: chk + params: null + sample: + length: 4194304 + seed: 2NVWOXvtwcAFf0J/g0fDC+ypQbnCU7awZoCxcnhpkVM= + zfec: + required: 3 + segmentSize: 131072 + total: 10 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:SSK:52f3eczj3vq6r3llloe3nxoybi:mpy66ok4hjcwr4ir6gizd6inhojk2bubjtrfkavenk5wzw7e35va + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEogIBAAKCAQEA2vTVaerg1p/nTfX0peth4kWZGnYMPgemUQUDhqXjwa5r6A1f + + NhAwXyp9Jd+cIDdr6JefXW2gW0trkfRNaHmMCPV3ZsNBFd/nQjaB3QM8mR+4H3QD + + kLNVvBQuuL8a61GXfTstdKcQKe7GaZ+Sm1H73d63SSfe0KJx8BtsAuQtM4ZVRjxf + + 0+k1EgFb1wkv4kShOcrVKWRduoOIu9XBGqqU4d9/QWNC3PMsYZCnLI2lEwrQAojn + + SP3ZCQgB+Mo4sxwDHIzOOWDW9OYSVTCTIQiSl6Z/vUmLIGrG8WHp9aHT19E660ie + + NwCniPEKK/O79XTEo2e4qKatjGCwdSj5QTE5dwIDAQABAoIBAAKKccmLXLWQ4HXC + + o2ajfxzJkvfAI+86Vn89MCfJWAXA2Oa19QNjF7SbAR3F5QFosztdOw+x/HjivKpS + + a+2I74uREaQjIue2k+/sQwCGD5d1S0UuKvZsZlPK5inlqdHOPhRJcgMXBzR9XVcP + + b3uW7XXLJlRWfprsL6dKIiw8apvc7zF1yUS1OAopfoOcc+HJ3GNMUlqp0kqtLlX4 + + HwXo6sVmNo4eOjJfllyTcUN4TVgzc2plSg6E6qBxnfiAxFwyyDlDuEFkTaNULPm/ + + UKOiniDWb5zf2sBm55dC+R15eUHf8rHKcH/wqmrPnTKn8nxzLSeC9qNWPDtGlZV3 + + j9JmEkECgYEA/XfgM71+9nQ8zZqJ32+ascITSDWvAgAtsZ9A+ze8G9+lJOB0MinD + + IvRxLN5nq68MjGrc+xZKY0NWfOAh41gbgLLvWYENKJ6AEQiDnGSDOSFBxe2Xc1gf + + BGaQMUYXWjbI8BoXG+dJcOjo1LxhxW9UWSpVgebNvnAUpgJz5E5jRjcCgYEA3SS1 + + 
zgELO/afzf2PeW83QbfFzkLsaTGCGgxwFXyeduHq7Qik5xfo7kX4/RykmyLB+D25 + + kmKCmdZxQQ773IUzMnMPOhxIj/9Gw+RkzACZoFYOZjmS6b9Sqb6jJBXX/PZL7f9v + + A0cTftaN2BnZ87KBf/GKrHkuE3Y8moEiDLbsBsECgYAX6MTnXIqraM+LfXZf80Ee + + X3Y+K4I0qBunU6RnjhxabMBBOEL9sF7N300FtH0G/t4qKLJrpPCjaGiyItpPfbIq + + c7aMNNYu7LSb5rezeu+95ds0dnMA2GEkoyAa5ceyJNTTgUKIyUpuMio0VwjJ/PRx + + 7MJgHItv2Va5SiXwdUx8BQKBgHIPHx4zd6Hj4CSUpU2SyUNCD+oEpn7TJDFfPOg4 + + MFtMxqifDr6KnH9Y48VY4qWJVdY9r9sKqCXEbwGJQupIYVGh+raUI/DxT4R15m85 + + 2ALUn/SluVqKbY5TXz2bbp1wQ1Vrq8xa+nkvHFXbb4i8BwMAh+/RSKyNDVD7TZ6V + + MkOBAoGAcqeFeUk5BFlRDEUB/34tiUR4ZwGzdwMgQ1RQ5uSVsgSHpp0ArFZCgq6W + + C8KtdvCh+hmwzF6QXrjcOrcHpRdOM1c1k90HOnO+d6CoaCDsyR0HSc6fIFYl8CS1 + + Mz+6Ugx5sCeSa2DEQeUSYwlQppTEUrDyjz8y97RR555E5lof6S8= + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 4194304 + seed: 2NVWOXvtwcAFf0J/g0fDC+ypQbnCU7awZoCxcnhpkVM= + zfec: + required: 3 + segmentSize: 131072 + total: 10 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:MDMF:araofmg4r5w2byo6viyya3oso4:6wbfrlkpzp2w22keeo5frbjirditr53g5oc3ku35swuwoipu6fdq + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEApE3alS1Y+qPUVkfT9Jou7Pm67Dm9hikzQfEbQS+qX7pvdPae + + H9QwenTW6cfhMmo6vkbc8wqrG6eguctXWgQSG+rQroBu4dV8UD9lx27kYVdUcVK0 + + hKtU0eyDpqMiTGma1ILQVSM+/e3QZPRK+HmIlwvsOYCJSFGhMIPLekgYyVuxEFe6 + + 7olys2mhVNrCpaXoxubhXKE7dOvBuWBWs5A9Vws9u/11hGpXS96xzJldN2bDMyLS + + p5qygPyIFKNhoUEahGUD261Dd1hu6g3CallXEXIKl8CEQyPbFMWgJv1x23omjk17 + + UTtpd4QZsS6SZwVsSZLtg1/R2hDhIN8Fd45XJQIDAQABAoIBAAEQcgiCVS+2bPMs + + Hu7YKtKlIXVTQGuEi8zzC1qmPOPG2N74k/ifzrqUVCoKfeZuMrg1zEuUt5wDv3JE + + o2m1WgqtQDHJKi6zS81XQ8kBamBJCQZ84ydy1qdPcWDccKXvDy4uNLxAcLGDX1Sw + + EmY+n0hfLuYGc50wzir6x5AgtGxldX0GOCb8MY6mY/8mS/OSRlVGrG1KdQ5WqN3Y + + Min+oTPRhnErxHWvVikUdlZ63vtoGrRUWg7pdrXqnCkwxPH1ddyBkzPIIg0A07YB + + 8oJux1n3u3hO56VelHX0/yzFNkviH+gvybPWBJJu1krdLcRgqm0QsJ5NV/x40/QY + + osBcUpcCgYEA1Mzgj7GJ9JfFENkUxvHXzzSK45//DEOiW+rA2KziucHTZkLs4Kl6 + + 
EqcrXj+Yk0DbTypOW6uVKdKNPWl6XEkKmMvFxZQrL0LDEwAcqJzZlizII/D7fkE8 + + KMjExMg2K1lXCt50nqCW8PsyaSVpy0xNrZa7x1Iey9HksRDpBfczgAMCgYEAxain + + m+M8Eyij33XeC3YoA4eADXAdTSrTEuIvUUjWZ0Us1FbdizwniqYiVucI9AKNzGxN + + JRxFr6az/oBiiBJQi/5Qav4Iqa7G+4YKHhi1QTJogZ0yNJRQIO/CqXTbvUEN027m + + f3EwIpQx5JbxU0hKGyEhhEZiUdPReMbvwf2/R7cCgYB0BdBaCB6DcUxMx08AuVNE + + 8gzX1qAke6vGGdRTTs+/H+K22r50L3MTQHnwxRPXFYF9RD+802xchSPk2+GO93QD + + ovaNpx90gR4C+gimFf68VmY40mcMi1zVj8FY2SBPukIu9uL2qfAiK5NsqK1p3oxr + + nMd9AVUxI5tgvyuNyR4XKQKBgE1m/jvNgHkAMSwQvCNA4ep/5WVdwhu16XI7oMvz + + +gH21NdSLO+ZXuKsrEXbs2XamiyzPIKLz745ScMgA3XFtkUcEeHUGRBZoRJeKxge + + FNyzILmhFUgBzF8ZhOFXIbW7A+8IPrspV/AymFcrxNUYOezlzHpAFcB1clIZlUoi + + VAWBAoGBAM/HP7FbfXWQzWY85grp8fVi7FkYSaLdpgZeVUWCXj8YfkTyk+8gDabZ + + wf/rdHzjVNUvFEf287nj4M8mi4XtB5ywgFUvehXA+PnHJ9+55GeCOQOF5ulma0z6 + + MwfB7DKFKTfl7bHtGfTYJ14xVg/PtWFBJE7pnOcLFMimPfNWWCMT + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 4194304 + seed: 2NVWOXvtwcAFf0J/g0fDC+ypQbnCU7awZoCxcnhpkVM= + zfec: + required: 3 + segmentSize: 131072 + total: 10 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:CHK:tyt4hfvn44igztqh5zeiusggcm:ex3gzzn7byvhajithwbo7c3p7qwqxmnactaxxsxsxowkafyjrf7q:3:10:8388607 + format: + kind: chk + params: null + sample: + length: 8388607 + seed: w6uP8Tcg6K2QR905Rms8iXTlksL6OD1KOWBxTK7wxPI= + zfec: + required: 3 + segmentSize: 131072 + total: 10 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:SSK:xsmblf4mfbwrri47s4psav35va:amlpbidxmdffbpreslrugvw55nhhwqjyaks2fri52ehshxngffwq + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEAuSBNunMdTApYjZptHL8f8fM+QhDWIE3B29TZFEhL3zjRgFty + + +KJDwPMViLL70ELTThuoSUscHyHAvFivKUVYvbGRg5WcgE9r4V1V/3pbrCvEfMxv + + RwNES+aYzSK1etulmzdBk0K+aXRQxe2Lm07/o2iQ3n4RjuQbYbaQzfZS9isEk+28 + + iMLAQ/AOIiZEiY1jGIZMR5k7xutoceApwzFo4e91+Axmh2809Y1uYx5SOk0/bv7Q + + XqxxXejURpevB4HzAyQiFKQcKz4CEsU6+SKr4dovvXdEHodaFUZzSl64sN6DqKuT + + 
i9T5c39KQutV4nyD134tdZy++2oV2RasLSphdQIDAQABAoIBAFMZo5qW8uc/26lQ + + 0Trutl3LFT7dxOjCTsup23oFy/0bSbvHETB30lcqJxfyVCQT4zt0IdIow6pb4eMK + + IjKx/NhF/a5l+dcFD8Wduq1QVRdPnEdzE156om05yYyH0JQiRdALeUWr18KJonp+ + + m8TvLMTC+wjM6X/NeFcf9xdlQ69ZMVEEf3Q7mzYc1L1e6SpaSVV2iTZODn6oGiYx + + ZPc4djYLwZC5+uC9gDx7pSzusAol74/0hg4xJAl4ak1omlVoDfviuloX4AEQDEIl + + O8tREPUjUOBIYBMpgDyKapqv08eGMD3Dch7pTYFKHYX6UJF8rjih2aA6a1E+Ch3Q + + YBijUT8CgYEA/gowWim67ErYzMXCqjmy2NBpMn/LUU+nWGw3inDZ17PLKdshXxXe + + dNuhfF365DLhH1Lded/nr7dLDd+n+IjMD/FNDG7xXJXPMZAlCwTZK9t817C4MNRI + + dXKliwACfjKt9fBiszvy0TP8Z9lXWtTw6SEvGNj8ftYY4BFKocpF6/cCgYEAuo38 + + 7BZQ6qp7G5epVyFQ1WiGgs1SzTdxyxdHpVM1Yt9Ya97/oVLaTeerKXOAty8Qh9a4 + + 5pBRnkJBqFRZhvSwXdRVIY2b7k0NsEfbVvk+fnhDSAP2wIVDFGQ6vpthgspeCEZm + + XkrQry9ZwG0IvmopuOw8EHtmvRVb5tdqzhyaSvMCgYEAw/ocPxI+T5eWFLLjb+q/ + + HB/7Z6fKs9mdIcuqNTTF+W+MZafU4MPAL6pXs9fUe0L9BOsqTKD83UOrtPI7ZLIz + + qoDejZ7wuBoiEvw+d3ewCfNzJfoAvjqmA2UEbGz/f1edeEOQAPFYayeNqpeymjH4 + + AAFHkgWjFD4aRpFQX+vpcRUCgYB3FqTaYPSOmP21g39Ka64aTXtwjHnLHxW5O8c0 + + toVh9ImRcu1komtRSA5vi5gjWBwJWvz10jMH/+vB9PahvBnKC/28SZW87dtLKNPQ + + FZPbUBJDKqSeCXPk1Ibbnn0E7QJR7f7zOnc1HdkBiZkHVOYFcmh1bREMq1HbbrBz + + mra0GQKBgAYljdd9NGkfDCfPNOhwVTXZz1Ro7kU+4wId/wckMPd95MyWvfU8E/Lc + + o24QaZSDb2VrRVJ1dpgNcSr8e4zVZ+4+urhQNsFItvhRJTVnZq/YFITB3EJCrZXe + + BI7jHDLwaimv6siVxbcJfwPw7DJaTbCAB3BD/u5pa5NSSVEf769i + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 8388607 + seed: w6uP8Tcg6K2QR905Rms8iXTlksL6OD1KOWBxTK7wxPI= + zfec: + required: 3 + segmentSize: 131072 + total: 10 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:MDMF:zqldyhgk4msrc4wk2mbek4tytu:nc7rmvy5fhgxbszjyshadrvfn62rqh65euocyb7ltfx3qmzc6coq + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEApLgLaCjEUy8x/xNFZuVGYvJnPKzk2OTDtvDaddCmrzTSn2O6 + + gAX2K3cG4V+NF/zk7A60ZoYjAqgziqk6ybvR5f1zxTk+zmWplhVahGox9iMi7Vp3 + + atPcu1RNBSCOj1usI7u9/+fkxJmML6EgNtz7p4W3ZbmA8udv/l5U37S3tEb13kFE + + 
mMKrokeTKbW9T/0YDkq1ukYziY5ye3eYbDqduRUu+bqmmAL5JPdb4GQATlDzHtvL + + 5ouc3if5YErOWJCv5ZbM/jPATNqPbpUQ5FBA7kZWIepcN9OG7wK+0aaVUKS/k5Wg + + +tTjDshkFAZ2x0MDnVz5L8qiC3+wIUbj4VKw4QIDAQABAoIBADlNaXJ31CyYG42A + + F8G++yiK6Y07HHWzx75JtcYMqyACgU8/s268JDJkuvkGc6Ansz/HscyE14MiHqQb + + UT9C3rdi37Z5vrawuTlj/lRYWT8mZA0sTqTURVLJ9e1VsSKAIrdfpa5z7qrSO+mJ + + 5RoQ8F8L7owt54UZLGXSTTZxuQK2qb/g0DvuWAUXe+WU9F2rv4GVLzvsuQf0AbRw + + 6UdIfIweIa3GU0u44FEoitXGLq+3O0ZAq/9UoNRmTUSAgAAS45OsKWGhYjJ4qto1 + + aVig4iNylqD6r+En6hbklz64h29dUiHdwOUc50duIarMnaDy2geCFHtkWAlkWJjD + + SBQHEK0CgYEAwIdu9AvSV4FKSxDefLbU/jTU/NkrrirRzezylnFc/xsjTYIVZOWb + + rH3R2myLRowBN/WgZUtAUwH2f4HwmJwcmMG+13RWYI/+Nquid48bGkNTUkhbb78+ + + LybV3mdH2pfRkbYlHpuZCZpf0lZWlPbH2WgtWxbaFH1UL+LT6AWcKIMCgYEA2wWQ + + ujP1CLDD9b769bJdDYoLMAkciAjtg2qMLLmTAfjD221tcEt95u7Xq1qdpaCaro6j + + Rf+0QFaG6BzgKTiZA4o0wznGJ74WDtKkaIK243MrOW/4+qSrRP+romKrdrB2XphI + + t8mr7bURtDxjEAwZcWSO3pyHdcPWkXejOTgIW8sCgYEAlfGknxsJ4a7HDrl/nb/D + + GIxLCPWWSFn+9pNAx5xYojIfh4D1apRMbsW7B5Mb0YC+fjeliN5XpY5UzS+FE0Ya + + G3phSGnJ0AC1Kxz3NohUwlqG7QF/fQODybNEQ6dKVduBkys5s6HZSZpaYHVvjyq3 + + sQGquVON2wFU7MqK4RxlZ0MCgYBy2YJBAgnV2suHS/RRbox5ExA2yjBZ7USPCwoi + + UdWSzR33LHSc1BlbRZd2VXghaAx0yHs5s5KTwkvP34R0WSdzwb9VODB+mqD6eN6Z + + pyG8N6JM5jiLRlpBPkiESHVdMb+Abx6CsZAkgDSebKQNwCp/WZnJhg9KY71aXoAK + + +yT27QKBgQCxw9dLlBiQLQuqDt5TQPkYzNSY3j2seEUqYMF8QSTvWxS8jJN5ofCX + + A/yQRxNyunl9AJYIM8GE+4CBDszhSu8CFeoE8cDhHSIMMFOR9dSEL7yXlaykQ195 + + C2iuwWcu1i9I2hWDtNz2d8hLL4XgEZxNGRQNFhC8VXEwbGL1dizZAg== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 8388607 + seed: w6uP8Tcg6K2QR905Rms8iXTlksL6OD1KOWBxTK7wxPI= + zfec: + required: 3 + segmentSize: 131072 + total: 10 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:CHK:xymheose4rdlspgydkzr4nqkre:z3pfrvpq5fdpkoybhdxppwbzrt6ejf26xh6emzlce2sgquljginq:3:10:8388609 + format: + kind: chk + params: null + sample: + length: 8388609 + seed: yPi3JHKKbWaEEG5eZOlM6BHJll0Z3UTdBzz4bPQ7wjg= + zfec: + required: 3 + 
segmentSize: 131072 + total: 10 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:SSK:ezd6kflfw6qfpryquupyjxvg34:dbpmolc6skjs3f27go37huyn2zganyrgpcdijrqlhegvfcwdmvyq + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEAoNZxep7/aicgkAheO7wqXkoQY3FFMdTRxZNUrOEQ3Z5AAe14 + + ill4A8FRMZkT8xgpHzBw6bNDWO4vVQb6G0baM+6yZrzG23MVWyyKkiTjnnUdWU2X + + 7WxGgnsZF/evK2cLFXDU0oe/CV6JzqYMJbyeIgwdL3+OmqJfjvtldT6a7ZphaOhh + + i7IJSJFlb2sbdcHWZPCyfPHZFW+6sfdTSKKcn/DfR40lG+TYzVnYQfLQ15D7ZmNP + + EORKNzHk9zOYZnPXwJS8IyufjaEFUJLAguqVZGPHX212GpGW+jzc73gSC8oDDBH9 + + lzD1Rc9aeb59hGOL/LtyNsBArAqruG0h6RTH6QIDAQABAoIBAAvAl6Km3xFt4fx8 + + TAxv26WvokJt6qkxPJHECfYm7PFQqKsrY7kyP+mAVPM7lQBYldqkUr/U3DkxkE5V + + d+2J0BRm3uzQYvRylI8oskhq/yHbO2WE3LLZzE4o+gStEcTpXt82cyqeBijEWmv5 + + 6J3SSjjBK1nG54/niPV866U1SaNOXKPP4JF48+BKkNaaKXeYlA84cAttD+xmO7Tq + + W0PoxFny6hiGuDXYKsVlBnCujD18eNx8aOv4T9o27A4LJztklpUF+BuzqVtfhXhd + + CZ8Y8Tay55PKIi2qv/DLYPT0fjMC6cbNoT3NJdTl4LEB5NWk7KS3Z6x61ArPHdwA + + Em/km6ECgYEAyyrcRHYRBg2keBW/5BpIYLJEj4WJ2VKbB4jicc+wk3s+uGM9lwMt + + hMPHt5r3NihmsGABjqcARrLLE0Jx2qmyjWrJX2rfLKtY1ZFdyQ5hKKHg+ubpMpEo + + DwKOVZQXxOEfJDqUUrI8fM15+UUwgkBgh+dGqUb3IGdim41jIxoxiBkCgYEAyqmh + + w5pP6R2vzbvfTtauSCzhBQzdOoAzHY/aNq9FY/isAHVQVLf51obO/FRWIJHOGTQn + + l/OuYKQFZzR4wWIm+763shxmGxOGHQQMPs1JwJoksl9hPJx18O5gUZYhQIluI6zB + + prOXgyVeLqcm6Ios/gkT81J4XLxlNEKC1BwleFECgYB1GdknpJ2fTZG0nWSjBvsc + + sOOPjbqshk5RA3bxfnIaL3kxMhI3zl8YHPgqPamrj5HQqyV6oYspNLiT+0JAdHsz + + w48Z7jGAP6rOPiE+V4lssBFKzHkw6jWaoTCE5vzkP5WBfjoriAwRKyXYpSaWjKCW + + 9JjnzL138d8GJXI0s05FUQKBgQCFF1ebnFiEUDGnG40wOj4kOgzggy06AP0QmesF + + ZJ9eYu2aM3C44kVZxBhkj4IsS3SdCqpB2Q8Yej7uIwB2h13wj7QVbR8FAxJdNc5Q + + 5AJeURxuY8L4yguOWQ26JqzZtCc3mHloX6LNxpmOa8lah3u6rP2EGxHeXP7djhxa + + 7c0RsQKBgAn4Gd+IRyRU0Wh07FVh/+LuxC9Fc/GbIjxNbdodeuQzuOlU7auUtIId + + wVAXoiatL+sWpqK1h/+4YGb3xEOd8QV8NPO2mImXr6fWJ5VdZ0aX8DVbcDUCvra7 + + w7HwiFiwVLIGdyNEk6OuiaaYGyJyJEax8i3GKtEeLOCagh+//eyO + + -----END RSA PRIVATE 
KEY----- + + ' + mutable: null + sample: + length: 8388609 + seed: yPi3JHKKbWaEEG5eZOlM6BHJll0Z3UTdBzz4bPQ7wjg= + zfec: + required: 3 + segmentSize: 131072 + total: 10 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:MDMF:hnueoftricmvuv4q37m3b5mzee:hl2efj3ydjv2ydjk5ilxb3kwehm7qi624mefyi3ksmh7o5me2nua + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEAnBWZCUifbysHRyel1LzvRgjn4gw6sQo8ifcbgqhvHx0GyU7B + + rAOUajUAzvCGVkUtbQPe2031Sx1LSrXkra+1D2yGXlQ/pyLJixUTwAkF+V+sbF0Q + + bR1TGztpzxQZV+W2SDi2KFCGxR4b7dN+0yZwXCPbZDYhpWfpo9gHBQAlI9VPEWto + + 8Q8Lv5iMRUFSgo0redIGWfY0lIH4+2/FOlOG30Hl4Q0GwzqXiSv1rGjx7fc9jDhN + + vxf6tdJs57MVbUf+l0ZZ+eh2aST3MzfCZUO6r/DIYzzauEYyLG6ImRImXnjAhZ/D + + i5N7lz4R8KE0A8VBh6Zy+SsjtXjvZeO81q54TQIDAQABAoIBAATnndc/l7j91Gei + + lztek4idyNxJhOz2yiJjaD3Rw/9+4ULMAtnVrqEsGLd9XJn96FibuEdScBHunKTU + + n0LA+dLUrB/vNvqDepYFsR5F+lDU4OgdKuIO5kNoZCG3JeO6oCDYCAtaGnNmcpC6 + + 5mpFXomwBm8j+f3cDH4S+1zSHW3+iQJiYkD5slH5nyAIWCYkGBj3z6gzOhCOuzVr + + 4/WJdXrrxv7zENUwz2PUYe8aEhtyn0NVayCnS6oxdONwUowySPtLoh8G9EW8NQb+ + + dbBehYK92PL/XH7zIachSdgcRvoXXzKSErhFXdVh6NM3wYZEFVh7yBGWcxN/453q + + JAWTvikCgYEAxk9VdT03f5jM95yHhBMb7+E4GWtt1iB2hfeS1hH6xkPtmBUR1B1+ + + +nPc2aU0kv3VGgzIElUWOrZndf2eLNbNzslfECnVg/8EwUqZhFJjlS9GvpmE4BE6 + + /z6kSt9locLP1GvPllhbKm2yi1uQCbPrqQRL/LscJrubV0awDkCdx1kCgYEAyX2d + + wCc/HDuvoVcHX0Pk7tl4RklShX3mlHHvWLQMAuiN26edH92p48vscw4s8DBB9K5U + + 1mqA1civ2f6zZtcIvqoerW9vxwSpqVrDwq3HBTOscuip4aYbIOnMe1pLabjmsrLn + + tP5rkgJeUx0cbSfCBpQjxUlw1BsnM1qepU8AThUCgYEAvSjVekRPWO22rYXomenk + + Xxc0fMLFfVdv1u/FZ161F0OaMdP/MpaEFYBJLG3yTTfEetmwShRRZOWyoJCvvVOT + + 8uiQPgm5efPaZEm1T8uK47W5xHsJjPXCkc/9xNF8zyTVO2kvFNjo9Pq4MUfAiBDP + + /GN12/farXOMhF6P9rhaB9kCgYAZLtELlhwmLDOMR7NNLdAsJhQJPNrKgmzSOtc+ + + T+p8ZpJsVKunsu2r4e3gh3IIZw+nRC6oSdFmZtnLtjC39sJKCjshVB81UZje6NA0 + + wcFxHf88sWWiJT+Ywn/jHur0AL8csI2TKoVJT3B4lNfbsK9oYRWDb+VhLS+eFIJl + + iNUx7QKBgFRzMgeWnmVHkdMt4WqqaNh6GEwXM86Yeaw3f9WLLKOtoR8QoVQukEWC + + 
DplFDDniCfBfD+a/NoC+a5azXI53NnL1iN8t/G7f81xWgZC1y4yeDBqNSyhS3vpo + + L9QHJeP8AZ2cEsSxKmei5+zHuvpN4HuvndyAXuWh1V6TYSh50gVR + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 8388609 + seed: yPi3JHKKbWaEEG5eZOlM6BHJll0Z3UTdBzz4bPQ7wjg= + zfec: + required: 3 + segmentSize: 131072 + total: 10 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:CHK:xt3owduddxqodfhp3c2fu6yzr4:bvk5d2igrtlo64kbpyypajyi6bjzrnvl2blcavxhguiupjthelra:3:10:56 + format: + kind: chk + params: null + sample: + length: 56 + seed: YQ== + zfec: + required: 3 + segmentSize: 131072 + total: 10 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:SSK:zdubpnm3ft7fapqny3dzeypowq:o753jes342uzajcwt6awhn4rmrqn3cszsth7kfd76zd4ow3yc6ya + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpQIBAAKCAQEAu0SG8FEKO2y70r7ymRrbI43J051onWTZGOZIMga5iRApsZxe + + VGIotwXn3+3SkSCChu7Z8OnTx0iRJuAtIi5uRaOzfzFfDqsOulG/Gglvx5kRDVff + + 54RXY5b9kejnLu/uqBdanMU/xUmaOXumHgYUdERxKdrAZhEyJ5F/AhINA+kgwexU + + +pGBtTFiamb5S2gqfxZ9MyR2LZfjbAvcA/PRCGcs5w+j2Kn1rSBmIU2c8uED/bpz + + lEhu7PTB5GgQubsl+1PYwk38kSqDW6UQiydUKhCVwqXOV4fbkzDVdmNarmPToZJB + + yfY1R2XNRhLShnHm0WPJaVAq/I1kToKUUx0btwIDAQABAoIBABsmvmHNfixLgZf2 + + s3nbWPZ4slCKPAbF/mwLx1/pdbEXtNPZlhup97lBk/L1qlf8XLBvpQ22+Uuli9YV + + HrYcAUT7jSTd5ahcyM/e1lRSFfDckopauU359CmuVKl5GTvG8dVRPYQJXUufdkrr + + UJR90S1iVv34h3jE+X6fK8kDEPwF6vcDrvIp6Y/coKzIiyWR4Wjc9CXYonLfF4fX + + 0XsHV9cVNQ3RU9UGNn9hnH88R3f4YCBqJqh77qusUHW+RXy/XoHc9Fz3Lj7kGcJ7 + + CoYBNwLKU95fdi9TRIQsnJiEoQzIb0H26NDJFjYDPqLCoj1C4u5SF1pT4WaazZvu + + RtjHaGECgYEAv2mybf3RZ8HA9J6n9mOHOdU/5/q4GwxEj0tsIVAvf9WdvbXgDeLm + + xuXw0j0N9gfIBnCcrEZ/FhR2uJmQfN7bpA5IDYrp3aht/amavkgDv98WcG9RYJMZ + + aso5RxI7aAJuJn7oN62fKkcCUS4S2bMrTUzYkrfcaqZLxBRABLctIFkCgYEA+nTE + + VG24gt40Ogehs17u7GL4xYhB406LyZ82rMcI8PN/4TxUV/wQyaZeMKtheMmWGuV6 + + RHU2l2o4alW084ZAyYD4cY56nIJnC7AUNXXZrYKaRP11LMNCvtK+T7HE3jY9BE7Y + + FrpGXbyeMz5G2wfV7KCW6kZ13C58IzLF9iy3Go8CgYEAmq3vYrMZ5Z7NLuCHGrST + + 
MkkBu5T/8duYC7QHTWRe/g7ByeyPgqk5lMF8OmjcP1VKbunRseXGDTG8PrDZ8g6l + + r41a7Ja1JkpVmAbW5a2MWiENIQ7T1BcLEyEX6DbzirlsCe/D+Dp1xNRdKvzwfrwq + + 4eyXlvi9RfHciDdVBHqCHQECgYEAurwasZRI8JH5wJZ2EoWif+7e6nBIJ9ElWkNy + + AWo4mWYDn4xammsenSqEqabt+p/aYd1cxvPZqxUQUP/r9XHQliypkAkaE90KNWWn + + +6ANl1d77BpJpgFDn4EDUeoKDV/FKJQcev2Rf0wla7FwJNh3wICPZMb6Exs5hQjT + + HlOChbMCgYEAoWGCOHmr3XDxNKEju0R0JVuOod3VkhkNwL2l7P343QOOO8lfkSeQ + + F++6HxzYkR/50JuYS19bUNS5KPYrJXqsG7HT7St88os8ORcaHtmmORf4UlMoJkwl + + H+Yi5PY0QpBrszST31bgfqdoHKh7i0BW9ux50XO+51GUPVwXQpayX74= + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 56 + seed: YQ== + zfec: + required: 3 + segmentSize: 131072 + total: 10 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:MDMF:xps57pmsgffe3kshaz7ynqjity:zexdamdswofh6v2t2wc5estpazeirbynhst4q3k3ffrq3w7xau3a + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEogIBAAKCAQEAnDNgEwMYgFqRwwZJv5J3wuv4qByLdSfvHKQrLKXUjr2QMwFF + + 9g8ohHmrV1toiqIRQDiqxr71WxCUMJk/84bBdkAoVGIM3jAfl/q1jsL15JtFPeQz + + mQoa8hkWQ3GnxL3MAnzEi8knIpmtJkW/npG1L39CTYt33kfUbm7+Z8VKNjHdEQ7W + + qsRxfmxz4CCkHQAbtL6dZdVbJwEi6Ea80/jyY6ipr4+JqIhtGpksvpKhfhpPnetL + + AN/aX41amGlOE0EEmdjXi2JhI28+fYplXgNR+sBqsi0bVmQZsRb5dwVpfbNjZftj + + 8WKEKB/vKvm1UlmOTxCCY/bCW3QAYeSStV40HQIDAQABAoIBAADFFA0j78P6Lku6 + + xTRHgYWZaiFR+rH6H2iRupC+xHxrnMFTmUesLXPxsZF9ptdAEzuwy86s9EKdo01W + + BAWsPVna2RgJX6zcqdsy5iAs/88/oKi8bjCr5xQYYY61ibEjilTczo8tz56RCVRt + + 9ZLPfwgb5XTCYjXbPsXIkEJsq3/23vg7ji2Pyq6GmhrSB+mxe18u8ggwEwL+sMZu + + 7VFAXoEkJ9Qa6AI3Vwx9/QuGs8W3SkEA2MiEjYWrVZUYSYTdyQ7W5b4qnFPBj3GF + + gDzsDbNEDNijyW3CJp/SEfqSZm4/3wmxIIA3f6Iz8jZc1Xmo6LdwMwWOm8W1ltDE + + 3J1CGNUCgYEAthlbkmf55xW/cw/0OIDcMkaiQb4ld5jdLrVz5ZiSUQjpbeO8lIBL + + epQzSeTNanJ6p/iI0xZQ9kSL/9kLBIiVMqYhCFsnzqXcd0TX59WQkqhh/Fup/nCk + + L/Hl/eZvDg14QYhBoZBsuWgfgD4LIT2+eIPFtQxv22MJoBWybFy3PYsCgYEA25di + + GV6OHMhaE4AENe8MFUlmsFq5Pdp2kDySDDFYjriKJ8Z8ejY6XBHcWIkg6qCTykld + + 
4dTkdPg++bbA3zc1yYf2wN8bRVISOUlioafrhGsi5kL9HUIFWxtfHFgDutMQGxRI + + ROFbXAbfm9t1cZvUpPlT8skoU0zvHKU4L8MV2fcCgYBrAWWlF8Jq/4Wb6KEbXuWG + + CampNkIwEDzRCMGNBmXchn8dGvkizm0MH/AvmOr4hUL8V3iXigKTZF5cPr9Rr6z0 + + sVix31b6AM8XqvWwfvfQpm/F6ltvb+ObZOtAkttph5LF93qRpRuuq7fvFQZXR0AY + + 814HcMJ+SalLT9SkBquK1wKBgBykLiNo9dhDOZx5ghMWztin7kDqVGcA6538iIAW + + n5pd74comGvITuxbWAYkPKrdrukfkKM4BWRMTMp9T8LNjLJwjXqynvf3sHDQZZD4 + + OfvXjYHDEwiR5+juNQWZZUMk7GDb0GFLk4L5Uokdor/it2WdL5nnKt9SlY2C70Ur + + iNoZAoGABeXyJCB8iv0+FoQG+UERAN1q/z6ycM1CKEOoaXlgWbX7bV6C0CNK9NFN + + IcXQOwahrj0N7cq430R8tmgugKp37R7mFW8RB1B/nx/MFG4fSLduuNZ6/a/vquMP + + k88UQFA2FX9bV+T+NUIiu4BQwsNsO/vVENX/cSAZWOV3mTtP7Tk= + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 56 + seed: YQ== + zfec: + required: 3 + segmentSize: 131072 + total: 10 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:CHK:x55owxzhsezfoayaxe7jpwnove:vawdgtqpxyntgy5i2po2twgelrynkfcjgwm7publnlbdp7hpqmfa:3:10:1024 + format: + kind: chk + params: null + sample: + length: 1024 + seed: YQ== + zfec: + required: 3 + segmentSize: 131072 + total: 10 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:SSK:j64uavihuouuyskf24dua7pc2u:obcjb66mh6msejksq2b52dhz6vffzxuvxjnoxvc5eq6bp42zadrq + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEA5i4eDHhy6sVcQaAK1wSU0kjdcM65SS2aB2Rl+SI1Y2o2JYCA + + MdXYdmxkBJXkrrZQ3OEOu/KTms8ovYlQnN0Ayt+9GuUoD8x2kkGrry/bIxwEmTqu + + kSqmPbiloRytbyfxffo5k4+APjm99AfnvHeOaXlxSy56dzkQJaomI6gtrQqlLLhT + + /lSPlackJd7Al7umpBQkX5NWBkqyEGT1xV+9Lz7ttfaaOeM8WMG0PJBHy6LxqpFI + + /V13kwFNkHw5FXaYzsrjfSBmCOBVPQT+ZmAd3CRNs5oxtSldKa3br07LgNRvoj4M + + 2A05bCiNoMm3NO56zAltjXBtrd/l3ySxdbWHyQIDAQABAoIBABGTdWtt2haHrfD7 + + 48AB40xAUJpfyqGmGAQW/DtfC5UVA9/utTs48T+vrJ52BKF7neaTz9B1qCQyy9FX + + Nh7YOEqFdZbjZyD3s5kc5xtoK9M9PTOnGbPPfiSp4AnSmwKpGeVM8U8NbtUxiwni + + faU0Ot1ebtJ3ENZgNtWtbZ6c5an4CEcxTjug8Q2WDqNmLqsGyXRTpgXZNPIFLDSj + + 
rwAw/gfJ6/9vmsxil4mnr/fbnSL9/i0XCj4y9vvc9AcwX+l7v5MsNLAh65169Gpe + + SfkNcngcbVWDSIlSAjAJW9lJCKZtvwCIHUYPhX1lrcQaEPLzcCYnlX7hg5ogEx+o + + p49JTwUCgYEA6nbV7BXTwuxAUNmog9i5Mz72vydZSyWPDk3r0j/uM1IdXKAntuyw + + lNi07UcpjjoDXSO0+YtIHCpr5HmE7gV7FDutNdG53mqkXImfkSiywRM+EPF+fOmH + + ajEsueZyi3erqJRZrsa7b7HEAQQyZS54x1I6viBo2m6nFsppqV6hMN0CgYEA+1KM + + BOXsb67vB+oXDc/oBEBq5HV2TXjXQK3ZmZjOfcoNW6jTpXDpJXXiQoMWfHT7W/GG + + DmNNJYytgsNUfsaxTLCyE+oCsqvgbqay7JckKNham8JLR36GT7cV7TN9RMHDTKKi + + 73iaKLrbetVOgmFLP7Qwy7edhAvdUMbA+uudrd0CgYEAxofraeWtkr7DUvKKu2GW + + qCrnekLSXEwoTv9x8GzLwM8GJ7lBB6ZxewfoY4Y/TLwYvxQOGMN0Qs004Jh5E6a2 + + ahKB/zFgBlIcbHLoF1zzx4MIqgYiiZigXi3XZm4Yjbm+M5eyPMjwS4qlogqwtXZd + + NMGPFhCRWGwbtbOdNpn3OU0CgYEAqFg72FAXFxxrmraQKL1aIfbwYwXXb7+BGB8b + + wgocTyAX4Izu8EP4uBIFtB3Q4x8M/CKFdH/JvlxEIXIr2BvJyaAWOMaodfwxgo0B + + Dv+SxhVeZDU6bbJvz1fJRTEXOQY9hsjuMVBsmtnHiLj3NNhtKkfN47ejuD6mSaRI + + wgsvfLUCgYAe7EGyWS3kXBXEkwRicBjwPi4kwPfxHTXz64vjFKj+NbimV2TZ2O3o + + EoJk40dOQWZI6GYDJJnNJ807MTi5OwC4rjgxc48mCb44ZBIoY0BUwNh8clpiFCJd + + 8u3/e4LmEYJe5AFRT/Nuz2xvRpnZqQSQAvrLB60U+6Hmu4ToRMv1DA== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 1024 + seed: YQ== + zfec: + required: 3 + segmentSize: 131072 + total: 10 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:MDMF:n3rxzmvfupy7vucfjfe4pkdgga:o6s2fbq5fryvlt2vbno35pqtqj4l6pjfsf4d4es3nz5cubjukw5q + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEAxJKzLV9c2HRBtvz9Sh6ZhLopsZmlSaN5VmFY4IYF/t8KMFfR + + hgY66UseCV+wFwjjiY8ala1w6daStW2aP4W6Y2TUvgevlnHTzWqfP/Rekeyyj8Gw + + S/GE7wieXiahP9OwHENyStN8dbWkmvnIrYqBiPyozVjpavYMRuhGPjKpHMNXSkr1 + + yYpeaQ4hMxosd044ktj6ObE/ajBBiM4l3uhQzwsPg6rjWpc/unGPmXjiwOKB5iO8 + + YZrUUvCMchBg9oOHyf8ovjuWB4NU2yV1yhNVt+xbN89hJlqzilhCJwHMqf4sIbUa + + ScPV6AycqUz8pjCqPWOvTYHms4jrDkuXbPiVswIDAQABAoIBAAyyJuMi0Ppkg9Ut + + ZWbcCvFH0137Dzs3Z1t4hj3IY1iaIjWL0wslvkq+V3wUN/uqmDuUkZ2iMyr2C1vT + + 
t7u92gEF1aZJOYJa60YpOB6VHIrTOLxQnERDaAWiPKex7E85hmQgWsTkAFFbW6EQ + + yfDtJ/YwqsoTWIUzpuQNzJZ6PZbqqNlJXNt6nnCqecng7H7Pt7tNQzQ+WA/WHjnt + + A8R0e5+fUFUO7M/V7PZe40FdtatNGOv5MIRzZfkVvim/0BScd5wnyhRQU8evph9q + + wopO/r5wQDlsYIBFr3AuAB+8gzlPC884wdK30+qPUHAkUIvzwYLbDYLVnz96q8Vh + + St0WxD0CgYEA/nDTsY6A6Bk/c6fj+Aj/i0RP5F1bewED7Xr1el5ZhBFrdAvVSPxf + + AGM7KG7LCi7nY8d+HZVKljvEp6tDFjrWrDW6gcPmA1Att4ofQvt4BA2djULbtmNU + + uR2tkx1LS073jK2u5qRyO1e6BCcK2UjGh5EU+4A+ZYLSF/0Aozxo0VcCgYEAxccW + + tJUQMphzAE+rRaCrkVeU+3yR/LLK7Ob/x8P8n3OwrJMQr+aJWK1MRrmtmzMLnGgo + + hqzeaibCC/1ncvqKpIw0V8tYxMEN8I8svoeUQuCmUF4gDRkXwEJfheE3qc5aosE6 + + QnO1CcSRZnod+ywuwTK0I9/OSNr+G2CUHa19GQUCgYEAjWmXvm89JcIiid0dzpTx + + si1dWcapOUvvKuXT2RbnGYe0+OI6wD3Dbyu3jVlGb3pyD/qoFTkMI0NEoQuGVayN + + 81hJOCXwiJbfUcrqZQfuRBJtJj2qb0v1oozkE4eMeWaCHyXIt1deRa0ULYqldO4F + + qQLxbnZwN2rl6X8sA41nlDECgYAFs1xbLiS+YJiH/MPiCOSJFu4rZYbLsteYhnv+ + + 5Q5GBk6kWsTTXSC+VphpPXbcj1cZVgM9BoSOqLlVISO3M7OFVKk5kpnnae0d7vKK + + N1w1pUYF8QCZgAyoNQGN2VUCZvlD0a/9NFqWgnzyaDivAbIDTZPVqODIRs+mOF1s + + kZCWhQKBgQDIO9k69EE6VDVP2iO5emq6A9+9QegnwQlFF7iHN9PaKYFiyHzFueB5 + + eyNxHSuDrjmXIBXXHjAbX+twD2gLA6+xeORj2yujYcUCVJ5G21/FJ+7PKFdUPh74 + + IMMuQtmXaJw6zwUUsqZIGxllv82zcnfBQbXzInue2+081BCPh6wn5w== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 1024 + seed: YQ== + zfec: + required: 3 + segmentSize: 131072 + total: 10 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:CHK:fe64krzyaeff3d4teunjbetkzy:27hrywwaffqiqcgfkmzwbot3iamotr3bey2l5kaladmdmxuaz5ka:3:10:4096 + format: + kind: chk + params: null + sample: + length: 4096 + seed: Yw== + zfec: + required: 3 + segmentSize: 131072 + total: 10 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:SSK:oibebmzc5klthxwje6i32qsa64:4jn2csiis5vv6ix2qndrvegu22up3a6wciqg2rrcbf7m6sxzxhya + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEogIBAAKCAQEAn1VR+MsUI22ssr2wErUH2SszGxF613S5wZV+MY3I+5E3xFn9 + + 
Mwf6CHMiv4sdNTnU2dadtx+lF5S/MP/bpjqt4shLF/71Sh0uNktvixdE1gHm7nKx + + oUg1huUgVgXV4D8cpDhscZ8CSZ94GMa5JXB6s7e3EtrAMox93P5qoBVYNUKWRTyQ + + YZnYvVHE1xMZl+1QBxnUBGtHjwmiUG/BfafAqVYQyd2e1hakp2keLmk38GIq62gO + + 4y3dAUTDfImlENH5gbMiEHpmUve9QLxWBRDqk6VnMc2L1pTk+DA5l5xFrFPCU9yv + + I5M7rjV1MFL64SA4+WmsjaqCv/VLkjXT4a72zwIDAQABAoIBABYRZiIUVny1swaq + + mRluM2ETx6dHG4F97EBwqSLJ5X1aVqP+ZsBLqYjEEZr/9JKrqNxnCj8TxfTnKDfs + + KAr086KGZUg0itqyAfWJKzDTjzgo2UhLYGjbLHa7g2gGtOGzPA9OtU5jXJi/2o1r + + 8LbLxmLf3h5hZ99YcBJMto3nhukRUMB504vpl7TIRJ2K2Nq8Qn+9oK+WkZ10emsr + + gSPNH7hcZntVEFSwPwNT/xs75QnFT5wZJ1+RqPfCK+kPocIFsGSmih9XZfPeWVjz + + QD/uEGcTl8fxvxY9pnwp6EP+NW8QOvcBudwK0qVaWYPE8DtSHlXYOLX81ZfKqKhQ + + TTITPiECgYEA3FS0smNgBbFjAlIcr1rLBYpJZRCqC3XEjpppu5bVeRjbhEdJ6BaN + + g9VrgyRiIK0DykV67ebC3wgABsuT8WntWF/Rsm/jdr8Cdyq37bZLP0trRToKJekt + + hEnE1ogakMSydW8sG5fCWUNrmLae0XqmzkvE6D46WcLuSKBYKqmi/zECgYEAuSCo + + PmYYGqwK7G9C5AgjEvSeG2FRHxruYlK472MMgR0C+NFVQN2yXqAZWGN88bS8SIXd + + sDzVPxU9+kfGYcm2HFejQSbg2RevvzXJCMAVY9bvZfqKXtlGMLaH6puUDgwbMY7K + + qwSIzlTf8rI1yIsZrT0gI9QXInSvro/N/nAv1f8CgYASqKG80amaEdGeqrF/MCMt + + Tu60PlsIKWsB4JW/qyBc5vwAEcFyhCZr2bEHJBejSMOfZ47ngrlSBe1qpebbdOsC + + puqtP8h1j+t3iAiXeu6YZ5yn+ihN2ZdfMpgWyuPlCqNKSqXjmFB/GrSL9Dsy5j4m + + DkiYmlx6qYVgZSPSSRdioQKBgBMKteN9MtuDeLgrFZFI+PqZKK4eS27MoVqBsb4F + + zSJ2rniTZ5Z9dzxecVzzFsXx1jALfOsExtZvQ+m7ej9StSWjKgqoihYqZoxfZuc8 + + gra7Q/KUW6k35g1aqQ5LpGXxftaRHm2K0NuQRVy8UeXn0ONN10F4Lkz//ZYR+plC + + RtlXAoGAT/+tvp6kB5UqyLWI1DNv7Cu6jtCyAm0IebguHqf/I9pTLH2fKLd1vy+e + + aeTjxfUV2CiCzZSMMyaym9hf7JNR1VzVzkwt0Y5L0DLhrTbDGdgDRV3nI1GLTXz+ + + Ro+Go/k+Ql0hJb0FF9CQz1kWDVprTooWe3S9llqMClMjBKkMPMY= + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 4096 + seed: Yw== + zfec: + required: 3 + segmentSize: 131072 + total: 10 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:MDMF:nmtnmr7nerwtcasdqjo2mwnxwm:uol7bxzbeeiurj7e4bmvkvc4izwgzsurrxm326clfbiseuzspbaq + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN 
RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEArLV+zMpxyAS74Gnjm/bgauAaMXK76/Ph0vy8Qj6b3zyoKJKA + + m/3Tm0AbX9AAZnxF7hkJTfxdQrdAvXv+E7xqO39NCaNxNqO011MI1wgY1FV/+2cH + + dwITBj+Kc+Rr1zNoMBu1CRQriej8rK68CFpG0OwKUfRbOX8vpIhzaEHtPN4C7NpR + + UFEUbPOQsdfNsUJzzB6ilZ6vsxd87DzGoS7zbuFQMmBjt7+typiWCD2jEvfXHxcF + + 6WH3EkMwaxZB1bmtPbrHe9FnzPP3OvP1zxjjbxsvCnYmxt6lyoN1wFTpCECIkM4r + + Qwz0rTVcgMfApXxg9EWILEo578qripethPURXQIDAQABAoIBAARUr6o/w8j8gkqs + + ftuVaLznIA13RK3e2XM3CRsLe98+6sBaJXOSKAhN355MugT0VEKWCgX4N3gGM5Zz + + 6nW9dRC4/N9/uRr+X856Pkir4weBYSNdytyEYzASpQyKJGtAHZbiWhe/WuaV6Vl2 + + hHccfNzkpJZGExdRglFjZlV0qxfjNostFNHIv3EZjqpLgtBp365zMzga2nBhshhN + + dMRN3zI/Wysuzemfz6IC25F7Yq/5HAwRKvuaDc2rJaS4Dn78WJSk3TA//8KSxE4r + + PypI33b3ljafExSBSIB9++C04ORKvhsc50BYPfw/g5PyXIoSzjKHOOuLFwVEPsVz + + 9DQnRzECgYEAz9iKWOl4z8LzTbBvw1udresxMJ2jt6uiFx1hyV8OXy4MulboMBE3 + + LLPOl247/zyWbw3FdvnUs3/ckPh406N1IVuSE3LZXw1Wt7sael3mFYT6RVD701VF + + PX9Bq3xE7hYx3/ALW6k3H8BTaVK44SRCd6PZQn2wKtdk/EDx5YJdQDkCgYEA1Lj3 + + EP3yA45nJRf3Z/5RxeOGYcOGS3Htdgsom1AQnK5gNUL8lmott1pEcGuyQSq640je + + /HkoFcqFvybQ9W3TGv2rsYQmShFbkHGLcD9V7XMOVsLSov1MpL50jwG/pTDA2yOc + + cSmH253V0V/CjcamAONd0+YBI0ycIY2KEnRE0kUCgYEAzw2RrMdQ8e/svx0gCYaQ + + Cvz8cMjpmoRhohNEIf4O7CSMy2jeP1w3EdJB4TsQi9DIr/MRHtf826BpkwXkIDl6 + + 6vM1DyjfgMBh/gBnfTVji1aAl2L4q2wL4RqPygyvAlub7dFND1AAOSI4NfkRcj/T + + 8ymHuqRJRjRzRpRQJen7iYkCgYAI3HOeR5XPRB1T1D3AHT32ylWMuQJdHi/QHQLi + + BWHLxQ/I6DNxaJbi7mWvcS0JvefvE9gGGF3tGnSb09gcgSisFSkTyfd2WmbAC5rN + + YDYKICLWxmLT201YB37/fgknrnI6Lq+TnzFDmr2PbTfDhCTiIJaF/yzI9aYDV8wK + + nMFJKQKBgERFAoAThF/5RxZygP5Cr2yytKde1V6K6tkx68LlDct2ayQ3NVioz3TZ + + g6nZzbnYZye9UxFq9Mj29Fk5KRjC2M1u8deuxBccFwsC4Tpe+YyXcyqE2nyaymcc + + 3HHpX5Hj7T3pvHSgGAA7bB82/z4082OxiEeUnzxPfhO9YY3eBz4e + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 4096 + seed: Yw== + zfec: + required: 3 + segmentSize: 131072 + total: 10 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: 
URI:CHK:gvajllsonkuscfemygbnqhq2re:uwyilm5a7so4blhsaielnf34u2qbaqmudd73opjkgodgg3okeaga:3:10:131071 + format: + kind: chk + params: null + sample: + length: 131071 + seed: LCa0a2j/xo/5m0U8HTBBNBNCLXBkg7+g+YpeiGJm564= + zfec: + required: 3 + segmentSize: 131072 + total: 10 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:SSK:ghiqefrydh4yklwo32nwo43ebi:vszy7wnlzofynf5nswobaym4emv4yzdhwswmhoxzwquimn7dnthq + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEAxHHC4JknMYBpghJ7/ZyumMb9aDChLKIpFlACpvC+CADLX4zx + + lJUGu5iTIQcFOi3oPuX6rdL3/d3dhuntUW0Gw3R+Jym++vrIi2kMzMuTcmQ8PjgO + + 5jNCE/4hKdWSIZ613U0uAJ5inusERXBaU60P2zss+2hvTk25lxkkRtNQ8OQCwXk9 + + TjWhnk0y0jW1vdjYOMH11uHFcEG7yIcvyKR+a5jZe16XMFsM5vkhMQ9RhoDud5/B + + jj7AiJzWc98lqkTuEfc4NzQecNn9hT93GpqjM0yC8re2Lou4YBT6nWlE4z2n0F+i + + 8ljO6mDrwFO8UCs6Qu8cRzsFcfp9WR1lmyw7IwIDAQABAoIBAACFQ1HwYJ/gn5Ge + + VkEcIC5RdgHHrIr/ZwJ4dc6sG/ojc7vYwMR3B0yJ9qHYxepwjV6qogPB57GphoPB + + V5qmok+e8xKhfrBlkmXO18oseUM4AXIyGIAC/21+zTxcz4VSnq1ryJpse7Brc63H + + bXCD/eMZK2EDLCsAnmRIccXU8Nr349ndSmUAbRVXqMW4bRC8qS0d5JsyEvHCbHPD + + vo5xJCRYdDtxm4uVML8JcAxVFLjvVWe+9XoebtfYDe9zHZfgkRrjTRuzWLda+XIy + + KZDgDkIjm5bNMnjuw411SB9vGIgbYF8WeC2pnZVuS/o7b69XxgxRtImYDtt1v/fG + + tBWvVYkCgYEAzUxKAaXxoPpkIcGNA74PZ2F/FEe82yJ+GB6vM4nlF3k59OwGvaxE + + Zz67jGMAZRdIbagWdQ6Tc6FdBe5bp2j34LOxUVFwU4bbFelBFBSZtFXIIqkCzjLD + + 7QWmLrMiHvrsND9Xlu6+esFjQl7dPb6y/wmFurAY2rPatON6Kd6MBGUCgYEA9PW2 + + d6oBDUP7kbvaJbhzlIv7DyATgt8oDoDFVOg45ASnvuAprYEJ4Mbs31b1SsC0UQf/ + + Y3pllTd+DTLjvKszCUIobQl0j2g9mbkR5qc8rrN1DaCAavxqNiSgjPLM2XKGy9Gi + + bxMjjBtoqw7Ln1LhyhL1I7qd0QrdT0zY/zfU9OcCgYBHCj0ZsOiaAcsgey9muh/u + + cChfRiut0JO9mPCbbv4dT0+k1v/GJpRM/cI8ZA3A7XucpmuO+gpAGvhrkv2YQpRz + + 5vpW3011OdcaD+r7Hd3KL1zf0Ygs/hgaLrhAtK/79GxD8B9JFThIlh7Y2qbINPMP + + maXy4fjXxSDLM2QUlPPymQKBgFCKaZA2yVm/PHvSNAuq9fWlgMqcVU32aYk7NaaR + + JAN0tGLB+XIet0y8my1jvgryCVeLNaFToQrK0Bsu3EowT/t/USNotHZiY76jZwtb + + 
eUxHnPj6CL8kdxeOO2uceVYVndRt/OZgeJOcf3Gez7x2195FFWzF8xXEaLemIMLp + + bI+JAoGBAIiVzbyeGxhf7aCqbILLUTGxb3yLf+SheEiMdVJ/CR6bc8uNcKKH0E3y + + 7ygPXZpAiajpuSfqA9e4rfTxHJbinHpV1yxOkH55QpPm/4h3mMupthqgv6dPN+aN + + PcOyLUGJiFlXKXKHMeG4YHHHLrEOFxbKsqzMrgLWANmg7c4dN9Px + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 131071 + seed: LCa0a2j/xo/5m0U8HTBBNBNCLXBkg7+g+YpeiGJm564= + zfec: + required: 3 + segmentSize: 131072 + total: 10 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:MDMF:unhxjfxrkuzsuds26fq66slezi:bqley4l5syeh6vyws7chsveukz6rytkb2ihrquvsvilbumohnrdq + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEogIBAAKCAQEAvCBlTHQVDtHf5zH4GvRbJxJPXfdpQ9voK7dqmz/ggmAS5bzF + + z78igUXiZT1Z4AR6wWgodyYnXHcl6nbpvSPBJRrVD7eI1dBmjV3QUBPAoQzTYmzD + + HFTLCR1wYQLS768Rrp7lUt0S5WHU0a+MxslcWNHi5DrhxKWUQ0eYX1t91+86l8nS + + B1CfhEI2stSYlTFj5vCI8DhSYMxhZclqgSUx6fz6RFmqRA5zZ6ENzH0D4BktOFHx + + G3Tx0Y4LrDqU2U1BmXBpQKzabJwLwXTwghVGI5PgaLEPwc9Br3bOmVsj+PXH02Lj + + 4vW8YCd/Q+CfieivfUdhVwk2PNWXWyiCpyNcwwIDAQABAoIBAFhJqdCd/83zL+2b + + 9VCNEgQ9oxK5zGSE3So7C2Rtr2rwNJ4tn/X1wPdDOVMC3l10LLn8rFTyinFqF1i4 + + UsypbXkA5THZk/WoNqCsgNk70+ChGMktuslef+S4tKdKgHzsv9MgDgZ76uTMq2h2 + + xw35rQWgBqfOfGrhvDlw7bD+yoneFRNeiIwphYezgi1m1fhtQFgAT0JhBqKCJWPu + + KrB55XmZgc0dKTvp8Bzg9/KnhV9zstpeScngbjb5gNgrWqSG10SmBeApYQdsKD3t + + hFwUEb0To/8MAtOt+/T5lM/MIBBoU/wyy3Z8zQgZZePgwu7NmAg5Z66p/MGWMfDP + + ZH2Ayb0CgYEA00xMPiXnt3NkbRl2eByVJN9TmPATawagV3yaiwZSnwohgizb2osM + + tkG78UAr6cOA5QSg8KNHCddPO0Wn+U3VVyua9+ZH7u1gdjH4ApnpQ2jJewmfVItJ + + EKeKd8qX7m0RjU1IRmTsgDveABdTgcDdHlc8D0KjuZRTHFnHt/ZzLlUCgYEA4+0m + + 1vliiIcM9RxTEOiEZsWnEsMJBBJuD6Ukg3Et23rZKhIqLcCDWv/fb7X1kaDVPYsO + + qPr42x5b5c6gZq8AV8UhvDqxOCY1MuKnWOzAGleQTUkx+BOa0N3LNIqCxSzvNUek + + e78qMhNLnXxXuRiZtkOtG7w+3vlhc+0exqmaRrcCgYBTmQc9O2//A9eC1qUphl13 + + tif0BWAZYwjDNFhMktbTd4WkZC0jvQntffpmy7XUCfaQJZGrQ15SxW3ijH+Vwjab + + A3SPifuBy0bz3Hc8SDqi4e19EWSJZYYl4bOGC5Cq01ozZpUmzL1JSuZdcN0oI+8Y + + 
Fvl7LClsvgNX3ymGXipZ9QKBgFfcQB+YTJpSbPVDcOXQq9EuGeRKmHwgWprfTv74 + + LvQvG+1yyR2P21LF1ayrWLlFZU3u/7y12h4lSsmAaCaNCTXMQN/dRBlf6RvvcRD/ + + WmINJQwVzhRSAljHVqCvUA+P7bn9HvOw0iQxefGAUBSC3iX7WoyZeSbcvOtCGZ39 + + HQJrAoGAC/jVQ9EFNzwXDnkv/pHRj7i0N+3cv9XXCX19bu5AKmylBgTvgj4xZ3R2 + + KFwmHdXTAs80D4qZPkun8XiFw2va56y+2e3lcWT2MrTKhs/dwrodYidOOg7XGCMJ + + gKrOJbH7cf4Ts4VTksouepDk+ZAdbgFcKye5wN2FajvvY5kSK78= + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 131071 + seed: LCa0a2j/xo/5m0U8HTBBNBNCLXBkg7+g+YpeiGJm564= + zfec: + required: 3 + segmentSize: 131072 + total: 10 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:CHK:zdmicwopo4p4h4wbfcbnwcrvyi:6qn75anpvs5gls27f4lybisis3udvjfjhatxiny7c72bcbtuztia:3:10:131073 + format: + kind: chk + params: null + sample: + length: 131073 + seed: /N4rLtula/QIYB+3If6bXDONEO5CnqBPrlURto+/j7k= + zfec: + required: 3 + segmentSize: 131072 + total: 10 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:SSK:r3q6op5itaypcvquahlmpudidq:6esnusmt5xfk2kr7nyqoleb3k22ks4efmltbaah7ldhq3mfz455q + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEA4s2Oe78bWzPT+ETNoPRutptgpkUKYL+xxJTAoatGSUdsqQbk + + NxrKfoXADGGWz2OXsraRc9ucRjyjlbjFNVIksr4Ql2EaYPv3IVdPXK6yK9ZRHp+x + + 3F/sG+d1fY0BdXfMyafiPHGC2v2E+Mfa2PcGxEjA/3jb5pZtI9VKew+6d6ANVEtS + + eiRo7oFA2Hdczo3nNJGH0I6eBO12ifP3e1kAWoxrRY+kkx7zp4fx0hgxKl5YYJhp + + ZHhW2VeX8KNv4OMbDnUyQCgTiEHFoSvDCLYNJXwAuOgziyDMafzuYI7ZvNtm/yPx + + mpDMQKDF86d/PWTst8CNCXy57inD/5WxtshRtQIDAQABAoIBAAesEyPGmab0NR/k + + hfDQRY/mtPZS7UOyE5IJXSdq5MNuutm2t1DrSb1ncJE92nnoATYlFmNtFgqxm1Zu + + fzjrSMoFUDuqtLEVfO0Zp/N3UzG7Ikt1dMqpZQWM07mvfV5LP+tBAymiLMTHLcpN + + PaI4X9Gs5Ph1ztLsgXZXmf+/m7Vw4IrdU+ovT3qkKumBEMgOWn/AI/WqVaWpBgF1 + + eGTPP3pr39cb7b3kPJkPCTyQgKHaoeRvX9JrpGXe+z4BNqiZI01U2cVsAF554AKH + + Va6bXgzuIgXbVEFBskgQ18pN4KcZyBx3pGQP0OEemH28Zpm9r0EjpmZKWDhq9BIJ + + RyAjzOECgYEA+RHYAi1qlnPVBIr3ZItxSVwjd2+Brg9xqzvkL/sBQg6LVmjxuFNs + + 
IXYzZxZzcv6Dl1Up8jdP3NUUFaZLMKvjaLx732mUgGSesalrqshM3j+wxYrsmpQk + + J5hHgpDYrKTUpj3BsPJFBq+E39WoEnMnWipLIimGUFsF1bZ5rT7kc6ECgYEA6R0a + + lIDBaboOjLjY2JFdTHnqbCg5+f0J+bRUAby4IYENe2eR/omdqt5aPbksdDoP1/nL + + 9U2uAjMIdbYL7zSPd6bEIVT3Q7UPUbMhbqln18RcPLvjdk0YfeK+eT3Hg4XZhTqL + + Yilmntaq6AlCcpZ2taL3FaCXUvme6XRGGEUi5ZUCgYEAs3IQr9jqz0Ta92/rt4vj + + bdgtUVKMGszTt2vqBkuQZ3g1GWd4p7Wq1RzlAeOh//qw8ioQk4sYReFanBJ4X7On + + nwEVOixGKo7T6upGQQAYqZM3l8t0lhYfSkujUcVr5k7HSpJ55zNVWfDBCcdUVR6T + + /pk0EoPaWjCKLqROW+xRCaECgYA8ecptgEGtFhG0PDg1ZvDXaEGCsaToz9aIq1mn + + 4be7KWm//AyKBlWbAHhUzvdTZ8S4eRuKlg5wj6DAOOw7sF0P43m6U/qZ3B0PSvN/ + + a/9+oHh1YSEPjcyuy+YyOe7KlizqPVfvrWHsDzDjZZOReqttT8veFn1rj0rEsd+F + + aFo3SQKBgDtU6oa+9PU6s0iffu5Ev1GRT5fYE6QckfPP91UzOSIOqVscfIEzzKvL + + N86/xD6Plzzft63kQExQ3nuJnzhsf2lRNn3tedhT12GwHV2PHMinI35cDcFLzamm + + Ew/1GbD3WvZ90IPb6p742gHoDqYUy4CjVCPLl400h0sK9KEIt3Xu + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 131073 + seed: /N4rLtula/QIYB+3If6bXDONEO5CnqBPrlURto+/j7k= + zfec: + required: 3 + segmentSize: 131072 + total: 10 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:MDMF:wmnu2vkxtpdducouhywbdxiavy:tbrrl7peldu7poep6u2bhrnyfiixnzzgtj3bwrvfbfoedrabknnq + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEAys5wie0saDumz0H4gXLEWFKZ9iXAFs8R0S95CwxyQn9EMifm + + zlAuS45VKipB+ysNcZPStxlyjenCyb+jKNg4ZHD9DXBEB/iDflGIuJ2IL7byY4yK + + DQfk34/eiOms/TsiNc5YJjYQ9vaov5W46vymFhnOLxL+i6CnFHUC7eRSJPcjnina + + GhopcwN215sx2j4nmad3aI/2d2sAtlVoP1inlZeNdbp4ZfIe8YMBwb8TofO9ga5s + + 3+4Hp5bfhiZJkC/RfFlZypSIJi4PJdVohPxfTIFCYBF6DIPaL8rkxPzwFcFDgiCx + + tPY3JjSqU72586e2JMK+vygKMFS337YFf0Kd2wIDAQABAoIBABVAb7uMTmh3w8GZ + + LKTH9Xo54adRCmF5fmj4vArj2X9NXcSRuNZqwYcqWZNLDVH4D3cU4fJM6NulIMPK + + YJsRmUsxKdtEJeTd7k2I1rZdz50MYzb6TacS6jFhHpUjQ3zfuvR/dG5AoSN38nPt + + CQ0av362Yow9Rc403f0/S7jJAbCcw2vo1UZVeUGJKIsc+ALANrsCOf0ROEToCh5B + + F82zT+qGZ1DlDnCyuwUEcevWkYqnGDNoECfG7QGSFDqq5CcmLgsW39SdtfWBcyUs + + 
PtJlYaH/xvi9W6Ncuf5YIrUv4alkZLJ0XBgvuajTttsuwMOpN1SOSs6xvoT4AsUk + + /py7ek0CgYEA6FFs8usZHywVZGveO74qwJTLUFe6LcYEnTqDxGuVMCTeVMf9Hp1l + + oE5CBhBfFe4HbNEtUw2ZqJBQDkbEP1E6oz5Gf4LWlmnEbMsZBqgd/UJZ1hUwe33q + + 1cNdC1kw2O6OeULXCBzt/CyKZKGSjJ/JLUza0+Ha02TUTSEOGaFylj0CgYEA33rg + + m4rTpuowGvJ5ukWYEnKfsHoBJKtZfWYdpUnopWPs8v8tE3zx0vyD14FE83ZHzGlx + + xxdYT/AdKvIQ4EB5Q73E6MzNAFvxbcVHFx33JwjaR3/bG/EWs8DgGfkOIeDyogft + + 4nVQuKcernxMndU4pu832DBtbpv9rXI6aRcV3fcCgYBl19xFGZ8ntTGjlk4ULqeb + + SR9gFzU8/8PiEVbWcrsyIdd9nzZth16XyfbTpbWpbXG/2GtgL2QfKzSNLaS2hSuJ + + iLFrELZ1teQwNVDBRE3xSncLjLp2SJr8HurZIL5zOxEmQ5D0s4n4tKXuu439K8cL + + nteHb0l4xojzTvxZbBdJmQKBgQDa8rg7q7fRQIAA5q78IFLtP//UFrQoCPiUMwe4 + + eMDFyTDModS30yHZZCyHZs72+Fs/mc8vD2AmcUkiWibOjlxAUhwpOP1f7LSMp0sP + + Cvyp8bJpeopgxcNIOR9WUvvVlV4iAUK/K9D6GEGnEYC+4bevVY+Q72FHjOzskY1I + + iKWT4QKBgQDRmlhPCytajGNxHBKF4WM9iqSeBr8FVD1Q0L/OOqGHd2WaOPr/mK3X + + Po0/Ruug/l07GIuTO+aDClxbnjSnVIQkqJE0PJcS7uefQSDpikL/UXAZKgO6fBD4 + + pOAT3qspB/zHsfYhbcqhJ2wWNgcl0OBtcFL8qa7aZXj5j7xYQWx9IA== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 131073 + seed: /N4rLtula/QIYB+3If6bXDONEO5CnqBPrlURto+/j7k= + zfec: + required: 3 + segmentSize: 131072 + total: 10 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:CHK:bv6qthmetlhdnwc5tfjqamp3yq:ehz4ttd4g7ktkxvbovt562wfedc6jgnt5c6af7wxgp7jbwfwhoaa:3:10:2097151 + format: + kind: chk + params: null + sample: + length: 2097151 + seed: uqWglk0zIPvAxqkiFARTyFE+okq4/QV3A0gEqWckgJY= + zfec: + required: 3 + segmentSize: 131072 + total: 10 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:SSK:bliwpq6aqv6np5ekwyzd6pzq24:y2pbh5xp3magahwtbhypx3gbgs7bw4guuvl4rgygv54vlx67jzua + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEAjbfnWiPN+x+qVaQBm2BqDAlffuX7yL8H5/WbFYqDVhUvewd5 + + 92mHczn8gPkatrrqfDQ1uDAT+3XN93ZNbLXpVfApI/JR9Z6xbnjd4ngrCI3UenZY + + QdaMgNAYh06MkErLWuLiqNFIRxssdc50l1w7PdlTETjPfMkwQxrNx6NQYUaYqkGL + + 
Js2HJg+fvqY3IzZtYlbKjTvo6X1vTk2qf6EkpsVd6hm5Gaec9eLDumTGHSsW9e17 + + vKnDNQGcEMAR/XZLHPoKta0iJO4gEdg0uUolzcgAAxMbJFQmYDg2ZemEFp3ESHe2 + + MF/CmdJSXvPa13D0DdfGFq4alsuIDx4a1JTo5QIDAQABAoIBAAJdD9G0CSpoA8o7 + + 2v8BY6NhwKL4KPPXI8WdlgGM9tXHsqwFmuYib2zfibOI9AYaJfD+WesBekPWWiIH + + ahEnE4YoZDdCQlWrWOAzydeOE4GoA+Qq9xvZ/SvkzJPtHnEFnlCcuhUAsIjnDh3E + + 3LLtidtlNXpzDRrSrChiWQ48TgnhQi203dLAQHQ9SDu1GDux3RdnPnNFDB41ipjZ + + dGGPCOdIuEpbP9B+k/vQcr/xBUPhyghW9ysASn2I0Lnx9Ge+GWBug/iq1Rt7x3bX + + SO/qY5iAVc53F1Fp7tzPeThJyudZexakfNYtzOL/fnQ8WmWLREwNBwMGCsm2+FWU + + V3yKVn0CgYEAwqfRG4nvbtfUTxswgH9XL1yH1aY4870wMcQ76PWxT/4Dfo2YtJ6f + + ZWjTV1Jock2I4171HTRxApKMOhS5OmlgVUj6sjVxtPOSyM9kj9vzWn2ZHUvCOj6s + + kJKoF5a01IZng7qzo5dgknjnRMt1eiFI0Vhpb8R/nNDCtoDullqSlHsCgYEAumFJ + + bogBLfkJyMtvK1RzFXDEc35vE1O65lGcW+Kbo2YX7hlkKy/BmwBtrl1D8xoaJBnl + + +ztKcBrGXGta2B4n95PY1u+IEYom42koCyu5v6DFLmpIQ2JvEu9yIiTtNFTdYYyx + + sQHG9dsIOXAsBOwbDn3YbFNXDhvQmEcPLhPuah8CgYB3Om88nPpJPG3QnmjQ7C6s + + 1dJlrNDJiqIQeY/wmz0mMAJX68cTKu2bIeABZnqPOKqWCj28y7hEyRqXIMZr3sug + + sXjM2ytwmJjZ4x5Hd4PRc4jrhtHK90SfsRTAjhDo9AJHj34kv73pOaD+ZFjqm6SM + + hcjfKs63cK8zNjntYkDSLQKBgBKAmfOZGNThhjEi0PRyO8KDIV19zbUTeNhofac5 + + hc3g0rtWVfVbllK25iyLIbW+f53Z3FTme+tJHSwLlEckJz+Ss9ISkWV8W4Pz6n/B + + ZX06jpifAHGAEhrFHoV5OPsa+ac3emRiEshRaC2bjyMl9UGpCJUoaNoDtN+JHl6q + + p95tAoGBAJbsD8qJ4q1Nf/76a/CSR6QRjkvCiDyleC6wQp2Qn7hOQwbIUwb0qDHP + + ab2VcrW1bhIHCL0Q90VwD3lPlR5V54aukmNGJKY8cUqnNnMH7jSF6ER0y//AowrZ + + 9ZRTAHV2uZPr4N8lnc3daoX0FaZl3M13Np7vCmRXaOuppbEEock+ + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 2097151 + seed: uqWglk0zIPvAxqkiFARTyFE+okq4/QV3A0gEqWckgJY= + zfec: + required: 3 + segmentSize: 131072 + total: 10 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:MDMF:5duhoqd3bucjahftqmhyms7dbu:avli47dw754af7h4atwg5s36ffiwbxzaw6ixatwm6nyohsjie46a + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEogIBAAKCAQEAlPJxm2YJfiYklq8Z/UAsKStUXkHfOfpaljHsn/mCLvTN45rT + + 
f2UWUhtX+6wFiBep9Bq3FC9jofyCHoIrcT3RsA7W1X5OgGZdG3AsvPF/RTaKpBtq + + fPD041dOuJ/ekjCEtQXtPNLRVETsCNkYG/9A5ixZl8Ge6OmELFQpHDMSVx4PtOPe + + tDn4srKQhOMSEoOzngHf0dUtE+atdLUUs+gwxGaByFOcd8h0UvZYq/JcQUKsPkTl + + rvbxFuujFQnhcIshhC1da8wWANcP/N3dSx0wv3934aoA58ukb0VzV21QFHyF5Bdj + + E4Yf5CboLT/dZNpIEmvokcGg5MHEEO97+I4bswIDAQABAoIBABsZRwEaXeTFJMgD + + gIt6Zu5wky949ZeTTHLiD2aFmyFW6bScwRj+98Ildmrz/6ekgofGaoOyIYLhsXzC + + ewvlzuYktQJvsfGbbholXQZdO6YIh83WrRehMTTBeDGP6IsZZ7OVqfV0d6BIz9bG + + RKQnWxPlgsFg+Tvv7FuyTi5yvkX6C3g+cGRXxDHVE31kJy9aXz1nK/NEFDzbbHLx + + PILDgS8UUOP0/mnGyfHgcXNSA7vhhiQ+FKSH9gRQGNImX7A+/vFxc6AzoQ63kq66 + + ygiDdQv9c4+O0f+X3+/cGyaKafz+OWKHg1UeXGUv4aTnYuxelo+RwV4dtaRygoEm + + dtF01XkCgYEA0ATGNf0sWHpxkdpCHEgaxLge+m0OerTIZKNzVQ0UFszsRCj4LvK7 + + Nhvu5rCY9T2C9CpJkgb8VpwGtBBZEFW7Gvfg0C5xQkzDaZ+AhEbWiSucOH3dFPGg + + 1IE8WV2oALu+IttuKizhQWyjSpfWa3taRiuZgWemEalLM5vC1qQTiGkCgYEAt02R + + z1g6BoMt2OExNMB9S7JyEsL0X+Jnt3pMa5tG99lHl9hc7vqxgb71mLKb4jwBi9U9 + + tFLIbTGJ82dXlFHsM39KU/xvJJiBlxfHwlfRF9t1amp68oBJrMyuQASJYs+3XmQN + + GAm44QKSGdCaxsdBOuQT4LMIQJKjv9bKE6gG37sCgYAlTTXd6JBTLWHALcs9FxD1 + + xa6IaZX3GwP0R/sefUHk9MpJTq9ye8RmZ4vngjNrhqQ89HhM30PQpBnvoB7Ydwce + + RuThb/KPWQSRpDB/h9RgtJlG6AsE/m9ArAwOWmUN/JyT05VlqraZ7Mk7Tw78JxqB + + CsB0HAoDkMATeRLvOmzmQQKBgCg7LjWD97hWMknXoyUg2l8y2zai81/YIUtz3DIB + + 8qGTXtNE+aC6BRuk/eJ10SDmarB2LQTW5oaQyOZTWDWFhYIH/hhQ31P45Ph0j7Nn + + 8sx5rluc4z82SPVUNyp11HGLhYOCEh2khJ9eIRLpZg8azIZQaMx4fuctSCNi0Rdf + + WaLhAoGAB+9gxfCGBv6EN9tZxPZHx5YTAjIJ4Y82ZRgMnGhijzpdLvS1AzZ82tj5 + + KDv9+If36M1jJNGSQIJ0bNIOGbZkaqa2gHAYb4gPzROct/msPq3/UacauNiUutCn + + Fu3UjLZ+kbmLgwU3vM+RGjup7xbshlSbrHFlnnrBW/dFHJNXYgE= + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 2097151 + seed: uqWglk0zIPvAxqkiFARTyFE+okq4/QV3A0gEqWckgJY= + zfec: + required: 3 + segmentSize: 131072 + total: 10 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:CHK:n7ogyjbo5jigvxgel5ll6q4vbe:3yjb3zq5hcdavv7ruefawal6euyvjx3lx7quslvasjellv63cxya:3:10:2097153 + format: + 
kind: chk + params: null + sample: + length: 2097153 + seed: BTBX/ampNfLU+ox7xipBGiaSbgC0kcB8Gy7BkJB4oKI= + zfec: + required: 3 + segmentSize: 131072 + total: 10 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:SSK:l2haupf3wtffxnzmhgwrab5hbq:cux7vo7b5w5fcqfqldt6l5ijiskdcjid6rlcz7bjchvmleb5xq2q + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEAzZOs4RtK/VfGmRuKSxE5vLsWwJtV0T8lRJZl7Rg3EwTrDB1K + + vK7g9E8vsWmNphYSpOe6gZ/iOiLLMB5Qhml611luN4VOOckhRqJFmcSdIPjbmw/b + + O5z1985q14iV5ZjN2VMdfG900+GmMKv/h513BlxOC7Vp0BIZ+PAj19JyZXLm0pbn + + 475i2D/pAyIkzWGsNPU5Wm2GqEqbFAu9knp7G8bx5CF5J0RuzmEMJMQcnPZ68Kzq + + PRfPmgeevEI6lBRLxD3QpUJL2BgT7hKMKInIxP4GIFyWN3hMiDtgxq/2UHWHdH2z + + YGl1j0ArwtPLK4GT1PQ24vORxw1iHdY4w66tjwIDAQABAoIBAFSfiQci5oQR+VD2 + + Sr+q6BL+CpgfeTyI20z4Ah4OnUEpgZ37gtPXwwcef5nuwt3O8T7LmvUX/RaEUxLM + + L8acrfHuwNV+/NwBpL6ANtlc23eCqVeTt+G6s2+eG1H8ygN4mqfutFEQSk2b8f7Q + + FoBbO+802PWt6FA721AjfgWt/eQvIhCON4Jt405tCmRlOQ9KLJmbCyXsiB4+gSks + + iQUAPZI2zoj3L0tAd9J3KEcSRCYrSRTv5I2wBCuQtQVDXWZ5rCxM4T4hoNj9JWU6 + + kqKEU/nCQnfc/01z/jR1rgrJpTnfTtimqoc1IWC0sEbuM2Q+O2Z/szmN0hRQLCcz + + hxBduvkCgYEA0Qet8LyVH6CzVLof40VcpDLu3er4qPHC/Y56FNmtpscTUh7QtC46 + + ecUyNkWbxw2i4J7J/6x2nMNhPIgf9p85Xx11bQlAaEmBAEdTimp/v3GBxrY7vEn6 + + 5GFuIo0PkY7cjtvk80GsM3iXELaIg92qRkSTf6X02WydJHXiFWZ7wwcCgYEA+8Vb + + JpC43P6LB4oHbZmVEShjmiy+kKRurGFkwOGrH1cVq70x+78EriyrMQso0Or07L9R + + eiv/sMawoGgpF23UeGDWAr3aPg+ZNBeOaH9RxrqPxR04zkBge0+j8fppQ8ynAPnA + + 8OpsO3FYZNFBi0qUizRxklE/6mw6iTUAkkvIdzkCgYBI2r5bW782CNK4Qy7+DZze + + dgofOth0Od9WdKRERCJsMJKhWrAvPLWQ35RCqjxDQpN0aqPJAxlMRiTL7j4FvTVH + + 24KkAEd8kbHuoO2THs9rsGolEjr7w2U42GSEklnMx9hDyoyf5FHalrtATf6Cx22j + + lB88rGEMrviOTq//+XpFXQKBgHQCrkuI6AW8rGde7KlN2Wg8ihiigXS4r95ySjCu + + S2F3iR7HYN61V/zBzGge0kHh4dWtGmgHGhxkkUJ4fGa1Tu/g/vvoa0WpfliIejAg + + apf9ov2ax3ASLeLkAZEgZ5y8Ej/a1VKtUg3Z3ncmDOOYC/ZQxfw7wA3OrPJIH+lJ + + TifRAoGBALkaj07vfKfnf1oKCsgalurnPRF5/zaIrsQUQ02W4ZKLDPCa+0jcAPHy + + 
LoqUyfmGy/+Q9QFRGVTY7enh65sRadx7CN3TAqdqHyo1KIN63UaJ11irGdQZbgrr + + 6bNSRq2kPWGyWtURUP2oo4q9jc0iWfmlIO0OXihdf1P4C5Uxc2GT + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 2097153 + seed: BTBX/ampNfLU+ox7xipBGiaSbgC0kcB8Gy7BkJB4oKI= + zfec: + required: 3 + segmentSize: 131072 + total: 10 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:MDMF:im5wulgo7r5kaqlkwexlxrkw6e:qztfutuv3vxg6dzi5zn2is4q5gzzavvh4sttlaliyhz5oyi7ihqa + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEA7EmomKj2wzFXeP4lRtDJtToKB4s5mP1VP+LP4XESDux60i5q + + OjCMqXBnyRJpd40mqPRKjVWrHLmmZuc5+aVqxXrcl4TW7ZLBEyR2WI8WuTaFnEeI + + gJr8obAKiyfIeCrace9IShGhtFNuon8JTMILE3y37jPzhBMGL6OEMguucOi1ct7y + + Pg8Odd6aiJ1Jn3OYa7nf69PVwam9FWvg0Er8jMrphI+2dpNETLkyHylZPTkk/t9e + + QUBSNW4sHfUOqwoBLYrw4B5xDaTNSsdjqmxdz9rRovBs925GwRCOEzM0A2VCUkKg + + ZeuIOnSh+mDhgQUoY3CSAqdjda0ba2U/mRP4WQIDAQABAoIBAAo1jAfti/zmy75d + + 778hrAdtJqwJBU/xiS6Jlp0JYUP9Dnj4ma0iNh+njEptJdqtIfmclYCKDiq/cDvD + + u5boYWa/LlsLGay54ab5tTXSF+OgghEcq56P48C6LhhxW3lqs5XG5o8RuDSGAtKZ + + Tc3v1+HrinhrrPa8yjCCfoaZh9vXsuc55Z6Svntr5EmsplpTUrgUq8QgYk5PXpV6 + + ERadGnNOKlP3rpy18BxciEAdCFHBS3genlqfefIJ/6DSUH6lrlPWpGgoUC0YIQ6i + + mbQ3PnRiqQzLbKlNbNRf5QUQAfcqXSlEC/puwzj/5TNcM6gGgAaJ73X9gh4+xsLL + + 2WmgvBECgYEA86Sm4AR/+yPEl0famqO0ThcEoRd+oDyxMn4KPVgIqsgeN96FZA4A + + vGQmPuGlZtsvvW8qQ0XqaGi1E4CSdkGyDYiUchMmuHTXRn+FVQRNScio0txKICuc + + MHTy8av55F8GGa3m90LyPvuqi/FyRheV3dmJ9Ou2EJy7FbU8o4TNr4kCgYEA+EWB + + 0t/gMEUg1LLNx1EH30kt/5ivK6uJUN5nyqrtKT11s+5Amri2zOhS4uiY5L1FJ6ap + + uk2WewmyVZl90xnHyb0ggznjhM1Bu0v7538anx6cmk2vztxVtVp7QpCPaQ2zVRxa + + 6lVjV7WYwxMCkCK17b5C9wGtVrZInuxKm0pKflECgYEA19XPwvoZihg4iq+rt3w4 + + OUlo33BZy4eYjhtb5NX875XSNzoYPvesrTenLeNlTEX198Hn1aq1KoM/jiRDGyG9 + + owGQR7IxhgxzvM8xBYyHD0sES6+8tt0LQ14G7hKkkCuh0tPcnMSgpyz4+3oL+o3g + + RKT28pJxOiwuC9/+9Pir4ckCgYBtaaRnHIaefziSxCHv3wQLISMGa3F3W2dunjU7 + + mcxeylke7LbH+POGpjQxD7Shyc+6Q7a1BhB1NLbFBpnu+IOVoqW7bz2XfyWitz/S + + 
q689xK3bSrVaArw66h88HJ02/PS1Y4Olle/r7XnfLneIseNfXOQCG1kax8aFUzkl + + 6r2doQKBgB4L13+ad44L1O1wmL5bQbXilhK+y4ZCeI7kmArASXbok6bCjf2KhN73 + + rw4VTeImmWMn16GWznvekJyff6JkG6f+RMUSeko8GT+Q8A3lb+99clJxUL8E548U + + GT672ONV/XbMLaIqGlZfblZ0qElrI2744IJoBr9D0uP1hvxDz1z5 + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 2097153 + seed: BTBX/ampNfLU+ox7xipBGiaSbgC0kcB8Gy7BkJB4oKI= + zfec: + required: 3 + segmentSize: 131072 + total: 10 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:CHK:i7vkx7yjzrtlzwnm66a7jn2dwq:rpz32lhxxu473pbze3c4a5yrsy6yoabfdb6v6o7plv27w4rlwk7q:3:10:4194304 + format: + kind: chk + params: null + sample: + length: 4194304 + seed: 2NVWOXvtwcAFf0J/g0fDC+ypQbnCU7awZoCxcnhpkVM= + zfec: + required: 3 + segmentSize: 131072 + total: 10 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:SSK:guphc2riwixq5yh3hhelvznxvq:cb5gfzv4yswoqct3njn4irc7j24q2cm2byqlxuawaxyjrrvns2yq + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEA09GH1R4NN/xGCfZG/AsPeQ/UW861L5FKvT+F9FWmEbotN5Dy + + DsbBxeduMPEti0z5wZWXbuSrHq6lZMD2MxJ1M2QL5rNQ+eR1Rpr8LAQO24PE9H9X + + TGJO5aqeIo5M/GLFNGG4sDCGmwkwMGetNAtTFRzqjq1i96GlWBIYclTRLj5Wey92 + + EaSCO+CWjyiYMaYloSd0QJCyfUipfrtA9O/tKBXqDq5pY7g0elCCq0KIjyUBRHx4 + + UmTcl1Hpg0S2i5BpkskvJr8M6foY88eoWxmYLw0MFb7ueeh+3e+NwpSPsx0lP2Wc + + B2zTT/F8ClWoHR+u4YqCSOmePKci7qRtx4pQKwIDAQABAoIBADdzwjCr1mASvi87 + + dyfiqWFTIJAMVGioi71xlNr7VSeM6uuCGax+ohnyVWmgqgCu3S+tvuA8IwQ8SnZP + + AeUq7t3OUkNKLGfPRFiAmIXZZh5Xp8cuUydfETKU8SMwx7zHCsOE1bniakrKJAB4 + + E+LtGAoN8OX7RE551fRxgE7mH4EQPNwTpl+1aqAvl2DOukXjcyywW58zBJ5VuiVf + + OR87US8E1RLKAJjxcMeh/J2hJV4KY5LIzViWBsyQH+3E4938qLubXlKpZraXzoYG + + STv3PTQd17LTD574P/1aFAirJlAobqodbwkQdc5sUcQjGi4tcK8ju8UPEllmrpuD + + kVkfQUkCgYEA5zqm0feBtoeJa2BxoNmjKH1p+zaA3RRps44hDnriOrZZ8R/xaa98 + + CjhVJMH4lthlFNXDtH+2VgaMs0AdAXxQfhwMUTUgX7Oxivd1OG7XBDT21ITlnd14 + + f9tBy6J5fRXGkyHRSZ3ImCRcgObCA0BkPPXQtdzdV48O6pQudqEgQN0CgYEA6oKN + + 
Ot+p3unrLUP17oQqyEHnCcPRrOTQkdVOZ+rGeQm/2bkzVOs36q2cWJcXrOFh+L1h + + OKnpdTvAYpgiDsBt0i8CybXhUr73l2c/uO19z91VUiFVLr1qNRcW9e9bJNQd9opx + + E4NbpGzFINo96wB1c9oVOJKfBKzkkiUpCOGAAKcCgYA+u1bO2AtE7fiGPSAWt3Tg + + Y0YBdYP4drVGlWS6fPQrYZV9KWFhfs50J1xSIJ3EruidgnEZ4xwgsp4xc09rO8LK + + s+lTjso9rI6aWRBgQxHqfkQI3BU/gvpSFbX//RBgsyuwdxhElJ37SMIf5nr0Tt/i + + +f2pmUYjnxg45ALHBGevsQKBgHhMXkyMPeTnFEhVK4yeah/uhqlgtWe+vSuCQ8VV + + D1k54hu7QJTYUQfm5WQgpfl+aLaj14KszuDftPIe3qG4nt2KViDJV3wOEI6vXWXt + + FnQSM1l9VegzLI9td87TaWr8ER7Op/D9mn4/eeQ/cDHkO1whzG8H7+EDHAHIZEN1 + + AifTAoGBAJfmHCZGx+d1X5Rtx87YgpWzwVDDXTveZ6CNGe4hUNAEVQCNgVzol8Ha + + o/i+J3IqLSb5KUclhFrnH+fMYelJm3JQXFy/3c25cwnQLTJV39hIbREDrErGN+SU + + XvQJSQDtqXtCjWILMbSq+xL4fvUYNSqtoqsvYwV9XdkjG/pfaHZt + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 4194304 + seed: 2NVWOXvtwcAFf0J/g0fDC+ypQbnCU7awZoCxcnhpkVM= + zfec: + required: 3 + segmentSize: 131072 + total: 10 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:MDMF:fyqcwoevvyytv6vwchxcwvkcdm:fqdtjlkf2yctrwprgu2eei5n6noufnasi52qvqzhmoalaxgo3zsa + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEAtptKt0U5+0WhbXHsP97E+R4e98MMZ5zhSIEJf4w0SX1PN2JG + + NoO3CpdtmqZSHA/QdiMGM18ThzK2RU9OfU2ltAsM9VGFPjE6qVGYu90vpYy3RTVj + + en3dXXVix8ib2+6vZSKx+BZXxadeK1AMFEKl6XzbcnTfTgd1JsE8pImgHhUGXH+p + + eM9h0MxIMXvYdGeG+OghjNkPUveC25jrJO2ZNv1A1AxAX7Q1vNgFPrYe7AhJNlfA + + QYq0TJYXqywUYDlVxEtA08CRM3urlFJIHrdYgQ6+7PkGqQ1V0nUdosqHeXxAknrI + + 4U1WAObvl0zGHSG6UzfboLxPKHqOtpxyFIrptQIDAQABAoIBAA1CGdr+orwUt9Nw + + ybzFbuXA2HIYGPwjjtPV0pu5Jp64rhoQ/5S7sXw1DGyFwOvFVixi/0iW5vMSV9uR + + FjDQ2hlyvUkegcnHMewolCn9CurpMZv2dAzpEJEq5wt8YZyb/Y4em8St0pzzjvR0 + + G7xo2Q+qf2sVrepGGQJsVfvYUOYoyXihZA+INZ2Rj93jhFHDTs27vo1sksj4OWDx + + urXat3Q3tA7AN/nTzdKPOSoslqbctY3qMtJuOWi26a04YWpa+QYp9bWEEKuKScRl + + LbTDqj7GgBEC3ODeWyI+PoQg+Q994H1AETOZ8bDQ9r+C5J9cC7nXOKZrpF624zbO + + 0ZvMEeECgYEA/z6I+iQxjywspyPHvhbN2EeDwEjCtGrSw6ywO6YiOLy+tjjxZp77 + + 
drsxc8yIfy2DAN9L/zg4vNYD7AUVFE29/h/GcPRU09VbcMUzlP4xAhs3FKg9Jq5E + + o4t6Z3kJKVzFjkN38fuzMq1SsIw3+ntlPCqF4k681DabW5sF3XFaLekCgYEAtyWz + + Sid2pfJoyHudbIWKTQZwXK8STw84VE8KNCviEGPooBoBSs2FInIgdW411DYQQyjY + + PtLakwoFOnj78LxBPElq8NzVKRRLzzUcQVw9uXTZN0UGTf9ztauZ/dKysBjKIyzI + + NWfz1+SyfND/BFwQlAINLMpnxkgwjLOk56wMge0CgYAScfJ0ISlzrz2K1osYsY0u + + k/xxaNCpOQ8CFPinVtoiP4GIqZTIVbTWX7CzLZSvnBpbdceIKgfvnYerBrL/RJ72 + + PlWY1A9NP53cCGQx4Cyqek0AsSe6I93R88Jkt9pxosKkBTwlwIqyntPa7kcdUs1+ + + C5ShRg9fRpLzi8BgwFBEAQKBgQCYqJkYb2qLilJjAf7HLUyJRZu09cz6D0Kxq6xi + + rk1hwhVuFh8LneGiQ6TgnTvLJkFJ6arOOu0r8QdIpP3DvPdXbA7ys/ANrLhAABIM + + PPnKMya31hYaP5rQTDgwhUaiWBdtWG+NbJepVhycw4w9swuygz8+HXyAnz2wmjET + + VqqaRQKBgQCJxWxMire59h1v1SMAf6x7eZlNMfBf1VTmLmQalt33xCk3egdD/lHx + + qS8fZdcBgxHcVYZ2vWxESyfyZ3Iy/dglRIKRy2rmZIyKxp2yQNURXuEkKNQdXLrt + + NalUR6GCI8RRgnZCHmFIB09WQ9BFnSUzbFhoLOYQIg3Dm2HoA495yA== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 4194304 + seed: 2NVWOXvtwcAFf0J/g0fDC+ypQbnCU7awZoCxcnhpkVM= + zfec: + required: 3 + segmentSize: 131072 + total: 10 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:CHK:d2hbvcmbex7fm3qu22yj4qnkh4:2xwqxbawwgn773hht6etox3oypvqqjv2orktnthfo2e7vibko7ha:3:10:8388607 + format: + kind: chk + params: null + sample: + length: 8388607 + seed: w6uP8Tcg6K2QR905Rms8iXTlksL6OD1KOWBxTK7wxPI= + zfec: + required: 3 + segmentSize: 131072 + total: 10 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:SSK:cugmepep7hr4ori4mvbkkplac4:5m5v2l3ul2gmekwefhahep5xvi4o7frzr5t735ruioy66oicvf6q + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEogIBAAKCAQEAh3dRTejzms9mArre9F+NB3B6BmFfrOo/fytWqAeT9blc36bJ + + x9ErWL3PhkhxJBaCQd0QxwKbM3gV+41JIJrrwMCdw1D3yi2aAqPo8kogQg1/MOMp + + v3LF2OcuzY716WOMcd9KFJDgIMd2dQpO0sEmxsg8r+PDS3mL0ZFSMHScpcn+tq5W + + of4qhCFEjkvkWEtVI8fG+OcA0JAQJHQDzjNPuYE46q8VjWwRSSt/5L3U4IwF8IDy + + v1w52Q2Q0pdO1OAOVh5FxZ4lybu/bhQENQGaQV0ViBSPtYglei7fYYkGhVEpZlNr + + 
3VfwUtywsvqTMfsHIDUhgycAFqyl839wPal/oQIDAQABAoIBAAFJ7dCXcEXfRkGt + + ZHSfuhhBhi+sWnwrKFp934+ursYuMsd/7ziC8W4hNCm7Y7QrsKaOw1uZVz+om1Du + + 2azBPI/zZSzYBtt6DpaK16sCcIcgWL7u0lVbcq1rGaNkAkvrogjtTmeerzswmpxn + + cjOIGeXXPtiPiqqbp+xg/Q6v9VOQclw128X3roNF8WWtDKZBUuWAcYvT2Zot/aNf + + 8KND5bal3af2m4dEdEGK3d9cvKuPsukCWkYecghy6K/vgce4uhA5I+C/NYvVEvWv + + +6Bu0zNNT1ITlE/adhtcdYD4EClV+ShPWr+d304HOVLZD6NWJsmuWOa77ZewgmT2 + + jWFgOaECgYEAvPQP99gKb3+NkU4TilgK6vDrlt3qMsGTQKgPwB+hTJ8MFFPKyA1e + + +pSudVtCmchRg2E7YAU6NOnSESuMvXwAY/r2i7annvKqgegwmP4dr7K7xEwXILic + + w9J8G2mAU5l6XD/55kUPIyJRE65d0ver4j9krjH8Ok3HksL7Y5NCoKkCgYEAt4ii + + 5LYOockPa/coaRFNSTfaUhKc9kii95K8iPyRMx+OrgAZRYuOAL14j0R1Q5gt/7Pp + + R86SdRFVuGCL0Ec3NVwZHAeJZVxAv5I6vpkKQFE+UN7UAp79DY7Usbo2oYNKLbnz + + QxeEKdFHGz5YzgZAT9plDG6YDzszdltxoyy4KjkCgYB7e5BX6yLevN/6fqi8d08j + + PLDpljrwUpr13R718nXKCgKt4hiaZkqUvcfJQAulTkke79MKrD/expOWzvwZ9MiY + + jjDDG2otsO6HGQNxaFhkMw0MeqF+q8cfHhYnH+pSN/HECbc5qhX7YvjTQNdmAJ+e + + qskUIexw+dWb7rq0107qiQKBgB8KUOJ1mAvswVr1NjRu5K9Zbucqlrlgl49fldtl + + O/l1gOAUzDFi5OVjJhy061A/UKhKeU3XthVulRzV10+me8Pei7Cd5bLq41iDFsRd + + hcpS80MiLKE02n+MhJR6dfrjBYyuQmI6e/PGnUwSQ1q02/OlcCmmGrmYvZ9q39FF + + IvChAoGAJCqcARPYEZ7bRbUiWNHw58C12bjqA9+Z1aIBmBSkuyIFQF7jDDoOpzON + + e4njvXv2tY0C51NVQrnNNqa4Fsb/HEE7mv/hAApEGBZhcJIX3nKD1o+FtCIMROe0 + + xx7f3h/IErCNOmoImhrv2A5Cxi6cq+ZN3LtO14mPdkq1lwY+9i0= + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 8388607 + seed: w6uP8Tcg6K2QR905Rms8iXTlksL6OD1KOWBxTK7wxPI= + zfec: + required: 3 + segmentSize: 131072 + total: 10 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:MDMF:ynoa7zlnwraylieeahe3ndidf4:ukho4xxl5f6zku4qsknk2aetdzbkqkq7v4ebwiqwh77mdkyw67na + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEA8nnT38dExgYDxNQX/t0WtAGZd4ibBLy2OpW097OaJ3ets86R + + qWNH5LWKwESEIQhRAM1bynMudc8dVCPRRuMBRq+aVJXeq7eGufhptFvtzIFz+/h5 + + sehSd+D/t7puCQw2uCA3E/xf/QiqVhMrqnBZKAWzflrFlIxabgMjAxST7XxuPk4v + + 
HxjrwEdBc1UNwGMJ1trhMWdWrP3FgBn4BIA8YOszmHagsjheLBmI+QtVbtk9EySd + + +zTXZdGSN1wq/2WuO+YWBhLz3NW5dnFn9OCTlBYqzGStpAD3mATiQ6RNVoH+hUMN + + hNcepowHlrnzjLW0CArh21oFReznFg88eEpYBwIDAQABAoIBAFH216Wh/P/5YYXD + + 8jaPctC3Z7Kt5UT1K55jI9DFj/r+bCPHVJrPOiq4KWZz4rwtzP/56yjkxZRCRlY+ + + Y0xUiQZlbsRgAuzF9Y9gxw6WMqy6J8RJio2WjGYEkzx+kxqQ2+Bi12t0mNf2eWnu + + QgjzwFeUkcfJFFfyF9FvUwMEDjss97odJa1TRP183kBDSm/yC2dndnXuS4HTqAnK + + 6ybRDUm+3dg3um0wAoe0xZhW37OECO0nS42GK2DdHW+Ln8Uuw+sMWT4lBqq/5JQT + + 9pNyUPqvbxkV7Y40JPbSFHG17eUBDCRtc3G91SgEn/MY0iZMdYeqi5xaMyhknWBi + + CTwRGkUCgYEA/W8qJcXUoo6TtR3y1XO6vC/gUBc7zxj6UaQAK/gw+m6SUp35Q9Yd + + pGP2t37T1m2PMcWl7CWvG/ctkuY9B4JvaAGrfD/6XeKdnqm0qOt5B4yy1yykfJ4X + + hnXQ0Nv0nxWJ2Pi/+rwG7tLnbd3mU6AQmp6Kp0lS9I+Rrp2Q2qnOIC0CgYEA9O5D + + BpBn0mpgy3d+55vZXufTSN5ZS/vc7WRcWOGCtVPHD80k5uNkGiR/XVu6MmFWeRdy + + N8XeGZl41scKnKtqaIbr1tWbRB0HthSOQvJOSXk8cbEbbxfchUsBlW0LNmMMQIC/ + + R7388vtSdnIOY3gcHprfNT0W35f2KxhOA6BLBYMCgYBMhl2WrEbJkv286b8ifuB6 + + 5IX6CRnxLdyf/EJlBHtdkzexpKvYtPWcZubff3ddvxVG9SRlyvc2HYvwWH9DHjqf + + kCmEyhjCcqQffaTkgL257t0tpfhA/MejvT2BY3lY8/r8vhfSESaSxLJG9YMP6zw4 + + Q/kgDD71Q8i8ji1oKW/pPQKBgQChvwJ2QEC/vM5lL2mX69y1huSJl4Ri4FW6U2+E + + po/ZzRSFA9VdwEan2PhfH6crhApF90zPNhUA1M/vDgyc/7pKgucVvYRGi+E+xf5Y + + iYlXjf9zmSDj0V8oiyrlkdg4t1os8pje+MEleQCxBYso9vWi5GWI0+naCJFhTjCe + + xmkknwKBgQDLVkyCu2lqLdJXEpEjnaYitlEFo61g8U1509LxnqvqjQCyBUZRf0Wt + + xc+QEPNIlbF0PmX9dBFVazq9bQ0bNZk1C2Fp1EJ/62MNGrc5PzNuZjXG/abfX+Zu + + uYxLWlHgLHuky81onVX0+H2S1y2RTUiBe95xwIMJu6gFEB1iVq3a1Q== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 8388607 + seed: w6uP8Tcg6K2QR905Rms8iXTlksL6OD1KOWBxTK7wxPI= + zfec: + required: 3 + segmentSize: 131072 + total: 10 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:CHK:ccxkyfl2qtqyhihduarpxbdcci:e64be3i2t25selbpc5y2zj443gkdo65chs2o4tpqb7axud5lnxta:3:10:8388609 + format: + kind: chk + params: null + sample: + length: 8388609 + seed: yPi3JHKKbWaEEG5eZOlM6BHJll0Z3UTdBzz4bPQ7wjg= + zfec: + required: 3 + 
segmentSize: 131072 + total: 10 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:SSK:zlyggm5badnoshujgf6jckdaxu:cjtasx6gvdlsxn5w6wf4xltilhhqq5aowc7tblsewkmvzplkjlya + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEA0rjXHJzn6mxSQA7dvD/ZGq2Ot8GytcTkwcCzqsSS7zWKiR5l + + 6n3krs0tEkZNteHntn90oQV/LxB3yzoITuZmuu+hT/RO+uC82NV/F1Qa07mJlfDY + + qxGvSfWVtLgvpkh2ruvO2N8B1ClRKcB3mHMnIU/ajHY12rQZhKIOqjvhrqXPI+We + + S0CjiFbAfYViaCsb1wabjTxg84HhbXVl2GKjujecYhiwxINp2BsNAVvuveuEOvBK + + Gx2CpnvQ2QWv/Y3mZe0o3TeCy9xAEpMmZIxHjFfrLZOsCifxB1VXcjxdGiVpmhcn + + 4gxFTldCKVwXdjz/KzyG/gQcKPRiVJanBC9GJQIDAQABAoIBABahHwjFmOpF47dZ + + YVqcCLaiuNbnCEgY8vATv7exEI5703rSNuOtzWcwRYzW2/WSYw3oNiAstPHa9OJw + + QwAmIhYlMc+iTvEGPYGTu+hHcfIW1L2zdbE5Xve0VfVoakWTNpumWzpTCKE+Jqcz + + MiS/CQ68wp2e/D2WZb8moCiL8bqNhfo2jhuyXuq+QJqwCFhsgNN5QbDT+SrYCTGV + + sg0sudx0bcmuZ9+1RATuBuzgBLIVSbwyWcAtM9YRoYo+DKdlaFCFFAbQdo/G+tnj + + +scE2KjiPE/mr1//Wi338gbjQZ7OjpVcAE7iBBie6Yrj5VZ23Kg0hkFQYGz+PiHr + + FcuAsakCgYEA3xxtkdDHGNpBx3JmCb9aGqSCO7YhMBFlo1CQhRVVSlNJwSv/KK0g + + DTYyhwD/bIqC/jny3Xq7RCkaVgmQHw5hMoOBqmNuqzAdBHHUL1xvzYKMjaZ7U3J3 + + HLSl9xbweADUO9foGOjZCJuSs0bgL69gUry6CfdZRuX7+fZscYChq2cCgYEA8cji + + 514ojQpIpckd7TGyelLQbSZp54YKWGN//WSY15Ool2f/bmg+DVSAt6fK9KEvfuSw + + GF1+ZaeYDg6L8J40NMvj3zwA9B3o5Qa1VoNuxC2gZosh+aG0GLsS5U8/EqMFwuUU + + 4Z6LXzr87o4wh7yGR+2OsXuyHghPqg2pT2QJFpMCgYAqn1uvR5tBfDCk0Y38vrmP + + 7W2Tyq98Z1ZrZLC3O+QXVuH4LVeJhclhvMDaWa5yJePwfVGQTioIU3Hcjecih7S5 + + 2bWjv2sc+QwSFUzb32Tcddw0E2HsByoKKdiq44783eutowmL+K+9nTrhVODvOynD + + pJpF3SMJEFaa4iDFbjV0cQKBgQCkCHPYIAtG1IlA4FcLSsIZNwHsazlCN6/hE3AL + + yyneZ3Djd0zV7KbciE3jS1Tn7kq4vhGyFgvgj3kbYEcUcWBdyU3Jb33+ICSW7Jwu + + G3EUaxf9ObtNDqWOeaxyIfdaf3szJBOsldFcRDrA5XqLPB2lwsciJhdLRLw3VJlf + + ITEBPQKBgQDHVvKBfDKNKVhqTFkycRAzaWgtoVljEIfjAm/qT1pXthhaB3PY9NzJ + + qiyd//b2I/8mWIGdRs5BqzY1hbAGvC0qDGLYZ29GFs4J2s3Vxz2jc6IpZmIbw30v + + PQKAnbv2N/7Heauqbt88TPY1G3dz4asI65gCD/7SmmtqhGqCKnZWBA== + + -----END RSA 
PRIVATE KEY----- + + ' + mutable: null + sample: + length: 8388609 + seed: yPi3JHKKbWaEEG5eZOlM6BHJll0Z3UTdBzz4bPQ7wjg= + zfec: + required: 3 + segmentSize: 131072 + total: 10 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:MDMF:gxq3orvuhm4mtwvw6y56cugcly:x77co4v4zj2oicd56pqyvqzd4kcdhuyatx6itovmdwyylcin4xxq + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEAvJZwIBA/tV4AONyhJyebcdroWZgshXLoo/BdnTDTi2Vj5S8K + + AtGOObP7foNLz40mn4faSV1uSpFvAKKlNWhZoRUlkyh0hMIQS2WqHUkGFrJ/S8oA + + HvoZFNaVtVtXGlDI88wUSo4dR1Kbm1480t5M62RyNos9FmG5W5n6RffhuJZBUPE2 + + hAuMrk1wbJDgdz9+DI/n0KAi+kCPpktBjN0uTgiXH0xFJAImONLVkSFGUVdDSKAX + + IKCr2LPc4ZGF38gbDspg+NuQR3Zk7/I4IuGevZwXh+YMgpL/oAPdYZe622yb0lkP + + qmIIhpMw9iMs5bgJHiXKqwVl7DIYp3qiPirN3QIDAQABAoIBAB52B4W2T4QNEJN0 + + Ak0KbGTsi0vayj4tPFSBQQMLMvA4yp+R2dfGChIEofe3tL7BOYmFGQYb+EeadsfZ + + 0ujt16RZf73lnUR1pXTNkWJfkYMzBmAIhb0l1SKfh9jzov3A1LnvBHayaRpEcUFG + + 7HjRqAem1tKRP6cQ1oQW5UJN8qtXcBlXugjlQGu9SoDKG0wNQ0e+47WlOqFribzs + + XVJK2Ei2jH+ICeDAzNY6KlwsOv+GtSxdCc6hSapVRFKPHf/A93iglSKcFziAg4lh + + TVrhLwzDrfY5RSXInipYoP+cBO0Bo3LkzF+yo5nVkdBGlH4CTYpjsompgwltECWE + + DnLLuaUCgYEA3Jf2n7AKjb5LGnA49Hg9lrN3kWGsmJv3AXmNPQCR+4752lcWKPi8 + + h8Vbeef35VjgrIfoxIGNf5YwAhZy5BHrz+IB6z5icQ7ehHnV/YsqnoDI6uwJy9lQ + + QzoRMj8xlcXe043+ok33sMbYBDQTwWZ8XFdl8BCDzSwzxQvhz1F5/ZcCgYEA2ttf + + Se7r4knHGKE+un0F3q6YpSFSxN504W2LO/gXpBDBA59BZubKqcCQSxY5QVgf7hYW + + Wi7JtkLIYBDOaNKRsJIWR9CtIxN8YQpR0XQxMcyYW7JclhGFpS7MTg3HKi2imfmF + + 5yVZU43BbDg8dqnl4+oZpDjRuXCQW5csrO+GJqsCgYEAi6EY941LsNrR2SNNudje + + SyTAO2LTCCo42FMjRoi03sFqf3z+RuLjGyGePHTLYf23AR5qBPBoK2labAffo2OA + + my5YvpnXX+7khIBGJl3PlVK5WpIbxU+B0XvQ5LhBX6dG2ywXEI8/iELk+wwnsRR5 + + BU5A9QrPErC2+DQEM+FD0XMCgYAKY5V/ZfcOk9/+nFDk+2BW9MTMOeu66rBzrwaH + + /zvoDt+Ks3mgT95Y9ooi9lgbcPp7C9NdzpDGtR7b6JBTy4Mc9aJXIGHHo3opBRtj + + LPfU3FhzKeFZQlWsxK7wGZlVuDrawkyH727xF26SG41LOL9v9UHoWMYj3mML5f45 + + 61jb5wKBgDkaU/ngHe1yZHcp4xODgyS/d/GX1cxcsdjHvpX8wr8g/uqTReGEFblV + + 
cJJz246P1u0ISESiLcSDJEaIPEXaV9x5bOHrUfotPrLR9K2q+gxpx7iYZwA8LSvr + + EuaRLe0uq7CAE6Emd10jQw7CZFkAnlObMXHLsxGpoU7iL2qHU3hb + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 8388609 + seed: yPi3JHKKbWaEEG5eZOlM6BHJll0Z3UTdBzz4bPQ7wjg= + zfec: + required: 3 + segmentSize: 131072 + total: 10 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:CHK:sghm3tydjjaadmiiuda3flhmne:5fqqykrndg5kydmhwetwqdzria4ap475j2qfmq2gmklzop6y6tla:71:255:56 + format: + kind: chk + params: null + sample: + length: 56 + seed: YQ== + zfec: + required: 71 + segmentSize: 131072 + total: 255 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:SSK:c65i4qdeg7vad3lj7aa2nimdua:wcjda3bhjw4zccgjeof5ddovqd4i6h5ab5giecvj3bwiuxlxmyrq + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEAz2I20p+CDDfJxxNVFjry9xBzepPa0mCsusjrd1KBfvCMx3oE + + wudp9YJCClLGhwuATEr16z0endWApkktRRratS2RGn1bNQ0cuuzjLnuTsT7sXhwy + + GXPzjrUiYMHbIuef2Lrby2kuKTP7UCqpQ1VHS2rqaQ+iwIvah7VqI2haVewh7/vX + + 3CR7BhMCFKAmTpNS4+lKa2UPbpXvRJ3kwjfkpoSYMvrRNEHORDUnQ627X1St5F7p + + Gytfsm5do+RpcI6DYDPLjb3BDNR3dwgJytxX2udsHIqsUu/wKLK6Y1AL3201C04V + + FMlrhkeRFc6XfPzaRocL/y2jCL0Iq7b82FikAQIDAQABAoIBAANzzBvXge+4IgjJ + + Xpo/IvpP8Mwyl+r2pwl4/MqAuh0l3gIYGuovtgjbQUQwupNW8qzSdqOS4eaYvkqG + + X1WOK+PmPUsmmRUaAcdu2B1W+09xD7hFWeQolP5kL41SzNYY6wVNnwkU2CrhbaaY + + /zZavhQYVIypMPlpmpq1v22VeCOEAiSiKicn8LvrDuB7HsZSV0mlHMq0Bjgmi3tQ + + P8CCofImp8OnuK1+Zpm5hyPTKvsZJ1j3D2TPOT9R8bjBy8NA4rFpZBOfrQd3dX4Z + + 7qWFHfn5PprIZEaRmEgido4dn8k8wRRDR8IFoxZcDtv2PjlSMVjaiPt2Tse+l3tn + + PxlYpUECgYEA5F5hvzh5rGFjgwwhNnP0NFRE3N6/Bs/b+K/2EwxUqQvD91XtpeM+ + + aiLKqAhY9X66PzPgZifDDKwzybGbdlyEYXS+dl6qwTvPusPoEIGs+gQW3BGffMai + + 25jlGZaoklr2ZOKaC1ANsXsGmDHWuIcq/IzLTRedrouOqZkaJM6kyjkCgYEA6HnU + + ktCyk4ughuJ705e4UXuaeGLsrj1nOBSZ9asA7QxqTFFSi/DKHXjoxNbUd3FZVJ2g + + c36knYO4VsYMwgaDORSAezF6j2MW0INkULQBQl03M9HmeqT4LFMW6Sto2NERIvxF + + R6T2mPWh1IP5PDeoaVRkuuqLcJrk36kVzc8ZyAkCgYAKoMHXzl8LQLUK4kOhbyAM + 
+ V2elB9DIFmBcYIQJOuetvlhuaFdZAwxikB/yVgEd27n7OwTUfEE9k74NQvDDP2cB + + yhcbFyjHOWtfe8KPEhnkwM/3ifJsMipeIe13lWVe+lDBPTKCGEWq3tjduGQPzmqX + + uk2z1seF2gTXq8JluCA/MQKBgCWTc6GbbBHfMr46o2srDdbV1Lz6uGjdce6lndEQ + + p+Co7hGR33bRH8otven7E3KO9rJvm/yvDqqLHOOhtXQzG0jBoJbJA5djm89uPWux + + /LYeXQraNZfDTH3VnFFp+9N3z35JKmWPK3DD2zl/b2ylTmpgArwXpxw3XSEtsmGG + + xRMJAoGBAL16UQxn5k0B7R/bzawd5DzYX96m51DOi0MoZPEYXaOBCMY2k5wsvt7F + + LsOBVUZIz8Q0vAtYxC2T6uZRj9Tgg0ufjUq3uc/KJGvY3ZBBC7o3GBdB25jLzF2f + + gmLCud9YOcaZpKzcdDx3eHRDe/cWUoN9d7F0A/b4JMib0dV0Pgr9 + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 56 + seed: YQ== + zfec: + required: 71 + segmentSize: 131072 + total: 255 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:MDMF:jswgkqtxgcqw6xrjivthxqi5aa:6a62r4e27eqtgi433tyjd5ijounpuelqd2rgnsz5vascudnp33ka + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEAuGqzbMA9HFkXV2SDqVlTEYR/Q9+wvjzjekq/SzLK/A8tMWb9 + + HAVggYq6vNEqowspiVVIjspkXCPAxE00vlKQOpikcdKM4euG8hi259oFgBQFx2ER + + 2HmEljXgRmkf2VuYWdoUA2ApMQiuoXRM76WXYA6rzhWp4PFdmEiv2zFOzOSWwM1J + + MnyzaN89Q+bgDjl9He5MArsxsgVOhMTNDWHmvRY5rSWzoPv+9Bs7Um6uO4nX30wT + + kp6adeYfhndTz4WN0vimYhyIWThXrgaeTIzDRTzAObG8V8RW2l2z7g333PNYVFUL + + Cdjzo30/b6otmHgQVfb6a3icMLisCG6J4UbWxQIDAQABAoIBAArTrC4uY21qjJct + + +JiEvOlVHvdK5sXlw2bOrx2OK+0cQ1INwr6LppoMXUC7GKFNr+CscAVUEWF4cYza + + HzEs8ziWyrwU+VOaf90Zltlp9ciKbw7AyUBX3VvH6h9wH2Aj2MADsIjvMxPkzNh/ + + 6wlnGR9DqpH/jcOTSrmnS3hsqWdCyxdm/S+9yMYJkBzgzDFBkh7ze9nPL8lpiQpa + + wFDhfPJOww51nFfksQiVBlMjh7H0XmC7NW/KoZBGGsp4nNmIJVL+d5cehZ6GB4B/ + + GIhVz8OAqRtLNU6GU3e88dkLZM6sqoJrw6KOQxMR27EydgZP616gOMEYaJmE5Ghb + + dNx22rkCgYEA6lGKGvs5ljaGP58jtENG3hXk9DoQ9peVvYPb/BlRzEFIY9PdNamp + + GGpDg9STFq3DWL5/o9i8ltG31NZbiZSG2fBnpV6oUbUYTg0PqBMTzIwKSOt6PaAt + + mDzU2MpCR6HwLBi1uvM60CQDB+gQg73xPp9d4gvzLgrd8p4iaTYgEkkCgYEAyXsb + + G/sgl6OUUEo1UwMAuUoIUyVL09sqxDzMq+ru0dAeOuavOBCjX9Bk1UtKH6nwDxv9 + + 
vpkdgw6hsV/hBJmY6GIVhR0QBIPNIgo6Y0kzq/pVrC0n0E6ht8ZMf0y4kFbFPQb6 + + 8eATpd5ORQKvk+D5Iwndy5kaKZUhLzi0UYK+oJ0CgYBTyA3ycct4a0x7KSKyDLAl + + Lnzr2mtAUJkI50HcFQ2LU/hXQWTCEETW5v/2/iYNoNnNPGgVJKTh5GCvqGmYetPw + + zyWwGnViqbbkCYWEmjWlGJmA0zmlGUXUPkP4s/EY/c0LZ1ZrXxazX3z58b8d6+d7 + + da4y6gTsfJQ5cNNq/SBgiQKBgQC462ooNkblplcbkeB7PghOB2q4lUSRP1hzH7Ji + + H4/ttevo94zeEjdAW04QjbeMdDZGR9SOOI3jmWxCFdO6mxbCQjOqJtBqtGVz1ptc + + QPVR5ML48cDW6TR4LWJMfCfxIhKJPnzXvmeKFw0TPbHUMem3hPiyQuTGQX9hjdPB + + 9BPt6QKBgG3t1vLgr3+pHCL5JCKyS62AP+n7Fr6RjLnpzFPHLW1MtJoaGeFlEaP6 + + zVuwxVY54BIcxMLa3lZiuQBxUiU6cM84wvs4A2bHjUmjFnL1FyUY7Y7Wr/gUUyB/ + + eBKjH7ED1RMIqzY9ewqF6RyxsLnahfSW5BDfM4XesQCYgOSs/f/3 + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 56 + seed: YQ== + zfec: + required: 71 + segmentSize: 131072 + total: 255 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:CHK:2wvrzaobtfkoiqeqwryjzvxatq:7zzaycbdhepzaqtdrku2sbb572h64ggwstu47osa2gn4ol3eglmq:71:255:1024 + format: + kind: chk + params: null + sample: + length: 1024 + seed: YQ== + zfec: + required: 71 + segmentSize: 131072 + total: 255 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:SSK:zbygwa7b7p25n3zxovsemlz7s4:5eklpk5ywpbaqzmobixzkurqsccvzrsobgveqgvfa4mnslut2oca + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEAnDp7uPJokCdVpnjWHvYheSiWbRyu7eLxgIyLBNB2MpvIeYGp + + 8niIIfv5LlsYmmw8pZmAwN8O6IV0AMZvT3XENWhCWRfayIOzTx14NDyR5hGoe+Q+ + + pKVESHfmNXsH8AqhR2qY2SCeYKOlNGRBUqj6DqAGDiI92UR8FWg9xZBCSN8EdqFQ + + RSKNM1AEoMa3OC2rJdz/BOlplbQrf0gFUBBDW1VzhDSWz4TkwhKtMkD256RG61q6 + + PxWMkY8/dI5jbsWoTGRjV9HFBZDx3RvQhBvVFZcS9TPjjHS4wQd+3+8s6SASdBu9 + + PtqRvUtbwWwQVBYBr1PlJWuO5eYnfznC3eDNIQIDAQABAoIBABZ8kHxRX92H29sn + + P2KbeLvwrJ4t57vT04D2ObhKrQihxZw/no+I68dAdmBGumbXt276hj68nG+5bbYC + + 2ditEb3CMPKT7Wi5FCEKE9go87MSzZZvhti59PcdUuRVvhG95fLak/+Eo2czhr7g + + w0o7iip6vARix1Yzdky6gVGjbvb/8Qm2W3TdVfxnhZq33SkSHQkN2VbMihtkqqBr + + 
UXAOf74Tz5xKTiRRHZCMP2NU8PfDH15CMyAnu2dTl471haepUYzq2nN5HnYxNTwl + + CJn7AI+R8assOiODwCIkwPWIylmk9BsWJfVwS7JvNBf2T0frXc8VKrI85BDzZuq4 + + 06XGITECgYEAvvKJ6tO/2ggRvdmqoKBl312g9dH751nOEUbezEA/t+WTvEALfmnZ + + LsFjTOLvlLFRo0R/Wt/Pb0ZY1qT4iA9wsA314t4I+qu76+PoCv9DZ2VRXvt1504h + + /LsgJX616FhFtux/sEOvdIx6n3VT//5yshFhwo/4aAYGIj60y1kerw0CgYEA0XPu + + 352ZqPHUu8sq9q3yBWSqXxpKAA0XMieIPxOPviqtBHkqMj5W9bTdS/eYsjmvgSza + + Tb/dV/dalqkSwpAFA11HCDcI3ejV0252KO2HyNUIF2+bzyVU1W2FyFCz9yCLY/gC + + 7ruMWTqde7PVpg2KMBi/2ltGhwgODSxVyidFcWUCgYAw+XIoOaFGYbVzNSXPRvR6 + + AsCq6+2pG95/jebNClmNaCOpL+ACz1E17cHzUW1TfNtMfeAQRcElcCyO+QcJlrQ3 + + Y41CX+J7sJplWTIFyAzYsyLYsrQ93EtZUAFhvIsZibJvxV7GrcWNpg45YdVmnjN6 + + unyRc22p+ImQNPcYBMaa7QKBgQCMd+PLtDZJR2YUS70UkrOtSkW4Yjker6jOyhRl + + uQi90IEYbuoNqCFJx3JicDrHzEgXqaz+V55qUElAoUMjmNLD3tq0d7RKnsxIb9xu + + tl5KIhS7Iu6rja3HNRxzqywGoJza/ol48e6+KMFVJNYz9wCmIPMJzg0OoihKTWF7 + + obrAwQKBgFbXt9Cww42MReP5aOzcn5udgNQ94YtGuWpfn7xtxi5pD2BjKjH66OcN + + 2iUReVkhFsjKo1G7QVG4p/QhK51wxtFlZiuDU7AW6gzDSMGjeZQqB0HEtFoPdaPK + + c86Pmhd09zlzQYCIaA9tZ8vbUBZ0jHq0rBUPuPg+kXVP7D/px0aS + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 1024 + seed: YQ== + zfec: + required: 71 + segmentSize: 131072 + total: 255 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:MDMF:oojuwcpqotfqvmyuhc245awdpe:vzxamqps66hyemt4zabbos7cufrc5ugi3ii54vsfqeqsmjc5ezjq + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEAsO5RooSEJraTk5Cjb4wZd8vjOixmAQwBXC54S0di42ixXyhH + + xHCLhjK1TuCiC6GaHdYLXAc8YW7RstLhxdQ1NGhCljxesSedcXqvkcfKKyxWhe63 + + 06gCAB8y3hWTEgeIl4ZBlTo+xx1phtpoC2ujGsIgnrwLcwJO+FSIRUiLjCfC5OI6 + + NACIlx9sKopj72B3N+8TCmqUji821/+jmoiqMT5hRsjmjal1KNqeOczC62ieHD+R + + 54vi7EkFjZGRHVpRgwVyEvRhy7y+RJW6ezfkUKW+JhLZo4UfAX9AMPqwYS0ZPYQz + + 8++Pefs9ULQF33UYcekyV5H+1Cdf1NUwEV1s5QIDAQABAoIBABtAOZacbnY//K7n + + wiR2IZ4P6ymUmQlkPflituhxUEvSXi9X1uXsp7C9sqs5cfv0ofYid5FvE9+139p5 + + 
HIkJzEAMJuVY2wTSIy/NQ6liakMICzOJtwqEf/pg08bc79ABFQqxhPxlAjJM12oL + + zaakp3SBneCU+fZ2zo71BiAVslidrUWUPs4pc722tln9ZHVchXp8RylDk577GoRh + + paYAIOAWdaPsSmLTtju44Enpb0wmHF9xt+rQyDvNgyRbsDXGg3laU1V7ySCfvKjL + + P3P1GpJmfT2hIMq1uBQWn3adRUvsxlfJATWLEJH8uIgGnRj2rqI8QSJGKdrU9Eck + + 3UzsNmkCgYEA6zqoHJdD527EHVtB2NHgQgGG6mLVv1pE/+w8SNUQMYc87ZQyLZVM + + 7PZkOrZGOlL6Ns/zQ3j0OP3ZhjsXoA30gFLVMIby0jmVmjSV+i/CwOb5IvvT6vV8 + + n01yQ6rGGbUPYqoMffS1SxkoJMZA5pIe887IVuwjwWcmCfXgKIhpLuMCgYEAwI3V + + kpHTJzCC+MPtiN1CqMkAyzvtk3ON3D2tSgqpppAnWVzRTMzJqVVu7qClzJM4xp5D + + sxqgt3Wrh5flNPD/BS7S9WqNFbkwFhmjh7UzijDJYBHJs0l8oo4XjFgHWIGOt7p9 + + hNUR1nwusFYgAlmQ/gN62ycS1V4Fd3J7YkjAN5cCgYEArldKE+604EnDRrLFSeq7 + + mJBDK2LXYzyHWVsAj0aC+wJt0PP+gLRgUFyJis5fnIi1dHyJot95uufCGe+gIftV + + 1OoPoijSvab6T1FcOxK8+HX3/srAlSsfE362Cpr+ujzsy0aXfZ8p4yAhFahun7V7 + + BV6kM3BPS9+kXMuEOOZpdtsCgYBplhzz91TDG53mDYIWV8Xyye3Og/kdrvKuP3/j + + pv8qX8fD/9qhc3ZtjXR8E8l26Y/rkeNrtgFFOJgrjUZZhoFA9VEm1BuzOs48gCil + + BN7TOzdhn79rubNHbAVLpwW4Kar54qBrk27pn0T22vNIdpbEQ2I+BCoWqCz1N+ii + + l2QX1wKBgD0xAfQZ9D+irtHqR4tCSCznbqmGqCzxA/g3PJeCDf2cNvdtO713i/51 + + SHdZ2RZI7WitU6ovMEyAXP22tFXa0GL9UHEYszooVegMx5/xR+/z0fgWQQ+IQuAD + + xXlJCybYPBqfhAsEqoDmQp/IrCllKC92U4oM45kDEfHxssZ5aXvH + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 1024 + seed: YQ== + zfec: + required: 71 + segmentSize: 131072 + total: 255 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:CHK:7wi5xn2uwerkjzqs5ndn4eimti:llrws4uig2fpi3rtla6lk7m4t6zudjhszdfjgqtihghfrvs2robq:71:255:4096 + format: + kind: chk + params: null + sample: + length: 4096 + seed: Yw== + zfec: + required: 71 + segmentSize: 131072 + total: 255 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:SSK:lrrasvfrww4coivsbcorx4jtxq:phdrpge3wvi4fiabhbo73jcrponuzqz2js4ow5vd653okjkilp2q + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEogIBAAKCAQEAlWJVkT5XFuo8cXgSi25HvT9nFeNuNilLT0u0dZq7uKrANJlG + + 
aNDF7WU4xWXHdiCod/qiz6o1kjyaBiSp0Epm2xJCJvBrNlEhRp2e5NsAMGZhuHeq + + 6Lm9QyRE9qp0NtO9KBWyfh90r29MeQ8jZlnSNHfGE7FHYrCTtBr+dvT2WoovzbI2 + + ZhM/63HVX+PMMIVYQWTqugBtznDzUNP0JM0rpkCn+sMbSwEIafRf48LG92bHsaD8 + + yIrDeNA4JRbG9DlPZLDEcvciIbs2kJxMuIKjyLq1C/LU6PMykcWPr+tjBaH4p0Op + + yUeB0OXugqACzMMx4ZgpKpHWYPT6gwgqjJS7WwIDAQABAoIBADn78efPUWGxKSYI + + K/aYJ1uDhUF/RpPaYoOUiKcPmSccjD++cRCVXQaBQFCK6anmEk6D6HeAA4xQXp7c + + 01do0dTeGPRhZNQwGKD+5KLDiSbATtUaCiqMkjTCGsy+LB7uAGHTTjXbguDn7ECf + + Sifb3JaGGUoiiWl6zdJemgBVpFxO6F8ueGWKywCHX6L/5VLMQQvAAQaxEMkLiEYL + + Rm+wcAWuK88XNETTglgevCmuZDb+0H9U+9Vbh0wR74+tb2XvBsxw9o0O88KlFaSN + + mrCrH40p+ShNumfE8QbHz8ch6CQTZpPLdC9x0Ziv1oUo6CGAOA1IGmz7rMyWjFu9 + + 3xx/JH0CgYEAwoXBg2lz4YooGkw4yEdzXffJXaYxvj2BgJKUZyb/ZxRUHxHl0LYY + + WZvPQ5UyOZDqmqvkfMYJRiN4rW6J/uPsCp2NYdXQVRngt3THqKeHii71+xRuBObB + + qNNdfyT2HFuwbxELM/1a8r5QW2ebDU8eLlDbiYOEC6Fn7/foxM1uuHUCgYEAxJiQ + + wMs1eu/dG95kQCiYX0EjgYmhNtQPhbom4+9t6oIXl28CtCmVhYDfJsI3/qVYqXIl + + OqTOW6PLaA7uuojeQYmq9ehqldi5BHavs04DQ6nDncBIDqI4NlNG/EAWxQQghG1j + + PDWUn6wZMBdGyAKCX/fiz/8mDBPnlTm2NdiGqo8CgYBgI0cesZGKGIP1a1Js+ZM1 + + D++/jxHqme9VIhyiVo3H3i4tJOVWH4ktUGpBVo16EftA8k98s0uGFKXh4U3mYbMZ + + FAD6J3hNdvqu8NJ0ske0rbz4mII/feSckcoVuqjAHzi1y9Cjo0W9zv1cD3p8O2wJ + + LAE0l4E9VkpOOVIbYgSkuQKBgAZvO+WjgQeaDDGaUMusyHftqNzXhVhHDo8A7b7u + + Gjnfsif5sSv2ZHdvJV6eYrjJ7qH1I3TM6hgjv0eTnYqraiLY/6h2x+5JnpyfydZj + + ikXPq6BhJ7qa4p4ckak168jc/rd24RWaZ1fmiRiC2oU3V88OTPUj07n3eM/wiJ8w + + jA/PAoGAVPnLKxls+vDRSdc0aWW8Ql2yuiw/zU/GhncTfwfiKIrU4sF5GW//n6J5 + + w9aw95QBMmBRPS2dSyF0iqvK5Ny+EXUzVCAVi3s45o/XlFz/RxkrljibI+5eOBGS + + lpe0EC6J/AhY2/6/5213nDPBoJKrqD6kCq3cA0V4ylvIaXoSjAc= + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 4096 + seed: Yw== + zfec: + required: 71 + segmentSize: 131072 + total: 255 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:MDMF:g4lr4g47o56kqvgsyikfj6ksoq:qs67p2rpyurqsuq4ivtebxyxyxi3i3qqxvvfzlhyk6nuumu3nzca + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN 
RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEAyocKsAG2eamQBV0FKLNCo04WL512g7j/7OeCwMofs5xmj+3C + + 2StysnM5fjeS+zGHd7VOanAx8XbtNwwh5Mz/h0c0wi8zKhro9900FyVQklsa/Vnm + + ckOt4km7N6BAaW/6qscFcf3xMM9rzbvEv2VvQsY2WTn0PbdgNHxBQKqmPyeHhfqi + + O68IItuJ9Atnb5nJYm09+FAjAAIOueg8jxCoqJCvPD/LEsKKu4mxS3MJeKG3iqag + + Rl5ZJhMbxJ0bBBxm4BfrmSatcbyxlAERlyvRt3w1Y23zXhJBnf/krfJNlwYliu8c + + e8sH8NbmGGEdJHw0kA+1Inm+DJTcO7EJ6Rm22QIDAQABAoIBAET5M7cYhjwt9roU + + 4W5oin/SVrT1pAidRy+38qxUyfIiCD3pQ+wxI1lJ66EkLR72UcP/j8qpFiE9lvEh + + 5SMme7nnEr01VO/4hTHw+E4Pq76EX488AEMW8I+5+5Qos/cUp2Jk8GJPkUZFxdml + + oo5qbbdoiNbfaWJhYtbNIG31EDQ+V027Zd2A67QzJy8Sl0WdhvkB8jQaX7DOe6X9 + + l2aFWxUH+zUx+T8/2tfH16OYPCLPsW23IbU66Wq55izeu183Q/OzfgS/tXX4RNpg + + i6zxihOVPpMf6+4WVBh560vW9O/2ehIbHwTsm1yVWcqYgWV6YDahBBkO8lrqKG8o + + vHOGg88CgYEA1yRioqjxZm+CmF3y7GtXJsFnyBem55OSX+CFaFCUDashKohMw+Pj + + KWN92PiSpIC/PliROd2sm6BSw/RBe0kTXWZAJ//TmHo81dCdh2B1KKSlsjZecOPJ + + N0r450/L3QtlzTyC4iGU4ThnjXLmqWK6yM1XFn/UyhhgGpZsr92jrjMCgYEA8P1e + + T1XTv65k5YR7CVfJHhhcFWkMJmRDFU8h1jW6A3VIYXXAlx7k3Ju0VlB7FbedmyWz + + hgEL0Q9Gt+7y60+3dMmmeFk9fkwto6g2WqMcrxr0cl5YqFtHjYp9Q1kFL37TuTax + + /0s99AKkXefp53tDy71ilQjFVsGc2LUq/CrR4sMCgYEApGTbMeviOiHvKrpvS5Ri + + De0vfkgEc2PiL30Cs8kOuLsRJszry6uxAwlROqAGfckbWWqX3h2zLV/+nllgR/J4 + + 55+gWnAzoYmWPtOf67gbDilxq5G77ItCUAvr0eS5pHh3G7KnWF/MwaQ2DHHGK5yT + + mai+aSTY1mx10xsqhd/YmN0CgYBh+WAiOO6Be1Ehzp6Gyd3GEnk9axu5cAGl5CoJ + + gIZDaacnmEvYJIM+/T5v6QBhb+jvboBx9nLrZ56EoOy5pgsbu++l9gH+GtJjOrv+ + + VVoQBpFi/eBlcdbBQJB0lPh6usExB3+OHvTtAzX3x5VcusxxRGmT1aEFCGnP4Le9 + + FVuHKQKBgFZTLR0b+xCu82iNXFdgFnpkl2AhG2puinsGRhqhRaafb7Vt00OL74Ur + + TmyjiGA50sd4dzJuL0//CZsapGnt5yDqA2wtFJVF+YNw5ZJRVbFqihulfR7zz/0c + + QbIo/GGC++g5f6gU+PNlJ9yDlZrhKSWmaECyGDGVvqrrPCqNp3e6 + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 4096 + seed: Yw== + zfec: + required: 71 + segmentSize: 131072 + total: 255 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: 
URI:CHK:6nvs23bhrpiwiz5prqdtvztujy:wlg7g522rpdoitpm4qwhmctrjhnh4zfloiq6uq4tsvaoawg4slpq:71:255:131071 + format: + kind: chk + params: null + sample: + length: 131071 + seed: LCa0a2j/xo/5m0U8HTBBNBNCLXBkg7+g+YpeiGJm564= + zfec: + required: 71 + segmentSize: 131072 + total: 255 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:SSK:nana3u55ewg7j2t352qbjqokaa:s6i42emhgzsmtxxhro63znk4cklea4qmjn3hwyvha6ckxk4ptsbq + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEA4DV02okqw3N3Kia2PrPCZrjkTklwT8WV2zts6g5i/0UKNTTt + + 75IwvU7AJJhMsCvPSh9OrCeVE3aqgUyb+/oQ1EKD/U+hAEVc1SNP3YaNlAWmK0kw + + tWbTmBk0WTIm4yFwuVVAXEOWJF+437l5Nramp+LLrP2sS4hAOwm6IYbUZNiL2xal + + RO3S/vkpCFx0SmZr5M5LbkfO+YazS2abU9GmQLXY7ZajS8pzb0us2MrqCbsFfZud + + 04+zyDLuAJb+RCFx4zDJ28zK9haqdhdGZcHfNjIu6dn9StEIMwsUIPJEXxzU+3Sd + + Z13IGWWOOOxnLeXcTH9MBmnkCrc5k+D87aBYpQIDAQABAoIBACnzTpUqJ45w7wm7 + + uwsx2qyaKRuNyZ4fZ9ngfVEw1myRWCba668Q4R42myRtu6GV3N/vSirTcCT3ZdEy + + CpiDsjzo4iXWDZeNouA9Tm7yQ1DAGtaMB/lVDz+s7ZrH07dZNSx0K0noGnJdV6vC + + mg+27qlIedf5EdEDIhN5zzIBNoHUmfEK9PCzkvJWsjpmKYRl5KNWHhCkj303wyFo + + LhkwIjg0Hh2zjwu21BsMcCQ7AAwwqPCr6QR3cV1bhuznBu6TQHvFOEVY11fykUfE + + Wb4etekeb7/dEOkXf+9MX+O0TDMdZn1LG3MAqEh3bmQnwK5HdedpxVLY8B8uIb2w + + Rq8yk5ECgYEA6xZhNg4+FnydGZhNexvn9D4pKO7y7V9Ylsxi7655YOj0PMSPu8d5 + + Rlk3sH2io301PuevGhDQlToxMQJinSCBbS0m9Wh25puBDV9Hv6/isXCLSykM4R4n + + fr3iXLlBy+yaAX0FSmDYOLtdmRic+mLftg2vDPS8Y67QeFxxBsslANECgYEA9CdW + + 40AXX1DUuntO7UUdv5KruTl4ERuDfHA/bQHHSWQhHbDQJih3PSICJxvcOu/yAUj6 + + s7Jz7HLbwHBt6OKReJgWAsPTI9xya6ktWP2AhX70cg+LAJUEqjPNVrdmO80cOpCx + + 8CI32X0iKpyKosPO25tyV24dsgtCI/64HC8Ir5UCgYEAm1FO6qrgRHUSSk3AqxyV + + 1F2ZTg0I/OFoo0DoANjyIp/mdZucJwE0U9EwJO/smz1nB2eE3aDMXjtzMqETzCIk + + wJ/7RFo/bD7DNbWErtWi3X6w2PiHJNiKWYdU3dQb74IjgeY6r5hqYpkPzs1fMWJP + + 0XUa+WFccjOWUl8o4ccnbHECgYA/b8k/mnN4LIRZP5iuRXsJP46mvavYphFvz6pr + + JD8nboC8OiibAYCZvbBZgP+jwJxcgR3Ceudr1BwM0Cl+jP/HGZz+cur1Ml34YyiJ + + 
KrSt+uKAkFFJgW6I2pTLpzNE7nIbNWVGQ13HgJ/T+oB68e1ZWfZHiTDmBc7Vk2U6 + + zW2wUQKBgQCmt/ZrSCin2teQOg+rqcmicysXn3/DkYYwafeTJa52OJV7JQTWK5iZ + + vJlkfKHpqcmsLrcUJH2rRlcOgzrjXinaXGkFtvCaWtCuUUJgTw3n7SF3chOUjmRT + + SKL1Ay9Y5Dus8UREJ6V3JchkwgIZcUgmWgRuW6YHdjCscWNOiD9Pow== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 131071 + seed: LCa0a2j/xo/5m0U8HTBBNBNCLXBkg7+g+YpeiGJm564= + zfec: + required: 71 + segmentSize: 131072 + total: 255 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:MDMF:2qmpqupyhsx526l32tjy2ouhri:taqkf4pffhgu4iy2ddhv4ltt5opsx6s6l64exee4c4vmte55adzq + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEogIBAAKCAQEAsCZMCCaVWiSLYmGr63IC88oMBAiDDohGPCEAS+pgXNHpb2bx + + 3rRv3jqtMgAlKF2bQ+zGXNRKC1Z+b0XyDxBxLtDmXVhqz7FTh4Wsv3NKnNA8Lu0n + + m5/49C9EXv6RrLxk1UQJ88R62yL3/apBgS2bBC82WlyxjyBjJhv03B/6AYzx8kI9 + + Z8Vqj0DnstVpaKi1vQnoJtpYvTxiqxzK9PS9wKV5hGHEIVqD05Z1RoPKPPHkxV+t + + 1UxbggtnME5+SfQ9NQ6w0t/+aNeDRWc7dGaLyJhR9FtASuaqjsORutFXOauAXxpZ + + 0Pk+OLZs8jmvjyklucoNIQ7fU7mNieIO7EI/CQIDAQABAoIBAAWQUBp5zYHZYaWr + + 3BhFs54rpZGDC1CsMTu49x9uubh00OC56a3VGSt1wv6vTn0l57+PfPx1oBkXlErD + + dM/Q/yIxavVL06PXwAGp0TkC1Tp6wUviJbweo2hjDc/KTqOcF7s8uOvAKvk884RJ + + mmQQIrNRInBcOSeCKF+NxoAlamoeEiPh3YU6T9PMHeJkyB8vtaB3td4+XQTX3Qg+ + + gaajC3cwU2fV6NEU+apem4dsn4TrbHMgJpSLWxof3XaJSdsAz8KVV/T7hG4UZbFK + + HfEriY5xSNuv4u64vikNPNi8hHaTaU0KAa52VB1wVzyUqjfSaotwCL8acjg4Lizn + + u6VO+7kCgYEA1uc4dVic62SSxJxoPoojZu0DrhEXgvadFAxN3X6udkYNdCs1Ox45 + + PuPUpTWjvLuaVbmEt6mU10tv83A+wxjWOcJXetHsWwPROv4G8xPO5vUgVyrxysLm + + UxCMP1FE2rHaQpXNYtB545V/SV9kV/+iVxmlGJ1VzWKKs40GXVwESJ8CgYEA0dXb + + 2XxB17O3FK23zSFS/yVwivjLFq2IEoCwhHJ6dLa5VrRH6NvUAX4xYIKiWQBcFsUK + + fOMBhAe9Y5+RCwwuPmP8ZnFPeaS8mjAmQIzuXxPr0pjXM+j//D+2eAL65oFm5ivU + + LB9qAtwMQ9yNwtjotal4yBl3HP2ALbPnQCbLz1cCgYAYjdKllbpYKuWaEUTX4HCr + + EemZudo13HeWEtHSvOayHM7stwMd/hYMWXuyZK6Qod7AbLH9SiL3dmcUKX8CS5Qu + + hUX5goK+43DEjMG+hETfnqJTU1TNFfe7BekAUwjK9Ac8FGGjKK7EkhA5Ee0lINAr + 
+ o4J5jYCANwIiAbr4b8sNgQKBgAmVO7od+5/PPFA8csVyfSjb29zs6dF6UVmO+QDD + + faYw5hv4lcQjrfX3fmfK74EjDBGaJBV6BIq0E8kl82jOwJnm1RMUn62NgXOFOWn/ + + Ra+f6Egw5LshK/eoLTwj3rOCO2HNpJ3zPVMuG31J6Et6vn31ZGe3CgKP7TepHKmI + + XAx3AoGARDvoRZiLJEmUDQOw1mI8NtrvD2oKuO6YkT6pt1eZhqPN6SMDUK+ckx+C + + nG+2loG32i9F8IcBCFWqeEXO/C6jvb1df3tcsxtaJFI9L4c/Nhxx15MQAVhgpwaH + + zN8TUe04nOhf0OZZebwoDU/ZrLY7OjiW/vzDmeqXOjINS5kbxmo= + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 131071 + seed: LCa0a2j/xo/5m0U8HTBBNBNCLXBkg7+g+YpeiGJm564= + zfec: + required: 71 + segmentSize: 131072 + total: 255 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:CHK:62dtbeowncjj62jnwbggaufvbu:6r2sapg2cm6dvmylodyxabrj63a736uouzsqyacnimgo4svnktva:71:255:131073 + format: + kind: chk + params: null + sample: + length: 131073 + seed: /N4rLtula/QIYB+3If6bXDONEO5CnqBPrlURto+/j7k= + zfec: + required: 71 + segmentSize: 131072 + total: 255 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:SSK:xann47c7l34cwixkcegyjwbrzu:ohhz4cila5kkv5ruixjzrltgx2e5q45bx2bfjpjqbyi6mt2zfkha + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEogIBAAKCAQEAvSRDhsaKiEoAW8+6Xt0bjgxl/FPZxd8iSWdGcibGedmJsvK9 + + hpAFDrqSFSXJ4pLjabqvhnJlxhc1xjlqbB/JuP+9eqYxm2FSzBZr5F+mezE24zQ9 + + tqjCHxbm4JQmOgTeotoum4WXS+mNp0YQSm2NDil71mmN288mZMGt73GN7sYz3nti + + ubABLMONRwZqN3tffAgzcTaSKON0k3pimr7CughlTGSSbnQ/Hd+G8jxiSTuW1/8i + + p0gt8NvDw0WaOxbyG2JsAxqG5HN0ScbUlCMEwiobSxc+ajAy3oyMJGbsPMqTPT3p + + 0ZKiTJKpA5M0uMhF2rQd4F56FKo6ZjkYSFDTGQIDAQABAoIBAAI8Xc368woSy3ew + + xQe4PmPXkq10AkscAAhB5ivIRVvAYUFrxcDDwanyztipv/3jjzhubzYijEBv55m5 + + wp/KxzQrVb6PtpHOVmlSMVSB3iWiDcxYXD7LnOVulfkWpAlwieWYuNwexqYsX+Yx + + ZlTmGZ8OvEeEq06E+UCDDf7Ns7tPhRP9SMdt7QpgfyL8rWsqvh6i+hNaNFXj81Yi + + xp7S4qgLJ2XgUhyAuQuO+tJxlWVkMhWAcQgKiEWDZGMTDHhm8+v0elc4iup7J5SR + + 0a9KKlwe8iBwihRyvWTXgf9puiVoGmjQPT2OwXqcrYwMBTIsf281am37Yo7as1QZ + + 17GvKTkCgYEA5QHevOUc8DqG93qXgcT9k9x6JWnRC6YmVWhaERDRkPANNCiTRcBW + + 
DAVgL6O7lZav9qq0CRXmF8X6Ukdu1MYiYuxxxGgSoKY6WqIcaKGqwTyVUlSe6HJk + + +9mSbnCbE+fdBMYhg6CcIeOvoTeXpFerIeXxqI/N6d3FUbA9MdNhlaUCgYEA0295 + + /nPiDcXgv5sgF/wBL9sR93MQX1IqyhT7Y3t747odvkxkBg4aQFpZFM/wUEIQFBEY + + 9adWSQq07nifLL0mrkazF9wrDBJhyppqMnqZStzAjdEj7qxYb3/e+RTf2c6/r0YF + + 4LC3X5tLd2G4d+UZCkcoSltdmDs8ncOkIlvNVWUCgYAVB2Dus5M+tAEkxIsZDX/D + + jiFhQiBCE5W9jgGHQ6YayxBLU9aCNzEvlWbJuR1GlTm/StmRZANm93UPDSQuQaty + + rgecY3oiamE9ZVl6ei315JxJnR+idK61ObtqjMiQwV/YSmFVdvAfZIsCINq56pr6 + + V+Ui92GPMiAmaiqUYra5SQKBgCjvHB83MDyaYri1v7DlCRXKw9+0VycdMUuOZF0O + + Ox4LmlaNU5AYityKoVR2LYBcSeCYrsxgaUQa3oyMrcRrmmGDLokgBvV/WY9v9b9w + + HN1xf5X1N4+trjFoADMY532zmUjFtb2aeOX5mtKyCJSttftXa2V56tTeIw4oIk7E + + lyxBAoGAN90N6kHoe6KuTq19gc1MGSy1yctOk9LskE9U/9kZoNBK94XukC9jTFZa + + g5VhtchWKaQmlKIhRE/m9LIIoRp96EBmDMV+VPqNu+xqGGHCLX7hofEfYDst1YLH + + y8TOOjHiY8XpfqOnftV4ZCRKrwR4BCIIsXPKJ++p1U/mK7sw6XM= + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 131073 + seed: /N4rLtula/QIYB+3If6bXDONEO5CnqBPrlURto+/j7k= + zfec: + required: 71 + segmentSize: 131072 + total: 255 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:MDMF:opuv6wbozu5pmpb2qfidrw5wfu:x77l2jh4dvrui6plhqzcip5z2f37bdwj3wivq74w3khga36muzoq + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEAtMN6z3XTb4STPwK01804o0QpJgxm5B1pxh5LPc8DEZIN/iHn + + V2wNfPlqoUGNT6/p63eUrJEQYzCCt9A6YlzjkRrgaYC5sqmSevkZ8bwX1QsUh4ts + + 1eVyb1idHEpvglwvZkxlq0JOXcartlePm4pGlj7/DxkQs44NGOivmg7AqzKILz79 + + VX9HjFTUYliglfcPsDUyulNm47qRSEfBKiaHHdszul1hLhfupVg6oNF3Q+s1ct/U + + 5avxFEfd/1DLz3HRyIkI6I5f2bshoS+cQ2nM7I4/y2HWEb4ed0yEztbBCauVKNdP + + 83D7GSJAfWvQoAjTJeAqqZ6N7MNMAkfce3gTkwIDAQABAoIBAC4/IOl47KpIUd+6 + + EohvocDrjFeGrsBH4irkzz01/EP/iQLuq6BLLbw+l5BAFCZCDGfIxUnNJ1MpMxhR + + 9s35k+Mo7Ccx3tCd37MEjiWxiKth1VPEUQj8VeW01yVIyfShHyNeAljpcuE9Fetl + + xYD2xI5l+Z1kPUii3Cj2Rw70HUjvC3rEAfbdnr/larJKPajWHKE0uCoSX3GqX7nt + + QtmfG4KrGMz+xXvigLH41zNHevjnvnqb+HBVgOAdHo5Evh2VBBb+1I9l/8tdjEYS + + 
Vx+NAal+kpHXZETFmflAyLRk2whcCyQtuhye+ITJhMAtBxvMkm94RC6izoRRKPDy + + XxOxRWECgYEAy9CWVhzXrr6R6hQKXRBuUa8mV0ogWj91kWYF0rSMbm/SORBBIUrS + + mX4Cwj2YqqdSKmNw8mYvBL67lV/sQ1h67Mv+g0Me2PojqRUnh2TjtcxyRhhUGkRK + + x6DZUrh0WgybPsK5zMeT4G+GtZvzeBvAO/D8FOVE6H0zytax3OwUTKMCgYEA4wv1 + + m8WtVAsurwJVKmtxyYLsTMLeLnW8NO+STlwqfBQXzBxWYhQJjJmfYWRbSTlUPUWJ + + UCYUNiF0Dusp5dL3yPgpDrBnpUQ/uw+Cza0B5V/Vrh23kvkicABw4CQSQOwCGZkV + + CL8bnAxI33SQ9B3PHTqunxrb/NAqq5FvP11OHFECgYBTwmwWBZJpwO2MSiIcLuV3 + + ckiKdO8ox42UbF4WQpa3yAKX6uMpQGueItgVZWT5NPwiaW2AYJgQFiZW8+3Pm2wh + + JpB49zuVJe9DzGrLTJ38F4Ia5mKhzNECi0rkoONIIogmWbYrvxU5lfvBZM7A3H66 + + 44VlPPd9p/6B7It55BdPiwKBgQCZj0wld/Q75HhFi5lYYGUMOo1heWbWG3EYiHP2 + + paViWCCkPwI5wX2X54sBTuPiyXBtJGuzlp2S4ttg/7JNq3tFJHp4Yd0nzNohxWLd + + gsbGgSO/aH/xWqjtAY9WOW9TE4x0DbJJQSAGUdSztV4YjVS4WykhmQPyoERL18hb + + HdsnkQKBgE9xTWmSxMInj1Iislc4Ru88KyGMVgpU1/IEQVG4zmu3js/KgHteBjPq + + EZV38+EW/RMblqDu6883JAEURnsroLU+KWURE5EilBr5psE+3WHkHqUVDiG5ghtb + + TRXTovFp+oZ2mz2O2UL2oA/iXJES/fa+B71ZaGdqMt5iR4a6/yW2 + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 131073 + seed: /N4rLtula/QIYB+3If6bXDONEO5CnqBPrlURto+/j7k= + zfec: + required: 71 + segmentSize: 131072 + total: 255 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:CHK:hk3a5ync7y3dnwowqjqoa34eae:y4f5b2xqa3lslh2vti5fdbho2syqhbj2p6f3enlhxsjbfpt7zuta:71:255:2097151 + format: + kind: chk + params: null + sample: + length: 2097151 + seed: uqWglk0zIPvAxqkiFARTyFE+okq4/QV3A0gEqWckgJY= + zfec: + required: 71 + segmentSize: 131072 + total: 255 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:SSK:7hnqxsumt4npusjqlbhmmo35qu:e7b4rmbxwbgrhguhjfbcc2pywgducrmjdzcs6opoa4z2dywalc4a + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEA7VJ3LNata8TXtyH/4NhdD9I9oIEV3zGvsKLFSzIjUWqOoM3X + + XZdWCl8q0ZC9NTZdDUNDQmoHCLNGVQjbeXkMI9G4JMGdVxafHivxCpa4A1HAcPXw + + 1Z8L09YIXQmgPCxOiCgvdCcaVNxr97PiZgndAiGr2XQ5XU52YWm1M2F7bPv/67FB + + 
kaPKlS4lAOVTQ76WKS6x4z1B6HpVQh9jQyvYYlfmmY1zvzIJvBMmyfMxLQpgBSe3 + + xKfSHW4Ow1Kjl9WxhfBw71hEr4ZxX6GMLpFhW5aPtmUCZwl7FBLQosC4IJJYgRol + + FSc5RsdvRnfPYbJ97+qnmpxlRRICBm9MCXiZhwIDAQABAoIBADsA88xcG4XdzNwl + + Qd7/LDQQy22qamuxiMLb1T2a25kUax2jz9XfGG8/tf+ggspGF+CCRqiuf80z9VqS + + 9y4+YDxPmf7ZfGr4ntr7hdRiIKICo1vyacxS3LfwUOgAyqvrQCMuCo5QYoWSv+03 + + 9iP0c9Rh1r4b3V9LcLdLdtetduhjTGNfPE0w3GazezdAmWmVeDe7L8Fts6XrDdYM + + 3x6Z/0rk1daMO/1j2LFfu0bbtWY62m7++ZlFbG2vNnFmZqUF2xxpCEbjIm3PwshA + + DRKISHqirfQ3yCMjNgn4kHUOFAIMK52IDmplb1I2gknoUKdLlXXuy1PuEty8nOu2 + + QuPwlNECgYEA796GhEQhbkiP6UYCjaRkbpJ7/YEvwYo5RCdSiNvfLes47OtwLkK/ + + lsfqvqPclG3O4AZDDTW7QCqiu1cW7tx0W9fEz1t+yHJclrIYG3GcmN1TtXVreKfo + + MPgxNP/XzmXMevD5qPzETf0yyIQTyHaRA2Yo2/kev2jRl0gqu5kZ0LMCgYEA/UgX + + Gv+QGUAVEy7us714+7w53IABy5mRq29pAVAfm4aMKEogX7LPjYytMEUmWdlnSuzd + + rltH24XThnGHx4gfQDbrXHL1ln5MsUtL9PyY2yzbvubr9TqZhnwOcPhvChfH8dXT + + 0H1BXTs5JBDwEZu43GVNgxCMNy1ck7aVBy6MVd0CgYAzK9isjNBI86fnzuyqhOB8 + + CjnzScUDV9aBqJXd5nIFHMInIM7sv4aZxwpYIyLic06H0i4pukW5GZ9fseONj3Av + + S6eLyOwSHPuNlm64JBORNN4vvt3vfnp1P+1XbiD+wg7OR2wrVckXDiXwSuThhhHH + + lNqwmsOpd9YGnPmoza+JKwKBgHHthm8fe4rQF2q8lqSE2rGpNgGoFqalWi/Z+kqb + + 5svHVq4cwbkqLlAGcjSfNiP+NYcvSnvOFWF6Le5wjNnEsgHpci7wiuV5xDePnggB + + wyP7ZpDVQFfbVwl2LezE4vWQQuDWBOPoI4mzRP2jHMle2WVRr+7/d4KuRdEvtJM5 + + beiFAoGBAJZxp7CjdpRZv8nv9j30zj2b7C2Kn/fBusD9YeI3u1P27VkZ08SaP5te + + YrH+YndbGm0I0CYVhozsIVQK31p7iSx3jEBZTmNbXsdEduTFb2n/0pXQ8Ods8/iy + + JoBOx9z+6v/JMYxoKDbPEohDL9lT6/TbWAUr/k2gv+90rdFwVVQs + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 2097151 + seed: uqWglk0zIPvAxqkiFARTyFE+okq4/QV3A0gEqWckgJY= + zfec: + required: 71 + segmentSize: 131072 + total: 255 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:MDMF:5udu4xzy7ffr7mbrwuxxzus5fe:h2hcxihkyq2fpv2maprwi32kerubawnj5yrpjcomxj5e7n7p4yaq + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEApwF50oSlhVzG+vcDRCc1vYwk8pdlqMMgnqSHn4sj7zqNrXZr + + 
TJGYDm5NhltudMc+AXFOjStmNO6TtSsU/ttQtG7SJLI+fu6rHf5nr3c2AW1NgLKQ + + py47GId5AHbdyLJdVqUqQuZeHTHN+OqgHxEJ5AjYB8avGvAtOjGG/1m95d/VHe38 + + nB7rCIZtNomiO7eFN57T/RwBFCMazD7vhTYyq+bRb1jg/STG+b6kLl3jC67r3/Fp + + RqXBVO9WpHjwbGgSdAGR6eNt6jm2eXXrv1z4IFR4DdzgnaCtWZOIKmfdk9jLG6ZI + + xqwznoBq3ahG020HxQMUWHRljEVI70HgZBMSBQIDAQABAoIBABwK2uZXAKYinITS + + illcziDMUf3sHxVV4nnQ/bbz+a43YkfQvRan0eUGb30SiDsSo55BZOO+eFSGBQZk + + PAvJTsVlYGLqDSVqNRB9wfJMLaTSsjNciH6R/DlTsiU6UGZdUN/2LuD55q63SLM1 + + zno49bS1KXUwzwFSd/2wCE+DRag+BlETP4hCR8Rl8puZq/2KAT+e+wLLNyNZA+OK + + C9L+PgUv3Ac7e7Ll9hvhx0o3e6WKhus3BMbWyQN3fDUt31leVoROXnkvF1lORNtH + + aaCZxdSFvZDJqTtEZE+XMQND3Ea1aneiWVLD95Q/TwIlQfd6TpvuONxZ4npoyk8V + + 5CNHU+cCgYEAvlTtbjqSHCV0/3OjvXHNjnH0tTjPXzwJO/BdKiE+BVnVyznVb4UB + + ygcxDyblCy1zEBOxSszpKOc8kHUh2IxMVe7hLVIxjNjxZgKso0GhOJzF/Qb/tkKN + + H7EdXU1d8lpWewztR1p/ZZrZ+dI/+m0kKlX4cD2L7HcD2WXbNJ3hj2MCgYEA4KBH + + Rhoi5tsGkI/dGR1KFLKDehd+tTWUqO77hs/CCxEwLilVu43Bj8PmvAG+MazXjG4G + + N5RxAUsPN9VhJ2jeUAReCLCkpEKHJ/YuVEAZjwDTypAlgIBIc7tLCPhOYarsohUZ + + 3+LPhdvBoV0CMY9ZdhTXpyaghhqEuygcEoMgWXcCgYBSmdT5E545bOAbxPn4y5zk + + BvymcWM993YidyxXjlm2RMiODCle3qBqJzjZVI3ujejzvzggOFGwGLqmDs+DhU/T + + s3oyCwvKDpSlKt/1chQf15ntN85eMP/CE0GlLmBpP19sw61uXA4R8GRNETwG2Lrr + + TKgnPe6tzvDytkutyB8N9QKBgQCai2u+PYk50APKPlDeUJqBdviibbvNrRmkyRfg + + /twAhUji2amUqsk7worjW0eiIcsDYUeBwe2l+CB2R6baWHpsDzUrQW1lXihjRCtH + + 5/otu2H8AgTrTleK2JediklTRSgds+rjcMdaz4F/JeC2fGwOo/Rjml3jJiegJM57 + + piABrQKBgHUOJRCcLh4UEvI4qsKalh28maFzjLqv0Neh5ENTU2W5Mv1YSISjqRYK + + V/hKLURUqDl+1/8reScAjDNXIr0gXf9UGkdcyFWnPjkCEuu/hz30fPegYcnfjLLk + + BBH7ePyDTDomBJK8NN2NybS8jIx8X24OQI1aPRCK0yLFJcmj9Lwa + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 2097151 + seed: uqWglk0zIPvAxqkiFARTyFE+okq4/QV3A0gEqWckgJY= + zfec: + required: 71 + segmentSize: 131072 + total: 255 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:CHK:qa4hwhwtd3cpvut74biixlv6ye:uuj4ujg3mcf3lqpuvvwf5pszcvviyb4cm2jjicu7ugiypojjie4q:71:255:2097153 + 
format: + kind: chk + params: null + sample: + length: 2097153 + seed: BTBX/ampNfLU+ox7xipBGiaSbgC0kcB8Gy7BkJB4oKI= + zfec: + required: 71 + segmentSize: 131072 + total: 255 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:SSK:v6mjfmtvbo4eibevptf5g4neuq:pqohdp2lrbontgxcna5cg3ni6bgzljvxkho65xjkvbsrgxozjm4q + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEogIBAAKCAQEAzHCyGaqHP6kzJgCNhvnbkwEgugxEUDACv6Pmt8k6W/EZNti1 + + WnlINd1d1AvgHV1OSW5q7xxoasO0Bjy7UoegZPdYEvk/neqT3ybW+4SncCqCjo78 + + ighVTq+MBx9BVLnQQ7NT+6clI891veBldr5h43bZlhKfrf1Hfz38+51sIYd1aS9t + + xIVXnp7O2aOKEHgMMLjY/bqNBmR7idiXvB+lM9U+Lm6lqgEaAq/Owb2CV+a2b63Z + + ddnDUDksw21/SPgJzH4CP0N2UH4KZvMGqRqSbE2fNSGSeXXWowRop594EMxT+sJI + + FLUuincZzdQeM1fhSQLSBIYAVsRvAFum5fygYQIDAQABAoIBAAY/XGX15N+waUB8 + + TFbnXEssAebFmLHVocPe+546r0afgcyAB1y+L5N70hH4ke77yrhqQCjR9qvqkp4N + + LZFKVT+4ok+kH8pQ4Jd1baTuixpdpjM6keOa+RZoPXB7R0kSS5fCC2s5kqQ7Qwcc + + LCWakE299EzGgWw0/QIZsBk1WJhWqBncC4rNL+5iviWWbWh+33je4syQR31R845H + + nyeBNpsYDK06j0RljUHqJ5sbe9o/YE7lKj0Vr/T8sTw2Q43riJAF69aDFjr9C+FS + + C0vaua+r181Bel8WOI2g2lSOvOfMydUoYNwBYaZp8tYH7eJQXmp6FB29VCmvVdNc + + JjPuMAECgYEA0Y3w5z851Te3PxRWVrGBEpuHBtTawzz19K9VVEAWwuJr2YBMeGrF + + t+8OUHzN69A8l9VlqWyx9HgMYYz52ULO2UFh8A+1CRYxfBck+TY4Tl5HEpiVuNwD + + KJ8WZ9L2AjpE6i0TkYO2uGTKaPn3p2wsfh1FCiFBcSosNi6yz/DlWmECgYEA+cCS + + /3ZHOr9F0+w27YZc0rFuEYDGeOSypBiES2znii0GnU2ACD9Z2XgEG2VLwSE3U4To + + JH/p511I6y5VxHyULkRsxyEnXozevV46DJEkurBbz70S4U7jXvGAyGA0kdvPEuPN + + EbhNFePEwO+hCVx/8hkclSIBLSnIYN3vEjGZBgECgYBZzhCtqaTpQWVgvSB7Krr7 + + 9HcbcGEIRrnJUNKqtoSKpGo/3gHnoSp2txZVXAcLxkQRdbyJrTFeaYw0yivQ9hab + + eK+2J6UX7dDrMyf/PUNIIpMm3wlbHb6ky/jYKcqQDdS23vaB6AaIY3lzH50IvQ0c + + RwLtYm8fRkmINt8eykggQQKBgDZ/xFQEnmR+aqFlEVNhl43OdANTw3uMBEN0qiG/ + + YQMw8hmPWNnz4QpoexTzVMWPFwCdpv6X/xWisI/Ja6PVv4wdGFOXs3yZZt2R2z70 + + yTwH0fESBDWwPkNwlbaj77TIb3ZiyVQNkJyvODcV02E0kyLkQe11HyaY0IX6x/mD + + Yy4BAoGAX7Kv8GIypDDzgEGW27QA8RRVD7Cb5mDFxt3359RtZCXqH6MhkvOcU4d+ + + 
XhCWb9sqD4csSwSoR1/eT7g0uBoOPuGMHyZSoAcSGcoDm827Zjphh36DEBmwCbCY + + L0Hh+TszyB3ds+x1+1NbAmjvPxYiFn0K5tIsbkdHfdmmhuS6sd8= + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 2097153 + seed: BTBX/ampNfLU+ox7xipBGiaSbgC0kcB8Gy7BkJB4oKI= + zfec: + required: 71 + segmentSize: 131072 + total: 255 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:MDMF:a245vdxigmfg5vqawu5hddp6mu:su5w2wqnksv23tg26ffsuosxhtp46zxxyx3speyruvj6c5uqaswa + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEAmtdn1hwB/oRemDFMiEzAVbcHkLSTomQDzvDP90KoQ96bmprv + + 9q+PCTkKlGcxuiZDnMBA4dNzcJG+CPPgxR0IBGLQ07ncNjtU0zTfVzOz46p7cbaT + + jZcHK088eMKAySWaj55DgirTXIMqsmolE58mIn60OOfLkhAAslOej61MagYYj4CK + + 0ID5Ij/ZX+F6d0DECTSo6iA+B/a9tt5EV+9vNS3axFdrEQb8aiWynU5GsPUGEyVg + + l42n3LUpBvA0kg/macI2BY+Q3xjy2OdKo0BC3SCcmiIj4/kXXwStwyghlo6gm8DS + + IjWeiSVZb/yaI1Jb/BtBaTSqC90OLqI0xCjqPQIDAQABAoIBAErmCTc7YwePVgZ+ + + Skvf/GU53LH1dzhk8qamO6KaHrR9uH0Hly2XbDQE4IY6iIZHvgrTwE68Lqn0BZ1l + + AoO2cEtW1TalP80H1Bc6CxKuUsS8kWvG6gbiWDht4o1zYEJsKyBvaK5NMuIcHIoi + + 5/5ezF6BNYIVNZZYoU2hPyC2rjDWMxqJaP5gwCszWlC9sAZJXYVrh9DusFz3S2A6 + + H3gYZLvowbuiOS3A71LXcTtqmBRIRDV9xvpVt4l6lfF53Oqq08+g3+0btVRV9CB+ + + QUaFBPDx+7fG+uoChfN8xz7WNY+aC7rA9YwABac3sLAxv4bnkQHWfz1m4t2Wm1EU + + Lcmn0O0CgYEA0uKcNd0kgeU7ks2WRq1TiobAxS3t+x4/iTE4BMcNk5hrUYlHRN+h + + maQjn8gTHiMpPdF9aESD8z3yPGWKlU7o6hC0GZL1PLfqFA4ZSOkmEGkDDul2eN51 + + Duzs0lc4tv+PrV+9rEQv4rDsa+lMSohsRsj6JK29bHpAHi6UL6O7XY8CgYEAu/d9 + + I68Z99L/d3ORDo+mqbIOnQfiLLlxfr1G9dwIltCqO1kOQMLqMbNjU1+EPs+TbYGn + + e1C/N/UxKLH4J/m5Z/EJqDWvP6pPXsjY7TeVnl29LPbZRcu5aX0DbanD6zN4Fl1A + + 3b7nL+czFw0Rn1hMuwrFsHtMFUbKRhyAJslYbXMCgYEAuMnvXdeYzOXkjN/vRaFN + + qf4oXt+/QCOiQwJI9w7BW8rch0cGl1hqj2nf+XvlHKxs0AmInVwkT3nBkKDdjbXm + + rGvUlPBMSldSGx67k0MRoqGSF3gF4yXzZw+++RWK0fggmyhg2NmrKDYmBO0ad9kR + + H/muD4Paj3qUQp5IJXKQlQsCgYBxC9SIPIw6nvyj465O+pg6oOrnCFG/ojwfBEkE + + HrRPt+lZziKjUla1U3UeNGj9uauqBXsr0BFg3ycUmYxsxmT6nV24e6kNeilIETVd + + 
3bsvRqM6wq9DqdW2GsiQELTS5N6JXMZhVqoGBl+UsnhxxBJJv53LmSvV9AA9EHEG + + Yru6/wKBgCU6ccUsqtnitFwdtI12V9v+zhr1oHYahPf5c3jzjr4bhDWFFXZm4pag + + Vi3DpaMGaMBqfFpst7MMrWOVwLPGhEKiBVtHiblKlioxSgQf2rzGWJr1c1o0EJJf + + V+/ON/eItUzjfe0PmWDrTQr5MxZDHc8ZQloCo4FVHSXhrHIEPscL + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 2097153 + seed: BTBX/ampNfLU+ox7xipBGiaSbgC0kcB8Gy7BkJB4oKI= + zfec: + required: 71 + segmentSize: 131072 + total: 255 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:CHK:sxbeisgiikrymzuqqb7njizgjq:2ljbf6mnq5mjc6kljctyknsznnyqle62unninepj7imdazch7o7q:71:255:4194304 + format: + kind: chk + params: null + sample: + length: 4194304 + seed: 2NVWOXvtwcAFf0J/g0fDC+ypQbnCU7awZoCxcnhpkVM= + zfec: + required: 71 + segmentSize: 131072 + total: 255 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:SSK:ftvoiodksdpqeepc7uozm4osom:tbrsewof5gvjlf6lk6fsgssjs23rxgzdko575t7saubvyhn7rmbq + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEogIBAAKCAQEAmv4eV/sJqOOb9XQF6bLeYQDp9y3wcNXMJr6t5VzzYNZr4mr3 + + mA5y18fH30HcmLIHVDGr4nqZ0jT8pBQOPjJ64pCLGQVTddkflwHAqVfaa82XBM41 + + 2EVvmzzNnlOadGjL2QAwFo8CFrIhgN/SfnBJHZpJcJFOIcYzlrgBgSmrRerDT35i + + 2UN4odKlK0EUXQ6XKUR9eswoKUjic5nuowjgnSvoKC3qzBZqiVks+d8DFHSVpt3n + + UzAhwp/LgSC3RP21kALqr/fWZ+6iZtR6rsTX6ZeUFVoD4JaM5SdTsmgEASjT6vVk + + vA32+CVjPmCThnHX5LAKRaUGuA2Z21+qCYWVxwIDAQABAoH/NBEqIGQ02oeb3Nkd + + I2TzT6L+9gp4u28XJezofiS7ncxqcaV9h5dS/Sof+uAlOyaTT7VgCLUm93bVaElU + + f5B1t6bXE5C2eOB3vELadgkNVym5keO0MvMgiwXiDU4IlRKfaEan4Owpx3YPyztl + + exQ9e7RY93fYx3/N1NP7rWhSISo2KN8yimvmYrRe0SpVL+kFCpaA55sZdcGwcxSr + + dFISGFZ1dFf5zkEvNIF+HOFE16AkM07PICFaaL4XfdFsfNE7J/T3HwVrIlsaukMG + + u2Rky79ZUDhxaZZUMutvIuCun6apcq2sjQhPIacH/zzWrbdThARSTRFpeDVNdfAR + + uSjBAoGBALdxT2ixGDF6wXVRZ1sufAmbBIeyYkKI+WJqD44dykDGBlTcZk8YYKB8 + + VC3poAIeN62YhflJHvQr32UvLu34mRVS1AePkBZ2Za6VWqSgK0SGRjQv5oIvBgQ0 + + R18EedGLZnjsgFTSx0eHyQEmYuKJRgfFXI3q/WNVhbRrBAH52En3AoGBANhMEx+g + + 
X4wxLLRAcRdjbjdfyj68LlmF8sbdkvPDyPfnyp/SUjwAa2ZzJqPraoTD8olReKVO + + tnNsytDJSF050KBRdfe7PHgKBUPxIUQ3B1dLFWilPqaB2L7osWuVMDnl4j/om2WN + + 3up8Ydo09UAfPai8EntoYQzyi40fRT3oTJ6xAoGAbossxDT8FE0aKZ1tgEgJ3Sv6 + + Vd+MUPYD+mdZilWvXMs4Y4kRahaRnARwId7IWp5lBQqFqYyDx7Zsf6goSqVlcrEg + + LpI3zSF58vPz1ILkr/2ObsJy0P6PTJdIbxzeYAT2MmaqivMdvaA446WDL2pzthkb + + xjXWjjaqROe8WYh6608CgYEAgaziTjS88/TLY2m7I5WGD4bLXt89PojS660NnD2F + + 8DK0RSs4CCcMPMjOornSC7TaZL9GgHz3X64azh/O1a2CyYrtGc/USfdf/sLC+f2v + + 1gL629kt/W+dfZ9ONzyjRCLxiPUwrSroOVbG56aWXpIcSlwvDHOgs716MupLffkW + + bpECgYEAtfMQhjXgJrGfS513PistBD01BCoYuurvk0Ja6Ssy0wBNk58/FV7/3KVE + + CcB3JwtQiAfLvTknbvtc5L+a56FZ62bJrgsfvv2X/Y/fPLicvWXuEFoKAxD5O2FV + + cXl8ozjDXcUvEjwAZaLH2ui3QrHn3jX2iYl7ohKCYiyXqDbj6po= + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 4194304 + seed: 2NVWOXvtwcAFf0J/g0fDC+ypQbnCU7awZoCxcnhpkVM= + zfec: + required: 71 + segmentSize: 131072 + total: 255 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:MDMF:xa3yy5ihgbskknv4nvxaxpt37a:qrgqgnkwo6qzhzge7as3e7ywnt2ddxex6oktg42ljxnom2n5ymha + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEogIBAAKCAQEApd+7rVZ6XKYuR1tFUM70FiekjZ9EQRcinHLUYZsTriAQDlgg + + W+dW3OJQG9w1HHBwuiKKk2ACOCms9LtuuU0wNnclZAFX4xDk6Ap27lVTneil1mne + + arxqyVeZBVDT0A4q9fuRMDGrV1kj2F3RtBgiXiZd3DTT4BqHRFOrKfXjDOD7sDnm + + CFZyWiDTAAipvr2tj1dhJU7lNnUjktg6koW+JxP2c0MbQqJeIy/99G5crdTkga21 + + jJVpgRJyH+cubw9SjqRRKCKlEKkZlF8b5X/y6Htn95gfHRURoltK+XogdXeABIso + + JZ2L6OHVnIrUew/7nnMl94wTJPPFoQiJGT+7HQIDAQABAoIBACemtzwtJTRzFDKC + + DqyNwDrwkI07Mot1vpQT/hF0Cu0PpI7tQZT+lNzZ66jxR6/r8AKKwcIPjBBFZB8f + + lA0PNtR6QFGq4Ym5zuJqJ/p6orGnfMcnyR+OOV+2hTGIXA3K6TmigJc69Fi9ygwN + + h1TMBSEo/jxm03Qpm0a50nuGGBfKaLk/4Qu946LJoMqZMnyBWMc/6wYKVgiRhsOA + + 2pf6ji8xgCCwhv8pwjJSKIwl4exMpnTQg7h5wtY/oGzYRM0DhF/8rB1XHzeogbjL + + dbdM3Yc8v3VwxJuUdFf96cfKHhID+aDoAEB3sfRpkLqBAzXDdlDyMErVYw9THMUK + + J2le4zkCgYEA15GXSPxWeNXfYCNq7f0L6o3LSoDMzdo+yuAHwGoj+waxidoe1Qku + + 
hGRDm+41rkqe39dEGuD+13wGKLwDLqXE04b9VS6gMGXhFMIS3yjZHXher99pv5YY + + 5vKwOI5AYUbLKg9QsL+Kd1tba0w57ydX2q1AcVr6jPixFL2HL1CP1/kCgYEAxPwT + + omBIdOEJ8QrNxBhm1p4aCXsQ4nhxuUbNrmoaNgofATcZHEdF0ha7vQJg3LotBcHK + + 7PmqQ6HomXgTmxsLEBRYIkX5CEDQuwA0WMPyUa4OTaOLZ6vuQgFnteGe8Jtu4SjT + + nUw7eeWnswZ89q9HQtSeKcd/PpiyFcddDOAR7UUCgYBetF+6eOGkhJF2MxkvJRSv + + H0xIlv1jEpazmmjNZ9QW3IHzBhi1jysYjtQFFUoQIEhcHr6U8HQFRz+NdcwQGlO2 + + en+hhLJrkNapv/l6gP+hqtgufACBYvfdvpEcx6IRGoD3IXNZs0yp00D+iqaJIse+ + + Eo9VPZsFg9yIOBvD9ai8QQKBgD2ysremLqulHMcJ2j80YWmRZZhYmoZEsWIVwjCB + + /Sm169Ymms/Xpw/RnQXra8lW6ukltNiarnC2krMXABUR2Fo19RDvF7w1COu5eavf + + 29MnkEVTF0Pmfx7fb8txGqZEGOufLQDUssBQZUFWo+dkKQ7Op6dwW/OQQh8+LW/t + + 8s99AoGAWsgONxn9p7gU1XVOIXKpEUD5fL0IC296ibYQ3QA1bc/GALkKe9rRFgTz + + L0WbHRw95qLp9KnV3m/zeUk6C8xqZZlKI6KRSJxQczVYoUJYqWo3Iduckub3oW29 + + uFXwuZLgnBU3W92hYS5a1CFZTu04UEzTe9zLR1fwDl9KwShyG1Q= + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 4194304 + seed: 2NVWOXvtwcAFf0J/g0fDC+ypQbnCU7awZoCxcnhpkVM= + zfec: + required: 71 + segmentSize: 131072 + total: 255 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:CHK:r7sv6u3ihzys754twjltuzzpza:tpyoxinubkl4kvg6hipls6ezun7t3yyqj6ix3vp6tvnzqirzzl4a:71:255:8388607 + format: + kind: chk + params: null + sample: + length: 8388607 + seed: w6uP8Tcg6K2QR905Rms8iXTlksL6OD1KOWBxTK7wxPI= + zfec: + required: 71 + segmentSize: 131072 + total: 255 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:SSK:y7cqgstkbqw5xg64xzpveq2xle:2rt4ydyudlifphjplkxroltqcbayzsnfgvreu7qufhtk6or6r32a + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEoQIBAAKCAQEAumqCuPxXTpGEF+uFm+cidAyGqbwWz/TCmGRlr5bEPrerLg/j + + CfLcq0IeLayQrhhUdGf41+wn+Y88qs4Cp5jjdcNVn18k6mkEnxh7cHjkCqucSojf + + cTlMY0C/NnVBIjub+OKwkoMWEDlzY170kRgtyHuXzHfuxCwij7huovRTRgktqxF4 + + +IQGCyhxRJqgApY1oTgXG1Zt2fKFz0CcZ5RHbatjwxtOCANZyowwfGDNlLjTOZLX + + F6I6sVx+77tfMJTmNEbk4+ZSA7BrGdXX9w6M9LdQLzLWRgf+cxjB3TEC+6WU0MK0 + + 
CrieNwcuNvV2oQDBEyxmiiAM6jsXH81dNPkkCwIDAQABAoIBAB/IpT0xGRm2SdVi + + PMeWIxOyRwuNnD4ct0kQZR4JELC41CDoaId7txAkF80lzQ1B7LRkPdNi2nX8bBWb + + RmyY7r9XbLPdnwewnC8cF3/XvNns5Jr4t1Awust5cKCyYUaa7z8CN6TjYNGnWfsp + + Z32Np9C08e7UzAr6k3H5ujNigQhBoP64Fi3ZpsGbZCH8MWQKY6TBLwsvlBMeiHfg + + 0gliCDdMc2AQREKEV8fAptVGJg6nX5lC0SyNP4e4SFWPbWJaF8Mb6822bU6sb81M + + Gp5rrEdFnom1Z3/fCAIsFvWEZfsRluazFq38gkZyDnLGZj3JH83mVtW4eCsJVq+m + + buO8RYECgYEAu2czdC9YfrH6CNNnsbdg9P6qVTUKL8pQE/Kpj/nY5GOzQI3zB82G + + qf3Maed+gzrfDCOMeMphD9yET4u6J5s+qZF0JVusO02PQOfBoM2DPYWdxDQoGHA+ + + P8nH95p1eTeZjuEfsREt0Mhi4hnoiuXKlxNcuD/Xzl/p3llFDNiXOSsCgYEA/qbQ + + qg/3giRNBarJvaw3I2vHc0NMq/gF4pIxjsijekkMjt/Y84zQHovPtTedzcrEDYst + + w6AuJl1ZNwAfBlTJnFPtZRcunDUmeBjJwkMDhxWdSRkOSbJD0FISIJiGB/fViwnY + + roxqfi+48SPzMTDwevDe1KQD1iwTUw3NxAIikKECfyMjNoKSXgVjWX5OJSMtPwCw + + vz86sq5DQMB8v04/imtIRlPUSb0szBMTg0BYJ2BzqV6dS7laONjAgA5qJH1Inncs + + zpoylhiIclO5IJUF85WVd/9RyDLM2N8c9mF2lJAl3KTtkQOiNPTwnZnHQdLJQzMQ + + blIdplkLos4N7uR5t+ECgYBzz4/EV+CbekDhG+wF68VjwYeCnw/GgdTDVvNc2Vin + + q4MfkyQKl3aq/bCn3LRSvC1vb2WPu1BhuEBzqAV0DqlmBDFJsUJMXkuxgKx5QZrg + + G29dqBx8XatDmZ+O3W7PPuIKCp9VupxP6Qo1+MCIFZa4gsUEddcc1wyuz+9Nfh8U + + IQKBgQC3YjitqxP1dxzZwTtwJ/c8R/lLWAasI6EBjUd7pFZuh/9igtV/urvpuqog + + Zmk7gs37xciTlmcDqK9pBy1nBg5dUyJbPjd0TkNPi7+9cGZD68GfvCHfDykuHKV2 + + zuSE7JqM0hDEC+qXRcw8lg0lODM5X/SsUqmbxj7yWrdqf3u4rg== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 8388607 + seed: w6uP8Tcg6K2QR905Rms8iXTlksL6OD1KOWBxTK7wxPI= + zfec: + required: 71 + segmentSize: 131072 + total: 255 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:MDMF:nn7eh7thqv5fcx66toeltgxtfe:gf643o6uocn3hvmapcu57cd5teoulsyrqlouysq72qegisvc4p7a + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEogIBAAKCAQEAxWHP0UAVKpUHireN8BgAJjXIQiynm9HdoIfvGrkdnP44Tghe + + 9MzIU50xT+ysUmTrXar5oBRDx98mv8YuJ5dpUoHl6Q87uVQhS1Qgpdvwh9MWY1gA + + NbOqNpveTvRvHGSLyxrysuUwwjerumpPWTVvhgLmxOryytbwsDb2RmS98yq9pX5d + + 
wsyuOf2RZ4yRn3y/QmrbAtui5iCvpzkmeqf+kuXJFYcbd9S2/pNXpLL+J6UzTovJ + + rySjXAGqu0amNNbkQxvL6Ofd+ZtOmZ2zGHQiFIus0AkbjaM60mDiFd5QwY39lXUK + + V3VTejHWrS16wbq+Av033vq7jn7Z7nOGzGg+yQIDAQABAoIBAFXeUomG5m5q/Sf0 + + 6LPdzRrSZPec86HPMCqpWHT4uZBV7GrOK3k2KaRui0ho/yKtMtPCEOz6Q+6M/w+J + + CQVCUpiJWFsGvIXIut9JjxZ403BTfbbkTtsN+WvebV1N65SfjU1jwNfg61Bi5buo + + ijKWE5lqY7ihOdTSo00V7Bf9tcE2P8NTXlM7rhZ47QvFkEwNoxHlSFJRMp+5GN4Y + + CTjIjBUvBjda78Xl8eOLmYg2Ct+SbwW714K0hNwVzz6//k1oh9GtCtD+n/ST7h52 + + 13mg77iq9jAVqj9NmRWdcd31mX6nFXQ3mK98py6sOuF3NxiFMK6Ginx2rrux7c/G + + Nmfkp70CgYEA8fV45HO0S9bC5Rk9TbFYhg52XhXue7Mj1s9FI38Ft+ZXeT7Fa63j + + 6WfOhiGyJmRR4cWdAJr/RZ9xqNyCZLcUdh3pBvpYR9dUyVpTxUVv3MIFHWpkc433 + + g1ZEAPWzdHQG8yVx6/OyOzsUg/IsPGwc2sG1h4OWf6SC3bGVic3L+8sCgYEA0NYb + + 7KpnDiwbZ1BlcV4MGIYL+Z7N85/jK7oXBJU0Icpv4OQdryu3JFSgDHp8bcvhMvyj + + IqFXH2YkiOi/xlvWnzj9QAP7FViuIb0Oz9bWiaNZLn7QB+Bs3oqgiLS2LvhbSQJq + + Bx2LGIEGjnPWJfV3lytqCI69qbuOlvdlf2CmxTsCgYBazLXLdahJdZS6CNi6mT0R + + Qcgl0rEmdrmSWUIm6fopYyWceHP5zs3iv3P/XhHO2oLn6RLcMU5uwEEVD3tXdGUX + + Vm4mkjgi7aoBzgX11/L8s0rcGRsNSk+CWBM5EPuBTjF1ea3g0BkopSkzwuPa4O+L + + IHqRGk6WJBSAQa5Ogo50NQKBgCo9pJhSP1YWhdR35ozvwPKU6ocrH+1PQdvuYAmF + + RG4xTD/o5DgyV3D5zQW5IMH0ozB0+WpfyAeJ2Yn3yhKNMPQzysXQCFFhBpe8beqM + + QgjFCZzl+Z4ePuckkyQTqWYGxjAWVOvrhd8G+hSGSaKT7ASfu2rPtH1Ieqb+k4EY + + Q6NRAoGAUl7vKlquxaJmvieV9q34/2NCFUu8aBlLAK8w3sirAVM3UU/ISeEYdJfN + + OCumo5DKFR5COKu6hkeFhqRI3ieukX9k3FMUiNemnquubGvTQUlkwEJjUv0IRfTB + + +gWW9npE1Z6LUoQ6su4iVRid9Goq7nLj3lfFCT99Y9upLbYcGEI= + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 8388607 + seed: w6uP8Tcg6K2QR905Rms8iXTlksL6OD1KOWBxTK7wxPI= + zfec: + required: 71 + segmentSize: 131072 + total: 255 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:CHK:3esfsjn7csgnyqmq5afbgtinay:xsrhdomrbrtzftg6u4hgipm5tkaomumbdlxi5hpvpvawe3bjikua:71:255:8388609 + format: + kind: chk + params: null + sample: + length: 8388609 + seed: yPi3JHKKbWaEEG5eZOlM6BHJll0Z3UTdBzz4bPQ7wjg= + zfec: + required: 71 + 
segmentSize: 131072 + total: 255 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:SSK:irynodq3ps65ftyonzektbciby:asfulhlz36ydefcp6pjz2s4osrrs3eghnv7um6kqogaiqqcins3q + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEogIBAAKCAQEAtBL2bv+jmSUrBy9rEW+f3y7FS5qfLTP/jryKOd18rpIELuj9 + + U63C8bVHvJj5ppuDRy1BGA91tVLgr0bLE9GeXWxI0pNzILiTpADn1B7xjpAzLAJ5 + + UuEPDoawcq6wIM0t5uNj7jrMGm5jW5C3Bq4vlYz/Jxfxz7UjbJtTyswhzRbxvYET + + 7Q5OhIOuosSdgrlUb2UK/rR54hDmAen4eaPi4Z4N/FuUlsL9rAURL4bp5LBV0IHs + + gkXUhlgXMc+u8DgfSCCGaWTk6bH9rXDhMBbKGKVhY0uU2lX2B4r0JFql6LjAvzrC + + I7vrbquJXCKTOVRE2aCSvqb3+83slsfkt298MwIDAQABAoIBAD3G+aNX8XTDMxFK + + e8VmEadcINSQrcYwvh5mYVd7vGAZePTs+qfAB46rvfoeUxuM44tCI8BQ4XV9Atcv + + BtF8SwPK9+rCKi+SExijNOVpXj6mCuAw0xWTX3qAy7z0YDlImtRIs4pRwp7Yux55 + + NclFbc61Kf3r8YnsH0SNvz/mKzxz7NZyrWy+oN0M/VBr3IndorZ5ht34ovOr2zkV + + We5rbMVpLtR/tYHuwtjH0J0aLs0TUCmzygTO2QCcAGI/XE4rAsCPr9G/avQx5jis + + DdvW0paMIYPAr1ioYN0iUwvXQVnEeEWINepKlKul4myCdM/OdH4NYOMt4McI9Mbe + + WOBaR50CgYEA1uUGQgTiyuNXLRkhf2Bm035eoY2NAngefWadNlCJaR9FwGkJYeM+ + + DAABxnjrXLtMTboKuZN5CMSetIWTFA2HgLnLLUvAx2VPaVTaHQCA0Mdpwqxaq5uN + + Mhk6QPeDVEWqZnfqBpT6wBGvus9Y1CKaaoj74WAsbjwwwhyxcqjIyc8CgYEA1oTX + + wUXNngPQRL4QBsdOJz2GlOWuBGee4ZW+GIVw3CjyI2S7+xWHLaAG8TsNqg6GHP4V + + MLp+XtTzGJprnK8nF8n5gNZIEUr3bPMBJBHWZzNb4EzPBl334YMORuE72EcTRWEa + + 6q3ZogbepWm/piOG0oyx+yxcDtMpR7IZrp4mFF0CgYBZ7eoagrTuNwlqZBPynEMr + + yryLWxNhrycDT4gHDNkUVvP3u30jq9dxaidUCZJlcjRSasLGOoLyOmY4IZYVVDwa + + kKYIRKVeTHVZHRtR+73soScPQtWG70e9aXVJbstU3vqaeyBCtOHiswQZZ2BDFmAM + + qVrPTFILp8C32w4fb6bnXQKBgDZmwgB1n0tvVCXavV26tYsmAzdHd/YOATDcNLUr + + Qg/TInTvWuy17O4ZIymR/EkgHcrEdMNCyEFsZ6nZn2jA0n0p72hI70XTaSPsDGIF + + VAYf9DDRyb6nnfFGtxwqim6yt6Rkl9rj88kvTM9OHhgX8lz66Tf1a/MmgdV4ySKL + + YMTRAoGAG3JKI3n4MM22AykwVmCkhPDoZk7/ilGkdE7NSrweNoHvePdO21NBiTfU + + NTIg6eKHJR5EufHTLNvW54L40jiGM6cSJAXUEh1BA9ODsPCsi8YVw7FRPyZBP506 + + AqEaxdVFEL/GB35RpJPJJIvExR8PqyDFxHmDZ3WEGGPPvXb+Llo= + + -----END RSA PRIVATE 
KEY----- + + ' + mutable: null + sample: + length: 8388609 + seed: yPi3JHKKbWaEEG5eZOlM6BHJll0Z3UTdBzz4bPQ7wjg= + zfec: + required: 71 + segmentSize: 131072 + total: 255 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:MDMF:umz5bassl5ndo65j6syf4dekqy:jw2rkz5kts253bmupj4vdbxw65tdtahvmtmp2nzzcgglyduus3bq + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEA3JDtfIt+04LLAf9VfMIMYrbtFIzKMlpLx0K7B/JpCtHWhQvp + + ozW2ttJfwHv+5dB4ykoqerNpdhxJnOxxrCdyHxf6AH7mI5gA8lFsH/0UsOSXS6Hw + + 8fMJxtn4pH6VF+qmd6qsHYyb6IDb1C9EvxndZuBl2GOE30+WjHcaMJ0oCbzgrF2V + + mUPTuc4dreqA6Mx8/SUIWRQxE6b2ozrW+eEH6VH3kRndHWtsqdrl9d7vUdqh/ye2 + + asady6qLCKCwdJ/R9/NxeoREpAm4pnQzSUGPxNHS8EQcgLk/AvrGH6EcP/rB+tZ7 + + /0P4fraqe781k2pPGrqXTD10zZZgtW664zxaDwIDAQABAoIBABePA07CN5Gv7q8P + + 7rmcoGYK09fWEeK+8kkeP4vhwIZ/U0Jyu0nLevCcF84fcGJrmftBYLgqYaFT9Cjm + + uF2C+RWJIhLbewliOvem6r2f8o3SXLafXXT6WJj8vyoSuyoKzi0J9chSNHTpDpHj + + Wpxuzs8mOLqcJp0TiykFr65xms0vRJGsZsqyA8JDPUBKRckdn6xtuaBgkYCBRQLj + + iHgSRXCgI+TTL/TkJzYEkSFvA9TRPKYOzuiTjdRyZWlZCK5r/aVHYVv8BZXoRMZ6 + + zjHqKNoBn0/JWBnxi3YNO6bTnM/I3IOSDoCW7pd2LQB65J48Hj2glnQJanDJ+M0q + + D9h0wQECgYEA/ZLlcPEphcFk1/8nQf6IZsd0ZnB8TCYSIj+ahDyzlVLUqPlRwnMm + + kY/Dk8nF3f9b3XTuD3AOYtMeDpzH1Ji0pCpuDrBJDj7bVMeqO3bvG9I4D82DYWrQ + + eBsPYLn1sKdQ3E4hVt+82kha6dJp4RjkINiQ+1wM9ehBPQbf3px7gtECgYEA3q0u + + sNTWMSRVZSa8AHk1bef2CwLYMPUWPK9h4feXcxEqWBDwmMKx1cGYqy/lYe6LAIr5 + + +44V6VfrNhE08dJisR729BMtZ3oIQfpOVPqvGM47NqZi0ckyA8aKfh4EOpzYQkH4 + + pzh9fjwJFY5rq+qNVNBlJPwwjSa1C24Wbi1sht8CgYEAxfZUoaPk4sNk0ywjneX0 + + 3yh/uym+IEToi0xUeUBagw0zcOeT6Na1GZa+/TXc/79IHNAYunyk/ooLQSUs7NB6 + + 1l85pMYDgteXq8xlHh9v9KxdkBjFpNwa/GlDzCPhp5Q4EIX+iTAK4+7w6vKWLmGc + + V/g618G5bJFxvQ8M32ITGsECgYAnbjlHXNj06L8qYzqFRvFcHegmuQE5Yhzm8BOA + + JQyvdomuAInqMwe0l0yGe7u9pLT+ip2LmvRsVoIzF8btT1jkjlwiikbO/P/7VuyK + + Bb39wX8gxUPYbC0sF/ssK/qJun5c9TunuMwYD194brjIP4d5TlGqw/GA/Sqv9HWK + + WwbNtwKBgQCHGgwM8CeTCarJGPf+Z5+VJyrjAtybE31yhlLpPd3OgCxxGcczItsA + + 
ipKu1boyc+nkLpOcpm7DvRK0RzrFu7XXcX03mdlxokuVuuxOSbcCUTTkWaBUyXhq + + PUgqs6k7wjKRmpvttA1MC3clLHEYdZ8uDPVyusNKdeRizqxyY8ic7g== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 8388609 + seed: yPi3JHKKbWaEEG5eZOlM6BHJll0Z3UTdBzz4bPQ7wjg= + zfec: + required: 71 + segmentSize: 131072 + total: 255 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:CHK:hz3x6hgz6osbyo5he664ntvxiu:7hpheae4wou6b2davtrizoumqqh2k3vo25erhpgrq5w45txmjeka:71:255:56 + format: + kind: chk + params: null + sample: + length: 56 + seed: YQ== + zfec: + required: 71 + segmentSize: 131072 + total: 255 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:SSK:m726xdiezljlbxgy6ud6622etm:wycmmsogqcaot6j2xh7fdfl7y3csgogszo5bc6pfaoivwnmlvsma + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEogIBAAKCAQEAirky56Wj85ffM32v5KlYNEzn4iV1CfBQCeBhkRUBqdvIN+93 + + MdiwR8yeuJRn1A0xJsEA5j96+xIKBg/ID4PsMtskerfMU2LtQgD8LdtKUA25kx9w + + 9UZWqap/90DNf+yfjh6SolFr/riRtv0pdRLNz2e9pykBAfhnyX/ergYV2OMCeH+I + + dwoUDB9Cqu9/6lLYkPx+GTiw2G58Sk06/YHrQmtfDKDUxbUmmnM0zSqb9JJg5Pve + + wEXEGqXeLgr/BSizd8R3ChJVYEp76uh2vNooNJl7Jp0QPvyKbg4b4D3CePFoEoi1 + + /Wub3M+Z421Q+YNb7fuX5AoX5y0AJpKuv0vn7QIDAQABAoIBABX6NL8fzhtttGIN + + N8RXct6oT3VTv1jRfnCuIG7yf8a4B96avMzEGltppryx4EgnReHwolKX/IURzM48 + + ilKw8Qb2km8xtriAO/vguZMPQpTvs7aD/OqS1/B9z1Ot69CoPXfvzoXSNfSPK6Cs + + t3hxf/MrqY91zs/P2auB61axJp1rT9bIQr9zVpcfXb5dr468l/ea+pISQ3hQDskZ + + 9AURZ7a5uuUSvqInlKI6NFHBAQevt2oC3tdcYn98scU1h4Sewgw6571Me5vZiomJ + + cglaSJjf15FtFaS+7j8yM2Xpi2sUk9O+T71uNDsoB5lfuNF9VanMADCnXk0qDZ8W + + lj0HtHkCgYEAu2XdRh1G6H7Rs0rnEGwFjrMwt0o7tilo51KV4pui26AR6BAYiPP3 + + TJ7JNebvZF+dTCIqqFK2gcb8Xa5c5b+9nxI5VFTbaoez4ad9f5kWSBCD6I7RKJib + + +U0ATHVG65n6k+k1Zi2ITyf/W0f7jZIUnB+26B9W4Wzfl8WS9rEuM6kCgYEAvYHF + + AVuN00PUDREvFHhvp4SzrUm+W4A3o00k8hIPkLsyvsvrSs4BzzaHW8+35AkYPQzH + + cKXn7TKvjgy8yDdTmk4iLkKuPNBWShqWY7M7bc5OexIGuIFPkp+1WOq9mp00uwqF + + 
KTgRduvaJ//xDxZiNsNMYsZ64vp7TGDSOFbBPKUCgYAI7JvyB9jln4x6/lkspghJ + + uGzcfbOEREqToZIzvXeu/9t6crHIa93eDz3DzGCgJhGGm6XuaCn62jAQggo4gr4U + + Ajkqs/PTCe1eFKzcU70E54xwmcSKK2JaJ/mYqokbFTUisBtz5z0zj9MQVMg9ALTs + + jnIWcc+gYp/vSWBrURrDKQKBgHLL7Z9I6r0T1Zyk0DRCUMDVrlJG3b1oCkwuKzdI + + oY03GSJjPQFvkcEIcy62wdqtd7VjzFz842XY0mfmZ2WRvl82/ZWwZwQH4H27ZWa3 + + 6EQ4OWpsHQ4fpyhW/vACIyFKIes8EDZL9KhpbxnT/R76nDw4Skl7mm1s9svpyu69 + + /wjRAoGAesaYVXpU7npkNSmhZrlx4kUETa1lVvj4TmT4LM9Cp8gKpVPeFc6p6khb + + QBUSwuYYgtcvf5oGk09R+q/KWwe9ppMmj5rUfgBkEVZ/ayC+7GQWEdtZHg6TAppZ + + VsDT5FKUq/VvHFHciSCvQKosCyQzAn107htm3epmi5sP2i6HXZU= + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 56 + seed: YQ== + zfec: + required: 71 + segmentSize: 131072 + total: 255 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:MDMF:3zidms5g7qzx5flfxcadclukpu:egrqrou64kwdseq7ezt66hkyjqbbjpalocp53yuiqjnh5ot5nmfq + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEA2Hz1ojFKwlGap0pdBpbwIR1nUr2lpVkCrPvXJv0hD8PEGa2c + + FDXahwSgTqempSiDmxvG/HHl4LRuvFQmEhdTsg0jEolie2h/33b02GQj0CfP7vw2 + + AxQmtfVWxf8sIP5TPQ1tUaqYWKww36IObzAqA4FnKLHuy2YQf5c3f83jhLVlDOYP + + wxvSucWuCjrlBOfXT0/NWvmDbPvkiNWhMTt576+0XiXcsKeTGuhZfRuj6sFjIvAi + + 4hCDepVXy6shquZO6MKqQ0tJUeXgrcpBWrvkr23pkQoi146x4D7mxYdVRzUgyzdU + + iQrRYJ2XyIuj+T3POB4ymE3gs3ffQAOmY08t+QIDAQABAoIBAAH669NBNxFKcxfv + + 7yRlI1dzAA1kPmLiCstF9wlizz3bheYEB48YXZvJ2ZggLyVh44kUarat6Wyey3sa + + nCOJUsk1Sg2vs7bA9X4RhdSB3y4wL9YPhZ3z5GInMMhyvx0ivmR3t9K2qDPHCKAC + + PAND2HdVcTn3A+H/GIw8nmcWnSvLHNiOI6waEn2cvNvIBKaHxuRaT2bphJ0at9wm + + BkCyM/uS7d96LwZ6RJ8YSnF3uYCro6UgySA1yECalrDgHRb87v+EVOXrqO+Qij0U + + ccEBi+TW/6zzrCUinG25uxAafi2UJGSNMOf2AsTGwqlj5cuRrySlLWCCJS3ZuOM1 + + 1wLNKGECgYEA3VVnMVrcQuf6TYWfBTfT/+so9zbvNjUsS/HPdlZtrZHxMylAHsih + + 0zeHdo8iDyxUKQYy+Li/1R8Cu8psoQ/iZD7eMl4f+aJtzuGywlBlsF2GQDfRvTX5 + + dSnuoiGM9A7ph9O6muTPUdb89BlNlVvgw6noZgTd32oBKonGUNlO8rECgYEA+mVF + + 
lqpLkLkTtjhugc5jwXhvoGLPKhU6GSYNcr29eO3JKUBRgkNJoX9bMLbw3EHAIjt5 + + WKCL3bP6QjW6R1YZd7iBqXnIMDjdLJYu14bS8rVTw8MGWNvfsFgJVrY7eKf71dNC + + G7F2yFXKpUkmcm6x4AEfbuKwpUd0uabgCMH68ckCgYBNUX5BAYqcXMlVt237tqr6 + + Zb3jzm72MtEMnqZoonyh+6+Uvb5GgrP1QxqxUgMF5ehohF/d/zwUSUb9LxOPmCrv + + 9f5M/hCRdiqB8Novg0Jiv+kcGePNA0PnqARS4wGIaIUwC8jOP0wlPMMUypoNqRD1 + + iS9EJEMVvsQ1hfefWqp3oQKBgQCpi9lK45S3Mhq+0AdDrdSuNDahi0ZrYGQuky2X + + /BJHx/rmC78lTRqWV/4PRlBhU8QdadgIwuzx+eQC4Q55LzufbTee4e9Dd72La0Xc + + elZsMYu+ilfJ41fbuEDajhpG4LgNWTbyOYAMtsq4kIeQBJQ88YWvN6AUygWnj+8y + + /uZEwQKBgQDVQWWlaEbJofp/s1B7ncYdVQFfnr8PuVpRPDqVHc3OcalyTYaznWiq + + 4lHor+uy5Vd3RLzrI4Lm6GeibnSSejYVZkh0UddDA7e2CRJD4ni1f2oAalFPZd7k + + WW/9ijHdHpKKp4Jw52d7Z/tfrDNukBnCZ/t9z/iYXMBeFf/XYnMe9A== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 56 + seed: YQ== + zfec: + required: 71 + segmentSize: 131072 + total: 255 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:CHK:wxmzfzvgaqm3xfzgaqrlndgolm:6t3tqcphsutigxnnzyp2xx3afklcpeytnh543mpqptquvqqahwea:71:255:1024 + format: + kind: chk + params: null + sample: + length: 1024 + seed: YQ== + zfec: + required: 71 + segmentSize: 131072 + total: 255 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:SSK:bjfs7kysomxsga74nyejdwojji:qhxtmljd5h5t5azmpoboidjf7ti5yerpokkmwtmo2dw4ppt42zuq + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEAwek+eTfmmunqMaNTfyfWdEnbjjeVm0QMhLYY8gC0q5Pid1v1 + + fjMuAspOREl88Dn//5VrVQRTgLG9UvAIbG1J052p3HP6AXKP+1GiLaXnHEycgvLj + + U8uZEzW2JS9xjY8DPegTAC2WdAjcgk2iys4OUWzTTMkRlM8gJ+04GAMWldTbVicr + + T1TWG+QmmhHIGSVWPBw+uTxsywDBvHns9asJKSCrnc8B56nHtnDe+QbiP0GDSK8i + + L8HidT7nULimgpwj70QYLRMS8RqbaYzfEy1kx6+tpiVKbsVYg03lw9R1RWHtA4CI + + LW2JJOXAn9YIFqUJm/9MujpCSVptrBpnIMskGQIDAQABAoIBAAlYGhlWcwTwiYdK + + UaCilLrmVeToyKC32wA909O86nfhKLbWkNd7vmIb3xZ7JxY0E/dm5cpDQDCHAB1M + + xqR48LzjOldOpGkTnPIVTo1zaEC3cAxUXtV/zbjSAVy8iEo0GR8k0t49lCQzysfM + + 
V3BSlSM3LV3Oy6DXiVBl5eqBouyQhMH6oKPhGR4OuycW0BPYtzNft9xzDeuHYFg/ + + XODl5cbzmRMHfvtcROw95U1lKKHig+44oZdLp3ghrP/QfAxsMniYy3Sp2cMbHIRL + + 2ovNPSEugMKN67PM2NnkeE3pWBaH5zUrSu9M5I5jaVXmeZovGuoLr3ss5Sfb2z5V + + 8NwCBQ0CgYEA+kRQECGtYw8E8b5XHKyc1rlj2KPV6bF/cOUEtcU8r763YtkzHFdd + + OI7ErgQI8I+tbNgAsBPLhEWAct9aH6HHep6PlwwA2XGJHKtVMY3FATN4U7C5BkIA + + IbvfSrIux3mBhfS12PN9x+KWvcysQmQYEUeibAOdH2priY/q4QYVLCUCgYEAxlpv + + BRaj5zntcxZ9AHjPgSGg2f0I4f1AV2r5PBzhKL5XNJLDhYRc1GT2m/LTpB6ie/3P + + 0xzF/H9SR9+G3cHMQoS+2VrX64+WmwVVLUTKurMqEZl/ZzIp6g4Lbk/pMJ67jCGG + + E9Y18GaUZTOuwQRKnOuMbNVORR5/h+S5cr+L2+UCgYAERuaXX/v2lWsgNoCGnOyR + + PtnV+fbN55ql80QBVz2SQ1AfAFc/RL7zGH2D+82rTslH8ukQGUaBHC71x5tirwEZ + + t1v82Neq36XYN5VdI28adia4R2ziDn6yFOPcAu+JuSndgDEbZA3iPJ0W4UiQWeWP + + ZgoAjo9A2jC8SRlafyAdgQKBgFywa8rD7qmhry0lqBotWkIslb7n+FuqfYOcMIV/ + + tVPVxmiB4K4m0T5LQ9ZSHcZGroUkcRZlDrvUP33onVxJMIsw/wIQ6m9gdO9SCaCS + + 0e12xcTdpuRxU5bVI1BUNVMMCfYMwFvKsP/634N/KD14JOm5RLCi6OVxwASfxG0z + + x0ZlAoGBAPEC1P9iODm4dxw7bCewFciF5XxT4TCSMHc9WE6okZuv3X8husTtZ1K6 + + OPLQqVDytIq/i98vp8YF5MMxuLgZGaOLpEPmHSc7w+00gXr2xxt33VWUQRzqcL4K + + RKEZQGP0jaVDX26z+qfM9ZCuOkiD+tr/vnhzcvZiaSBM6vnhdzFf + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 1024 + seed: YQ== + zfec: + required: 71 + segmentSize: 131072 + total: 255 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:MDMF:lrzl2tr6irxtqw27psdcusdg7y:6koh7cij6mugtzogurcttf6ed7ul2km56hqu72hy33j2tsn6c54q + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEA6cNZgrmKTqX0wV/DLvTE6vn+qllrAJWP3WehDE6oWhbLxpFi + + eyBNA4vtkIKpJAdmzP18k2iSJZZIv96WaYLkD9L3AbJrAhp4oBh8JHb34EpfuUjl + + 61L5XbEJPqgMKWx1hn96HVCM5he4ZekYKT0tNKIwkIMN3Mz6bxIJR4yfQgPtfiqU + + 364CJnZjFhHoQzzzGV2ObsiDnEzlketbz/x928canoDNwR934pRHrwBYYvx64VO9 + + g97Ox/YcKqFHpXYInps87WlUbNBDXE26/e5mrOpAxPLFXmnbGKozRWHL6w4Kit6S + + gDCCka9QSosvILbpjegPd4Nvxv9sB7qcscX+swIDAQABAoIBAF74XAnFmoCoYL8d + + 
Sj1t+QCj70hDCrtSh//B5caLwE7VexVhpHp0XYWG2E3BH7mA/k1i4LU8oz99BnJZ + + Go+kO0aIhYydcWcJ3R7hw7HG2Z64aJpsmOhZrfDYB3L6r/I2W6r4aGK3gn7KfUJ+ + + CDBc59w91nAnpj6h0k7Eq5tzcJJPImS8S2fePcM2zxcMvm0o6iHUJsKPzN5VJJpc + + GHTZq8/hVu1jyuzLEqCL00XHc3AQteiQoHE9gICeb2fn2jnW17ogWMCqXFq6qBL1 + + MgPXipuJ36Ngh9kX2OaeJs6KLiElxT8C/y1N0aetnLJlxwzV4Vh4kx4bnsrFBBK/ + + dRYkS50CgYEA8HGT3kascanqYZAABSglY1ByOWCD7dJHgk2cVdCWcQlZ+rDmvmaj + + LIJWXHOTCWIgIhYpbnV9EZp+mZ9gmcsobqh+viAX0P3an+GtRdBikWNpsbUvX9OK + + Me9APdVi0nDwmmfSRhzjQA0b+5gJ0SUEgbKXOl8T3g/yxa/lnkNvTc0CgYEA+OMf + + dr04EJwKJRwO5uZZ8c0Pf2SEbGzAjQifZUp3HWg25hto25iPGNbU8OJ5HkyZqJbf + + 5kPpuChRlX3VKOz6s5v4sNjf1kE8bMnnRDk8waD5MJQziEtMZ5utGs78Z93etP7m + + EWXfsbzTF8cNcQPCdfLc/ynXWRRx7eJ1YXLR/n8CgYAU+YNxr26Zl733ds1Zpc/l + + Iv5j3PSFSYOtbUHHBqQpBizQPqBSWbfASTppZDeeaO3uq0o/9YXMhFKo6gtOPzeu + + t4oe4cPSGmL48YHhBjWjAy4UL38Ld/OlOX68JiIxw2JpxcbFEP4N91bKks/Aa99B + + xSeGEwczpuaBsj9wl+dcvQKBgFJ1ZUpAvJ98IzxSRHmpneknyFerpNgLW+weDDlR + + 5479pRqtwBrpO5e+LYS1c+1e8ZXSjtHKdFfIO+dsbkAF67WwGj/1SovAx1U/u3h2 + + AjQgsg6vOzePwvucr0hvhV6gOpX60Zy4BNntNn7tOv3Tggzz7tY3NZrU1D49RiiI + + ExzfAoGBANEXl4L7qCTan3hhM8z2/RS6bVC+8Dp4E1Kyd1bxzpGmjuHrClf4aRiI + + kvD8iSd5tI3weoS7Nv4QP5FtLNKlI7zgAfWrli8uvCW7FoPlkF7JbWKfDn9gwSy+ + + 1WL/kD/fdr6kSg40dBbb2zqC/M90DVvLZKmoTOQWk+7D5EsBNyUP + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 1024 + seed: YQ== + zfec: + required: 71 + segmentSize: 131072 + total: 255 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:CHK:r3q3q4f4zbsipxauq6knb5dfoq:lkae5mflammzxkoc7gllt6qqyg5gqdxeejmxtgcscblqh5tv5obq:71:255:4096 + format: + kind: chk + params: null + sample: + length: 4096 + seed: Yw== + zfec: + required: 71 + segmentSize: 131072 + total: 255 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:SSK:37oycc3shngu67rijcu22qy7ge:rq65qf43dfsbraa6axxhtmm6xhzxeirrwyfjvqa3wmta7rbuw4ja + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + 
MIIEowIBAAKCAQEAmpUbuxF3CDJJ0hTgu2qnv/tyPJ/VzSwgewi0bI4e/g2dplol + + eHW3ss7agvO6XPbI6TWqBM1OhDrlvuGdiN1KDtHg4aaJSQJaq/0f9N5t/f/kYnT4 + + ipaX3kmkTrsFsPJJkFYYWSwGeFfGTYBUr39cfJELTdoFDTXAd4D6AnYkZHvUXGke + + 8f+lk7pxR2UVx8udWyK3yO34tQmaBi9uWNACfzv9QMpaYEJVd/wadtxMb1OC2PR0 + + W6fmtBIFIREzkHsnpdrUp5jYw1yHKBUAgmdtGBQmOvRAyOeUwc9IoItzSlVvh+z6 + + q2It8DphEixauzfEzhCJoZgdasmhBoO3bdwAMQIDAQABAoIBAC8xZo3t/xEZiUAB + + 77pIDX6nHXE0uukwl5n4Rlz95qhZL9AhpV7pUXPdgwiHsFXBYgUQxR9CLr5f3NQx + + vQ6TwJBVsvoxBaisd1IarS7s2Ve6T9dfLqHg5+yNPwRqRIqI7byLDFPtBOyon3n5 + + u+D4WRwOjAzwiqpFxsS1M56cwu+KBQtQ8/cqWbQeWeMWKHi3b89WhoB8QfLV7DS+ + + jRes7EB9nnW5wtUXLEtnd6LRD/LW+RfC5QEqeeSnVVVAhGgD54R9wEgS5+TsC0Px + + UQMaz+4FJjV54yJeB9WueHbpxgOJ9Fq/khINMug8y2KZvtnEUQ5zEPBzhN7AAie4 + + oWqxN6ECgYEAyFXYTEIfUtxoMGPuySPEntVOo9GRNmJAmVC75iDH1xlXZH//gGov + + hKA0fbGuU55cZE7TZulDfIarN820ButlprVmPBebSiR0/Zdkal7ogjv0StqziUkH + + tWB/+aPprCi4jKOVaR1+Li8RRZTgMjvDaZIlmaz2GOb72xFJ2wp6498CgYEAxYjK + + xHUkykqDTxkZATds7+aub//s2XfXPjFFPgf/7jhr7cVLGaSsIcajPGvPqfMqwqMn + + 87Pon56yhbfOQor6fKKYXE0BwRX//7AOLn6mFQVpmoySstbDLxIA0qdwyYUbMuBm + + 9o/YREP6sT+cmAY4iuXiJGu4xo8AOYZV5IjPHe8CgYEApL0S9QKax4S/mKtUvMpQ + + 8VvvIv8+Lj51aJ3fJcpnCxanqtkmve6TzLgA8iuectySlVnMtZ+0Az6qpWTeWaJR + + INmijF/NLxbzrWVFCcOp5w5uQO+/G3GWiSwlkJ+dlBiYSe5q+tlp3YiO520ZP7Wt + + Z67qhIiahrfK+8YnuZvQmnkCgYAXhBNvk+qPUpOTRQ+e/3Quky3NE5CkywmK097E + + ZbtoJrtikQxBv0LmunkQZl1QhCxhA39sGczlw8TI+nrJnTX4xHqS8m/1BqN1UwY9 + + LsKi2gQabAXC2KJf5irG6TwaIYh9ZA2d6L38UoNzunjv+D2e+4MShuh2auvB7WYo + + UMknbQKBgCaeMDyOJEYdMPS9Bw7JYAd/vNS9xOjE2Icme4HZUl+JdL1EDCQj7/vE + + EldLMt1NT1/msmPH0oOYi9XtCiO2l5jGtHA+gVVNP+LuxsHsrcnFVix3mjpDLbxn + + v8ZOIynGqz8Rif96TV/668SwVoUm4kG+EBLynIsPtAGJEYeNKIHB + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 4096 + seed: Yw== + zfec: + required: 71 + segmentSize: 131072 + total: 255 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: 
URI:MDMF:67btu4c4e5b4dictm7lug7rgp4:6ir3a7vofnm3lkackuqxn7bngupp7dhrcezxofoasrwcozx3japa + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEAgsuk1pcCXme4e/8m2RTs86cJmsijUmOrLHmj6Tl+G4Wq4hJ0 + + SjqO0Q0aoFIhL8iLxKaSm+haRNASv9iwp1QJvc2yL1Pa6PUFzSdwLkW4AYGSL9nx + + 9q3md7ctNyZhvf3PFM8xSnSCsI0R590M4QdWwS6CaTBB7c4Noc+g2pPhktcbPFZf + + EZX6X3GrfjwRMGuXHZq4UNoWntddAcoaaMA/lwgiDJvwriwRe946YY3gXPOc7wca + + ea+WEBQn8HehwPLIYGkPW4Edzyv6f69CbWoghgT6qzUMNuAQ5jcA9QGY5xlvNzGU + + X2omztyxcdgslXZryV5VFuFBer67M+hB2Y+VDQIDAQABAoIBADWyFs0GF7HcEO/O + + 0wsBvTlWFOpXfj0/r7FFitYfhTcVTA8dlmI24hTOtWSl8vvj8AVegQfCfvSLG3dp + + JTS8mncyb/lgCpnipWwQycwlUSJFKFe+uMgVomz5ZXWjqzLNdOtNGCZB6LlEYNp4 + + dGYZljMevekjJ53SHuSUEaxKU6vtTv+TldyJcCkde+V2STeK9R/aihABHQBVK3cn + + JrWOcJxGJJ5ZUE0kxVjf8r9QZx7eUODT9Bn1FgmIlUJtcJvUgzQEibdCce/zUE+T + + Ov+7z7UoAlynuEshFDeK8IEEDWSOqHqyvc0cg1GwRL7SGZo73CBHi7DYS0odNuUz + + tJArr30CgYEAuMrCS5otqSL4uHp04bPFSKepYB+/7QQgW/X8NL6Xa6gCRBdGN0q9 + + E/GINWagSkihBw1lbjFBdEKeq3sz+c/UTiV0j75zU57oN9kDDM56tlKhAvwaz8/K + + gcm6u0eePioWSGhX8bWUsJrxNVq9/CbJEJ6wjI7zV0KZBoMCvhmmiOMCgYEAtTJF + + FCQNuACFptHI5EhEo3qKq1tdsKyq/4VJEdEfMAgdwCceJo8HyVXNFXBGTyIdebJa + + cdVYUHVCROP+I65tc4lGcaDgmfQUho4+HawKA7S2BQSEewDGnDA8rHozq8zMEbNP + + znPhiqh2iD/gGKkpRol14hp9CaSy3DPiHrsB/U8CgYEAihK16ldhFqeSwAR/oMT5 + + +7eKzs/qT+ZtZ0j9EUv3R/FZABeD13x4mpY19/Ceg+KQrvxLdXJIPd4pQGfmBhpL + + v7gsx9q9wRVS3afAp6j/94r1040bW3sfDKr2Y0i37Cr2S1ProibS2sJqyDrtCaLR + + SSHJOLz3BZQ1UrBBNFlmHZECgYAtBwb/kE4QcaDE6dEAWa0k6ujW2GeZ5e7AfMDB + + urQDXaD3BUGK13RZ5gaG01XFiHbGrTmonBnMNLd5IyceetQcJ/rnddEasPsAzQxG + + l2ANt7Sb3pmFb5XrbllFi0CX6tazd1nXthhQOrjp9uWbez4Ul0hCHc8AvHruGb6R + + YGuIJQKBgAwXJb3Tg+jZ8Eq5fvjWc5s2fxrX4Z1BTtY68CjY7rAaZHoereCB5V4V + + Cbdw2vEKYpYJFlFsS631wrF/7ZTHdkt0Cm+MVo67lQg2uSKJpGLXw5YcHIag1JhT + + sIvtzyOnufvRg/SA/sbqapMukycDhFXEHQrAi1gIRadkMJfsRZed + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 4096 + seed: Yw== + zfec: + 
required: 71 + segmentSize: 131072 + total: 255 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:CHK:rdrzetasw4w7tuweqpev65d5vq:gzzb2v5fzrduk6kx5xx27mvo6dgnd65ym5lm7re53in7xuudhsxa:71:255:131071 + format: + kind: chk + params: null + sample: + length: 131071 + seed: LCa0a2j/xo/5m0U8HTBBNBNCLXBkg7+g+YpeiGJm564= + zfec: + required: 71 + segmentSize: 131072 + total: 255 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:SSK:ohn5yz4as4vgezj2r7ohcudtzu:onvxdfapx32vsz457rp3mqeap7q3hm2arhlje5x4ctndu2jfsv7a + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEogIBAAKCAQEAmiJHD0uN13uoQUJJFUQATJKvFtFBXoHEe83NDAc+cUkNym9E + + c0bZmuT+HH5GPAqkhH1PKcSXRq5/W4vZiK8safWJHzREdi+gI4pHIz0BILyd5dmx + + /bAdUMXsQMZq/QSzaN3HdxK4lJ1JA6Ncct9K5w0QPC+EpL21mFqgvLmnKmEjUBV7 + + 93G5vw+EFCkg5JqKqLJu17KBpTQYJ74DOWuGQ5kgx9zX9Kk5errD8EDSoVBRnfjb + + PSLZwoCRdibxem31dDmYp7niBjP7NKPdNSa4bpF+mQE/4T8PCvTNGiAqjUZkz+AD + + OQ7di4pZPThcqold+3DLCQgg2cNJHMxzsllltQIDAQABAoIBABrFb2A5uEZAJSZI + + lBcMe5zRMXYeHGOE2JLEWSQIshDNJocNsm7vVGZx9a0PRbWyB4c3mKNhkQDm2BoX + + fU4fVvCEhC+WTXnVpdPmdZqqQuLjv+0nVaIBj+XyqqlJjVWrFlpVgwqshsDRXNgz + + 7J/LJuBgxXweqMRQaxUuUJLXEDDs7wW6SR90mI2mwEWi0G59CWekRlg882KKhhTv + + ZTIqQ8J1VN5IVOFuWW6rRiGKLN0FYuX6nXYcTPneQi8uLGJ6lK3hP+UAiA+G8hAK + + 9w/EMZXAdBa8eVqlpsSXB1cQTeZNbQxo0FZnOY3IqasLo4oLvgmHI3zmce2xgXyV + + IcjB0xUCgYEA2Qz1AeAdhXDKtrAIem9+1g+H3jguNsSnNL0cFuy9S7AKcH3BlAHF + + 64j52VbKk5lNbmysR3cYmeh7CUqiuPW2PmOCVqbG9KKxttBSffFAOHB6UKblfzjI + + THy7oge01FFGtt2GLrtoetpbdHWwGog2paMeF+uWarjz6/1USgP1b9MCgYEAtcsA + + qEpuih7rz/f+x3zskcqg1qT5oS505ny/m0Gx/H0WkFzhgh2XM9VbEK29npAivBMm + + wx7bbIjrJnzJQFpHx8mJoZ/IsLXSKOMi83pun0EhpAjmePVPnl0+0JJH9h9RLJb6 + + esCLSVLViDIr4Uz1DHkKpHNseaZ6mJsr0xkz51cCgYAA+hz0ODUJz7sp3Vr8ahoR + + DprW9jvHBVWXWC6TL9eeSpmRbg98AhIJAGHXh5t71JnToGuaGsAimThMj2hyGrEK + + UNpaV3/XxA+2ufNVG8vlNSRnzoiD7RaBuaIClbRLrF38Hr0m4rMSsn7s5Ea5p9lP + + H4/YHbhcnJ6Edmx1tNTa/wKBgDCTlhFiEjeGG6zur33Ou8gZRPEWFD4lk8ci/nAW + + 
FeFJ64WXzApgrc6D7FmAk3KTQTTQSUNKM4fE7lDSd1Riy1tvVv+BGrddXlLenrBA + + vt5/IOYcGrmnkybV87r325LAu4gWr8etO4rUP4qtHVyOm8xBa76VuR6ohYnRrNwz + + l9LxAoGACGlXnDUpI90vU+AJ03iIqDDtTklsTXEVtzz7cBb/3iEXtqQhUghhkagv + + IMrFHv91YW/KqmjWIpsEBSQdkkxzvf83C4xm1C4aUtKa8y3IHA6WwkMWvkn8EM3f + + s/4FM9Bo7/jtQKGqiLDj28Z5kfTa/CXORqG1kiNtqej2zXXHIMM= + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 131071 + seed: LCa0a2j/xo/5m0U8HTBBNBNCLXBkg7+g+YpeiGJm564= + zfec: + required: 71 + segmentSize: 131072 + total: 255 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:MDMF:nstlxuewinqw4ugsceyyzo25iy:niiwkb23khhxzlsze3hkphtqke5w34gb3xau2yl4kyevxyjnhlqq + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEAoNBCT+MDYgWIm49IgY5NRDrTwPy7aG0OQ39aw8oSPplMVECX + + RfZqDzP9Dpc3ngKthBz+7RW4FrS8GH1mResg9m6U89aNh2HURZGPTzQRGMju1REw + + U0y4n2ZSfo+t2GgTeU6qwYCsKhVNwz2pJNfkDp3bcwW1MsYr8KHm4iydt2QEapu5 + + eyaalBQ6uPVn9IwWH3ObYYVdfaFDN+V1Ztkgc9t6dk9sLU5r4115k1PKPhvPBnh8 + + Kz70AJauI7d3OFItJqmHYd+qhrDgXNlQ3pyweBRvHMURi8AqWQ1ZuHr76hB3WPhc + + BsQrS/dRpIupucBu9Gs12xPtvvlGykRk7CsRFwIDAQABAoIBABHNrKG6gLvf3/VU + + 4hKRxgUZPCs/76GKfUtEtLA7VVS/1P743aZ9ttUzDL+KRzqDkmEvcpudzXEaFj7h + + 1ypDczVFHdF2/dkwn/cJu+NpYMEtMZ++FOsL8d6Xzec8EeOE9i72YholHCpWjHLi + + hzDQg+uIV2y/A4X5AZFU40JD7TwJthat2H0i3ReHycp9s5mSkInTtqKDtE3su2gR + + TB3RZ03BA3YddUjv5YlkFHfVKfWdf2XiR6kJt+tqPo4iNyDKp4G/NdNeppgRkfPZ + + BnvTZjJIT8w1PQS6Q2vafodQa4DJYQo/EAdxsBvba+RMAxPwhx0ReN5gqt9g9W89 + + zSAV8OUCgYEAvNUtjEtvS4V5iK5TLFLgE0jH3mvHbn5wPiMwvomralNwxD/oZ4kf + + Mu3No/qGxz8JfLFn75FJC5JWJ3YU4LJhNrlNyYONKry0PPxEO6lkSZB4gk9fXO47 + + 38rswz0Xz+cRhDlBAVPXF8WHkt7riqTuhvtCcNFPsXmB9hmtgnOyMl0CgYEA2gOx + + of5J+X+EFcK+YtJpDnxoFU1NxgKqZrw5o4HCExfieWoT+1SGf1J6H06u2gEwqj7Y + + 7FCFp5wq+Afzlv/Rg7A/lTOIbnB7+dMvJ4+D321Q378ouj6F/0qLabtFJuxvq1FA + + MSJ3qxWLrlb6xPN9qiuzMHtg3jiqAbNQx+fywgMCgYEAlsmwRoCSTf82rnNuDU3c + + iumqWK0+IriqjqPxL6WlkREyUjQqNEsl87g7Zv8OExr+S2kq8v3UE352d6puP4OR + + 
524PdKQs3Py0/KIBJpc8cxX/dSdGomHGxA06BSnK0wTUUv6ZLyMw9lWQzjJeamcL + + 5hPL2WT7O2Ao7ElS6YHTwS0CgYA0YjzVQqd9ppETNXbPgeUyUNwleiyczlkpVEK5 + + Md1y/wMwzzc75YRnpWaojRxgT3blATLYHUTwEAsXC7oQ5yjtbnToobg/aRGw5nhn + + FgnGrpqHGIRts8Y4oC29WvzzrE3sqRo2dCSy2/tzCX05w5PHRrbIiGyvGIho3jAj + + yGzBBwKBgBrogWqCWfccQPYbPla6ba4gZtLGZgdmzZo9IvpKjv9zzWnvSyAd0JiZ + + dMazfQuBoPlntxBBlpu9waZaw5K0eT2j6OmczoOflow1YT/PseEm9K+JgzpHejfJ + + Mj3ne9iBD8PfFtuKsMJKEpofYRiQL6e0lr7ogTAycog6N7vWVk6D + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 131071 + seed: LCa0a2j/xo/5m0U8HTBBNBNCLXBkg7+g+YpeiGJm564= + zfec: + required: 71 + segmentSize: 131072 + total: 255 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:CHK:e2bjefibvz4tgu6jgf66gw74bq:mugtu5bx7h5gcivevmh2gmwoc2kkhmobrzshkuj2dgrtm3siysfq:71:255:131073 + format: + kind: chk + params: null + sample: + length: 131073 + seed: /N4rLtula/QIYB+3If6bXDONEO5CnqBPrlURto+/j7k= + zfec: + required: 71 + segmentSize: 131072 + total: 255 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:SSK:qrpf526nw3z773vbald5la25oq:db3krhancbpchrqxt4nc2vyvlwts6yoqnksnt5ayzwq5zlcmgfoa + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpQIBAAKCAQEAov9sWVeoF7MGLbhaZtLS+hM6QhOWvKBvrk+zs0y0tsjwWxdw + + Ojc8PflqtV+xUiJh/4lHDhS4oYhNbjoPZjQNubB11cxOxOmhCObyYvM/DuYXev6s + + xTGv8dARhZUlQU5D5JvqoO1cWKalZoYeCw1N4yovpuxXQWHvUrq3ch0M+MBs04c6 + + OM/nbyLyMCq2FHUKqVe8aVqwYxN7I5yIhOjmyzjIkMibDEmfrBZAzvOqJ0SMUjNL + + IXN11MVpln54wwrNz7UZQfFgHEMzEYb9oBzWSzJu1C9MaE3GSZasQz6TkdcrOFRP + + XozQXPSRp2IcFTPnQHg29ZB/UaZ5eWUrVfkhEQIDAQABAoIBAASOHdGLXRO4eZI6 + + hjA8cQ/zDJw/HuXLmANnj86RdLVs/SaWi5jc5U6YE07ZS0PP2SxCgl1W3+gHvp43 + + eimxh7aqQ0jDymm/W7Q7fAee46K/dGWIC30BS/j2hx7UEbP3A3e2kcKIj52cnp+0 + + XM+TQht4mNdR4Ihfu8f0lt7WCABFmPm/MTRvOKplxVd3rMA8oAnzLHOIRddYDHen + + 4b1qw/QjlUzPqtH3jmDVSMu+QLzKKX0d+wPEeGvVzZojs+D6iuztjvzR/om0pUM7 + + TXJu0FvJ3XjpzxuJrIFlQXplX3bW8WBUM+aNX1K4BlTJQkRbMHJ1PnJmsVpDBQ+c + + 
yc8IbWECgYEAz4//F9IaIUpe8m16Ev4ou1WaOnDrVuitTJ03/r27v6XZMEeQSGBT + + xhiaPPxd0mFHVj2SSGAhp3ChgSVc7CojC3lpI/vKmyX/k6RYsaMlwTpD6CtBVCDl + + DuzIP947JtfVu6A7ucRGxSscHFQK+DxDRKFet9wBIXEKwBvkfJkH8TkCgYEAyQkU + + 9zFTbXBktUvgwmjIiEaAcHg6mgnTKeI/KaPh1OatvP30g11sZtXa+YuSfa6FUHWR + + aESSSVn5omwOd5sDTvS8NcwzBQV5XwHA94fL2clnrOtsdt4eR4grmhSF7N8hEKXs + + Pkbb5rNMrNfV6NiG2ilzho6+KAABgYGWAmtHppkCgYEAhZxQ/jl+Ho3sPqwgV9eJ + + ysWY6SPFKoXPALF32SCzmfOdsolupFh1tOAjcTyW/JUoQaiS4MoY/9rt699sSI37 + + TyiReNtdma/FLHovqfG2nQLvsaUegZRHPutHIG3ir+diK0xDBhsF32gXyViEUzUf + + rC8gdMRHagqFfBK4a0hrJ2kCgYEAsqeogxjJvEsSpG7/GaXG+Bw3Tjv6UCQFDYar + + fRTPv8UUhwzku81NZYINbJEVqS1r+hnRE+lEW31jNG102ePfJ86kZ+bFPGQl/UFw + + vElo5m1u/iPlqykvnYAsx2wPrHaKSuI5NQsBp16V+FCDH7808DHAIcc+xAtlSzLf + + Hd06upkCgYEAilmq8p9cUry8Qk+h9BWGskzWNH/tmxDtPA+Q1jcB2axiukrol1J1 + + tHwT2fQBsDr73oDUHBMZTYxRCHFOApff4k/bTBeCNv08xzhU0RXo+F3Npjj4b20n + + aXOwLcX3b5ihtM0ebzioMdx6PAZSqQbs0lP7mDvCm/Jpl8+DLANJ9jI= + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 131073 + seed: /N4rLtula/QIYB+3If6bXDONEO5CnqBPrlURto+/j7k= + zfec: + required: 71 + segmentSize: 131072 + total: 255 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:MDMF:lndkjnbiakpgufkyoq3zqiafx4:g3lynxko4n7fhsnts5knddchlym2hsb5tcbbuyhanz7fzjfxq3cq + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEA8JxtwE2NOlHhy50hjDK70rexn83ald2xwqtK3828FbKQk+UY + + +chzZkUHohC3hWMt9W4mQz6quedx7vtV45IXCDVk7q461Po+68Xb+Hu3fDTV0tke + + Jmncz/OvKriqT3eLGS2Lk7zLVcOX9wdh6XC6cD/qIQCs+cf5uENZKcBcu+bjLu8l + + ViHCA91Rs4mXoqkwXjvRq2xfR9DsjXCwCzhEv6Q9fswGrnAEj3K3Aog/szTEu9Ik + + 0sSC3NtmljeynQyLfVqmPTKuTxv6utSRZ8+sP+14yB77WHnnhBVDIiKaIdCFL0Rf + + jg+RZp+jwRQI9tRBVf0eWa0jhQKp4kbms15wmQIDAQABAoIBAAF3njEgWKsvQDYq + + B6AgV/BZUvz1OXwpf0KFMxfQOVpGSuhXyvopyIEBjKT4E1UmnhnkGk34jyqrh6fe + + WEr0TYIKHQr3P7upxiNLXVmMUyJs2L0vkZpUKS+Q3KmJ1n84OEzT26Yv1vEutSEb + + rwqz8b7ta/EpgtApccNjRO7eWSvgHAerVuRAmtSAuk6SVPjk3nc8IyvVhAEx4Zp/ + 
+ wczllb1AMQyMx9WQQAoFreRobXBwN6IQucAXZEvV5BoIerp15B6fFKwqpToG3KmS + + D0K/2mq59UsjlOyaEFu9vy1CV0M8XmxkQCatCwPShH88/x6Wbkf1ktAfq67OPv+7 + + QX7PFjECgYEA9VZBp9vOQ6Uc3sVIwN4oucBQ5hRZ/lmk/dyPNQD7HrXB+YY5uXDR + + BE/JzsYmKg5XP4iEGAEl49px0OY9D+kXY2Vr9rqUjStfB3UHO+PKXw7z+/NNDBYt + + 99DQ+vkjCKT2OJfJ/HI4lptJK9QWHBdN7VMl3Fzqn/PHV7A18nu7YhECgYEA+xGW + + 78+JvGRoMAD3YtzbitoQim6TIEuKmICmtNFf+r0GJ/sY6LEjTm1Gy6kNarp6jRPN + + 0KoxLbHwJpOH+1X5FGcWoI+CWum5jbEdUOvQUb9+JPm0CtiGF9ESrRbAtM7Tg9OO + + zLQLwx10agEZ2hFzAY7U7cIDW06ZsHIXJ/WqHgkCgYAMh4o022nuVHlj+ylbCD2G + + NwcqqPFrpwJhIKmDqHgqulecubkq+lMCaFzDHaWHUlIsYXl1jGF2AIr9gzStIlda + + cSyRXjgF+agRxm1HJrwIHMhjHqrZqixQ0q5Jkv2yDFKy0zWymdbAAlA7V8qFRr9p + + Fm0BkxE8eAO/O7WVm7IXMQKBgQCUKHMbnSs4oz/gZBGYo6BitgBg0JO90RY+nFzE + + A3JSMs25NjIizrV5CH9om6AxRU4ghnlEE8rlnkWLXjA2nytXYOY3ZbiVEavP857L + + K/1I4Gn+Q+R9Kf0nfNc6kVyy1gJ3npZ8MhtmzrDuBSxORVHKr5DzpTP5485KE2ma + + yRvUkQKBgQDuY5L4hsSoel7/1gDI7Y7LFDO/3YDHUjdezsy0ZcbAk68bC74ZZYPq + + TPa6D/RU1MzZKhcQCTlx2jpW01z7oqwnTDo4DnprLTzPq2Fz+3UP543/Zh99T/yi + + 35EmfCANEuaWUWftgpA/3Ed5q4ymuzGBh7R7WfQ1ZeQ/vfuPpCSzpw== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 131073 + seed: /N4rLtula/QIYB+3If6bXDONEO5CnqBPrlURto+/j7k= + zfec: + required: 71 + segmentSize: 131072 + total: 255 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:CHK:gy7bci6yvllxzvhh45khcwp3u4:qsfjfmcl64zey4k4o5cfnh2i3mzboahf7bhtggceszrulsv537vq:71:255:2097151 + format: + kind: chk + params: null + sample: + length: 2097151 + seed: uqWglk0zIPvAxqkiFARTyFE+okq4/QV3A0gEqWckgJY= + zfec: + required: 71 + segmentSize: 131072 + total: 255 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:SSK:amhnz7udmtjsa2duvirkppu7ay:cocikvbx7m6op3rjflwmx73qin2q4cvm25kqjble2nuf7ei4y2fq + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEAvQZiPNC4SwC7jB2W8gNAM2Guvl6mLO6xiNi0LCvdAzncfRmi + + IUXVNDbkxqETHRPQzAXz5k9hVuo5+iBqcrIfkHjokPhDfXP0Y4QN8/e4Sz0f8E0R + + 
3XD3FPZWwSLhRG9HIMuHf1ciebYZRSDI2ku8ul3RDZXebfEZ2PhWlOMztCaE6nVf + + X7+IYBg+qSND924gIbQyIZlKbYZjeAomPzKfSWpZzbjcGLH0DqFjMjdxDIe/3pb4 + + tpVubg1ssWT8dzisY+K1e7ok/qwgggeqRhy1+jjMWqma5EXPQqBA8my1uFZ/YxLz + + v2PMHqe+X/rnQ+qia+ul3F0/+XQj1hfLLHO0swIDAQABAoIBAAFIP8IS+QxWKzk3 + + Wpei3DidAZwZUB0rs7MdDJnFf+pJs8CpEVK9Nvgxx61pzPCO35CrtEXMRENcUmOK + + eOcb4UY8kNHYoG5kcOzKAw/51J1nni6HCnXv+vnxG5llN/qb6GPHB9ieLr4YZ6lE + + GKbCEeK+iiTflZSiKJ1ZNqJabwvSnk2QW1+Hg0XBiM4MBlHjvj1IPJ9GFOsgt2Et + + uSMJT4wzQVAevIEfa0Dg7uB6ahDMz/RZ2PgUDseVMdwKYuiuuT3Uog3Quqa97pYR + + Pnws1X9MXVHPm85XAOKGtwWV7y4Hgp6YsVT3SbJWLbHs/4EAyw/ExbAvuvy5j1df + + TcB9F9ECgYEAwhyO/KDviIy8UP1ilcZ3+oHQP9Du5jY9/30ATRluOWrSnl5fIg81 + + +y3KVlZtnpCwFAPivY2P/a7tsaarJY1+JhwtUomTR3T/m6DBQ+rTujQeDZVEQ243 + + ucyGvTDq9UiONkrP4SgIIEookMC9+rBjUzTsNZ4FWDxvAfYayn/DBBECgYEA+Uqn + + Xc+55BCh645lLfl4HCbw2oHaZiv+WJOUUvtwAUpLTWf5xHLWzQVXukQ5gMgZcSnJ + + tgHW3qKtgpPsqCIwtGU4Cjmaghci9r3TvZ49fUgA0z5ZIQv/wAkIOp2cf3uJ9uVx + + Pi+Xs5Wt0AD+uq+k1RucGmMhlFO3lxK8kR0+oIMCgYEAp8iXr7ZMVfOQM1FSDbRn + + sJjUsNSgK01neZdK01nP9MFpHIrmIEKVnm+OHeLHDfBywlo5ey8J73Vs78no1aTg + + DYD9jAJu061F4/eoFlS8fo7eC0+imcaDVI59SLsn3KzCgBtaZHx9yatQNQ7lJ/Of + + ZySvqAjXBdX2/fMEZVTZ9IECgYAdtyxkHlLGQMVMUtj0tfv/PxUOttPVwgC7hjvz + + +EzNmpGHVJGNPTMllTFz3pYMJ84Akz6cF3QJbdLI8eEP2aN8nWQks+EbCK7+Qnpu + + 6+HggSi4BYKSUd/WgD0e35K8D3nOmGL7SqkGmxzw4m16y10WmgftjUt/ZstHktAv + + bBD/CQKBgQC2pgs1s9B4Gen0kW8l6nVLWCeQ8ualP1wzG9ZVtayiLKUY2pfGDoDU + + t1Y+irOwSnXoF/KIixgBjXCSqwVphZjl91TD8TF14Z0KV1DRjRLcms721gaNwN/A + + C7WY/9mu+FBjue6lFtCnsHbbONilHqXd/GqKpUscosOMJHWdc/r5TA== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 2097151 + seed: uqWglk0zIPvAxqkiFARTyFE+okq4/QV3A0gEqWckgJY= + zfec: + required: 71 + segmentSize: 131072 + total: 255 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:MDMF:hqicjls7azttyinzxclbtyduli:cc7zh7zdovyqe4bd6k4q45uz37qg36swsd6yfegamoszoqiewoiq + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- 
+ + MIIEpAIBAAKCAQEAkAdhqn9YOrOyEP1wO6nmbagEawIgoX1+XA4cglBMRg2VaxK6 + + /RLYYCIqGPkuAZyX5UTfkOJXCrzQmywCq9XvSScpX7L/JGX6sla16F+FXtPaQPkx + + 5PTVjDvkyYxR0gr/RsVqzdpe1jjuwCIU4K0mgLbnaGnatNMHofBTeV3n3eW1fb4W + + sf8fOC+vsYtVJgMqk64hPAMtUqR8IoMgTPDpy/oIZ0I9Os/abCvRUb6QQJDtJRr/ + + b4guTqQmXKsuESwfC+oZQ6VUMglT/rXnndNV8ixrQtfSExFxfk28uvz1FKAmrUZ2 + + ULTewSN4GF20x7NrsjzwL92dgHh38keK4IKWIQIDAQABAoIBACFGY00FsLeXLmt4 + + cgaGwSLSb3rdefZ1TM0twW5l6MlCeCPNpv+y6+SB4CH256cdq4YffFs3v45Ogw9m + + gpN6kJbhAlEGxKV/HgU3vT0bXG/FGCZsrBdObUvBxqC912Vkfwe1snAupDxv2NDw + + zsv9lOil2R6pXgrquleyc0aV6Gy+PYtCQo72Rhn8ik2Qblp3VWDqsJ7UwiPV416D + + 9Hz/9H7FsCW7VLgL52IWgBh3/X5HlGlbjvDI2fsz/idwzu6ZJyRYn6SLCTqQEFjK + + TYd0xhYCDKwdKBbvS5Cu1j+v4rwM7TnvUn1RE9a5k718odC8wNP2/W0BocMbb1jl + + X1p3KvECgYEAvHoNB7rUjXZjY/wioqR3o5ETVr9WMF6VJQlv1MFH6Lc+vvRmc8XR + + ViQjrE2BQIEzyQL+bOXe43GCUfg/5uMosj9l9ZDjL9/BSJUVmlzNXeXlXc+/5uPs + + Mp8JFjnCPy2eVm5mlmgujEKEcF9neNtSZI61uWa7RrGxrVuVkxNtfn0CgYEAw6DV + + 3qOyFXJsGk1uT+KMzE+M84wOLJsxQPEmljX7LzFnbQhH8CcFqVOMRoLk1YACYroL + + hj1x2x6iFe+r+kbIIkuDnZD/wezY07iTBhfqXckVfAX9n6w0mzSlOoGZWlkbL/Ia + + p9kuBwWWr9FwHefn39mX/y0bzLoHBCMRZBkOk3UCgYEAgVBoQkZwcUKp/L7QcLDR + + GRt/nkQW+YbbY5bu8JVQJh4b4d5DsOknsKeJBj4DEWPUSPVR5RtuarTFikH+bgar + + NGkFJpArH/ywW4FWWhuUF/mU/mF8tAjrVOwCywoD+V7uRTToFAgU78zvmz4J+0TX + + agD0M+mFUoK2ek/c9xUcSe0CgYEAnh2e/wY7583FxjSTVon71x7tA+RNiIwe8Sh4 + + UaxryycZOy0YR+iiUMuwc0VUg6OlSfqpWeTL45kM1MIUtIMFO4LhbDdIIIu4bNeg + + LaqiyQ7ACLAm4Cmlk1Snv3QEaNvgd44tMUD+TLqdopmbDvDjnzAWBC+Hap9pEFTv + + t4HxIAkCgYAcPS0TVpKiTqOek0oBoLxT4VZfqp2E5OlI3cVPBP7qEJTIaxAn/T7O + + /U1/zJppJIf2svpZLWp+smkUV2VxWWl4k9so46KLBYVG5n5R4x+g+YbgRKioMRGm + + cbzUaxovQsg4RLRUoKUzqE7fxPCxSm4aZkGjnDb7JvFsxhSeafnmKw== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 2097151 + seed: uqWglk0zIPvAxqkiFARTyFE+okq4/QV3A0gEqWckgJY= + zfec: + required: 71 + segmentSize: 131072 + total: 255 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: 
URI:CHK:pcta7uk5cpioxzv5nxxqsogw3q:gnu7fx56k3j72bbevirn4kdu27yfpvttzl3qk2zypwqt7tw4rijq:71:255:2097153 + format: + kind: chk + params: null + sample: + length: 2097153 + seed: BTBX/ampNfLU+ox7xipBGiaSbgC0kcB8Gy7BkJB4oKI= + zfec: + required: 71 + segmentSize: 131072 + total: 255 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:SSK:ljw6sfa5f5n2x4u5asgq2dvhoi:wmjel2jkzzwwbizyoqj44czwger4xrjus7r52ujhd5mvlnkt4cja + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEApjugYoc3XSJhbpTGOoIpqlTOTKHVBBfMsvXXlYdFIHmQw+cu + + 6HorcanqnJzGf3L0oOV2BAOxv1cee/G3c1xVyzqzEXR66esHHA7W1C+9z+H6zTc/ + + FNjEixwRa7IpToVm056kFmnsNnAW1Xy/OJsh7n8YMZ0dh7GGjb+q07FeldpYJvzm + + 18gEovb9N/TZUab1MscTf/ne6scWGJxAN2njmWG/mhyoPWOha07WMPvqIRTc4Knj + + 9YD967WM4Oycml9Mag8HOpbBZ1DH6boKKHqROe7v0SjeRdt9NCfRWJ983E+SOJMV + + OEtD08tj3rkweCbG4i5lEkL5ORI56Y+9zjXM8wIDAQABAoIBABQJcOIfcU1xFPRq + + y2AHC3WkBj/Xa+Ez6zERD/zOkscA0DHE3nMYMr9fH0/kV8rJ9PGl5u1B8r1hB2Qi + + NR4bHZ5DA42RkDU85pz7ruphnMv/bacpxxlAraQk7HaiQXdc/hF4+EdZWicPqLjv + + 8e6lSFhCioyEZyhRfin81d7xbLi8KFd6+LualDFVrOjFmpAyXneQseUfc40kGC6q + + a1IIiNTleQeyFjIhvWRV6lxjZ3jbq/79s3UtO5uVcsh9r1xThTIQZTW9NdKAJdCl + + wdPg8OwuGASuyZkbE15ZTESCbUN2ERoC5ZTflO6qI3wcxSq4LrGA+uq1N2Ez/onl + + x/bur1kCgYEA42DJos4fx6Q9hkUcuaVMdF07zgw1lQ08QVj5/VndgY9IEoHJ9DqX + + F1ZdMWzHKKdPanGWiVOlG3m1xaThxkFvhri+hFb+PDQAoAJ2uN2Pc5trRP/lxE0u + + 9e6gY/bspLxYWiCZwtzZ10Qy6m5KSRO3REC++zNVCd/E3F7GvyqEGwkCgYEAuyhy + + zyi1lJP6zkFNKtgOhPwBitd9CS2EshSqtCViKS70GjyZWqxzyq7V2SBhKHT8j14/ + + +vRHvg7dK/lZelwE5qXfMODkcrzdt1Pjpw29xLTTesPRx+dhDZXYOwsd4Edk+t/D + + igH2M8q4V4iA4Xp2CwrVbwid60B/p0WV6MpFGxsCgYBfSQg2ubqHp0RBKGVJRwQr + + H4cYafVqaQl/ORJKIYa57Jl/Z/SB7Ku0k/Sp6bPsTXDyYnd7RRpD0VVjZh1XP8TE + + 6FaujuYrxH8ejunBvteG0vK5D6PyB4ZOeZmtSqUQw/0ih9bn2jVQCLxtkZp/1UtP + + xvJBwtk4MhYFY5JWOjLyQQKBgC54x7E6qYPADsnCGzglN827iWKBSVHLFKTnTs+2 + + bJ5PQ1t0apvCMGpGaWElkhpqmf+7ZmWY3GuL4000+AvS54Ch9T58yRzYWrFXyjJD + + 
zjgWsmBMWT2q7UVjTLK0evGiqKdGgpY1EH4huw45Hc9fCgqJ4R9V42hztn7BX4zT + + FL+7AoGBAJfX4rjrlxziDdVLEdJZQmJYOOVVzXKLJ2MF4zsimeNa9FjEiAgiEhBD + + /rPgnX4ktS6EmhBV+Y6jibMRNWsadW3Ax8rtH0mNuTK9x07RBdPg3aagGJdNBjty + + Dt09KtA9KOPgwTHhKP438g7Me6ymoZ4e0/1vrvEh+Buejulsahmj + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 2097153 + seed: BTBX/ampNfLU+ox7xipBGiaSbgC0kcB8Gy7BkJB4oKI= + zfec: + required: 71 + segmentSize: 131072 + total: 255 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:MDMF:xznz7n64qdxjwcqxpo3mufyxca:l2yaiaxgcyx5wch6k2f7x7dindcddjjnx5odka6rbng3gsdkzp5a + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEAkCrFEf2l8ZyPTx29aApnYaAK4d8BpZ7kA3dmGshLdNhl3e9V + + duskK4G9nuBSvk/SVq5v2cky6jx/LaAr2+3cXdpCvFJzqWpvsiGP5e8jkR03ED5A + + qH82iwztFpNu8Bb2DwhMr8LFXB18ijPYBB6GPFYsWFXjddbdkMDY4yG2OtofQsUP + + TwQiaRRZOkvdzSxLKZmsao9wrbE0oJanlWk4B09KL1htIRDv8SCuTGRHqFZ/ve4M + + 5h+Lp9ykUhxvS1IMymyExm8grH9jLz+kNO0Wgj5UTCa6l9iD9maNW4+2YKeYSako + + QcwEKEFln+7cBQQ1AQNt0+vPkLgXcoiWWGRIkwIDAQABAoIBAA1w52G82nazHYtc + + nvbvGAy6jU6Ev6m6LXut60eY9VawrEKrreCsnfkucJ2PlYynyC5hDLhAhsOJng6E + + w0IPJFSzD4tw9oFM6pqwKrM0f1uSs+UK9h/03a12KDlKx/TmIc0XUtWvh5NbLmwm + + LEAqEQ2EDUCzypqxzIN9RLCxyRnQbhY1B5Vsq/8anmZPcDIRuoAa8Ed8CcJlbLAQ + + qmpOjzvbDEplAs61HaIvVBM7H8tzZ3h7HY6zipPDhP3fJ+F8o5gGOZzLGYz39pHB + + lNrvrk4g5CFj6O9h2BBN+sW3hSE+KFl7jSt1p9GGewO2hnZsjU2NAbX5Rz98TNPb + + TJSkNIUCgYEAto9/b8ZzJ4OOHYzWoVgOMkhNuE0hNcvx9n5lFrxWNvx79Q4CgrQT + + Bk6e07o7udcapjj1eWPj+PfVFALSmrO1lbmAg3kYMzVdZrzOjgYKWCPFNjp/Np/L + + i5iJYJz6lY5rGrzcNSjDwRl4W/iVualEoYFfH/cYt16gUn5I7whELqcCgYEAyils + + uOIgEBzvkB/tq4GV0OhGcSIMBPES4ydhjZhWO9PvZANfklPM/0MfWWC7as1LSQGV + + jNVxmhrJeZzlqgaG1e+WDclo5rBn6UikjYxNKOwM1G3z3ekPhzdvbaf+nH3F6PR+ + + aBTwzT3hIIWFd9+CDeeMXqoNadJZLAk6GQn4YDUCgYA0Fo9qyfmTPaLv5X5bvK8Y + + Q68BNeiS2+Tmyrt3GDeVKscHbX7j4hNHimkgyhM+fBRbdwb7IrgqEjRWqFOE1l+q + + H6p+WK/B9Kj4pkhdF3YeHd6oEVq4sDE4XEZeLYwF3gPLNjWyaTYpQ2Ym/69gsN4n + + 
Iq2MhkkkELi3sNaIdRhXIwKBgQCnVeinlIzjqX/mdXc+WlIPDOSZ6ou2X3G50rQe + + BzWB6iiiSWSHc5QgyoedbMNVYT7q3EPUwix5WajhYCx+M07SsLEtEkUhhm1MnROQ + + Es0fjVwFTknoqmxvCUTTqJXJJRZ3gEFNl9/Gk2zQhZT3p2s4ZSw8g1f8+t9S4wRT + + C3yq7QKBgQCNp7VRwjdpSliGZzPCcKjPAy3fN8ap1XGRtw7I4AKCpXqS0Q5ee58B + + yKWnt+B8SZYzZUyDmOr2FwGC2xUG5gM9WLQ0S6bDJvrh1qHfPCtFZWH4zeUWmuPi + + B9vtQibh5X25jiluU+wr/pyFCoHb4ULo2ddBVB4JalLsrhjxYq0utQ== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 2097153 + seed: BTBX/ampNfLU+ox7xipBGiaSbgC0kcB8Gy7BkJB4oKI= + zfec: + required: 71 + segmentSize: 131072 + total: 255 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:CHK:dov2az3yktkdiinlcvvmhsjwfe:dddl24n4cj4ttibyu6j27ujcstssqkfzu6lp353iuezfvdu363ca:71:255:4194304 + format: + kind: chk + params: null + sample: + length: 4194304 + seed: 2NVWOXvtwcAFf0J/g0fDC+ypQbnCU7awZoCxcnhpkVM= + zfec: + required: 71 + segmentSize: 131072 + total: 255 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:SSK:67grzglua2vdwco27f6euk6ybm:cn6owpmbn2q43vfd2vdsq7ptpz73aibiboslvzbd4bdpl3frym5a + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEAzetleq8DMBrg1fbfhl3aQZH6aWLrtcBK9/TrvK66KhxfXfdk + + FU1I35e6O92+oaOMNIkQzVrGNTcQ43qqDzuJEBLxLZLIH3DP/dN8c2mbKlqF903s + + 7vFLyTTCzB5UV5IL1OhoWLrrqkdqtvSRO/gnAD+IjBUuCvqqDW6vWlLQPTzIkgrm + + 4GobyH8MOHX304CX/zll0zUbJkcukeb/XogSOg86CtTXy/yCbfU7FUf9QdauGGmb + + ikUAID9L6sXeb7lqOlJPj16cDv3aGKNsQaN+0QZStXHElpcYSppPrqqOuH48cCH1 + + EB4oVJOplz+SIeOXRVtJScF8jAUHwYq8I/1OZQIDAQABAoIBAAoNr8MBSmw4fmOp + + OEPiJHwbuW3DGPOjCMX5sjdgZycT2DVz6kdT96EwUjUGtX5EyDKZwoqUH6XiI16d + + KDJdhhU/0itBxagrT4GlUqfRLx8uk5MoaHnzU++Qi4uAVj5+8I7w2ISvKOmuWkUb + + xFTONRvrqNSrWrfJ1zBCstL7xL5IjqpZRUF2xvtZu1QPAxKz6MxuGhXxUI6OVTTC + + F2/hZin7Qb+mRb6S9RJlfWhmMxmne1DNxKNh/miz8h69KDeMKh7+EhvIqOgtxJQp + + XjCI0rFVtx4ghmPKJdvbOizXnSBx+IbJWQ90IPkNsplIt45eaVrSb5Y9DCJxDWoY + + xdm6aEECgYEA7XsMax7QwWox1PDqu2s8YA9175s48hhc2alBS/yYsf8adCHvKXIL + + 
5WvmZL9PApMFSCG6K4er9Z7nEdoF3kZBuYRm763k7qaO2Nj7JMDMX9TPZHTb5MYD + + TPiMvNCkqaV/S9ETPe1aeZU3k/aeX8RekYYtDgQ6DWVaZYg7IHZOvZECgYEA3fpG + + tZH2+vz7JzAZ07L+5CQEVX3hWCLi6PGOxqqz0cj2xKntz3XcKNWU65LWRNy/4P5c + + Ab8gfDHXX80e67QXadZcXWmCDLX2BmbJIneclxCp//JdqsVOdILZ6Zc7h5EATWWl + + 8YEh5fcgDX6nehKUwLv4aBNdzM06m1uem3Og6ZUCgYBgT94ad5XsSzhIhyh7uCL1 + + Rm/rLAWtUaoecGFWAuyei7pbzQNkyKcAdYEr7NaLUbr7pQoO62gXJknKWKS2n8G6 + + DnN80wacrxoR4fYA0txQJUuzDx27K39dMRRK40dUshTtV665F9DwrE6tCID0j/xW + + gpc1LwuoMSm3McfhA3otsQKBgQCOrE2EaJQRUEbxMiZ3fiX8ZvXuKSGMr6eex5vY + + L2GypfOOBhaW8I8YI+c63r8fta8SowpqCPmNOc/PgJyuLKub2C63z5fKKa4/AROo + + Nq8MHabWnmX73COIGY6MaCrYAKfsFzhomHI8R/FvGwf0GztHAcowwrnYZ9SShHnW + + OqSjXQKBgGdHwIpNiKsM52SevnkVcjN5Dz6rrHb3t9VNOby2LkdYnWdOG16NKPH4 + + IoQ62YPDmcJomTQ2AjEkM93ha1e1SZuQnnMv0IMbE80LF4qHuWXJ/gc24Ruy0xOi + + mQRQ1tKPZ02L1BTeEfbwSR//OA7trVsBmrck0a7Qjqb/ogpSnORH + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 4194304 + seed: 2NVWOXvtwcAFf0J/g0fDC+ypQbnCU7awZoCxcnhpkVM= + zfec: + required: 71 + segmentSize: 131072 + total: 255 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:MDMF:pe3cd5dbpwdrm7lmeepipeh26y:3mstlvoiza65p7zol4eenbiz6k4uvdgmmpnfxklaqdvwdz52oq6q + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEArSoxW/rzJSR0lE73K7ZigVVYf+cxvOJfNpmSGPZs13+5Vm6p + + krmH0fUIzmEfCE05m2dN+6y0C9JzfsdWQyqsuSeZxCD70ND/UUNzGMrF4EVGoEH1 + + vxkyK95Rjbg4q0oeiN9uVua07ry6C9paxjTa/27sFn8DybxJTdofCHy3qaijfHiK + + t1814hXv1rU1hzpKScDqvU4Y9jG402I5bCdccofKIrs2YX+SSW7CpqQTCoGWPOIs + + bNJK6hBGXmmmPc4aleACYOzOUsdWZUi/80OOAsMMtYMkQPVbwazHHupKpabIsJ+r + + J9brBqe6QGiZnd7QdlnW9SmKB/PWhrHrXd4gaQIDAQABAoIBABR97zxmzJpDH6aj + + V52tJigO/PuZ1Ol40nKoJsFcfBHedATV8KxD115RxHqDxMPbO6t3xKM5U08o1vEU + + TtGA/dKlbI1op9QUv3oS5M50xIjfOdXiKF42cZj+ZKFEQTSH/2gMJMcU4ylzXQLl + + EqPtAlODAV5CJqUbaoNTgiOjeqqRb+yfbTZZAN2sEL45XSexlSM76Piv5hJt34Hk + + 4w/eXRHnr+L9WGvSMnxAHfLr5Yr1PDLyFqRyE+rHQ5vIoD3t7oWySO0+OgRTUdE8 + + 
w1gnM2fYHLpDZu0PohpHo/51e3N427IWUFQU41m1Py7EDb6vyNLb6s3PnFSp82RB + + oaRiMJkCgYEA3fAXRXqiNCX/pX7iq/vH7vaLP6skvUKgjRe4s47E46xnkiwqaWBM + + FRjTQ169Ojs/L00afUKkuox9UG1Cp/cra311KHiChNqBqba5Eos+sk3wEBpql0FK + + gwdeKBtt25mJqMjvKGMaM2MYpP9gKn85JFTo9KGVAFeyPqhLCoMg86UCgYEAx73Q + + y9aV2euWy+0Z8HsZeDKDSB+u1l4xCOIna4gkTTn2PP28xYGVx3pcvaZYWVWUZA8U + + U+1eUo+s7Qctj6lXuPX2aJI1IlpFxNNyKi620srfnDlirq94b8upZySUBYUD0VzB + + SDbbqlVKsTyijUxhQuCSFelo/pjC2p9OAnDAznUCgYEAszRINivdiWodUM5xzRkS + + yVt9+L0Cf2erKAI9e48OYCA3yQmsfUXqaSaQf9ehx8FLNbB2cSo8xPznuudeaS3l + + e3fj//e+u/OLuzP1oIma6HKSIw6RfuyTc9WhK5VqUWVaiFUm919+KnwbzC8AwY/U + + 3gdJyy4lmA83t+xAG47iLpkCgYA7ITUTctXvqi989RbNuxNiIsn8auyuJzoq4BA9 + + ZBMjDXqYuaDNczwszktwFTNoVs5UBKbG5akblc7iaFKTidUfOykT8dxq7ABlcRcF + + 58hVhJtHuzE8d1OW/NqMXya2r5bevq+1OhAzT4aKC2IvpCHS03pLpEphvEVKxQgp + + 7skVFQKBgCSmyb/MDbRe1X24pJ9yc6EHNH2XifO8B5/9vI5lTMnHzaeva9YM0MGH + + OrsTFL5TrTTMpb4nQiQafyOT75HJCCoeA8E6tnNyhcwMuBNOCIoD0CpA30HzKVi4 + + eB+5h1t3mPf0kWVhvNcdidGMcgyr8ekP8GYtE9GnNT16U3PX86Xu + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 4194304 + seed: 2NVWOXvtwcAFf0J/g0fDC+ypQbnCU7awZoCxcnhpkVM= + zfec: + required: 71 + segmentSize: 131072 + total: 255 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:CHK:znxyqo2qglpkhjafnuesu4foge:trau477bj5u5wjfpy4oq5e3aziajgbpyfjdv4svyf27m7atpa34a:71:255:8388607 + format: + kind: chk + params: null + sample: + length: 8388607 + seed: w6uP8Tcg6K2QR905Rms8iXTlksL6OD1KOWBxTK7wxPI= + zfec: + required: 71 + segmentSize: 131072 + total: 255 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:SSK:5viwo3rrpmhruutemkyjhkna7y:c3pe6gxnospoucmcbujn3mgnlky3vs5dx2bbvqm4uwjreu4vd63q + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEogIBAAKCAQEAwRdN0XCfwI8rQ+l/QsT3MEwTmvLlKgs0mYInN/KH9uMT2KpI + + t1fjzryeOi6pPRgnrAFMRXGaUo5i+LkKVS3ULBH6UTlUvJmUVpQjToZFvbLAwg+b + + GJjM5lDYXeT4LimqlXHxOLCTwjoQljsM2whYgI7Yjhm5LesBgiCf55GU/lw2gup3 + + 
zfnpeqOtwmQfwzZoKIpEQ4ZnCzSLldlSUmR62haEBGiEfnZDVCIvdf+oC0Qqgua6 + + L9AofDFZYRemNZUGIRumAPoJMk/hPQZo5/g8USz7JiJKA0DI39nILF6RLpgAyPxF + + xX+X7bwcxn+ycSIeEi1lx2LxaJttyUXTg82nOQIDAQABAoIBAASx5dz9UY0TjhhB + + IayEcIQ2nVVrqYHLsvQ2k3CLT54DqHRgs5LtqqbYtDoy7z+Cilhm0a1wlTGDr8lf + + am5mxl1p9H2sGLDbRR2TzYX3wtNZeNFfIsTG1liVR6WEzzoEHlcy5Ywc0wLqeYPF + + nMromYpKrt5JptSEfc1lsK2nPwmuNm4YycfqYa+pXKaO2QKvmYjGVZ3Di3ZBKl2f + + pf2MRrf87aJj7admBk6PKG2dhs5FI0HLPjNmvk39EKpLFyJiKFWoDFwizIY200xB + + jFy86Ic0PGVz+jSsTWYH1+f2w1PcZ2Jjss2EF4iZrOXdGCm2xgXuf1R0FxFMs40M + + 714HU0ECgYEA+H8lLs106NG8OePE+1uxtPKGz8OxGWYZoxT223JMJzq7pxfrr3TK + + qRswldZ/LlzuZ9tcEyVATZqUUFcRD3kTn4/gQb8S7spEHTTdZm+fhAS+EyKBhVOw + + DInkl6HBAO7O2wvp/Wy1yz9kcrerQpJLWxxKMVyOLO5nX3Ae7gj1PUkCgYEAxuvg + + 7+MsoJUi2AymHp2Pbasmfq9cS6tFD2pPh3isFZFvU5hLqlT83BOZAI64jkb2z2/0 + + ryGLUtoA2zIRSKnSp+cKEZJxzQzRxKlRy+HupXvxqVRLwe439q304OOCclkNJPfv + + 888gOQKmyTfguP+KvBmMkiqAqZXz+snaHh2iynECgYB1/2oYn1c0duN6Wb3f3dq0 + + obWCUtp1xRXHat0Nt2iR+EHDRoiT+FGDm3WmsQQTb+2FQ5SlQrsWHqDuxWlEf6nh + + yuAiWCkVWtadR80aJ0cH2XiofWojdWnTimcR2a1cVAnF2hJyVHy+1otMLgsUwYMm + + 8HgKmHiqvUo493S4c2iAgQKBgEIhCm9VS3G7ApFmaxdEc/kWa76z13AEaPn98qBr + + unGVHrhgqc7fYAxdq4Cm8a3C46wEYQiTkzig5qX4GAza///3a755u8FaIKZLT7kC + + zA5RjP4o2uKGqi4kmILmv2f6OMdwcWHRGro5Km88V0XJFjsAF15EKO+3vRtDXXKj + + kYnRAoGANztb3ObR6BF8O5x4SWcwwQL2BGQ62zWv4EWESwjlKnRzxcbuuuo4zLjP + + ++3ZcCv6IXd73pFgq9GNziTJs6xS1Nfa1O1BcJmrnHVuLjfmrTP7UV8xciDFvN+y + + z876LSrjUehLzbPQbkLbhlX81cIPK0NjXLe0mea3+b1xvtTZlKc= + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 8388607 + seed: w6uP8Tcg6K2QR905Rms8iXTlksL6OD1KOWBxTK7wxPI= + zfec: + required: 71 + segmentSize: 131072 + total: 255 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:MDMF:prou2lgkp3l47jduvnely4pbtu:styymkrx5ja65ke2bv3w3oywyreq5mmdr4ujovm6p3pskoubodtq + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEogIBAAKCAQEA3jkvQeQAoJhf+j5aF4Oszl1IwFfW3SKPNisjSo+8Yfixq79K + + 
6mPF7c5zZBioF2fcTpNmEX1hjpkc4BZ2NwFaxvstAtjahLiHDQkGh2tGhuRi/jo+ + + x8lBS0Jtxh1FfdxvnfrJIk5fhY1/+3bFZcOQILIfROwHGpYM2K5MHOV9a87LBiTM + + 3Q71jEbs4HGgHAcShOVcy+sCUMpnSv46vvuHKRMEl//JsydjK0rZQo34NX7GwBs5 + + 4wnpCzC3CZ5mH66CE2aYy1owI8LEQKdI+eiU/ka1yb54yS7hjJ/0I3wX0X2mJ+Qc + + YMIG9YbIfQd0UKZeT0kNmWtN+3JJdkni3G7nUQIDAQABAoIBAGukJXj0OT2RMYRk + + qX0UYiM/2lqY8dIByH8DnD+kqiqGrYE00tQAakKLqydELj/QJk3FZj54jkXlcrA1 + + ESQJuvABgMcNRaPeQkSVJ5124B29CRp+GiTqHn+W+NdrHFsf6M0MSlscvXZSmTi0 + + Sl/Fv82mmjDnZ3WAU06t9t5UZ1FNXaTex8v0+AY5p3m7dvM1DWfJyTC3F9T6a3XK + + xkupwTTUnZh/y5kbedaXS6Dc6+C3/BG+7MBR3Hix3DcbQFYU/S03tP8DzaQ2aWtb + + iHbxI1Z8q2QpPd0rOMOuquW0PxlJSQFe9+ZGKThhVfBoUhi4Wm5qjVibnhd5+pnh + + VzkNozUCgYEA9r9eXVvEgOu4NhKw+k3L2Jdj4ibdfVyMUg60i4NXozEDx9FW8C4r + + jxgdVYkdnMFKoToPn9dFBxX8CmSf0n0+C2Vj9c1Bjfm0COEFFHq5y1cpLjg7ULZF + + +F3utDLtnpgRhDOlVXl6E2Y7LD6L98nwOd2apyKgekYIwJh1RwZ5pKMCgYEA5o5m + + A40Gt0bYW/0rvl86DVFKmhlxKC6mYfDoxosQkMgJzHS/aacef0M1G/wDVJw6TSMv + + ggVDq7Bd1cnkiqwlZCurJ2LDlywtkpguO/4kQvdtYgO2mzhcC30QwhoYKhG8LRf2 + + SP6BchNZ6evOMo97VidvCH96UNlexrqGj91/z3sCgYAAgSeqPTPLp6+6vJMMD/io + + uraDkdzGEthempUX6+7T8Je3YuAwoYeJRV1Z/WvIFEUYy0uY4hHMD+lyA/6nqYXk + + 9BIeQIsvxSDvG7as8gtLNSRqaccFRTojZd3FFI2T02/Fu21NHXB4da8NShtzKECL + + fb6BNPrrBRWjfyxONt8szQKBgBIXxKusqoVBewMlCATFhlG7OmaDbpzfpFD1Td8e + + 1Kr398TiuI66/aqxBH7wtPYz2GNrSnQio5/alFKNqHC1d623u5O4rW60mdLyPFaa + + 6A+VSTEy52ag8qA4LVN+Jr1ObP0A72PlDRV9rUWtKp5PIjetmooJLvkfRc/EnYC2 + + uiv3AoGAU5U7ZTY/JnisBTemkVKlw8Vrxx9weT8VGO5VDW2HbGbOZkxORT+njc7S + + YmsnNnr69IbUVMCGERomyhicNe8c+ePulCkKoKtOqni5eCqmUA7tMLUl4rrvKT4Z + + DCZns5bcsoRDJXKyz36qFgSTQ2p04ycrm8HumkzH/x/cboQqFD8= + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 8388607 + seed: w6uP8Tcg6K2QR905Rms8iXTlksL6OD1KOWBxTK7wxPI= + zfec: + required: 71 + segmentSize: 131072 + total: 255 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:CHK:mlzs2hbztak5fxjkrkuuvnpdpe:3myvisewm5uklimp2xucrwep75sm2rizfi2sq5drhlqjszkpdyeq:71:255:8388609 + 
format: + kind: chk + params: null + sample: + length: 8388609 + seed: yPi3JHKKbWaEEG5eZOlM6BHJll0Z3UTdBzz4bPQ7wjg= + zfec: + required: 71 + segmentSize: 131072 + total: 255 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:SSK:keqm4ealssgmiq3acynkwtilna:ro4e4c7ueczcitmppcr6cnlj6lk5cv6tqeb55e3yztg64qwy3wba + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEAqXifYzGbGnv/uvDk3WVV3V5T/jlDSMj7hYdrpZkRZNIadtl2 + + jdsE/pO+l1E5UE4Ov6VlLD7Xqg1BCnisJ+oInJI1iDuZwjvSRXAFl6wfc/F0pVXz + + kU51t1iyBXviZcx/Cc6P7JxEH2LOiRXhTUdOcTUx2lJKGf0QNVSUgTzfofmrOHiN + + ZMOU4sZRTYa54LXhv7PSltyJQlxUvuLnFCXzYY/9PZTxzpGzP9JtpFbccX2rYyG8 + + +mcT8FMQLk2IiHTK2AgfPDzJEQH4riMzxBCEjI19nT3c/spP1Hqy9UvZRanNsMKg + + SDTC0WQy7hIrmY6I5iraa2xuR/iGyjkYYejeIwIDAQABAoIBAATLnYr5J7DXe7CQ + + DZHSylRaZBkqNe+h0Agv+bLHjgBqTiSif2nMPAZTwaBtYnOGpxfF8F1Fhz9fF6PL + + ICu4nHNb2rVH4rIxlH/9B3VdVK8ZdnvVMZX7gFgbTSFULt9hUEvDmOCu5rTO4tni + + tpUWt2tOWpc3eB26M4dqjmvnw/gXlXE/kknYLbtks0cgLuFeXIax4TasHXItI7KO + + L6/NNtp5QARbC35v4c5MfLUsAKBptrx0SjIw+9B66r1Jsg2JXw5+rsTNHGKK8I0f + + CZJ0XX7TG8gy6RTXQxEi5FTE2XXJl2TRWRS7l9pbyM4vJ63Fn52Hdquh+sbUuyxc + + jMxlf3kCgYEAvdyUEqOf8MoLtXPyShODkVUwwQzMxWxzYRbXL2ZFBysvPgKoekeP + + 6Nq/a79aRb5mgKxMeh3RgDd+JQD+X+mIBS7dxWlcoheKam5pvw8uqF8sCzop62YY + + 0qtvp4CkYBNdUWczsT3nMhjS9QJUQCyUIx1tTLpi+DAKJqHinUl62YsCgYEA5IGr + + uZXnD3BAf7RIu72UgSF5SKx7ucqATRmneFd4ho4zvaw2C+GzX/OuVW56EIzFojL2 + + TXxaWJHe2y0FlNFNXRvTOBHEv6Mlm5UVAt0ULliJ2O4DCO6rYxAtDrnu0wOAectz + + lw5/XCoKDAFGSMenTxIRrHZ1wFkqVxcsROg+MMkCgYBJzG15+UP4EnEOrOzmwkMH + + wLdcsp79tjP67yfhcr0uFikcz2excBOODUkOlqh+J44sQczQQrrmPau4snQtz9Zh + + PWBSlau+DaxtxlEwRLR8GdJC4u7cYykO6jhSQXyjI6PIOncrU8aEAIYvWiJpd2p1 + + Y8DSbDiABBxN++rb/G3WFQKBgQDQaay0whJStHE/iLFl+o1+EYfLTvYyCI5ow+NJ + + EY6uOvjaID3TLHIsK9dvuCnA+oQvYgffuHG2oqT+htu2VggXyg8l7p7iouzkMF9P + + k1CazMo9fyhpdzX+TnyqF8/JykHd1ECDIAftibJMLMVsEB17MuHHyOuxGiJR+KK5 + + 3pEKcQKBgBYbOkjHTSnijJf9xkqvnk6M9bJu6LXvc2JjnixB2J2YxqlblJWw4L7n + + 
4Om6dx0vvhEx5E7XvNn/3ZyuouzGk5z6iKxCNFq3YzIOhe8VS6nozs9l5X385qUn + + oV6h6h8pvwzuimK0pm4i1V0eJSagW5kwNVKiYekaplU8We0O4yPU + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 8388609 + seed: yPi3JHKKbWaEEG5eZOlM6BHJll0Z3UTdBzz4bPQ7wjg= + zfec: + required: 71 + segmentSize: 131072 + total: 255 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:MDMF:fojgru3tcvv25rlf54gmpurhqe:czcchpdw3g5tablqxfl7vezf6de33un56mrhpkswdaclo7ekmeea + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEA1YgD17SLc99hZfEdDFeozhbsXLe2bbF2oCY4mx5SzH75Dn36 + + Pa5XwUYhgq7Ejv+nHhIn9w1byoY/pbaqs0WUxV9481OCX1BjCVQwS18ZfKvuZx2N + + SQs0I/lEZVoLgdH2bnabZG6qxPGXrzYgCYKL2D0rKadCCRbDfIqs8EgosJ7wE8pi + + 57CkHQZePKyECdpBivXzOhAnBVOIIBh98LbEENZfzfuG3e3Q+LvsWQQ5PUOVobE1 + + cxvzWbHN6QLBmNqu3suf23gu13ZfwtXKZMxfsKVVOoF9qYDJ2SGrNfQdoMIU8MuK + + kFl/ur79Ld9Bu9NBxA0TwjGAUtMniQcz7FkwOwIDAQABAoIBABirV6JOmgPfjV2d + + LxlzcS2qKVGG6f0fURCsicKmDLPSgYyikkwY/ct3Aj0aWtwYfiKzv0lEElRCEU1g + + XrVKdyccYhlejwPbAi4cO14h1Qx5wpfIKsADGtmDHVtGPWkYrEtTyZ0fSfxp2vfj + + nWzr16M6Yee0iqUJK7mSPeuespDA/e0/zrJjMSE4hjblphHJsPS71Mx89zEJywJb + + dZBvMr07LL/YT/I+B58nEPu0VdS3ekJXGWW2wnBRLG6n7pX/X1cTZGO9DRw/mXIc + + MDtpE8O8Wn7Fpzt1wN5DfSs+SX54YQmVHMpnS/jM5DXWQP8pgVUH8nHlqVDlZQjH + + OWL/+x0CgYEA9cwUxl6MI/PFq6TtWnHl92/dIWipAoW17z9iIlXVS2RGDgOGXLan + + xpf/R+877+BxEcfmE11IGpu3BpBSD7gX8glqH/VfhXQjzrVYj/33s08KSPrf9J6a + + /JJvgwLjrIO4L1X4geYTjDNHHpNLiX4Gg+OkXrtcQHN+CIXOM0n22y0CgYEA3mUQ + + /M5S2tyzDHK6cmyzf9WFZwyHhwYMdju7jOwErf8VGJCn2rrCDAdIlNWG+ru8fOz4 + + F/++Xxq8fYr0caOzDghK2O+IF0fPaDxYwFuHBT4vRHjVjau8sYidhIeK4V/JLrBF + + uZ1NxA4kX3d7f+6Y8YWMpie16ple08b6yN9VOgcCgYEA5HoyqX5DeDvl97pUI2mS + + YWHrRF3cFIsj5eOeHdp5bR4lfGtMXywuYozxb/VyWnTfxa5yMHfaSVmLVR+cGB6A + + q6ySqGhWxV+C1Wd+jkJ+GIAVSGdi/CjeWn7oBvkNl1PNRrr8SAsNCpqztjkm0wSB + + m+Fj7ebtRr/UXKm8VbKgM3UCgYEAgwUj1uxu38X6LjFBKrxjm8Jdj3JQPfoQSW+z + + dLhvoVqQQSKn4TL5s0BvQE/z76++whKRrwHaVAlaVtQQYwrAKFo2Tkv/70c9J/m1 + + 
h83kY/BYxIwzs/0jc6w6sKNx7IkT60+qJEpKUGDMiPnJZntY26GEVTc783Rb64Rk + + pwb8HO0CgYAFAmDbWNKQynEFHE0brggHalBZiAoBa1hmOqwYto4/phUIFdqjt9Pm + + dmiA72AuK0YnKfMP481tM1IcCwzUIYaxbFMDUN2yOuzGVvQppa5TrePm2KM52fp6 + + 6vZZcaQ0TXRp9Mo9BroG0CkaTQ/XTm+zfPpoDWPE9cq7L39g8meABA== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 8388609 + seed: yPi3JHKKbWaEEG5eZOlM6BHJll0Z3UTdBzz4bPQ7wjg= + zfec: + required: 71 + segmentSize: 131072 + total: 255 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:CHK:ljiodj4tijkfzej5nqdk5h7cnu:3fb7y2osytliwli3oez3y7ece6rjwfrpa2zn7uwrfmn22cisk4aa:101:256:56 + format: + kind: chk + params: null + sample: + length: 56 + seed: YQ== + zfec: + required: 101 + segmentSize: 131072 + total: 256 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:SSK:pijpudpxiovhh4mwyatsoo6rhe:j4zu6olizd7znzfvxqxtjkdr4vwvmvjncqafvjqkfi6ppk47gopq + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEAn64dg0fS5XbYmqhhNZVejtWlUnL5oqHiVo7tMYh17LRrq+lL + + rJ9ra9j+FATvN6rYY4FdkN614veLud1eys/hvt4bgJ0gWxOFCkCfvlak7U8CPw3T + + Jmg0xA6bwJPI6QUBysodMZQTNn7ZkFOglSAxQKsssPw9FrOiH7LwYDG+KsbrFdcO + + DN5q3zKELzVts4tlZSwuzKZe/DNiwgaLYousjbqInS6vxYJWfuXL4wP0igjWt9ON + + 17pfF2+/K/ys4VxjUoopbxRyp27R9HZq2dbmpv3DolTXDH8sQtUK0u3EvD3qemBt + + BkYDyGBQ6uIU/Rc6RNXUwT8r64amQc1+NpG4mwIDAQABAoIBAEkpsgQQyKSyy5Qx + + SjkO84Bmi5U3cQH/QoF+g1eKut113U+rWS0C7mk/x0rM5/6NnRAamhBiutv/qFnF + + AEXU8g5OHjPTyptwWijUa1z+vhqtdM4HO5QBcwvR1bNrA0chMC0GZlHtEtCJVo42 + + gwFQ+sAyrgt5x1O7grEbf9/TatqWCF7pOF6fJMkYfODAWbvdoOoG3icQxDj82Ya2 + + UakJFH5ZDKKc4B0fV9CSiT7HCFI6ZGBjJfFkRphU+h0+qdgQB/qWSwBZ2VhK1rKQ + + zMKjluwn0I+nrQUzmshKe5StHLwTezkWkaZ1iMvQaHl4fnw32d5c7SIh/hmy0s7j + + cqHoukECgYEA1TVlxFZKCPTJxJmHACU/tMq7HZqESDojplOlSv4apkWC6qJkJYUn + + 5/VwYmiVbw2Owk4borWN1qaCI0EqbVCkVi1g/pk1jAkRdG0yz7IBVGlronAvttWk + + n8aw7sBXkQcaNybz1k2KVfREFHb9kQfZ3e6RpiKQ5YiMnrFVUku32ZMCgYEAv7pw + + 
nK76qG9/GiXRZRjJjUbtzxczwnG1jN0BqW40jK5Wjku8Om/TwceJlamgNwrCYwb/ + + pe/a9lhYOT4f57sOmcHn86L76CKKY1BL93khF8AFFGmO1bHrs7PaJQOtr5aVKYRP + + ehFNOwgJYCQWPF9yKoen1Lx+MJkwsKAAHwyvadkCgYBDDYcS51xjUrD9/pbBifVu + + I3ATkFvX50j870OFwUKaVjQlHKtITYdOYRdWK7QLeAUUwMHaOyT/g+BbvAve00TL + + wXvGtmJrxxJRPmKDhWT7qifqr0OiSbB7e157x8wCVWx+Oebn1/0QqUCb+wwmB4US + + UgxGZoqRVY97/SNrPVr1twKBgQCDjEJT3uLwyn9ky2neeaFgo7frDTpgQXCVk9Xe + + EFVR6RROUbx2Q+AA5w2JeHcLDQDOvTCPBAEyYO83Z16wunGMIbUqPzujzH8zIRbe + + V2fTSdayaLKuAIN+KvqTxvBWt3TkpXl6gYCB7kOwiVIQXlSQxb7rgeD7K0BzD3TF + + 2QhKEQKBgQCNQVMGBWjXXo+pvyumYHBg8GxO9ZsfI5bzDyNgJqG65MCQDB47hrZg + + Ng0UwIF99v0tjfDWmzx/ZPIOJFhHYv8rSKyh8j/u9U1GbquH7bpsq7ICNl4DZ54u + + AoI31FyAYBP3wOIBPLe/SxA9MZhoIQVBh7d//CKn5+rnCZyRDqMCBw== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 56 + seed: YQ== + zfec: + required: 101 + segmentSize: 131072 + total: 255 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:MDMF:uqzfer3za2hbweacbjh5w4hvjm:ujlv7l7tyq25e5l2ctgjunsutsjx7nxv563675sshijvc7rpd5la + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEAyW1VZvVCO0ryuRbM14typdFJ5+iZtIiILWrHcR8W+AKXxOdk + + vP1XFbyTzJiigYY2V4z6yIYqq27XNcLpPZq2RuxJ3bl0VSaeRC5DzTkaKtfwlL+W + + R3qE4Et2YpgGusuG8w9MNOsBZQ+BbhvrOzU3aro5aCBt74C7cdok1GXut/+JPNMi + + AkAP6ZWfVaUYwFzTEwmhwLFotyvIXrx8p9Y62XrtWNTKIvr2WUIbW67lyPkPcvU3 + + OMs0iPRP8TnJpo5GU5pg75uuMpsugl83w9ixt1Kwziulo6uflBkXbeB40nbrudmH + + ZBExZmvcyHs+MM0hPI1s5NHwG6/1qb6fg72oXwIDAQABAoIBABjLnDU54MbSwZFW + + RK4N7PWLj2j8YZtvKTBKEjYTKS0riIpFH8oB96vl1F3dtjdykZLyeFah6XPEB7sG + + /NZICsMtSCSCtVbcE6R5+3+yVU3L2kI9WV8ALoY7091sMHvjHQAjtHJZMYlCwOCQ + + kELwGJvLQ9DVGSNf+fMYcuswhxPgKVv09VCjOw5T0Bmi/acM1diimO+YFx1HA8uQ + + gm1Ent/v2z11zDPZiAA+Z9tCxALdLOmGqyz/GWvfO4p4S26YD7JbF1bfpBk+2EXZ + + tHrVM/iK8BJdp6fOgjgOGCxungdU+YUfv5c65oZwQCM+qzyFzaIsICyXWptvX6HR + + reF25nUCgYEAzw+xZ0D3AjFSAC/WqkpwaWwpZRGgAcGRQpIp3VYNooNZhVJ+jWhq + + 
UEIEKuF7DEQq/u/y48QVQz07mOuWpFNa8/t2D3kt2RHWU80kLVxvBnkbsvnInjcB + + cNQSVGT7zFGqdQJI/GxYpQN4m7PYcgZSkhQo0CuwP37TEX7Nf/DQYIsCgYEA+Qi9 + + mwkcIWRWI8Y1xjcDwUwbitmm3tD7AJWmgLyTtW7f03XLk/TBLBJPf/9AhKGAN2zL + + P431+xoab+tiUeWAlJqYkCqrPD7eagEut9HOI7Ttl1c2NN3E6o4yYu7IxD2O1BoZ + + wx5a7hxywIR7f2pWsmGhOcZtMMUp1wI0IrHrnf0CgYAf2oguHD5jpfa5dKKPe/gj + + H7KWi8mTu5V/KkEqfayHTbGd4vz5ABEq250Mg7eMQYhjw8IX6/hhabAbbFK2YORj + + GFInOzskY6wXJD3mhIvH8SWjuO11+XxNQTK4rPhXjFCuw3U67+gLKqeJPHeVwwc8 + + 1cEZlT795aLO1DUE86T61QKBgH+jgTrTIn3i5VuUnb8oN159WaiDAco2JlAYY6yb + + +sEFQOcq+tqsmc2y3NhnxXO1Kvg9ZLcAVdELgf1XEZ+UF6ES05sgo39PYcPHM2C3 + + wgX/F793zaqu99yYYS7f4Drkqi3/6rBdAJIGNrKBtKKLqD/pVi88in5yr40p7frS + + YkcpAoGBAJ/hMr8iXNI7cOPDx38heBeB4AKSPFBDJhmxNfhk6NbZvnZgyUmbZqUJ + + T3ewhYYrOhAJpGd0sn6O0ReL/R3atMIPKxdMp9lbxItASmdxLtJSFUx2Q285ml/T + + sgmrKsiGM+UAVULBB5kVr57LSmCDzGkNqup+k6RDt2u7zWPbz0K5 + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 56 + seed: YQ== + zfec: + required: 101 + segmentSize: 131072 + total: 255 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:CHK:mftnvh5l3brshwoupvu6kz35cy:sncfokby2tjykbr3zgi3i6lhgerg4kz7fsrslrlxdy7upwkzq6lq:101:256:1024 + format: + kind: chk + params: null + sample: + length: 1024 + seed: YQ== + zfec: + required: 101 + segmentSize: 131072 + total: 256 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:SSK:24fiitrvlsely7grnfn3ghrsgq:2kgylma3t7u24nj5tn3p4p7vfyokvyryp7mrymngl64yib5zmfxq + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpQIBAAKCAQEA4ofaPstvgkxaUAL20n5dlDVSnjxTiNPDctdvy7WoZ6fGx4wF + + m9F48+0Pa6bnelrJgQg977Yv+Ne65eiryX1QS8IomGJq+8xSWYT2wfiOKd6RthPU + + dp+XhuVOdJNGjfoYrcKDRiCDhAfnSF2rT0ECVDDbW6B1rMoQihbC6nxPMKAgroMn + + 0cJfq+Z3/9txo88BCGXYqNTPJ0tB4U5kQikoylT3KehNcqcu3wBHf3i1JwAAQugi + + JmZPYb6KWV8w22MBuvijSW1l3GZeHrwbhBo2QfNMeEfHHuzuy7jmfXHvsMM3vqUV + + kB5o4Cvxs/zVAX5MkteyRZITudQ3UTV0D0SWkQIDAQABAoIBAAm6xFzFF8LUwl5E + + 
ZP3bz+t2h4K6BuvODfYn8FUjYMUkTM0zo5FdOQuKXOAGo6jlhFkb6wPEOKGWASDb + + J6G4Vai4q/iw5XUaLKv2ojM+QzuOI9uvqgT29tGkQK87ciDVTeu3T98aX2ZeK94M + + /ozMDIG5is+6f8NjobvknWDn5O4q0HsT2eeDbDf/FIp/dIRDjtp6BJaDy3Opql7L + + 56RWuBj9b3on5dC4zuzKJSM6p0wVsXyiQxI2U6qxDdH6fdzolpeyrpzjpM2lK9I7 + + rhSTgm9E1D3nqgC1mCC+5MVuZNXuTc19Ma5pEP+Ntc7LonOKbFfe8ysC48G8Gn14 + + oJS5Y1cCgYEA9wmAZuzANhyraO2rltuBvWSVny8YiYrVNAatPzNiGTtQEUCdTN3m + + MJ4yprzslUtofLqADBmCTN7eUpYBVuTFSSKK7DJAaDHzIy4h7o12rS7ZD6tkj6D9 + + H3HbmUJLylU/qzx7eHX25WJwEkU9cK/ufoTc799noJYlBhSltNLGIO8CgYEA6r/i + + ugpXuN5fZ2Nu3T+WPQNGEgwQdhrWD4J6HnjlEFfPuj+gxIe42vEU/1slDI4ujgHn + + bFx2lN+LsmbkDsweOpAJV6KqLUALcqdIrInX2BEfhs9KgqfifGROZHf+FwTb9UxG + + 4g+FeE8W7FDz4ENa6i0GvuLRery+zLAoymApwH8CgYEAgf4X+REf29meQSq/njSH + + wteI/CjWKppJsoTI6XbqagiSC2IK5AXoOTElyiOkArOZmfixpKxPqo+kQaT5s3XS + + cregjsWqqqmOHbcK1/LMvjjms54m3oWCbOeG/NCr/R560GqVNkAs2WvBOXwB5qhN + + QXo8oGTYrOIVPWvj/pDi/TUCgYEAjO9a/XqMI+9Ns9KckrRETKkUfm1DzMRb07/v + + 9S97xo4Rpq3gpV0efEPU6WIdIiaSiKtX91Sj1MlJI3hmXwPo+hvToAuGw9f5h4Ir + + PXscXRoapWL6RuroLOpDrknkAInoTKLYw4uyBALnrkUDxZZqlMEnlZ6zSU7b9iOk + + uat2JZ0CgYEA8sEwTSjLOBpLUxnFSm2oVA2lsn3cNr40aHFfqA7Uc4C/Q/znSfV7 + + MfmYuykvFAemOzdHqsveZakeOwJxe9gQQ2Ar73kiHtUt/vVQxCoAFnumW9SBvjIs + + 3/8J8VeUUgbLada2ki0hVIWWYmIG5NmF3+QB1c+/Jaerh1qeM0wUnl0= + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 1024 + seed: YQ== + zfec: + required: 101 + segmentSize: 131072 + total: 255 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:MDMF:gdvwak2g3obbqzs25y62axubiq:c4j6xu4nazb4axrd3xpouuhhjso4oedqyulo7w643rimve53rkba + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEAsc4qFPbdyhiuLD7dSe70KFACIY7EvzKOzcWH3X4Dx0LZ45TH + + 92euaqW27FGBM2T+z2SiYaTAyPEDeqoOiulRZxZTk2HFv+VVUmjNROlbh4gQ8GwG + + /WwCq7eYTWW/N59N/5cSlyO81KqF2BTtVEjdt4Hqx+5D8nkwLoDDd4hHy2EJAL64 + + +TqdS9030116IVSHYuMdW5bn83UQak3+ihLX3AB1xASfv5h1Nwqk9xiJ8PevugC4 + + 
/gbcwJEqAzxMEXE2jBXrzLaHs4qeCl8wgzg7R/pHlQMOYBzUCDna2ovT7gHHC2Bs + + zmBj7uhHm02uu7dYK0S3WtzYDtbbA7qZrGMLvwIDAQABAoIBACTdxtru7sCrCl4R + + MMfWHFjJcg+sLv4nyPVAajHSIY1svonR/P4+yKrDLmDka2IRJEYzKvoM844Wbu69 + + mONTijXSKsUJxjtKHT1HjpwluH0rCLwY4gAkp48cM5+Eo7ewN7dxhwDAf7QmoTbS + + 6/yIWTRl4xzOOddqKkPSHfVIU/6GiR0taBb44yz7VCp7iMSqa1VHSHcXBxU+lspp + + f53a8mZ8+nupsLGTsne70JGe0ipq9YqE370Rz4J0Onj1EtzOgQJN6fQdZoD1M/Jq + + ldCIs7D94XZ2J8Xp1WXkiSE4dRwNyfJiANFvYbXt9UOCx1XXGm2tVYTUhFbLDzlW + + R9YFYNECgYEA0L0QDQHCJexuuH4AS4BmGcLj1tSrHA7BViSrnhPrQTjaWeqWdiZh + + Kv01tWEeIdODCmSIJPVMS6aywoII2bgQAFvzOX3tAl//Kc8wdFSiz7WASyatVW3r + + uv7dtTGSsBWN7fXYqvjr0eB/mvyqVBlYzk8qgSpaWEhkkudn/IG18jECgYEA2hAl + + hl2LDt0D8En7LxiKIIB+0s7cFMJfkjNRHyXcEzbmTNHhwTQ6puYE5TwbSRw4JMiB + + oDIvIAMgA51Hwk5aC/aGoo6z60wvHTzb0XpMlSWvytuhN5fRH6xXY8TYM9I39ROI + + xU0qvGbKgQQulwgEzRVGc9RAk6BJ+9dN5DH48O8CgYEAvqTuk/KXL6vRNA9glZSf + + q8ej8AIshWO0kMjNNYNbyiXyx0zKPv6uoGTDOPWKX7qeZE+NSLQBCtclTSEWlELX + + 2nwgmNG6NgEXO0hQKO9kA/DxS7H3fZ73PcKpG2Q7ZTdKeZugWAcg2n8ADL3XkxfT + + VBpZ576W5Sq5MLLI9oZBdwECgYBMQk9NMRN7bDF/a+/q5XMQsL8pa+wtWlhf4ZBi + + CzRuh8l8Xf3MOj60tUZLAH0uUS8VNgWXB1XRpSYh/XPl8M6u1lT5LlyfUfI8EFdz + + Z4i2tApJMAuuTGp24CdjnahaXw7wpxcyoKzsXCo/ej3s1YIQUntj8Htw5SJab04v + + GobLgwKBgGzt/D8Kilx8RmDCBSYnUhYGqpw2m09knujK1O61S5yLODcUVgzp1CL+ + + eTJQGAZxtGH//P1FHSl1VE3nGCuwCv44a3xZ94+BooL0NHFyLbLSMbekz4Imnsva + + dgkZ0y5hxm3MzWWZex+5YaGOLTzHCZbv+ec2SLnpYT9h9BMlZE84 + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 1024 + seed: YQ== + zfec: + required: 101 + segmentSize: 131072 + total: 255 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:CHK:sshu77h6opnto3jnngot3lwzl4:4cjhp6u3i2uzdzrjitje3inlhs6gnlgwc6wgi6vtedc2grgvcpza:101:256:4096 + format: + kind: chk + params: null + sample: + length: 4096 + seed: Yw== + zfec: + required: 101 + segmentSize: 131072 + total: 256 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: 
URI:SSK:vfgmoktxuv5e5ajwayx7qn4h2m:n2se3m5k26k474losb3nhwevksf7iewruqwcwvk52dyfe6ihieqa + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEAxsCouaG6eszb6f1uywMum/M3wvq8N1j5JlamB6q5AJp8WzG2 + + ABOiemaykaAf2p47/yA0V3JTYS4+lUye8jZhMaN2iNKk7Z4aSaHNc+uZEJQxVeVW + + CblpobGIgtSdVHf+bsZBWh/rS3Eo7msxtmlx8EZ9H3ww3gJMwuneIfy5n28R1+mF + + Mxq0iFO46PaCP85kbvFwjH9AUioImx8uP8L6eUNdx/L2IqAd/cQhT/3eVXF/n+fI + + 0rydpLT3F5cKSdKqta03A3mFD/BLjbwbx4dCFYaH9W5IRWx8ytx5sRcalBe8g06N + + kXA2WQeAqCpMVTs9KmIvYIU4QeblU2d9SXchNwIDAQABAoIBAEYeWChNb229cmRl + + vb2vnLT2JJkMPnTEVfn5nc+conIdDnxZ2FzEkJDgRGVt+W72XjJO2Uh0lAf3+apQ + + gs7u8nFBuyLgNcGDAsExbTtVRgX8Uj98jlMV77dU29VUT0EqqD/Kf+nc0vUlsgwT + + E1HId6MOKzx9YvwgEZa+TVjuQUqGlIhu/inRXtJryNKXoK3p9X2vCxr9bGcj2urt + + 3PWIj1EhcDagoBFporOFcvknjEI6FdYFxHUMHABDricPmsPlyWgR0P0T0BfALDSq + + 9n21DNn5/dJ7SUt2oCsHGOjVLw6aNZ3hVrpiFnkWYo4HsUBrDr3ChUj38SC/1tHc + + iM6oRrkCgYEA7RmTZvzXxZCMNBwFQWsTrlfl8HEdyKZNhT9VR3F1zES34Db1FeVI + + Z4VyQC+oBIVef4A8HNEmdA00SRDrxEGVuxBMwQZu15d0HlZT1NMutYZQq7E7h3z5 + + 2YIgVRQ9lG07/jDaEwjfcNLo/yCYFzfhx2O7xCKKzdtlYIwU5AlWxSMCgYEA1piK + + jJ1kmM5zhb+jxF+lHbUd2W/d4ZqQUyQ98Hetk5uV2aoBbCLiJaTiO6r/ZQafgX1s + + VWx7dLC3sV9qUKI++ymkhV5vUmgg5eQkg31hXg5SIUNVWypN/jOg33e5P/TdUDJ3 + + yycy4YuwptaXL9T9Qp0Js+RyK9toC9zSJ2ShZt0CgYBKUDLYG7WRca3QA1xOVb5U + + ba5fP0UDh8RSWDhlbRVr0boEJ5WHqFaaQ8Q8g/NYf2jP86Rjr9Yql5zkrc4HtDq2 + + 5/P2qAqDvi+h9pLN6OcB9DhCqAktfSleWB/EKtTmOZqNIEipoKVP2ns2w8OHu3cj + + pInMfrscrIBI038UviyZGwKBgBLdK+i6eTpZg5wxQXMkuT4ISsxvYgDP9nnoiK1X + + x+Fe3uhYYnGgC2MlwGFgYbz+vQzD+r7zn1KdqjgkXBMkgAbSHU1ABOcokiPDT1Zj + + sihzd9LGuX0fFeYPocejHZy6qK3BEfjAxF9BSVERMg8ZWP3/EfhHT6X9ToMkcTDX + + TrzdAoGBAOahVk7AQlyZ2doxTPLA8+gcJevkLoEGmAHSKIZwvyWoAitfAlBNBEnV + + zfGePpIJF39DbRrqyTbYTVBrKp3/Z9KWM+QORxqWlwFd//lw3bTLMGL7o0hX/nAN + + X5HWJ0r2CwMbRMFl0Jgwml4ONor4WcbM4GGdHKX0m1h1c82EuGFc + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 4096 + seed: Yw== + zfec: + 
required: 101 + segmentSize: 131072 + total: 255 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:MDMF:wosfoa5glh4le3cvced4iz75au:gmw5hlx2n5vmryjujpt6bfxntgfgbdcqwykgxaq7sqocsgbjtkiq + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEA3Z+dHpI1JFhLpqYkOBcmoiEbOS7m+DJBxmWsBpy3owBVcKHW + + k0kxM9+bq6oyiUBP4MI7OPcH5A+1dKaUaTBUKRGaFvJfPlsO1PHV5wyD4KdfOIeM + + ZrcZ01glAIlezAdINKLXUgnOE9cY969/q7hcMLUVRuCQoF23kSI3pEQDQxC5+lIQ + + /GjyNAL3mCgeYGsVeG4Ey+cNgrX3SaBjKMW6UJRNHnt5oszDPNi86ErsthDwmg6m + + lgj0qqXdOYYIc8JHq2e+3ERsota5ucBHnfS1UHpcFxgMkteymrzGmKd9Cv9alAtS + + dRyHlngdt4VK3Jw4EIlaLRPnVqT5xY4huxQPSwIDAQABAoIBAB07kwl3xpuzK9Nh + + AdEGOLnU/RbHWX7ufh+RxKWgoVZWUm7HYhrOYjeR7KIxknXpLkAazp3+c6OA8PHg + + kR3o2okKQdV6BdcfQq8S8SCHVZPZ7+TweDQKPdVTQJo4BHGMGlmbCyTOl0ilrCzr + + kL8RU9O25wYQ0/LbOb3ikg1QTU/Yk5SXqbquyK3c1Amz79NxuzeMTDEjCpRlTbmn + + Sq2PW2k7jNoULTuCsw0nEl6wtHADPx39xMYWgfXoK4caiTY+JbG3Ru3TJYHHytHF + + MTCvJWFX3ykjVPtl054syo/z0krqcacBmagocA2lG4Yael3AcxbOox7Z7zwAZjbm + + 2rxAeBkCgYEA98QXfpX/xuwezaDuABm+5cQqIXUjL2QG9cO/7yvdQFs2QNHDztqr + + /WpGxlJCOAK955HNWJ6AVQQaNsl5LZ2HaxiRNwSEiv+FKLUo6KknPDE7eppDu1en + + mpKrQa1W6+cG2Ufsg5BHr5VQr/0KbeIQ+izulsEfqxMq9PLhEYpnNkMCgYEA5P0c + + T7ptCa3eGJEDq/Q1zH0BMJMgczbljT559psTmHVcckW6R1x/SN6p9wH5vzUDqlVy + + qeD9Kz1aG4YvWJXWh9JgSqgkeJVvfRm8SUR8CDunKshQ8fSbqsFLDXaModIy6sUS + + dEgUpC1c/ufkrPeeGI2CZJy2Uwnuqa0jUtGm5lkCgYEAySTsAfuapA7LTxroPTKp + + lPU1UuY7A11MfTdG2c+dloK2P9dMBOHoIRqnjJf5ZGltbNMkh15eRybGdVYJR6wM + + 5TgTpDvJsuKQYyT3qjKxRJ+fbwBQHoah7c5GtFIaL/flyn4mmASI/hXVZJqkXeLa + + 74+MvtzYbdVo2WVYrRnUgusCgYAdG9vcertfrp18C/smgb3RB9b94MYQP1tA8D86 + + zQ3ZpJmi4SBD8AsyLTP39WVVHB0iKwiPdc1ZEMyCkTU1kp6Z13FsLCGuvnhUs8/O + + lIkb1tFyS9KWX1zmgPnUdUx9SaY1V+X3qC4PjMC0mq/kGPoc7ugzeARpW+rd4OeL + + oKERyQKBgE4p6k5qG2JtFc15uYeRX9Dg+zcGDbKDJIHK/NlF740a9Ffwao7ul69z + + qx5HycLgIIk+uVamRu6Lm/8if1E7bwf1Phr1GV5qLm5QeVQXhAjCFL3d7ieNrbQr + + 66YNSC7CyUg8oj5r9CPGrFrAQnSqT/OH8Q6i4RTMMaX6YxK7tHch + + 
-----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 4096 + seed: Yw== + zfec: + required: 101 + segmentSize: 131072 + total: 255 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:CHK:kl2jayrstlf5q3gax7o65ybivy:mnn5doti4wllssdg35ymojuiv4hd2ir7i6asurgms3ea67watm7a:101:256:131071 + format: + kind: chk + params: null + sample: + length: 131071 + seed: LCa0a2j/xo/5m0U8HTBBNBNCLXBkg7+g+YpeiGJm564= + zfec: + required: 101 + segmentSize: 131072 + total: 256 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:SSK:rems4oug5xkgjc24vouktsjheq:jkytq32l7ih2xsaosgrrjhsblmyjnxn23zacvefyg6lnh2s2fmxa + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEogIBAAKCAQEAqt7c+hoHCRc82m10tbn6HkGoPCP8ezDEw0jxvH6b1ads4+Fn + + tzavlbpSY6oXnSvISGLfOuxnDX0iBJ25m9qJaeZufP/uc94UPxxPIjyKrC/87ixo + + Fi8Fh4KMd3ZbJbIoJi5MSnONEoecIx5lgPf9ckRsq0UqsHHRacWiA1M1c5ZTRw7u + + LZuaB0udxBFuvsPSB2vejvqLMqmiLUYn22yrsbFPlufUw6hh3QLbjiW/qQB3KflG + + 70W/6Gog2aLErsu45RG+uxoM28HHuzinYmnoSAUWmA6D7P+gEKtyS508u/3EWH+l + + O5BC4s4CJRDDaoVjShWwcICwotnQGF8yCjwlRwIDAQABAoIBAAeTD+D8Wgx9SGLM + + ZayP8nZN5cseCJleEj2FPxtifL5OCNsvM52hDHaK3GUeS06vJqviyiTUCZtvonGU + + RlU81548q0WE7jPvsnO7vwb0VRdd8eHi+7g5XGQ4lXPO6NUfySeBd5CjVQEZxted + + siqIk7vG1w8JPA5h9UJcPZrVIPJJ3eF+wtJkNh5Vv1CUyR4yER0ZDulSJkrccxqW + + ELMkpLn38wc8iFsa2yDWJy/h/e9j5ElhXedqYC1Ii/WKp2jPebd+rZ94/GXu7/yC + + E8xABg1p6MwVkVgykYwYrlS56fJ3hTz+NdGpVA1e6DklkD9KvyL4JEm7Wmk86Og1 + + A7cQJ7kCgYEAwSC34k+SpF3Gzci1Bb3DJFSwvX46ASHr9jRQBItP8NGgw7leIxbo + + 8ebQo1W/9peZiZsBGGXSDbL66+ZJ9BD16yF5hoKzE3w3AAEmYqqyl6PQfGMpatuI + + wzl4AmF+jQ7dMbGM4UDXrGZUbM6WgCcDcat9LEQafct53bcHPxK6oW8CgYEA4n85 + + iuvaH6HON4lik1yDMwlwgZ//pt1YpYWg25Sb7SbhzLCXhUP0r0XYdCrXROos0AtG + + 59c4uMY1bG2lXXpKEv+r6yHkmVSshag8gle2LBPewYmeULxoa0FsPHYW/Wwc8SEK + + ogkZzuTmPYXJbqVGqSqx5rdV5hEqzGBDf2giHakCgYBD+BjfbDPm5x4lpIKZL6zz + + J19AgaE2btLVxpl2z/Tlg1F6MM4BuXloUVySb4Zs6fPeaxAanxMrQRdwWI8kd6el + + 
BhX4Eh2mOOw+cykoRn0uQzgH3vpfoj3iv2IOLHPWfym36I31ZNXC1gzWcmqjVZev + + tLQMFTfhl/Ae6OCDATtvvwKBgGe8XV6DJyPVt903zy4u8OgvKpgz76M9PZyR11q6 + + da/oXwKg3sTqmuar1rdd57pohp3CjHci25fFMDK5BUQK/mI1N0g5/bk8Tsfohc4s + + 3gLSFvQNU7UmlayKCkimzWDEY30M5RHRmUBpFgqXe+pxSCuyokhJL85vjmqMrF1u + + FDIBAoGACVQqNIjcyMXkw8U2b2PhIIlUjTOp/l4rZh5xXdvzqnTPGCDZnVSKO3++ + + XNitZKlJUNWzJdUxIE7V+WgDTcNu8RNZVfBJetNnaLGol4hPtBE8QgD+qs6rOg0e + + rNFOU9pBTSeDNM4oovChqmOj6zMkO8qeUlOmlLg7ELkHGLfwxak= + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 131071 + seed: LCa0a2j/xo/5m0U8HTBBNBNCLXBkg7+g+YpeiGJm564= + zfec: + required: 101 + segmentSize: 131072 + total: 255 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:MDMF:ynzckg6zuaoi4ttwllb2b2o23q:sbrcnxnuqnnefcqeg5vglctxu565gl73fubzmapouivrxjqz33zq + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEAmCloFzFxnh0SodP3jlmVdWPwHgx4mCHU/qAxxWYASfX1h910 + + uMZTsJZ+O8AsIFfkQtCy16C6VneL68JZ/Aq4qFGgajfWziGNrFbPoe5gxpl+06vM + + ViGLjYE4Pn4qM5tgIe9U1xby62nbYEQVhe/ApFD/DMhCfIhhpBp+4vUtSzXQaxjy + + FFyusFTNPHD2fD7U5CFB00vStQmvmSSXeKcdT0ciQmxJh1dSgPVMbKFO9tM9TeNq + + HwQzOR/YZn4Y7zxqsHJ8wQS83YBYqpvDfvK1ILrieboW8EJmZsiM7Tq3NCcOBOwa + + n4dYjNqgrfkcQ+R0eMqyhTBoH2U9dPe3AMZ3kwIDAQABAoIBAEsb9VOthmYD99SF + + 6ycLNWly4W4Tvdtqp9bggHDuPqpDjOV5/UnQLDN4tesMmzuD5xrMJdumbRSNgjXo + + A78UE76SPFryIUgy69nsKCXIo2ClGCOoI/9II7i/1mGSqYY75iIaH4jkvRhTcoR8 + + Vxt8E12I1b0bhSYvs/LrWULyv17l+GBW3QhLZRYpAVpXipySK2hvzNS13z6SPo7+ + + wwyYg5Si5+WhY7DJP1yGku7ihLGw6MbHv8Fqr+L7bceeCAZ2BAoAP+UlqQo1UumM + + d7BDpicjhbQycscf0cNVpq77uPtp6ULQjkur/GOEU9CkKr94uThABqVID+oQ59TY + + NuevEJECgYEAvEF1f151xSw4jZAsVw/ZV6F3xslnc8cJjsXAD3mb9bbdVUxKU0Ms + + VQBwIRpx+FXM/gv8dHPEITM9U4DM3DjAVZEKIPxmCQwiaf8NQyCMVtoD+2VOpVbX + + GSu4E43uDVg46ECdyrTz2J+cc6so87sWbX4MMXI1z7kJOKhvg2cDm+cCgYEAzurk + + DvXW2k9XvvEv5dYdKRqgUpjNqNqFij0nQ9W9JHvz4tR5eWgSXMgfHyeJKMrr3Cbh + + q1IgIyJtF+esFJZ3cPdMzf7ZwEZPF/ivngnTgdlOQ69lO10pVALmO2Od82HBYjxS + + 
m7mvRoK88E6j0PODjmcJqLXM5MN6eHKyvwlPMXUCgYBdE/RXNEoAYfvYKmdx6Fkq + + laAV/jCTMt7L44QxYow08eP/L4g0IKtDn8LQ6zVcdnezSBPbM/3N+Hqi1bT0UW7v + + H5YldwWwBXric4OIJAifTI3Zd15qK0SQomgR6wO/P1Zrpr8doVhLS6dcHU1TLLZL + + Dp5SuEhY2wDvLYBtNLq5EwKBgQCfXBSs2PXSSQ1BR6wmDVOEFrenJXwvMa1rnFGj + + UvhLIxPgfNfZgyexQYeGjQJ74lzovyFKuwN5S8hNguXrLT9sR2pltIOsK/o6chN/ + + Wf4FoYE/a9RBdiygQWNkFgLOMVmo+OB+gvHVElfFlCtigEmv4Pd1ch8NiOfH4D5+ + + FwNhCQKBgCJIoZu9Cef70VcRWjdsNxbI6Da6zulD8gIbsw2yyR+c/qjQMULIf0GB + + SFEFdQNNn6eWRgwK0hyQBi1UGpR+kLEf11T4bepWD6KNbVwEUuve2TRTCWf3GjKp + + MTMJv/OAU6uulvOsoBFhHnhgOj+IV9M8GgeDyVPXYL4+IOjTfxCb + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 131071 + seed: LCa0a2j/xo/5m0U8HTBBNBNCLXBkg7+g+YpeiGJm564= + zfec: + required: 101 + segmentSize: 131072 + total: 255 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:CHK:a6wuekh5mynr4c74ma3b2gswfa:b6lovwnbv3523n5kaxga3ruxa2dvxef6h4a2jnl6lasn4fwvvdba:101:256:131073 + format: + kind: chk + params: null + sample: + length: 131073 + seed: /N4rLtula/QIYB+3If6bXDONEO5CnqBPrlURto+/j7k= + zfec: + required: 101 + segmentSize: 131072 + total: 256 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:SSK:n73xrpurczd5aoyiwcvpnjnmtq:zq6mwbtqsg3kutvuhwrlfhmiu5s4qmtbkccv67p226zowcylkaea + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEAsIX7cTKvU5wt4iW8FKUMN5i+uZCAfZrq/KdefeHFnYJStQTl + + ov8IrO7aArYUh8gK/tPVCz2Yl6JV9A1Y/8qyzBqRVAT4vIDNwqyVwlhUJQc1pwUr + + xiSsFroLCR4ljJxZkp2CqJSyrYHjVwAs0Sy6L9v16gV8wOAGS/Z879nb2UB2RZzF + + JrRAlAy5P54DvzLA2t5VMzV4TeZgnz/YlYHLBl6e1+Xj3JU9KB0YoyCbSPV8kgKn + + PxmKKCyVxfFSpqqBWN0M3s3yVZ42PffvErzcXjNzx5LFj/dCRBmU9aZnwxHpD1SK + + d6EVSEZI6Yq6Y5PhYW6bqTUImHN6+yobbPIkKwIDAQABAoIBADBicSK8dVEyGGOJ + + 200/ViNxEyoS4R1Mlsds6toPRdbgD2J9tqHgTNT13TzsAqGbI+RoVNdxaT966Btu + + gywNt8d5KseAW1tz5LJNEvmDs4C4wqyGntJ/X8oU8YxsvncVrfmhgdxKcdVcKl/A + + 9Qfavif7HyMnoOPPI/qzU9h8eyXHb/A+HBAUsofDpwcATFTCfgDDAQcY8k8yAD7r + + 
xI4VshbyVlIZrYgdVLKjkHNf3uuUISQ6Lam/pMyatutyQmGyaL/NSG4PbSYSLlg+ + + q1tEmHqDeGWsLupb2qD9Xsk/bpDD5XbZpernC6SVc1oBSNb9AV8w1qGT9JYBVcHe + + g9rEx5UCgYEA6gqEZVjgo0kbt+F018JjtDeC5Vd13nkZ5jVxYngRZhx2rIu5bysr + + hYfnW0iEWNcY2x0PJE0Z+mO78g8w/pLZFYl0FIgGFSqIHMHrEGG0oexos5ndDl5R + + BlToPxA7Z2tbsNK2YwlKgM+0JeMiecJXEdQ1uPmFKN30CNFLzHi53N8CgYEAwRXv + + SoDEYN5YZJ/rAPLFedrmm/ima6YCBT3gkVnaZDwX4ii3hgSpr6VHktukf+dIgi9O + + /bLId1DmGqFLLfaifjLBD2DO9UCzWP0WOz8POq2XJg7Kdsc9UUq4dfH7/rGdk4L+ + + zvWWx9ZZWNaACi/JXUpeAUyDIVWHn1KLWXXL1jUCgYEA1HxZ+d24jec5WDhEqgNe + + HGft2qUOab8PSZgp6lnSih+7iyqMYCcUq3ZZEeKD7ljTw1PdxHqP5GoaYEl0lRzk + + JQ6XqnBY/WyRCXLyJPxgUEbgRHekYIA3FgWOmnr1RA8Pvzl/x+jOkKaDC4btbRiJ + + jrFZWSiJwjHJdxv2spzFOocCgYB/V04HnsDk+f7l7in46COg5+NrPiPTnxp6BoMS + + mWXU8WT2/M98jZqzgpefnUfyKsDBSx4XZ0+akToQmguQ9rXX8PUuhTQ4v0EJEXEW + + BdKvakjjCqIwj9o6wMLC1qLRKKa54IzYRVP52731PxIWpclxw1gYFzPsShI12ySY + + DX4veQKBgEXU69EPIoJcr6VAkK8nBNR8IC3KyywyYXSPd2ClRxFn7voo7anlwbFx + + pgsC0o6FkIswGmo7sdDbdpWH/Th1w1dsZVX2ZsIvG9JATj7/OJ3trt8YxTRz73SE + + 0c4pi85sMLZXs4mKyxpP3U9rdIHi2+2FWzDoHenktW+pGsxRhc99 + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 131073 + seed: /N4rLtula/QIYB+3If6bXDONEO5CnqBPrlURto+/j7k= + zfec: + required: 101 + segmentSize: 131072 + total: 255 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:MDMF:bhlcqlyropnz2rikjv4f6fcaua:tfd4e5zhktjok5wdltqic35h4yya4ohubpnymwu6w74qxopby7wa + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEAtt5BBPkmlQ3RzinuPNFTEEcVwo4ApkrWk+zeeAv45dR3LNpu + + um2wFXqHUc3t4dukG8bV7n2zIbvfJ2rjWzCqBXAPgzHqm+ks4fjtsazKTu00vU8G + + W2+zyhDvFiiNJpXl9C4E4Pb6emD5vAtUyc43v1ubBgmPUB2gu7ZBhZHdQY2jL1aF + + EQIbtV1tzjx4eV5MpLiv/NnaEz6WxRdd0nJ2kfecxgvTZWdXxn5HnKEgPqmF4Bw8 + + +6sn45Og1yh84TzqYAzemdi2eFozKLztgUH2803XdPzrf2dJ6a2QUOOxYtQCiTdj + + 8EM46IWvl761+XEnHxyjvNCYUwVSRWO2QYTrrQIDAQABAoIBAAdk8UgcMv0v893c + + QC/hXvR3i1+0kj4nJIoSt+Qux7+zWaZMptGPAeG8dKBQLWBGm1osLhZYqtegWyOi + + 
5NKZIybZIydw60WmphP1FtdqXzvVx54oBd/IooJ3MNO6jAqVYRkAi131Xqd4KGD8 + + LE+EsShhseEKnerlZ8xvUDLwdPvjhsZZ+ia7u4mHTr3JGHCVwotiIjKt6OZUvmR+ + + CJ9G7QYbScTp2MgEfDYZwq7PnzapZHSuaIE+bLlZs8AO6IhVBfJLORTSxudU9SJu + + 2E/t10ubaI5j2BvOcv7WIca0SrWGmb0q54n0UUog5Qdc7vNnI9GYkVygAYLdixkQ + + IzuAtVkCgYEA0iyiXyNtrUloxrEKrDRjSG+Bvh94uUxm7MW4uZoIWvj5h251Wr/O + + s/WIEEwgS+Qr+CUWBjzgRRlrRzHB13B3+qcgkOZsJ2UKsikcnZLUQKetFK3MaTjR + + 44/EzlOrY+2gc4nmf0Ihxh1Sn3IB50x2xVW9Aa0SoiabPYBanoIbHLMCgYEA3r12 + + d2SbXFTh4rWmqq6lUP54rQEcxfInxnkMnSNmBXjj0bM0i2auc0O601724H+HGybA + + KHtF9lNLNuRSsFVgcQtPSUCsJooliGY4rlXHoOcXkVYf2ElthYbRUgqLcEC3NDZR + + Z5eKC/bYuS3bHFAJVYsflr1/J+/WqtVWu9DOxh8CgYEAz3leRFqd710zQEkOxxXk + + GGJzCnLY4trIE93PT/D9ZIi5Evd4g8Aq1b2Ats3fZ+tzeD9r8XZw0eWY4Cv/NaSB + + 2/7ViBTfGTiGiX9KD0cdnkGn+2ziB9EeaOzIlAFGhJvUM5oi3ucynfbeVCXgOStj + + Z8QOk7P9W/KOdvTY//Zhuz8CgYBWx1/pQiQZQ+TBi94EL4iu1oWzeXR5Vk/SzoRw + + kEMGLMQthfEZwoaC18do5F2wt16u4FkLLIPkZS0vlKL2mjy5rhtUwcKQPVBEJPc+ + + TKM69+3BrNk5TdpCpHTWzs7mjAAUcnkir/KTmLd05f2wuSn5zvseonNw3ss2wWlK + + QR7eJwKBgCzDobkvlnchBuDD+TPuhU2IiNLkLTCoLB8Ucifp1+Te7gB6nVwWPkqG + + FPafuOHUkE5HbZnQJr9YKbKdaRJRF4uq7Bm0pgvYiZrrLalAzuaR4XlxkHHGhQcL + + +/IKsYJQc/APe9+VNpT85NTulKtBx+1tLmSAq22opfBAfFbu0JrV + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 131073 + seed: /N4rLtula/QIYB+3If6bXDONEO5CnqBPrlURto+/j7k= + zfec: + required: 101 + segmentSize: 131072 + total: 255 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:CHK:pmuo6vpcodol76sexq7s2ojej4:irp6rxnkgwbn3h4g5fzgjpqgsvhvycxobr3xrk3zr44r64wimgqa:101:256:2097151 + format: + kind: chk + params: null + sample: + length: 2097151 + seed: uqWglk0zIPvAxqkiFARTyFE+okq4/QV3A0gEqWckgJY= + zfec: + required: 101 + segmentSize: 131072 + total: 256 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:SSK:mz4aqmo7whthklijqffcfixkoi:bghtfcymccbxrij7odre2xmjc4boyyzxxqy6r5rjgo35k4xmpi6q + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + 
MIIEpAIBAAKCAQEAqJwmyXuOk1WqyoiVygVY90sEyxvUgibDOxmwpbkztfTykTVo + + pD71+dXfqPuHZoPX2LCLZaHdXdQdxV/zmsL9D/DFmZs+DeIL9DEEzgo9ehOKThpo + + /2NTxdFhIe1f7oWsZBStDcIvgZkVeqNryHpt7i+jSt7j/LR3MyLNQ4hVdAoSB7Gq + + CzvpclFr5BGs9icArp1xcLu1BM8+Rjk0WVmRbVqUdW3S4ksagQl+X78betPl2fb/ + + QiOWPeNhKvHD978C+iwEX/mrTvTL3p7a8kBrxO9NIt+t9aEjf+pSGxUZi5YCOl4i + + kREf39CLrgCeTXdvnYJEQHVSAM4RaPX46Ce17QIDAQABAoIBAARrg4DIomYuWrnQ + + dszC8yAYcVm5swpuZbPI6p6NilN8xlcUJVgY5m3UM3bEkToYvrHJfv39DkaFZvpj + + l4k5D1U5pJRwQ2ItyM5v8oZMMmxe0sNVYec//VQ0Nu2iwV8JVgmRmS/BJWmqT6vV + + WN/6haM20HsH+MYJHQ7UHLlme4b9KZzBv87rD/UOfOU+oB6V3ydvMoxdQggm02UC + + b2f/Mz9k2e1Vn5C/79Q+V3PjPCp30QZNX1MQB8bIAXhMhXK6Dm5MeMcTpJPHsKkJ + + GW10zDfnn9+OsaHc9exFt2otx5l+55L0/xxoIx7qX9/gkQaIf+EsYmvBhQsgsgXi + + 9z0WnsECgYEA6+q0Da/bPCjwWQddvEAlGhmJDR7e+UWn0PLFPtzmIvCbv2FCbEtj + + MpPCoGoChco91gRnr51OpeA+eoiZ08FW+czbN6Z0L5wnJCIAMO42HarZzGKstCAX + + iB+HsETXwkSSdqbAU76KDMsRI6mjQFVXxIKns5iCvTCvbC3X094taUUCgYEAtvak + + Hspo5BrSVCsnXoARI3tlwRHRDTHDqFvSquqg1+XunerPxjLYUpdr3ez91+UKXZmZ + + M5CpmdGbU6oX5smz29MtxVdo7LwAjRrUiXLe167R4p+hXUpF/PGTQZd6K5OHtQWs + + ZpqcmjnHCYHEDL5g35r2o7VeDDkP3vXUXaem4IkCgYEAubUemSuWU4wSbrKaueZw + + jlQNi3OCqAyJ5rRESpDO5DAtGgCwrdjGNHkWGvp4E+M4u/Dpwdb9oxubcw92r7ch + + BTCaW/s+uH+eXBYbumi51q64FeiS9JPSkkfnovz+LqGV/aqT+RgjSaDMVBtkM+86 + + UKlc48YpHE5nuKt5mwDpFFUCgYAiqCmCY5jmzGXW5623as7UR8WIgtV0iF6lf28y + + TOtWugkvBJGC25K6YlBeY0vaH1qNTFEGwXo+1sNzX57TapWVKDVdUidf4GTCVbi4 + + qHepp0W+hbpNL4p+VUwteoH7yyBDm+WCMftEA3m+RURbnZw//tyFOg+shQqKk0o8 + + y0sC8QKBgQDTLI8FBom/UNpBTOwISJzNIQbDv6ws1iidoMXhlnB9G/Q+7DHDI14J + + ByAeiAPOSav0ogNxoMOdHu6KXXMACriTuFa8s/h4Sno4vXETLqJrukjs265TQXZU + + 1EB0u+QnZVYXUmEGIfjr8fhlOfEkUGh8DhEW+U2QyM+wlXChr3rr2w== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 2097151 + seed: uqWglk0zIPvAxqkiFARTyFE+okq4/QV3A0gEqWckgJY= + zfec: + required: 101 + segmentSize: 131072 + total: 255 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: 
URI:MDMF:far7xpcwexy5c6rsnh2utklpfy:fun6nzy7kxfr2zsrxmkwy3rslurixuq5dgsvzw5tg7cl7mnffzma + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEAyK1QDzzJ+fA4Qk2K9m3DVSqHCiWtCNPWu4JQtn4Rz3ligo3C + + 1Duq335a1RVYMayuOma1miTKAS1rp01Xi37q/o5cMEL9Y9aRrehvomYoezDQKfP9 + + pO4GQJ+WHqFCDeCj2Y2/d8fCOzmc0A94yNAJNf7+Y1ymqLefS2aoL15ElG6amQT8 + + rHKi667pK5rcHA4q0GK5osaJRQd6TR7jmzcZeNh4fq0HkNb+8fCYq2ZXOwtUM9Hr + + U9E23/zpqZCvHNqXVv9p3H+psPW1ZaN9yIA8RCtGuWgkt7oRJ3K0ovzhe+oNOq9A + + zaMFMTTvjURAvLC6FloZ2atK7AKgm70fIBngVwIDAQABAoIBABSpP2pqNFNe57Pv + + 3uRTVb6hg8jIK0IS6XNhzeSUI0pMsZdGeC44vHWJQVnZ+jwXDtMlewIVUpT/c6uE + + e4R5u0EdMCGp7API9jPFECVUxks0seH35IAEH7GsnJyntrDOFaCTBwkSkI1fd6U2 + + SpXGUYR1LgTV57TMPwLY3W0PFBmCAQF3TU0RQFK36cOB+ufGEKds2U3xJZWQV9n6 + + gVt6Izd8D8s8bbPU1bUH0Q4TZvsl9D9yabAkSmh/3a2mta3l2Po0rhtjfNXTTib6 + + yZU5IU/aYO2fWBWXK4GzqjE1daWuc9zXQQQeRRh5OVU+LVJQxAbjEsTlA+wqOj7J + + aCgYIdUCgYEA7Rxx7iM2PBjPm6xKApLNWAyk+V/nd/ccnJoWx4uZJeiZrp257wlx + + XBnSQAFE0HHvQ32RROnF/dHaBC+98GUH7RQLCYcISnsphya2PdOZotkQRXG/GB0p + + 3pt+YJAHygnSiKxtyWY1Q+6PzA8WA6crkypq1NYrc9+4x/MHmWKm1u0CgYEA2KnY + + GO/ESa/dGlaY0z9Y5iIiwetnD/zJGkb76wsQfB+vjfqqGgSFisudzW3O2mdDz7Sd + + cjTtvdplDpVLx6r10jqyWU/2HvJpLVfpmu+x5jp/RQpqNnkXLyfrTX8K6ZvcKLX5 + + dja4z6Fee5c9VhY3/PRHE2Ovdi4uoLdKjYSHR9MCgYEAj/WLPphmX0p5Ef0i2jkj + + L2hN6ZJOyMlht7reRbz9+MQmOpxMvVKwXsjWnEGo9B2YtRNR1dNRgG+evJf37DKL + + A2f944T2hbINXp8kWplUWEkN1fvfl9ZtC1jA/AO2lvYruwtlhLfncx0udShbp1Ah + + 5rIENsDplOqqF8v4OypoPWkCgYB1jBA11z+DSuqGM51OXvv4P2TkGLcdsWPZ4dEj + + QCl9biNswCYxX2qkVrwSjBTB4Wyk77TMFXM2oZpaQx2OAm7D1ByW4A8D0zjE5QFU + + kd7OrcYGyxO84g12BA5hSR++hlT3sWLag+3YmBAOtYsNfZh6oH0/Q8IaOAwMHeVQ + + yiorCQKBgGFHKWecHohrZozSwg7DJoaQdWw6h9RWMA/oOXM+80CYdbbP7GksWV1o + + Ay9q1NuZGfe2z3xXYP3F2wGjIWehqGsYc53zXu3mGGYhvQJzxyRCwAYp1Vsn8R5s + + XWsmcm5jw+o0N5vCjq5vDqlpjrnib9Gdbz8/8hK6wXZlUWyeudSC + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 2097151 + seed: 
uqWglk0zIPvAxqkiFARTyFE+okq4/QV3A0gEqWckgJY= + zfec: + required: 101 + segmentSize: 131072 + total: 255 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:CHK:fh4boug3wkciqhkgpusjsjyzbe:sy5lwooxchqu5mrzwr3mgc7qtwfjgmpz2srdmwpaip4iuj3tn4vq:101:256:2097153 + format: + kind: chk + params: null + sample: + length: 2097153 + seed: BTBX/ampNfLU+ox7xipBGiaSbgC0kcB8Gy7BkJB4oKI= + zfec: + required: 101 + segmentSize: 131072 + total: 256 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:SSK:vj2bo3rs7lkwz5lla46j5mfd7a:zfqfc377sixrjzajsk6kf27ouoz5fpdgszpg37k35q3ccoie2mxq + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpQIBAAKCAQEA1Ct7KgFNZeSkYGbFtOwIHbMXiIr9zYZLKsHNkg15XGBnG/ii + + 13p6Sv1XuqugHcTCk+xXjWL1GjB4xxD4Bxo3Vz30T6EpDVd+6FNXB2YO97NJVrn1 + + DNUYgQ0UihnEo2WThNGcRRmthyYBPYGll7TgbgNMdhazOtX124mJM1qA+OlRm6dJ + + fF7rAvfYUfXSmKKk1pWxhX0Fk2PhMGQi7HSMR42dgy9ZmpM6I5v+CRflPI9Q1Lwl + + gDjpZA/nTRhiALhHMEHmOC3aCSznM7Glf56MOvuvP6L+QUnK4DYDs2Bv4oVy1EVY + + dZFQ+2nh/OWjW9igPOhYzq/y3e0XCvV3rGXWiQIDAQABAoIBAAG8aHOS3dIkReMk + + 7QNqnaV+yPeel/V4iWDo2QiiAHqlrmQSMroBEBhtbHNyVHzUiGsIiR+96mzQNZr+ + + 6v3wVE6zGRIyKrTuy3LOS8Jw2VUrSF4cn9eNCXhKLRu8KOfxytGl1IRicZ95rwTq + + CRdDHvBOm33FoxwAy7e8y65dcMLt7mmzY3V7eaUruJx8fRob3dA7POIml10NbM2z + + 1TM65uqrVQgihMlQg3cArL95K/R84FZs23V1+EaobpC7gpvFn9J13pgXhT/pBP7s + + V+9tXPmITVDaaoO5Bw4Z5EqCQCEEzqeN6v0agl4b57k2eoE/mE8p+sZ2gyPUH0Wj + + llFujMkCgYEA4X7MtD80nEA+dg6uRhMnN9eLVmVLHXpQpuMdcdg/5EXPUCOUz4Xk + + I3N3Mx3EVWOc2vw8gvY3ZbS7vpMEEzCzyoYbEPa4fQ24UQ4aI1qL8gFuHsA5ZnUI + + FcvnTyZsJgoT8rRTm3+lsqmkzAhcHVMH8W/Q3uJhtCtJGNEzo2yyiPcCgYEA8N8z + + /s8gU8HA6hFQw2drjJCglJnrzK8jGq26bnnmLykXPnTuOwQLH2gKY4PRqPEOtgvp + + yEMXSGXktXxMNvdfY7FNDZvmdBBVR/wAPFbz2yZgmdAQdZHpmRAJPW4kJlXSTmg8 + + 6QgW8bsYV4RNL5f94IOk7QinchiF3jGaK9GWPH8CgYEAmWNLlAC6pN7+ngf2fCxj + + LRUt7yMQKYkee6daTCqxq3HhR74sZ83IFmVg3CCPgRY1iLCz6NHbdQ+v9j7DMtqa + + MlVu6+coL8i5bEmPdiUNtR1L7xcK9Kr/SPRe7/RO9ME+OIZ5qPj3mcTUGQZGwpvM + + 
d2t8RWDw3UHkg0ErQyuZdpkCgYEAzKtsG/zVpDXDfWCfNpp/GV6fBAXSBgdfFcE+ + + 47ayr6oDtS9Yak8iQFqAUVTl5t6FuIxg5qiTdRIXh1qJzD7gD+7M4V5yMHbccCEh + + 9iOQa8utU6UnBy+nxUaKA2e+UdCktbj+4KfeDyMCKQMjLujAcXCKyFqNJXbO8SFG + + tisNtHMCgYEAiaub4VuVuJGuJKYSdlSL3G6NGZaXYqkeyx3AiBZw+qBoECNIjVmr + + afsUnv5ocfpBc+7zQGb/hmEpAeqxMwza0oMfJSt68kLm0ljL/kUxpRR6MSv1yPMQ + + DaI4cX1tEURu5Wqs+iXtKiASxFNCXcD+tSL6937wef2BDte/QFWEyus= + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 2097153 + seed: BTBX/ampNfLU+ox7xipBGiaSbgC0kcB8Gy7BkJB4oKI= + zfec: + required: 101 + segmentSize: 131072 + total: 255 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:MDMF:avc56kylzakxlqqhpo52hd73oe:c7x52qxnbsjgdc5fgxlrc2as3rie6x6ro266qnu62dbsncn3l24a + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEAn2qG7SUv6kD+8MZ13j8G9Us+w8B3/j1QC4SdgV7qoQ8ziAFx + + mJe440QDfjFQNhPtoa17RRzzBeZfaMu83oBscl9oqBVf+WuxQJlHNh8zVNPZEBtw + + VkA82X7RHzNjH97YU3DV/DRypdMPCmuIUW++TI+t2c+W7KsKPAOJx6rrJHK+3RgC + + n2VuyQHhZkLqqNc3ZexwoKFGMHIB9XtHSCSAMakD7o7ITcqp9W55xMeLPWLtEvnr + + 0k2uSWjJ4AmKNf1NKLQ5eLcIzBi04SiOAUm3p1gtW+qPfIChbV72RB5ZWL1miiaS + + XpE+F424wm4pZNf4EwvRKKcX8QEcBMH/m2sVywIDAQABAoIBABv/E9oS9VLA/mTf + + nbSdwgWTJN8w7oHWV7fmHtkpB7CoYEbq5f3D64LyH2DqnSkaH9oMgwEUv/NRzYC0 + + gyNaT3FYoyMdueCuUo9DO/fby+KCX/UNtJFZL7aMqII/vpFKzBf/UX084sOPiO0u + + DF8s7jE47HG7nMhMk3wNrQVFVY1tWGlL1VUgt4ICm8Zz4+o+F+P7/Zp8fZB9iGNj + + Zq9z1q5VMDj2wdNEt+vvLcWrvBnphmqYj2axk5VWi0LfMMVycWGtsFV7JHurNYBa + + iEJa9Ew1LxZc5Vq0s6RBtNjT5rDqrlXZwZi3nyUHDls21auke/ivK4PhQWz6ThZP + + /u88Z3ECgYEA0aeS+9HC2wsBW7SH87N0Be/J7K7YSiAnWRE2gyi9Lc9pcB9zsZHC + + NkJykJeiocrDduAm/EwjwSgP56u9ESpIh9RK4XdoSUmlYlcxmCekWNZVyTNoWIY9 + + 3wDOCeEK44bPPl5/dAWqlG3/t3BP3UddoZoosJXNNMtwb8aq8yJ1TZsCgYEAwqfw + + 0mxNAdk/GzT6yAYf8Kf+eDCYhzlnnG0v3h3Eq6wm5enuQ4TqRplem3m8bs97hYsM + + ofso6HjsmAQtm7JMM5gduBERIx2cH1uIYRhJhxgQOwq8yRTukzRGEzi+Nyfmf/jZ + + Ui8aPwRxtx0h6g9C8LhdORVxISR7Mj1mD0fw85ECgYAnxGBl8ZjDUagVS/4JpL1a 
+ + LuyfP175WHX+N/yeDkkr+k6mnOCmCt7KyfnPIWQQylQfJU9fxdV2WvIBYJsBOYL+ + + eK8nay3V0OlU6PMYSFStISKugljFidkMhquORih5leWTj/se98AuXVsG4X/UmifR + + cltLe26sF/agzQ86BQw5BwKBgDc36tHWVRYEKamvIsDhM+hRz5cKugoKF8FBHAYX + + TbYhVLt929AdgVPbqAHUy8ZnZzPf2QqOM/GWdA8/iCyVrJYqPav8c28RtDsU/SAG + + Ar2m7tvA1QL5xB/QAVzsiNEeqX19+zAcGobr3NJEGl3KTIP62L8bvQbY0XXUAwKs + + tsZxAoGBANGJEzWLUx7m4ZLKvapAU1FPx0pqqZCyzrU02mg831Eeht9K3s7B4WBb + + HMNM5wdVuN0eT+hUEmazU6Hwg8JvPMCrWVLgj9SraBgyTWLEMAbKY0TYYjaOwhdy + + 5B4F4j+30c9o7l7jr2x3I/cZETNM0avvB+1sd8AgKk9px2oRYqEv + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 2097153 + seed: BTBX/ampNfLU+ox7xipBGiaSbgC0kcB8Gy7BkJB4oKI= + zfec: + required: 101 + segmentSize: 131072 + total: 255 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:CHK:oakc4ghiol3loekapj7br572kq:exzvg47pa2b5v6tojc4wguux4pmerisstqacowbzwtbqybsxtxcq:101:256:4194304 + format: + kind: chk + params: null + sample: + length: 4194304 + seed: 2NVWOXvtwcAFf0J/g0fDC+ypQbnCU7awZoCxcnhpkVM= + zfec: + required: 101 + segmentSize: 131072 + total: 256 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:SSK:obw4p237di4oqr2qlt6gkodahi:4f7ql2uzoornpccbjnmaucgnsiie4mhoqq7lnir423nvfqfj75oq + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEogIBAAKCAQEAnP0nyUESoXrvtjUGHtMh6BCdCVa4SwMDktzM9p7CDXVjr1jz + + EH6I2Fq1EmhJJ1TbaXod3s4SRuzRw5OGyAaX2PIzkntzpdHKai4VEeRGJTGUAh1+ + + UjNoUHSTgoiPf+VuBfl8uHUWe1uIKPEEqPDddvzGR6iFWTfp+eQHz6w4piAeLlth + + CA8+Ti97iuoqFk7Q07aPSY/v80dhuyNL1nMwHSJWePYBODTNGc7RsUa6He/K5dK/ + + dptSZGFtHPZ8PsI+C4KqsKtGoyIcbefUQ1QlFJcxQbq8pr1FtCHZXGXzGcza8od0 + + uziCBzU0iVHyqrkCgDGcjSci+z82a+HEEtAEzQIDAQABAoIBADqIi3KXAC7USxd5 + + SrhoiW5g72RhgKJ7U7RI/mT/yaPB/rKM7EfcngJpQ7VCy+/NzGdWAFgoJplqSEXv + + NiRTjP93QvJddD/B6oJPf0yl+993TlPBkm1svHqvFKbpavPJZA33OWD/SywgczKs + + tsuUz0ZDtlxWga3D0sn5E06DzLVnkyGqDW55st/lYx4sTJ3luRd6tF+y0vz9v871 + + 6N/t/j5LcvaOjuLTkOmvoCG2cOsHQGj7rlXKIkxiHaIbFONEYuhK0+txs3XfuWsY + + 
+2yfqQiavT/1UyfqZu7J1GXS8HrhtBxR3a8854uHUiaDP8ncGUEI9vJ6m3uM6rMz + + UlBhP4MCgYEAtn/vyTqGtzTIfh48aUfiGFOtshZ5MRBVtcG86GIoGSK1wN5Xwoku + + Vtwmz73HPbctd8WXF32OpKPwoobM1L816sb3yoDSTLjC+nl2vXlPOjY9eVFZEaht + + jQhF5V25CR9sb2RwhnriXz0rfW/BoADvYm3lh5a7l87enIQI3vcovs8CgYEA3DcA + + U+S8zq3o+PSk9ywiS5rJ5W9mfGzVg0v5UvCNTMBbFfkkh1206Oolr7TlAKBbgM52 + + Z8XVzosNjU02ULZFA8UM/g1/t4kFf1LmQ43rZm/TO4Bm6l5sOsx2+Qr8s0V0ekON + + 6F4dex4n2WQx0ODikEQkcwGKVwOejTz+ydySyaMCgYAJwTbg66btvf2FeDpEalo0 + + cKyVG0xpCfV63JsrVKvOBCPw5jGMrWZzsBrG+d7fdp4Qi9gyojxwom6nUUs7h+jq + + 3q25/j6/aRTK7JkjMYvBkcqhZG69WeJZKnsJ8oOEcFCMd7LoDUNyUcO0VbfkxIgH + + G9ar86udRqpxdUFAIbfk0wKBgDqZDCZGxJL+pfKxLsBy5wFVRAogVZYgY8RXUBXo + + 2sCkotg6/qRipAQiYjraGOHMyeyBg/JjK1yVldqWxDBAACdbpPRpZSXSeTsDNTCe + + sBgHA59esIQG8ifHRpVIfiu5/J+YIEfH23JqeNIZHkRlwwP+jfBoZYZ3+RW+OFJA + + tnKfAoGAWrOXeeycShbIKjWS1XljZ3+Nxd7ZARX5pf7FvP+MwqVpW7zEgcG3Rc3I + + gkh1In8t7bzKhUVMLDmJIZnpMFNUHjIWAAshIQHivs6d1EnErqCHkTEofQnnIzgG + + HzUTEC2ppRiXZLg/5l+opBbah2afagIMbODMgc7KEmVfdTjkDwk= + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 4194304 + seed: 2NVWOXvtwcAFf0J/g0fDC+ypQbnCU7awZoCxcnhpkVM= + zfec: + required: 101 + segmentSize: 131072 + total: 255 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:MDMF:ytzxo5777my4naxdqn6c2zwmpa:shrbjb27qpwlmcjimpuhnysfondpd54j44c73h76eudklkc2kb5a + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEogIBAAKCAQEAwrgs9CQIUW4BZ228HTF0MJ0xGU6AhvNcNwmSJ+Of5tPj2Yx5 + + g/MNsMJYyOxxnMoODLPAgSX/J1EqU/+/RqwmvcXOw3OBmLdxnMk+VYPBZxbup3xw + + fYCygPHGR/qcQvJKQ76XvkiFaiD2iRTtX0aEG7iZYeLgcQD6PQvgMKsdX03nKmyn + + l1zWTeCpUBaR0RUJK1kwh4wO95Ma6PLAkQles39N/A4I5pBlVpN5j5olIHyG8pLW + + hvbNU1yV+5JY1OCtpoAxoKzehQx2vvKeCwlqh5gViu+FN5j1/7trA2dz7RNAEGu9 + + X5lw3tvqZ3IxhX1/4Brswvs9BYvFfPyFzb8sVwIDAQABAoIBAAZJSlCHK9glufQF + + OEWTAO/sj83fa9vUDRBM85Y1qPz299U7zdTH6spNljfFzT4JzVS+XSduOvYPV9Nr + + H1lgYv07A2TFilznl8p2FaFzRdaEPzpSlKSOUyvT0BonN5at/svtz8eZqDNFkwSP + + 
gVPBCGc+IlvTAegYng3WDYdUtJxDpUnO+BDg2PoZ6lzRlzyNbxiPBA7221yyBk6U + + b4WZUtHl5mfCcfMby7vaWAZtwtFiYXDeu7bOZwBtYyjpI1T5l4yUVw2dF97eAsWd + + wwbSYbVs3bspN66VFfaZQJrZEeX2gYDizQo0iROySC7hcyXQrNmnufrsgGIwhrXl + + AKch9mECgYEA+sUKHB1nR+Zl7QzB8iS+aIK/XUgCYVavkg7eTreUzVe8/CPAEWGW + + yQnvDlWKWSnge+LtEHEWYmNedUhMW8zGx+b8/3aGMFr9VeWDrFKGe5yNnGms5pkm + + RJrpJxC1CXzullHoIQ9Gw4XQStaSqK/ShJowh9q00QznV0owbKMXW7cCgYEAxsfc + + eKvlwgpyddL3WNSXhggqOjwd8cBIku8GoVyfzrgsPYHPoZSUZCO8iZ/Gg04tbEAe + + oRLLYQ/5i5kcbFD5tbWji895Lwds87Gw5TAIAc7f4Za339Vqpl6dqOOe2pi/q7F2 + + LjLeaOfWPFsnCg0qZ3IzADiZ0cY3KqQMh/td9GECgYAaeYj6tOP9hEaIg0tKjDSK + + Bhu79mlB64v3qJgxyVHtZ/Ds0b1qWFo5+VGCuuczSKeJjMiobrgFRSZozWw6WOE3 + + o5xcQCAkpMaQNf3zyHaoQDv3InT9l3eh0JUC6dGjIcxylE0kiF9ZLxxxejvbkUxx + + cXHkNePXGjymS4/XOFSz+wKBgDVvKkvh4Xw8tLIJiOX/F9A2x6sp197RknC1AjJE + + JM074uCR0Y+c0hrtJFRWd9V6IWm0/sbLt5Ia6jjlaqePSODYt+LwXaIPu/DyNhwV + + wkFCLBqHGlx4ERgx3O22alBWuUddB+i5UeIfWA6XbjIcgeaW4zDPBkJGpzO2L4wq + + PQJhAoGAKGFp1XoDgr9k3y4yfUn8AsLauTsZn0JZk8lvYuZQqMxaV5F7riF6jYkQ + + SlkF8LATvS/0QUpLOuF3QiSafKlXpAodQoTG+vBkaR708U5qN6m7fFyH/S7seaKL + + m23LxcRu4bvsRAsPC29MnLdhECtTSr6SBrfqG0fY7bm2Bnpolls= + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 4194304 + seed: 2NVWOXvtwcAFf0J/g0fDC+ypQbnCU7awZoCxcnhpkVM= + zfec: + required: 101 + segmentSize: 131072 + total: 255 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:CHK:asmjgajvwpwvylxdcf5rm2gyd4:imwtdi4eqq3wm4bvk6npho3ta2fewceqzl4f7wyyabg5i4s6h3mq:101:256:8388607 + format: + kind: chk + params: null + sample: + length: 8388607 + seed: w6uP8Tcg6K2QR905Rms8iXTlksL6OD1KOWBxTK7wxPI= + zfec: + required: 101 + segmentSize: 131072 + total: 256 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:SSK:vyjriuuydnhdjssca562b4qkue:en4dcdgixll72bqyhfbfyzp2xoood2ti6x6djtvaritkvgg7ktcq + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEArKoEUAfL/7SQTQnIgVEBkZ7WYFHMKO2DHSDnHT3rn4xn6D29 + + 
u224k2GNtOQ/WiwhGHyCXKAY9fhBReTkmMeR7NZZCTgafZhI8LlVgw4NTTZhHPxX + + HPaLl9AtWWqPqtSl5NB9pckMoFm/E/dWOlfVmPFAV0VtcWxKis6B6maFKDxjifTD + + cMu7T01bG0/WDDYZrok18CHbc9ci6YsxwheOo1O3BL9Id3ZzS7MkLjguT5pS7b/A + + O7+ll2v2QLdgwR8hXLR/OqYdQA6HyGi38DQ0YaIMwzKgqE9US6OYCpiD8SU69dcU + + y1IncnfkVYPivmfABsaxQNndd3OSWc6qoJnjwwIDAQABAoIBAEr6Fx5TjGuw0lqJ + + rYYuEXLVGP1Bd9ir7pv2/jUN/uPM+g/4w4uApT5mhbzvwmzbLdHuu0MSiFRDJcD+ + + mJ+ZRc4k9AvTT3mLZ90UdcQPlYoaW4hVMVTT4KEfVpn18oX5ikI2oOEdUTzOS/GV + + HV3/ZzLfTBO5g8FVh3cIHpUVSKxD+h8mbCTiFlgL6kDWEiUbl+LdON4c+uatBzUU + + LNtviE4166nWK9ruVTqGVL9TQ6kTEFUwZNKsVbAQdpyV509SWP29LtWevXyFBcWU + + e+RCZyPZG0cStomWPrmbktfgSV1ZsjgNdajQqUYzc4tsZRnQiRK2+0qcwc07r+kD + + ZsU3rTUCgYEA3HKc6riwJKRIAaSK1gIl9sqvTPtvU8zTLW24Yj/2wwFpNj0uGYnX + + EorA0yt5PY7bW55TXCgRIk8HpuYdQiH0atitjEd/0oVzpXW5hFDhLYh8kGQv2aly + + /t+y06ZIpw2QF32S+PsIfDztUuNv4qLivtu78LJHxEZPIW3UEdf1Hp8CgYEAyIKd + + YAOZ01wfgFhP6gezFhlHgn5ThMpYnNYF4QDwfwK6iQ7CagzyqW8/lfggYooE/QCN + + hqRGWsNh1/bndt7gXvF3Kvwhhx5FbHZZdfU4YNna2BVtYxxXUEgxqDNCDpe7Q/2w + + 5iOg8oXMLxt6fI8iObLNa6EInHI5rH9fbOlPvF0CgYEAmk90Pe2oBw4kBVpbgPCi + + CH3adeWvCRbgX/Vk0wl5PwmWz0vGIERXk3gi/+53gLqmHBzYtzKow75UWeTMaEWC + + ZORln0NRW1jlGdYtVUyUQx4+K4il4hP2FikacYL9akpZKchSAA0g5G51pcbkw91H + + IViI1zTEfcTFkV3iy9bCk3sCgYAHoJhV960ZUi7Mlg9sKqDQXWPP/fg1W/Ek/is5 + + FO0RF8x6vDn/CMEOWvIDRW4N8YwhB61aitM2TqphKb5CUlYcpnjPBMpNtoQTjSj4 + + CLz9Siw9/gqsM37KygRBjrmbjoAMJRFen8pWj2pl/FibdmJp6XhQ+M44DUxOWIYL + + wZBL7QKBgQDKYr2S186YX9X3Vfypoq6s5Jsvq//smGYzcar1xIfpLKx7+sY+Grym + + yVE9Af5tsvYQuzSmZb76E4ro8uoq0+5q09C0me8ZUhtDyotl9aBpK11eldCikWAr + + rCMfdanQQ7BakB0tj4GBiTYbH46DwS0lEZ8hcxQdBrL6Pi2TU4ydYA== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 8388607 + seed: w6uP8Tcg6K2QR905Rms8iXTlksL6OD1KOWBxTK7wxPI= + zfec: + required: 101 + segmentSize: 131072 + total: 255 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:MDMF:sdheilfncok34xxpgh52hgiyli:zq4duuz3gntecfnj6o5mu4rqkl3edfzg3ohpqdn4jjy37okcefta + format: + kind: 
ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEAtNBH7DSJbQ2f1i93iJVTEOt55EDlz+BTI+E5bB++KTHobKLn + + RJFvxTX3wfwrqs8KpCNh/YNAgwOuHlzmEvE8rs6eWXWo2Ik/YvIeBaTFynYjYaoK + + acMBhGCMHyfc/1vQ80+PnRYMCjJjNvtfxIJKWcT4EmGxO/gtFYZ0OJVpHJMTiaQe + + XPl378CS/bTx84LzZNGkQ9VJ38Qq3ywx/zMm2jYtABggNv+ygMRdYC3lFgDY5duk + + DrbrninZG3ghBRo6LFxYbWxBd14XrnaoT6feip3lxkSMg1BKNJxFiZ67CGp9cLj+ + + EbAADOVwFN7u4g6vuRmfGJFpRfYsTmnLeEQCYQIDAQABAoIBABcHbBGmg1ZTZOcn + + toa71g+Snjy3E04NmSk5t1GRHWwrwhmMCf6Os3ifrgWT84/WvNk49HMQc3f8UQHm + + /RquhPcSs9JbDP0/RcZ5Zd98JADsWQdIW/kqcBgHH/Gb5ybS7+L9YCI2u6PU1RQc + + og7qNUQHtTzKGoOz0TIrpMPMK84Kqp3O1RQERX7iLef5d6N30FRguXByX+ixLXNz + + elAeTaTzexLdRwVBsCcYAEaCerqNznHbSKIpHsIApYYp3ZzPls5qRmFX81jTngwp + + 2i9WV44/B4GWH/ttHHlIrpiUijhPSYvvEf0dwKtELxP1h1BgHXL3hy2lJyGUHFHf + + 9sQ4hp0CgYEA0CygU/QTwP06IFUlyH7O2LIGcDcqeTS6TPuaxGpzATMqWDVhqNLw + + 49FTKzfLHqnUSB86c38215CW14dXU2DqQDG8zeqhdFeLnelcQw4Zp6BnUp9YyVuZ + + S8OScGFyt6k1CmDRiVYyaH8YGQqWBkORSFRcULIVGS13hFUiXTNrLEMCgYEA3lp8 + + cuIQBerGla2zRaSwD+WUxSxa0U9lEvV7Ov0KEX6mf21veDPaoCqD1O/Wo04CZQ0d + + 8GLKm7OL2A7DqpCs4BAew+fWteoHUsujii00kfxh41WcjkN/qzj4K6sJDkuA1wnf + + usa5QExLz7C9yFOsXVTSlimBCvlzGMcAr83ufosCgYBhzKVh66ggIZdeO0Jt6A07 + + Rp+5tmEQ4lGn+whhwHTZGnWJTULdMoSTMvM0uZiGhljBrVIjkp9sNHR5Ow8uj7hd + + gkBmKRXC96ITBOAgbI5m7ve7nDr1FkB1lKLGgzGG0Uqm3odyUvmJmDP1B8Elnjax + + 2VgpXRCGbJLaq7hiOtbdywKBgQCogaaCYbOG6G9qi2Kqyq3qvi/KZVzF9wdAIO0s + + vQreSz7enw6054ctjkquGrxssfe6oQApZpTo/l5idH3wSwfYHh9Sk+XxotO9+TmM + + w8ltQPjmEcE9RwX/uoLIhSutu6Z+UKtOnr9RbQCe+vA4WsDrUhbtWbLDoFuLUXTB + + xqyRFQKBgQDA0pWlsy6Al/7cokmPuxIWX6Un+o0DQb0Hia+eXlFvEcuQUQ0W2rja + + JIgZvmKVu1V4yI8M9NRfC+E6QDsKNAsDTKBR0T5xWwhSeofP8+BUqEpbsTwgup+L + + DIoa/pu9VGGp17UcNUhNy9pQ38jHHw9TBtQ57HINmKyFNQzaYJchYw== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 8388607 + seed: w6uP8Tcg6K2QR905Rms8iXTlksL6OD1KOWBxTK7wxPI= + zfec: + required: 101 + segmentSize: 131072 + total: 255 +- convergence: 
YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:CHK:yrznm2uuybfbda6jlj7tqs3yum:zy45lpc7ilfzgltxxqvy55wcokiay2fbbbv2wdvu6ubjluh4gf4a:101:256:8388609 + format: + kind: chk + params: null + sample: + length: 8388609 + seed: yPi3JHKKbWaEEG5eZOlM6BHJll0Z3UTdBzz4bPQ7wjg= + zfec: + required: 101 + segmentSize: 131072 + total: 256 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:SSK:vivq5nfnxznyjdxt47o56uef4m:t74laopuqbdodc46r2xm36sffbonespuirkipsudey2qyza4cdda + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEA3IYI6nlVJ1Jjks7Ovoh6Bn7aWEoZm+fZD2ArQQGdi7RWHGDD + + hG6T1e3m8cTx8uJdVDYT9FfMH/Q6ux9buLsk+CYii6ftGeXiie5wiIZRCN8hTFbg + + CcDv9SA6sF4CRo5V2MSnNN1tTu+WHVE0G5T9n1YSE4kPvufq9tBaWda0dqEAXMXc + + zv5nWMCyJHB+zrR8GbIyRFgQGFW5xmXzXgBIrNFAoEArrinzjje3AJTRGG18i25J + + HVAYGXFbhkZQIl1D+hBDq8Cncux8z9VkTPk9t0P4MH3DAYQcxyA3oSu7XykYwNml + + cMimZpdBAzKxozLD2Kw61jAJMTuJrfXRAMFw7QIDAQABAoIBACzebrj6h/twtYbc + + 4k79KMrii52UMiK5KT6KJDLdV8dhoXWzsIRlFVpdRfSiTFJNgYzNVgEMziEgScTi + + DAEJvutoovXEbKcs0YucArSck6dY8wb71CjX41r+PEK/VfoyIsBwvs6wUPnTILmF + + WLNV/MarNhFYfWYr/PBME14dI+nQ1oFNYm6XSZ1XHmWt+tyKt2quvtQUwCRoo+HV + + D2zymB/238OcxOEK9w4cWC7Rt/x9+C/Sij6eNSrDsJ9UqCeQFzWwd3QAcu1zaeAf + + r2GDG0eFQ9Ylepg5dempFTzzPs6Wg0UJ4z9ZDd5vazGHCoxg05/JpfKZ0khBkdbQ + + Z+8ANZECgYEA/fptrH82rIyYFFxJHtAQn3bO8o8lMwYxMVmvLVMw5EVSPMyqOoWK + + idf9s9e/FifdjzhtefIng9tkiL1eAWom++KrCQZfp9UOa0/Qo7jrWQunZd4HtLlv + + C5uGHPxqivLMorCqAbdx1v9CKMNpdeRojOFYhI3qm86BNXpCDxv9iKkCgYEA3kdu + + OVDtiW0zBGuBTJM+QWt39+tE7MWkPoCHGWaz11pksI3Wa3ShwNUUior8+a9zs7u3 + + TqR0VIYQ1BIYft1gEjS3rGzrJDD/MFT0oCYJ3Zfk4oFu8qHQbjGGVO3VV6r4aXB4 + + 5RSEBxQ3pFwKnxQ0o/tJRfK6WOi+m7o/t86C/KUCgYEA0QkxA2yg31vIP9nFBOtT + + AyySH+nZQCm0i125ZKC3+OllSk1ZPllzMQjo8wB7cgzVum9DC79W7pvAHxtdJ+Tq + + uR5Sj1cDm+srtv82RcqJSfzhhmI8DW8iCney5mCKgFpeOvkUs9z8gWwOU+aiAjpA + + ItPGOzNjCWHpzs4VWMI85iECgYBDU+mcLNo2dUAtx457rmH+GNpW2wmemmMcl3vU + + gtpYkcXMALqBA+v259P3/w+PZcirGWH1zTR7Ybx5MB4BV3bBLPyxmrBC3yB8+E68 + + 
r6jvWRH4VfJQRhlHN3MUCJJFosDp1yqXYPZ42nPcMhD5jHpBbV0Nde9h/OW3b9vb + + Bg+BDQKBgGkKUXDFHqCT5w5I6MHReATnuENJmRTUGGCZa/tfN2GDDZA2/HRV3Rr+ + + pk48N7BZH8gst8hUwI+2iTuWnm1zenuKkiLbF3J9kzpyj4ujagXOWpGgiE+r2hPP + + ltoNSOSWFBnJEiLw4CbUInwfmwagHvThaHN97eoLDbGuVOFviM7f + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 8388609 + seed: yPi3JHKKbWaEEG5eZOlM6BHJll0Z3UTdBzz4bPQ7wjg= + zfec: + required: 101 + segmentSize: 131072 + total: 255 +- convergence: YWFhYWFhYWFhYWFhYWFhYQ== + expected: URI:MDMF:f75y3bpbubssf334zpkgvuvbgi:nsiebimpy75qg57jqp7266brgjsrbloegjm6eepbnpb2bxltuf3q + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEAqsL1Nb01oNkFMGk4oZbitKIILdWtFu4Awk06LC6cLbZSJm/q + + drIYi/43UWHFpww0UI8E4BZiG1+AUmap2fsYK0vrsQQmiORhxwM7F6TeWvfa+3L7 + + 1XXAnjSFnJXMNrozxYAprhapt4zKLS1xzDWXlqH9hS4dbEercfdui0XeC4xGpGpB + + Rw7CSY7JJf/RG6XCbeI2T1lwkUhNeajy/xsWihhTdG4zUsrvFzu3eJfH9kgYs/M7 + + eUK+cvF0K6YgoFwzg3MdbQXMSuGUbNz1VJjWZK8hDrbNbY6vufP0sqmmRWnd2a+S + + gSAXl53o/y626ILVy02rCG7Q5jtNLCgZpOW/HQIDAQABAoIBABbIfD73R7h2PCMB + + ZvToVMcU91JmN/nfN2q1MxXCAkR3Fu7Z78Z2bKABAxBwoxZuomw9KMFdOym7zDsy + + R2c2ATuFnaS4kQuirQkIfVHiRWiNuUHjTYZld5WkHE+QDPcgUNgBCY6Yp3w95Juy + + fIRqghcu9cxXIsXXqiBCixU59S5FCRsuoCtVvKrZrD+9NzwC+mOPkNf4uHnsYP/g + + 5ZjAqJNQWbRqaslq8R7vJ6lLzVgjGBLFlaJKepv5cOMMpxxlj12j3gg2DWNSHbZe + + s3Hv9NyWhje5sI+96Stowc5oiuzfbiDvNsD023Mz+9rfheZdxd4l2ELrU9BVdkA4 + + HxJtOWECgYEA3/evZTGPlmpPCoy4URtj7j0fpxYVxCPlEHjpdghEywqA8EMz0MZa + + 2JHxWZKmeA79wWCbb9qvupzMiys41bK4qXsWnjg7ESNTD+HhJtpNXNkpSs+zZcC9 + + pLjABEEsfvWs6/laclbrqI3IhXKQRCjBtfAga562PkqG/v9aypm0cSMCgYEAwy8y + + cmT1N5DVq5X1Fd1LPpFvSIDjoZNsSYaqP40FkWnIMSoUswHY8XWW/0LTXz8b9bgg + + CijCavAGFdcK26TlAo+k/zQ0MwinjH348VuDQPcp8P+LAjeEqUwYngXqBpi+uw8F + + CEUIvX/lV98y627XfEHjv/nk4H7bsgUIWjYpsr8CgYEAn6syzc/RcAiGJR1BYgFG + + 8td8s1/ZUKXObjnlJpKqiJ4KYj9mt1ZR+cfB6nvUVg9J9Qzsg4fCdCXI5QaBVEg/ + + wgPQkifAZG1skAwWud4z/ReMipscaFRKXx6fNelI0ZJQH0L7qjwxcU7zP7/2/cCY + + 
qR5x3oedoTb8mtptXbbKn0UCgYEAuoC4tXIeli/A26n4fCHuKiURrrfpypRxnngc + + 6Yi4z0/CyKerC7kyMNbpp5OVIafN8ac0hkCYNVKQngHTEDmp0h6rzGd3kWQtpSMh + + 4o5NBqCl5PBpRX8DNjnONAD2s8L0TQ13A4Xjah9xZ9uQbkKFiOf01ZXUy2asSphg + + eMLUaUUCgYA/J5+yMOFTta88jkSCFcdlSH5WqwSkU7qvkrHwO/rEPOITpP+UbkKr + + fWj3UwDW1MPx4HFrhuDpCaUX3dq9iAyXe/6FemPHvMR6YMC1CqimT/XoJtUSruIY + + SCXiDkDZcPwqOzGy/ZJJj6cXRXbzwMxnxbPSt/Se1RHJeY7aTobIhA== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 8388609 + seed: yPi3JHKKbWaEEG5eZOlM6BHJll0Z3UTdBzz4bPQ7wjg= + zfec: + required: 101 + segmentSize: 131072 + total: 255 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:CHK:td223psbzins2k6m4frmfw26xy:opgmb6zhnwsksydgjwzpfdoz7epm4ynzmvkjuw6s2jntioqk72ga:101:256:56 + format: + kind: chk + params: null + sample: + length: 56 + seed: YQ== + zfec: + required: 101 + segmentSize: 131072 + total: 256 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:SSK:trfwvi5t2nxr6ag7n37ziyiaui:gpom7gv2rn7pfsoqr2bdyr36au4sac46d4cbxn626jsc2ls2tboa + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEoQIBAAKCAQEA24TZYXTTI5iKyFpzzyXUWZgltXj+oYogvqnh1Q7z/Yn2qxno + + 7r+lssZobVMFPbLaKVHZwoY+zzzNu39/Ahif12f/c89Jk133m6xV0QNnwiuP7q7/ + + g4hXjAiRe99xnjf0lU1AsuIFrErhoCHthj+Q4ejI4kl6vorfOJRLMdeeWEDhP+bM + + O0r6ElNwg289MaedCJUl4xMHeclfTVxtZ5qJp16U6H6iey/YF/6vN4tN18mjYygY + + vOk1pOLRHuWjVy6K4iOyJpPDcXzARLazAQQy/5SFMk+g02EIvEGqjpoQY9F5tFCa + + 0bullE/52NzDoi/5lS2iV9E64nK342s2giEwpwIDAQABAoH/IJWXrt+od6As+ZBz + + oEv9OU9cSZOsOE5IjgSpgPa3QOs5siwmZ0oLTn4lAhVQsdfaikecC0PiDuD3qN1D + + /Quqrk7BnG8ofLd2CaWLF3tp62iL9OIFC4ExfZfIAJsqQlIL/B17fRIPxYNX+m0O + + 1N6aou3q46wEjEQQ3lUk1EEtP+wbIKfC+9el4jmImdroBiV+hQYgzYCKvC7ZysBF + + wVPWZ9w+BecTwqwFelkvNfN9FDaN0TawGuTRYg2SsDbTo8NfddmoluqOryGEkfJj + + xUWtpt9GGeERBXytKY4oyEMLN+xglSvUkTdgREHyKvJdIJUI0+nVD2FfXeoB7YfR + + uxLhAoGBAPDXyXTSgs85qUSn0EwUlvLkqXX4CSr3kPudAErRe2vnB6GzIWLoixxd + + 
coFvLgSCYTzlQ3TXfHpykSeNmadavsDFuaCES95OwZOl9xYHLJzdFwVv/0jv9Flo + + yzOyajThxBLV5k/vnoR5igK10I7YsT2tTHoPI/DA/+gq7VoA/IuZAoGBAOlVgyPy + + L7Ntu5IVokLFIWJI2dmBvW8fBUjo7+lH+XBkDzdczxlr6ZJMRFksgP8Gk0tXCah8 + + c/yzMEZB43Y71MHeR5Vog4mDJZpsdcdM9j8RTteEpssVlyQ29EwqMQduECAXWglG + + 2iSWgSoqgeNyR7/hfmUYFiomQSGgp/xtp0Y/AoGAaBD1rZLgjuYda9sPODCVYPLI + + /n5kh7pdXTtjyvBlYiR7ubULMg/FPEZsmd0Oh0hG9+cglLYfxVEHw4193UBquCU3 + + plJD7hUds8y8zTngXw9xSRoxtrRoYtHTK81l8t+yt2jRkay6VAeoSK+DJJYhT8M4 + + Dm3IW9kpOoqB8KgId9ECgYATNxSOoEInX5EDzb5IC13dbyxpihKklQRlZbFkH6Y+ + + CC9smrr/V/CrOJakVVLmLY9xs+A6vMz8cXE3R/PIZ9L0iC6S8kFq0J8HIYlteTwK + + I42/l8/4h3Wj6NajcxIIj5rKWcHzY59RRgerBkceCOo5tgMnph0lKXNRpp5O3mTZ + + FwKBgQDBC9od6eTL5KYL/PU0S3im4HA0Hy4530WHS5TEJVsuv0Jnc6vNayimMnGV + + jhauS2BivD4GSe2wdI0MUtGUW8XN8qf1jNBY07vzEn9Ag1k9Rd3F/K5blYU+B27W + + XZMNa+DYA2bZN0rDnX3Y6otwPRuWHS/Bnt34oORhcle8AD4kNQ== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 56 + seed: YQ== + zfec: + required: 101 + segmentSize: 131072 + total: 255 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:MDMF:civy35latqkoitmqun3f4vlg2a:5fxqze6lzmdznzaxvpk6dmopps4ixesnjkp6m2vpjeh2l5imgpea + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEogIBAAKCAQEAtzVd7X2GXo1jANrdYD8jCHkl1Zt7fO/TDWahvNbBHQYcm6Uv + + UI6l3ZLTW/nRLJ/ZvB9ymauAQ3cAs5C1Ok3S66YhlYqbvCmRHYbvBiKIun0nElxG + + IP5Ox0y++xM0MCWctRKi7WhidQqywivNBZglERI5L4/O1/TipcxEFedLTQY9UVl4 + + dEXrw2zYw76PMlReg88Q9jJcHQRcAJqwaRvkOTZ2qjtLnYk8tJfBEqHG44pKVzvL + + sLFFZLNgl9cCq/xHLPpCfpwpm98KR8uFfYyudX2fG7TY4i9mCS0SgcKBJiNAV6Aa + + IkQHrMs1PQJe4/ychz73SkjKRvoOreIVcESHYQIDAQABAoIBABiRSThq2/0raAKK + + FuQMa2H8PujTSf6xuUNDhz5HrQs7kdQEVWEv08mv4fRkPlrFz8CUlf1J3HAPkfJC + + Xi8ElxtfAoNnXCViDJHhUYWo1V1uoXHqmkPb3kwG/Fg2Vcn8DTTR3DPKSuOnjNuR + + XJOauKO+phj854+ZiNgTWXD3fGdGS9G8WVmGWy1yrJaL7TgjjrfSBFOAN0yyy1qO + + k6WUFy/tF74UiAT10vMnkfzMMtaAOt7BL5siGOfndpH40hM1231lf39oWp+DDxOu + + 
/yt7MysTvHIWB4L76k/s1XehAtEPCjrwF2ucqHlh4Dv4lCLIJTy6hbtd9VnDE/ZM + + RGSBZI0CgYEA7oQCFLKAUdcmpsmjF8Nb+CsuwhmbOLbfXdaz+gG1pfqMxf4kw7NG + + Af5Z6HgnheXRg6CX0QPv7qPe/V66Fm1WhD79Tja0OWJ742e9h+P6WoBVj92xKzVC + + +0CHh6HyQLVBfkulMVNu/EBchrKlW9V2y2ndE9Y+gl1DRQaCI1g4DrUCgYEAxKN2 + + XeI3a4oYtT53nGW0zQwALWBfVNWznjx88Ywlof31GFF8gRHgXYtiPzeN+mOjQMhG + + 6d1q/V5PJguMw+2cQIPw/+CWQA7s4dwRqk3Jc6Zr4V40Gpdn7vUnGbTAw3b7QLJR + + BW/Fp3/331v1LPbJziWvIJe51SZQOPDIUcq+lX0CgYAgiFrsTciY4RrBhyE6vYfO + + 2rz+9pUocDEZUI6t3AvVvs3yt452Lv6uiO3kencRmV4xcPckKEBSsYFZ19DT/Eff + + s+PDBk0gwqEZTG4amers6zJAdEGVHiers4qI4nrzfoWXX2QBzVqHB5RXPwi09PHG + + HwNrkD5oc6YYRSH9BixnPQKBgCPFYLj/d/l2K7x82qF21wceEcIvb+gs3/n/IvOF + + /SqU2ktMN4v7Rod93aeGYauVCJO2W0Ab6WSiDV/sZfUWeoA6AFNr9ak9jdYghI4o + + jGcfdSyQSIY12NBdhHlmqlJkiJxiU97bUGeCHgNh5R4C3v9DV7JkJ4gg6iMRmj9W + + 1Ii5AoGAUzrIstKLKfJBPXp4UP5LTglZSGCGqDj62/iBAdIZ4xz8703g/riq0h/d + + uAZipYubDV8U0WipfEhjUw2NTY74FzFRnl3Ya31Ms1gOXwuc+EagS/M+ohmb6eK3 + + HVaVQohUL//L0C+jqlxfjl6sQxUF0IizsR5XPm9VkFSJqEVYb/A= + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 56 + seed: YQ== + zfec: + required: 101 + segmentSize: 131072 + total: 255 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:CHK:ihyeqq5peg2pdyw45ct5mmj4ay:2rzhndv6o4knlvo4po6rvbnknlcitasseea3mirqjor5c7quh5ra:101:256:1024 + format: + kind: chk + params: null + sample: + length: 1024 + seed: YQ== + zfec: + required: 101 + segmentSize: 131072 + total: 256 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:SSK:vwfthgrx5rep4ixdssp2psxjja:vs6adgwkfwnganksm6w42saotjlebku2jni4y23uuczi3q23i6iq + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEA8TipW7unNnPRZ6aycrHuDFicLcuI+QXAG9F393JiO87SnmZm + + zjI2FWGFDV3XpTb8+6QA44n9TTWiAT62b9kljU7s9Wf4dBfaJlYIrC5HIS209i19 + + tpIoQgdh31TYqXy+1o8oJDhTBD6mUJ3Lotj8oAKqUaVTKqJnfNSzPMdChYHYvE1c + + /Tr4c7gpxXMXEMTD311HtYZd3YnZ4dZryWfyrdf5/eQfrUFeY2MuPdl1ag57+2yN + + 
4mXynTGHmENgG0isd2OKgyBmlHhNPq1lNsxUrQda9ht0dcwYPvLL5qNnVc2z3e5+ + + RH/2lBzIh9QTjpo+pdMxqlQR6od4jfZdrFZSDwIDAQABAoIBAFnzVaAZ+VAiXy+G + + J1wCwrCC6HZhRCoMPWeCNHinBD+mL78Wk3aHnchaTam+2TfIKg1SSmyPG9BLVCaf + + sptBv7GSgWU/yJPOAzCxe1lthmO2bhkwvIS0uuNoalRECOkm1ekfiAn9oONf01gT + + h4ip3oZyh/2bJ0iqN+oCTPY4nbM3j3Hx7Q7NI0AJ+rutEhM6Ina7RjE/VmDSmM1a + + ICGCXmae7Zg20bM8rg75BZ8Jf/td+5lvtfaEK/070X4AGPri1lAJ3vdlHjQbvJJw + + atSA9Ky1Qpe1t+rPjMBEAdUxp3bzinMGRwt/tSj3JNXKfyp6JMO5zM/dOknT4C99 + + WkM2O6UCgYEA83SKCn/lZhGBZD4thyRHRCcyVeO3GxlIO9mjt1f7JXiay3hOlqOu + + x/sT/w/Hdn+qSD8F3l+IFYP8X0hToN8oslu140qPS0RPuOyIFigm7jw++VqnZ85i + + OhZ/KuHsfnlxPmc+vS4Qh3MkDiqG3xfLVH9jUmCXDhQG5UhOMgOsgBUCgYEA/aan + + lBQA2jyoXiwnlE9P0HF6ZSbxHZaUQrw8xDuufjQcBR3mmAg7p8Yb/EOnQKTjage3 + + EDzNJnqPxtYjubxKtBu/7rjvaMkvavfH6JAN/Rl3fkibOcKbkoSa7mGmuUE6P2UR + + 6tV9vd1ITN359AE4VdUEapzanyuAjIhc8ehxLpMCgYAsMOM9tKl3NYY/I+ovta4Z + + +ONyI7uA973c30yQYy/7RUET3eql/WAkfLbMfZi/Mb0/D/GIw953yVVuFjrX4KoK + + dgs2Drqj9uphrs2k9/TZGaZ0rLfmZ9f8o0jCB/BdpL2hjiwdOtdVPtk0mROSO0d1 + + NwpYUaAZthjqVY2cFn6hYQKBgQC/hTJLML9kCSDn2lcYOLp/HO/ZqImuWaAgs5j+ + + YkHisN3nTyhp6u2ARKmk1EBZIydDTAgBjqcoQqqE6/OVroKJc9p8Gc9LQ302O1kK + + VJr7XFtJUvFBr5tgChghnkIQ5xtf+qSIuCJ1Vbvdrk2o27L5vBnVlhHM1T/+3Iex + + cFzlWQKBgQCsutY68bH2Wx5E2iiO/Eay9XQRky8HRvXAZDZSG0Bfn7dTcF5EddWk + + vhRSjEpU/6wrHcVJb1clP99uIpzgK4QG7t6mZNZ/StqCHTtyNisaungfz/B3k+li + + HMntS+IYxlYIsxBNLIPJLqNPBqAlQb9SJkoO1noTR/+4OQlSb5KNpA== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 1024 + seed: YQ== + zfec: + required: 101 + segmentSize: 131072 + total: 255 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:MDMF:2o3r63lhfs3qvi7r54gawvyuca:cl62d7ozemosy26dkj3xuvdjrzuf554mbqyawhvi4jj53ddqrisq + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEAjNzgfrhm9Cs93A9LJ9yUmKkTL4JBhDUXXT/O09oWWn3wpq61 + + Ouf2NcMGwMC+1tsNWA/QQYwU66JE1s+iow3k5a2jd+Ov+pRZJa8R1HO21iIERvjF + + 
qJOvcxa+PBL37dZcJP+ubnhK6O3sIdgvZRy8NOM0iscXoDgJZWJnWlIIcQIVy2w7 + + 32596JhdXHH1eAd4WH5NtjCQQId5/29OPPUlo3CscH18gymseagJ1Al7ixSRnkp4 + + AnyHxk1I1aelpzRIVSJZPWrfFp1eIm3KPha2SPvcHGugGk+faTuJ7HWFHN8pLQcy + + qTXCSMjw4tsHKEbDvb7A3LCnAtTi5ovuYHt06wIDAQABAoIBAD/J7me0PfsocdTr + + oA8nFqujNSr4g47JNBFoSdMqGaFVEtuIlk1cqeRisvYq0sEdZYeRca+dLgQe8amN + + UYshSZyw6yvpkdGZyF0GUL6ywANsWB+DnI7ggj1N+UvfEyNDRWsD1gv0sYeV5q7U + + 5XGWd6xDj5Gg4xQNDEQ7Ma53I5d4vD9QgQpIHBrpTuTlz3nowkGIzXnpVQ+54Oae + + lPb8oBmLBagIHVzWY7Bh64oFh/kr6o9db/QkGIz0h1/UdMhj6HU0+kOUG/pSWHbk + + 5fRoHL7mbXgNzKZaGPOZbATzgXcha5uyWDs0r8WekKrNsUvm7E5vOE9UPdLCnM0O + + 7YHe1+ECgYEAuwDpwl+IzdAtjPNQUtsOBKuDD6/VempGKX0oyTwf4SvY8VmU0Juv + + AhHIq3UyPdJpBLY0Be0UIvbQWNXdmRDnriADLTq7g2QYDwVOI/bmRxYUI85FXjIC + + RT2u8nQWAzFfOhURsF23PUwB/rdjtBSvAdfNSletKp8abFApFonhuNMCgYEAwNXS + + BQ9nw20GeqZnYZOASnD4a+iW45coNSkSGETGO1J917xWtXzvGAAAqlSv/kNXYYqr + + RA8yWuYc9nLBx7623cpjt9CH2vXefodDLlUB4QSKEuP4f9/ttsw/L5xmJVdQ6pTL + + MNpU3XgtNl8qsOVAokOORpbY5f8leuuloylaxIkCgYAhSyRTKtccbXfupFMkrUNt + + qWuIG3ISfWFIebQNP9sdJ8VUEvLfwRgDck8b152+S/vOjvHsLC1tnCuz5T+yxMO6 + + yJBIOTCxT9zIr9UdqhONjGzBgzPudVDaKwU+vVQ99UhS+vVPRSAela21P8lMgnI2 + + DcnK9pkqAXGe3xaxoJLDaQKBgQCoASJLelKC9xfv/86OOr5JHQeyrB/aBbXoKvIy + + 5qh2wrYVIWfCEykUFdx+ie4TboRQ3Um9sCfE/js5lF20MzqLHWunmCzk3dWNEze6 + + xCEw9I1/S9MTRfuLiYN7bZ2o5tv+pMgqte2+TpfFiUBegj2/oW/xnDc4mwUChQ/4 + + iW0lUQKBgQColYu/R9ympb0+hMYMtTDrJRuREuyNCB6429vX5OG/RIxfduxCvVmI + + SASInZIbWFSqH+zt8l+L2BEi7fzpGSk3I5/9o0obRRM+9HnJD0c44P7Bg5IIsFv4 + + aIg0X4KaSZdNaLndO5ieyffsY+dEH0pSg3GRSrSn9inqiogQZB50pA== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 1024 + seed: YQ== + zfec: + required: 101 + segmentSize: 131072 + total: 255 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:CHK:mvk5f2mghudk7xnknqddhvnl4m:c74myeugtix7e7l6uwybtrdtfselha2qrcc7hhbymqr6brtg4cea:101:256:4096 + format: + kind: chk + params: null + sample: + length: 4096 + seed: Yw== + zfec: + required: 101 + segmentSize: 
131072 + total: 256 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:SSK:7adguvusvvbmctuzlf4g3arfv4:tkh7d36kwo7aexp5kcxl5qlhcahkaox56wgnaoz3ezeciurh5qua + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEogIBAAKCAQEAmvahwGh2jLEIK2xDDGU7d3aleXiC23PzGCDVbngFhe61hfAG + + ViA5s4fvMo7cgz/GGc1acIPKaLiZESPViz2P1QPNNMCFvTgKkYdRzE9JWks4Vzx8 + + cv3V8x6fwgzPeuGij0y/20AosukVws+9QyAvEXleO0BtDnddPhPPfebe0JOr64BE + + 1ky8r4VE3wIMKdxigyqWvCuIrSolLDpiCaj5Puw5KwMN17+KMuyIkARse0PFGboK + + PB+n99MyxpMZZWeDesrOF56HT0Wl18AdEfqDs/M3OqSobMy4SCmc91CjkXEnw2dz + + GVMJbyutQn4/5vrLVZb/Y8I/y/gzZ6YQOCNtZwIDAQABAoIBAEPv9KIiKjcsNeSz + + pgF9MEEDpzBGATis8NqXKnsv61v4d2StAlon7qQi6F9F+q8f+n29ZfUGEmsu4wx8 + + pVZSOwisjf6emQOH2jpLFTV5XTNU3vJ/9h+D4ZSgzHGKpDu/SEGC6Gn7CtzFC2FJ + + KjSPm5MRnppjeGxrMFnS3ZjY6r6OEFe7wRVjH2wOYQqHOqPOUpqWZJ1bktz3DzkP + + tK1ZQ+rxu27kpFbxeqLohOLW3JTrgphxPAgq8VltL0Z1wC9Umd7FjKb0irkcFgNm + + 5ZtQJdHwXHuEyuYGO+zI601vw9XsBnMpl6NOv9nvhX9OFEDaft7RnU/2GuX900y0 + + dG6zWUECgYEAvB00OoqIKnXkBHRJDGlL3degn6oYZlrQOjzQpX80HBA6/hmC4yy/ + + oll+pPlppmtuHlzniAQXcUsEtoaP5F/AXXDALKn9Ebk19VZUUBKWrMBE3jcV4ULT + + AdQhzfMExZC63TNKmn9v2naKzg1HtwX9Ms0ze+jKVxLDIGWxg6fy7M8CgYEA0uLT + + G3/xSrQbhYzBk+pBIIzPAZ9HDySJfrtje+qqmO40k9dfIrA482SlPUN1m0vQvMNq + + yvuPhiX1gSvnEFbVSpNUsezIU8j3JDtzxmuTljYqURE7ZSpSXe7WYCgEAZeBQY2n + + Ncp6jYRptOn7O/E3wyVptrdY5RZc8Y4XwD+TC+kCgYBSZuiODEkBcIrleJrXGPjm + + wKHXzwbJL1avbBxpooMNF/7/d+Vh5iQ71cAoPCkPgVfHbSLu7fvm4Nm7qs41V8xI + + Ii/MYNo+fUcppRthyALAwahpPvASsNcFogr80Etyz6dLZkBz1QcGR48eG6sifTkg + + m8rFqH+aDNn0wxczeMps+QKBgHEJE47R7UvVbksPP1NBZNdFok+ESFpdgzViy9hH + + 2FlQlO4Jqvy06FHNyKQl3Iv4/1GujTdvz2ZgQk+ScK/ZW0o13lfgSyBdv9qz40Kf + + tuP09Imvat626J9gvZec20jfJHE2tEGo3jesmdxW7ksa6IC5NQizDfr9GaSAPUrW + + yMLBAoGAHI8TJFiri+YF0HonCiJvQzSb1dX4CQ8zWoLb8bgRN204Opx2jF6fmGER + + B0RkYizKEjWwpbTAaTiYvMsjOn7eClmsyj2V/03cQ7mppZbFo+TmxhunYrfiXKpe + + AiQTdpujt6/dkc7b/LlDVjZ1eNRCBy/ChJW85mfpogC794xyiWg= + + -----END RSA PRIVATE KEY----- + + 
' + mutable: null + sample: + length: 4096 + seed: Yw== + zfec: + required: 101 + segmentSize: 131072 + total: 255 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:MDMF:vd6qxch4fcvs2szi2saiklzoky:btq6xrsirtyy6ugjjgpaguaajd6zopupixy5e3ngo7odya3i33iq + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEAseah1KDj5lbm5ULH1Tkdtp2aNSeWxxYzQ7ifXjfJGDx5yh7j + + 9aGPRxmB4tmdABQUfNY58aoaYoJo/JWOV5OAq68RdGGHx3o3Pa4gjpDMdIq52biJ + + lNnAGh20TJcd8lirD7RRtEoBpzQ/mWDBKiWPxm24imUUlaRzEs4BdzjaK9mASuu+ + + etKDvoLy8ajTGtomcU4gKAwbLYoMJwsG5hGicJlnVJ53PTjfsH0egpLW6vUfglEH + + tgaNaGfPtPvj5uJMxmm1vrXUufLok1dUxHaFPHzX4j16v8FUCZWoP5QEKPASh4Vq + + 5/BnFdWuQP46C/Wbwbn0Q3F2WqwnCWVuvXe68wIDAQABAoIBAB73PaEEtlagNsWe + + N6lyLS1dxntNHk4eG6NEjhz4ydyZnjtj4Bsf2ZAvLPAfH9hlJmHKakCZ8sjF2V9p + + 6uJsjt+TdA7VcSx0Jgxq9EjMhIIeqZXvrKcHtgv1sq4IOdK2w2PS58vhe+MuUYmx + + kT30Vs/bxlz0lj4r50nVKOUnNLaf9PwSSR3SoLAARLsvjpCSjs2QjxUDcnTmDp8U + + DAffOA9lGz8Ch8m3BR561h8h0+muLAXxJ6qx0cZ0+FqXof/FWo9MFMJy41+IkO6O + + YNRRD3bPQmbegvAx/yiKiVjfo1J5Z8bEgYNM8nKX5IUbdVd3sLaobEFXX0CAOzE6 + + 6JA84cECgYEA8XRtvIkAgcp3D6bDCTymxwBX8t2ltCqYezjQif2Tfb3Nnoem5wxE + + CTJypmX0BqVYlYmb5bZ2BAi5A0ijRTwpZx9mnXB3HKmH6SX9Bnqlq8EiZMLeTYgd + + I0hHdd2P3y9vbu6MuZX2KYbXKun70Qb54lGfi7D3FWQ7e4V/IWkoYEECgYEAvJ4d + + CMDNiqQOHZXHLEJgUID3JlrIFEMrp7pc191pjg7hZ0pOoQzonGzfzkxxrtWGsA1O + + AyIxI5Wutoe/VPi4dpkM5oqNiYACUhwTCNmwoO64K95IUwyCsFJ/LXhhHVpu04GJ + + THB56kQ5HbyOoAZimlCrw2LxwOnicYQwaHrpDjMCgYANBDbKPCR/2rdSa64F+HQR + + NE6JdDNzo/w2YFi1p6rk02+bRTrVJ88fI84UdFiUZyOAZDu4RX7VNtcqeyb6G4Ur + + 3wB8KkzxiZ4fDoI2cDQwLyg4gFzVlyni9gmMLBaOdJMwSsHhW1k64d8FnDmMCjE8 + + ZyQPtsmLKK0gOpEg7vdTQQKBgQC3xtsFP05FqmkyfFAvGJFdfvrQfR17WKM9bsCt + + d0c0qd0HVghctQYj+5TpHeSac+QivyPmu7bjNCGiKYvMD/czXxaJvi//7CDWvhHx + + yqFlfJMn8xHHEWZ4xDi0JhmBjy5ymEEdoG25SzXXenQBCZejQbzJyCtDSt9euWyt + + MCzJrwKBgQDUR4StEeumfmLz6yGPxIucP6hxYBJPfS3i9dDV08rJkw+2Ubzk+oGV + + AU5NGUlSKdw/pbBRyXgICdWwB2I1NNv1qHiGoiaavK+/OmI89MiXhZ19OvrUc9P2 
+ + ZX+BDrxdjMWbm1uZxap0yMTAc5G3Y42M0HHFgM+fU2JZF/CDqDsW4A== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 4096 + seed: Yw== + zfec: + required: 101 + segmentSize: 131072 + total: 255 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:CHK:rvytwcc4sbmmkrdwh6b23ckv5u:klk5nqpbv37pr2qavcebcugfnikarjdefjlseo3ru5uv7guai22q:101:256:131071 + format: + kind: chk + params: null + sample: + length: 131071 + seed: LCa0a2j/xo/5m0U8HTBBNBNCLXBkg7+g+YpeiGJm564= + zfec: + required: 101 + segmentSize: 131072 + total: 256 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:SSK:2nkvk3zukvkwkomg7lcmwi4k2a:ulzy6ok2oaftysynb5uqactvbqa57ohasakq2qe3bd5j4jiyz5vq + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEArb0j+SDFBcKDajB2eSNb95aBayyfNNwnuw/flNkdxKlwDFEd + + k3JU+xAfc3+CdmMVlugQtXFNIAADiorcu/zCs1mh0Y1nLrrQGKLdiARPkUMavbgz + + Ubv/GAl20OMER3fg87SguoHJNvkeHOSSsd3wiczXkB4H8tUZJffGe4Syoxv5CLhY + + /9H1sd/Ducqjdf26RTbFDL/jLTPjmiOroZAv7p/yF3mVaXsQPgN1TOSJs71BmFKv + + SlLHwKjiFycMNbOoEGHL8iEUzWBU0HIEVm9ISQY91kOIb7SGN/PkOPQNWjyq5yeC + + XLTL5YQ2qIy3J2CjOTUR1h5gEuK5S1+ig1j05QIDAQABAoIBAA6tC3zf58S0yaUO + + svNIqVwguo3zFv/AGRsUHC7WqE0UgwKHV5g88DDFC+MVwk99zzUQJVkuWPV7CtGJ + + KVw33bqIt8KbzzuDTFDIcS4sLwx2PqwIA03EM6g0JHVAt/vRhI8RkwIuNHEQWhrW + + tA2SUd9SDmN+Je29UoKCi6Gjc/OTJXcc8hTHIoAcoMX23pNQWDjNEKXRrXs6F7Gs + + Q2yfauPXf2cM71cZCeV9hkqedjIln0R30SdtAS7I2IeRNTsB25w/S3zE7xZryZa9 + + +Jg++rW8hlbmJpm/00iIaJWhHIxbPO7tyQIOkr4PCFMvtHLUKnzCjmL3Gsd/BW+m + + +QAZ14ECgYEA7O1dRawkbzapYPUNed54lBWtIlaC/J5vpAgnelqbxDH9BIRhxQnV + + T7Up49feR5a+2x0QSgjsg2rUxF0t6vP3BAHfe9D0zGFU6YOa4HrQ5fH2/wXx2g9T + + aOBCtwOL/C/DUc3aMD4EOHLXxismQXXWxPk/+EBtLUA0XIRLZSnuj0cCgYEAu7mU + + XH7no73Xo8IqdJbUhfKfZSsIN1klCcEnnCZEuIi77x8ZvPM8pHyrJomFew5jsvZ+ + + EPGTvd77u01/79wLgKE94s8GvYnggWE1ItScBx3A/8PnQODSdEMsr4Tdm6Qx47oZ + + 5tgM1u7kYBwao3L9aguP9dChBn6e93mOqQuYqHMCgYEAkmkgcXSuUzeRNgRZHo13 + + 
L/OxOP4DFf8GeHQ9iSPDDFvjwk3YaT3pXsdSKqV0jALA0IDGVynqlk+HSg1W2dGH + + PSe3Jjl7fW1MXr1gEQZ0XxTGkNPon9tGrRGgyJ3dfKs7ZSrzgUphq0x0wNZbXqpm + + XPS2LkAJ96Osd9udB9gAvvMCgYAvtyIAyLj0I8L1+tpzvArU6TCetGtoNh519kSt + + KgT5qreqNguCvYjCfnW6W+YzuxqYWJL+l4joEA+IMlC8lP/PeCyUw+6AqtUHzb+F + + 1Oi73lI6MH9NPFgB+TkYe/sgHoIX0ivXQz8wOpSN4VbcCNRk6f9zic4EKpcZbCpY + + yXvKBQKBgQDHDB8/e4ZaiWY5VQmCk1GRZmNmuyII+vC0ZynlSUP6HqXn/Std/6Ze + + EEfnVCZHEvRkNwRYAWcaY54uvqj+yeLYFeIX8vi+rs68uZ4jKI3Hz8vn7rgzPulP + + Aa6fShcdGh8xhhdl3bM62/E87ACcr97QSvUsOuhIYwr098ivyyStCw== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 131071 + seed: LCa0a2j/xo/5m0U8HTBBNBNCLXBkg7+g+YpeiGJm564= + zfec: + required: 101 + segmentSize: 131072 + total: 255 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:MDMF:jnnfwffwn6qx6qtfhvk742p6g4:gzqrivbdeczwpgkwkjna2taooajonyb3h7aijcyhv7ptaxtycroa + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEAj/NZYu1pNcdeSCnt3N/BDg1jmU7jlbD1pgKaSXLAblkSAhD8 + + CE1VLCsWfNw+dbTJtvStGncEiw8gK/fQwSoQjxxP91qxD/6RpNAA7RamHol2bMiQ + + 0gtjT8H32LmS1TBYfszI3AD07+cFGAqbV+X6lobQKRK8U2eWD3x8V1pD9RHLJr6h + + nMPQ/epF1pxGf22+g7aauEEWPOStTUH3l8sf33Wg3WN2SkxDqSRyZXiETpta9Rdv + + oMjcsWHqGOTy9y94oG2B4AJHWrR1mefFN3Uu0ApfpThoUcVyI7k1XN/7y3HhEZru + + GJPaFafHjH5DZeHC6fxmJkC6vXSMLClNcfY/RQIDAQABAoIBAAmf2ddbK7hghM16 + + 3jAxFqGHpCPdKiq5OudXT7T+8t493s4cEBnG/92U4OtGt3dbt8vfdo5pLDjW8U33 + + QvITS6mh2TfezK1W1iqIjLNNWxx6ENyrmUEt6T0tKRLIr4hI8/XAX+KTvsyma8kE + + doftNLCpQVQpsEU8TQRqjI6zlo0VQo0sPKZsvSfN6IOvPciLu039H7oqHxknHRy6 + + ACQIkCvAEH2U0OY3+ABE2y7Mn4/TW1eFsh/lp++FngmGgQcMIYZUR3TnOWx1wzwl + + IiPfFo1jVAXa70Up4yHe2Nz7zjNh/VCJCTMA66MbPczOjIbUtc/GFcCvDp3qCnCj + + nRjMDOkCgYEAw1kF9qQNgFlLTagCh+uVug1hvkZVPeDUd9MXtIn2TvjbdSkHilsn + + wrKguLRSgo/OKRj2J63xhvfgb91cMxsXb1+OHG+YNU2qhFhTq4k489rHID/iVskN + + Kr2GaAMGXQb+9t/2CqHCjkur5m5VxZSIM5OgRR/nLutM1NpHWG929r0CgYEAvKUV + + yqv6gYMnolZBcC9LhB7nZ8yIBtlmYjXRHUuh1Dotx6UItlA41k6GNhFiZXL4xPbf 
+ + F69QzvV9Wuir9iW6b7tP2JUb1gnfgB+KH1F5PoI7hriUF88VIHWFeSg1ezwIMQvd + + 9IlJhlQTk2kvADo8jwCZahVGDwsQzsf/n2xw1ykCgYBau7axYHGE8/SuFSNXzmy1 + + BhIoNrLREuSc40dXa90jwSLtwCjocn59SEquf9LzIag4Hof21iwg7HEqhD6W3jZ8 + + XH29Z3fjCjfxULVML2hsm2lx6TpP5QJgn7cWCJGkE+PI9y1ossmTHkKxvP3Jz7uT + + eTYv5SmT+WauVtRclylCYQKBgQCsBZZTlHQA+gqAXEub82TXfB7kZnx8Um6sjAq+ + + viM6Fjt83J+PMKRDuKNmVn/1ptv3MG/Ld1EnCHFhHt8AvPK/xH1RMNeLXMF0Yk5f + + tLntKHEDrvlMpMfNK52lF+d9EwcdQocJ4M8tMSoQuE/l0zU56f/73p5eRWb0SShu + + xkI30QKBgQCw8SH8qMlso796eW6V4QKOD/zdfNTrZWfAK4BaFR5l4LuX7G25xZNL + + rNe7CbDlFzzp4ukkW6kcJrxR/4RLJA6nRDR86kvzl4A4fwrGKCv1vY5y27ioQmWM + + WAEYfQ+aorfZ9lbO1+QEZc7kjXlbyt2/SiZxOqzm4CutbetMlaG7nQ== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 131071 + seed: LCa0a2j/xo/5m0U8HTBBNBNCLXBkg7+g+YpeiGJm564= + zfec: + required: 101 + segmentSize: 131072 + total: 255 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:CHK:ocqx55dopakqnzyui6q6euz4gm:wklziyoctwrz5zjfg5wfejz4bnaagtg57fgjlspl4ba6ij6n2obq:101:256:131073 + format: + kind: chk + params: null + sample: + length: 131073 + seed: /N4rLtula/QIYB+3If6bXDONEO5CnqBPrlURto+/j7k= + zfec: + required: 101 + segmentSize: 131072 + total: 256 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:SSK:7u3yuuf5t6ixafggoo6rozlkca:c4vxsjiots2kqxh2xx2f7j6e2rgxtszskkvv2geby4f3flm4zcdq + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEAr1qCo/bz9q/oRev0Upfgc7gVB3b5/2e8jkhQ7hPyNL9xPzZF + + bih8Lbn1bX+1ZAR/cm8/Vtn58TTQTrJH66TCbkBYHryvodl2yA3gLExnfxoESpWu + + tNRF2VOab7/RMrmseicirS706Nxk99aMk0Mc0kJ/3DnEmDJUKjfq+/zjCPUmrfv0 + + 9xEiHAKHPN6EncJxQbPqWW1y7QPoEKoK/vBYusYrXa7OdDRmK6cmKWgtmNCoEvGS + + IujkkeP3lCiqrVcofGGC00VizESa0q4Iyq8Fuz1LaDHNT5HO6EYPevNjW/hy89Z/ + + NZG4tOvodZsCsGqD5XlwyIrmjxrETiS2J49oPwIDAQABAoIBAB5GtuK/i1zu/2A/ + + PraaAYeJY5wf10dZbm10oACTUhD4cwGyiadc9x/gCTeoQrXrruOfwKRqy2RxtXu4 + + /YD6uBVYJ77a3kpIJWGiP6/2WzJPWClWkc0oD41YCYS87k5fT5/hrPOQ9XlVQFuo + + 
YCo2/r1w/OmV0dNjcTO+5uQuRDbx75xtgYgICfHMOkFnwqJWRsNPtvq1oMA9VVrI + + zu3GyKfKQXWMrJvhmE3s+HLpv3j3whh7W5yGw5Dum8bPxh7Atc+15mYr5mC9eoQh + + Wdu1dlY7lHX2uHJgcPwhLz53jQiqlLgZ+8nw8ZNvSeKqmqeR6nhrDaUb4idQBzBI + + aWiv670CgYEA5e5J8QQuiF76HZqDkAN19yvMue8n7vckKyoHuhtTL+JOFdPkcob8 + + nb+WZkxyce9MjrRzSnXmvn1+HcY/g0+hw4qzpcQFJbyjaPREGKH2WQmYuJTHTJ5f + + owfvfSJqK4p1tNqxAtCHHWJfOA6v9ulAhHZBF+2BoZgl1xZxOrGLXAUCgYEAwzwf + + slrlisyPa04r8TfgMmwt+Pc/4g5UFC5WK66rWKMCzs8p1uLLhg39QwnrmaRxa/O1 + + t7NKEVz1uOwikxmjJZCvnUgHrYVeCp0uVfzS6s2hY/iMh+36Ocy4L1/cBXEHXbdw + + 2TAkQwNYueexeO8dvBkuXll67PQaHsSfaRxfanMCgYEAsdIDpT3SruylClgA/1Nt + + 2+YnwnROseS4OBmdODUBtLqUIRVqS5hRrb4JlrvwlmS3FHZB44gjF5b9/hDf9bGU + + LSILpVtfj7u/tN+T+mjnmBxv2/BT4dFprS/p6yC+c0X1mhS3aLHUjMkTUsspEw95 + + Mfgyh0rLQinkud9FWlsMp/ECgYB5IpnsGPfpeejWxIcBQRELWBHiMs7hXNCQQPvY + + WKUZ9vKsDN/B47Ax+gYVDVewWdbCC1HJrCWdxlb0KRd+u959VVuRM/sHkAN8hHAW + + jCr14yZrF/Fh+adTK5FwW4LxoWLXpBURvQwSxEXN+1MjXQHPDrS1d8GMuhxm0Mqz + + 9hXBvwKBgQCpF+uROS92UluZoWsjLuc0Tz5YP5ykYzXM8e5FBD9dzs8DEudMyhe6 + + vsSmuxaSPNNEAKG26wO1+774W2JFJkRLeJOpeyX5nhEl1tKxi3ACe+cMQvd2/bHA + + blKtALR2ZvXcoU6QC8XVABQ8S3h6+wMk4i1DjJ58UgHZcoHrozlBiA== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 131073 + seed: /N4rLtula/QIYB+3If6bXDONEO5CnqBPrlURto+/j7k= + zfec: + required: 101 + segmentSize: 131072 + total: 255 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:MDMF:kwets5gntkkdhed4hwdsfv3ujm:ws5ltuz5fytzncl3xkfkqc2f2zrrdnxarr22rckmkoruglgyin4a + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEApvGy6mRElXz50jCzj5EW5hrMZo8fXQHgZghl+yaQ0NJoVagZ + + TOjtkuzVYnOlF/9QxOt+yAfuc+XjPn3DJGbnnIUi/8qOUI6CwQY3uUKBK8cSRMTw + + A/LihRrwM0R4Qws/QiBQhD5KmanfcMlu309Pg3enAeAVpt86I8mgMxE6f26iXgFj + + IaLFx+5el+PvPYqdOkcEDcL+HXVYDVEawzifkXGq1JtWWKnDhM3SsdmS32mTh2/6 + + RmocrD7o6LSdOhgbhNqWdOvRt/ZLZif4Mm7TqG2nHFJ8OYCkpHK4m+JpNOiKmM5W + + BFdipmKRo5jhGZWG7w8rWAqsNY6GMbcTXwgNoQIDAQABAoIBAAHqoq2Q8N4f+Qy+ 
+ + ESOn7GHAI0JWqIskbT9yn3wYg19YWQkJtN+miWqvRBxdHEM4I8Tc+L/CYo0LUbZr + + EnFRqp1IBIC4AjX/ytW9NOjQMAQxBP9L3P8Im+vgBSurgK9xWryvOwlnnyrgMb/d + + WiPfaNfnKOBLQqhfpe7Y/tkzPI8PsPWrhXL1CZJ7o3zWjyn/l7TNvqB+i7AgcxUx + + cgIV4XxY/HKOHHDz2OsPxJk7G5/iLZbl0Toks1G28NP26rOP49UPBq1eycfnZ6Q2 + + 8oywX+ivnK2AYZle1cMynCgIOdZNE+e7eb+bB539LjUtjmDSculRFvUn5AK7TV92 + + gZfCrMkCgYEAw2cjlJhfPg2d+bMGcRPDGLEfincev2r0rbok7beEra5OqNaH1CD9 + + +ohgZd/WgrNbHsF0aC3kh+6p/kUrunbIYy6ZcAQKiQ0vnOie/TuLcPHWocxSTrLh + + xm1qt/Y2KBeSaR2MyhQVChJSWosRJYj4e/ErfCr8aLiMLRgA3dGr9SkCgYEA2rc+ + + vTulxOHBKAnlBS7A2wnfaCdyYL/Pk2/Jec4yaSd+xsYgcIuH7p2EjO6xflU1oyZk + + U7b6YvbpU/rwkJksdFiDrMrN7EPJFynYK4g0CRPl4P59Rc+gvJqDsVG2Vh2/ijeg + + 0ybnNZFz4sKyKRVqI0Wu2ewTcDuduW4i5uemK7kCgYB4rcsoq44uycQmAa3ZykW0 + + izeakYT43TptzMef1LZpeXx1A8Fxfkq9HtrCMCLQJ6r/7KRS7vz0Aq8ULW4bQ97w + + ekgjCSvkhrNAKd5/MPYmdAWFeaXfmtSbctn08WdzDVPL/YcFCrAPv08DQl39m4Ez + + MrgTgIzQtCFGfEuUsziLOQKBgQDI3+noZMMACxObEVNdKi6IPg4Im8op36Dm2ZGi + + pGWaPGLsbwVWOGB1IAigY41y6RGlMVqNpI1cnUd5EQ0m0PeKN81fwrfUGgGzm4Pl + + n2ejOrozpagqmOIYtpTjI5giiZnkeOjlZWKOyXM0vfphT0C2+oX3siG8P5TBvMyj + + Y/gzSQKBgQC15p03OtcgZJpBwQ8bEGYVLu/1YiEQS2TdIKib+XOLwUfgWzr8KHsk + + 1rv9A3s7OxBZu4MD+iA0WyhcNSzexf6wEOvada9LHQl7eRmsW5OKDJYWyh5wo6PW + + CQLCE0HGlEZeBF6vexE+oD2/9T12/x/q0ylExTmXpWlkiydG2Vw55Q== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 131073 + seed: /N4rLtula/QIYB+3If6bXDONEO5CnqBPrlURto+/j7k= + zfec: + required: 101 + segmentSize: 131072 + total: 255 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:CHK:lf6qub3bkdhfozpdis4bmorj7q:zte3u3ohbuom3iqjjx4odrjylcpw5myw4n4kwf4zcaif36vngf6q:101:256:2097151 + format: + kind: chk + params: null + sample: + length: 2097151 + seed: uqWglk0zIPvAxqkiFARTyFE+okq4/QV3A0gEqWckgJY= + zfec: + required: 101 + segmentSize: 131072 + total: 256 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:SSK:m34dbj7wfsaq4efuz3viwht42u:66nxvmks4d4ebbj2c3behcobuk7w7y2i4yz43ethbflgqpgf6nha + format: + kind: ssk + 
params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEA79z0LJE/B4nEn+ehBk24vaDlLdj7N9ieX2njI0PrBfe1hiPk + + YOhLDWXh+112jXJ9wLBdv5UrJxeFw4hxBADycqPSkC9BVte646lCANS47G5sHibX + + vpmVoeuIV2GBA5OnWUV/5Z57JrNim3dcqJXtifmj8HmxuHOIBWxmKdl82jf9AXcX + + kk+F588b7YRjUNPNpv4PROeLpFu6xv7RzdyZJ/ByrSCU/WpW20OKCqNmBWRp0uV2 + + HS5Ii5NCql/Cl21O0ITD+XmKefD0gz+ek0XHWkRMb394Y6igeS+ZCJadMqD7FzBE + + Jm+n4JOCmJjicPRHPFzUWRsTOTAzTDZnzy6/MwIDAQABAoIBAGuQhQlVa1QIkp34 + + 5Cus//s943h/dQ0Svdbg58ShSQyIjKVmhBx3H20XMtOkEq2U2dLm5GutS8hAkrJg + + hfn7KL6DO8KABoeYv80nUpuHyZPxYtfUqGxneIQ/2QkChzYg6WutsJC61NRCnqZE + + TU+myHrW8g89q5ahbK6t8VS0HPrIQZOuqmYiGM/seTLbsQVkppli6ZfCASMV1Wqo + + 0A3q1RiHlLY5UCMYUxdPDv1MrhkRqMZ1HScHtQuqRrpijXcfeHYQwU50CFgPs2nJ + + kk3Tm41jAFzYoBqe3E7p2R5Wlc7rbC43yalQ73JKRneS8IdAu75SWWcRH5+QASRa + + ojT2kS0CgYEA+AVYqwznAuhEp5x6w9uamy+/jwEOK1usM7MOo7PrPCu47z52t3Vb + + M9lgo8eY/PVn2jlgSMz48iZIE6fdr43xdtpxsZQZVu4bpXJ40B3YdNmob1easivx + + 4fPjZZ/OTb46veFN1GNsndyQ7IP49NkiyOStJUey3LDxJQiIP56lxJ0CgYEA95Rr + + 5S17yepS3dwkcr3H+zN7/UK1CnCzzxNKp7iTqK1YYXeOjNZb5s6qrtwAZx9GmRWi + + U3gEWTOcKaAQGPjHA+ohYU/qDpYbY+EtfHVDJqc+hMy648vyWfeyZuVghJxtSPxp + + jDJT/ACxnD6fqDzUXRtViBfaSia1VKtCipr7Ag8CgYAI7mhbAIPxHtwaDRB+rRHM + + NNP5GligRxTUZ8ZHLttxt0FZnC46PQejvlg0jaN8uHmc6iQFexwb3DUMQCdDgyEG + + 3qbpdiPTdY+ZTZ38IJcC3jOqjsULVXnIYTf3GOIc+pSy8cITu+DVbnPpkHcOmiMe + + iN2TUhmmyNhmNQBzCgt+IQKBgDO5coFC62XX6tAnOgYu2CUHMJRM533y5d4Rbbt0 + + uIS5EonqbIHIFxM1gjteA0eIJTu+ZVeC74WjXrDjm/lboFiVBbxK8d9yRO6tEM+7 + + v/fHYSxliXYmGc/qC/+rVGrgM4TYF0UPDrTLgE/gVYLUkpmRKGFyekyboa66yQAk + + OOTJAoGBAIm61KjWaZZlE5u4SwzYijI261G9M6pXbSChEjket+TOd2wHMRK5bBnx + + G0wESE41BeycrL44V4YWzJFCB2qB1efwnbNSeWkvxOsXMFok5IZxFOzKjPflcALA + + fhejJzLC8m3nOklf7hudeDOYUKDezMnrr/ZW5VFHgMAw3rqq6pF+ + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 2097151 + seed: uqWglk0zIPvAxqkiFARTyFE+okq4/QV3A0gEqWckgJY= + zfec: + required: 101 + segmentSize: 131072 + total: 255 +- convergence: 
ZOyIygCyaOW6GjVnihtTFg== + expected: URI:MDMF:36rzjuc6qwedeiybhkflvevttu:as44hfkawdkbcvicdc3nfynzgjtmznyihu5773wyll4rt64bbtvq + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEAxts3c6ig0ClkvLx4Cl5tAT6xpfuw0NrZJ2RhFFBKz/ddOw+X + + QWp6zneMjN3j/vPZzJLq3ZECGqFdsl1o3qYSAtZFDLnyh2cewAiYttv9EyZLHeIF + + hSKqa8IegYtem3pNYzp7W8K4yxv8KIq3cO837BmeDaRNUVv88sO03AWFyb+4B+z9 + + mCJsHsHrfegoXCAKKSAILPTfHB0wbLEl/zh6u06yIcSRzSSGtbcun5qoCH5AGsf6 + + wOWDJfZdJXF35tQ8RZnV8edYMmH7KeNQkpiYiP6eGN1fYtLJJak10nvy51460lm4 + + t59VbMG5UVmsgSe56xNm808p5nQJ+XvclRcCFwIDAQABAoIBAAnCOUrs6Wi9wfVR + + 1HGXJ7H0xy1g7XuaZHy+kYBjM6tYCtRlU/lrBc17EsPY9yGMYFGzRgu13DdoAtvy + + zJIx7oFYIuRYfb1FA2kjeDHFGFAnxBjv03CF/nyyS+KfRHch0zQS6ySE06JtZFGt + + rl9ARhYekwySqxQFN85xaenq6swMjTpslHpkcNKLLBuCIXbQT/351JqW90p2VYVT + + 9n9fQ5teggKaTQYcDkYR7uPMUJRDQRczLP8xU9naqOki7Q+SAnmxbWOO3jyJIoQj + + mLqWzxV4l0Yts14WYDiw7SpNVdcN7qVcfVsn60Do6ekxnJ7E/d0GbRsrN/VdksDr + + RcjJv1ECgYEA5gBkKiDf3kvZT8R55g0kTuFgEPPLVcKxR3D4QfUitioMHKTe7dRA + + xs4N/0yzgurAx5FzJtMmcWFd7ypj1+UaNgR3Q1q+8n7mdpxEp1dnN5XTEa8sEAQx + + EF4MYV9RT8xnVYzmXtWKRq3UYhYM/rYcYlRs/SYaRRvD0Kd0oZxk4ecCgYEA3VWR + + pITCHne6zQwgj3hFyUsHCk59KeCbyiBP4BKYLWaFKfSj+bgyYATvuZtSGDBJUpyZ + + cFXw0IlLTgMGqGQvAMySXH3aDK21aGglkDxpDpIBywxW767wmvA/stP/T7IPLvtp + + 7ZC85Zt6v1LmAy8/xfZ9Rvu0qilGTzlFbuQXOFECgYEAgc44uZoCHphaFe3CCjbi + + he4mZIri+AzANpyoT7lElOCYI1ZdRoZi5JCIT8x/B2Tr1fXdsky6xoR4GjGnVcJE + + D7ZnhMjjOUKrWMeK65KlezaAf9uIF6X19tHNVOsRneKzcxHpNh54Qrl6Qr1FKj+n + + N0uEkz581wH7enf3l/oG6YMCgYAfz2uGNJpdnKGZVLPdStDk1EanwY4VlbVuQGSa + + dLGwXLqoxANJIaMDz9HQYDVVSqNPHziiP4fDwOe0x5SOYQ+sUrp6VpAfIFwhLE6x + + wyzqLivZzeU0v3TPH9ZX0kYwYwvxmaqovROZAFaM5tIuBP1qazmoGQbnKdV0D2we + + OuPncQKBgFGxH9re7e6aoCSxRnPwZgD2dbI/Ddpw7qmuRc1vSrwfQ0I+eNyIQzhv + + sT/Bkw0J2SY3IjLdFgPbqkKgXX045kT5acVJFns7dQG4lh+sayiUjM9dhiG8Dhfk + + U3gbMspIoeGx+zH7RX9NwDaUP8M7oeyZKBRZV0QX85hxwv0wNF2z + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + 
length: 2097151 + seed: uqWglk0zIPvAxqkiFARTyFE+okq4/QV3A0gEqWckgJY= + zfec: + required: 101 + segmentSize: 131072 + total: 255 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:CHK:a2qoww5xx6so5uftwg4msdtrpy:h7hwsjco5i3jkd7icytrtsuzcjvjzlnkr2wwau2tjlj5wehjrokq:101:256:2097153 + format: + kind: chk + params: null + sample: + length: 2097153 + seed: BTBX/ampNfLU+ox7xipBGiaSbgC0kcB8Gy7BkJB4oKI= + zfec: + required: 101 + segmentSize: 131072 + total: 256 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:SSK:xd45qxekarwrt6evpagw3yq6qa:56q3eead3qjwxvxxrzwwoleounmeneuw2o6mdcho2jqaghv7unra + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEAw0dwdT9MuyMSXaX2etcE8p/PwUd76I0p+mTR/0uiS/L5t1Dn + + wW6xgdXqMKKw1OAdfJ8ANrgLkoTpnl8gBlalR1OKG6r/yLhVFj1qoHfIq/iDdyH+ + + O+9P7deS0Huowz6WsOkN6HQ4vG4Kswj/SxHcbWuSmffb3otKkNr8zwRdfYDcCrv5 + + 8PTE45SYFCU1x/ZFLCXb7y8pyPQ64h/M2MCNE3NvnxntkWMtxPDdc7577jPM+9Pg + + A4Yo0Oy+H65rN3x6Gpgtm9p0A9HuD++XFIs/vfAdnHBn9gdKegtUKxxlcQZuAm7L + + Oj4JpeckQj1DBcOz4V+uRnJVmjXz8GqPUjt4CwIDAQABAoIBADHXTv2t7VCqL+rV + + DCboMGwYm/cR3q9JMFinWO3XCRJnbpL6RFybexTPW1mUfJuoo/4FuxE8Qj/gSpxQ + + XSUhAkhFKy/KokRBv/3QKSHR0NcLs8o5U+FKQFVNOzyZA0bp954prZIR79UwJOzz + + M+2eHmuTFDTWDTksxhhYvEO+MnpwchZEUyUWGYwQu+bCIrGNI4SzfJhlRM7wBUy6 + + 9iZ7e+hKsj3dwZVKxXNq2m7hhxZ2h9EqfRCq2LJjFiSto+zM4crDgQslxzntHdtO + + bKn5McyWtR2FbzDhz4/uPc2E91+JMjA9jIMZ24egXs9OSejN8rQvw04DojiCKMHp + + cDjagDkCgYEA3mF3bHymDPkc9EiDkr0vx7cR0O3+Caw17vVlQ5OtMhCFLhW7f2th + + 2m6e8yZfQA0B0srQM5rPgSE+9U/HvLwSQzpZJewfOevS1hohTvUaf+9CRfRTHxKv + + nsimR7SIfi3bcHUltPDiGKkqlmLNofN2zoB1A8SqblwnLHdfRP/kQI0CgYEA4M0W + + 7FoSWO7a/26MW0ibxhxXl8sfcgzIhDQa9P353g2HDIc2AQK4//xHQ/nXEcL1jOvZ + + I3IgjqjfIjW51HIwbjEz976QceIuJKv8xw2VBk265tnE9171KdpKpv9T7oooQVBg + + kW7F8leKTXIFoFH0Ug3xe//sHEFffddg7sux8PcCgYEA2xX/B/yNwz4xzmEabxi/ + + 1+x+Ou4dVv44bGGLEFaPTUGFU+/JNzFdyEsvgbGOKZYm87sn/49HW5qbYiblSwWm + + 
oGD9ryS/Ztr0bkZ0BkvnfZ8EFdMtiPFp3+8iEobD6jvXcyWWrnqa5VzUPjC9Eg7A + + P5XCsqGwnuVfGqnITDwmbYkCgYAJUddacx2BnF1t64pcGnWC0Bf9jgk+tDL38CUR + + 9RmP0CXCKjTd89vxmObndYsqDFgbwIdfBdM9ttiRVYLfwOArIVUTN05LumHJWWwA + + YJrGCSDvgyW8T66ATrF7nOhA4m9qzcdDIEGKm4B7V3fOPrePU06oomKKhVdNI0m8 + + fKti3wKBgQCRlS4RDAQXu+LbCwvGfiR5LRN/UzS2TwetqoRkrE99V9LvO24aFNJN + + HEtBxhnpo29SXStmRTiV6Dsyp9LjQxFsVlSsiHN+f5SwuKKNMVxZlRAxLx5uCE/v + + O6qsKi7zb3B/eTNLSKk+KjMM5PSpfgKnb30I6hHxUWH0cKULheK4UQ== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 2097153 + seed: BTBX/ampNfLU+ox7xipBGiaSbgC0kcB8Gy7BkJB4oKI= + zfec: + required: 101 + segmentSize: 131072 + total: 255 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:MDMF:xo543uh52xe7gpramuim6dsxt4:5zxemp7hunykp752nywtbfgqiacrxy4qzptesaaijjlhvwshrjja + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEowIBAAKCAQEAlS1aW4nTOvDv3ObiFiccAmhSCUcQxJDTMW5o/ohlr4nofwm2 + + z9N3XJzlQ1z/O9zBUVbhPKvtqNxT7IhobM/YTzycvoutknDoNaSE3bkreh5++xhR + + F0hn0Q2he+KPY1KdxtktwTaROLl1rd/6qC1OR7VHAJF7qjhhsyTqw6aZFXjV2Eq2 + + lh2PYARfOsH2UxAY3ZP1hmXiCI5wNbsxXzhm8le07sJVQSo+kFbQ6alroy9DYQlK + + bnIaPy+WOMVzYOyZRiG5ZW6A4sNdSTAbJ/3dMf8zRumDCrrGpv+K+gsaThcaOVSJ + + jf414vtlzpih0Ot5etOhQ2UE6acx8+L2T3uTcQIDAQABAoIBACju8+9QR1zSBg9o + + bztC6gWjGHehP3Ggh8L1l+vYA4cCYYCSas5mKUeJacNtPj/v2D/4hf9+8cy3AHRU + + Dctl0OYVLLGAZFVdk+o8RZUNnWd5/L/rsTyhSpNrmRcEWPIZFmAc8dgln/2frjHS + + 1tXU8Ljufhgi03sm03AzvhOHoFPqk8WfslmsnpD36qmbGbV7AU1A0oLa5M73TkHu + + rY4u8q2vzDBRfnJ8hwkcyHzA+43uHf5uGeYieSZBxP+m0GzUChnYBkNGKmuHGqOC + + RPx5YAMfu/oECq6cRvuTLTGiHhDOi15ukh2HVwjGDZaj1qHMXXQJ/ghVcpZcDUNv + + 4H7vpI0CgYEAten7o2hbutKmuh6e/fr3TJrbD0jWajYAf0/Luo4rmF/VOskeUbiT + + OJ98NeDomiLDBAiRK1kaaxoBadMVN1lNQ5UiRW0srHWNsfQr/998+ZzlJCIWmQ0Z + + B4GOpUdRo2GwhgsWS3xIM+GS5iqTc9QyciPAMjqM1esCqkliMMhjXGsCgYEA0e5J + + uQE21G4RpV95hLpep8kBa1HwrIoFqOev8636/F+nMVR3X8RHy1Z02tEK1uXxKr0g + + 1n4KYWTURYRlvPehDunPhR6Wc3GAWOJ2gvcZSgPjKjzlaRPXDQDTGSJidd7/ClKh 
+ + XzG0zdSFmvpA+tRSbbtGQQjSyhSvRN2Fip+JBpMCgYAD44mel6eGWeR4jBkIAupw + + d8sBC6SRxq/CCPmo9ksWSc4sIIqGYrS6/CXSnQk76kxS9L/ttkzrRzYKhhmpAj61 + + mCWQaGIRGb46tKaQJL3uNB1t5VCoWvBTCcD75YdoP7lfVDNYz8JXYZYbV4OpcTrW + + 187PBBNoq0p2S3VO56nAGwKBgQCVDZ+Cn/4SLmSRCoz5VGpIr0s2q+M6XnVOS9J+ + + LhV6g1/ugo6PjIl9MlGd27bahkEJm2dpY+xy4mhlQ3AJD7lnIVOarPEd3oTGl2SV + + 8GQgTUpJfxtT1CZosSExQ1ytXDuxVKIHOP+q9S43r1/buE0eZE2pd15S5QTc3Hwo + + xMVByQKBgFfnZcaXdyC4PtOahvs9UoV/15FtKjw9luS1zFrr+llA7aGeDRkoSUNH + + oBSvpotaM8xUaHbUN2jTY+GCZe+d4fgEqofeDE8dVqIoFfwvKsH6EHRxwhGjamlN + + HRur7hNPvC4QcDiUK+zF4jT4qnJHXo1M+7tlwbOXEMUk4E23AvDC + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 2097153 + seed: BTBX/ampNfLU+ox7xipBGiaSbgC0kcB8Gy7BkJB4oKI= + zfec: + required: 101 + segmentSize: 131072 + total: 255 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:CHK:tnevhyl46kwtokjchy7pymrdga:7w2z4jd2hfnnng4n2ziz64hi6yh4rqfodxmlpt5w5ksknlfv4xha:101:256:4194304 + format: + kind: chk + params: null + sample: + length: 4194304 + seed: 2NVWOXvtwcAFf0J/g0fDC+ypQbnCU7awZoCxcnhpkVM= + zfec: + required: 101 + segmentSize: 131072 + total: 256 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:SSK:isisbwovtqib45rwg5g2mfdi7y:muylv2hhzo5wugdclu6us7ngvlbecv3uxpzrqgj3lnwsgxodz6wa + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEogIBAAKCAQEAsPiJlX/DPcGvoJPp1ChAI6HlQO6qRMiGHZ6tqA6fto5b5yK3 + + bB2UzU+bG94CgObFMqUf479s+1VRWbSybfsRwXLx6SXVmvSHYFawC8ZRs3YIEP7i + + Fl6U1vJqtxdxOLLXUHfd4VQZuRah3fAx+ZHqpO5/pWRW9JJSGqJ83C01MIwI4WmI + + YryKu9oIJIi1bv9VkbfhMNx5fM6MSHQltm8fuBWBLzntql2eckvHUryRj/glAV9p + + VLU28bgLVMeViLmZFNVUlmjczJFSVAML7v37n7/xPQABAPpSnhlN775p5yN+cB1y + + 9JsPpL45IF7ihNe0jHQEKI+sQ5bnWdvtznWTiwIDAQABAoH/RLqS1rYjjrZ/7ZJz + + H+7cU46uYkM5jpK+hD8wRDj9KlF0Xe2ChtZh2EI2EiETIumKXLWviu6fXOcMmB3W + + xRqQAJGWZyb6X1SkCWiKqXV1qPIitNF/a+LOQRqd+ERrPHU7CiVoIusLEJYqMGXx + + 6HAMpJocCQiKGe6XOu/ke+K4UF3ZTBL4ar6yZuIYyiiGlpgNAwroDyg+hO+snyXA + + 
catMRT0h9oAFp0864cFTHanxjgSsbZjkyZtV2AFb1u2rjjd6E6NgUmJ+KKjK4QKR + + qIMNOzlEupAzEkh8jObtlLY+9ZAM8eRGNOa8RNimZq8ntl7Hhl9y/uw539M/Xdjf + + ejG1AoGBAMy1Vza5NKvnAFOaaORF+S1gpT7GaDfvtXjaokZbdEBDVy83KlrGDJNl + + XR5HbqZoeKGA/mbzHTWZrnZ2y2BzoGbc0aM6rtFsJ3rqabRlbmdyQFK6mjLmZGzC + + 5EMPG+pWXt95d7woxtqdbcMutz6ZtWn2uquXz4kM9R2SoWPvucKlAoGBAN1QBciq + + k+a0r4z1DEs20Yz8P1dCZdO4FnicI8Vk0IKwRXbJDYPpTwAH5ePNXlx1jDynLs7B + + 4J9csTWfhFwR5oeVzR5GWwmbx9cw6g1n+q/rpOaYqtQ+h0Jia5kXCKNcEio982kb + + CxtGDUgJj3iNi483a/wCCcKQXIIAkbxmcBZvAoGAQYA82cvFKMQPfLDJo1Eoe/aS + + qVV+/3b6ECOVDQIyXmWtvfPe35DDcV5bv1aH90MyZisKPBLKY946zrkQNlqJFqDN + + i3c5fNUohNIA5LIX843BOzduI59IvuxVcYeiHQdp8APD5jb9+fGpr2yBQcyZGcDS + + 1hkLVQUKYV4Luhh4zekCgYEA0VkjP4DsS25cKbCcIoIGk6EBod9zR2V6DDlXNSB6 + + hUWNUCI7oK6QRm0yL91TB49CSxWyl26atuUN1LXClP1x3ov77kmLUHmF/q+Ml4Xm + + g4cbA+8imYdUl51WPwik6TLtE/xqRuCIDxKi+aPhjZ4HiEBa65ZZ+Sxp9afoNBmK + + qg0CgYEAwZynW9qj3nH19nqqHPvOgGhYShotmLRznf6mmPqapQInlv+42ltxVsCB + + bKYgyM5KRZSFYVjKH5y1SFAByY32wqIZSx7Z7IbuSQNeoAt+DLlPfusuSnQRk03u + + PU4IolenY9gbyniTLxkcKjmRDQa6L9C6/ADI4FhUSTWlbcw3228= + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 4194304 + seed: 2NVWOXvtwcAFf0J/g0fDC+ypQbnCU7awZoCxcnhpkVM= + zfec: + required: 101 + segmentSize: 131072 + total: 255 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:MDMF:2vhlj3tpmx3nhv4zq65aoss3um:dh3ql3v2mlnxizs3med6mmd72dxzzgh2varaq5zgs5e5vmob6hcq + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEA03uKqCbjzcK4tMQi1f7mAQl2t++ihbWQI2m12rsf312o7QOp + + td+qmKsQNGokH80lc8wdlkgGMTUtn4J869+i9uE3EDT+JhOEkRS9tPYT+sOaMoVt + + EVhioMHc55FruqXOTKv+BxkF8DFnBF6jFciQafH6RE2D8L+ASwjXOghgqLcZeIQp + + BRDe03niKh/Es8R8R+NunUd65qn0ztXTHITDBbxVUAN0Mx8rYy7cYi7drGLA56N2 + + KqY+kvWOXoDYzuzY/ILz8fQWHdNUOp2WbVWQ2MnDm0GSBUgy8y3CpWxmSifdg5mi + + nunNG1RsEWHV56N3SOCz2+zYiNaTyC2BzIOmLQIDAQABAoIBAAx4SrsS3U3YVHx8 + + MDL+vmCDRq0eg9kZ1UTxSm16U9fKNa1mu4641d3+ADecUUL6yAw66z0IRC00ndsQ + + 
86gTiLrBRDZAGa2fFasEVmdMN0NgXerqs9KuVn/KI+oXngrkafSwE4t8Qz21fAXl + + mqeB1a9u5ZCPeDSC5jVbxm0VP2B98qElxzdBxYZnxxmnisYa66/ghkDgmwcmN/mV + + /r00j2ZXz/15CKd0JKNmztt2g+iXNDKtUSmJQZL78oQiYzFDL5nm0wQNM8XBBE1c + + arMSlZ9ZpOZ5NJXp33XsLbjF+cWn2oCb8fwEHkrVYP4Km4rebSCEZpx5bqQfIOrP + + +NtyF+ECgYEA61MofwHzJkeIxcKUal3aKmgD3XqDbRgn3cZfkqdG3Osm+YwO/mmu + + MBW9RzrMuBTtqxckYNta04UbsunfFiaQm3xCrmmtwxRzn+re+jp410yxeYj0fQuh + + +vRkez8LHYn2BKzfQbWgX1QdrUzoR24oJezwWQWgbkuMGq7ORqnNR2kCgYEA5hAh + + zne7xz3iChd5vzsbBmkfcT6Tp+QcQqLt9d9wEupMJ9ppOQzk4rBXwFQvjcTUOs9n + + AIo/qcB7+XAchCp3WCBdGC77Rz9hBosCjah/coNJ2PVM0CMF8VhxMbI7C7IQlKPd + + bZEmTCwQOR6kKKTD28CqWHA7NIryj5Ux2gi/NCUCgYEAx5Jy2aOxrlkkaXMnoz2M + + 9EHaZU6tfyvpQ3AlRZ6PvnO/Tgu1+5VsoGMPbwUy8TruhRbPR0VAtfpBD27AP2zd + + Xr/3XStKrhL+LDVofRZxvUXRjZzUm+ftq4LwZIWGy7pg5n4lqPh71dzkfkCnDU0i + + x2c2PolDEccIPujZD5yZ92ECgYB6+jSYATjHEDU738CckCOqEZdVGXYkULMqi51X + + yNBHzCZZR07nyBSxeEHv9RBWX9hyd1s/1qahPtsGQv97Rpf065fXzYVUWHSs4rHC + + t0cpFzTqXHVq7M3IbNZVEkitv8lNKyq53tTx8rvZTJ/Deg+X8C0eiR+cvolaZw32 + + 1qYeYQKBgQCtjRniQKjBS9A+cs7/5XOPCKfOqYOQSxlMrnto5sX/CO+PTvT2m4jn + + YTgXKQ3SerpTVqNITtghSPBPV/eVJfLDuG8c0ZzWNOjCaZ5M91rMuAgJ3ug3LupF + + XUrl6Ks6B5OzXgGqkk8SCnsaKq+3JDQdvN72xOQdaS9Gi5HhZfJ4Ww== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 4194304 + seed: 2NVWOXvtwcAFf0J/g0fDC+ypQbnCU7awZoCxcnhpkVM= + zfec: + required: 101 + segmentSize: 131072 + total: 255 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:CHK:tg5w4m6hi5ezeapuix3lrrbbtq:7kerk5plqpsd2upnole3ifxoegxdux7prazbvzeu2kyilc6idk7a:101:256:8388607 + format: + kind: chk + params: null + sample: + length: 8388607 + seed: w6uP8Tcg6K2QR905Rms8iXTlksL6OD1KOWBxTK7wxPI= + zfec: + required: 101 + segmentSize: 131072 + total: 256 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:SSK:3y6p62da5ek5bl263mhgva45ke:hvu3gjbdhqkaauzqy7wt2aiz4t47qcqfm4h3ur34gcrxq4skwxkq + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + 
MIIEogIBAAKCAQEAzclQ4Lb4GgTvkDXTIih0g1v4ys/bce595lQxSX2nNjeSbKjr + + o3hZRVJFxFCe3ylEFcsAtMflktITGzje/mWgrmn38bxYRASOBVdoLFXPiO8aCgT0 + + Excm4smNf++TQp6G3oQtey+xf4EwOtuBw8R4Ehm+CqFZrhB+qfMY726eUW4XtmZY + + zmpmM8pTWISgoZEqm1tKrV+qLfArtvrG84+VhSkzTg4u+oJKOnUINWfSPfQZd9nY + + rzNMe3K9OuM5JqYbdIVbnUS5jXFs0egNui5IwEgUtsom1nAWiljgo4o5NepXa60H + + XTFs0gpWzoZAO1A8J7JuuLN7PLzBJhk4ZwLTeQIDAQABAoIBAAmJxFiIDoeZNTfi + + UMkPZjgc9BMFb5rQJrB9dEPfYbfU+1HTQgnDhyK8CZ0L7hMypNPcQwn+Dm1f1JAh + + Uo+oyvnukjYXiGFNsz3640qTxyDmETdH676zRuOB20/D3VfcBG0NpBSGvUPHS4Kc + + 4D7ATV7sZ8czG5aib9aFfKFDZ63nNsAYWFJ0SqwwVembFkC9vOsxenPKmM5O2d8i + + 7G4wXRtzTMjhsccmQJfCoe4xAVuFCKhNlPB2LegglHfFowXcTc3LD8dninnZBfsp + + APwbb+2g/tWzj7+qn28ESxjYG84kLRvg5g1/RMgaYqLRti3vgERVnjG7Luf4hCxM + + fATU90ECgYEA2x1HqjMhSlkmjZNEjsQ8Z5sMv/+rD747IWGFumCf0PVl23WmQujC + + OW+bGHnY8A1TBKlrkW4V3HJfn3TGIaRkTRGzYsvtYGnIrgREJeNiQOkETydJq281 + + Dw8yvBquPAlruuCWucxL2lLpTJvDJYQ8WAKATXR/0XpSnynBGz9jq7kCgYEA8G2q + + 1zPbvS6Gj+7yMHNH1TiiFub3lZEevUkDGhHdFg/cROZCuMTK+DFzB4uGqZilXUw8 + + OASKuTw4lGUSNLNEkBuuZAUnnZ+LpGt7Cln1avnVwzSOK639zfYRaWMo8Rc3Yy2X + + puWu7VjHn5/GojACdeThr4xU5gkhBXv6Xy4BxcECgYBFWEXm+pmNkxtdcP8gg8Bu + + NabaWMrFh7nk/Z059/x8QD3FL723rTxSuxyFqYJbrovYjNnLQ+DNTLEwoN9XpFRO + + A80W9l0gxznIwPbkWsssqdJATrnE9MQBCRlQaM09mOmsUgnBsYNMDDNjmGQxSmFi + + pR//41/UZvchAjDoM66SmQKBgE1t5CEeUFwiya888r5rweyHKpxZkc6XR+EJzHfu + + 3NaoEPYXedFrfzpjInqBksK3qDndvV8FB3AUVtxjmHNkcGZAo+8OQe3fXed7vcpd + + ok3rW85b9JVYmW5lGsJn7t2F7o6ANmDHg4homRFtMVk2QPSa25vfg8/5jKrpfH5+ + + oI+BAoGAW8ezqR6768jFvKx1Fd9m2vTk29TF7CSHSCgjp8qBtryIQralfdDqdFSW + + 1riWoPU5uW4HWNzuQuZOCiqnwTCsc6tHxsxwgWCbtv0nliJ/VxsGQBfM7lyNHyfq + + tr4pVuZ8uXd9v2JEe6sI2muFjGmTu76ieglgamb/KlIwb61+EWQ= + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 8388607 + seed: w6uP8Tcg6K2QR905Rms8iXTlksL6OD1KOWBxTK7wxPI= + zfec: + required: 101 + segmentSize: 131072 + total: 255 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: 
URI:MDMF:qvcv4vtu3lbnjijwf26wiojbbe:snscnddmlq6642bhlhlaqtw7actaqggvwv4gpezgjqf7sxs3kena + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEogIBAAKCAQEAq1Nn0JufzDRSFksuIYmhr4sLG2RjLhZD8Lf1E10JhSWrfb7Q + + muqmRmSv2f/2ell0cXxDtM/7OCjxrgZ92uspBZ70io/T90EjQ9D0Nxlaz+CpW9m5 + + 1qXhCdYzGjaV6M0rsOnU5lWXPFjdTz9f81VD6paPSGLJsoO7jSpu20n7DL67O66C + + OMdKof2Pr4/pN6sfOyn09VKAzOAZoP7sGrre5xJ8T4LnBYGJ6nS2wyIU8dlou0Q6 + + PqOx+7OIsDxYgaMPtA0nR3n84WpnsKGnQBkrPGqPClhfhF/7w44WU075CIvuq/Gz + + B04e7D49UlEEwOOiYG/r4NpYIHrhmulv5mOinwIDAQABAoIBADeJJdHdYINVQnav + + kBiXAK5iqAsNE4lQ9l0FhI/uTLO4bkqom/5bqeKPqOFFs6Qdcz2GRnxKHukpfI4o + + 1IsuR3HnAOYZkWBI4SGOjlt+AI36CWwYu8D0rGn/4TjSEO4R8+O5KKYxgICzXane + + pT+/l/BnNbMFMtSHFzi/VIgJBzQt54OAe2sKwCSB5JUBBgJVce0idFwqvoVv7jpe + + mxZpevJlwUjJPw8ieDOPoS7UwzEMKHkbH/iJ+mGWV3JeLH76DG6BDGIA6+oLB9X1 + + QE/JXXB54t4jybw9vIT7toC0A6lF24FoXqCzoAHX8RgJjCeNvpA0Q103FktgErp0 + + awKCMKECgYEAxO6Hy8RIK7rcmSpdPbTPOd/X9uMfJ5wuhGHgt3TFqlNYHTiQY35v + + S58Rh5v3wsDbFczg9BMbYYY//PuCoAeNI2O+iDR4El9u1T8aMRCePIm8Z5v4SFUk + + /Y7bAO2Sg9y95IF5s8EL8d5yo6S3ZZUKhZ48zfTN+oDG2fAiOa8/VUsCgYEA3ra2 + + m7VyTxDAKRjq0BqnNUrg/UStv6pHA0N+iyiHoE9F3xVHKHhBP4X0aEps9bfrErYL + + b/cQvRAS/Cb+FaMfeWTSwYzMPok5xyoUtsNwjeLwPyVp+TPdOzJVuQmj6ydaGtpD + + KuJbbNr9vjci5fduB9DFUXrv7rrcTSTJjldf130CgYAUaxLjWq+M8SvsKYtPWY7e + + 1kmjDHtvdO8RxMAy5UWVWlzZcsLtve82LQD5SX+PzsUoZnywcca1/uBlj4JEq2PD + + 1pSrtJz6crCgJZHGoo11g2Zoa7B7d3CFZalpWDiHuXxq083ViF9/rWu/cdWeD6zu + + m7B8PjSZE38Km65Awt3TLwKBgG+oWSr9sD6VnlG8bVVCV5xvWxd/TEDwhMPNHe90 + + tXKY6+XpTBCtIcFQTnXPAou61r89x8QtsRWormv+vJpqewgolUV2apvbvrzsixAK + + Mi7gnSR7hILtDrh0BuhLPgRSaWlXDh+89qs/q8Gm8Pcsstx2PccZBJvC0VpX3Dlh + + 8uodAoGAfD6xgNtZdqc876KrfiGli6IyjQgkKyVdSe41qqlxC6C1i0ICex22WveZ + + pgjbN7JwBRfLht23vnrqIVr/1P6neDQ6nh9aKNUi6wsldg6nqgwfxXx8ChAEFZRR + + RZu9RLIfWHGagPiKF0FIK07/q1PRtfAiQsSIZfo4yA4Lqojauns= + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 8388607 + seed: 
w6uP8Tcg6K2QR905Rms8iXTlksL6OD1KOWBxTK7wxPI= + zfec: + required: 101 + segmentSize: 131072 + total: 255 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:CHK:ioridipksj6o7ph3bcf37vslmi:pzhkwm4ry67ohj6dcs7zwnsjrvfqzam6tuitrns5vuo3uoomau6q:101:256:8388609 + format: + kind: chk + params: null + sample: + length: 8388609 + seed: yPi3JHKKbWaEEG5eZOlM6BHJll0Z3UTdBzz4bPQ7wjg= + zfec: + required: 101 + segmentSize: 131072 + total: 256 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:SSK:366siuvcfpanoq5xpsby6aeu7m:udkk4awrzcrxtmhjldtezkscedaax4bdkaq3rczqgozdizy5hjha + format: + kind: ssk + params: + format: sdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEAl7MOMMbOn8dHhuStb5vOd3YhKX9ItY8jaFt2aUy4n6vhXSNF + + /KbIN2wQTDZdqHjMfnVIU82TIgmOzfgy3H8y5MStXKI8+zKediZVFW1SX+tVOJkr + + gHg5Yk7t/ahxXFnlNKt/hmU+Wpy65keIhZlDW3DXN8brN/abCN8gkoFHFZtl4FxW + + DhhBk08IBQdSM/m+QQJ4LuPcwlJamgRxL4f5lng2zRpnDXjtv+NMwUp/D+9hN/V9 + + 3IUy/n6+G4iJQOdZDId7PmZB5XreXgDpuOo5Qh6E5iPWEYzpOpc4441F6huwCN+7 + + hOcnbNJv7GjRdl1wVwpoIUVgI7suKk9pDDMr+QIDAQABAoIBAEGGYhbHiPCTD15A + + 4HlY/3GyYNif1jQ2Q8EL4LXTIdw2Tf4BAnYDRHBMCS4iPYpLw2jMGBW6slb9ceWd + + 07pSZxVRruBYY6bNUo0OOaorsm0kJYdxAc1YINFJ7pqma3DMk6iQe2D90lUpZcGa + + HGo4rVOOBihdj7R4nLbUSil+FcpKzuCf/7SBIlhjMeLH9rEtlDJ6az8B+UKilJJP + + nACArs0Q/l4kCyKa9tkjzqU4BGEWB77uTzxwFSBvguiELXhfYRSxTqlCkkQWR6TQ + + jgp3rkMXIDeYaowYOC7HQLClLghH7HMlYkV0lAP7sgZUmdFwGKRpUy95fxFtGI/k + + KCVE+xECgYEAzuDttHfAcTSgP6vzW7Z6QFksB+XLMUspVVQDgf4Bllg899lw3ndV + + +55dDCsljhCZw4o+y8MktgtAWLZte7znUtRLHsbD6l6DPUnDcVXdRcFzqKeqXlAZ + + ATWrti5rEqQu9l1AzduaY7eWJCtXP5mywlb+3xOdJyyYdPP7UI1SnncCgYEAu7gS + + bAJ+h675WhK9bCcBMpeCP0su80Ovz3Bu/UzUa5VW0LPnFt6AMCGlJXu2q4jFo0ba + + j2uIe1qV8SIMWS+G+XxmueqeXfptoTIeHkBYOL/1YlCONkbodHQx33UW7RVTc5MU + + HiTzjBqrL46CrtrN91sA8K7pmeuPAk9apIvs9Q8CgYEAqwmtnSHQmgefYWThW3bf + + VeojjBgBSSzR7Hj8OXHukAU9ytAcD+Fr1g7U8OWPNAgniFH4nvAkntlohq+0jrPc + + ME/SF4zPlyoyqO4eRsptmWlaHRsZsMXaFnTwFTwFTDEvnoH0vP2NhFnZKOgoRy3k + + 
a+YO7BHEQQoOtcqtgaiFoPsCgYEAtQN+4CBXmscjM7Q2bIAAK6Tlt9rr3zA57DJj + + FGZtv4A2QvH3uJm9yqvm8Aonz6kHy7abMwlihnCHfgpzFd06roFDHawcIktGQ9Zs + + LIenirGwEanUOIqPxRv2q5/hB6U035HIKHlBUKy2vhkR80KSsh9S/MPuBrqbIIMc + + yOcVDAkCgYA1t45yzJwsMy98fVQgJl+Cb50fWmiaDCVlytBt/n7Otxuica96EbqL + + BiDuf5yicNlAjByHeKhv7FlykLqMkxDBiCNyxwZZKErZdn9+6hV5/9zCGx7JeSJr + + uSVEoab5zdhSqEkxNJBBLo8HuGHrjRKyhLchJFMiXuo4maJ1zKu85A== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 8388609 + seed: yPi3JHKKbWaEEG5eZOlM6BHJll0Z3UTdBzz4bPQ7wjg= + zfec: + required: 101 + segmentSize: 131072 + total: 255 +- convergence: ZOyIygCyaOW6GjVnihtTFg== + expected: URI:MDMF:buw2ujk4pqecvd224j4tibki74:vi3aaz2qzffsyfvafcencavush7x65mrdwg6stxpqidsckfhhy4q + format: + kind: ssk + params: + format: mdmf + key: '-----BEGIN RSA PRIVATE KEY----- + + MIIEpAIBAAKCAQEA1TmUXQyj6HXUw5Q8Nh+LeL3oFFs2JcXW0qvLbUloNT9G42sR + + 3GPeGQ28QLFRzhtawf7p4hdfQSkMccba/MOwik4vZTq6kzuit0juIt75+p5j6U3P + + LUnMzjEr0cIXLOhQGwWT1aukQGzbxSndz+J8XtnehVEggmhQ2cGDnniM+Ub96FkR + + S+Z/cHWTzL7njyeYZ4MeUwWsO4F9vg/fYd/EFTsdEbqk1IV8yqnKuuYObNTCtlqW + + sJi1D2wFiFBVxi8nfRzy6Gif6B8Hf2+O1WQvmqdC/bUNl0wpouXdNRYttPH43BQs + + 5O5CTbbNgACHskcDqsdqRYuzEwLQfDgKKqnqFwIDAQABAoIBAAkyx1eBInMIubLI + + z14nNzi5lbWHGhSJN9x1dqo3D/pu7iuW9svuXPyoIRr361By9hPZqN26r2M3TVTG + + VyGvWXkczVlWiZvRsG2HTmYNbJlwHRRJss73RmyguI1IegAuvAHvjwSZN5714Pc4 + + ZhLAH0z5ErQwVh56E2yJbhiAhrGAxPjz5VecgpFtegv6gYTSN790qr4LotMkGiae + + q93EBXTxrzp58ReYOTl7OTu4a791XLtJq8ap23Lmu9s55QJ86vvBX1767WArgHcq + + 7rAf3Ti7YfHRKjw+zAK+CeLa+as4f7HD+B508QhJjhPNqQU6xdeOkuQkDX+vjq0m + + CvksTR0CgYEA9pRSPULncSkthLy1PVmNAPRfZl1iCa3Zo/UZsddWMrSS3rMKABtE + + EKyzJWzqLj00w6sQezz0YY6BJwYmHoJkDLDPHoSkxircuEgN+rJNLw7uTtOcSlX9 + + yMonaIUKw3wK5BbPmCIAv456AQw3hicqc1qWcWwBXFWvO+iyiBsxuUUCgYEA3V8I + + pRXIkxltc4LG/tytQFOPChf5fJhLlnR9UPopWK81SiViHmeT/jlvywk8thkvUuQw + + FOo1pXcidkcfvHWYWtbGgaRHmu27o30bNa1Jus1DgLq399VFvXYWQBauUOIfY/1Q + + xtxhHcPFGxSi0SYUA2px7rBb3KOhqkHEylgGlasCgYB98qbLGdhj6beRXF5q1sn6 
+ + Gdh8zegcr4tCfxg/yZEC109Jp0PNaB/tMHlU/XvkYGkKJN+HQ0xEZGi9yRtBbDK0 + + dL9mhDQx8ITLMCrLybU4+zRoWRg0tBWsMO3OKl6kGUDq3mfs+jlNnvXcgSP/RxQc + + 1cGQb62GP1IBlMtUUCemzQKBgQCgv+/hIS5jUyWdqaujStAsVAEczUgH5/eLq8+M + + S/xWP/SsgPT9Ky3WgBLkFzMU8Ljisn0P0vtdymMmDIPJMIOQA0JmxcqRgGyvTZvC + + oLFXitKn2e7Zcu+Pov6JT28JoQo2a66KmWGUYaLyBUwuID6MNHHDaCFs2Q3+OoAS + + h1VQvQKBgQD0DA0nokwoMxZ6ClV+B+G6NSmo6JbKOtnoqqBiM5rqw0ME3h1B26hO + + 5A41/AUKxjrFbcqE6Cm1WGQmR5vJDmKEEhF3SQXuEYm6Ji+l+awbOdYq6GvS5kCH + + pU+imOa8uwgmK2TlYYj9LaV+mRLqqIvJ6396y66IJeTlcLLoNTasXA== + + -----END RSA PRIVATE KEY----- + + ' + mutable: null + sample: + length: 8388609 + seed: yPi3JHKKbWaEEG5eZOlM6BHJll0Z3UTdBzz4bPQ7wjg= + zfec: + required: 101 + segmentSize: 131072 + total: 255 +version: 2023-01-16.2 diff --git a/integration/vectors/vectors.py b/integration/vectors/vectors.py new file mode 100644 index 000000000..a1bf9c206 --- /dev/null +++ b/integration/vectors/vectors.py @@ -0,0 +1,155 @@ +""" +A module that loads pre-generated test vectors. + +:ivar DATA_PATH: The path of the file containing test vectors. + +:ivar capabilities: The capability test vectors. +""" + +from __future__ import annotations + +from typing import TextIO +from attrs import frozen +from yaml import safe_load, safe_dump +from base64 import b64encode, b64decode + +from twisted.python.filepath import FilePath + +from .model import Param, Sample, SeedParam +from ..util import CHK, SSK + +DATA_PATH: FilePath = FilePath(__file__).sibling("test_vectors.yaml") + +# The version of the persisted test vector data this code can interpret. +CURRENT_VERSION: str = "2023-01-16.2" + +@frozen +class Case: + """ + Represent one case for which we want/have a test vector. 
+ """ + seed_params: Param + convergence: bytes + seed_data: Sample + fmt: CHK | SSK + segment_size: int + + @property + def data(self): + return stretch(self.seed_data.seed, self.seed_data.length) + + @property + def params(self): + return self.seed_params.realize(self.fmt.max_shares) + + +def encode_bytes(b: bytes) -> str: + """ + Base64 encode some bytes to text so they are representable in JSON. + """ + return b64encode(b).decode("ascii") + + +def decode_bytes(b: str) -> bytes: + """ + Base64 decode some text to bytes. + """ + return b64decode(b.encode("ascii")) + + +def stretch(seed: bytes, size: int) -> bytes: + """ + Given a simple description of a byte string, return the byte string + itself. + """ + assert isinstance(seed, bytes) + assert isinstance(size, int) + assert size > 0 + assert len(seed) > 0 + + multiples = size // len(seed) + 1 + return (seed * multiples)[:size] + + +def save_capabilities(results: list[tuple[Case, str]], path: FilePath = DATA_PATH) -> None: + """ + Save some test vector cases and their expected values. + + This is logically the inverse of ``load_capabilities``. + """ + path.setContent(safe_dump({ + "version": CURRENT_VERSION, + "vector": [ + { + "convergence": encode_bytes(case.convergence), + "format": { + "kind": case.fmt.kind, + "params": case.fmt.to_json(), + }, + "sample": { + "seed": encode_bytes(case.seed_data.seed), + "length": case.seed_data.length, + }, + "zfec": { + "segmentSize": case.segment_size, + "required": case.params.required, + "total": case.params.total, + }, + "expected": cap, + } + for (case, cap) + in results + ], + }).encode("ascii")) + + +def load_format(serialized: dict) -> CHK | SSK: + """ + Load an encrypted object format from a simple description of it. + + :param serialized: A ``dict`` describing either CHK or SSK, possibly with + some parameters. 
+ """ + if serialized["kind"] == "chk": + return CHK.load(serialized["params"]) + elif serialized["kind"] == "ssk": + return SSK.load(serialized["params"]) + else: + raise ValueError(f"Unrecognized format: {serialized}") + + +def load_capabilities(f: TextIO) -> dict[Case, str]: + """ + Load some test vector cases and their expected results from the given + file. + + This is logically the inverse of ``save_capabilities``. + """ + data = safe_load(f) + if data is None: + return {} + if data["version"] != CURRENT_VERSION: + print( + f"Current version is {CURRENT_VERSION}; " + f"cannot load version {data['version']} data." + ) + return {} + + return { + Case( + seed_params=SeedParam(case["zfec"]["required"], case["zfec"]["total"]), + segment_size=case["zfec"]["segmentSize"], + convergence=decode_bytes(case["convergence"]), + seed_data=Sample(decode_bytes(case["sample"]["seed"]), case["sample"]["length"]), + fmt=load_format(case["format"]), + ): case["expected"] + for case + in data["vector"] + } + + +try: + with DATA_PATH.open() as f: + capabilities: dict[Case, str] = load_capabilities(f) +except FileNotFoundError: + capabilities = {} diff --git a/misc/awesome_weird_stuff/boodlegrid.tac b/misc/awesome_weird_stuff/boodlegrid.tac index f13427ceb..f03474756 100644 --- a/misc/awesome_weird_stuff/boodlegrid.tac +++ b/misc/awesome_weird_stuff/boodlegrid.tac @@ -1,6 +1,5 @@ # -*- python -*- -from __future__ import print_function """Monitor a Tahoe grid, by playing sounds in response to remote events. diff --git a/misc/build_helpers/check-build.py b/misc/build_helpers/check-build.py index 994ed650a..03fd53392 100644 --- a/misc/build_helpers/check-build.py +++ b/misc/build_helpers/check-build.py @@ -2,7 +2,6 @@ # This helper script is used with the 'test-desert-island' Makefile target. 
-from __future__ import print_function import sys diff --git a/misc/build_helpers/gen-package-table.py b/misc/build_helpers/gen-package-table.py index ebcfd1ecd..690e95739 100644 --- a/misc/build_helpers/gen-package-table.py +++ b/misc/build_helpers/gen-package-table.py @@ -2,7 +2,6 @@ # This script generates a table of dependencies in HTML format on stdout. # It expects to be run in the tahoe-lafs-dep-eggs directory. -from __future__ import print_function import re, os, sys import pkg_resources diff --git a/misc/build_helpers/run-deprecations.py b/misc/build_helpers/run-deprecations.py index f99cf90aa..6338b7ccb 100644 --- a/misc/build_helpers/run-deprecations.py +++ b/misc/build_helpers/run-deprecations.py @@ -1,4 +1,3 @@ -from __future__ import print_function import sys, os, io, re from twisted.internet import reactor, protocol, task, defer @@ -26,10 +25,10 @@ python run-deprecations.py [--warnings=STDERRFILE] [--package=PYTHONPACKAGE ] CO class RunPP(protocol.ProcessProtocol): def outReceived(self, data): self.stdout.write(data) - sys.stdout.write(data) + sys.stdout.write(str(data, sys.stdout.encoding)) def errReceived(self, data): self.stderr.write(data) - sys.stderr.write(data) + sys.stderr.write(str(data, sys.stdout.encoding)) def processEnded(self, reason): signal = reason.value.signal rc = reason.value.exitCode @@ -100,17 +99,19 @@ def run_command(main): pp.stdout.seek(0) for line in pp.stdout.readlines(): + line = str(line, sys.stdout.encoding) if match(line): add(line) # includes newline pp.stderr.seek(0) for line in pp.stderr.readlines(): + line = str(line, sys.stdout.encoding) if match(line): add(line) if warnings: if config["warnings"]: - with open(config["warnings"], "wb") as f: + with open(config["warnings"], "w") as f: print("".join(warnings), file=f) print("ERROR: %d deprecation warnings found" % len(warnings)) sys.exit(1) diff --git a/misc/build_helpers/show-tool-versions.py b/misc/build_helpers/show-tool-versions.py index f70183ae1..4a85207f5 
100644 --- a/misc/build_helpers/show-tool-versions.py +++ b/misc/build_helpers/show-tool-versions.py @@ -1,8 +1,7 @@ #! /usr/bin/env python -from __future__ import print_function - import locale, os, platform, subprocess, sys, traceback +from importlib.metadata import version, PackageNotFoundError def foldlines(s, numlines=None): @@ -72,17 +71,10 @@ def print_as_ver(): traceback.print_exc(file=sys.stderr) sys.stderr.flush() - def print_setuptools_ver(): try: - import pkg_resources - out = str(pkg_resources.require("setuptools")) - print("setuptools:", foldlines(out)) - except (ImportError, EnvironmentError): - sys.stderr.write("\nGot exception using 'pkg_resources' to get the version of setuptools. Exception follows\n") - traceback.print_exc(file=sys.stderr) - sys.stderr.flush() - except pkg_resources.DistributionNotFound: + print("setuptools:", version("setuptools")) + except PackageNotFoundError: print('setuptools: DistributionNotFound') @@ -91,14 +83,8 @@ def print_py_pkg_ver(pkgname, modulename=None): modulename = pkgname print() try: - import pkg_resources - out = str(pkg_resources.require(pkgname)) - print(pkgname + ': ' + foldlines(out)) - except (ImportError, EnvironmentError): - sys.stderr.write("\nGot exception using 'pkg_resources' to get the version of %s. 
Exception follows.\n" % (pkgname,)) - traceback.print_exc(file=sys.stderr) - sys.stderr.flush() - except pkg_resources.DistributionNotFound: + print(pkgname + ': ' + version(pkgname)) + except PackageNotFoundError: print(pkgname + ': DistributionNotFound') try: __import__(modulename) diff --git a/misc/build_helpers/test-osx-pkg.py b/misc/build_helpers/test-osx-pkg.py index aaf7bb47a..6dc51eeaf 100644 --- a/misc/build_helpers/test-osx-pkg.py +++ b/misc/build_helpers/test-osx-pkg.py @@ -29,7 +29,6 @@ # characteristic: 14.1.0 (/Applications/tahoe.app/support/lib/python2.7/site-packages) # pyasn1-modules: 0.0.5 (/Applications/tahoe.app/support/lib/python2.7/site-packages/pyasn1_modules-0.0.5-py2.7.egg) -from __future__ import print_function import os, re, shutil, subprocess, sys, tempfile diff --git a/misc/build_helpers/update-version.py b/misc/build_helpers/update-version.py new file mode 100644 index 000000000..75b22edae --- /dev/null +++ b/misc/build_helpers/update-version.py @@ -0,0 +1,95 @@ +# +# this updates the (tagged) version of the software +# +# Any "options" are hard-coded in here (e.g. 
the GnuPG key to use) +# + +author = "meejah " + + +import sys +import time +from datetime import datetime +from packaging.version import Version + +from dulwich.repo import Repo +from dulwich.porcelain import ( + tag_list, + tag_create, + status, +) + +from twisted.internet.task import ( + react, +) +from twisted.internet.defer import ( + ensureDeferred, +) + + +def existing_tags(git): + versions = sorted( + Version(v.decode("utf8").lstrip("tahoe-lafs-")) + for v in tag_list(git) + if v.startswith(b"tahoe-lafs-") + ) + return versions + + +def create_new_version(git): + versions = existing_tags(git) + biggest = versions[-1] + + return Version( + "{}.{}.{}".format( + biggest.major, + biggest.minor + 1, + 0, + ) + ) + + +async def main(reactor): + git = Repo(".") + + st = status(git) + if any(st.staged.values()) or st.unstaged: + print("unclean checkout; aborting") + raise SystemExit(1) + + v = create_new_version(git) + if "--no-tag" in sys.argv: + print(v) + return + + print("Existing tags: {}".format("\n".join(str(x) for x in existing_tags(git)))) + print("New tag will be {}".format(v)) + + # the "tag time" is seconds from the epoch .. we quantize these to + # the start of the day in question, in UTC. 
+ now = datetime.now() + s = now.utctimetuple() + ts = int( + time.mktime( + time.struct_time((s.tm_year, s.tm_mon, s.tm_mday, 0, 0, 0, 0, s.tm_yday, 0)) + ) + ) + tag_create( + repo=git, + tag="tahoe-lafs-{}".format(str(v)).encode("utf8"), + author=author.encode("utf8"), + message="Release {}".format(v).encode("utf8"), + annotated=True, + objectish=b"HEAD", + sign=author.encode("utf8"), + tag_time=ts, + tag_timezone=0, + ) + + print("Tag created locally, it is not pushed") + print("To push it run something like:") + print(" git push origin {}".format(v)) + + +if __name__ == "__main__": + react(lambda r: ensureDeferred(main(r))) diff --git a/misc/checkers/check_grid.py b/misc/checkers/check_grid.py index 0a68ed899..189e5a260 100644 --- a/misc/checkers/check_grid.py +++ b/misc/checkers/check_grid.py @@ -1,4 +1,3 @@ -from __future__ import print_function """ Test an existing Tahoe grid, both to see if the grid is still running and to diff --git a/misc/checkers/check_load.py b/misc/checkers/check_load.py index 21576ea3a..01a9ed832 100644 --- a/misc/checkers/check_load.py +++ b/misc/checkers/check_load.py @@ -1,5 +1,3 @@ -from __future__ import print_function - """ this is a load-generating client program. 
It does all of its work through a given tahoe node (specified by URL), and performs random reads and writes @@ -33,20 +31,11 @@ a mean of 10kB and a max of 100MB, so filesize=min(int(1.0/random(.0002)),1e8) """ +from __future__ import annotations import os, sys, httplib, binascii import urllib, json, random, time, urlparse -try: - from typing import Dict -except ImportError: - pass - -# Python 2 compatibility -from future.utils import PY2 -if PY2: - from future.builtins import str # noqa: F401 - if sys.argv[1] == "--stats": statsfiles = sys.argv[2:] # gather stats every 10 seconds, do a moving-window average of the last @@ -54,9 +43,9 @@ if sys.argv[1] == "--stats": DELAY = 10 MAXSAMPLES = 6 totals = [] - last_stats = {} # type: Dict[str, float] + last_stats : dict[str, float] = {} while True: - stats = {} # type: Dict[str, float] + stats : dict[str, float] = {} for sf in statsfiles: for line in open(sf, "r").readlines(): name, str_value = line.split(":") diff --git a/misc/checkers/check_memory.py b/misc/checkers/check_memory.py deleted file mode 100644 index 268d77451..000000000 --- a/misc/checkers/check_memory.py +++ /dev/null @@ -1,522 +0,0 @@ -from __future__ import print_function - -import os, shutil, sys, urllib, time, stat, urlparse - -# Python 2 compatibility -from future.utils import PY2 -if PY2: - from future.builtins import str # noqa: F401 -from six.moves import cStringIO as StringIO - -from twisted.python.filepath import ( - FilePath, -) -from twisted.internet import defer, reactor, protocol, error -from twisted.application import service, internet -from twisted.web import client as tw_client -from twisted.python import log, procutils -from foolscap.api import Tub, fireEventually, flushEventualQueue - -from allmydata import client, introducer -from allmydata.immutable import upload -from allmydata.scripts import create_node -from allmydata.util import fileutil, pollmixin -from allmydata.util.fileutil import abspath_expanduser_unicode -from 
allmydata.util.encodingutil import get_filesystem_encoding - -from allmydata.scripts.common import ( - write_introducer, -) - -class StallableHTTPGetterDiscarder(tw_client.HTTPPageGetter, object): - full_speed_ahead = False - _bytes_so_far = 0 - stalled = None - def handleResponsePart(self, data): - self._bytes_so_far += len(data) - if not self.factory.do_stall: - return - if self.full_speed_ahead: - return - if self._bytes_so_far > 1e6+100: - if not self.stalled: - print("STALLING") - self.transport.pauseProducing() - self.stalled = reactor.callLater(10.0, self._resume_speed) - def _resume_speed(self): - print("RESUME SPEED") - self.stalled = None - self.full_speed_ahead = True - self.transport.resumeProducing() - def handleResponseEnd(self): - if self.stalled: - print("CANCEL") - self.stalled.cancel() - self.stalled = None - return tw_client.HTTPPageGetter.handleResponseEnd(self) - -class StallableDiscardingHTTPClientFactory(tw_client.HTTPClientFactory, object): - protocol = StallableHTTPGetterDiscarder - -def discardPage(url, stall=False, *args, **kwargs): - """Start fetching the URL, but stall our pipe after the first 1MB. - Wait 10 seconds, then resume downloading (and discarding) everything. - """ - # adapted from twisted.web.client.getPage . We can't just wrap or - # subclass because it provides no way to override the HTTPClientFactory - # that it creates. 
- scheme, netloc, path, params, query, fragment = urlparse.urlparse(url) - assert scheme == 'http' - host, port = netloc, 80 - if ":" in host: - host, port = host.split(":") - port = int(port) - factory = StallableDiscardingHTTPClientFactory(url, *args, **kwargs) - factory.do_stall = stall - reactor.connectTCP(host, port, factory) - return factory.deferred - -class ChildDidNotStartError(Exception): - pass - -class SystemFramework(pollmixin.PollMixin): - numnodes = 7 - - def __init__(self, basedir, mode): - self.basedir = basedir = abspath_expanduser_unicode(str(basedir)) - if not (basedir + os.path.sep).startswith(abspath_expanduser_unicode(u".") + os.path.sep): - raise AssertionError("safety issue: basedir must be a subdir") - self.testdir = testdir = os.path.join(basedir, "test") - if os.path.exists(testdir): - shutil.rmtree(testdir) - fileutil.make_dirs(testdir) - self.sparent = service.MultiService() - self.sparent.startService() - self.proc = None - self.tub = Tub() - self.tub.setOption("expose-remote-exception-types", False) - self.tub.setServiceParent(self.sparent) - self.mode = mode - self.failed = False - self.keepalive_file = None - - def run(self): - framelog = os.path.join(self.basedir, "driver.log") - log.startLogging(open(framelog, "a"), setStdout=False) - log.msg("CHECK_MEMORY(mode=%s) STARTING" % self.mode) - #logfile = open(os.path.join(self.testdir, "log"), "w") - #flo = log.FileLogObserver(logfile) - #log.startLoggingWithObserver(flo.emit, setStdout=False) - d = fireEventually() - d.addCallback(lambda res: self.setUp()) - d.addCallback(lambda res: self.record_initial_memusage()) - d.addCallback(lambda res: self.make_nodes()) - d.addCallback(lambda res: self.wait_for_client_connected()) - d.addCallback(lambda res: self.do_test()) - d.addBoth(self.tearDown) - def _err(err): - self.failed = err - log.err(err) - print(err) - d.addErrback(_err) - def _done(res): - reactor.stop() - return res - d.addBoth(_done) - reactor.run() - if self.failed: - # 
raiseException doesn't work for CopiedFailures - self.failed.raiseException() - - def setUp(self): - #print("STARTING") - self.stats = {} - self.statsfile = open(os.path.join(self.basedir, "stats.out"), "a") - self.make_introducer() - d = self.start_client() - def _record_control_furl(control_furl): - self.control_furl = control_furl - #print("OBTAINING '%s'" % (control_furl,)) - return self.tub.getReference(self.control_furl) - d.addCallback(_record_control_furl) - def _record_control(control_rref): - self.control_rref = control_rref - d.addCallback(_record_control) - def _ready(res): - #print("CLIENT READY") - pass - d.addCallback(_ready) - return d - - def record_initial_memusage(self): - print() - print("Client started (no connections yet)") - d = self._print_usage() - d.addCallback(self.stash_stats, "init") - return d - - def wait_for_client_connected(self): - print() - print("Client connecting to other nodes..") - return self.control_rref.callRemote("wait_for_client_connections", - self.numnodes+1) - - def tearDown(self, passthrough): - # the client node will shut down in a few seconds - #os.remove(os.path.join(self.clientdir, client.Client.EXIT_TRIGGER_FILE)) - log.msg("shutting down SystemTest services") - if self.keepalive_file and os.path.exists(self.keepalive_file): - age = time.time() - os.stat(self.keepalive_file)[stat.ST_MTIME] - log.msg("keepalive file at shutdown was %ds old" % age) - d = defer.succeed(None) - if self.proc: - d.addCallback(lambda res: self.kill_client()) - d.addCallback(lambda res: self.sparent.stopService()) - d.addCallback(lambda res: flushEventualQueue()) - def _close_statsfile(res): - self.statsfile.close() - d.addCallback(_close_statsfile) - d.addCallback(lambda res: passthrough) - return d - - def make_introducer(self): - iv_basedir = os.path.join(self.testdir, "introducer") - os.mkdir(iv_basedir) - self.introducer = introducer.IntroducerNode(basedir=iv_basedir) - self.introducer.setServiceParent(self) - self.introducer_furl = 
self.introducer.introducer_url - - def make_nodes(self): - root = FilePath(self.testdir) - self.nodes = [] - for i in range(self.numnodes): - nodedir = root.child("node%d" % (i,)) - private = nodedir.child("private") - private.makedirs() - write_introducer(nodedir, "default", self.introducer_url) - config = ( - "[client]\n" - "shares.happy = 1\n" - "[storage]\n" - ) - # the only tests for which we want the internal nodes to actually - # retain shares are the ones where somebody's going to download - # them. - if self.mode in ("download", "download-GET", "download-GET-slow"): - # retain shares - pass - else: - # for these tests, we tell the storage servers to pretend to - # accept shares, but really just throw them out, since we're - # only testing upload and not download. - config += "debug_discard = true\n" - if self.mode in ("receive",): - # for this mode, the client-under-test gets all the shares, - # so our internal nodes can refuse requests - config += "readonly = true\n" - nodedir.child("tahoe.cfg").setContent(config) - c = client.Client(basedir=nodedir.path) - c.setServiceParent(self) - self.nodes.append(c) - # the peers will start running, eventually they will connect to each - # other and the introducer - - def touch_keepalive(self): - if os.path.exists(self.keepalive_file): - age = time.time() - os.stat(self.keepalive_file)[stat.ST_MTIME] - log.msg("touching keepalive file, was %ds old" % age) - f = open(self.keepalive_file, "w") - f.write("""\ -If the node notices this file at startup, it will poll every 5 seconds and -terminate if the file is more than 10 seconds old, or if it has been deleted. -If the test harness has an internal failure and neglects to kill off the node -itself, this helps to avoid leaving processes lying around. The contents of -this file are ignored. 
- """) - f.close() - - def start_client(self): - # this returns a Deferred that fires with the client's control.furl - log.msg("MAKING CLIENT") - # self.testdir is an absolute Unicode path - clientdir = self.clientdir = os.path.join(self.testdir, u"client") - clientdir_str = clientdir.encode(get_filesystem_encoding()) - quiet = StringIO() - create_node.create_node({'basedir': clientdir}, out=quiet) - log.msg("DONE MAKING CLIENT") - write_introducer(clientdir, "default", self.introducer_furl) - # now replace tahoe.cfg - # set webport=0 and then ask the node what port it picked. - f = open(os.path.join(clientdir, "tahoe.cfg"), "w") - f.write("[node]\n" - "web.port = tcp:0:interface=127.0.0.1\n" - "[client]\n" - "shares.happy = 1\n" - "[storage]\n" - ) - - if self.mode in ("upload-self", "receive"): - # accept and store shares, to trigger the memory consumption bugs - pass - else: - # don't accept any shares - f.write("readonly = true\n") - ## also, if we do receive any shares, throw them away - #f.write("debug_discard = true") - if self.mode == "upload-self": - pass - f.close() - self.keepalive_file = os.path.join(clientdir, - client.Client.EXIT_TRIGGER_FILE) - # now start updating the mtime. - self.touch_keepalive() - ts = internet.TimerService(1.0, self.touch_keepalive) - ts.setServiceParent(self.sparent) - - pp = ClientWatcher() - self.proc_done = pp.d = defer.Deferred() - logfile = os.path.join(self.basedir, "client.log") - tahoes = procutils.which("tahoe") - if not tahoes: - raise RuntimeError("unable to find a 'tahoe' executable") - cmd = [tahoes[0], "run", ".", "-l", logfile] - env = os.environ.copy() - self.proc = reactor.spawnProcess(pp, cmd[0], cmd, env, path=clientdir_str) - log.msg("CLIENT STARTED") - - # now we wait for the client to get started. we're looking for the - # control.furl file to appear. 
- furl_file = os.path.join(clientdir, "private", "control.furl") - url_file = os.path.join(clientdir, "node.url") - def _check(): - if pp.ended and pp.ended.value.status != 0: - # the twistd process ends normally (with rc=0) if the child - # is successfully launched. It ends abnormally (with rc!=0) - # if the child cannot be launched. - raise ChildDidNotStartError("process ended while waiting for startup") - return os.path.exists(furl_file) - d = self.poll(_check, 0.1) - # once it exists, wait a moment before we read from it, just in case - # it hasn't finished writing the whole thing. Ideally control.furl - # would be created in some atomic fashion, or made non-readable until - # it's ready, but I can't think of an easy way to do that, and I - # think the chances that we'll observe a half-write are pretty low. - def _stall(res): - d2 = defer.Deferred() - reactor.callLater(0.1, d2.callback, None) - return d2 - d.addCallback(_stall) - def _read(res): - # read the node's URL - self.webish_url = open(url_file, "r").read().strip() - if self.webish_url[-1] == "/": - # trim trailing slash, since the rest of the code wants it gone - self.webish_url = self.webish_url[:-1] - f = open(furl_file, "r") - furl = f.read() - return furl.strip() - d.addCallback(_read) - return d - - - def kill_client(self): - # returns a Deferred that fires when the process exits. This may only - # be called once. 
- try: - self.proc.signalProcess("INT") - except error.ProcessExitedAlready: - pass - return self.proc_done - - - def create_data(self, name, size): - filename = os.path.join(self.testdir, name + ".data") - f = open(filename, "wb") - block = "a" * 8192 - while size > 0: - l = min(size, 8192) - f.write(block[:l]) - size -= l - return filename - - def stash_stats(self, stats, name): - self.statsfile.write("%s %s: %d\n" % (self.mode, name, stats['VmPeak'])) - self.statsfile.flush() - self.stats[name] = stats['VmPeak'] - - def POST(self, urlpath, **fields): - url = self.webish_url + urlpath - sepbase = "boogabooga" - sep = "--" + sepbase - form = [] - form.append(sep) - form.append('Content-Disposition: form-data; name="_charset"') - form.append('') - form.append('UTF-8') - form.append(sep) - for name, value in fields.iteritems(): - if isinstance(value, tuple): - filename, value = value - form.append('Content-Disposition: form-data; name="%s"; ' - 'filename="%s"' % (name, filename)) - else: - form.append('Content-Disposition: form-data; name="%s"' % name) - form.append('') - form.append(value) - form.append(sep) - form[-1] += "--" - body = "\r\n".join(form) + "\r\n" - headers = {"content-type": "multipart/form-data; boundary=%s" % sepbase, - } - return tw_client.getPage(url, method="POST", postdata=body, - headers=headers, followRedirect=False) - - def GET_discard(self, urlpath, stall): - url = self.webish_url + urlpath + "?filename=dummy-get.out" - return discardPage(url, stall) - - def _print_usage(self, res=None): - d = self.control_rref.callRemote("get_memory_usage") - def _print(stats): - print("VmSize: %9d VmPeak: %9d" % (stats["VmSize"], - stats["VmPeak"])) - return stats - d.addCallback(_print) - return d - - def _do_upload(self, res, size, files, uris): - name = '%d' % size - print() - print("uploading %s" % name) - if self.mode in ("upload", "upload-self"): - d = self.control_rref.callRemote("upload_random_data_from_file", - size, - 
convergence="check-memory") - elif self.mode == "upload-POST": - data = "a" * size - url = "/uri" - d = self.POST(url, t="upload", file=("%d.data" % size, data)) - elif self.mode in ("receive", - "download", "download-GET", "download-GET-slow"): - # mode=receive: upload the data from a local peer, so that the - # client-under-test receives and stores the shares - # - # mode=download*: upload the data from a local peer, then have - # the client-under-test download it. - # - # we need to wait until the uploading node has connected to all - # peers, since the wait_for_client_connections() above doesn't - # pay attention to our self.nodes[] and their connections. - files[name] = self.create_data(name, size) - u = self.nodes[0].getServiceNamed("uploader") - d = self.nodes[0].debug_wait_for_client_connections(self.numnodes+1) - d.addCallback(lambda res: - u.upload(upload.FileName(files[name], - convergence="check-memory"))) - d.addCallback(lambda results: results.get_uri()) - else: - raise ValueError("unknown mode=%s" % self.mode) - def _complete(uri): - uris[name] = uri - print("uploaded %s" % name) - d.addCallback(_complete) - return d - - def _do_download(self, res, size, uris): - if self.mode not in ("download", "download-GET", "download-GET-slow"): - return - name = '%d' % size - print("downloading %s" % name) - uri = uris[name] - - if self.mode == "download": - d = self.control_rref.callRemote("download_to_tempfile_and_delete", - uri) - elif self.mode == "download-GET": - url = "/uri/%s" % uri - d = self.GET_discard(urllib.quote(url), stall=False) - elif self.mode == "download-GET-slow": - url = "/uri/%s" % uri - d = self.GET_discard(urllib.quote(url), stall=True) - - def _complete(res): - print("downloaded %s" % name) - return res - d.addCallback(_complete) - return d - - def do_test(self): - #print("CLIENT STARTED") - #print("FURL", self.control_furl) - #print("RREF", self.control_rref) - #print() - kB = 1000; MB = 1000*1000 - files = {} - uris = {} - - d = 
self._print_usage() - d.addCallback(self.stash_stats, "0B") - - for i in range(10): - d.addCallback(self._do_upload, 10*kB+i, files, uris) - d.addCallback(self._do_download, 10*kB+i, uris) - d.addCallback(self._print_usage) - d.addCallback(self.stash_stats, "10kB") - - for i in range(3): - d.addCallback(self._do_upload, 10*MB+i, files, uris) - d.addCallback(self._do_download, 10*MB+i, uris) - d.addCallback(self._print_usage) - d.addCallback(self.stash_stats, "10MB") - - for i in range(1): - d.addCallback(self._do_upload, 50*MB+i, files, uris) - d.addCallback(self._do_download, 50*MB+i, uris) - d.addCallback(self._print_usage) - d.addCallback(self.stash_stats, "50MB") - - #for i in range(1): - # d.addCallback(self._do_upload, 100*MB+i, files, uris) - # d.addCallback(self._do_download, 100*MB+i, uris) - # d.addCallback(self._print_usage) - #d.addCallback(self.stash_stats, "100MB") - - #d.addCallback(self.stall) - def _done(res): - print("FINISHING") - d.addCallback(_done) - return d - - def stall(self, res): - d = defer.Deferred() - reactor.callLater(5, d.callback, None) - return d - - -class ClientWatcher(protocol.ProcessProtocol, object): - ended = False - def outReceived(self, data): - print("OUT:", data) - def errReceived(self, data): - print("ERR:", data) - def processEnded(self, reason): - self.ended = reason - self.d.callback(None) - - -if __name__ == '__main__': - mode = "upload" - if len(sys.argv) > 1: - mode = sys.argv[1] - if sys.maxsize == 2147483647: - bits = "32" - elif sys.maxsize == 9223372036854775807: - bits = "64" - else: - bits = "?" - print("%s-bit system (sys.maxsize=%d)" % (bits, sys.maxsize)) - # put the logfile and stats.out in _test_memory/ . These stick around. - # put the nodes and other files in _test_memory/test/ . These are - # removed each time we run. 
- sf = SystemFramework("_test_memory", mode) - sf.run() diff --git a/misc/checkers/check_speed.py b/misc/checkers/check_speed.py deleted file mode 100644 index 2fce53387..000000000 --- a/misc/checkers/check_speed.py +++ /dev/null @@ -1,234 +0,0 @@ -from __future__ import print_function - -import os, sys -from twisted.internet import reactor, defer -from twisted.python import log -from twisted.application import service -from foolscap.api import Tub, fireEventually - -MB = 1000000 - -class SpeedTest(object): - DO_IMMUTABLE = True - DO_MUTABLE_CREATE = True - DO_MUTABLE = True - - def __init__(self, test_client_dir): - #self.real_stderr = sys.stderr - log.startLogging(open("st.log", "a"), setStdout=False) - f = open(os.path.join(test_client_dir, "private", "control.furl"), "r") - self.control_furl = f.read().strip() - f.close() - self.base_service = service.MultiService() - self.failed = None - self.upload_times = {} - self.download_times = {} - - def run(self): - print("STARTING") - d = fireEventually() - d.addCallback(lambda res: self.setUp()) - d.addCallback(lambda res: self.do_test()) - d.addBoth(self.tearDown) - def _err(err): - self.failed = err - log.err(err) - print(err) - d.addErrback(_err) - def _done(res): - reactor.stop() - return res - d.addBoth(_done) - reactor.run() - if self.failed: - print("EXCEPTION") - print(self.failed) - sys.exit(1) - - def setUp(self): - self.base_service.startService() - self.tub = Tub() - self.tub.setOption("expose-remote-exception-types", False) - self.tub.setServiceParent(self.base_service) - d = self.tub.getReference(self.control_furl) - def _gotref(rref): - self.client_rref = rref - print("Got Client Control reference") - return self.stall(5) - d.addCallback(_gotref) - return d - - def stall(self, delay, result=None): - d = defer.Deferred() - reactor.callLater(delay, d.callback, result) - return d - - def record_times(self, times, key): - print("TIME (%s): %s up, %s down" % (key, times[0], times[1])) - 
self.upload_times[key], self.download_times[key] = times - - def one_test(self, res, name, count, size, mutable): - # values for 'mutable': - # False (upload a different CHK file for each 'count') - # "create" (upload different contents into a new SSK file) - # "upload" (upload different contents into the same SSK file. The - # time consumed does not include the creation of the file) - d = self.client_rref.callRemote("speed_test", count, size, mutable) - d.addCallback(self.record_times, name) - return d - - def measure_rtt(self, res): - # use RIClient.get_nodeid() to measure the foolscap-level RTT - d = self.client_rref.callRemote("measure_peer_response_time") - def _got(res): - assert len(res) # need at least one peer - times = res.values() - self.total_rtt = sum(times) - self.average_rtt = sum(times) / len(times) - self.max_rtt = max(times) - print("num-peers: %d" % len(times)) - print("total-RTT: %f" % self.total_rtt) - print("average-RTT: %f" % self.average_rtt) - print("max-RTT: %f" % self.max_rtt) - d.addCallback(_got) - return d - - def do_test(self): - print("doing test") - d = defer.succeed(None) - d.addCallback(self.one_test, "startup", 1, 1000, False) #ignore this one - d.addCallback(self.measure_rtt) - - if self.DO_IMMUTABLE: - # immutable files - d.addCallback(self.one_test, "1x 200B", 1, 200, False) - d.addCallback(self.one_test, "10x 200B", 10, 200, False) - def _maybe_do_100x_200B(res): - if self.upload_times["10x 200B"] < 5: - print("10x 200B test went too fast, doing 100x 200B test") - return self.one_test(None, "100x 200B", 100, 200, False) - return - d.addCallback(_maybe_do_100x_200B) - d.addCallback(self.one_test, "1MB", 1, 1*MB, False) - d.addCallback(self.one_test, "10MB", 1, 10*MB, False) - def _maybe_do_100MB(res): - if self.upload_times["10MB"] > 30: - print("10MB test took too long, skipping 100MB test") - return - return self.one_test(None, "100MB", 1, 100*MB, False) - d.addCallback(_maybe_do_100MB) - - if self.DO_MUTABLE_CREATE: - # 
mutable file creation - d.addCallback(self.one_test, "10x 200B SSK creation", 10, 200, - "create") - - if self.DO_MUTABLE: - # mutable file upload/download - d.addCallback(self.one_test, "10x 200B SSK", 10, 200, "upload") - def _maybe_do_100x_200B_SSK(res): - if self.upload_times["10x 200B SSK"] < 5: - print("10x 200B SSK test went too fast, doing 100x 200B SSK") - return self.one_test(None, "100x 200B SSK", 100, 200, - "upload") - return - d.addCallback(_maybe_do_100x_200B_SSK) - d.addCallback(self.one_test, "1MB SSK", 1, 1*MB, "upload") - - d.addCallback(self.calculate_speeds) - return d - - def calculate_speeds(self, res): - # time = A*size+B - # we assume that A*200bytes is negligible - - if self.DO_IMMUTABLE: - # upload - if "100x 200B" in self.upload_times: - B = self.upload_times["100x 200B"] / 100 - else: - B = self.upload_times["10x 200B"] / 10 - print("upload per-file time: %.3fs" % B) - print("upload per-file times-avg-RTT: %f" % (B / self.average_rtt)) - print("upload per-file times-total-RTT: %f" % (B / self.total_rtt)) - A1 = 1*MB / (self.upload_times["1MB"] - B) # in bytes per second - print("upload speed (1MB):", self.number(A1, "Bps")) - A2 = 10*MB / (self.upload_times["10MB"] - B) - print("upload speed (10MB):", self.number(A2, "Bps")) - if "100MB" in self.upload_times: - A3 = 100*MB / (self.upload_times["100MB"] - B) - print("upload speed (100MB):", self.number(A3, "Bps")) - - # download - if "100x 200B" in self.download_times: - B = self.download_times["100x 200B"] / 100 - else: - B = self.download_times["10x 200B"] / 10 - print("download per-file time: %.3fs" % B) - print("download per-file times-avg-RTT: %f" % (B / self.average_rtt)) - print("download per-file times-total-RTT: %f" % (B / self.total_rtt)) - A1 = 1*MB / (self.download_times["1MB"] - B) # in bytes per second - print("download speed (1MB):", self.number(A1, "Bps")) - A2 = 10*MB / (self.download_times["10MB"] - B) - print("download speed (10MB):", self.number(A2, "Bps")) - if 
"100MB" in self.download_times: - A3 = 100*MB / (self.download_times["100MB"] - B) - print("download speed (100MB):", self.number(A3, "Bps")) - - if self.DO_MUTABLE_CREATE: - # SSK creation - B = self.upload_times["10x 200B SSK creation"] / 10 - print("create per-file time SSK: %.3fs" % B) - - if self.DO_MUTABLE: - # upload SSK - if "100x 200B SSK" in self.upload_times: - B = self.upload_times["100x 200B SSK"] / 100 - else: - B = self.upload_times["10x 200B SSK"] / 10 - print("upload per-file time SSK: %.3fs" % B) - A1 = 1*MB / (self.upload_times["1MB SSK"] - B) # in bytes per second - print("upload speed SSK (1MB):", self.number(A1, "Bps")) - - # download SSK - if "100x 200B SSK" in self.download_times: - B = self.download_times["100x 200B SSK"] / 100 - else: - B = self.download_times["10x 200B SSK"] / 10 - print("download per-file time SSK: %.3fs" % B) - A1 = 1*MB / (self.download_times["1MB SSK"] - B) # in bytes per - # second - print("download speed SSK (1MB):", self.number(A1, "Bps")) - - def number(self, value, suffix=""): - scaling = 1 - if value < 1: - fmt = "%1.2g%s" - elif value < 100: - fmt = "%.1f%s" - elif value < 1000: - fmt = "%d%s" - elif value < 1e6: - fmt = "%.2fk%s"; scaling = 1e3 - elif value < 1e9: - fmt = "%.2fM%s"; scaling = 1e6 - elif value < 1e12: - fmt = "%.2fG%s"; scaling = 1e9 - elif value < 1e15: - fmt = "%.2fT%s"; scaling = 1e12 - elif value < 1e18: - fmt = "%.2fP%s"; scaling = 1e15 - else: - fmt = "huge! %g%s" - return fmt % (value / scaling, suffix) - - def tearDown(self, res): - d = self.base_service.stopService() - d.addCallback(lambda ignored: res) - return d - - -if __name__ == '__main__': - test_client_dir = sys.argv[1] - st = SpeedTest(test_client_dir) - st.run() diff --git a/misc/coding_tools/check-debugging.py b/misc/coding_tools/check-debugging.py index b920f5634..6bd54fee3 100755 --- a/misc/coding_tools/check-debugging.py +++ b/misc/coding_tools/check-debugging.py @@ -8,7 +8,6 @@ Runs on Python 3. 
Usage: ./check-debugging.py src """ -from __future__ import print_function import sys, re, os diff --git a/misc/coding_tools/check-interfaces.py b/misc/coding_tools/check-interfaces.py index 66bdf808f..d2657877a 100644 --- a/misc/coding_tools/check-interfaces.py +++ b/misc/coding_tools/check-interfaces.py @@ -4,7 +4,6 @@ # # bin/tahoe @misc/coding_tools/check-interfaces.py -from __future__ import print_function import os, sys, re, platform diff --git a/misc/coding_tools/check-umids.py b/misc/coding_tools/check-umids.py index 345610f3e..1ef557cee 100644 --- a/misc/coding_tools/check-umids.py +++ b/misc/coding_tools/check-umids.py @@ -8,7 +8,6 @@ This runs on Python 3. # ./check-umids.py src -from __future__ import print_function import sys, re, os diff --git a/misc/coding_tools/find-trailing-spaces.py b/misc/coding_tools/find-trailing-spaces.py deleted file mode 100644 index 19e7e3c28..000000000 --- a/misc/coding_tools/find-trailing-spaces.py +++ /dev/null @@ -1,44 +0,0 @@ -#!/usr/bin/env python - -from __future__ import print_function - -import os, sys - -from twisted.python import usage - -class Options(usage.Options): - optFlags = [ - ("recursive", "r", "Search for .py files recursively"), - ] - def parseArgs(self, *starting_points): - self.starting_points = starting_points - -found = [False] - -def check(fn): - f = open(fn, "r") - for i,line in enumerate(f.readlines()): - if line == "\n": - continue - if line[-1] == "\n": - line = line[:-1] - if line.rstrip() != line: - # the %s:%d:%d: lets emacs' compile-mode jump to those locations - print("%s:%d:%d: trailing whitespace" % (fn, i+1, len(line)+1)) - found[0] = True - f.close() - -o = Options() -o.parseOptions() -if o['recursive']: - for starting_point in o.starting_points: - for root, dirs, files in os.walk(starting_point): - for fn in [f for f in files if f.endswith(".py")]: - fn = os.path.join(root, fn) - check(fn) -else: - for fn in o.starting_points: - check(fn) -if found[0]: - sys.exit(1) -sys.exit(0) diff 
--git a/misc/coding_tools/graph-deps.py b/misc/coding_tools/graph-deps.py index ad049093c..faa94450a 100755 --- a/misc/coding_tools/graph-deps.py +++ b/misc/coding_tools/graph-deps.py @@ -21,11 +21,10 @@ # Install 'click' first. I run this with py2, but py3 might work too, if the # wheels can be built with py3. -from __future__ import unicode_literals, print_function import os, sys, subprocess, json, tempfile, zipfile, re, itertools import email.parser from pprint import pprint -from six.moves import StringIO +from io import StringIO import click all_packages = {} # name -> version diff --git a/misc/coding_tools/make-canary-files.py b/misc/coding_tools/make-canary-files.py index 89f274b38..018462892 100644 --- a/misc/coding_tools/make-canary-files.py +++ b/misc/coding_tools/make-canary-files.py @@ -1,6 +1,5 @@ #!/usr/bin/env python -from __future__ import print_function """ Given a list of nodeids and a 'convergence' file, create a bunch of files diff --git a/misc/coding_tools/make_umid b/misc/coding_tools/make_umid index 6b1759681..870ece1c6 100644 --- a/misc/coding_tools/make_umid +++ b/misc/coding_tools/make_umid @@ -1,6 +1,5 @@ #!/usr/bin/env python -from __future__ import print_function """Create a short probably-unique string for use as a umid= argument in a Foolscap log() call, to make it easier to locate the source code that diff --git a/misc/operations_helpers/cpu-watcher-poll.py b/misc/operations_helpers/cpu-watcher-poll.py index 320dd8ad7..0ecf974c6 100644 --- a/misc/operations_helpers/cpu-watcher-poll.py +++ b/misc/operations_helpers/cpu-watcher-poll.py @@ -1,6 +1,5 @@ #!/usr/bin/env python -from __future__ import print_function from foolscap import Tub, eventual from twisted.internet import reactor diff --git a/misc/operations_helpers/cpu-watcher-subscribe.py b/misc/operations_helpers/cpu-watcher-subscribe.py index 36a69cac7..ce486832f 100644 --- a/misc/operations_helpers/cpu-watcher-subscribe.py +++ b/misc/operations_helpers/cpu-watcher-subscribe.py 
@@ -1,6 +1,5 @@ # -*- python -*- -from __future__ import print_function from twisted.internet import reactor import sys diff --git a/misc/operations_helpers/cpu-watcher.tac b/misc/operations_helpers/cpu-watcher.tac index c50b51c61..140625d58 100644 --- a/misc/operations_helpers/cpu-watcher.tac +++ b/misc/operations_helpers/cpu-watcher.tac @@ -1,6 +1,5 @@ # -*- python -*- -from __future__ import print_function """ # run this tool on a linux box in its own directory, with a file named diff --git a/misc/operations_helpers/find-share-anomalies.py b/misc/operations_helpers/find-share-anomalies.py index d689a8c99..e3826cc69 100644 --- a/misc/operations_helpers/find-share-anomalies.py +++ b/misc/operations_helpers/find-share-anomalies.py @@ -2,7 +2,6 @@ # feed this the results of 'tahoe catalog-shares' for all servers -from __future__ import print_function import sys diff --git a/misc/operations_helpers/getmem.py b/misc/operations_helpers/getmem.py deleted file mode 100644 index b3c6285fe..000000000 --- a/misc/operations_helpers/getmem.py +++ /dev/null @@ -1,20 +0,0 @@ -#! 
/usr/bin/env python - -from __future__ import print_function - -from foolscap import Tub -from foolscap.eventual import eventually -import sys -from twisted.internet import reactor - -def go(): - t = Tub() - d = t.getReference(sys.argv[1]) - d.addCallback(lambda rref: rref.callRemote("get_memory_usage")) - def _got(res): - print(res) - reactor.stop() - d.addCallback(_got) - -eventually(go) -reactor.run() diff --git a/misc/operations_helpers/munin/tahoe_cpu_watcher b/misc/operations_helpers/munin/tahoe_cpu_watcher index 8f2876792..ca7e40a32 100644 --- a/misc/operations_helpers/munin/tahoe_cpu_watcher +++ b/misc/operations_helpers/munin/tahoe_cpu_watcher @@ -1,6 +1,5 @@ #!/usr/bin/env python -from __future__ import print_function import os, sys, re import urllib diff --git a/misc/operations_helpers/munin/tahoe_diskleft b/misc/operations_helpers/munin/tahoe_diskleft index d5ce04b1a..b08422575 100644 --- a/misc/operations_helpers/munin/tahoe_diskleft +++ b/misc/operations_helpers/munin/tahoe_diskleft @@ -5,7 +5,6 @@ # is left on all disks across the grid. The plugin should be configured with # env_url= pointing at the diskwatcher.tac webport. -from __future__ import print_function import os, sys, urllib, json diff --git a/misc/operations_helpers/munin/tahoe_disktotal b/misc/operations_helpers/munin/tahoe_disktotal index b6d1a99e6..801eac164 100644 --- a/misc/operations_helpers/munin/tahoe_disktotal +++ b/misc/operations_helpers/munin/tahoe_disktotal @@ -6,7 +6,6 @@ # used. The plugin should be configured with env_url= pointing at the # diskwatcher.tac webport. -from __future__ import print_function import os, sys, urllib, json diff --git a/misc/operations_helpers/munin/tahoe_diskusage b/misc/operations_helpers/munin/tahoe_diskusage index cc37af3df..7eadd8eeb 100644 --- a/misc/operations_helpers/munin/tahoe_diskusage +++ b/misc/operations_helpers/munin/tahoe_diskusage @@ -5,7 +5,6 @@ # is being used per unit time. 
The plugin should be configured with env_url= # pointing at the diskwatcher.tac webport. -from __future__ import print_function import os, sys, urllib, json diff --git a/misc/operations_helpers/munin/tahoe_diskused b/misc/operations_helpers/munin/tahoe_diskused index 26303af86..151dc826e 100644 --- a/misc/operations_helpers/munin/tahoe_diskused +++ b/misc/operations_helpers/munin/tahoe_diskused @@ -5,7 +5,6 @@ # used on all disks across the grid. The plugin should be configured with # env_url= pointing at the diskwatcher.tac webport. -from __future__ import print_function import os, sys, urllib, json diff --git a/misc/operations_helpers/munin/tahoe_doomsday b/misc/operations_helpers/munin/tahoe_doomsday index 5a87489c2..348b244fe 100644 --- a/misc/operations_helpers/munin/tahoe_doomsday +++ b/misc/operations_helpers/munin/tahoe_doomsday @@ -5,7 +5,6 @@ # left before the grid fills up. The plugin should be configured with # env_url= pointing at the diskwatcher.tac webport. -from __future__ import print_function import os, sys, urllib, json diff --git a/misc/operations_helpers/munin/tahoe_estimate_files b/misc/operations_helpers/munin/tahoe_estimate_files index 1dda5affb..e6b6eff5d 100644 --- a/misc/operations_helpers/munin/tahoe_estimate_files +++ b/misc/operations_helpers/munin/tahoe_estimate_files @@ -1,6 +1,5 @@ #!/usr/bin/env python -from __future__ import print_function import sys, os.path diff --git a/misc/operations_helpers/munin/tahoe_files b/misc/operations_helpers/munin/tahoe_files index ec3ee5073..d951985a8 100644 --- a/misc/operations_helpers/munin/tahoe_files +++ b/misc/operations_helpers/munin/tahoe_files @@ -18,7 +18,6 @@ # env.basedir_NODE3 /path/to/node3 # -from __future__ import print_function import os, sys diff --git a/misc/operations_helpers/munin/tahoe_helperstats_active b/misc/operations_helpers/munin/tahoe_helperstats_active index ba1032acb..315a00886 100644 --- a/misc/operations_helpers/munin/tahoe_helperstats_active +++ 
b/misc/operations_helpers/munin/tahoe_helperstats_active @@ -1,6 +1,5 @@ #!/usr/bin/env python -from __future__ import print_function import os, sys import urllib diff --git a/misc/operations_helpers/munin/tahoe_helperstats_fetched b/misc/operations_helpers/munin/tahoe_helperstats_fetched index 5f53bb82c..f9577427c 100644 --- a/misc/operations_helpers/munin/tahoe_helperstats_fetched +++ b/misc/operations_helpers/munin/tahoe_helperstats_fetched @@ -1,6 +1,5 @@ #!/usr/bin/env python -from __future__ import print_function import os, sys import urllib diff --git a/misc/operations_helpers/munin/tahoe_introstats b/misc/operations_helpers/munin/tahoe_introstats index 0373c70e2..5dd07f62d 100644 --- a/misc/operations_helpers/munin/tahoe_introstats +++ b/misc/operations_helpers/munin/tahoe_introstats @@ -1,6 +1,5 @@ #!/usr/bin/env python -from __future__ import print_function import os, sys import urllib diff --git a/misc/operations_helpers/munin/tahoe_nodememory b/misc/operations_helpers/munin/tahoe_nodememory index 061a50dc2..71463c031 100644 --- a/misc/operations_helpers/munin/tahoe_nodememory +++ b/misc/operations_helpers/munin/tahoe_nodememory @@ -4,7 +4,6 @@ # by 'allmydata start', then extracts the amount of memory they consume (both # VmSize and VmRSS) from /proc -from __future__ import print_function import os, sys, re diff --git a/misc/operations_helpers/munin/tahoe_overhead b/misc/operations_helpers/munin/tahoe_overhead index 40640d189..6a25bcd46 100644 --- a/misc/operations_helpers/munin/tahoe_overhead +++ b/misc/operations_helpers/munin/tahoe_overhead @@ -27,7 +27,6 @@ # This plugin should be configured with env_diskwatcher_url= pointing at the # diskwatcher.tac webport, and env_deepsize_url= pointing at the PHP script. 
-from __future__ import print_function import os, sys, urllib, json diff --git a/misc/operations_helpers/munin/tahoe_rootdir_space b/misc/operations_helpers/munin/tahoe_rootdir_space index 1f5709206..ca61ddb13 100644 --- a/misc/operations_helpers/munin/tahoe_rootdir_space +++ b/misc/operations_helpers/munin/tahoe_rootdir_space @@ -1,6 +1,5 @@ #!/usr/bin/env python -from __future__ import print_function import os, sys import urllib diff --git a/misc/operations_helpers/munin/tahoe_server_latency_ b/misc/operations_helpers/munin/tahoe_server_latency_ index c8930804c..4c9a79a7a 100644 --- a/misc/operations_helpers/munin/tahoe_server_latency_ +++ b/misc/operations_helpers/munin/tahoe_server_latency_ @@ -42,7 +42,6 @@ # of course, these URLs must match the webports you have configured into the # storage nodes. -from __future__ import print_function import os, sys import urllib diff --git a/misc/operations_helpers/munin/tahoe_server_operations_ b/misc/operations_helpers/munin/tahoe_server_operations_ index 6156a7f48..cdf0409dd 100644 --- a/misc/operations_helpers/munin/tahoe_server_operations_ +++ b/misc/operations_helpers/munin/tahoe_server_operations_ @@ -32,7 +32,6 @@ # of course, these URLs must match the webports you have configured into the # storage nodes. 
-from __future__ import print_function import os, sys import urllib diff --git a/misc/operations_helpers/munin/tahoe_spacetime b/misc/operations_helpers/munin/tahoe_spacetime index 12b5121bf..e3a058851 100644 --- a/misc/operations_helpers/munin/tahoe_spacetime +++ b/misc/operations_helpers/munin/tahoe_spacetime @@ -5,7 +5,6 @@ # then extrapolate to guess how many weeks/months/years of storage space we # have left, and output it to another munin graph -from __future__ import print_function import sys, os, time import rrdtool diff --git a/misc/operations_helpers/munin/tahoe_stats b/misc/operations_helpers/munin/tahoe_stats index 03bf116f5..f94dd5b36 100644 --- a/misc/operations_helpers/munin/tahoe_stats +++ b/misc/operations_helpers/munin/tahoe_stats @@ -1,6 +1,5 @@ #!/usr/bin/env python -from __future__ import print_function import os import json diff --git a/misc/operations_helpers/munin/tahoe_storagespace b/misc/operations_helpers/munin/tahoe_storagespace index 73443b428..318283244 100644 --- a/misc/operations_helpers/munin/tahoe_storagespace +++ b/misc/operations_helpers/munin/tahoe_storagespace @@ -18,7 +18,6 @@ # Allmydata-tahoe must be installed on the system where this plugin is used, # since it imports a utility module from allmydata.utils . -from __future__ import print_function import os, sys import commands diff --git a/misc/operations_helpers/provisioning/reliability.py b/misc/operations_helpers/provisioning/reliability.py index fe274c875..c31e398e0 100644 --- a/misc/operations_helpers/provisioning/reliability.py +++ b/misc/operations_helpers/provisioning/reliability.py @@ -1,6 +1,5 @@ #! 
/usr/bin/python -from __future__ import print_function import math from allmydata.util import statistics diff --git a/misc/operations_helpers/provisioning/test_provisioning.py b/misc/operations_helpers/provisioning/test_provisioning.py index 2b71c8566..5d46f704e 100644 --- a/misc/operations_helpers/provisioning/test_provisioning.py +++ b/misc/operations_helpers/provisioning/test_provisioning.py @@ -1,4 +1,3 @@ -from __future__ import print_function import unittest from allmydata import provisioning diff --git a/misc/operations_helpers/spacetime/diskwatcher.tac b/misc/operations_helpers/spacetime/diskwatcher.tac index 0a43a468e..22c1f8747 100644 --- a/misc/operations_helpers/spacetime/diskwatcher.tac +++ b/misc/operations_helpers/spacetime/diskwatcher.tac @@ -1,6 +1,5 @@ # -*- python -*- -from __future__ import print_function """ Run this tool with twistd in its own directory, with a file named 'urls.txt' diff --git a/misc/python3/Makefile b/misc/python3/Makefile deleted file mode 100644 index f0ef8b12a..000000000 --- a/misc/python3/Makefile +++ /dev/null @@ -1,53 +0,0 @@ -# Python 3 porting targets -# -# NOTE: this Makefile requires GNU make - -### Defensive settings for make: -# https://tech.davis-hansson.com/p/make/ -SHELL := bash -.ONESHELL: -.SHELLFLAGS := -xeu -o pipefail -c -.SILENT: -.DELETE_ON_ERROR: -MAKEFLAGS += --warn-undefined-variables -MAKEFLAGS += --no-builtin-rules - - -# Top-level, phony targets - -.PHONY: default -default: - @echo "no default target" - -.PHONY: test-py3-all-before -## Log the output of running all tests under Python 3 before changes -test-py3-all-before: ../../.tox/make-test-py3-all-old.log -.PHONY: test-py3-all-diff -## Compare the output of running all tests under Python 3 after changes -test-py3-all-diff: ../../.tox/make-test-py3-all.diff - - -# Real targets - -# Gauge the impact of changes on Python 3 compatibility -# Compare the output from running all tests under Python 3 before and after changes. 
-# Before changes: -# `$ rm -f .tox/make-test-py3-all-*.log && make .tox/make-test-py3-all-old.log` -# After changes: -# `$ make .tox/make-test-py3-all.diff` -$(foreach side,old new,../../.tox/make-test-py3-all-$(side).log): - cd "../../" - tox --develop --notest -e py36-coverage - (make VIRTUAL_ENV=./.tox/py36-coverage TEST_SUITE=allmydata \ - test-venv-coverage || true) | \ - sed -E 's/\([0-9]+\.[0-9]{3} secs\)/(#.### secs)/' | \ - tee "./misc/python3/$(@)" -../../.tox/make-test-py3-all.diff: ../../.tox/make-test-py3-all-new.log - (diff -u "$(<:%-new.log=%-old.log)" "$(<)" || true) | tee "$(@)" - -# Locate modules that are candidates for naively converting `unicode` -> `str`. -# List all Python source files that reference `unicode` but don't reference `str` -../../.tox/py3-unicode-no-str.ls: - cd "../../" - find src -type f -iname '*.py' -exec grep -l -E '\Wunicode\W' '{}' ';' | \ - xargs grep -L '\Wstr\W' | xargs ls -ld | tee "./misc/python3/$(@)" diff --git a/misc/simulators/bench_spans.py b/misc/simulators/bench_spans.py index c696dac1e..6bc4f045e 100644 --- a/misc/simulators/bench_spans.py +++ b/misc/simulators/bench_spans.py @@ -1,4 +1,3 @@ -from __future__ import print_function """ To use this, get a trace file such as this one: diff --git a/misc/simulators/count_dirs.py b/misc/simulators/count_dirs.py index 22eda8917..62045d1e2 100644 --- a/misc/simulators/count_dirs.py +++ b/misc/simulators/count_dirs.py @@ -1,6 +1,5 @@ #!/usr/bin/env python -from __future__ import print_function """ This tool estimates how much space would be consumed by a filetree into which diff --git a/misc/simulators/hashbasedsig.py b/misc/simulators/hashbasedsig.py index dbb9ca504..fff92b681 100644 --- a/misc/simulators/hashbasedsig.py +++ b/misc/simulators/hashbasedsig.py @@ -1,6 +1,5 @@ #!python -from __future__ import print_function # range of hash output lengths range_L_hash = [128] diff --git a/misc/simulators/ringsim.py b/misc/simulators/ringsim.py index e6616351c..889785bb4 
100644 --- a/misc/simulators/ringsim.py +++ b/misc/simulators/ringsim.py @@ -4,7 +4,6 @@ # import time -from __future__ import print_function import math from hashlib import md5 # sha1, sha256 diff --git a/misc/simulators/simulate_load.py b/misc/simulators/simulate_load.py index ed80ab842..989711207 100644 --- a/misc/simulators/simulate_load.py +++ b/misc/simulators/simulate_load.py @@ -2,7 +2,6 @@ # WARNING. There is a bug in this script so that it does not simulate the actual Tahoe Two server selection algorithm that it was intended to simulate. See http://allmydata.org/trac/tahoe-lafs/ticket/302 (stop permuting peerlist, use SI as offset into ring instead?) -from __future__ import print_function from past.builtins import cmp diff --git a/misc/simulators/simulator.py b/misc/simulators/simulator.py index ceeb05edf..b2f51b0e1 100644 --- a/misc/simulators/simulator.py +++ b/misc/simulators/simulator.py @@ -1,6 +1,5 @@ #! /usr/bin/env python -from __future__ import print_function import hashlib import os, random diff --git a/misc/simulators/sizes.py b/misc/simulators/sizes.py index eb5f3adbf..d9f861c2f 100644 --- a/misc/simulators/sizes.py +++ b/misc/simulators/sizes.py @@ -1,12 +1,5 @@ #! 
/usr/bin/env python -from __future__ import print_function - -from future.utils import PY2 -if PY2: - from future.builtins import input - - import random, math, re from twisted.python import usage diff --git a/misc/simulators/storage-overhead.py b/misc/simulators/storage-overhead.py index 5a741834e..096c18fba 100644 --- a/misc/simulators/storage-overhead.py +++ b/misc/simulators/storage-overhead.py @@ -1,11 +1,10 @@ #!/usr/bin/env python -from __future__ import print_function import sys, math from allmydata import uri, storage from allmydata.immutable import upload -from allmydata.interfaces import DEFAULT_MAX_SEGMENT_SIZE +from allmydata.interfaces import DEFAULT_IMMUTABLE_MAX_SEGMENT_SIZE from allmydata.util import mathutil def roundup(size, blocksize=4096): @@ -26,7 +25,7 @@ class BigFakeString(object): def tell(self): return self.fp -def calc(filesize, params=(3,7,10), segsize=DEFAULT_MAX_SEGMENT_SIZE): +def calc(filesize, params=(3,7,10), segsize=DEFAULT_IMMUTABLE_MAX_SEGMENT_SIZE): num_shares = params[2] if filesize <= upload.Uploader.URI_LIT_SIZE_THRESHOLD: urisize = len(uri.LiteralFileURI("A"*filesize).to_string()) diff --git a/misc/windows-enospc/passthrough.py b/misc/windows-enospc/passthrough.py new file mode 100644 index 000000000..1d4cd48bb --- /dev/null +++ b/misc/windows-enospc/passthrough.py @@ -0,0 +1,36 @@ +""" +Writing to non-blocking pipe can result in ENOSPC when using Unix APIs on +Windows. So, this program passes through data from stdin to stdout, using +Windows APIs instead of Unix-y APIs. 
+""" + +from twisted.internet.stdio import StandardIO +from twisted.internet import reactor +from twisted.internet.protocol import Protocol +from twisted.internet.interfaces import IHalfCloseableProtocol +from twisted.internet.error import ReactorNotRunning +from zope.interface import implementer + +@implementer(IHalfCloseableProtocol) +class Passthrough(Protocol): + def readConnectionLost(self): + self.transport.loseConnection() + + def writeConnectionLost(self): + try: + reactor.stop() + except ReactorNotRunning: + pass + + def dataReceived(self, data): + self.transport.write(data) + + def connectionLost(self, reason): + try: + reactor.stop() + except ReactorNotRunning: + pass + + +std = StandardIO(Passthrough()) +reactor.run() diff --git a/mypy.ini b/mypy.ini index 01cbb57a8..901304c87 100644 --- a/mypy.ini +++ b/mypy.ini @@ -1,3 +1,24 @@ [mypy] ignore_missing_imports = True plugins=mypy_zope:plugin +show_column_numbers = True +pretty = True +show_error_codes = True +warn_unused_configs =True +no_implicit_optional = True +warn_redundant_casts = True +strict_equality = True + +[mypy-allmydata.test.cli.wormholetesting,allmydata.listeners,allmydata.test.test_connection_status] +disallow_any_generics = True +disallow_subclassing_any = True +disallow_untyped_calls = True +disallow_untyped_defs = True +disallow_incomplete_defs = True +check_untyped_defs = True +disallow_untyped_decorators = True +warn_unused_ignores = True +warn_return_any = True +no_implicit_reexport = True +strict_equality = True +strict_concatenate = True diff --git a/newsfragments/1549.installation b/newsfragments/1549.installation deleted file mode 100644 index cbb91cea5..000000000 --- a/newsfragments/1549.installation +++ /dev/null @@ -1 +0,0 @@ -Tahoe-LAFS now requires Twisted 19.10.0 or newer. As a result, it now has a transitive dependency on bcrypt. 
diff --git a/newsfragments/3037.other b/newsfragments/3037.other deleted file mode 100644 index 947dc8f60..000000000 --- a/newsfragments/3037.other +++ /dev/null @@ -1 +0,0 @@ -The "Great Black Swamp" proposed specification has been expanded to include two lease management APIs. \ No newline at end of file diff --git a/newsfragments/3326.installation b/newsfragments/3326.installation deleted file mode 100644 index 2a3a64e32..000000000 --- a/newsfragments/3326.installation +++ /dev/null @@ -1 +0,0 @@ -Debian 8 support has been replaced with Debian 10 support. diff --git a/newsfragments/3399.feature b/newsfragments/3399.feature deleted file mode 100644 index d30a91679..000000000 --- a/newsfragments/3399.feature +++ /dev/null @@ -1 +0,0 @@ -Added 'typechecks' environment for tox running mypy and performing static typechecks. diff --git a/newsfragments/3428.minor b/newsfragments/3428.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3432.minor b/newsfragments/3432.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3433.installation b/newsfragments/3433.installation deleted file mode 100644 index 3c06e53d3..000000000 --- a/newsfragments/3433.installation +++ /dev/null @@ -1 +0,0 @@ -Tahoe-LAFS no longer depends on Nevow. 
\ No newline at end of file diff --git a/newsfragments/3434.minor b/newsfragments/3434.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3435.minor b/newsfragments/3435.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3454.minor b/newsfragments/3454.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3459.minor b/newsfragments/3459.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3460.minor b/newsfragments/3460.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3465.minor b/newsfragments/3465.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3466.minor b/newsfragments/3466.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3467.minor b/newsfragments/3467.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3468.minor b/newsfragments/3468.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3470.minor b/newsfragments/3470.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3471.minor b/newsfragments/3471.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3472.minor b/newsfragments/3472.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3473.minor b/newsfragments/3473.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3474.minor b/newsfragments/3474.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3475.minor b/newsfragments/3475.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3477.minor b/newsfragments/3477.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3479.minor b/newsfragments/3479.minor deleted file mode 100644 index 
e69de29bb..000000000 diff --git a/newsfragments/3481.minor b/newsfragments/3481.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3482.minor b/newsfragments/3482.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3483.minor b/newsfragments/3483.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3485.minor b/newsfragments/3485.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3486.installation b/newsfragments/3486.installation deleted file mode 100644 index 7b24956b2..000000000 --- a/newsfragments/3486.installation +++ /dev/null @@ -1 +0,0 @@ -Tahoe-LAFS now requires the `netifaces` Python package and no longer requires the external `ip`, `ifconfig`, or `route.exe` executables. diff --git a/newsfragments/3488.minor b/newsfragments/3488.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3490.minor b/newsfragments/3490.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3491.minor b/newsfragments/3491.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3492.minor b/newsfragments/3492.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3493.minor b/newsfragments/3493.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3496.minor b/newsfragments/3496.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3497.installation b/newsfragments/3497.installation deleted file mode 100644 index 4a50be97e..000000000 --- a/newsfragments/3497.installation +++ /dev/null @@ -1 +0,0 @@ -The Tahoe-LAFS project no longer commits to maintaining binary packages for all dependencies at . Please use PyPI instead. 
diff --git a/newsfragments/3499.minor b/newsfragments/3499.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3500.minor b/newsfragments/3500.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3501.minor b/newsfragments/3501.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3502.minor b/newsfragments/3502.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3503.other b/newsfragments/3503.other deleted file mode 100644 index 5d0c681b6..000000000 --- a/newsfragments/3503.other +++ /dev/null @@ -1 +0,0 @@ -The specification section of the Tahoe-LAFS documentation now includes explicit discussion of the security properties of Foolscap "fURLs" on which it depends. diff --git a/newsfragments/3504.configuration b/newsfragments/3504.configuration deleted file mode 100644 index 9ff74482c..000000000 --- a/newsfragments/3504.configuration +++ /dev/null @@ -1 +0,0 @@ -The ``[client]introducer.furl`` configuration item is now deprecated in favor of the ``private/introducers.yaml`` file. \ No newline at end of file diff --git a/newsfragments/3509.bugfix b/newsfragments/3509.bugfix deleted file mode 100644 index 4d633feab..000000000 --- a/newsfragments/3509.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix regression that broke flogtool results on Python 2. \ No newline at end of file diff --git a/newsfragments/3510.bugfix b/newsfragments/3510.bugfix deleted file mode 100644 index d4a2bd5dc..000000000 --- a/newsfragments/3510.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix a logging regression on Python 2 involving unicode strings. 
\ No newline at end of file diff --git a/newsfragments/3511.minor b/newsfragments/3511.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3513.minor b/newsfragments/3513.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3514.minor b/newsfragments/3514.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3515.minor b/newsfragments/3515.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3517.minor b/newsfragments/3517.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3518.removed b/newsfragments/3518.removed deleted file mode 100644 index 460af5142..000000000 --- a/newsfragments/3518.removed +++ /dev/null @@ -1 +0,0 @@ -Announcements delivered through the introducer system are no longer automatically annotated with copious information about the Tahoe-LAFS software version nor the versions of its dependencies. diff --git a/newsfragments/3520.minor b/newsfragments/3520.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3521.minor b/newsfragments/3521.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3522.minor b/newsfragments/3522.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3523.minor b/newsfragments/3523.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3524.minor b/newsfragments/3524.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3525.minor b/newsfragments/3525.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3528.minor b/newsfragments/3528.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3529.minor b/newsfragments/3529.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3532.minor b/newsfragments/3532.minor deleted 
file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3533.minor b/newsfragments/3533.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3534.minor b/newsfragments/3534.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3536.minor b/newsfragments/3536.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3537.minor b/newsfragments/3537.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3539.bugfix b/newsfragments/3539.bugfix deleted file mode 100644 index ed4aeb9af..000000000 --- a/newsfragments/3539.bugfix +++ /dev/null @@ -1 +0,0 @@ -Certain implementation-internal weakref KeyErrors are now handled and should no longer cause user-initiated operations to fail. diff --git a/newsfragments/3542.minor b/newsfragments/3542.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3544.minor b/newsfragments/3544.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3545.other b/newsfragments/3545.other deleted file mode 100644 index fd8adc37b..000000000 --- a/newsfragments/3545.other +++ /dev/null @@ -1 +0,0 @@ -The README, revised by Viktoriia with feedback from the team, is now more focused on the developer community and provides more information about Tahoe-LAFS, why it's important, and how someone can use it or start contributing to it. \ No newline at end of file diff --git a/newsfragments/3546.minor b/newsfragments/3546.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3547.minor b/newsfragments/3547.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3549.removed b/newsfragments/3549.removed deleted file mode 100644 index 53c7a7de1..000000000 --- a/newsfragments/3549.removed +++ /dev/null @@ -1 +0,0 @@ -The stats gatherer, broken since at least Tahoe-LAFS 1.13.0, has been removed. 
The ``[client]stats_gatherer.furl`` configuration item in ``tahoe.cfg`` is no longer allowed. The Tahoe-LAFS project recommends using a third-party metrics aggregation tool instead. diff --git a/newsfragments/3550.removed b/newsfragments/3550.removed deleted file mode 100644 index 2074bf676..000000000 --- a/newsfragments/3550.removed +++ /dev/null @@ -1 +0,0 @@ -The deprecated ``tahoe`` start, restart, stop, and daemonize sub-commands have been removed. \ No newline at end of file diff --git a/newsfragments/3551.minor b/newsfragments/3551.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3552.minor b/newsfragments/3552.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3553.minor b/newsfragments/3553.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3555.minor b/newsfragments/3555.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3557.minor b/newsfragments/3557.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3558.minor b/newsfragments/3558.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3560.minor b/newsfragments/3560.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3563.minor b/newsfragments/3563.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3564.minor b/newsfragments/3564.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3565.minor b/newsfragments/3565.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3566.minor b/newsfragments/3566.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3567.minor b/newsfragments/3567.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3568.minor b/newsfragments/3568.minor deleted file mode 100644 index 
e69de29bb..000000000 diff --git a/newsfragments/3572.minor b/newsfragments/3572.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3574.minor b/newsfragments/3574.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3575.minor b/newsfragments/3575.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3576.minor b/newsfragments/3576.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3577.minor b/newsfragments/3577.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3578.minor b/newsfragments/3578.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3579.minor b/newsfragments/3579.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3580.minor b/newsfragments/3580.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3582.minor b/newsfragments/3582.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3583.removed b/newsfragments/3583.removed deleted file mode 100644 index a3fce48be..000000000 --- a/newsfragments/3583.removed +++ /dev/null @@ -1 +0,0 @@ -FTP is no longer supported by Tahoe-LAFS. Please use the SFTP support instead. \ No newline at end of file diff --git a/newsfragments/3584.bugfix b/newsfragments/3584.bugfix deleted file mode 100644 index faf57713b..000000000 --- a/newsfragments/3584.bugfix +++ /dev/null @@ -1 +0,0 @@ -SFTP public key auth likely works more consistently, and SFTP in general was previously broken. 
\ No newline at end of file diff --git a/newsfragments/3587.minor b/newsfragments/3587.minor deleted file mode 100644 index 8b1378917..000000000 --- a/newsfragments/3587.minor +++ /dev/null @@ -1 +0,0 @@ - diff --git a/newsfragments/3588.incompat b/newsfragments/3588.incompat deleted file mode 100644 index 402ae8479..000000000 --- a/newsfragments/3588.incompat +++ /dev/null @@ -1 +0,0 @@ -The Tahoe command line now always uses UTF-8 to decode its arguments, regardless of locale. diff --git a/newsfragments/3588.minor b/newsfragments/3588.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3589.minor b/newsfragments/3589.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3590.bugfix b/newsfragments/3590.bugfix deleted file mode 100644 index aa504a5e3..000000000 --- a/newsfragments/3590.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fixed issue where redirecting old-style URIs (/uri/?uri=...) didn't work. \ No newline at end of file diff --git a/newsfragments/3591.minor b/newsfragments/3591.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3592.minor b/newsfragments/3592.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3593.minor b/newsfragments/3593.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3594.minor b/newsfragments/3594.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3595.minor b/newsfragments/3595.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3596.minor b/newsfragments/3596.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3599.minor b/newsfragments/3599.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3600.minor b/newsfragments/3600.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3603.minor.rst 
b/newsfragments/3603.minor.rst deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3605.minor b/newsfragments/3605.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3606.minor b/newsfragments/3606.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3607.minor b/newsfragments/3607.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3608.minor b/newsfragments/3608.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3611.minor b/newsfragments/3611.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3612.minor b/newsfragments/3612.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3613.minor b/newsfragments/3613.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3615.minor b/newsfragments/3615.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3616.minor b/newsfragments/3616.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3617.minor b/newsfragments/3617.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3618.minor b/newsfragments/3618.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3619.minor b/newsfragments/3619.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3620.minor b/newsfragments/3620.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3621.minor b/newsfragments/3621.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3623.minor b/newsfragments/3623.minor deleted file mode 100644 index 8b1378917..000000000 --- a/newsfragments/3623.minor +++ /dev/null @@ -1 +0,0 @@ - diff --git a/newsfragments/3624.minor b/newsfragments/3624.minor deleted file mode 100644 index 
e69de29bb..000000000 diff --git a/newsfragments/3625.minor b/newsfragments/3625.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3626.minor b/newsfragments/3626.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3628.minor b/newsfragments/3628.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3629.feature b/newsfragments/3629.feature deleted file mode 100644 index cdca48a18..000000000 --- a/newsfragments/3629.feature +++ /dev/null @@ -1 +0,0 @@ -The NixOS-packaged Tahoe-LAFS now knows its own version. diff --git a/newsfragments/3630.minor b/newsfragments/3630.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3631.minor b/newsfragments/3631.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3632.minor b/newsfragments/3632.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3633.installation b/newsfragments/3633.installation deleted file mode 100644 index 8f6d7efdd..000000000 --- a/newsfragments/3633.installation +++ /dev/null @@ -1 +0,0 @@ -Tahoe-LAFS now uses a forked version of txi2p (named txi2p-tahoe) with Python 3 support. 
diff --git a/newsfragments/3634.minor b/newsfragments/3634.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3635.minor b/newsfragments/3635.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3637.minor b/newsfragments/3637.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3638.minor b/newsfragments/3638.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3640.minor b/newsfragments/3640.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3642.minor b/newsfragments/3642.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3644.other b/newsfragments/3644.other deleted file mode 100644 index 4b159e45d..000000000 --- a/newsfragments/3644.other +++ /dev/null @@ -1 +0,0 @@ -The "Great Black Swamp" proposed specification has been changed use ``v=1`` as the URL version identifier. \ No newline at end of file diff --git a/newsfragments/3645.minor b/newsfragments/3645.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3646.minor b/newsfragments/3646.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3647.minor b/newsfragments/3647.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3648.minor b/newsfragments/3648.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3649.minor b/newsfragments/3649.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3650.bugfix b/newsfragments/3650.bugfix deleted file mode 100644 index 09a810239..000000000 --- a/newsfragments/3650.bugfix +++ /dev/null @@ -1 +0,0 @@ -``tahoe invite`` will now read share encoding/placement configuration values from a Tahoe client node configuration file if they are not given on the command line, instead of raising an unhandled exception. 
diff --git a/newsfragments/3651.minor b/newsfragments/3651.minor deleted file mode 100644 index 9a2f5a0ed..000000000 --- a/newsfragments/3651.minor +++ /dev/null @@ -1 +0,0 @@ -We added documentation detailing the project's ticket triage process diff --git a/newsfragments/3652.removed b/newsfragments/3652.removed deleted file mode 100644 index a3e964702..000000000 --- a/newsfragments/3652.removed +++ /dev/null @@ -1 +0,0 @@ -Removed support for the Account Server frontend authentication type. diff --git a/newsfragments/3653.minor b/newsfragments/3653.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3654.minor b/newsfragments/3654.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3655.minor b/newsfragments/3655.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3656.minor b/newsfragments/3656.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3657.minor b/newsfragments/3657.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3658.minor b/newsfragments/3658.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3659.documentation b/newsfragments/3659.documentation deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3662.minor b/newsfragments/3662.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3663.other b/newsfragments/3663.other deleted file mode 100644 index 62abf2666..000000000 --- a/newsfragments/3663.other +++ /dev/null @@ -1 +0,0 @@ -You can run `make livehtml` in docs directory to invoke sphinx-autobuild. diff --git a/newsfragments/3664.documentation b/newsfragments/3664.documentation deleted file mode 100644 index ab5de8884..000000000 --- a/newsfragments/3664.documentation +++ /dev/null @@ -1 +0,0 @@ -Documentation now has its own towncrier category. 
diff --git a/newsfragments/3666.documentation b/newsfragments/3666.documentation deleted file mode 100644 index 3f9e34777..000000000 --- a/newsfragments/3666.documentation +++ /dev/null @@ -1 +0,0 @@ -`tox -e docs` will treat warnings about docs as errors. diff --git a/newsfragments/3667.minor b/newsfragments/3667.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3669.minor b/newsfragments/3669.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3670.minor b/newsfragments/3670.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3671.minor b/newsfragments/3671.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3672.minor b/newsfragments/3672.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3674.minor b/newsfragments/3674.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3675.minor b/newsfragments/3675.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3676.minor b/newsfragments/3676.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3677.documentation b/newsfragments/3677.documentation deleted file mode 100644 index 51730e765..000000000 --- a/newsfragments/3677.documentation +++ /dev/null @@ -1 +0,0 @@ -The visibility of the Tahoe-LAFS logo has been improved for "dark" themed viewing. diff --git a/newsfragments/3678.minor b/newsfragments/3678.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3679.minor b/newsfragments/3679.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3681.minor b/newsfragments/3681.minor deleted file mode 100644 index bc84b6b8f..000000000 --- a/newsfragments/3681.minor +++ /dev/null @@ -1,8 +0,0 @@ -(The below text is no longer valid: netifaces has released a 64-bit -Python 2.7 wheel for Windows. 
Ticket #3733 made the switch in CI. We -should be able to test and run Tahoe-LAFS without needing vcpython27 -now.) - -Tahoe-LAFS CI now runs tests only on 32-bit Windows. Microsoft has -removed vcpython27 compiler downloads from their site, and Tahoe-LAFS -needs vcpython27 to build and install netifaces on 64-bit Windows. diff --git a/newsfragments/3682.documentation b/newsfragments/3682.documentation deleted file mode 100644 index 5cf78bd90..000000000 --- a/newsfragments/3682.documentation +++ /dev/null @@ -1 +0,0 @@ -A cheatsheet-style document for contributors was created at CONTRIBUTORS.rst \ No newline at end of file diff --git a/newsfragments/3683.minor b/newsfragments/3683.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3686.minor b/newsfragments/3686.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3687.minor b/newsfragments/3687.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3691.minor b/newsfragments/3691.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3692.minor b/newsfragments/3692.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3699.minor b/newsfragments/3699.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3700.minor b/newsfragments/3700.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3701.minor b/newsfragments/3701.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3702.minor b/newsfragments/3702.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3703.minor b/newsfragments/3703.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3704.minor b/newsfragments/3704.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3705.minor b/newsfragments/3705.minor deleted file 
mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3707.minor b/newsfragments/3707.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3708.minor b/newsfragments/3708.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3709.minor b/newsfragments/3709.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3711.minor b/newsfragments/3711.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3712.installation b/newsfragments/3712.installation deleted file mode 100644 index b80e1558b..000000000 --- a/newsfragments/3712.installation +++ /dev/null @@ -1 +0,0 @@ -The Nix package now includes correct version information. \ No newline at end of file diff --git a/newsfragments/3713.minor b/newsfragments/3713.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3714.minor b/newsfragments/3714.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3715.minor b/newsfragments/3715.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3716.incompat b/newsfragments/3716.incompat deleted file mode 100644 index aa03eea47..000000000 --- a/newsfragments/3716.incompat +++ /dev/null @@ -1 +0,0 @@ -tahoe backup's --exclude-from has been renamed to --exclude-from-utf-8, and correspondingly requires the file to be UTF-8 encoded. \ No newline at end of file diff --git a/newsfragments/3717.minor b/newsfragments/3717.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3718.minor b/newsfragments/3718.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3721.documentation b/newsfragments/3721.documentation deleted file mode 100644 index 36ae33236..000000000 --- a/newsfragments/3721.documentation +++ /dev/null @@ -1 +0,0 @@ -Our IRC channel, #tahoe-lafs, has been moved to irc.libera.chat. 
diff --git a/newsfragments/3722.minor b/newsfragments/3722.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3723.minor b/newsfragments/3723.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3726.documentation b/newsfragments/3726.documentation deleted file mode 100644 index fb94fff32..000000000 --- a/newsfragments/3726.documentation +++ /dev/null @@ -1 +0,0 @@ -Tahoe-LAFS project is now registered with Libera.Chat IRC network. diff --git a/newsfragments/3727.minor b/newsfragments/3727.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3728.minor b/newsfragments/3728.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3729.minor b/newsfragments/3729.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3730.minor b/newsfragments/3730.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3731.minor b/newsfragments/3731.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3732.minor b/newsfragments/3732.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3733.installation b/newsfragments/3733.installation deleted file mode 100644 index c1cac649b..000000000 --- a/newsfragments/3733.installation +++ /dev/null @@ -1 +0,0 @@ -Use netifaces 0.11.0 wheel package from PyPI.org if you use 64-bit Python 2.7 on Windows. VCPython27 downloads are no longer available at Microsoft's website, which has made building Python 2.7 wheel packages of Python libraries with C extensions (such as netifaces) on Windows difficult. 
diff --git a/newsfragments/3734.minor b/newsfragments/3734.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3735.minor b/newsfragments/3735.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3736.minor b/newsfragments/3736.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3738.bugfix b/newsfragments/3738.bugfix deleted file mode 100644 index 6a4bc1cd9..000000000 --- a/newsfragments/3738.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix regression where uploading files with non-ASCII names failed. \ No newline at end of file diff --git a/newsfragments/3739.bugfix b/newsfragments/3739.bugfix deleted file mode 100644 index 875941cf8..000000000 --- a/newsfragments/3739.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fixed annoying UnicodeWarning message on Python 2 when running CLI tools. \ No newline at end of file diff --git a/newsfragments/3741.minor b/newsfragments/3741.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3743.minor b/newsfragments/3743.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3744.minor b/newsfragments/3744.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3745.minor b/newsfragments/3745.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3746.minor b/newsfragments/3746.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3747.documentation b/newsfragments/3747.documentation deleted file mode 100644 index a2559a6a0..000000000 --- a/newsfragments/3747.documentation +++ /dev/null @@ -1 +0,0 @@ -Rewriting the installation guide for Tahoe-LAFS. 
diff --git a/newsfragments/3749.documentation b/newsfragments/3749.documentation deleted file mode 100644 index 554564a0b..000000000 --- a/newsfragments/3749.documentation +++ /dev/null @@ -1 +0,0 @@ -Documentation and installation links in the README have been fixed. diff --git a/newsfragments/3751.minor b/newsfragments/3751.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3757.other b/newsfragments/3757.other deleted file mode 100644 index 3d2d3f272..000000000 --- a/newsfragments/3757.other +++ /dev/null @@ -1 +0,0 @@ -Refactored test_introducer in web tests to use custom base test cases \ No newline at end of file diff --git a/newsfragments/3759.minor b/newsfragments/3759.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3760.minor b/newsfragments/3760.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3763.minor b/newsfragments/3763.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3764.documentation b/newsfragments/3764.documentation deleted file mode 100644 index d473cd27c..000000000 --- a/newsfragments/3764.documentation +++ /dev/null @@ -1 +0,0 @@ -The Great Black Swamp proposed specification now includes sample interactions to demonstrate expected usage patterns. \ No newline at end of file diff --git a/newsfragments/3765.documentation b/newsfragments/3765.documentation deleted file mode 100644 index a3b59c4d6..000000000 --- a/newsfragments/3765.documentation +++ /dev/null @@ -1 +0,0 @@ -The Great Black Swamp proposed specification now includes a glossary. \ No newline at end of file diff --git a/newsfragments/3769.documentation b/newsfragments/3769.documentation deleted file mode 100644 index 3d4ef7d4c..000000000 --- a/newsfragments/3769.documentation +++ /dev/null @@ -1 +0,0 @@ -The Great Black Swamp specification now allows parallel upload of immutable share data. 
diff --git a/newsfragments/3773.minor b/newsfragments/3773.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3774.documentation b/newsfragments/3774.documentation deleted file mode 100644 index d58105966..000000000 --- a/newsfragments/3774.documentation +++ /dev/null @@ -1 +0,0 @@ -There is now a specification for the scheme which Tahoe-LAFS storage clients use to derive their lease renewal secrets. diff --git a/newsfragments/3777.documentation b/newsfragments/3777.documentation deleted file mode 100644 index 7635cc1e6..000000000 --- a/newsfragments/3777.documentation +++ /dev/null @@ -1 +0,0 @@ -The Great Black Swamp proposed specification now has a simplified interface for reading data from immutable shares. diff --git a/newsfragments/3779.bugfix b/newsfragments/3779.bugfix deleted file mode 100644 index 073046474..000000000 --- a/newsfragments/3779.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fixed bug where share corruption events were not logged on storage servers running on Windows. \ No newline at end of file diff --git a/newsfragments/3781.minor b/newsfragments/3781.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3782.documentation b/newsfragments/3782.documentation deleted file mode 100644 index 5e5cecc13..000000000 --- a/newsfragments/3782.documentation +++ /dev/null @@ -1 +0,0 @@ -tahoe-dev mailing list is now at tahoe-dev@lists.tahoe-lafs.org. diff --git a/newsfragments/3784.minor b/newsfragments/3784.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3785.documentation b/newsfragments/3785.documentation deleted file mode 100644 index 4eb268f79..000000000 --- a/newsfragments/3785.documentation +++ /dev/null @@ -1 +0,0 @@ -The Great Black Swamp specification now describes the required authorization scheme. 
diff --git a/newsfragments/3786.feature b/newsfragments/3786.feature deleted file mode 100644 index ecbfc0372..000000000 --- a/newsfragments/3786.feature +++ /dev/null @@ -1 +0,0 @@ -tahoe-lafs now provides its statistics also in OpenMetrics format (for Prometheus et. al.) at `/statistics?t=openmetrics`. diff --git a/newsfragments/3792.minor b/newsfragments/3792.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3793.minor b/newsfragments/3793.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3795.minor b/newsfragments/3795.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3797.minor b/newsfragments/3797.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3798.minor b/newsfragments/3798.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3799.minor b/newsfragments/3799.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3801.bugfix b/newsfragments/3801.bugfix deleted file mode 100644 index 504b3999d..000000000 --- a/newsfragments/3801.bugfix +++ /dev/null @@ -1 +0,0 @@ -When uploading an immutable, overlapping writes that include conflicting data are rejected. In practice, this likely didn't happen in real-world usage. \ No newline at end of file diff --git a/newsfragments/3805.minor b/newsfragments/3805.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3806.minor b/newsfragments/3806.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3808.installation b/newsfragments/3808.installation deleted file mode 100644 index 157f08a0c..000000000 --- a/newsfragments/3808.installation +++ /dev/null @@ -1 +0,0 @@ -Tahoe-LAFS now supports running on NixOS 21.05 with Python 3. 
diff --git a/newsfragments/3810.minor b/newsfragments/3810.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3812.minor b/newsfragments/3812.minor deleted file mode 100644 index e69de29bb..000000000 diff --git a/newsfragments/3815.documentation b/newsfragments/3815.documentation deleted file mode 100644 index 7abc70bd1..000000000 --- a/newsfragments/3815.documentation +++ /dev/null @@ -1 +0,0 @@ -The news file for future releases will include a section for changes with a security impact. \ No newline at end of file diff --git a/newsfragments/4072.feature b/newsfragments/4072.feature new file mode 100644 index 000000000..3b0db7a02 --- /dev/null +++ b/newsfragments/4072.feature @@ -0,0 +1 @@ +Continued work to make Tahoe-LAFS take advantage of multiple CPUs. \ No newline at end of file diff --git a/newsfragments/4076.minor b/newsfragments/4076.minor new file mode 100644 index 000000000..2fec812e5 --- /dev/null +++ b/newsfragments/4076.minor @@ -0,0 +1 @@ +Release 1.19.0 diff --git a/newsfragments/2928.minor b/newsfragments/4082.minor similarity index 100% rename from newsfragments/2928.minor rename to newsfragments/4082.minor diff --git a/newsfragments/3283.minor b/newsfragments/4085.minor similarity index 100% rename from newsfragments/3283.minor rename to newsfragments/4085.minor diff --git a/newsfragments/3314.minor b/newsfragments/4087.bugfix similarity index 100% rename from newsfragments/3314.minor rename to newsfragments/4087.bugfix diff --git a/newsfragments/4088.bugfix b/newsfragments/4088.bugfix new file mode 100644 index 000000000..765bdc24f --- /dev/null +++ b/newsfragments/4088.bugfix @@ -0,0 +1 @@ +Stop using the C version of the cbor2 decoder. 
\ No newline at end of file diff --git a/newsfragments/3384.minor b/newsfragments/4090.minor similarity index 100% rename from newsfragments/3384.minor rename to newsfragments/4090.minor diff --git a/newsfragments/3385.minor b/newsfragments/4091.minor similarity index 100% rename from newsfragments/3385.minor rename to newsfragments/4091.minor diff --git a/newsfragments/3390.minor b/newsfragments/4092.minor similarity index 100% rename from newsfragments/3390.minor rename to newsfragments/4092.minor diff --git a/newsfragments/3478.minor b/newsfragments/4093.minor similarity index 100% rename from newsfragments/3478.minor rename to newsfragments/4093.minor diff --git a/newsfragments/4094.feature b/newsfragments/4094.feature new file mode 100644 index 000000000..85c98f3d5 --- /dev/null +++ b/newsfragments/4094.feature @@ -0,0 +1 @@ +Mutable directories can now be created with a pre-determined "signature key" via the web API using the "private-key=..." parameter. The "private-key" value must be a DER-encoded 2048-bit RSA private key in urlsafe base64 encoding. diff --git a/newsfragments/4100.bugfix b/newsfragments/4100.bugfix new file mode 100644 index 000000000..d580108ca --- /dev/null +++ b/newsfragments/4100.bugfix @@ -0,0 +1 @@ +Fix incompatibility with cryptography 43. \ No newline at end of file diff --git a/newsfragments/4101.bugfix b/newsfragments/4101.bugfix new file mode 100644 index 000000000..b03ca46d6 --- /dev/null +++ b/newsfragments/4101.bugfix @@ -0,0 +1 @@ +Fix incompatibility with attrs 24.1. 
\ No newline at end of file diff --git a/nix/autobahn.nix b/nix/autobahn.nix deleted file mode 100644 index 83148c4f8..000000000 --- a/nix/autobahn.nix +++ /dev/null @@ -1,34 +0,0 @@ -{ lib, buildPythonPackage, fetchPypi, isPy3k, - six, txaio, twisted, zope_interface, cffi, futures, - mock, pytest, cryptography, pynacl -}: -buildPythonPackage rec { - pname = "autobahn"; - version = "19.8.1"; - - src = fetchPypi { - inherit pname version; - sha256 = "294e7381dd54e73834354832604ae85567caf391c39363fed0ea2bfa86aa4304"; - }; - - propagatedBuildInputs = [ six txaio twisted zope_interface cffi cryptography pynacl ] ++ - (lib.optionals (!isPy3k) [ futures ]); - - checkInputs = [ mock pytest ]; - checkPhase = '' - runHook preCheck - USE_TWISTED=true py.test $out - runHook postCheck - ''; - - # Tests do no seem to be compatible yet with pytest 5.1 - # https://github.com/crossbario/autobahn-python/issues/1235 - doCheck = false; - - meta = with lib; { - description = "WebSocket and WAMP in Python for Twisted and asyncio."; - homepage = "https://crossbar.io/autobahn"; - license = licenses.mit; - maintainers = with maintainers; [ nand0p ]; - }; -} diff --git a/nix/collections-extended.nix b/nix/collections-extended.nix index 3f1ad165a..05254fc1b 100644 --- a/nix/collections-extended.nix +++ b/nix/collections-extended.nix @@ -1,19 +1,12 @@ -{ lib, buildPythonPackage, fetchPypi }: -buildPythonPackage rec { - pname = "collections-extended"; - version = "1.0.3"; - - src = fetchPypi { - inherit pname version; - sha256 = "0lb69x23asd68n0dgw6lzxfclavrp2764xsnh45jm97njdplznkw"; +# Package a version that's compatible with Python 3.11. This can go away once +# https://github.com/mlenzen/collections-extended/pull/199 is merged and +# included in a version of nixpkgs we depend on. 
+{ fetchFromGitHub, collections-extended }: +collections-extended.overrideAttrs (old: { + src = fetchFromGitHub { + owner = "mlenzen"; + repo = "collections-extended"; + rev = "8b93390636d58d28012b8e9d22334ee64ca37d73"; + hash = "sha256-e7RCpNsqyS1d3q0E+uaE4UOEQziueYsRkKEvy3gCHt0="; }; - - # Tests aren't in tarball, for 1.0.3 at least. - doCheck = false; - - meta = with lib; { - homepage = https://github.com/mlenzen/collections-extended; - description = "Extra Python Collections - bags (multisets), setlists (unique list / indexed set), RangeMap and IndexedDict"; - license = licenses.asl20; - }; -} +}) diff --git a/nix/default.nix b/nix/default.nix deleted file mode 100644 index bd7460c2f..000000000 --- a/nix/default.nix +++ /dev/null @@ -1,7 +0,0 @@ -# This is the main entrypoint for the Tahoe-LAFS derivation. -{ pkgs ? import { } }: -# Add our Python packages to nixpkgs to simplify the expression for the -# Tahoe-LAFS derivation. -let pkgs' = pkgs.extend (import ./overlays.nix); -# Evaluate the expression for our Tahoe-LAFS derivation. -in pkgs'.python2.pkgs.callPackage ./tahoe-lafs.nix { } diff --git a/nix/eliot.nix b/nix/eliot.nix deleted file mode 100644 index c5975e990..000000000 --- a/nix/eliot.nix +++ /dev/null @@ -1,31 +0,0 @@ -{ lib, buildPythonPackage, fetchPypi, zope_interface, pyrsistent, boltons -, hypothesis, testtools, pytest }: -buildPythonPackage rec { - pname = "eliot"; - version = "1.7.0"; - - src = fetchPypi { - inherit pname version; - sha256 = "0ylyycf717s5qsrx8b9n6m38vyj2k8328lfhn8y6r31824991wv8"; - }; - - postPatch = '' - substituteInPlace setup.py \ - --replace "boltons >= 19.0.1" boltons - ''; - - # A seemingly random subset of the test suite fails intermittently. After - # Tahoe-LAFS is ported to Python 3 we can update to a newer Eliot and, if - # the test suite continues to fail, maybe it will be more likely that we can - # have upstream fix it for us. 
- doCheck = false; - - checkInputs = [ testtools pytest hypothesis ]; - propagatedBuildInputs = [ zope_interface pyrsistent boltons ]; - - meta = with lib; { - homepage = https://github.com/itamarst/eliot/; - description = "Logging library that tells you why it happened"; - license = licenses.asl20; - }; -} diff --git a/nix/future.nix b/nix/future.nix deleted file mode 100644 index 814b7c1b5..000000000 --- a/nix/future.nix +++ /dev/null @@ -1,35 +0,0 @@ -{ lib -, buildPythonPackage -, fetchPypi -}: - -buildPythonPackage rec { - pname = "future"; - version = "0.18.2"; - - src = fetchPypi { - inherit pname version; - sha256 = "sha256:0zakvfj87gy6mn1nba06sdha63rn4njm7bhh0wzyrxhcny8avgmi"; - }; - - doCheck = false; - - meta = { - description = "Clean single-source support for Python 3 and 2"; - longDescription = '' - python-future is the missing compatibility layer between Python 2 and - Python 3. It allows you to use a single, clean Python 3.x-compatible - codebase to support both Python 2 and Python 3 with minimal overhead. - - It provides future and past packages with backports and forward ports - of features from Python 3 and 2. It also comes with futurize and - pasteurize, customized 2to3-based scripts that helps you to convert - either Py2 or Py3 code easily to support both Python 2 and 3 in a - single clean Py3-style codebase, module by module. 
- ''; - homepage = https://python-future.org; - downloadPage = https://github.com/PythonCharmers/python-future/releases; - license = with lib.licenses; [ mit ]; - maintainers = with lib.maintainers; [ prikhi ]; - }; -} diff --git a/nix/klein.nix b/nix/klein.nix new file mode 100644 index 000000000..be4426465 --- /dev/null +++ b/nix/klein.nix @@ -0,0 +1,9 @@ +{ klein, fetchPypi }: +klein.overrideAttrs (old: rec { + pname = "klein"; + version = "23.5.0"; + src = fetchPypi { + inherit pname version; + sha256 = "sha256-kGkSt6tBDZp/NRICg5w81zoqwHe9AHHIYcMfDu92Aoc="; + }; +}) diff --git a/nix/overlay.nix b/nix/overlay.nix new file mode 100644 index 000000000..41f0e3086 --- /dev/null +++ b/nix/overlay.nix @@ -0,0 +1,10 @@ +# This overlay adds Tahoe-LAFS and all of its properly-configured Python +# package dependencies to a Python package set. Downstream consumers can +# apply it to their own nixpkgs derivation to produce a Tahoe-LAFS package. +final: prev: { + # Add our overrides such that they will be applied to any Python derivation + # in nixpkgs. + pythonPackagesExtensions = prev.pythonPackagesExtensions ++ [ + (import ./python-overrides.nix) + ]; +} diff --git a/nix/overlays.nix b/nix/overlays.nix deleted file mode 100644 index fbd0ce3bb..000000000 --- a/nix/overlays.nix +++ /dev/null @@ -1,33 +0,0 @@ -self: super: { - python27 = super.python27.override { - packageOverrides = python-self: python-super: { - # eliot is not part of nixpkgs at all at this time. - eliot = python-self.pythonPackages.callPackage ./eliot.nix { }; - - # NixOS autobahn package has trollius as a dependency, although - # it is optional. Trollius is unmaintained and fails on CI. - autobahn = python-super.pythonPackages.callPackage ./autobahn.nix { }; - - # Porting to Python 3 is greatly aided by the future package. A - # slightly newer version than appears in nixos 19.09 is helpful. 
- future = python-super.pythonPackages.callPackage ./future.nix { }; - - # Need version of pyutil that supports Python 3. The version in 19.09 - # is too old. - pyutil = python-super.pythonPackages.callPackage ./pyutil.nix { }; - - # Need a newer version of Twisted, too. - twisted = python-super.pythonPackages.callPackage ./twisted.nix { }; - - # collections-extended is not part of nixpkgs at this time. - collections-extended = python-super.pythonPackages.callPackage ./collections-extended.nix { }; - }; - }; - - python39 = super.python39.override { - packageOverrides = python-self: python-super: { - # collections-extended is not part of nixpkgs at this time. - collections-extended = python-super.pythonPackages.callPackage ./collections-extended.nix { }; - }; - }; -} diff --git a/nix/py3.nix b/nix/py3.nix deleted file mode 100644 index 34ede49dd..000000000 --- a/nix/py3.nix +++ /dev/null @@ -1,7 +0,0 @@ -# This is the main entrypoint for the Tahoe-LAFS derivation. -{ pkgs ? import { } }: -# Add our Python packages to nixpkgs to simplify the expression for the -# Tahoe-LAFS derivation. -let pkgs' = pkgs.extend (import ./overlays.nix); -# Evaluate the expression for our Tahoe-LAFS derivation. -in pkgs'.python39.pkgs.callPackage ./tahoe-lafs.nix { } diff --git a/nix/pycddl.nix b/nix/pycddl.nix new file mode 100644 index 000000000..8b214a91b --- /dev/null +++ b/nix/pycddl.nix @@ -0,0 +1,57 @@ +# package https://gitlab.com/tahoe-lafs/pycddl +# +# also in the process of being pushed upstream +# https://github.com/NixOS/nixpkgs/pull/221220 +# +# we should switch to the upstream package when it is available from our +# minimum version of nixpkgs. +# +# if you need to update this package to a new pycddl release then +# +# 1. change value given to `buildPythonPackage` for `version` to match the new +# release +# +# 2. change the value given to `fetchPypi` for `sha256` to `lib.fakeHash` +# +# 3. run `nix-build` +# +# 4. there will be an error about a hash mismatch. 
change the value given to +# `fetchPypi` for `sha256` to the "actual" hash value report. +# +# 5. change the value given to `cargoDeps` for `hash` to lib.fakeHash`. +# +# 6. run `nix-build` +# +# 7. there will be an error about a hash mismatch. change the value given to +# `cargoDeps` for `hash` to the "actual" hash value report. +# +# 8. run `nix-build`. it should succeed. if it does not, seek assistance. +# +{ lib, fetchPypi, python, buildPythonPackage, rustPlatform }: +buildPythonPackage rec { + pname = "pycddl"; + version = "0.6.1"; + format = "pyproject"; + + src = fetchPypi { + inherit pname version; + sha256 = "sha256-63fe8UJXEH6t4l7ujV8JDvlGb7q3kL6fHHATFdklzFc="; + }; + + # Without this, when building for PyPy, `maturin build` seems to fail to + # find the interpreter at all and then fails early in the build process with + # an error saying "unsupported Python interpreter". We can easily point + # directly at the relevant interpreter, so do that. + maturinBuildFlags = [ "--interpreter" python.executable ]; + + nativeBuildInputs = with rustPlatform; [ + maturinBuildHook + cargoSetupHook + ]; + + cargoDeps = rustPlatform.fetchCargoTarball { + inherit src; + name = "${pname}-${version}"; + hash = "sha256-ssDEKRd3Y9/10oXBZHCxvlRkl9KMh3pGYbCkM4rXThQ="; + }; +} diff --git a/nix/pyopenssl.nix b/nix/pyopenssl.nix new file mode 100644 index 000000000..b8966fad1 --- /dev/null +++ b/nix/pyopenssl.nix @@ -0,0 +1,10 @@ +{ pyopenssl, fetchPypi, isPyPy }: +pyopenssl.overrideAttrs (old: rec { + pname = "pyOpenSSL"; + version = "23.2.0"; + name = "${pname}-${version}"; + src = fetchPypi { + inherit pname version; + sha256 = "J2+TH1WkUufeppxxc+mE6ypEB85BPJGKo0tV+C+bi6w="; + }; +}) diff --git a/nix/python-overrides.nix b/nix/python-overrides.nix new file mode 100644 index 000000000..006c2682d --- /dev/null +++ b/nix/python-overrides.nix @@ -0,0 +1,152 @@ +# Override various Python packages to create a package set that works for +# Tahoe-LAFS on CPython and PyPy. 
+self: super: +let + + # Run a function on a derivation if and only if we're building for PyPy. + onPyPy = f: drv: if super.isPyPy then f drv else drv; + + # Disable a Python package's test suite. + dontCheck = drv: drv.overrideAttrs (old: { doInstallCheck = false; }); + + # string -> any -> derivation -> derivation + # + # If the overrideable function for the given derivation accepts an argument + # with the given name, override it with the given value. + # + # Since we try to work with multiple versions of nixpkgs, sometimes we need + # to override a parameter that exists in one version but not others. This + # makes it a bit easier to do so. + overrideIfPresent = name: value: drv: + if (drv.override.__functionArgs ? ${name}) + then drv.override { "${name}" = value; } + else drv; + + # Disable building a Python package's documentation. + dontBuildDocs = drv: ( + overrideIfPresent "sphinxHook" null ( + overrideIfPresent "sphinx-rtd-theme" null + drv + ) + ).overrideAttrs ({ outputs, ... }: { + outputs = builtins.filter (x: "doc" != x) outputs; + }); + +in { + tahoe-lafs = self.callPackage ./tahoe-lafs.nix { + # Define the location of the Tahoe-LAFS source to be packaged (the same + # directory as contains this file). Clean up as many of the non-source + # files (eg the `.git` directory, `~` backup files, nix's own `result` + # symlink, etc) as possible to avoid needing to re-build when files that + # make no difference to the package have changed. + tahoe-lafs-src = self.lib.cleanSource ../.; + }; + + # Some dependencies aren't packaged in nixpkgs so supply our own packages. + pycddl = self.callPackage ./pycddl.nix { }; + txi2p = self.callPackage ./txi2p.nix { }; + + # Some packages are of somewhat too-old versions - update them. + klein = self.callPackage ./klein.nix { + # Avoid infinite recursion. 
+ inherit (super) klein; + }; + txtorcon = self.callPackage ./txtorcon.nix { + inherit (super) txtorcon; + }; + + # With our customized package set a Twisted unit test fails. Patch the + # Twisted test suite to skip that test. + # Filed upstream at https://github.com/twisted/twisted/issues/11877 + twisted = super.twisted.overrideAttrs (old: { + patches = (old.patches or []) ++ [ ./twisted.patch ]; + }); + + # Update the version of pyopenssl - and since we're doing that anyway, we + # don't need the docs. Unfortunately this triggers a lot of rebuilding of + # dependent packages. + pyopenssl = dontBuildDocs (self.callPackage ./pyopenssl.nix { + inherit (super) pyopenssl; + }); + + # The cryptography that we get from nixpkgs to satisfy the pyopenssl upgrade + # that we did breaks service-identity ... so get a newer version that works. + service-identity = self.callPackage ./service-identity.nix { }; + + # collections-extended is currently broken for Python 3.11 in nixpkgs but + # we know where a working version lives. + collections-extended = self.callPackage ./collections-extended.nix { + inherit (super) collections-extended; + }; + + # greenlet is incompatible with PyPy but PyPy has a builtin equivalent. + # Fixed in nixpkgs in a5f8184fb816a4fd5ae87136838c9981e0d22c67. + greenlet = onPyPy (drv: null) super.greenlet; + + # tornado and tk pull in a huge dependency trees for functionality we don't + # care about, also tkinter doesn't work on PyPy. + matplotlib = onPyPy (matplotlib: matplotlib.override { + tornado = null; + enableTk = false; + }) super.matplotlib; + + tqdm = onPyPy (tqdm: tqdm.override { + # ibid. + tkinter = null; + # pandas is only required by the part of the test suite covering + # integration with pandas that we don't care about. pandas is a huge + # dependency. + pandas = null; + }) super.tqdm; + + # The treq test suite depends on httpbin. httpbin pulls in babel (flask -> + # jinja2 -> babel) and arrow (brotlipy -> construct -> arrow). 
babel fails + # its test suite and arrow segfaults. + treq = onPyPy dontCheck super.treq; + + # the six test suite fails on PyPy because it depends on dbm which the + # nixpkgs PyPy build appears to be missing. Maybe fixed in nixpkgs in + # a5f8184fb816a4fd5ae87136838c9981e0d22c67. + six = onPyPy dontCheck super.six; + + # Likewise for beautifulsoup4. + beautifulsoup4 = onPyPy dontBuildDocs super.beautifulsoup4; + + # The autobahn test suite pulls in a vast number of dependencies for + # functionality we don't care about. It might be nice to *selectively* + # disable just some of it but this is easier. + autobahn = dontCheck super.autobahn; + + # and python-dotenv tests pulls in a lot of dependencies, including jedi, + # which does not work on PyPy. + python-dotenv = onPyPy dontCheck super.python-dotenv; + + # By default, the sphinx docs are built, which pulls in a lot of + # dependencies - including jedi, which does not work on PyPy. + hypothesis = onPyPy dontBuildDocs super.hypothesis; + + # flaky's test suite depends on nose and nose appears to have Python 3 + # incompatibilities (it includes `print` statements, for example). + flaky = onPyPy dontCheck super.flaky; + + # collections-extended is packaged with poetry-core. poetry-core test suite + # uses virtualenv and virtualenv test suite fails on PyPy. + poetry-core = onPyPy dontCheck super.poetry-core; + + # The test suite fails with some rather irrelevant (to us) string comparison + # failure on PyPy. Probably a PyPy bug but doesn't seem like we should + # care. + rich = onPyPy dontCheck super.rich; + + # The pyutil test suite fails in some ... test ... for some deprecation + # functionality we don't care about. + pyutil = onPyPy dontCheck super.pyutil; + + # testCall1 fails fairly inscrutibly on PyPy. Perhaps someone can fix that, + # or we could at least just skip that one test. Probably better to fix it + # since we actually depend directly and significantly on Foolscap. 
+ foolscap = onPyPy dontCheck super.foolscap; + + # CircleCI build systems don't have enough memory to run this test suite. + lz4 = onPyPy dontCheck super.lz4; +} diff --git a/nix/pyutil.nix b/nix/pyutil.nix deleted file mode 100644 index 6852c2acc..000000000 --- a/nix/pyutil.nix +++ /dev/null @@ -1,48 +0,0 @@ -{ stdenv -, buildPythonPackage -, fetchPypi -, setuptoolsDarcs -, setuptoolsTrial -, simplejson -, twisted -, isPyPy -}: - -buildPythonPackage rec { - pname = "pyutil"; - version = "3.3.0"; - - src = fetchPypi { - inherit pname version; - sha256 = "8c4d4bf668c559186389bb9bce99e4b1b871c09ba252a756ccaacd2b8f401848"; - }; - - buildInputs = [ setuptoolsDarcs setuptoolsTrial ] ++ (if doCheck then [ simplejson ] else []); - propagatedBuildInputs = [ twisted ]; - - # Tests fail because they try to write new code into the twisted - # package, apparently some kind of plugin. - doCheck = false; - - prePatch = stdenv.lib.optionalString isPyPy '' - grep -rl 'utf-8-with-signature-unix' ./ | xargs sed -i -e "s|utf-8-with-signature-unix|utf-8|g" - ''; - - meta = with stdenv.lib; { - description = "Pyutil, a collection of mature utilities for Python programmers"; - - longDescription = '' - These are a few data structures, classes and functions which - we've needed over many years of Python programming and which - seem to be of general use to other Python programmers. Many of - the modules that have existed in pyutil over the years have - subsequently been obsoleted by new features added to the - Python language or its standard library, thus showing that - we're not alone in wanting tools like these. 
- ''; - - homepage = "http://allmydata.org/trac/pyutil"; - license = licenses.gpl2Plus; - }; - -} \ No newline at end of file diff --git a/nix/service-identity.nix b/nix/service-identity.nix new file mode 100644 index 000000000..fef68b16e --- /dev/null +++ b/nix/service-identity.nix @@ -0,0 +1,61 @@ +{ lib +, attrs +, buildPythonPackage +, cryptography +, fetchFromGitHub +, hatch-fancy-pypi-readme +, hatch-vcs +, hatchling +, idna +, pyasn1 +, pyasn1-modules +, pytestCheckHook +, pythonOlder +, setuptools +}: + +buildPythonPackage rec { + pname = "service-identity"; + version = "23.1.0"; + format = "pyproject"; + + disabled = pythonOlder "3.8"; + + src = fetchFromGitHub { + owner = "pyca"; + repo = pname; + rev = "refs/tags/${version}"; + hash = "sha256-PGDtsDgRwh7GuuM4OuExiy8L4i3Foo+OD0wMrndPkvo="; + }; + + nativeBuildInputs = [ + hatch-fancy-pypi-readme + hatch-vcs + hatchling + setuptools + ]; + + propagatedBuildInputs = [ + attrs + cryptography + idna + pyasn1 + pyasn1-modules + ]; + + nativeCheckInputs = [ + pytestCheckHook + ]; + + pythonImportsCheck = [ + "service_identity" + ]; + + meta = with lib; { + description = "Service identity verification for pyOpenSSL"; + homepage = "https://service-identity.readthedocs.io"; + changelog = "https://github.com/pyca/service-identity/releases/tag/${version}"; + license = licenses.mit; + maintainers = with maintainers; [ fab ]; + }; +} diff --git a/nix/tahoe-lafs.nix b/nix/tahoe-lafs.nix index c7db6c583..273fa3a76 100644 --- a/nix/tahoe-lafs.nix +++ b/nix/tahoe-lafs.nix @@ -1,125 +1,79 @@ -{ fetchFromGitHub, lib -, git, python -, twisted, foolscap, zfec -, setuptools, setuptoolsTrial, pyasn1, zope_interface -, service-identity, pyyaml, magic-wormhole, treq, appdirs -, beautifulsoup4, eliot, autobahn, cryptography, netifaces -, html5lib, pyutil, distro, configparser +let + pname = "tahoe-lafs"; + version = "1.19.0.post1"; +in +{ lib +, pythonPackages +, buildPythonPackage +, tahoe-lafs-src }: 
-python.pkgs.buildPythonPackage rec { - # Most of the time this is not exactly the release version (eg 1.15.1). - # Give it a `post` component to make it look newer than the release version - # and we'll bump this up at the time of each release. - # - # It's difficult to read the version from Git the way the Python code does - # for two reasons. First, doing so involves populating the Nix expression - # with values from the source. Nix calls this "import from derivation" or - # "IFD" (). This is - # discouraged in most cases - including this one, I think. Second, the - # Python code reads the contents of `.git` to determine its version. `.git` - # is not a reproducable artifact (in the sense of "reproducable builds") so - # it is excluded from the source tree by default. When it is included, the - # package tends to be frequently spuriously rebuilt. - version = "1.15.1.post1"; - name = "tahoe-lafs-${version}"; - src = lib.cleanSourceWith { - src = ../.; - filter = name: type: - let - basename = baseNameOf name; +buildPythonPackage rec { + inherit pname version; + src = tahoe-lafs-src; + propagatedBuildInputs = with pythonPackages; [ + attrs + autobahn + cbor2 + click + collections-extended + cryptography + distro + eliot + filelock + foolscap + future + klein + magic-wormhole + netifaces + psutil + pyyaml + pycddl + pyrsistent + pyutil + six + treq + twisted + werkzeug + zfec + zope_interface + ] ++ + # Get the dependencies for the Twisted extras we depend on, too. + twisted.passthru.optional-dependencies.tls ++ + twisted.passthru.optional-dependencies.conch; - split = lib.splitString "."; - join = builtins.concatStringsSep "."; - ext = join (builtins.tail (split basename)); + # The test suite lives elsewhere. + doCheck = false; - # Build up a bunch of knowledge about what kind of file this is. 
- isTox = type == "directory" && basename == ".tox"; - isTrialTemp = type == "directory" && basename == "_trial_temp"; - isVersion = basename == "_version.py"; - isBytecode = ext == "pyc" || ext == "pyo"; - isBackup = lib.hasSuffix "~" basename; - isTemporary = lib.hasPrefix "#" basename && lib.hasSuffix "#" basename; - isSymlink = type == "symlink"; - isGit = type == "directory" && basename == ".git"; - in - # Exclude all these things - ! (isTox - || isTrialTemp - || isVersion - || isBytecode - || isBackup - || isTemporary - || isSymlink - || isGit - ); + passthru = { + extras = with pythonPackages; { + tor = [ + txtorcon + ]; + i2p = [ + txi2p + ]; + unittest = [ + beautifulsoup4 + html5lib + fixtures + hypothesis + mock + prometheus-client + testtools + ]; + integrationtest = [ + pytest + pytest-twisted + paramiko + pytest-timeout + ]; + }; }; - postPatch = '' - # Chroots don't have /etc/hosts and /etc/resolv.conf, so work around - # that. - for i in $(find src/allmydata/test -type f) - do - sed -i "$i" -e"s/localhost/127.0.0.1/g" - done - - # Some tests are flaky or fail to skip when dependencies are missing. - # This list is over-zealous because it's more work to disable individual - # tests with in a module. - - # Many of these tests don't properly skip when i2p or tor dependencies are - # not supplied (and we are not supplying them). - rm src/allmydata/test/test_i2p_provider.py - rm src/allmydata/test/test_connections.py - rm src/allmydata/test/cli/test_create.py - - # Generate _version.py ourselves since we can't rely on the Python code - # extracting the information from the .git directory we excluded. 
- cat > src/allmydata/_version.py < /dev/null - ''; - - checkPhase = '' - ${python.interpreter} -m unittest discover -s twisted/test - ''; - # Tests require network - doCheck = false; - - meta = with stdenv.lib; { - homepage = https://twistedmatrix.com/; - description = "Twisted, an event-driven networking engine written in Python"; - longDescription = '' - Twisted is an event-driven networking engine written in Python - and licensed under the MIT license. - ''; - license = licenses.mit; - maintainers = [ ]; - }; -} diff --git a/nix/twisted.patch b/nix/twisted.patch new file mode 100644 index 000000000..1b6846c8e --- /dev/null +++ b/nix/twisted.patch @@ -0,0 +1,12 @@ +diff --git a/src/twisted/internet/test/test_endpoints.py b/src/twisted/internet/test/test_endpoints.py +index c650fd8aa6..a1754fd533 100644 +--- a/src/twisted/internet/test/test_endpoints.py ++++ b/src/twisted/internet/test/test_endpoints.py +@@ -4214,6 +4214,7 @@ class WrapClientTLSParserTests(unittest.TestCase): + connectionCreator = connectionCreatorFromEndpoint(reactor, endpoint) + self.assertEqual(connectionCreator._hostname, "\xe9xample.example.com") + ++ @skipIf(True, "self.assertFalse(plainClient.transport.disconnecting) fails") + def test_tls(self): + """ + When passed a string endpoint description beginning with C{tls:}, diff --git a/nix/txi2p.nix b/nix/txi2p.nix new file mode 100644 index 000000000..3464b7b3d --- /dev/null +++ b/nix/txi2p.nix @@ -0,0 +1,39 @@ +# package https://github.com/tahoe-lafs/txi2p +# +# if you need to update this package to a new txi2p release then +# +# 1. change value given to `buildPythonPackage` for `version` to match the new +# release +# +# 2. change the value given to `fetchPypi` for `sha256` to `lib.fakeHash` +# +# 3. run `nix-build` +# +# 4. there will be an error about a hash mismatch. change the value given to +# `fetchPypi` for `sha256` to the "actual" hash value report. +# +# 5. 
if there are new runtime dependencies then add them to the argument list +# at the top. if there are new test dependencies add them to the +# `checkInputs` list. +# +# 6. run `nix-build`. it should succeed. if it does not, seek assistance. +# +{ fetchPypi +, buildPythonPackage +, parsley +, twisted +, unittestCheckHook +}: +buildPythonPackage rec { + pname = "txi2p-tahoe"; + version = "0.3.7"; + + src = fetchPypi { + inherit pname version; + hash = "sha256-+Vs9zaFS+ACI14JNxEme93lnWmncdZyFAmnTH0yhOiY="; + }; + + propagatedBuildInputs = [ twisted parsley ]; + checkInputs = [ unittestCheckHook ]; + pythonImportsCheck = [ "parsley" "ometa"]; +} diff --git a/nix/txtorcon.nix b/nix/txtorcon.nix new file mode 100644 index 000000000..552c03fd0 --- /dev/null +++ b/nix/txtorcon.nix @@ -0,0 +1,9 @@ +{ txtorcon, fetchPypi }: +txtorcon.overrideAttrs (old: rec { + pname = "txtorcon"; + version = "23.5.0"; + src = fetchPypi { + inherit pname version; + hash = "sha256-k/2Aqd1QX2mNCGT+k9uLapwRRLX+uRUwggtw7YmCZRw="; + }; +}) diff --git a/pyinstaller.spec b/pyinstaller.spec index 875629c13..5c24df430 100644 --- a/pyinstaller.spec +++ b/pyinstaller.spec @@ -1,6 +1,5 @@ # -*- mode: python -*- -from __future__ import print_function from distutils.sysconfig import get_python_lib import hashlib @@ -11,7 +10,10 @@ import struct import sys -if not hasattr(sys, 'real_prefix'): +try: + import allmydata + del allmydata +except ImportError: sys.exit("Please run inside a virtualenv with Tahoe-LAFS installed.") @@ -33,6 +35,7 @@ hidden_imports = [ 'allmydata.stats', 'base64', 'cffi', + 'charset_normalizer.md__mypyc', 'collections', 'commands', 'Crypto', diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 000000000..fed528d4a --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,3 @@ +[build-system] +requires = ["setuptools"] +build-backend = "setuptools.build_meta" diff --git a/pytest.ini b/pytest.ini new file mode 100644 index 000000000..9ff725e7b --- /dev/null +++ b/pytest.ini @@ 
-0,0 +1,3 @@ +[pytest] +markers = + slow: marks tests as slow (not run by default; run them with '--runslow') diff --git a/relnotes.txt b/relnotes.txt index 4afbd6cc5..d8caa5e6e 100644 --- a/relnotes.txt +++ b/relnotes.txt @@ -1,6 +1,6 @@ -ANNOUNCING Tahoe, the Least-Authority File Store, v1.15.1 +ANNOUNCING Tahoe, the Least-Authority File Store, v1.19.0 -The Tahoe-LAFS team is pleased to announce version 1.15.1 of +The Tahoe-LAFS team is pleased to announce version 1.19.0 of Tahoe-LAFS, an extremely reliable decentralized storage system. Get it with "pip install tahoe-lafs", or download a tarball here: @@ -15,17 +15,34 @@ unique security and fault-tolerance properties: https://tahoe-lafs.readthedocs.org/en/latest/about.html -The previous stable release of Tahoe-LAFS was v1.15.0, released on -January 19, 2021. +The previous stable release of Tahoe-LAFS was v1.18.0, released on +October 2, 2022. Major new features and changes in this release: -In this release: PyPI does not accept uploads of packages that use -PEP-508 version specifiers. +A new "Grid Manager" feature allows clients to specify any number of +parties whom they will use to limit which storage-server that client +talks to. See docs/managed-grid.rst for more. -Note that Python3 porting is underway but not yet complete in this -release. Developers may notice python3 as new targets for certain -tools. +The new HTTP-based "Great Black Swamp" protocol is now enabled +(replacing Foolscap). This allows integrators to start with their +favourite HTTP library (instead of implementing Foolscap first). Both +storage-servers and clients support this new protocol. -Please see ``NEWS.rst`` for a more complete list of changes. +`tahoe run` will now exit if its stdin is closed (but accepts --allow-stdin-close now). + +Mutables may be created with a pre-determined signature key; care must +be taken! + +This release drops Python 3.7 support and adds Python 3.11 and 3.12 +support. 
Several performance improvements have been made. Introducer +correctly listens on Tor or I2P. Debian 10 and Ubuntu 20.04 are no +longer tested. + +Besides all this there have been dozens of other bug-fixes and +improvements. + +Enjoy! + +Please see ``NEWS.rst`` [1] for a complete list of changes. WHAT IS IT GOOD FOR? @@ -64,12 +81,12 @@ to v1.0 (which was released March 25, 2008). Clients from this release can read files and directories produced by clients of all versions since v1.0. -Network connections are limited by the Introducer protocol in -use. If the Introducer is running v1.10 or v1.11, then servers -from this release (v1.12) can serve clients of all versions -back to v1.0 . If it is running v1.12, then they can only -serve clients back to v1.10. Clients from this release can use -servers back to v1.10, but not older servers. +Network connections are limited by the Introducer protocol in use. If +the Introducer is running v1.10 or v1.11, then servers from this +release can serve clients of all versions back to v1.0 . If it is +running v1.12 or higher, then they can only serve clients back to +v1.10. Clients from this release can use servers back to v1.10, but +not older servers. Except for the new optional MDMF format, we have not made any intentional compatibility changes. However we do not yet have @@ -77,7 +94,7 @@ the test infrastructure to continuously verify that all new versions are interoperable with previous versions. We intend to build such an infrastructure in the future. -This is the twenty-first release in the version 1 series. This +This is the twenty-second release in the version 1 series. This series of Tahoe-LAFS will be actively supported and maintained for the foreseeable future, and future versions of Tahoe-LAFS will retain the ability to read and write files compatible @@ -137,24 +154,23 @@ Of Fame" [13]. ACKNOWLEDGEMENTS -This is the eighteenth release of Tahoe-LAFS to be created -solely as a labor of love by volunteers. 
Thank you very much -to the team of "hackers in the public interest" who make -Tahoe-LAFS possible. +This is the twentieth release of Tahoe-LAFS to be created solely as a +labor of love by volunteers. Thank you very much to the team of +"hackers in the public interest" who make Tahoe-LAFS possible. meejah on behalf of the Tahoe-LAFS team -March 23, 2021 +October 1, 2022 Planet Earth -[1] https://github.com/tahoe-lafs/tahoe-lafs/blob/tahoe-lafs-1.15.1/NEWS.rst +[1] https://github.com/tahoe-lafs/tahoe-lafs/blob/tahoe-lafs-1.19.0/NEWS.rst [2] https://github.com/tahoe-lafs/tahoe-lafs/blob/master/docs/known_issues.rst [3] https://tahoe-lafs.org/trac/tahoe-lafs/wiki/RelatedProjects -[4] https://github.com/tahoe-lafs/tahoe-lafs/blob/tahoe-lafs-1.15.1/COPYING.GPL -[5] https://github.com/tahoe-lafs/tahoe-lafs/blob/tahoe-lafs-1.15.1/COPYING.TGPPL.rst -[6] https://tahoe-lafs.readthedocs.org/en/tahoe-lafs-1.15.1/INSTALL.html +[4] https://github.com/tahoe-lafs/tahoe-lafs/blob/tahoe-lafs-1.19.0/COPYING.GPL +[5] https://github.com/tahoe-lafs/tahoe-lafs/blob/tahoe-lafs-1.19.0/COPYING.TGPPL.rst +[6] https://tahoe-lafs.readthedocs.org/en/tahoe-lafs-1.19.0/INSTALL.html [7] https://lists.tahoe-lafs.org/mailman/listinfo/tahoe-dev [8] https://tahoe-lafs.org/trac/tahoe-lafs/roadmap [9] https://github.com/tahoe-lafs/tahoe-lafs/blob/master/CREDITS diff --git a/setup.cfg b/setup.cfg index f4539279e..9415b3ab4 100644 --- a/setup.cfg +++ b/setup.cfg @@ -6,6 +6,9 @@ develop = update_version develop bdist_egg = update_version bdist_egg bdist_wheel = update_version bdist_wheel +# This has been replaced by ruff (see .ruff.toml), which has same checks as +# flake8 plus many more, and is also faster. However, we're keeping this config +# in case people still use flake8 in IDEs, etc.. [flake8] # Enforce all pyflakes constraints, and also prohibit tabs for indentation. 
# Reference: diff --git a/setup.py b/setup.py index 8c6396937..71be1e2e1 100644 --- a/setup.py +++ b/setup.py @@ -32,9 +32,8 @@ VERSION_PY_FILENAME = 'src/allmydata/_version.py' version = read_version_py(VERSION_PY_FILENAME) install_requires = [ - # we don't need much out of setuptools but the version checking stuff - # needs pkg_resources and PEP 440 version specifiers. - "setuptools >= 28.8.0", + # importlib.resources.files and friends are new in Python 3.9. + "importlib_resources; python_version < '3.9'", "zfec >= 1.1.0", @@ -55,8 +54,9 @@ install_requires = [ # * foolscap >= 0.12.6 has an i2p.sam_endpoint() that takes kwargs # * foolscap 0.13.2 drops i2p support completely # * foolscap >= 21.7 is necessary for Python 3 with i2p support. - "foolscap == 0.13.1 ; python_version < '3.0'", - "foolscap >= 21.7.0 ; python_version > '3.0'", + # * foolscap >= 23.3 is necessary for Python 3.11. + "foolscap >= 21.7.0", + "foolscap >= 23.3.0; python_version > '3.10'", # * cryptography 2.6 introduced some ed25519 APIs we rely on. Note that # Twisted[conch] also depends on cryptography and Twisted[tls] @@ -64,6 +64,9 @@ install_requires = [ # version of cryptography will *really* be installed. "cryptography >= 2.6", + # * Used for custom HTTPS validation + "pyOpenSSL >= 23.2.0", + # * The SFTP frontend depends on Twisted 11.0.0 to fix the SSH server # rekeying bug # * The SFTP frontend and manhole depend on the conch extra. However, we @@ -97,7 +100,9 @@ install_requires = [ # an sftp extra in Tahoe-LAFS, there is no point in having one. # * Twisted 19.10 introduces Site.getContentFile which we use to get # temporary upload files placed into a per-node temporary directory. 
- "Twisted[tls,conch] >= 19.10.0", + # * Twisted 22.8.0 added support for coroutine-returning functions in many + # places (mainly via `maybeDeferred`) + "Twisted[tls,conch] >= 22.8.0", "PyYAML >= 3.11", @@ -106,22 +111,16 @@ install_requires = [ # for 'tahoe invite' and 'tahoe join' "magic-wormhole >= 0.10.2", - # Eliot is contemplating dropping Python 2 support. Stick to a version we - # know works on Python 2.7. - "eliot ~= 1.7 ; python_version < '3.0'", - # On Python 3, we want a new enough version to support custom JSON encoders. - "eliot >= 1.13.0 ; python_version > '3.0'", + # We want a new enough version to support custom JSON encoders. + "eliot >= 1.14.0", - # Pyrsistent 0.17.0 (which we use by way of Eliot) has dropped - # Python 2 entirely; stick to the version known to work for us. - "pyrsistent < 0.17.0 ; python_version < '3.0'", - "pyrsistent ; python_version > '3.0'", + "pyrsistent", # A great way to define types of values. - "attrs >= 18.2.0", + "attrs >= 20.1.0", # WebSocket library for twisted and asyncio - "autobahn >= 19.5.2", + "autobahn >= 22.4.3", # Support for Python 3 transition "future >= 0.18.2", @@ -135,22 +134,33 @@ install_requires = [ # Linux distribution detection: "distro >= 1.4.0", - # Backported configparser for Python 2: - "configparser ; python_version < '3.0'", + # For the RangeMap datastructure. Need 2.0.2 at least for bugfixes. + "collections-extended >= 2.0.2", - # For the RangeMap datastructure. - "collections-extended", -] + # HTTP server and client + # Latest version is necessary to work with latest werkzeug: + "klein >= 23.5.0", + # 2.2.0 has a bug: https://github.com/pallets/werkzeug/issues/2465 + "werkzeug != 2.2.0", + "treq", + # 5.6.0 excluded because https://github.com/agronholm/cbor2/issues/208 + "cbor2 != 5.6.0", -setup_requires = [ - 'setuptools >= 28.8.0', # for PEP-440 style versions + # 0.6 adds the ability to decode CBOR. 0.6.1 fixes PyPy. 
+ "pycddl >= 0.6.1", + + # Command-line parsing + "click >= 8.1.1", + + # for pid-file support + "psutil", + "filelock", ] tor_requires = [ - # This is exactly what `foolscap[tor]` means but pip resolves the pair of - # dependencies "foolscap[i2p] foolscap[tor]" to "foolscap[i2p]" so we lose - # this if we don't declare it ourselves! - "txtorcon >= 0.17.0", + # 23.5 added support for custom TLS contexts in web_agent(), which is + # needed for the HTTP storage client to run over Tor. + "txtorcon >= 23.5.0", ] i2p_requires = [ @@ -192,8 +202,7 @@ trove_classifiers=[ "Natural Language :: English", "Programming Language :: C", "Programming Language :: Python", - "Programming Language :: Python :: 2", - "Programming Language :: Python :: 2.7", + "Programming Language :: Python :: 3", "Topic :: Utilities", "Topic :: System :: Systems Administration", "Topic :: System :: Filesystems", @@ -220,7 +229,7 @@ def run_command(args, cwd=None): use_shell = sys.platform == "win32" try: p = subprocess.Popen(args, stdout=subprocess.PIPE, cwd=cwd, shell=use_shell) - except EnvironmentError as e: # if this gives a SyntaxError, note that Tahoe-LAFS requires Python 2.7+ + except EnvironmentError as e: # if this gives a SyntaxError, note that Tahoe-LAFS requires Python 3.8+ print("Warning: unable to run %r." % (" ".join(args),)) print(e) return None @@ -371,41 +380,53 @@ setup(name="tahoe-lafs", # also set in __init__.py package_dir = {'':'src'}, packages=find_packages('src') + ['allmydata.test.plugins'], classifiers=trove_classifiers, - # We support Python 2.7, and we're working on support for 3.6 (the - # highest version that PyPy currently supports). - python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*", + # We support Python 3.8 or later, 3.13 is untested for now + python_requires=">=3.8, <3.13", install_requires=install_requires, extras_require={ # Duplicate the Twisted pywin32 dependency here. 
See # https://tahoe-lafs.org/trac/tahoe-lafs/ticket/2392 for some # discussion. ':sys_platform=="win32"': ["pywin32 != 226"], + "build": [ + "dulwich", + "gpg", + ], + + # Here are the dependencies required to set up a reproducible test + # environment. This could be for CI or local development. These + # are *not* library dependencies of the test suite itself. They are + # the tools we use to run the test suite at all. + "testenv": [ + # Pin all of these versions for the same reason you ever want to + # pin anything: to prevent new releases with regressions from + # introducing spurious failures into CI runs for whatever + # development work is happening at the time. The versions + # selected here are just the current versions at the time. + # Bumping them to keep up with future releases is fine as long + # as those releases are known to actually work. + "pip==23.3.1", + "wheel==0.41.3", + "subunitreporter==23.8.0", + "python-subunit==1.4.2", + "junitxml==0.7", + "coverage==7.2.5", + ], + + # Here are the library dependencies of the test suite. "test": [ - "flake8", - # Pin a specific pyflakes so we don't have different folks - # disagreeing on what is or is not a lint issue. We can bump - # this version from time to time, but we will do it - # intentionally. - "pyflakes == 2.2.0", - "coverage ~= 5.0", "mock", - "tox", "pytest", "pytest-twisted", - # XXX: decorator isn't a direct dependency, but pytest-twisted - # depends on decorator, and decorator 5.x isn't compatible with - # Python 2.7. - "decorator < 5", "hypothesis >= 3.6.1", - "treq", "towncrier", "testtools", "fixtures", "beautifulsoup4", "html5lib", - "junitxml", - "tenacity", - "paramiko", + # Pin old version until + # https://github.com/paramiko/paramiko/issues/1961 is fixed. 
+ "paramiko < 2.9", "pytest-timeout", # Does our OpenMetrics endpoint adhere to the spec: "prometheus-client == 0.11.0", @@ -421,7 +442,11 @@ setup(name="tahoe-lafs", # also set in __init__.py "allmydata": ["ported-modules.txt"], }, include_package_data=True, - setup_requires=setup_requires, - entry_points = { 'console_scripts': [ 'tahoe = allmydata.scripts.runner:run' ] }, + entry_points={ + 'console_scripts': [ + 'tahoe = allmydata.scripts.runner:run', + 'grid-manager = allmydata.cli.grid_manager:grid_manager', + ] + }, **setup_args ) diff --git a/src/allmydata/__init__.py b/src/allmydata/__init__.py index 333394fc5..8fc7064ca 100644 --- a/src/allmydata/__init__.py +++ b/src/allmydata/__init__.py @@ -3,16 +3,6 @@ Decentralized storage grid. community web site: U{https://tahoe-lafs.org/} """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2, PY3 -if PY2: - # Don't import future str() so we don't break Foolscap serialization on Python 2. - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, max, min # noqa: F401 - from past.builtins import unicode as str __all__ = [ "__version__", @@ -52,12 +42,6 @@ __appname__ = "tahoe-lafs" # https://tahoe-lafs.org/trac/tahoe-lafs/wiki/Versioning __full_version__ = __appname__ + '/' + str(__version__) - -# Install Python 3 module locations in Python 2: -from future import standard_library -standard_library.install_aliases() - - # Monkey-patch 3rd party libraries: from ._monkeypatch import patch patch() @@ -72,8 +56,7 @@ del patch # # Also note that BytesWarnings only happen if Python is run with -b option, so # in practice this should only affect tests. -if PY3: - import warnings - # Error on BytesWarnings, to catch things like str(b""), but only for - # allmydata code. 
- warnings.filterwarnings("error", category=BytesWarning, module=".*allmydata.*") +import warnings +# Error on BytesWarnings, to catch things like str(b""), but only for +# allmydata code. +warnings.filterwarnings("error", category=BytesWarning, module=".*allmydata.*") diff --git a/src/allmydata/__main__.py b/src/allmydata/__main__.py index 87f1f6522..c6b200991 100644 --- a/src/allmydata/__main__.py +++ b/src/allmydata/__main__.py @@ -2,15 +2,6 @@ Ported to Python 3. """ -from __future__ import unicode_literals -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - import sys from allmydata.scripts.runner import run diff --git a/src/allmydata/_auto_deps.py b/src/allmydata/_auto_deps.py deleted file mode 100644 index 521b17a45..000000000 --- a/src/allmydata/_auto_deps.py +++ /dev/null @@ -1,89 +0,0 @@ -""" -Ported to Python 3. -""" - -from __future__ import unicode_literals -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - -# Note: please minimize imports in this file. In particular, do not import -# any module from Tahoe-LAFS or its dependencies, and do not import any -# modules at all at global level. That includes setuptools and pkg_resources. -# It is ok to import modules from the Python Standard Library if they are -# always available, or the import is protected by try...except ImportError. - -# Includes some indirect dependencies, but does not include allmydata. -# These are in the order they should be listed by --version, etc. 
-package_imports = [ - # package name module name - ('foolscap', 'foolscap'), - ('zfec', 'zfec'), - ('Twisted', 'twisted'), - ('zope.interface', 'zope.interface'), - ('python', None), - ('platform', None), - ('pyOpenSSL', 'OpenSSL'), - ('OpenSSL', None), - ('pyasn1', 'pyasn1'), - ('service-identity', 'service_identity'), - ('pyasn1-modules', 'pyasn1_modules'), - ('cryptography', 'cryptography'), - ('cffi', 'cffi'), - ('six', 'six'), - ('enum34', 'enum'), - ('pycparser', 'pycparser'), - ('PyYAML', 'yaml'), - ('magic-wormhole', 'wormhole'), - ('setuptools', 'setuptools'), - ('eliot', 'eliot'), - ('attrs', 'attr'), - ('autobahn', 'autobahn'), -] - -# Dependencies for which we don't know how to get a version number at run-time. -not_import_versionable = [ - 'zope.interface', -] - -# Dependencies reported by pkg_resources that we can safely ignore. -ignorable = [ - 'argparse', - 'distribute', - 'twisted-web', - 'twisted-core', - 'twisted-conch', -] - - -# These are suppressed globally: - -global_deprecation_messages = [ - "BaseException.message has been deprecated as of Python 2.6", - "twisted.internet.interfaces.IFinishableConsumer was deprecated in Twisted 11.1.0: Please use IConsumer (and IConsumer.unregisterProducer) instead.", - "twisted.internet.interfaces.IStreamClientEndpointStringParser was deprecated in Twisted 14.0.0: This interface has been superseded by IStreamClientEndpointStringParserWithReactor.", -] - -# These are suppressed while importing dependencies: - -deprecation_messages = [ - "the sha module is deprecated; use the hashlib module instead", - "object.__new__\(\) takes no parameters", - "The popen2 module is deprecated. Use the subprocess module.", - "the md5 module is deprecated; use hashlib instead", - "twisted.web.error.NoResource is deprecated since Twisted 9.0. See twisted.web.resource.NoResource.", - "the sets module is deprecated", -] - -runtime_warning_messages = [ - "Not using mpz_powm_sec. 
You should rebuild using libgmp >= 5 to avoid timing attack vulnerability.", -] - -warning_imports = [ - 'twisted.persisted.sob', - 'twisted.python.filepath', -] diff --git a/src/allmydata/_monkeypatch.py b/src/allmydata/_monkeypatch.py index da37fd979..61f12750f 100644 --- a/src/allmydata/_monkeypatch.py +++ b/src/allmydata/_monkeypatch.py @@ -4,45 +4,5 @@ Monkey-patching of third party libraries. Ported to Python 3. """ -from __future__ import unicode_literals -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - -from warnings import catch_warnings - - def patch(): """Path third-party libraries to make Tahoe-LAFS work.""" - # Make sure Foolscap always get native strings passed to method names in callRemote. - # This can be removed when any one of the following happens: - # - # 1. Tahoe-LAFS on Python 2 switches to version of Foolscap that fixes - # https://github.com/warner/foolscap/issues/72 - # 2. Foolscap is dropped as a dependency. - # 3. Tahoe-LAFS drops Python 2 support. - - if not PY2: - # Python 3 doesn't need to monkey patch Foolscap - return - - # We need to suppress warnings so as to prevent unexpected output from - # breaking some integration tests. - with catch_warnings(record=True): - # Only tested with this version; ensure correctness with new releases, - # and then either update the assert or hopefully drop the monkeypatch. 
- from foolscap import __version__ - assert __version__ == "0.13.1", "Wrong version %s of Foolscap" % (__version__,) - - from foolscap.referenceable import RemoteReference - original_getMethodInfo = RemoteReference._getMethodInfo - - def _getMethodInfo(self, name): - if isinstance(name, str): - name = name.encode("utf-8") - return original_getMethodInfo(self, name) - RemoteReference._getMethodInfo = _getMethodInfo diff --git a/src/allmydata/blacklist.py b/src/allmydata/blacklist.py index 43eb36cc6..db499315a 100644 --- a/src/allmydata/blacklist.py +++ b/src/allmydata/blacklist.py @@ -1,14 +1,6 @@ """ Ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 import os diff --git a/src/allmydata/check_results.py b/src/allmydata/check_results.py index 4f997b614..44a4a1db8 100644 --- a/src/allmydata/check_results.py +++ b/src/allmydata/check_results.py @@ -1,15 +1,5 @@ """Ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - -from past.builtins import unicode from zope.interface import implementer from allmydata.interfaces import ICheckResults, ICheckAndRepairResults, \ @@ -71,8 +61,8 @@ class CheckResults(object): # On Python 2, we can mix bytes and Unicode. On Python 3, we want # unicode. 
if isinstance(summary, bytes): - summary = unicode(summary, "utf-8") - assert isinstance(summary, unicode) # should be a single string + summary = str(summary, "utf-8") + assert isinstance(summary, str) # should be a single string self._summary = summary assert not isinstance(report, str) # should be list of strings self._report = report diff --git a/newsfragments/3404.minor b/src/allmydata/cli/__init__.py similarity index 100% rename from newsfragments/3404.minor rename to src/allmydata/cli/__init__.py diff --git a/src/allmydata/cli/grid_manager.py b/src/allmydata/cli/grid_manager.py new file mode 100644 index 000000000..ddb1f2f37 --- /dev/null +++ b/src/allmydata/cli/grid_manager.py @@ -0,0 +1,228 @@ +""" +A CLI for configuring a grid manager. +""" + +from typing import Optional +from datetime import ( + timedelta, +) + +import click + +from twisted.python.filepath import ( + FilePath, +) + +from allmydata.crypto import ( + ed25519, +) +from allmydata.util.abbreviate import ( + abbreviate_time, +) +from allmydata.grid_manager import ( + create_grid_manager, + save_grid_manager, + load_grid_manager, + current_datetime_with_zone, +) +from allmydata.util import jsonbytes as json + + +@click.group() +@click.option( + '--config', '-c', + type=click.Path(), + help="Configuration directory (or - for stdin)", + required=True, +) +@click.pass_context +def grid_manager(ctx, config): + """ + A Tahoe Grid Manager issues certificates to storage-servers + + A Tahoe client with one or more Grid Manager public keys + configured will only upload to a Storage Server that presents a + valid certificate signed by one of the configured Grid + Manager keys. + + Grid Manager configuration can be in a local directory or given + via stdin. It contains long-term secret information (a private + signing key) and should be kept safe. 
+ """ + + class Config(object): + """ + Available to all sub-commands as Click's context.obj + """ + _grid_manager = None + + @property + def grid_manager(self): + if self._grid_manager is None: + config_path = _config_path_from_option(config) + try: + self._grid_manager = load_grid_manager(config_path) + except ValueError as e: + raise click.ClickException( + "Error loading Grid Manager from '{}': {}".format(config, e) + ) + return self._grid_manager + + ctx.obj = Config() + + +@grid_manager.command() +@click.pass_context +def create(ctx): + """ + Make a new Grid Manager + """ + config_location = ctx.parent.params["config"] + fp = None + if config_location != '-': + fp = FilePath(config_location) + + gm = create_grid_manager() + try: + save_grid_manager(fp, gm) + except OSError as e: + raise click.ClickException( + "Can't create '{}': {}".format(config_location, e) + ) + + +@grid_manager.command() +@click.pass_obj +def public_identity(config): + """ + Show the public identity key of a Grid Manager + + This is what you give to clients to add to their configuration so + they use announcements from this Grid Manager + """ + click.echo(config.grid_manager.public_identity()) + + +@grid_manager.command() +@click.argument("name") +@click.argument("public_key", type=click.STRING) +@click.pass_context +def add(ctx, name, public_key): + """ + Add a new storage-server by name to a Grid Manager + + PUBLIC_KEY is the contents of a node.pubkey file from a Tahoe + node-directory. NAME is an arbitrary label. 
+ """ + public_key = public_key.encode("ascii") + try: + ctx.obj.grid_manager.add_storage_server( + name, + ed25519.verifying_key_from_string(public_key), + ) + except KeyError: + raise click.ClickException( + "A storage-server called '{}' already exists".format(name) + ) + save_grid_manager( + _config_path_from_option(ctx.parent.params["config"]), + ctx.obj.grid_manager, + create=False, + ) + return 0 + + +@grid_manager.command() +@click.argument("name") +@click.pass_context +def remove(ctx, name): + """ + Remove an existing storage-server by name from a Grid Manager + """ + fp = _config_path_from_option(ctx.parent.params["config"]) + try: + ctx.obj.grid_manager.remove_storage_server(name) + except KeyError: + raise click.ClickException( + "No storage-server called '{}' exists".format(name) + ) + cert_count = 0 + if fp is not None: + while fp.child('{}.cert.{}'.format(name, cert_count)).exists(): + fp.child('{}.cert.{}'.format(name, cert_count)).remove() + cert_count += 1 + + save_grid_manager(fp, ctx.obj.grid_manager, create=False) + + +@grid_manager.command() # noqa: F811 +@click.pass_context +def list(ctx): + """ + List all storage-servers known to a Grid Manager + """ + for name in sorted(ctx.obj.grid_manager.storage_servers.keys()): + blank_name = " " * len(name) + click.echo("{}: {}".format( + name, + str(ctx.obj.grid_manager.storage_servers[name].public_key_string(), "utf-8"))) + for cert in ctx.obj.grid_manager.storage_servers[name].certificates: + delta = current_datetime_with_zone() - cert.expires + click.echo("{} cert {}: ".format(blank_name, cert.index), nl=False) + if delta.total_seconds() < 0: + click.echo("valid until {} ({})".format(cert.expires, abbreviate_time(delta))) + else: + click.echo("expired {} ({})".format(cert.expires, abbreviate_time(delta))) + + +@grid_manager.command() +@click.argument("name") +@click.argument( + "expiry_days", + type=click.IntRange(1, 5*365), # XXX is 5 years a good maximum? 
+) +@click.pass_context +def sign(ctx, name, expiry_days): + """ + sign a new certificate + """ + fp = _config_path_from_option(ctx.parent.params["config"]) + expiry = timedelta(days=expiry_days) + + try: + certificate = ctx.obj.grid_manager.sign(name, expiry) + except KeyError: + raise click.ClickException( + "No storage-server called '{}' exists".format(name) + ) + + certificate_data = json.dumps(certificate.marshal(), indent=4) + click.echo(certificate_data) + if fp is not None: + next_serial = 0 + f = None + while f is None: + fname = "{}.cert.{}".format(name, next_serial) + try: + f = fp.child(fname).create() + except FileExistsError: + f = None + except OSError as e: + raise click.ClickException(f"{fname}: {e}") + next_serial += 1 + with f: + f.write(certificate_data.encode("ascii")) + + +def _config_path_from_option(config: str) -> Optional[FilePath]: + """ + :param str config: a path or - + :returns: a FilePath instance or None + """ + if config == "-": + return None + return FilePath(config) + + +if __name__ == '__main__': + grid_manager() # type: ignore diff --git a/src/allmydata/client.py b/src/allmydata/client.py index aabae9065..48f372b05 100644 --- a/src/allmydata/client.py +++ b/src/allmydata/client.py @@ -1,23 +1,18 @@ """ -Ported to Python 3. +Functionality related to operating a Tahoe-LAFS node (client _or_ server). """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals +from __future__ import annotations -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, max, min # noqa: F401 - # Don't use future str to prevent leaking future's newbytes into foolscap, which they break. 
- from past.builtins import unicode as str - -import os, stat, time, weakref +import os +import stat +import time +import weakref +from typing import Optional, Iterable from base64 import urlsafe_b64encode from functools import partial -# On Python 2 this will be the backported package: from configparser import NoSectionError +from six import ensure_text from foolscap.furl import ( decode_furl, ) @@ -34,13 +29,15 @@ from twisted.application.internet import TimerService from twisted.python.filepath import FilePath import allmydata +from allmydata import node from allmydata.crypto import rsa, ed25519 from allmydata.crypto.util import remove_prefix -from allmydata.storage.server import StorageServer +from allmydata.dirnode import DirectoryNode +from allmydata.storage.server import StorageServer, FoolscapStorageServer from allmydata import storage_client from allmydata.immutable.upload import Uploader from allmydata.immutable.offloaded import Helper -from allmydata.control import ControlServer +from allmydata.mutable.filenode import MutableFileNode from allmydata.introducer.client import IntroducerClient from allmydata.util import ( hashutil, base32, pollmixin, log, idlib, @@ -51,21 +48,22 @@ from allmydata.util.encodingutil import get_filesystem_encoding from allmydata.util.abbreviate import parse_abbreviated_size from allmydata.util.time_format import parse_duration, parse_date from allmydata.util.i2p_provider import create as create_i2p_provider -from allmydata.util.tor_provider import create as create_tor_provider +from allmydata.util.tor_provider import create as create_tor_provider, _Provider as TorProvider +from allmydata.util.cputhreadpool import defer_to_thread +from allmydata.util.deferredutil import async_to_deferred from allmydata.stats import StatsProvider from allmydata.history import History from allmydata.interfaces import ( IStatsProducer, SDMF_VERSION, MDMF_VERSION, - DEFAULT_MAX_SEGMENT_SIZE, + DEFAULT_IMMUTABLE_MAX_SEGMENT_SIZE, 
IFoolscapStoragePlugin, IAnnounceableStorageServer, ) from allmydata.nodemaker import NodeMaker from allmydata.blacklist import Blacklist -from allmydata import node - +from allmydata.node import _Config KiB=1024 MiB=1024*KiB @@ -81,7 +79,8 @@ def _is_valid_section(section_name): """ return ( section_name.startswith("storageserver.plugins.") or - section_name.startswith("storageclient.plugins.") + section_name.startswith("storageclient.plugins.") or + section_name in ("grid_managers", "grid_manager_certificates") ) @@ -96,7 +95,9 @@ _client_config = configutil.ValidConfiguration( "shares.happy", "shares.needed", "shares.total", + "shares._max_immutable_segment_size_for_testing", "storage.plugins", + "force_foolscap", ), "storage": ( "debug_discard", @@ -113,6 +114,8 @@ _client_config = configutil.ValidConfiguration( "reserved_space", "storage_dir", "plugins", + "grid_management", + "force_foolscap", ), "sftpd": ( "accounts.file", @@ -169,33 +172,20 @@ class SecretHolder(object): class KeyGenerator(object): """I create RSA keys for mutable files. Each call to generate() returns a - single keypair. The keysize is specified first by the keysize= argument - to generate(), then with a default set by set_default_keysize(), then - with a built-in default of 2048 bits.""" - def __init__(self): - self.default_keysize = 2048 + single keypair.""" - def set_default_keysize(self, keysize): - """Call this to override the size of the RSA keys created for new - mutable files which don't otherwise specify a size. This will affect - all subsequent calls to generate() without a keysize= argument. The - default size is 2048 bits. Test cases should call this method once - during setup, to cause me to create smaller keys, so the unit tests - run faster.""" - self.default_keysize = keysize + @async_to_deferred + async def generate(self) -> tuple[rsa.PublicKey, rsa.PrivateKey]: + """ + I return a Deferred that fires with a (verifyingkey, signingkey) + pair. 
The returned key will be 2048 bit. + """ + keysize = 2048 + private, public = await defer_to_thread( + rsa.create_signing_keypair, keysize + ) + return public, private - def generate(self, keysize=None): - """I return a Deferred that fires with a (verifyingkey, signingkey) - pair. I accept a keysize in bits (2048 bit keys are standard, smaller - keys are used for testing). If you do not provide a keysize, I will - use my default, which is set by a call to set_default_keysize(). If - set_default_keysize() has never been called, I will create 2048 bit - keys.""" - keysize = keysize or self.default_keysize - # RSA key generation for a 2048 bit key takes between 0.8 and 3.2 - # secs - signer, verifier = rsa.create_signing_keypair(keysize) - return defer.succeed( (verifier, signer) ) class Terminator(service.Service): def __init__(self): @@ -208,7 +198,7 @@ class Terminator(service.Service): return service.Service.stopService(self) -def read_config(basedir, portnumfile, generated_files=[]): +def read_config(basedir, portnumfile, generated_files: Iterable=()): """ Read and validate configuration for a client-style Node. 
See :method:`allmydata.node.read_config` for parameter meanings (the @@ -283,18 +273,16 @@ def create_client_from_config(config, _client_factory=None, _introducer_factory= config, tub_options, default_connection_handlers, foolscap_connection_handlers, i2p_provider, tor_provider, ) - control_tub = node.create_control_tub() introducer_clients = create_introducer_clients(config, main_tub, _introducer_factory) storage_broker = create_storage_farm_broker( config, default_connection_handlers, foolscap_connection_handlers, - tub_options, introducer_clients + tub_options, introducer_clients, tor_provider ) client = _client_factory( config, main_tub, - control_tub, i2p_provider, tor_provider, introducer_clients, @@ -485,7 +473,7 @@ def create_introducer_clients(config, main_tub, _introducer_factory=None): return introducer_clients -def create_storage_farm_broker(config, default_connection_handlers, foolscap_connection_handlers, tub_options, introducer_clients): +def create_storage_farm_broker(config: _Config, default_connection_handlers, foolscap_connection_handlers, tub_options, introducer_clients, tor_provider: Optional[TorProvider]): """ Create a StorageFarmBroker object, for use by Uploader/Downloader (and everybody else who wants to use storage servers) @@ -505,6 +493,11 @@ def create_storage_farm_broker(config, default_connection_handlers, foolscap_con storage_client_config = storage_client.StorageClientConfig.from_node_config( config, ) + # ensure that we can at least load all plugins that the + # configuration mentions; doing this early (i.e. before creating + # storage-clients themselves) allows us to exit in case of a + # problem. 
+ storage_client_config.get_configured_storage_plugins() def tub_creator(handler_overrides=None, **kwargs): return node.create_tub( @@ -515,11 +508,14 @@ def create_storage_farm_broker(config, default_connection_handlers, foolscap_con **kwargs ) + # create the actual storage-broker sb = storage_client.StorageFarmBroker( permute_peers=True, tub_maker=tub_creator, node_config=config, storage_client_config=storage_client_config, + default_connection_handlers=default_connection_handlers, + tor_provider=tor_provider, ) for ic in introducer_clients: sb.use_introducer(ic) @@ -611,6 +607,10 @@ def anonymous_storage_enabled(config): @implementer(IStatsProducer) class _Client(node.Node, pollmixin.PollMixin): + """ + This class should be refactored; see + https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3931 + """ STOREDIR = 'storage' NODETYPE = "client" @@ -628,15 +628,15 @@ class _Client(node.Node, pollmixin.PollMixin): DEFAULT_ENCODING_PARAMETERS = {"k": 3, "happy": 7, "n": 10, - "max_segment_size": DEFAULT_MAX_SEGMENT_SIZE, + "max_segment_size": DEFAULT_IMMUTABLE_MAX_SEGMENT_SIZE, } - def __init__(self, config, main_tub, control_tub, i2p_provider, tor_provider, introducer_clients, + def __init__(self, config, main_tub, i2p_provider, tor_provider, introducer_clients, storage_farm_broker): """ Use :func:`allmydata.client.create_client` to instantiate one of these. 
""" - node.Node.__init__(self, config, main_tub, control_tub, i2p_provider, tor_provider) + node.Node.__init__(self, config, main_tub, i2p_provider, tor_provider) self.started_timestamp = time.time() self.logSource = "Client" @@ -648,7 +648,6 @@ class _Client(node.Node, pollmixin.PollMixin): self.init_stats_provider() self.init_secrets() self.init_node_key() - self.init_control() self._key_generator = KeyGenerator() key_gen_furl = config.get_config("client", "key_generator.furl", None) if key_gen_furl: @@ -679,6 +678,14 @@ class _Client(node.Node, pollmixin.PollMixin): if webport: self.init_web(webport) # strports string + # TODO this may be the wrong location for now? but as temporary measure + # it allows us to get NURLs for testing in test_istorageserver.py. This + # will eventually get fixed one way or another in + # https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3901. See also + # https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3931 for the bigger + # picture issue. + self.storage_nurls : Optional[set] = None + def init_stats_provider(self): self.stats_provider = StatsProvider(self) self.stats_provider.setServiceParent(self) @@ -810,16 +817,18 @@ class _Client(node.Node, pollmixin.PollMixin): sharetypes.append("mutable") expiration_sharetypes = tuple(sharetypes) - ss = StorageServer(storedir, self.nodeid, - reserved_space=reserved, - discard_storage=discard, - readonly_storage=readonly, - stats_provider=self.stats_provider, - expiration_enabled=expire, - expiration_mode=mode, - expiration_override_lease_duration=o_l_d, - expiration_cutoff_date=cutoff_date, - expiration_sharetypes=expiration_sharetypes) + ss = StorageServer( + storedir, self.nodeid, + reserved_space=reserved, + discard_storage=discard, + readonly_storage=readonly, + stats_provider=self.stats_provider, + expiration_enabled=expire, + expiration_mode=mode, + expiration_override_lease_duration=o_l_d, + expiration_cutoff_date=cutoff_date, + expiration_sharetypes=expiration_sharetypes, + ) 
ss.setServiceParent(self) return ss @@ -838,7 +847,16 @@ class _Client(node.Node, pollmixin.PollMixin): if anonymous_storage_enabled(self.config): furl_file = self.config.get_private_path("storage.furl").encode(get_filesystem_encoding()) - furl = self.tub.registerReference(ss, furlFile=furl_file) + furl = self.tub.registerReference(FoolscapStorageServer(ss), furlFile=furl_file) + (_, _, swissnum) = decode_furl(furl) + if hasattr(self.tub.negotiationClass, "add_storage_server"): + nurls = self.tub.negotiationClass.add_storage_server(ss, swissnum.encode("ascii")) + self.storage_nurls = nurls + # There is code in e.g. storage_client.py that checks if an + # announcement has changed. Since NURL order isn't meaningful, + # we don't want a change in the order to count as a change, so we + # send the NURLs as a set. CBOR supports sets, as does Foolscap. + announcement[storage_client.ANONYMOUS_STORAGE_NURLS] = {n.to_text() for n in nurls} announcement["anonymous-storage-FURL"] = furl enabled_storage_servers = self._enable_storage_servers( @@ -856,6 +874,14 @@ class _Client(node.Node, pollmixin.PollMixin): announcement.update(plugins_announcement) + if self.config.get_config("storage", "grid_management", default=False, boolean=True): + grid_manager_certificates = self.config.get_grid_manager_certificates() + announcement[u"grid-manager-certificates"] = grid_manager_certificates + + # Note: certificates are not verified for validity here, but + # that may be useful. 
See: + # https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3977 + for ic in self.introducer_clients: ic.publish("storage", announcement, self._node_private_key) @@ -906,6 +932,13 @@ class _Client(node.Node, pollmixin.PollMixin): DEP["k"] = int(self.config.get_config("client", "shares.needed", DEP["k"])) DEP["n"] = int(self.config.get_config("client", "shares.total", DEP["n"])) DEP["happy"] = int(self.config.get_config("client", "shares.happy", DEP["happy"])) + # At the moment this is only used for testing, thus the janky config + # attribute name. + DEP["max_segment_size"] = int(self.config.get_config( + "client", + "shares._max_immutable_segment_size_for_testing", + DEP["max_segment_size"]) + ) # for the CLI to authenticate to local JSON endpoints self._create_auth_token() @@ -958,6 +991,9 @@ class _Client(node.Node, pollmixin.PollMixin): static_servers = servers_yaml.get("storage", {}) log.msg("found %d static servers in private/servers.yaml" % len(static_servers)) + static_servers = { + ensure_text(key): value for (key, value) in static_servers.items() + } self.storage_broker.set_static_servers(static_servers) except EnvironmentError: pass @@ -985,12 +1021,6 @@ class _Client(node.Node, pollmixin.PollMixin): def get_history(self): return self.history - def init_control(self): - c = ControlServer() - c.setServiceParent(self) - control_url = self.control_tub.registerReference(c) - self.config.write_private_config("control.furl", control_url + "\n") - def init_helper(self): self.helper = Helper(self.config.get_config_path("helper"), self.storage_broker, self._secret_holder, @@ -1003,9 +1033,6 @@ class _Client(node.Node, pollmixin.PollMixin): helper_furlfile = self.config.get_private_path("helper.furl").encode(get_filesystem_encoding()) self.tub.registerReference(self.helper, furlFile=helper_furlfile) - def set_default_mutable_keysize(self, keysize): - self._key_generator.set_default_keysize(keysize) - def _get_tempdir(self): """ Determine the path to the directory where 
temporary files for this node @@ -1024,14 +1051,14 @@ class _Client(node.Node, pollmixin.PollMixin): def init_web(self, webport): self.log("init_web(webport=%s)", args=(webport,)) - from allmydata.webish import WebishServer + from allmydata.webish import WebishServer, anonymous_tempfile_factory nodeurl_path = self.config.get_config_path("node.url") staticdir_config = self.config.get_config("node", "web.static", "public_html") staticdir = self.config.get_config_path(staticdir_config) ws = WebishServer( self, webport, - self._get_tempdir(), + anonymous_tempfile_factory(self._get_tempdir()), nodeurl_path, staticdir, ) @@ -1099,16 +1126,83 @@ class _Client(node.Node, pollmixin.PollMixin): # may get an opaque node if there were any problems. return self.nodemaker.create_from_cap(write_uri, read_uri, deep_immutable=deep_immutable, name=name) - def create_dirnode(self, initial_children={}, version=None): - d = self.nodemaker.create_new_mutable_directory(initial_children, version=version) + def create_dirnode( + self, + initial_children: dict | None = None, + version: int | None = None, + *, + unique_keypair: tuple[rsa.PublicKey, rsa.PrivateKey] | None = None + ) -> DirectoryNode: + """ + Create a new directory. + + :param initial_children: If given, a structured dict representing the + initial content of the created directory. See + `docs/frontends/webapi.rst` for examples. + + :param version: If given, an int representing the mutable file format + of the new object. Acceptable values are currently `SDMF_VERSION` + or `MDMF_VERSION` (corresponding to 0 or 1, respectively, as + defined in `allmydata.interfaces`). If no such value is provided, + the default mutable format will be used (currently SDMF). + + :param unique_keypair: an optional tuple containing the RSA public + and private key to be used for the new directory. Typically, this + value is omitted (in which case a new random keypair will be + generated at creation time). 
+ + **Warning** This value independently determines the identity of + the mutable object to create. There cannot be two different + mutable objects that share a keypair. They will merge into one + object (with undefined contents). + + :return: A Deferred which will fire with a representation of the new + directory after it has been created. + """ + d = self.nodemaker.create_new_mutable_directory( + initial_children, + version=version, + keypair=unique_keypair, + ) return d def create_immutable_dirnode(self, children, convergence=None): return self.nodemaker.create_immutable_directory(children, convergence) - def create_mutable_file(self, contents=None, keysize=None, version=None): - return self.nodemaker.create_mutable_file(contents, keysize, - version=version) + def create_mutable_file( + self, + contents: bytes | None = None, + version: int | None = None, + *, + unique_keypair: tuple[rsa.PublicKey, rsa.PrivateKey] | None = None, + ) -> MutableFileNode: + """ + Create *and upload* a new mutable object. + + :param contents: If given, the initial contents for the new object. + + :param version: If given, the mutable file format for the new object + (otherwise a format will be chosen automatically). + + :param unique_keypair: **Warning** This value independently determines + the identity of the mutable object to create. There cannot be two + different mutable objects that share a keypair. They will merge + into one object (with undefined contents). + + It is common to pass a None value (or not pass a value) for this + parameter. In these cases, a new random keypair will be + generated. + + If non-None, the given public/private keypair will be used for the + new object. The expected use-case is for implementing compliance + tests. + + :return: A Deferred which will fire with a representation of the new
+ """ + return self.nodemaker.create_mutable_file(contents, + version=version, + keypair=unique_keypair) def upload(self, uploadable, reactor=None): uploader = self.getServiceNamed("uploader") diff --git a/src/allmydata/codec.py b/src/allmydata/codec.py index 19345959e..d8a3527c1 100644 --- a/src/allmydata/codec.py +++ b/src/allmydata/codec.py @@ -3,19 +3,12 @@ CRS encoding and decoding. Ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 from zope.interface import implementer -from twisted.internet import defer from allmydata.util import mathutil from allmydata.util.assertutil import precondition +from allmydata.util.cputhreadpool import defer_to_thread +from allmydata.util.deferredutil import async_to_deferred from allmydata.interfaces import ICodecEncoder, ICodecDecoder import zfec @@ -45,7 +38,8 @@ class CRSEncoder(object): def get_block_size(self): return self.share_size - def encode(self, inshares, desired_share_ids=None): + @async_to_deferred + async def encode(self, inshares, desired_share_ids=None): precondition(desired_share_ids is None or len(desired_share_ids) <= self.max_shares, desired_share_ids, self.max_shares) if desired_share_ids is None: @@ -53,9 +47,8 @@ class CRSEncoder(object): for inshare in inshares: assert len(inshare) == self.share_size, (len(inshare), self.share_size, self.data_size, self.required_shares) - shares = self.encoder.encode(inshares, desired_share_ids) - - return defer.succeed((shares, desired_share_ids)) + shares = await defer_to_thread(self.encoder.encode, inshares, desired_share_ids) + return (shares, desired_share_ids) def encode_proposal(self, data, desired_share_ids=None): raise 
NotImplementedError() @@ -77,14 +70,17 @@ class CRSDecoder(object): def get_needed_shares(self): return self.required_shares - def decode(self, some_shares, their_shareids): + @async_to_deferred + async def decode(self, some_shares, their_shareids): precondition(len(some_shares) == len(their_shareids), len(some_shares), len(their_shareids)) precondition(len(some_shares) == self.required_shares, len(some_shares), self.required_shares) - data = self.decoder.decode(some_shares, - [int(s) for s in their_shareids]) - return defer.succeed(data) + return await defer_to_thread( + self.decoder.decode, + some_shares, + [int(s) for s in their_shareids] + ) def parse_params(serializedparams): pieces = serializedparams.split(b"-") diff --git a/src/allmydata/control.py b/src/allmydata/control.py deleted file mode 100644 index 7efa174ab..000000000 --- a/src/allmydata/control.py +++ /dev/null @@ -1,273 +0,0 @@ -"""Ported to Python 3. -""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - -import os, time, tempfile -from zope.interface import implementer -from twisted.application import service -from twisted.internet import defer -from twisted.internet.interfaces import IConsumer -from foolscap.api import Referenceable -from allmydata.interfaces import RIControlClient, IFileNode -from allmydata.util import fileutil, mathutil -from allmydata.immutable import upload -from allmydata.mutable.publish import MutableData -from twisted.python import log - -def get_memory_usage(): - # this is obviously linux-specific - stat_names = (b"VmPeak", - b"VmSize", - #b"VmHWM", - b"VmData") - stats = {} - try: - with open("/proc/self/status", "rb") as f: - for line in f: - name, right = 
line.split(b":",2) - if name in stat_names: - assert right.endswith(b" kB\n") - right = right[:-4] - stats[name] = int(right) * 1024 - except: - # Probably not on (a compatible version of) Linux - stats['VmSize'] = 0 - stats['VmPeak'] = 0 - return stats - -def log_memory_usage(where=""): - stats = get_memory_usage() - log.msg("VmSize: %9d VmPeak: %9d %s" % (stats[b"VmSize"], - stats[b"VmPeak"], - where)) - -@implementer(IConsumer) -class FileWritingConsumer(object): - def __init__(self, filename): - self.done = False - self.f = open(filename, "wb") - def registerProducer(self, p, streaming): - if streaming: - p.resumeProducing() - else: - while not self.done: - p.resumeProducing() - def write(self, data): - self.f.write(data) - def unregisterProducer(self): - self.done = True - self.f.close() - -@implementer(RIControlClient) -class ControlServer(Referenceable, service.Service): - - def remote_wait_for_client_connections(self, num_clients): - return self.parent.debug_wait_for_client_connections(num_clients) - - def remote_upload_random_data_from_file(self, size, convergence): - tempdir = tempfile.mkdtemp() - filename = os.path.join(tempdir, "data") - f = open(filename, "wb") - block = b"a" * 8192 - while size > 0: - l = min(size, 8192) - f.write(block[:l]) - size -= l - f.close() - uploader = self.parent.getServiceNamed("uploader") - u = upload.FileName(filename, convergence=convergence) - # XXX should pass reactor arg - d = uploader.upload(u) - d.addCallback(lambda results: results.get_uri()) - def _done(uri): - os.remove(filename) - os.rmdir(tempdir) - return uri - d.addCallback(_done) - return d - - def remote_download_to_tempfile_and_delete(self, uri): - tempdir = tempfile.mkdtemp() - filename = os.path.join(tempdir, "data") - filenode = self.parent.create_node_from_uri(uri, name=filename) - if not IFileNode.providedBy(filenode): - raise AssertionError("The URI does not reference a file.") - c = FileWritingConsumer(filename) - d = filenode.read(c) - def 
_done(res): - os.remove(filename) - os.rmdir(tempdir) - return None - d.addCallback(_done) - return d - - def remote_speed_test(self, count, size, mutable): - assert size > 8 - log.msg("speed_test: count=%d, size=%d, mutable=%s" % (count, size, - mutable)) - st = SpeedTest(self.parent, count, size, mutable) - return st.run() - - def remote_get_memory_usage(self): - return get_memory_usage() - - def remote_measure_peer_response_time(self): - # I'd like to average together several pings, but I don't want this - # phase to take more than 10 seconds. Expect worst-case latency to be - # 300ms. - results = {} - sb = self.parent.get_storage_broker() - everyone = sb.get_connected_servers() - num_pings = int(mathutil.div_ceil(10, (len(everyone) * 0.3))) - everyone = list(everyone) * num_pings - d = self._do_one_ping(None, everyone, results) - return d - def _do_one_ping(self, res, everyone_left, results): - if not everyone_left: - return results - server = everyone_left.pop(0) - server_name = server.get_longname() - storage_server = server.get_storage_server() - start = time.time() - d = storage_server.get_buckets(b"\x00" * 16) - def _done(ignored): - stop = time.time() - elapsed = stop - start - if server_name in results: - results[server_name].append(elapsed) - else: - results[server_name] = [elapsed] - d.addCallback(_done) - d.addCallback(self._do_one_ping, everyone_left, results) - def _average(res): - averaged = {} - for server_name,times in results.items(): - averaged[server_name] = sum(times) / len(times) - return averaged - d.addCallback(_average) - return d - -class SpeedTest(object): - def __init__(self, parent, count, size, mutable): - self.parent = parent - self.count = count - self.size = size - self.mutable_mode = mutable - self.uris = {} - self.basedir = self.parent.config.get_config_path("_speed_test_data") - - def run(self): - self.create_data() - d = self.do_upload() - d.addCallback(lambda res: self.do_download()) - d.addBoth(self.do_cleanup) - 
d.addCallback(lambda res: (self.upload_time, self.download_time)) - return d - - def create_data(self): - fileutil.make_dirs(self.basedir) - for i in range(self.count): - s = self.size - fn = os.path.join(self.basedir, str(i)) - if os.path.exists(fn): - os.unlink(fn) - f = open(fn, "wb") - f.write(os.urandom(8)) - s -= 8 - while s > 0: - chunk = min(s, 4096) - f.write(b"\x00" * chunk) - s -= chunk - f.close() - - def do_upload(self): - d = defer.succeed(None) - def _create_slot(res): - d1 = self.parent.create_mutable_file(b"") - def _created(n): - self._n = n - d1.addCallback(_created) - return d1 - if self.mutable_mode == "upload": - d.addCallback(_create_slot) - def _start(res): - self._start = time.time() - d.addCallback(_start) - - def _record_uri(uri, i): - self.uris[i] = uri - def _upload_one_file(ignored, i): - if i >= self.count: - return - fn = os.path.join(self.basedir, str(i)) - if self.mutable_mode == "create": - data = open(fn,"rb").read() - d1 = self.parent.create_mutable_file(data) - d1.addCallback(lambda n: n.get_uri()) - elif self.mutable_mode == "upload": - data = open(fn,"rb").read() - d1 = self._n.overwrite(MutableData(data)) - d1.addCallback(lambda res: self._n.get_uri()) - else: - up = upload.FileName(fn, convergence=None) - d1 = self.parent.upload(up) - d1.addCallback(lambda results: results.get_uri()) - d1.addCallback(_record_uri, i) - d1.addCallback(_upload_one_file, i+1) - return d1 - d.addCallback(_upload_one_file, 0) - def _upload_done(ignored): - stop = time.time() - self.upload_time = stop - self._start - d.addCallback(_upload_done) - return d - - def do_download(self): - start = time.time() - d = defer.succeed(None) - def _download_one_file(ignored, i): - if i >= self.count: - return - n = self.parent.create_node_from_uri(self.uris[i]) - if not IFileNode.providedBy(n): - raise AssertionError("The URI does not reference a file.") - if n.is_mutable(): - d1 = n.download_best_version() - else: - d1 = n.read(DiscardingConsumer()) - 
d1.addCallback(_download_one_file, i+1) - return d1 - d.addCallback(_download_one_file, 0) - def _download_done(ignored): - stop = time.time() - self.download_time = stop - start - d.addCallback(_download_done) - return d - - def do_cleanup(self, res): - for i in range(self.count): - fn = os.path.join(self.basedir, str(i)) - os.unlink(fn) - return res - -@implementer(IConsumer) -class DiscardingConsumer(object): - def __init__(self): - self.done = False - def registerProducer(self, p, streaming): - if streaming: - p.resumeProducing() - else: - while not self.done: - p.resumeProducing() - def write(self, data): - pass - def unregisterProducer(self): - self.done = True diff --git a/src/allmydata/crypto/__init__.py b/src/allmydata/crypto/__init__.py index 04b8f0cc3..ec50d070d 100644 --- a/src/allmydata/crypto/__init__.py +++ b/src/allmydata/crypto/__init__.py @@ -9,11 +9,3 @@ objects that `cryptography` documents. Ported to Python 3. """ -from __future__ import unicode_literals -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -from future.utils import PY2 -if PY2: - from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 diff --git a/src/allmydata/crypto/aes.py b/src/allmydata/crypto/aes.py index ad7cfcba4..4119f080b 100644 --- a/src/allmydata/crypto/aes.py +++ b/src/allmydata/crypto/aes.py @@ -9,16 +9,9 @@ objects that `cryptography` documents. Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals -from future.utils import PY2 -if PY2: - from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - -import six +from dataclasses import dataclass +from typing import Optional from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives.ciphers import ( @@ -27,34 +20,34 @@ from cryptography.hazmat.primitives.ciphers import ( modes, CipherContext, ) -from zope.interface import ( - Interface, - directlyProvides, -) DEFAULT_IV = b'\x00' * 16 -class IEncryptor(Interface): +@dataclass +class Encryptor: """ An object which can encrypt data. Create one using :func:`create_encryptor` and use it with :func:`encrypt_data` """ + encrypt_context: CipherContext -class IDecryptor(Interface): +@dataclass +class Decryptor: """ An object which can decrypt data. Create one using :func:`create_decryptor` and use it with :func:`decrypt_data` """ + decrypt_context: CipherContext -def create_encryptor(key, iv=None): +def create_encryptor(key: bytes, iv: Optional[bytes]=None) -> Encryptor: """ Create and return a new object which can do AES encryptions with the given key and initialization vector (IV). The default IV is 16 @@ -67,33 +60,30 @@ def create_encryptor(key, iv=None): or None for the default (which is 16 zero bytes) :returns: an object suitable for use with :func:`encrypt_data` (an - :class:`IEncryptor`) + :class:`Encryptor`) """ cryptor = _create_cryptor(key, iv) - directlyProvides(cryptor, IEncryptor) - return cryptor + return Encryptor(cryptor) -def encrypt_data(encryptor, plaintext): +def encrypt_data(encryptor: Encryptor, plaintext: bytes) -> bytes: """ AES-encrypt `plaintext` with the given `encryptor`. 
- :param encryptor: an instance of :class:`IEncryptor` previously + :param encryptor: an instance of :class:`Encryptor` previously returned from `create_encryptor` :param bytes plaintext: the data to encrypt :returns: bytes of ciphertext """ + if not isinstance(plaintext, (bytes, memoryview)): + raise ValueError(f'Plaintext must be bytes or memoryview: {type(plaintext)}') - _validate_cryptor(encryptor, encrypt=True) - if not isinstance(plaintext, six.binary_type): - raise ValueError('Plaintext must be bytes') - - return encryptor.update(plaintext) + return encryptor.encrypt_context.update(plaintext) -def create_decryptor(key, iv=None): +def create_decryptor(key: bytes, iv: Optional[bytes]=None) -> Decryptor: """ Create and return a new object which can do AES decryptions with the given key and initialization vector (IV). The default IV is 16 @@ -106,33 +96,30 @@ def create_decryptor(key, iv=None): or None for the default (which is 16 zero bytes) :returns: an object suitable for use with :func:`decrypt_data` (an - :class:`IDecryptor` instance) + :class:`Decryptor` instance) """ cryptor = _create_cryptor(key, iv) - directlyProvides(cryptor, IDecryptor) - return cryptor + return Decryptor(cryptor) -def decrypt_data(decryptor, plaintext): +def decrypt_data(decryptor: Decryptor, plaintext: bytes) -> bytes: """ AES-decrypt `plaintext` with the given `decryptor`. 
- :param decryptor: an instance of :class:`IDecryptor` previously + :param decryptor: an instance of :class:`Decryptor` previously returned from `create_decryptor` :param bytes plaintext: the data to decrypt :returns: bytes of ciphertext """ + if not isinstance(plaintext, (bytes, memoryview)): + raise ValueError(f'Plaintext must be bytes or memoryview: {type(plaintext)}') - _validate_cryptor(decryptor, encrypt=False) - if not isinstance(plaintext, six.binary_type): - raise ValueError('Plaintext must be bytes') - - return decryptor.update(plaintext) + return decryptor.decrypt_context.update(plaintext) -def _create_cryptor(key, iv): +def _create_cryptor(key: bytes, iv: Optional[bytes]) -> CipherContext: """ Internal helper. @@ -145,37 +132,21 @@ def _create_cryptor(key, iv): modes.CTR(iv), backend=default_backend() ) - return cipher.encryptor() + return cipher.encryptor() # type: ignore[return-type] -def _validate_cryptor(cryptor, encrypt=True): - """ - raise ValueError if `cryptor` is not a valid object - """ - klass = IEncryptor if encrypt else IDecryptor - name = "encryptor" if encrypt else "decryptor" - if not isinstance(cryptor, CipherContext): - raise ValueError( - "'{}' must be a CipherContext".format(name) - ) - if not klass.providedBy(cryptor): - raise ValueError( - "'{}' must be created with create_{}()".format(name, name) - ) - - -def _validate_key(key): +def _validate_key(key: bytes) -> bytes: """ confirm `key` is suitable for AES encryption, or raise ValueError """ - if not isinstance(key, six.binary_type): + if not isinstance(key, bytes): raise TypeError('Key must be bytes') if len(key) not in (16, 32): raise ValueError('Key must be 16 or 32 bytes long') return key -def _validate_iv(iv): +def _validate_iv(iv: Optional[bytes]) -> bytes: """ Returns a suitable initialiation vector. If `iv` is `None`, a default is returned. 
If `iv` is not a suitable initialization @@ -183,7 +154,7 @@ def _validate_iv(iv): """ if iv is None: return DEFAULT_IV - if not isinstance(iv, six.binary_type): + if not isinstance(iv, bytes): raise TypeError('IV must be bytes') if len(iv) != 16: raise ValueError('IV must be 16 bytes long') diff --git a/src/allmydata/crypto/ed25519.py b/src/allmydata/crypto/ed25519.py index 098fa9758..e2d2ceb49 100644 --- a/src/allmydata/crypto/ed25519.py +++ b/src/allmydata/crypto/ed25519.py @@ -13,20 +13,7 @@ cut-and-pasteability. The base62 encoding is shorter than the base32 form, but the minor usability improvement is not worth the documentation and specification confusion of using a non-standard encoding. So we stick with base32. - -Ported to Python 3. ''' -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - - -from future.utils import PY2 -if PY2: - from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - -import six from cryptography.exceptions import ( InvalidSignature, @@ -72,7 +59,7 @@ def verifying_key_from_signing_key(private_key): return private_key.public_key() -def sign_data(private_key, data): +def sign_data(private_key, data: bytes) -> bytes: """ Sign the given data using the given private key @@ -86,7 +73,7 @@ def sign_data(private_key, data): """ _validate_private_key(private_key) - if not isinstance(data, six.binary_type): + if not isinstance(data, bytes): raise ValueError('data must be bytes') return private_key.sign(data) @@ -110,7 +97,7 @@ def string_from_signing_key(private_key): return PRIVATE_KEY_PREFIX + b2a(raw_key_bytes) -def signing_keypair_from_string(private_key_bytes): +def signing_keypair_from_string(private_key_bytes: bytes): """ Load a signing keypair from a string of bytes (which includes the PRIVATE_KEY_PREFIX) @@ -118,7 +105,7 @@ def 
signing_keypair_from_string(private_key_bytes): :returns: a 2-tuple of (private_key, public_key) """ - if not isinstance(private_key_bytes, six.binary_type): + if not isinstance(private_key_bytes, bytes): raise ValueError('private_key_bytes must be bytes') private_key = Ed25519PrivateKey.from_private_bytes( @@ -127,7 +114,7 @@ def signing_keypair_from_string(private_key_bytes): return private_key, private_key.public_key() -def verify_signature(public_key, alleged_signature, data): +def verify_signature(public_key, alleged_signature: bytes, data: bytes): """ :param public_key: a verifying key @@ -139,10 +126,10 @@ def verify_signature(public_key, alleged_signature, data): :returns: None (or raises an exception). """ - if not isinstance(alleged_signature, six.binary_type): + if not isinstance(alleged_signature, bytes): raise ValueError('alleged_signature must be bytes') - if not isinstance(data, six.binary_type): + if not isinstance(data, bytes): raise ValueError('data must be bytes') _validate_public_key(public_key) @@ -159,7 +146,7 @@ def verifying_key_from_string(public_key_bytes): :returns: a public_key """ - if not isinstance(public_key_bytes, six.binary_type): + if not isinstance(public_key_bytes, bytes): raise ValueError('public_key_bytes must be bytes') return Ed25519PublicKey.from_public_bytes( @@ -167,7 +154,7 @@ def verifying_key_from_string(public_key_bytes): ) -def string_from_verifying_key(public_key): +def string_from_verifying_key(public_key) -> bytes: """ Encode a public key to a string of bytes @@ -183,7 +170,7 @@ def string_from_verifying_key(public_key): return PUBLIC_KEY_PREFIX + b2a(raw_key_bytes) -def _validate_public_key(public_key): +def _validate_public_key(public_key: Ed25519PublicKey): """ Internal helper. 
Verify that `public_key` is an appropriate object """ @@ -192,7 +179,7 @@ def _validate_public_key(public_key): return None -def _validate_private_key(private_key): +def _validate_private_key(private_key: Ed25519PrivateKey): """ Internal helper. Verify that `private_key` is an appropriate object """ diff --git a/src/allmydata/crypto/error.py b/src/allmydata/crypto/error.py index 153e48d33..f860cf57b 100644 --- a/src/allmydata/crypto/error.py +++ b/src/allmydata/crypto/error.py @@ -3,15 +3,6 @@ Exceptions raise by allmydata.crypto.* modules Ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - class BadSignature(Exception): """ diff --git a/src/allmydata/crypto/rsa.py b/src/allmydata/crypto/rsa.py index b5d15ad4a..e579a3d2a 100644 --- a/src/allmydata/crypto/rsa.py +++ b/src/allmydata/crypto/rsa.py @@ -9,17 +9,14 @@ features of any objects that `cryptography` documents. That is, the public and private keys are opaque objects; DO NOT depend on any of their methods. - -Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals -from future.utils import PY2 -if PY2: - from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 +from __future__ import annotations + +from typing_extensions import TypeAlias +from typing import Callable + +from functools import partial from cryptography.exceptions import InvalidSignature from cryptography.hazmat.backends import default_backend @@ -30,6 +27,8 @@ from cryptography.hazmat.primitives.serialization import load_der_private_key, l from allmydata.crypto.error import BadSignature +PublicKey: TypeAlias = rsa.RSAPublicKey +PrivateKey: TypeAlias = rsa.RSAPrivateKey # This is the value that was used by `pycryptopp`, and we must continue to use it for # both backwards compatibility and interoperability. @@ -46,12 +45,12 @@ RSA_PADDING = padding.PSS( -def create_signing_keypair(key_size): +def create_signing_keypair(key_size: int) -> tuple[PrivateKey, PublicKey]: """ Create a new RSA signing (private) keypair from scratch. Can be used with `sign_data` function. - :param int key_size: length of key in bits + :param key_size: length of key in bits :returns: 2-tuple of (private_key, public_key) """ @@ -63,24 +62,62 @@ def create_signing_keypair(key_size): return priv_key, priv_key.public_key() -def create_signing_keypair_from_string(private_key_der): +def create_signing_keypair_from_string(private_key_der: bytes) -> tuple[PrivateKey, PublicKey]: """ Create an RSA signing (private) key from previously serialized private key bytes. 
- :param bytes private_key_der: blob as returned from `der_string_from_signing_keypair` + :param private_key_der: blob as returned from `der_string_from_signing_keypair` :returns: 2-tuple of (private_key, public_key) """ - priv_key = load_der_private_key( + _load = partial( + load_der_private_key, private_key_der, password=None, backend=default_backend(), ) - return priv_key, priv_key.public_key() + + def load_with_validation() -> PrivateKey: + k = _load() + assert isinstance(k, PrivateKey) + return k + + def load_without_validation() -> PrivateKey: + k = _load(unsafe_skip_rsa_key_validation=True) + assert isinstance(k, PrivateKey) + return k + + # Load it once without the potentially expensive OpenSSL validation + # checks. These have superlinear complexity. We *will* run them just + # below - but first we'll apply our own constant-time checks. + load: Callable[[], PrivateKey] = load_without_validation + try: + unsafe_priv_key = load() + except TypeError: + # cryptography<39 does not support this parameter, so just load the + # key with validation... + unsafe_priv_key = load_with_validation() + # But avoid *reloading* it since that will run the expensive + # validation *again*. + load = lambda: unsafe_priv_key + + if not isinstance(unsafe_priv_key, rsa.RSAPrivateKey): + raise ValueError( + "Private Key did not decode to an RSA key" + ) + if unsafe_priv_key.key_size != 2048: + raise ValueError( + "Private Key must be 2048 bits" + ) + + # Now re-load it with OpenSSL's validation applied. 
+ safe_priv_key = load() + + return safe_priv_key, safe_priv_key.public_key() -def der_string_from_signing_key(private_key): +def der_string_from_signing_key(private_key: PrivateKey) -> bytes: """ Serializes a given RSA private key to a DER string @@ -90,14 +127,14 @@ def der_string_from_signing_key(private_key): :returns: bytes representing `private_key` """ _validate_private_key(private_key) - return private_key.private_bytes( + return private_key.private_bytes( # type: ignore[attr-defined] encoding=Encoding.DER, format=PrivateFormat.PKCS8, encryption_algorithm=NoEncryption(), ) -def der_string_from_verifying_key(public_key): +def der_string_from_verifying_key(public_key: PublicKey) -> bytes: """ Serializes a given RSA public key to a DER string. @@ -113,7 +150,7 @@ def der_string_from_verifying_key(public_key): ) -def create_verifying_key_from_string(public_key_der): +def create_verifying_key_from_string(public_key_der: bytes) -> PublicKey: """ Create an RSA verifying key from a previously serialized public key @@ -126,15 +163,16 @@ def create_verifying_key_from_string(public_key_der): public_key_der, backend=default_backend(), ) + assert isinstance(pub_key, PublicKey) return pub_key -def sign_data(private_key, data): +def sign_data(private_key: PrivateKey, data: bytes) -> bytes: """ :param private_key: the private part of a keypair returned from `create_signing_keypair_from_string` or `create_signing_keypair` - :param bytes data: the bytes to sign + :param data: the bytes to sign :returns: bytes which are a signature of the bytes given as `data`. 
""" @@ -145,7 +183,7 @@ def sign_data(private_key, data): hashes.SHA256(), ) -def verify_signature(public_key, alleged_signature, data): +def verify_signature(public_key: PublicKey, alleged_signature: bytes, data: bytes) -> None: """ :param public_key: a verifying key, returned from `create_verifying_key_from_string` or `create_verifying_key_from_private_key` @@ -165,23 +203,23 @@ def verify_signature(public_key, alleged_signature, data): raise BadSignature() -def _validate_public_key(public_key): +def _validate_public_key(public_key: PublicKey) -> None: """ Internal helper. Checks that `public_key` is a valid cryptography object """ if not isinstance(public_key, rsa.RSAPublicKey): raise ValueError( - "public_key must be an RSAPublicKey" + f"public_key must be an RSAPublicKey not {type(public_key)}" ) -def _validate_private_key(private_key): +def _validate_private_key(private_key: PrivateKey) -> None: """ Internal helper. Checks that `public_key` is a valid cryptography object """ if not isinstance(private_key, rsa.RSAPrivateKey): raise ValueError( - "private_key must be an RSAPrivateKey" + f"private_key must be an RSAPrivateKey not {type(private_key)}" ) diff --git a/src/allmydata/crypto/util.py b/src/allmydata/crypto/util.py index 8b8619e47..09836533f 100644 --- a/src/allmydata/crypto/util.py +++ b/src/allmydata/crypto/util.py @@ -3,14 +3,6 @@ Utilities used by allmydata.crypto modules Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 from allmydata.crypto.error import BadPrefixError diff --git a/src/allmydata/deep_stats.py b/src/allmydata/deep_stats.py index bfb43ebae..b3671718b 100644 --- a/src/allmydata/deep_stats.py +++ b/src/allmydata/deep_stats.py @@ -2,14 +2,6 @@ Ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 import math diff --git a/src/allmydata/dirnode.py b/src/allmydata/dirnode.py index fdf373b45..16be8b9ef 100644 --- a/src/allmydata/dirnode.py +++ b/src/allmydata/dirnode.py @@ -2,16 +2,6 @@ Ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - # Skip dict so it doesn't break things. - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, list, object, range, str, max, min # noqa: F401 -from past.builtins import unicode import time @@ -47,31 +37,29 @@ from eliot.twisted import ( ) NAME = Field.for_types( - u"name", - # Make sure this works on Python 2; with str, it gets Future str which - # breaks Eliot. 
- [unicode], - u"The name linking the parent to this node.", + "name", + [str], + "The name linking the parent to this node.", ) METADATA = Field.for_types( - u"metadata", + "metadata", [dict], - u"Data about a node.", + "Data about a node.", ) OVERWRITE = Field.for_types( - u"overwrite", + "overwrite", [bool], - u"True to replace an existing file of the same name, " - u"false to fail with a collision error.", + "True to replace an existing file of the same name, " + "false to fail with a collision error.", ) ADD_FILE = ActionType( - u"dirnode:add-file", + "dirnode:add-file", [NAME, METADATA, OVERWRITE], [], - u"Add a new file as a child of a directory.", + "Add a new file as a child of a directory.", ) @@ -678,8 +666,10 @@ class DirectoryNode(object): return d # XXX: Too many arguments? Worthwhile to break into mutable/immutable? - def create_subdirectory(self, namex, initial_children={}, overwrite=True, + def create_subdirectory(self, namex, initial_children=None, overwrite=True, mutable=True, mutable_version=None, metadata=None): + if initial_children is None: + initial_children = {} name = normalize(namex) if self.is_readonly(): return defer.fail(NotWriteableError()) diff --git a/src/allmydata/frontends/auth.py b/src/allmydata/frontends/auth.py index b61062334..973dd2301 100644 --- a/src/allmydata/frontends/auth.py +++ b/src/allmydata/frontends/auth.py @@ -1,18 +1,10 @@ """ Authentication for frontends. 
""" -from __future__ import unicode_literals -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 from zope.interface import implementer from twisted.internet import defer -from twisted.cred import error, checkers, credentials +from twisted.cred import checkers, credentials from twisted.conch.ssh import keys from twisted.conch.checkers import SSHPublicKeyChecker, InMemorySSHKeyDB @@ -32,65 +24,93 @@ class FTPAvatarID(object): @implementer(checkers.ICredentialsChecker) class AccountFileChecker(object): - credentialInterfaces = (credentials.IUsernamePassword, - credentials.IUsernameHashedPassword, - credentials.ISSHPrivateKey) + credentialInterfaces = (credentials.ISSHPrivateKey,) + def __init__(self, client, accountfile): self.client = client - self.passwords = BytesKeyDict() - pubkeys = BytesKeyDict() - self.rootcaps = BytesKeyDict() - with open(abspath_expanduser_unicode(accountfile), "rb") as f: - for line in f: - line = line.strip() - if line.startswith(b"#") or not line: - continue - name, passwd, rest = line.split(None, 2) - if passwd.startswith(b"ssh-"): - bits = rest.split() - keystring = b" ".join([passwd] + bits[:-1]) - key = keys.Key.fromString(keystring) - rootcap = bits[-1] - pubkeys[name] = [key] - else: - self.passwords[name] = passwd - rootcap = rest - self.rootcaps[name] = rootcap + path = abspath_expanduser_unicode(accountfile) + with open_account_file(path) as f: + self.rootcaps, pubkeys = load_account_file(f) self._pubkeychecker = SSHPublicKeyChecker(InMemorySSHKeyDB(pubkeys)) def _avatarId(self, username): return FTPAvatarID(username, self.rootcaps[username]) - def _cbPasswordMatch(self, matched, username): - if matched: - return self._avatarId(username) - raise 
error.UnauthorizedLogin - def requestAvatarId(self, creds): if credentials.ISSHPrivateKey.providedBy(creds): d = defer.maybeDeferred(self._pubkeychecker.requestAvatarId, creds) d.addCallback(self._avatarId) return d - elif credentials.IUsernameHashedPassword.providedBy(creds): - return self._checkPassword(creds) - elif credentials.IUsernamePassword.providedBy(creds): - return self._checkPassword(creds) - else: - raise NotImplementedError() + raise NotImplementedError() - def _checkPassword(self, creds): - """ - Determine whether the password in the given credentials matches the - password in the account file. +def open_account_file(path): + """ + Open and return the accounts file at the given path. + """ + return open(path, "rt", encoding="utf-8") - Returns a Deferred that fires with the username if the password matches - or with an UnauthorizedLogin failure otherwise. - """ - try: - correct = self.passwords[creds.username] - except KeyError: - return defer.fail(error.UnauthorizedLogin()) +def load_account_file(lines): + """ + Load credentials from an account file. - d = defer.maybeDeferred(creds.checkPassword, correct) - d.addCallback(self._cbPasswordMatch, creds.username) - return d + :param lines: An iterable of account lines to load. + + :return: See ``create_account_maps``. + """ + return create_account_maps( + parse_accounts( + content_lines( + lines, + ), + ), + ) + +def content_lines(lines): + """ + Drop empty and commented-out lines (``#``-prefixed) from an iterator of + lines. + + :param lines: An iterator of lines to process. + + :return: An iterator of lines including only those from ``lines`` that + include content intended to be loaded. + """ + for line in lines: + line = line.strip() + if line and not line.startswith("#"): + yield line + +def parse_accounts(lines): + """ + Parse account lines into their components (name, key, rootcap). 
+ """ + for line in lines: + name, passwd, rest = line.split(None, 2) + if not passwd.startswith("ssh-"): + raise ValueError( + "Password-based authentication is not supported; " + "configure key-based authentication instead." + ) + + bits = rest.split() + keystring = " ".join([passwd] + bits[:-1]) + key = keys.Key.fromString(keystring) + rootcap = bits[-1] + yield (name, key, rootcap) + +def create_account_maps(accounts): + """ + Build mappings from account names to keys and rootcaps. + + :param accounts: An iterator if (name, key, rootcap) tuples. + + :return: A tuple of two dicts. The first maps account names to rootcaps. + The second maps account names to public keys. + """ + rootcaps = BytesKeyDict() + pubkeys = BytesKeyDict() + for (name, key, rootcap) in accounts: + name_bytes = name.encode("utf-8") + rootcaps[name_bytes] = rootcap.encode("utf-8") + pubkeys[name_bytes] = [key] + return rootcaps, pubkeys diff --git a/src/allmydata/frontends/sftpd.py b/src/allmydata/frontends/sftpd.py index d2d614c77..b775fa49d 100644 --- a/src/allmydata/frontends/sftpd.py +++ b/src/allmydata/frontends/sftpd.py @@ -1,14 +1,6 @@ """ Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 import six import heapq, traceback, stat, struct @@ -53,9 +45,6 @@ noisy = True from allmydata.util.log import NOISY, OPERATIONAL, WEIRD, \ msg as logmsg, PrefixingLogMixin -if six.PY3: - long = int - def createSFTPError(errorCode, errorMessage): """ @@ -1925,7 +1914,11 @@ class FakeTransport(object): def loseConnection(self): logmsg("FakeTransport.loseConnection()", level=NOISY) - # getPeer and getHost can just raise errors, since we don't know what to return + def getHost(self): + raise NotImplementedError() + + def getPeer(self): + raise NotImplementedError() @implementer(ISession) @@ -1990,15 +1983,18 @@ class Dispatcher(object): def __init__(self, client): self._client = client - def requestAvatar(self, avatarID, mind, interface): + def requestAvatar(self, avatarId, mind, *interfaces): + [interface] = interfaces _assert(interface == IConchUser, interface=interface) - rootnode = self._client.create_node_from_uri(avatarID.rootcap) - handler = SFTPUserHandler(self._client, rootnode, avatarID.username) + rootnode = self._client.create_node_from_uri(avatarId.rootcap) + handler = SFTPUserHandler(self._client, rootnode, avatarId.username) return (interface, handler, handler.logout) class SFTPServer(service.MultiService): - name = "frontend:sftp" + # The type in Twisted for services is wrong in 22.10... 
+ # https://github.com/twisted/twisted/issues/10135 + name = "frontend:sftp" # type: ignore[assignment] def __init__(self, client, accountfile, sftp_portstr, pubkey_file, privkey_file): diff --git a/src/allmydata/grid_manager.py b/src/allmydata/grid_manager.py new file mode 100644 index 000000000..662f402d8 --- /dev/null +++ b/src/allmydata/grid_manager.py @@ -0,0 +1,499 @@ +""" +Functions and classes relating to the Grid Manager internal state +""" + +import sys +from datetime import ( + datetime, + timezone, +) +from typing import ( + Optional, + Union, + List, + IO +) + +from twisted.python.filepath import FilePath + +from allmydata.crypto import ( + ed25519, +) +from allmydata.util import ( + base32, + jsonbytes as json, + dictutil, +) + +from attrs import ( + frozen, + Factory, +) + + +@frozen +class SignedCertificate(object): + """ + A signed certificate. + """ + # A JSON-encoded, UTF-8-encoded certificate. + certificate : bytes + + # The signature (although the signature is in base32 in "public", + # this contains the decoded raw bytes -- not base32) + signature : bytes + + @classmethod + def load(cls, file_like): + data = json.load(file_like) + return cls( + certificate=data["certificate"].encode("utf-8"), + signature=base32.a2b(data["signature"].encode("ascii")), + ) + + def marshal(self): + """ + :return dict: a json-able dict + """ + return dict( + certificate=self.certificate, + signature=base32.b2a(self.signature), + ) + + +@frozen +class _GridManagerStorageServer(object): + """ + A Grid Manager's notion of a storage server + """ + + name : str + public_key : ed25519.Ed25519PublicKey + certificates : list = Factory(list) # SignedCertificates + + def add_certificate(self, certificate): + """ + Add ``certificate`` + """ + self.certificates.append(certificate) + + def public_key_string(self) -> bytes: + """ + :returns: the public key as bytes. 
+ """ + return ed25519.string_from_verifying_key(self.public_key) + + def marshal(self): + """ + :returns: a dict suitable for JSON representing this object + """ + return { + u"public_key": self.public_key_string(), + } + + +@frozen +class _GridManagerCertificate(object): + """ + Represents a single certificate for a single storage-server + """ + + filename : str + index : int + expires : datetime + public_key : ed25519.Ed25519PublicKey + + +def create_grid_manager(): + """ + Create a new Grid Manager with a fresh keypair + """ + private_key, public_key = ed25519.create_signing_keypair() + return _GridManager( + ed25519.string_from_signing_key(private_key), + {}, + ) + + +def current_datetime_with_zone(): + """ + :returns: a timezone-aware datetime object representing the + current timestamp in UTC + """ + return datetime.now(timezone.utc) + + +def _load_certificates_for(config_path: FilePath, name: str, gm_key=Optional[ed25519.Ed25519PublicKey]) -> List[_GridManagerCertificate]: + """ + Load any existing certificates for the given storage-server. + + :param FilePath config_path: the configuration location (or None for + stdin) + + :param str name: the name of an existing storage-server + + :param ed25519.Ed25519PublicKey gm_key: an optional Grid Manager + public key. If provided, certificates will be verified against it. 
+ + :returns: list containing any known certificates (may be empty) + + :raises: ed25519.BadSignature if any certificate signature fails to verify + """ + cert_index = 0 + cert_path = config_path.child('{}.cert.{}'.format(name, cert_index)) + certificates = [] + while cert_path.exists(): + container = SignedCertificate.load(cert_path.open('r')) + if gm_key is not None: + validate_grid_manager_certificate(gm_key, container) + cert_data = json.loads(container.certificate) + if cert_data['version'] != 1: + raise ValueError( + "Unknown certificate version '{}' in '{}'".format( + cert_data['version'], + cert_path.path, + ) + ) + certificates.append( + _GridManagerCertificate( + filename=cert_path.path, + index=cert_index, + expires=datetime.fromisoformat(cert_data['expires']), + public_key=ed25519.verifying_key_from_string(cert_data['public_key'].encode('ascii')), + ) + ) + cert_index += 1 + cert_path = config_path.child('{}.cert.{}'.format(name, cert_index)) + return certificates + + +def load_grid_manager(config_path: Optional[FilePath]): + """ + Load a Grid Manager from existing configuration. + + :param FilePath config_path: the configuration location (or None for + stdin) + + :returns: a GridManager instance + + :raises: ValueError if the confguration is invalid or IOError if + expected files can't be opened. 
+ """ + config_file: Union[IO[bytes], IO[str]] + if config_path is None: + config_file = sys.stdin + else: + # this might raise IOError or similar but caller must handle it + config_file = config_path.child("config.json").open("r") + + with config_file: + config = json.load(config_file) + + gm_version = config.get(u'grid_manager_config_version', None) + if gm_version != 0: + raise ValueError( + "Missing or unknown version '{}' of Grid Manager config".format( + gm_version + ) + ) + if 'private_key' not in config: + raise ValueError( + "'private_key' required in config" + ) + + private_key_bytes = config['private_key'].encode('ascii') + try: + private_key, public_key = ed25519.signing_keypair_from_string(private_key_bytes) + except Exception as e: + raise ValueError( + "Invalid Grid Manager private_key: {}".format(e) + ) + + storage_servers = dict() + for name, srv_config in list(config.get(u'storage_servers', {}).items()): + if 'public_key' not in srv_config: + raise ValueError( + "No 'public_key' for storage server '{}'".format(name) + ) + storage_servers[name] = _GridManagerStorageServer( + name, + ed25519.verifying_key_from_string(srv_config['public_key'].encode('ascii')), + [] if config_path is None else _load_certificates_for(config_path, name, public_key), + ) + + return _GridManager(private_key_bytes, storage_servers) + + +class _GridManager(object): + """ + A Grid Manager's configuration. 
+ """ + + def __init__(self, private_key_bytes, storage_servers): + self._storage_servers = dictutil.UnicodeKeyDict( + {} if storage_servers is None else storage_servers + ) + assert isinstance(private_key_bytes, bytes) + self._private_key_bytes = private_key_bytes + self._private_key, self._public_key = ed25519.signing_keypair_from_string(self._private_key_bytes) + self._version = 0 + + @property + def storage_servers(self): + return self._storage_servers + + def public_identity(self): + """ + :returns: public key as a string + """ + return ed25519.string_from_verifying_key(self._public_key) + + def sign(self, name, expiry): + """ + Create a new signed certificate for a particular server + + :param str name: the server to create a certificate for + + :param timedelta expiry: how far in the future the certificate + should expire. + + :returns SignedCertificate: the signed certificate. + """ + assert isinstance(name, str) # must be unicode + try: + srv = self._storage_servers[name] + except KeyError: + raise KeyError( + "No storage server named '{}'".format(name) + ) + expiration = current_datetime_with_zone() + expiry + cert_info = { + "expires": expiration.isoformat(), + "public_key": srv.public_key_string(), + "version": 1, + } + cert_data = json.dumps_bytes(cert_info, separators=(',',':'), sort_keys=True) + sig = ed25519.sign_data(self._private_key, cert_data) + certificate = SignedCertificate( + certificate=cert_data, + signature=sig, + ) + vk = ed25519.verifying_key_from_signing_key(self._private_key) + ed25519.verify_signature(vk, sig, cert_data) + + srv.add_certificate(certificate) + return certificate + + def add_storage_server(self, name, public_key): + """ + :param name: a user-meaningful name for the server + :param public_key: ed25519.VerifyingKey the public-key of the + storage provider (e.g. 
from the contents of node.pubkey + for the client) + """ + assert isinstance(name, str) # must be unicode + if name in self._storage_servers: + raise KeyError( + "Already have a storage server called '{}'".format(name) + ) + ss = _GridManagerStorageServer(name, public_key, []) + self._storage_servers[name] = ss + return ss + + def remove_storage_server(self, name): + """ + :param name: a user-meaningful name for the server + """ + assert isinstance(name, str) # must be unicode + try: + del self._storage_servers[name] + except KeyError: + raise KeyError( + "No storage server called '{}'".format(name) + ) + + def marshal(self): + """ + :returns: a dict suitable for JSON representing this object + """ + data = { + u"grid_manager_config_version": self._version, + u"private_key": self._private_key_bytes.decode('ascii'), + } + if self._storage_servers: + data[u"storage_servers"] = { + name: srv.marshal() + for name, srv + in self._storage_servers.items() + } + return data + + +def save_grid_manager(file_path, grid_manager, create=True): + """ + Writes a Grid Manager configuration. + + :param file_path: a FilePath specifying where to write the config + (if None, stdout is used) + + :param grid_manager: a _GridManager instance + + :param bool create: if True (the default) we are creating a new + grid-manager and will fail if the directory already exists. + """ + data = json.dumps( + grid_manager.marshal(), + indent=4, + ) + + if file_path is None: + print("{}\n".format(data)) + else: + try: + file_path.makedirs() + file_path.chmod(0o700) + except OSError: + if create: + raise + with file_path.child("config.json").open("w") as f: + f.write(data.encode("utf-8")) + f.write(b"\n") + + +def parse_grid_manager_certificate(gm_data: Union[str, bytes]): + """ + :param gm_data: some data that might be JSON that might be a valid + Grid Manager Certificate + + :returns: json data of a valid Grid Manager certificate, or an + exception if the data is not valid. 
+ """ + + required_keys = { + 'certificate', + 'signature', + } + + js = json.loads(gm_data) + + if not isinstance(js, dict): + raise ValueError( + "Grid Manager certificate must be a dict" + ) + if set(js.keys()) != required_keys: + raise ValueError( + "Grid Manager certificate must contain: {}".format( + ", ".join("'{}'".format(k) for k in required_keys), + ) + ) + return js + + +def validate_grid_manager_certificate(gm_key, alleged_cert): + """ + :param gm_key: a VerifyingKey instance, a Grid Manager's public + key. + + :param alleged_cert SignedCertificate: A signed certificate. + + :return: a dict consisting of the deserialized certificate data or + None if the signature is invalid. Note we do NOT check the + expiry time in this function. + """ + try: + ed25519.verify_signature( + gm_key, + alleged_cert.signature, + alleged_cert.certificate, + ) + except ed25519.BadSignature: + return None + # signature is valid; now we can load the actual data + cert = json.loads(alleged_cert.certificate) + return cert + + +def create_grid_manager_verifier(keys, certs, public_key, now_fn=None, bad_cert=None): + """ + Creates a predicate for confirming some Grid Manager-issued + certificates against Grid Manager keys. A predicate is used + (instead of just returning True/False here) so that the + expiry-time can be tested on each call. + + :param list keys: 0 or more ``VerifyingKey`` instances + + :param list certs: 1 or more Grid Manager certificates each of + which is a ``SignedCertificate``. + + :param str public_key: the identifier of the server we expect + certificates for. + + :param callable now_fn: a callable which returns the current UTC + timestamp (or current_datetime_with_zone() if None). + + :param callable bad_cert: a two-argument callable which is invoked + when a certificate verification fails. The first argument is + the verifying key and the second is the certificate. If None + (the default) errors are print()-ed. 
Note that we may have + several certificates and only one must be valid, so this may + be called (multiple times) even if the function ultimately + returns successfully. + + :returns: a callable which will return True only-if there is at + least one valid certificate (that has not at this moment + expired) in `certs` signed by one of the keys in `keys`. + """ + + now_fn = current_datetime_with_zone if now_fn is None else now_fn + valid_certs = [] + + # if we have zero grid-manager keys then everything is valid + if not keys: + return lambda: True + + if bad_cert is None: + + def bad_cert(key, alleged_cert): + """ + We might want to let the user know about this failed-to-verify + certificate .. but also if you have multiple grid-managers + then a bunch of these messages would appear. Better would + be to bubble this up to some sort of status API (or maybe + on the Welcome page?) + + The only thing that might actually be interesting, though, + is whether this whole function returns false or not.. + """ + print( + "Grid Manager certificate signature failed. 
Certificate: " + "\"{cert}\" for key \"{key}\".".format( + cert=alleged_cert, + key=ed25519.string_from_verifying_key(key), + ) + ) + + # validate the signatures on any certificates we have (not yet the expiry dates) + for alleged_cert in certs: + for key in keys: + cert = validate_grid_manager_certificate(key, alleged_cert) + if cert is not None: + valid_certs.append(cert) + else: + bad_cert(key, alleged_cert) + + def validate(): + """ + :returns: True if *any* certificate is still valid for a server + """ + now = now_fn() + for cert in valid_certs: + expires = datetime.fromisoformat(cert["expires"]) + pc = cert['public_key'].encode('ascii') + assert type(pc) == type(public_key), "{} isn't {}".format(type(pc), type(public_key)) + if pc == public_key: + if expires > now: + # not-expired + return True + return False + + return validate diff --git a/src/allmydata/hashtree.py b/src/allmydata/hashtree.py index 17467459b..6c4436958 100644 --- a/src/allmydata/hashtree.py +++ b/src/allmydata/hashtree.py @@ -49,15 +49,6 @@ or eat your children, but it might. Use at your own risk. Ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - from allmydata.util import mathutil # from the pyutil library from allmydata.util import base32 @@ -332,7 +323,7 @@ class IncompleteHashTree(CompleteBinaryTreeMixin, list): name += " (leaf [%d] of %d)" % (leafnum, numleaves) return name - def set_hashes(self, hashes={}, leaves={}): + def set_hashes(self, hashes=None, leaves=None): """Add a bunch of hashes to the tree. I will validate these to the best of my ability. 
If I already have a @@ -382,7 +373,10 @@ class IncompleteHashTree(CompleteBinaryTreeMixin, list): corrupted or one of the received hashes was corrupted. If it raises NotEnoughHashesError, then the otherhashes dictionary was incomplete. """ - + if hashes is None: + hashes = {} + if leaves is None: + leaves = {} assert isinstance(hashes, dict) for h in hashes.values(): assert isinstance(h, bytes) diff --git a/src/allmydata/history.py b/src/allmydata/history.py index b5cfb7318..befc8cf3d 100644 --- a/src/allmydata/history.py +++ b/src/allmydata/history.py @@ -1,14 +1,5 @@ """Ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - import weakref @@ -20,7 +11,7 @@ class History(object): MAX_UPLOAD_STATUSES = 10 MAX_MAPUPDATE_STATUSES = 20 MAX_PUBLISH_STATUSES = 20 - MAX_RETRIEVE_STATUSES = 20 + MAX_RETRIEVE_STATUSES = 40 def __init__(self, stats_provider=None): self.stats_provider = stats_provider diff --git a/src/allmydata/immutable/checker.py b/src/allmydata/immutable/checker.py index 30abc68c6..483ddb2a2 100644 --- a/src/allmydata/immutable/checker.py +++ b/src/allmydata/immutable/checker.py @@ -1,14 +1,6 @@ """ Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 from zope.interface import implementer from twisted.internet import defer diff --git a/src/allmydata/immutable/downloader/__init__.py b/src/allmydata/immutable/downloader/__init__.py index 2d3d9e2a4..d4f3fe345 100644 --- a/src/allmydata/immutable/downloader/__init__.py +++ b/src/allmydata/immutable/downloader/__init__.py @@ -1,13 +1,3 @@ """ Ported to Python 3. """ - -from __future__ import unicode_literals -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - diff --git a/src/allmydata/immutable/downloader/common.py b/src/allmydata/immutable/downloader/common.py index 71430b0d7..30f5bcf4b 100644 --- a/src/allmydata/immutable/downloader/common.py +++ b/src/allmydata/immutable/downloader/common.py @@ -2,15 +2,6 @@ Ported to Python 3. 
""" -from __future__ import unicode_literals -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - (AVAILABLE, PENDING, OVERDUE, COMPLETE, CORRUPT, DEAD, BADSEGNUM) = \ ("AVAILABLE", "PENDING", "OVERDUE", "COMPLETE", "CORRUPT", "DEAD", "BADSEGNUM") diff --git a/src/allmydata/immutable/downloader/fetcher.py b/src/allmydata/immutable/downloader/fetcher.py index 4e8b7f926..e8e4eefbc 100644 --- a/src/allmydata/immutable/downloader/fetcher.py +++ b/src/allmydata/immutable/downloader/fetcher.py @@ -2,15 +2,6 @@ Ported to Python 3. """ -from __future__ import print_function -from __future__ import absolute_import -from __future__ import division -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - from twisted.python.failure import Failure from foolscap.api import eventually from allmydata.interfaces import NotEnoughSharesError, NoSharesError @@ -63,13 +54,14 @@ class SegmentFetcher(object): self._running = True def stop(self): - log.msg("SegmentFetcher(%r).stop" % self._node._si_prefix, - level=log.NOISY, parent=self._lp, umid="LWyqpg") - self._cancel_all_requests() - self._running = False - # help GC ??? XXX - del self._shares, self._shares_from_server, self._active_share_map - del self._share_observers + if self._running: + log.msg("SegmentFetcher(%r).stop" % self._node._si_prefix, + level=log.NOISY, parent=self._lp, umid="LWyqpg") + self._cancel_all_requests() + self._running = False + # help GC ??? 
+ del self._shares, self._shares_from_server, self._active_share_map + del self._share_observers # called by our parent _Node diff --git a/src/allmydata/immutable/downloader/finder.py b/src/allmydata/immutable/downloader/finder.py index 4f6d1aa14..886859e6e 100644 --- a/src/allmydata/immutable/downloader/finder.py +++ b/src/allmydata/immutable/downloader/finder.py @@ -1,14 +1,7 @@ """ Ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 from six import ensure_str import time diff --git a/src/allmydata/immutable/downloader/node.py b/src/allmydata/immutable/downloader/node.py index 10ce0e5c7..d2fec8ec1 100644 --- a/src/allmydata/immutable/downloader/node.py +++ b/src/allmydata/immutable/downloader/node.py @@ -1,14 +1,6 @@ """ Ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 import time now = time.time @@ -19,7 +11,7 @@ from foolscap.api import eventually from allmydata import uri from allmydata.codec import CRSDecoder from allmydata.util import base32, log, hashutil, mathutil, observer -from allmydata.interfaces import DEFAULT_MAX_SEGMENT_SIZE +from allmydata.interfaces import DEFAULT_IMMUTABLE_MAX_SEGMENT_SIZE from allmydata.hashtree import IncompleteHashTree, BadHashError, \ NotEnoughHashesError @@ -49,6 +41,8 @@ class DownloadNode(object): """Internal class which manages downloads and holds state. 
External callers use CiphertextFileNode instead.""" + default_max_segment_size = DEFAULT_IMMUTABLE_MAX_SEGMENT_SIZE + # Share._node points to me def __init__(self, verifycap, storage_broker, secret_holder, terminator, history, download_status): @@ -76,7 +70,7 @@ class DownloadNode(object): # .guessed_segment_size, .guessed_num_segments, and # .ciphertext_hash_tree (with a dummy, to let us guess which hashes # we'll need) - self._build_guessed_tables(DEFAULT_MAX_SEGMENT_SIZE) + self._build_guessed_tables(self.default_max_segment_size) # filled in when we parse a valid UEB self.have_UEB = False @@ -130,8 +124,8 @@ class DownloadNode(object): def stop(self): # called by the Terminator at shutdown, mostly for tests if self._active_segment: - self._active_segment.stop() - self._active_segment = None + seg, self._active_segment = self._active_segment, None + seg.stop() self._sharefinder.stop() # things called by outside callers, via CiphertextFileNode. get_segment() @@ -408,16 +402,16 @@ class DownloadNode(object): def fetch_failed(self, sf, f): assert sf is self._active_segment + self._active_segment = None # deliver error upwards for (d,c,seg_ev) in self._extract_requests(sf.segnum): seg_ev.error(now()) eventually(self._deliver, d, c, f) - self._active_segment = None self._start_new_segment() def process_blocks(self, segnum, blocks): start = now() - d = defer.maybeDeferred(self._decode_blocks, segnum, blocks) + d = self._decode_blocks(segnum, blocks) d.addCallback(self._check_ciphertext_hash, segnum) def _deliver(result): log.msg(format="delivering segment(%(segnum)d)", @@ -432,6 +426,7 @@ class DownloadNode(object): eventually(self._deliver, d, c, result) else: (offset, segment, decodetime) = result + self._active_segment = None for (d,c,seg_ev) in self._extract_requests(segnum): # when we have two requests for the same segment, the # second one will not be "activated" before the data is @@ -444,7 +439,6 @@ class DownloadNode(object): seg_ev.deliver(when, offset, 
len(segment), decodetime) eventually(self._deliver, d, c, result) self._download_status.add_misc_event("process_block", start, now()) - self._active_segment = None self._start_new_segment() d.addBoth(_deliver) d.addErrback(log.err, "unhandled error during process_blocks", @@ -531,11 +525,12 @@ class DownloadNode(object): self._segment_requests = [t for t in self._segment_requests if t[2] != cancel] segnums = [segnum for (segnum,d,c,seg_ev,lp) in self._segment_requests] + # self._active_segment might be None in rare circumstances, so make # sure we tolerate it if self._active_segment and self._active_segment.segnum not in segnums: - self._active_segment.stop() - self._active_segment = None + seg, self._active_segment = self._active_segment, None + seg.stop() self._start_new_segment() # called by ShareFinder to choose hashtree sizes in CommonShares, and by diff --git a/src/allmydata/immutable/downloader/segmentation.py b/src/allmydata/immutable/downloader/segmentation.py index 6d7cb7676..80166c965 100644 --- a/src/allmydata/immutable/downloader/segmentation.py +++ b/src/allmydata/immutable/downloader/segmentation.py @@ -1,14 +1,6 @@ """ Ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 import time now = time.time diff --git a/src/allmydata/immutable/downloader/share.py b/src/allmydata/immutable/downloader/share.py index 016f1c34d..7bbf2b900 100644 --- a/src/allmydata/immutable/downloader/share.py +++ b/src/allmydata/immutable/downloader/share.py @@ -1,14 +1,6 @@ """ Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 import struct import time diff --git a/src/allmydata/immutable/downloader/status.py b/src/allmydata/immutable/downloader/status.py index 425f6893c..4136db3c5 100644 --- a/src/allmydata/immutable/downloader/status.py +++ b/src/allmydata/immutable/downloader/status.py @@ -1,14 +1,6 @@ """ Ported to Python 3. """ -from __future__ import division -from __future__ import absolute_import -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 import itertools from zope.interface import implementer diff --git a/src/allmydata/immutable/encode.py b/src/allmydata/immutable/encode.py index 42fc18077..9d7af2650 100644 --- a/src/allmydata/immutable/encode.py +++ b/src/allmydata/immutable/encode.py @@ -4,15 +4,6 @@ Ported to Python 3. """ -from __future__ import division -from __future__ import absolute_import -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - import time from zope.interface import implementer from twisted.internet import defer @@ -262,6 +253,8 @@ class Encoder(object): d.addCallback(lambda res: self.finish_hashing()) + # These calls have to happen in order; layout.py now requires writes to + # be appended to the data written so far. 
d.addCallback(lambda res: self.send_crypttext_hash_tree_to_all_shareholders()) d.addCallback(lambda res: self.send_all_block_hash_trees()) @@ -694,3 +687,24 @@ class Encoder(object): return self.uri_extension_data def get_uri_extension_hash(self): return self.uri_extension_hash + + def get_uri_extension_size(self): + """ + Calculate the size of the URI extension that gets written at the end of + immutables. + + This may be done earlier than actual encoding, so e.g. we might not + know the crypttext hashes, but that's fine for our purposes since we + only care about the length. + """ + params = self.uri_extension_data.copy() + params["crypttext_hash"] = b"\x00" * hashutil.CRYPTO_VAL_SIZE + params["crypttext_root_hash"] = b"\x00" * hashutil.CRYPTO_VAL_SIZE + params["share_root_hash"] = b"\x00" * hashutil.CRYPTO_VAL_SIZE + assert params.keys() == { + "codec_name", "codec_params", "size", "segment_size", "num_segments", + "needed_shares", "total_shares", "tail_codec_params", + "crypttext_hash", "crypttext_root_hash", "share_root_hash" + }, params.keys() + uri_extension = uri.pack_extension(params) + return len(uri_extension) diff --git a/src/allmydata/immutable/filenode.py b/src/allmydata/immutable/filenode.py index 6962c31a4..8dda50bee 100644 --- a/src/allmydata/immutable/filenode.py +++ b/src/allmydata/immutable/filenode.py @@ -2,15 +2,6 @@ Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - from functools import reduce import binascii from time import time as now diff --git a/src/allmydata/immutable/happiness_upload.py b/src/allmydata/immutable/happiness_upload.py index 3e3eedbc9..a0af17891 100644 --- a/src/allmydata/immutable/happiness_upload.py +++ b/src/allmydata/immutable/happiness_upload.py @@ -4,15 +4,6 @@ on. Ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - # We omit dict, just in case newdict breaks things for external Python 2 code. - from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, list, object, range, str, max, min # noqa: F401 from queue import PriorityQueue diff --git a/src/allmydata/immutable/layout.py b/src/allmydata/immutable/layout.py index 79c886237..9154f2f30 100644 --- a/src/allmydata/immutable/layout.py +++ b/src/allmydata/immutable/layout.py @@ -1,24 +1,22 @@ """ Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 +from __future__ import annotations import struct +from io import BytesIO + +from attrs import define, field from zope.interface import implementer from twisted.internet import defer from allmydata.interfaces import IStorageBucketWriter, IStorageBucketReader, \ FileTooLargeError, HASH_SIZE -from allmydata.util import mathutil, observer, pipeline, log +from allmydata.util import mathutil, observer, log from allmydata.util.assertutil import precondition from allmydata.storage.server import si_b2a + class LayoutInvalid(Exception): """ There is something wrong with these bytes so they can't be interpreted as the kind of immutable file that I know how to download.""" @@ -90,7 +88,7 @@ FORCE_V2 = False # set briefly by unit tests to make small-sized V2 shares def make_write_bucket_proxy(rref, server, data_size, block_size, num_segments, - num_share_hashes, uri_extension_size_max): + num_share_hashes, uri_extension_size): # Use layout v1 for small files, so they'll be readable by older versions # ( bool: + """ + Queue a write. If the result is ``False``, no further action is needed + for now. If the result is some ``True``, it's time to call ``flush()`` + and do a real write. 
+ """ + self._to_write.write(data) + return self.get_queued_bytes() >= self._batch_size + + def flush(self) -> tuple[int, bytes]: + """Return offset and data to be written.""" + offset = self._written_bytes + data = self._to_write.getvalue() + self._written_bytes += len(data) + self._to_write = BytesIO() + return (offset, data) + + def get_queued_bytes(self) -> int: + """Return number of queued, unwritten bytes.""" + return self._to_write.tell() + + def get_total_bytes(self) -> int: + """Return how many bytes were written or queued in total.""" + return self._written_bytes + self.get_queued_bytes() + + @implementer(IStorageBucketWriter) class WriteBucketProxy(object): + """ + Note: The various ``put_`` methods need to be called in the order in which the + bytes will get written. + """ fieldsize = 4 fieldstruct = ">L" def __init__(self, rref, server, data_size, block_size, num_segments, - num_share_hashes, uri_extension_size_max, pipeline_size=50000): + num_share_hashes, uri_extension_size, batch_size=1_000_000): self._rref = rref self._server = server self._data_size = data_size @@ -124,20 +162,21 @@ class WriteBucketProxy(object): # how many share hashes are included in each share? This will be # about ln2(num_shares). self._share_hashtree_size = num_share_hashes * (2+HASH_SIZE) - # we commit to not sending a uri extension larger than this - self._uri_extension_size_max = uri_extension_size_max + self._uri_extension_size = uri_extension_size self._create_offsets(block_size, data_size) - # k=3, max_segment_size=128KiB gives us a typical segment of 43691 - # bytes. Setting the default pipeline_size to 50KB lets us get two - # segments onto the wire but not a third, which would keep the pipe - # filled. - self._pipeline = pipeline.Pipeline(pipeline_size) + # With a ~1MB batch size, max upload speed is 1MB/(round-trip latency) + # assuming the writing code waits for writes to finish, so 20MB/sec if + # latency is 50ms. 
In the US many people only have 1MB/sec upload speed + # as of 2022 (standard Comcast). For further discussion of how one + # might set batch sizes see + # https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3787#comment:1. + self._write_buffer = _WriteBuffer(batch_size) def get_allocated_size(self): return (self._offsets['uri_extension'] + self.fieldsize + - self._uri_extension_size_max) + self._uri_extension_size) def _create_offsets(self, block_size, data_size): if block_size >= 2**32 or data_size >= 2**32: @@ -178,7 +217,7 @@ class WriteBucketProxy(object): return "" % self._server.get_name() def put_header(self): - return self._write(0, self._offset_data) + return self._queue_write(0, self._offset_data) def put_block(self, segmentnum, data): offset = self._offsets['data'] + segmentnum * self._block_size @@ -192,9 +231,17 @@ class WriteBucketProxy(object): (self._block_size * (self._num_segments - 1))), len(data), self._block_size) - return self._write(offset, data) + return self._queue_write(offset, data) def put_crypttext_hashes(self, hashes): + # plaintext_hash_tree precedes crypttext_hash_tree. It is not used, and + # so is not explicitly written, but we need to write everything, so + # fill it in with nulls. 
+ d = self._queue_write(self._offsets['plaintext_hash_tree'], b"\x00" * self._segment_hash_size) + d.addCallback(lambda _: self._really_put_crypttext_hashes(hashes)) + return d + + def _really_put_crypttext_hashes(self, hashes): offset = self._offsets['crypttext_hash_tree'] assert isinstance(hashes, list) data = b"".join(hashes) @@ -203,7 +250,7 @@ class WriteBucketProxy(object): precondition(offset + len(data) <= self._offsets['block_hashes'], offset, len(data), offset+len(data), self._offsets['block_hashes']) - return self._write(offset, data) + return self._queue_write(offset, data) def put_block_hashes(self, blockhashes): offset = self._offsets['block_hashes'] @@ -214,7 +261,7 @@ class WriteBucketProxy(object): precondition(offset + len(data) <= self._offsets['share_hashes'], offset, len(data), offset+len(data), self._offsets['share_hashes']) - return self._write(offset, data) + return self._queue_write(offset, data) def put_share_hashes(self, sharehashes): # sharehashes is a list of (index, hash) tuples, so they get stored @@ -228,29 +275,45 @@ class WriteBucketProxy(object): precondition(offset + len(data) <= self._offsets['uri_extension'], offset, len(data), offset+len(data), self._offsets['uri_extension']) - return self._write(offset, data) + return self._queue_write(offset, data) def put_uri_extension(self, data): offset = self._offsets['uri_extension'] assert isinstance(data, bytes) - precondition(len(data) <= self._uri_extension_size_max, - len(data), self._uri_extension_size_max) + precondition(len(data) == self._uri_extension_size) length = struct.pack(self.fieldstruct, len(data)) - return self._write(offset, length+data) + return self._queue_write(offset, length+data) - def _write(self, offset, data): - # use a Pipeline to pipeline several writes together. 
TODO: another - # speedup would be to coalesce small writes into a single call: this - # would reduce the foolscap CPU overhead per share, but wouldn't - # reduce the number of round trips, so it might not be worth the - # effort. + def _queue_write(self, offset, data): + """ + This queues up small writes to be written in a single batched larger + write. - return self._pipeline.add(len(data), - self._rref.callRemote, "write", offset, data) + Callers of this function are expected to queue the data in order, with + no holes. As such, the offset is technically unnecessary, but is used + to check the inputs. Possibly we should get rid of it. + """ + assert offset == self._write_buffer.get_total_bytes() + if self._write_buffer.queue_write(data): + return self._actually_write() + else: + return defer.succeed(False) + + def _actually_write(self): + """Write data to the server.""" + offset, data = self._write_buffer.flush() + return self._rref.callRemote("write", offset, data) def close(self): - d = self._pipeline.add(0, self._rref.callRemote, "close") - d.addCallback(lambda ign: self._pipeline.flush()) + assert self._write_buffer.get_total_bytes() == self.get_allocated_size(), ( + f"{self._written_buffer.get_total_bytes_queued()} != {self.get_allocated_size()}" + ) + if self._write_buffer.get_queued_bytes() > 0: + d = self._actually_write() + else: + # No data queued, don't send empty string write. 
+ d = defer.succeed(True) + d.addCallback(lambda _: self._rref.callRemote("close")) return d def abort(self): @@ -303,8 +366,6 @@ class WriteBucketProxy_v2(WriteBucketProxy): @implementer(IStorageBucketReader) class ReadBucketProxy(object): - MAX_UEB_SIZE = 2000 # actual size is closer to 419, but varies by a few bytes - def __init__(self, rref, server, storage_index): self._rref = rref self._server = server @@ -332,11 +393,6 @@ class ReadBucketProxy(object): # TODO: for small shares, read the whole bucket in _start() d = self._fetch_header() d.addCallback(self._parse_offsets) - # XXX The following two callbacks implement a slightly faster/nicer - # way to get the ueb and sharehashtree, but it requires that the - # storage server be >= v1.3.0. - # d.addCallback(self._fetch_sharehashtree_and_ueb) - # d.addCallback(self._parse_sharehashtree_and_ueb) def _fail_waiters(f): self._ready.fire(f) def _notify_waiters(result): @@ -369,41 +425,18 @@ class ReadBucketProxy(object): self._fieldsize = fieldsize self._fieldstruct = fieldstruct - for field in ( 'data', - 'plaintext_hash_tree', # UNUSED - 'crypttext_hash_tree', - 'block_hashes', - 'share_hashes', - 'uri_extension', - ): + for field_name in ( 'data', + 'plaintext_hash_tree', # UNUSED + 'crypttext_hash_tree', + 'block_hashes', + 'share_hashes', + 'uri_extension', + ): offset = struct.unpack(fieldstruct, data[x:x+fieldsize])[0] x += fieldsize - self._offsets[field] = offset + self._offsets[field_name] = offset return self._offsets - def _fetch_sharehashtree_and_ueb(self, offsets): - sharehashtree_size = offsets['uri_extension'] - offsets['share_hashes'] - return self._read(offsets['share_hashes'], - self.MAX_UEB_SIZE+sharehashtree_size) - - def _parse_sharehashtree_and_ueb(self, data): - sharehashtree_size = self._offsets['uri_extension'] - self._offsets['share_hashes'] - if len(data) < sharehashtree_size: - raise LayoutInvalid("share hash tree truncated -- should have at least %d bytes -- not %d" % 
(sharehashtree_size, len(data))) - if sharehashtree_size % (2+HASH_SIZE) != 0: - raise LayoutInvalid("share hash tree malformed -- should have an even multiple of %d bytes -- not %d" % (2+HASH_SIZE, sharehashtree_size)) - self._share_hashes = [] - for i in range(0, sharehashtree_size, 2+HASH_SIZE): - hashnum = struct.unpack(">H", data[i:i+2])[0] - hashvalue = data[i+2:i+2+HASH_SIZE] - self._share_hashes.append( (hashnum, hashvalue) ) - - i = self._offsets['uri_extension']-self._offsets['share_hashes'] - if len(data) < i+self._fieldsize: - raise LayoutInvalid("not enough bytes to encode URI length -- should be at least %d bytes long, not %d " % (i+self._fieldsize, len(data),)) - length = struct.unpack(self._fieldstruct, data[i:i+self._fieldsize])[0] - self._ueb_data = data[i+self._fieldsize:i+self._fieldsize+length] - def _get_block_data(self, unused, blocknum, blocksize, thisblocksize): offset = self._offsets['data'] + blocknum * blocksize return self._read(offset, thisblocksize) @@ -446,20 +479,18 @@ class ReadBucketProxy(object): else: return defer.succeed([]) - def _get_share_hashes(self, unused=None): - if hasattr(self, '_share_hashes'): - return self._share_hashes - return self._get_share_hashes_the_old_way() - def get_share_hashes(self): d = self._start_if_needed() d.addCallback(self._get_share_hashes) return d - def _get_share_hashes_the_old_way(self): + def _get_share_hashes(self, _ignore): """ Tahoe storage servers < v1.3.0 would return an error if you tried to read past the end of the share, so we need to use the offset and - read just that much.""" + read just that much. + + HTTP-based storage protocol also doesn't like reading past the end. 
+ """ offset = self._offsets['share_hashes'] size = self._offsets['uri_extension'] - offset if size % (2+HASH_SIZE) != 0: @@ -477,32 +508,29 @@ class ReadBucketProxy(object): d.addCallback(_unpack_share_hashes) return d - def _get_uri_extension_the_old_way(self, unused=None): + def _get_uri_extension(self, unused=None): """ Tahoe storage servers < v1.3.0 would return an error if you tried to read past the end of the share, so we need to fetch the UEB size - and then read just that much.""" + and then read just that much. + + HTTP-based storage protocol also doesn't like reading past the end. + """ offset = self._offsets['uri_extension'] d = self._read(offset, self._fieldsize) def _got_length(data): if len(data) != self._fieldsize: raise LayoutInvalid("not enough bytes to encode URI length -- should be %d bytes long, not %d " % (self._fieldsize, len(data),)) length = struct.unpack(self._fieldstruct, data)[0] - if length >= 2**31: - # URI extension blocks are around 419 bytes long, so this - # must be corrupted. Anyway, the foolscap interface schema - # for "read" will not allow >= 2**31 bytes length. + if length >= 2000: + # URI extension blocks are around 419 bytes long; in previous + # versions of the code 1000 was used as a default catchall. So + # 2000 or more must be corrupted. raise RidiculouslyLargeURIExtensionBlock(length) return self._read(offset+self._fieldsize, length) d.addCallback(_got_length) return d - def _get_uri_extension(self, unused=None): - if hasattr(self, '_ueb_data'): - return self._ueb_data - else: - return self._get_uri_extension_the_old_way() - def get_uri_extension(self): d = self._start_if_needed() d.addCallback(self._get_uri_extension) diff --git a/src/allmydata/immutable/literal.py b/src/allmydata/immutable/literal.py index 544a205e1..05f0ed1bc 100644 --- a/src/allmydata/immutable/literal.py +++ b/src/allmydata/immutable/literal.py @@ -2,15 +2,6 @@ Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - from io import BytesIO from zope.interface import implementer diff --git a/src/allmydata/immutable/offloaded.py b/src/allmydata/immutable/offloaded.py index 8ce51782c..c609f3b8a 100644 --- a/src/allmydata/immutable/offloaded.py +++ b/src/allmydata/immutable/offloaded.py @@ -1,14 +1,6 @@ """ Ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 import os, stat, time, weakref from zope.interface import implementer diff --git a/src/allmydata/immutable/repairer.py b/src/allmydata/immutable/repairer.py index bccd8453d..d12220810 100644 --- a/src/allmydata/immutable/repairer.py +++ b/src/allmydata/immutable/repairer.py @@ -1,14 +1,6 @@ """ Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 from zope.interface import implementer from twisted.internet import defer diff --git a/src/allmydata/immutable/upload.py b/src/allmydata/immutable/upload.py index cb332dfdf..de59d3dc9 100644 --- a/src/allmydata/immutable/upload.py +++ b/src/allmydata/immutable/upload.py @@ -2,22 +2,10 @@ Ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals +from __future__ import annotations -from future.utils import PY2, native_str -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 -from past.builtins import long, unicode from six import ensure_str -try: - from typing import List -except ImportError: - pass - import os, time, weakref, itertools import attr @@ -48,7 +36,7 @@ from allmydata.util.rrefutil import add_version_to_remote_reference from allmydata.interfaces import IUploadable, IUploader, IUploadResults, \ IEncryptedUploadable, RIEncryptedUploadable, IUploadStatus, \ NoServersError, InsufficientVersionError, UploadUnhappinessError, \ - DEFAULT_MAX_SEGMENT_SIZE, IPeerSelector + DEFAULT_IMMUTABLE_MAX_SEGMENT_SIZE, IPeerSelector from allmydata.immutable import layout from io import BytesIO @@ -67,7 +55,7 @@ from eliot import ( _TOTAL_SHARES = Field.for_types( u"total_shares", - [int, long], + [int], u"The total number of shares desired.", ) @@ -88,7 +76,7 @@ _READONLY_PEERS = Field( def _serialize_existing_shares(existing_shares): return { - server: list(shares) + 
ensure_str(server): list(shares) for (server, shares) in existing_shares.items() } @@ -101,7 +89,7 @@ _EXISTING_SHARES = Field( def _serialize_happiness_mappings(happiness_mappings): return { - sharenum: base32.b2a(serverid) + str(sharenum): ensure_str(base32.b2a(serverid)) for (sharenum, serverid) in happiness_mappings.items() } @@ -114,7 +102,7 @@ _HAPPINESS_MAPPINGS = Field( _HAPPINESS = Field.for_types( u"happiness", - [int, long], + [int], u"The computed happiness of a certain placement.", ) @@ -122,7 +110,7 @@ _UPLOAD_TRACKERS = Field( u"upload_trackers", lambda trackers: list( dict( - server=tracker.get_name(), + server=ensure_str(tracker.get_name()), shareids=sorted(tracker.buckets.keys()), ) for tracker @@ -133,7 +121,7 @@ _UPLOAD_TRACKERS = Field( _ALREADY_SERVERIDS = Field( u"already_serverids", - lambda d: d, + lambda d: {str(k): v for k, v in d.items()}, u"Some servers which are already holding some shares that we were interested in uploading.", ) @@ -152,7 +140,7 @@ GET_SHARE_PLACEMENTS = MessageType( _EFFECTIVE_HAPPINESS = Field.for_types( u"effective_happiness", - [int, long], + [int], u"The computed happiness value of a share placement map.", ) @@ -176,7 +164,7 @@ class HelperUploadResults(Copyable, RemoteCopy): # package/module/class name # # Needs to be native string to make Foolscap happy. - typeToCopy = native_str("allmydata.upload.UploadResults.tahoe.allmydata.com") + typeToCopy = "allmydata.upload.UploadResults.tahoe.allmydata.com" copytype = typeToCopy # also, think twice about changing the shape of any existing attribute, @@ -242,31 +230,26 @@ class UploadResults(object): def get_verifycapstr(self): return self._verifycapstr -# our current uri_extension is 846 bytes for small files, a few bytes -# more for larger ones (since the filesize is encoded in decimal in a -# few places). Ask for a little bit more just in case we need it. If -# the extension changes size, we can change EXTENSION_SIZE to -# allocate a more accurate amount of space. 
-EXTENSION_SIZE = 1000 -# TODO: actual extensions are closer to 419 bytes, so we can probably lower -# this. def pretty_print_shnum_to_servers(s): return ', '.join([ "sh%s: %s" % (k, '+'.join([idlib.shortnodeid_b2a(x) for x in v])) for k, v in s.items() ]) + class ServerTracker(object): def __init__(self, server, sharesize, blocksize, num_segments, num_share_hashes, storage_index, - bucket_renewal_secret, bucket_cancel_secret): + bucket_renewal_secret, bucket_cancel_secret, + uri_extension_size): self._server = server self.buckets = {} # k: shareid, v: IRemoteBucketWriter self.sharesize = sharesize + self.uri_extension_size = uri_extension_size wbp = layout.make_write_bucket_proxy(None, None, sharesize, blocksize, num_segments, num_share_hashes, - EXTENSION_SIZE) + uri_extension_size) self.wbp_class = wbp.__class__ # to create more of them self.allocated_size = wbp.get_allocated_size() self.blocksize = blocksize @@ -314,7 +297,7 @@ class ServerTracker(object): self.blocksize, self.num_segments, self.num_share_hashes, - EXTENSION_SIZE) + self.uri_extension_size) b[sharenum] = bp self.buckets.update(b) return (alreadygot, set(b.keys())) @@ -487,7 +470,7 @@ class Tahoe2ServerSelector(log.PrefixingLogMixin): def get_shareholders(self, storage_broker, secret_holder, storage_index, share_size, block_size, num_segments, total_shares, needed_shares, - min_happiness): + min_happiness, uri_extension_size): """ @return: (upload_trackers, already_serverids), where upload_trackers is a set of ServerTracker instances that have agreed to hold @@ -529,7 +512,8 @@ class Tahoe2ServerSelector(log.PrefixingLogMixin): # figure out how much space to ask for wbp = layout.make_write_bucket_proxy(None, None, share_size, 0, num_segments, - num_share_hashes, EXTENSION_SIZE) + num_share_hashes, + uri_extension_size) allocated_size = wbp.get_allocated_size() # decide upon the renewal/cancel secrets, to include them in the @@ -547,14 +531,14 @@ class Tahoe2ServerSelector(log.PrefixingLogMixin): 
# 0. Start with an ordered list of servers. Maybe *2N* of them. # - all_servers = storage_broker.get_servers_for_psi(storage_index) + all_servers = storage_broker.get_servers_for_psi(storage_index, for_upload=True) if not all_servers: raise NoServersError("client gave us zero servers") def _create_server_tracker(server, renew, cancel): return ServerTracker( server, share_size, block_size, num_segments, num_share_hashes, - storage_index, renew, cancel, + storage_index, renew, cancel, uri_extension_size ) readonly_trackers, write_trackers = self._create_trackers( @@ -919,12 +903,12 @@ class _Accum(object): :ivar remaining: The number of bytes still expected. :ivar ciphertext: The bytes accumulated so far. """ - remaining = attr.ib(validator=attr.validators.instance_of(int)) # type: int - ciphertext = attr.ib(default=attr.Factory(list)) # type: List[bytes] + remaining : int = attr.ib(validator=attr.validators.instance_of(int)) + ciphertext : list[bytes] = attr.ib(default=attr.Factory(list)) def extend(self, size, # type: int - ciphertext, # type: List[bytes] + ciphertext, # type: list[bytes] ): """ Accumulate some more ciphertext. @@ -1326,7 +1310,8 @@ class CHKUploader(object): d = server_selector.get_shareholders(storage_broker, secret_holder, storage_index, share_size, block_size, - num_segments, n, k, desired) + num_segments, n, k, desired, + encoder.get_uri_extension_size()) def _done(res): self._server_selection_elapsed = time.time() - server_selection_started return res @@ -1404,7 +1389,9 @@ class CHKUploader(object): def get_upload_status(self): return self._upload_status -def read_this_many_bytes(uploadable, size, prepend_data=[]): +def read_this_many_bytes(uploadable, size, prepend_data=None): + if prepend_data is None: + prepend_data = [] if size == 0: return defer.succeed([]) d = uploadable.read(size) @@ -1633,7 +1620,7 @@ class AssistedUploader(object): # abbreviated), so if we detect old results, just clobber them. 
sharemap = upload_results.sharemap - if any(isinstance(v, (bytes, unicode)) for v in sharemap.values()): + if any(isinstance(v, (bytes, str)) for v in sharemap.values()): upload_results.sharemap = None def _build_verifycap(self, helper_upload_results): @@ -1695,7 +1682,7 @@ class AssistedUploader(object): class BaseUploadable(object): # this is overridden by max_segment_size - default_max_segment_size = DEFAULT_MAX_SEGMENT_SIZE + default_max_segment_size = DEFAULT_IMMUTABLE_MAX_SEGMENT_SIZE default_params_set = False max_segment_size = None @@ -1712,7 +1699,7 @@ class BaseUploadable(object): def set_default_encoding_parameters(self, default_params): assert isinstance(default_params, dict) for k,v in default_params.items(): - precondition(isinstance(k, (bytes, unicode)), k, v) + precondition(isinstance(k, (bytes, str)), k, v) precondition(isinstance(v, int), k, v) if "k" in default_params: self.default_encoding_param_k = default_params["k"] @@ -1854,7 +1841,9 @@ class Uploader(service.MultiService, log.PrefixingLogMixin): """I am a service that allows file uploading. I am a service-child of the Client. """ - name = "uploader" + # The type in Twisted for services is wrong in 22.10... + # https://github.com/twisted/twisted/issues/10135 + name = "uploader" # type: ignore[assignment] URI_LIT_SIZE_THRESHOLD = 55 def __init__(self, helper_furl=None, stats_provider=None, history=None): diff --git a/src/allmydata/interfaces.py b/src/allmydata/interfaces.py index 5522663ee..e44a0e8bb 100644 --- a/src/allmydata/interfaces.py +++ b/src/allmydata/interfaces.py @@ -5,23 +5,14 @@ Ported to Python 3. 
Note that for RemoteInterfaces, the __remote_name__ needs to be a native string because of https://github.com/warner/foolscap/blob/43f4485a42c9c28e2c79d655b3a9e24d4e6360ca/src/foolscap/remoteinterface.py#L67 """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals -from future.utils import PY2, native_str -if PY2: - # Don't import object/str/dict/etc. types, so we don't break any - # interfaces. Not importing open() because it triggers bogus flake8 error. - from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, pow, round, super, range, max, min # noqa: F401 - -from past.builtins import long +from typing import Dict from zope.interface import Interface, Attribute from twisted.plugin import ( IPlugin, ) +from twisted.internet.defer import Deferred from foolscap.api import StringConstraint, ListOf, TupleOf, SetOf, DictOf, \ ChoiceOf, IntegerConstraint, Any, RemoteInterface, Referenceable @@ -41,7 +32,8 @@ URI = StringConstraint(300) # kind of arbitrary MAX_BUCKETS = 256 # per peer -- zfec offers at most 256 shares per file -DEFAULT_MAX_SEGMENT_SIZE = 128*1024 +# The default size for segments of new CHK ("immutable") uploads. 
+DEFAULT_IMMUTABLE_MAX_SEGMENT_SIZE = 1024*1024 ShareData = StringConstraint(None) URIExtensionData = StringConstraint(1000) @@ -52,6 +44,8 @@ WriteEnablerSecret = Hash # used to protect mutable share modifications LeaseRenewSecret = Hash # used to protect lease renewal requests LeaseCancelSecret = Hash # was used to protect lease cancellation requests +class NoSpace(Exception): + """Storage space was not available for a space-allocating operation.""" class DataTooLargeError(Exception): """The write went past the expected size of the bucket.""" @@ -115,7 +109,7 @@ ReadData = ListOf(ShareData) class RIStorageServer(RemoteInterface): - __remote_name__ = native_str("RIStorageServer.tahoe.allmydata.com") + __remote_name__ = "RIStorageServer.tahoe.allmydata.com" def get_version(): """ @@ -304,12 +298,15 @@ class RIStorageServer(RemoteInterface): store that on disk. """ +# The result of IStorageServer.get_version(): +VersionMessage = Dict[bytes, object] + class IStorageServer(Interface): """ An object capable of storing shares for a storage client. """ - def get_version(): + def get_version() -> Deferred[VersionMessage]: """ :see: ``RIStorageServer.get_version`` """ @@ -490,47 +487,6 @@ class IStorageBroker(Interface): @return: unicode nickname, or None """ - # methods moved from IntroducerClient, need review - def get_all_connections(): - """Return a frozenset of (nodeid, service_name, rref) tuples, one for - each active connection we've established to a remote service. This is - mostly useful for unit tests that need to wait until a certain number - of connections have been made.""" - - def get_all_connectors(): - """Return a dict that maps from (nodeid, service_name) to a - RemoteServiceConnector instance for all services that we are actively - trying to connect to. 
Each RemoteServiceConnector has the following - public attributes:: - - service_name: the type of service provided, like 'storage' - last_connect_time: when we last established a connection - last_loss_time: when we last lost a connection - - version: the peer's version, from the most recent connection - oldest_supported: the peer's oldest supported version, same - - rref: the RemoteReference, if connected, otherwise None - - This method is intended for monitoring interfaces, such as a web page - that describes connecting and connected peers. - """ - - def get_all_peerids(): - """Return a frozenset of all peerids to whom we have a connection (to - one or more services) established. Mostly useful for unit tests.""" - - def get_all_connections_for(service_name): - """Return a frozenset of (nodeid, service_name, rref) tuples, one - for each active connection that provides the given SERVICE_NAME.""" - - def get_permuted_peers(service_name, key): - """Returns an ordered list of (peerid, rref) tuples, selecting from - the connections that provide SERVICE_NAME, using a hash-based - permutation keyed by KEY. This randomizes the service list in a - repeatable way, to distribute load over many peers. - """ - class IDisplayableServer(Interface): def get_nickname(): @@ -548,14 +504,10 @@ class IServer(IDisplayableServer): def start_connecting(trigger_cb): pass - def get_rref(): - """Obsolete. Use ``get_storage_server`` instead. - - Once a server is connected, I return a RemoteReference. - Before a server is connected for the first time, I return None. - - Note that the rref I return will start producing DeadReferenceErrors - once the connection is lost. + def upload_permitted(): + """ + :return: True if we should use this server for uploads, False + otherwise. """ def get_storage_server(): @@ -568,8 +520,6 @@ class IServer(IDisplayableServer): """ - - class IMutableSlotWriter(Interface): """ The interface for a writer around a mutable slot on a remote server. 
@@ -1440,7 +1390,7 @@ class IDirectoryNode(IFilesystemNode): is a file, or if must_be_file is True and the child is a directory, I raise ChildOfWrongTypeError.""" - def create_subdirectory(name, initial_children={}, overwrite=True, + def create_subdirectory(name, initial_children=None, overwrite=True, mutable=True, mutable_version=None, metadata=None): """I create and attach a directory at the given name. The new directory can be empty, or it can be populated with children @@ -2579,7 +2529,7 @@ class IClient(Interface): @return: a Deferred that fires with an IMutableFileNode instance. """ - def create_dirnode(initial_children={}): + def create_dirnode(initial_children=None): """Create a new unattached dirnode, possibly with initial children. @param initial_children: dict with keys that are unicode child names, @@ -2634,7 +2584,7 @@ class INodeMaker(Interface): for use by unit tests, to create mutable files that are smaller than usual.""" - def create_new_mutable_directory(initial_children={}): + def create_new_mutable_directory(initial_children=None): """I create a new mutable directory, and return a Deferred that will fire with the IDirectoryNode instance when it is ready. 
If initial_children= is provided (a dict mapping unicode child name to @@ -2815,13 +2765,13 @@ UploadResults = Any() #DictOf(bytes, bytes) class RIEncryptedUploadable(RemoteInterface): - __remote_name__ = native_str("RIEncryptedUploadable.tahoe.allmydata.com") + __remote_name__ = "RIEncryptedUploadable.tahoe.allmydata.com" def get_size(): return Offset def get_all_encoding_parameters(): - return (int, int, int, long) + return (int, int, int, int) def read_encrypted(offset=Offset, length=ReadSize): return ListOf(bytes) @@ -2831,7 +2781,7 @@ class RIEncryptedUploadable(RemoteInterface): class RICHKUploadHelper(RemoteInterface): - __remote_name__ = native_str("RIUploadHelper.tahoe.allmydata.com") + __remote_name__ = "RIUploadHelper.tahoe.allmydata.com" def get_version(): """ @@ -2844,7 +2794,7 @@ class RICHKUploadHelper(RemoteInterface): class RIHelper(RemoteInterface): - __remote_name__ = native_str("RIHelper.tahoe.allmydata.com") + __remote_name__ = "RIHelper.tahoe.allmydata.com" def get_version(): """ diff --git a/src/allmydata/introducer/__init__.py b/src/allmydata/introducer/__init__.py index bfc960e05..52aa56597 100644 --- a/src/allmydata/introducer/__init__.py +++ b/src/allmydata/introducer/__init__.py @@ -2,16 +2,6 @@ Ported to Python 3. """ -from __future__ import unicode_literals -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - - from allmydata.introducer.server import create_introducer # apparently need to support "old .tac files" that may have diff --git a/src/allmydata/introducer/client.py b/src/allmydata/introducer/client.py index 07f8a5f7a..fd605dd1d 100644 --- a/src/allmydata/introducer/client.py +++ b/src/allmydata/introducer/client.py @@ -1,15 +1,6 @@ """ Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 -from past.builtins import long from six import ensure_text, ensure_str @@ -35,7 +26,7 @@ class InvalidCacheError(Exception): V2 = b"http://allmydata.org/tahoe/protocols/introducer/v2" -@implementer(RIIntroducerSubscriberClient_v2, IIntroducerClient) +@implementer(RIIntroducerSubscriberClient_v2, IIntroducerClient) # type: ignore[misc] class IntroducerClient(service.Service, Referenceable): def __init__(self, tub, introducer_furl, @@ -311,7 +302,7 @@ class IntroducerClient(service.Service, Referenceable): if "seqnum" in old: # must beat previous sequence number to replace if ("seqnum" not in ann - or not isinstance(ann["seqnum"], (int,long))): + or not isinstance(ann["seqnum"], int)): self.log("not replacing old announcement, no valid seqnum: %s" % (ann,), parent=lp2, level=log.NOISY, umid="zFGH3Q") diff --git a/src/allmydata/introducer/common.py b/src/allmydata/introducer/common.py index f6f70d861..3b85318ce 100644 --- a/src/allmydata/introducer/common.py +++ b/src/allmydata/introducer/common.py @@ -1,14 +1,6 @@ """ Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 import re diff --git a/src/allmydata/introducer/interfaces.py b/src/allmydata/introducer/interfaces.py index 24fd3945f..e714d7340 100644 --- a/src/allmydata/introducer/interfaces.py +++ b/src/allmydata/introducer/interfaces.py @@ -2,16 +2,6 @@ Ported to Python 3. """ -from __future__ import unicode_literals -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -from future.utils import PY2, native_str -if PY2: - # Omitted types (bytes etc.) so future variants don't confuse Foolscap. - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, object, range, max, min # noqa: F401 - from zope.interface import Interface from foolscap.api import StringConstraint, SetOf, DictOf, Any, \ RemoteInterface, Referenceable @@ -41,7 +31,7 @@ FURL = StringConstraint(1000) Announcement_v2 = Any() class RIIntroducerSubscriberClient_v2(RemoteInterface): - __remote_name__ = native_str("RIIntroducerSubscriberClient_v2.tahoe.allmydata.com") + __remote_name__ = "RIIntroducerSubscriberClient_v2.tahoe.allmydata.com" def announce_v2(announcements=SetOf(Announcement_v2)): """I accept announcements from the publisher.""" @@ -54,11 +44,14 @@ class RIIntroducerPublisherAndSubscriberService_v2(RemoteInterface): announcement message. I will deliver a copy to all connected subscribers. 
To hear about services, connect to me and subscribe to a specific service_name.""" - __remote_name__ = native_str("RIIntroducerPublisherAndSubscriberService_v2.tahoe.allmydata.com") + __remote_name__ = "RIIntroducerPublisherAndSubscriberService_v2.tahoe.allmydata.com" + def get_version(): return DictOf(bytes, Any()) + def publish_v2(announcement=Announcement_v2, canary=Referenceable): return None + def subscribe_v2(subscriber=RIIntroducerSubscriberClient_v2, service_name=bytes, subscriber_info=SubscriberInfo): """Give me a subscriber reference, and I will call its announce_v2() diff --git a/src/allmydata/introducer/server.py b/src/allmydata/introducer/server.py index 1e28f511b..10048c55e 100644 --- a/src/allmydata/introducer/server.py +++ b/src/allmydata/introducer/server.py @@ -2,24 +2,12 @@ Ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals +from __future__ import annotations - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 -from past.builtins import long from six import ensure_text import time, os.path, textwrap - -try: - from typing import Any, Dict, Union -except ImportError: - pass +from typing import Any, Union from zope.interface import implementer from twisted.application import service @@ -39,7 +27,6 @@ from allmydata.introducer.common import unsign_from_foolscap, \ from allmydata.node import read_config from allmydata.node import create_node_dir from allmydata.node import create_connection_handlers -from allmydata.node import create_control_tub from allmydata.node import create_tub_options from allmydata.node import create_main_tub @@ -80,23 +67,19 @@ def create_introducer(basedir=u"."): default_connection_handlers, foolscap_connection_handlers = 
create_connection_handlers(config, i2p_provider, tor_provider) tub_options = create_tub_options(config) - # we don't remember these because the Introducer doesn't make - # outbound connections. - i2p_provider = None - tor_provider = None main_tub = create_main_tub( config, tub_options, default_connection_handlers, foolscap_connection_handlers, i2p_provider, tor_provider, ) - control_tub = create_control_tub() node = _IntroducerNode( config, main_tub, - control_tub, i2p_provider, tor_provider, ) + i2p_provider.setServiceParent(node) + tor_provider.setServiceParent(node) return defer.succeed(node) except Exception: return Failure() @@ -105,8 +88,8 @@ def create_introducer(basedir=u"."): class _IntroducerNode(node.Node): NODETYPE = "introducer" - def __init__(self, config, main_tub, control_tub, i2p_provider, tor_provider): - node.Node.__init__(self, config, main_tub, control_tub, i2p_provider, tor_provider) + def __init__(self, config, main_tub, i2p_provider, tor_provider): + node.Node.__init__(self, config, main_tub, i2p_provider, tor_provider) self.init_introducer() webport = self.get_config("node", "web.port", None) if webport: @@ -136,7 +119,7 @@ class _IntroducerNode(node.Node): os.rename(old_public_fn, private_fn) furl = self.tub.registerReference(introducerservice, furlFile=private_fn) - self.log(" introducer is at %s" % furl, umid="qF2L9A") + self.log(" introducer can be found in {!r}".format(private_fn), umid="qF2L9A") self.introducer_url = furl # for tests def init_web(self, webport): @@ -158,17 +141,20 @@ def stringify_remote_address(rref): return str(remote) +# MyPy doesn't work well with remote interfaces... @implementer(RIIntroducerPublisherAndSubscriberService_v2) -class IntroducerService(service.MultiService, Referenceable): - name = "introducer" +class IntroducerService(service.MultiService, Referenceable): # type: ignore[misc] + # The type in Twisted for services is wrong in 22.10... 
+ # https://github.com/twisted/twisted/issues/10135 + name = "introducer" # type: ignore[assignment] # v1 is the original protocol, added in 1.0 (but only advertised starting # in 1.3), removed in 1.12. v2 is the new signed protocol, added in 1.10 # TODO: reconcile bytes/str for keys - VERSION = { + VERSION : dict[Union[bytes, str], Any]= { #"http://allmydata.org/tahoe/protocols/introducer/v1": { }, b"http://allmydata.org/tahoe/protocols/introducer/v2": { }, b"application-version": allmydata.__full_version__.encode("utf-8"), - } # type: Dict[Union[bytes, str], Any] + } def __init__(self): service.MultiService.__init__(self) @@ -275,7 +261,7 @@ class IntroducerService(service.MultiService, Referenceable): if "seqnum" in old_ann: # must beat previous sequence number to replace if ("seqnum" not in ann - or not isinstance(ann["seqnum"], (int,long))): + or not isinstance(ann["seqnum"], int)): self.log("not replacing old ann, no valid seqnum", level=log.NOISY, umid="ySbaVw") self._debug_counts["inbound_no_seqnum"] += 1 diff --git a/src/allmydata/listeners.py b/src/allmydata/listeners.py new file mode 100644 index 000000000..f97f699b4 --- /dev/null +++ b/src/allmydata/listeners.py @@ -0,0 +1,121 @@ +""" +Define a protocol for listening on a transport such that Tahoe-LAFS can +communicate over it, manage configuration for it in its configuration file, +detect when it is possible to use it, etc. +""" + +from __future__ import annotations + +from typing import Any, Protocol, Sequence, Mapping, Optional, Union, Awaitable +from typing_extensions import Literal + +from attrs import frozen +from twisted.python.usage import Options + +from .interfaces import IAddressFamily +from .util.iputil import allocate_tcp_port +from .node import _Config + +@frozen +class ListenerConfig: + """ + :ivar tub_ports: Entries to merge into ``[node]tub.port``. + + :ivar tub_locations: Entries to merge into ``[node]tub.location``. 
+ + :ivar node_config: Entries to add into the overall Tahoe-LAFS + configuration beneath a section named after this listener. + """ + tub_ports: Sequence[str] + tub_locations: Sequence[str] + node_config: Mapping[str, Sequence[tuple[str, str]]] + +class Listener(Protocol): + """ + An object which can listen on a transport and allow Tahoe-LAFS + communication to happen over it. + """ + def is_available(self) -> bool: + """ + Can this type of listener actually be used in this runtime + environment? + """ + + def can_hide_ip(self) -> bool: + """ + Can the transport supported by this type of listener conceal the + node's public internet address from peers? + """ + + async def create_config(self, reactor: Any, cli_config: Options) -> Optional[ListenerConfig]: + """ + Set up an instance of this listener according to the given + configuration parameters. + + This may also allocate ephemeral resources if necessary. + + :return: The created configuration which can be merged into the + overall *tahoe.cfg* configuration file. + """ + + def create(self, reactor: Any, config: _Config) -> IAddressFamily: + """ + Instantiate this listener according to the given + previously-generated configuration. + + :return: A handle on the listener which can be used to integrate it + into the Tahoe-LAFS node. + """ + +class TCPProvider: + """ + Support plain TCP connections. 
+ """ + def is_available(self) -> Literal[True]: + return True + + def can_hide_ip(self) -> Literal[False]: + return False + + async def create_config(self, reactor: Any, cli_config: Options) -> ListenerConfig: + tub_ports = [] + tub_locations = [] + if cli_config["port"]: # --port/--location are a pair + tub_ports.append(cli_config["port"]) + tub_locations.append(cli_config["location"]) + else: + assert "hostname" in cli_config + hostname = cli_config["hostname"] + new_port = allocate_tcp_port() + tub_ports.append(f"tcp:{new_port}") + tub_locations.append(f"tcp:{hostname}:{new_port}") + + return ListenerConfig(tub_ports, tub_locations, {}) + + def create(self, reactor: Any, config: _Config) -> IAddressFamily: + raise NotImplementedError() + + +@frozen +class StaticProvider: + """ + A provider that uses all pre-computed values. + """ + _available: bool + _hide_ip: bool + _config: Union[Awaitable[Optional[ListenerConfig]], Optional[ListenerConfig]] + _address: IAddressFamily + + def is_available(self) -> bool: + return self._available + + def can_hide_ip(self) -> bool: + return self._hide_ip + + async def create_config(self, reactor: Any, cli_config: Options) -> Optional[ListenerConfig]: + if self._config is None or isinstance(self._config, ListenerConfig): + return self._config + return await self._config + + def create(self, reactor: Any, config: _Config) -> IAddressFamily: + return self._address diff --git a/src/allmydata/monitor.py b/src/allmydata/monitor.py index 1559a30d9..0a213635b 100644 --- a/src/allmydata/monitor.py +++ b/src/allmydata/monitor.py @@ -3,14 +3,6 @@ Manage status of long-running operations. Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 from zope.interface import Interface, implementer from allmydata.util import observer diff --git a/src/allmydata/mutable/checker.py b/src/allmydata/mutable/checker.py index 0899168c3..14120c476 100644 --- a/src/allmydata/mutable/checker.py +++ b/src/allmydata/mutable/checker.py @@ -1,14 +1,7 @@ """ Ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 from six import ensure_str from allmydata.uri import from_string diff --git a/src/allmydata/mutable/common.py b/src/allmydata/mutable/common.py index 87951c7b2..d663638e7 100644 --- a/src/allmydata/mutable/common.py +++ b/src/allmydata/mutable/common.py @@ -1,14 +1,7 @@ """ Ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 +from __future__ import annotations MODE_CHECK = "MODE_CHECK" # query all peers MODE_ANYTHING = "MODE_ANYTHING" # one recoverable version @@ -17,6 +10,9 @@ MODE_WRITE = "MODE_WRITE" # replace all shares, probably.. 
not for initial MODE_READ = "MODE_READ" MODE_REPAIR = "MODE_REPAIR" # query all peers, get the privkey +from allmydata.crypto import aes, rsa +from allmydata.util import hashutil + class NotWriteableError(Exception): pass @@ -68,3 +64,33 @@ class CorruptShareError(BadShareError): class UnknownVersionError(BadShareError): """The share we received was of a version we don't recognize.""" + + +def encrypt_privkey(writekey: bytes, privkey: bytes) -> bytes: + """ + For SSK, encrypt a private ("signature") key using the writekey. + """ + encryptor = aes.create_encryptor(writekey) + crypttext = aes.encrypt_data(encryptor, privkey) + return crypttext + +def decrypt_privkey(writekey: bytes, enc_privkey: bytes) -> bytes: + """ + The inverse of ``encrypt_privkey``. + """ + decryptor = aes.create_decryptor(writekey) + privkey = aes.decrypt_data(decryptor, enc_privkey) + return privkey + +def derive_mutable_keys(keypair: tuple[rsa.PublicKey, rsa.PrivateKey]) -> tuple[bytes, bytes, bytes]: + """ + Derive the SSK writekey, encrypted writekey, and fingerprint from the + public/private ("verification" / "signature") keypair. + """ + pubkey, privkey = keypair + pubkey_s = rsa.der_string_from_verifying_key(pubkey) + privkey_s = rsa.der_string_from_signing_key(privkey) + writekey = hashutil.ssk_writekey_hash(privkey_s) + encprivkey = encrypt_privkey(writekey, privkey_s) + fingerprint = hashutil.ssk_pubkey_fingerprint_hash(pubkey_s) + return writekey, encprivkey, fingerprint diff --git a/src/allmydata/mutable/filenode.py b/src/allmydata/mutable/filenode.py index cd8cb0dc7..ede74d249 100644 --- a/src/allmydata/mutable/filenode.py +++ b/src/allmydata/mutable/filenode.py @@ -1,14 +1,7 @@ """ Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 +from __future__ import annotations import random @@ -16,20 +9,25 @@ from zope.interface import implementer from twisted.internet import defer, reactor from foolscap.api import eventually -from allmydata.crypto import aes -from allmydata.crypto import rsa from allmydata.interfaces import IMutableFileNode, ICheckable, ICheckResults, \ NotEnoughSharesError, MDMF_VERSION, SDMF_VERSION, IMutableUploadable, \ IMutableFileVersion, IWriteable from allmydata.util import hashutil, log, consumer, deferredutil, mathutil from allmydata.util.assertutil import precondition +from allmydata.util.cputhreadpool import defer_to_thread from allmydata.uri import WriteableSSKFileURI, ReadonlySSKFileURI, \ WriteableMDMFFileURI, ReadonlyMDMFFileURI from allmydata.monitor import Monitor from allmydata.mutable.publish import Publish, MutableData,\ TransformingUploadable -from allmydata.mutable.common import MODE_READ, MODE_WRITE, MODE_CHECK, UnrecoverableFileError, \ - UncoordinatedWriteError +from allmydata.mutable.common import ( + MODE_READ, + MODE_WRITE, + MODE_CHECK, + UnrecoverableFileError, + UncoordinatedWriteError, + derive_mutable_keys, +) from allmydata.mutable.servermap import ServerMap, ServermapUpdater from allmydata.mutable.retrieve import Retrieve from allmydata.mutable.checker import MutableChecker, MutableCheckAndRepairer @@ -131,7 +129,8 @@ class MutableFileNode(object): return self - def create_with_keys(self, keypair, contents, + @deferredutil.async_to_deferred + async def create_with_keys(self, keypair, contents, version=SDMF_VERSION): """Call this to create a brand-new mutable file. 
It will create the shares, find homes for them, and upload the initial contents (created @@ -139,13 +138,10 @@ class MutableFileNode(object): Deferred that fires (with the MutableFileNode instance you should use) when it completes. """ - (pubkey, privkey) = keypair - self._pubkey, self._privkey = pubkey, privkey - pubkey_s = rsa.der_string_from_verifying_key(self._pubkey) - privkey_s = rsa.der_string_from_signing_key(self._privkey) - self._writekey = hashutil.ssk_writekey_hash(privkey_s) - self._encprivkey = self._encrypt_privkey(self._writekey, privkey_s) - self._fingerprint = hashutil.ssk_pubkey_fingerprint_hash(pubkey_s) + self._pubkey, self._privkey = keypair + self._writekey, self._encprivkey, self._fingerprint = await defer_to_thread( + derive_mutable_keys, keypair + ) if version == MDMF_VERSION: self._uri = WriteableMDMFFileURI(self._writekey, self._fingerprint) self._protocol_version = version @@ -155,7 +151,7 @@ class MutableFileNode(object): self._readkey = self._uri.readkey self._storage_index = self._uri.storage_index initial_contents = self._get_initial_contents(contents) - return self._upload(initial_contents, None) + return await self._upload(initial_contents, None) def _get_initial_contents(self, contents): if contents is None: @@ -171,16 +167,6 @@ class MutableFileNode(object): (contents, type(contents)) return contents(self) - def _encrypt_privkey(self, writekey, privkey): - encryptor = aes.create_encryptor(writekey) - crypttext = aes.encrypt_data(encryptor, privkey) - return crypttext - - def _decrypt_privkey(self, enc_privkey): - decryptor = aes.create_decryptor(self._writekey) - privkey = aes.decrypt_data(decryptor, enc_privkey) - return privkey - def _populate_pubkey(self, pubkey): self._pubkey = pubkey def _populate_required_shares(self, required_shares): diff --git a/src/allmydata/mutable/layout.py b/src/allmydata/mutable/layout.py index 8bb2f3083..b0a799f5d 100644 --- a/src/allmydata/mutable/layout.py +++ b/src/allmydata/mutable/layout.py 
@@ -1,16 +1,6 @@ """ Ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - # Omit dict so Python 3 changes don't leak into API callers on Python 2. - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, list, object, range, str, max, min # noqa: F401 -from past.utils import old_div import struct from allmydata.mutable.common import NeedMoreDataError, UnknownVersionError, \ @@ -268,7 +258,7 @@ class SDMFSlotWriteProxy(object): self._required_shares) assert expected_segment_size == segment_size - self._block_size = old_div(self._segment_size, self._required_shares) + self._block_size = self._segment_size // self._required_shares # This is meant to mimic how SDMF files were built before MDMF # entered the picture: we generate each share in its entirety, @@ -801,7 +791,7 @@ class MDMFSlotWriteProxy(object): # and also because it provides a useful amount of bounds checking. self._num_segments = mathutil.div_ceil(self._data_length, self._segment_size) - self._block_size = old_div(self._segment_size, self._required_shares) + self._block_size = self._segment_size // self._required_shares # We also calculate the share size, to help us with block # constraints later. 
tail_size = self._data_length % self._segment_size @@ -810,7 +800,7 @@ class MDMFSlotWriteProxy(object): else: self._tail_block_size = mathutil.next_multiple(tail_size, self._required_shares) - self._tail_block_size = old_div(self._tail_block_size, self._required_shares) + self._tail_block_size = self._tail_block_size // self._required_shares # We already know where the sharedata starts; right after the end # of the header (which is defined as the signable part + the offsets) @@ -1332,7 +1322,7 @@ class MDMFSlotReadProxy(object): self._segment_size = segsize self._data_length = datalen - self._block_size = old_div(self._segment_size, self._required_shares) + self._block_size = self._segment_size // self._required_shares # We can upload empty files, and need to account for this fact # so as to avoid zero-division and zero-modulo errors. if datalen > 0: @@ -1344,7 +1334,7 @@ class MDMFSlotReadProxy(object): else: self._tail_block_size = mathutil.next_multiple(tail_size, self._required_shares) - self._tail_block_size = old_div(self._tail_block_size, self._required_shares) + self._tail_block_size = self._tail_block_size // self._required_shares return encoding_parameters diff --git a/src/allmydata/mutable/publish.py b/src/allmydata/mutable/publish.py index a7bca6cba..ee9faeb2b 100644 --- a/src/allmydata/mutable/publish.py +++ b/src/allmydata/mutable/publish.py @@ -1,14 +1,6 @@ """ Ported to Python 3. 
""" -from __future__ import division -from __future__ import absolute_import -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 import os, time from io import BytesIO @@ -23,6 +15,8 @@ from allmydata.interfaces import IPublishStatus, SDMF_VERSION, MDMF_VERSION, \ IMutableUploadable from allmydata.util import base32, hashutil, mathutil, log from allmydata.util.dictutil import DictOfSets +from allmydata.util.deferredutil import async_to_deferred +from allmydata.util.cputhreadpool import defer_to_thread from allmydata import hashtree, codec from allmydata.storage.server import si_b2a from foolscap.api import eventually, fireEventually @@ -35,8 +29,13 @@ from allmydata.mutable.layout import get_version_from_checkstring,\ MDMFSlotWriteProxy, \ SDMFSlotWriteProxy +from eliot import ( + Message, + start_action, +) + KiB = 1024 -DEFAULT_MAX_SEGMENT_SIZE = 128 * KiB +DEFAULT_MUTABLE_MAX_SEGMENT_SIZE = 128 * KiB PUSHING_BLOCKS_STATE = 0 PUSHING_EVERYTHING_ELSE_STATE = 1 DONE_STATE = 2 @@ -367,7 +366,7 @@ class Publish(object): self.data = newdata self.datalength = newdata.get_size() - #if self.datalength >= DEFAULT_MAX_SEGMENT_SIZE: + #if self.datalength >= DEFAULT_MUTABLE_MAX_SEGMENT_SIZE: # self._version = MDMF_VERSION #else: # self._version = SDMF_VERSION @@ -551,7 +550,7 @@ class Publish(object): def setup_encoding_parameters(self, offset=0): if self._version == MDMF_VERSION: - segment_size = DEFAULT_MAX_SEGMENT_SIZE # 128 KiB by default + segment_size = DEFAULT_MUTABLE_MAX_SEGMENT_SIZE # 128 KiB by default else: segment_size = self.datalength # SDMF is only one segment # this must be a multiple of self.required_shares @@ -701,7 +700,8 @@ class Publish(object): writer.put_salt(salt) - def _encode_segment(self, segnum): + 
@async_to_deferred + async def _encode_segment(self, segnum): """ I encrypt and encode the segment segnum. """ @@ -721,13 +721,17 @@ class Publish(object): assert len(data) == segsize, len(data) - salt = os.urandom(16) - - key = hashutil.ssk_readkey_data_hash(salt, self.readkey) self._status.set_status("Encrypting") - encryptor = aes.create_encryptor(key) - crypttext = aes.encrypt_data(encryptor, data) - assert len(crypttext) == len(data) + + def encrypt(readkey): + salt = os.urandom(16) + key = hashutil.ssk_readkey_data_hash(salt, readkey) + encryptor = aes.create_encryptor(key) + crypttext = aes.encrypt_data(encryptor, data) + assert len(crypttext) == len(data) + return salt, crypttext + + salt, crypttext = await defer_to_thread(encrypt, self.readkey) now = time.time() self._status.accumulate_encrypt_time(now - started) @@ -748,16 +752,14 @@ class Publish(object): piece = piece + b"\x00"*(piece_size - len(piece)) # padding crypttext_pieces[i] = piece assert len(piece) == piece_size - d = fec.encode(crypttext_pieces) - def _done_encoding(res): - elapsed = time.time() - started - self._status.accumulate_encode_time(elapsed) - return (res, salt) - d.addCallback(_done_encoding) - return d + res = await fec.encode(crypttext_pieces) + elapsed = time.time() - started + self._status.accumulate_encode_time(elapsed) + return (res, salt) - def _push_segment(self, encoded_and_salt, segnum): + @async_to_deferred + async def _push_segment(self, encoded_and_salt, segnum): """ I push (data, salt) as segment number segnum. 
""" @@ -771,7 +773,7 @@ class Publish(object): hashed = salt + sharedata else: hashed = sharedata - block_hash = hashutil.block_hash(hashed) + block_hash = await defer_to_thread(hashutil.block_hash, hashed) self.blockhashes[shareid][segnum] = block_hash # find the writer for this share writers = self.writers[shareid] @@ -955,12 +957,31 @@ class Publish(object): old_assignments.add(server, shnum) serverlist = [] - for i, server in enumerate(self.full_serverlist): - serverid = server.get_serverid() - if server in self.bad_servers: - continue - entry = (len(old_assignments.get(server, [])), i, serverid, server) - serverlist.append(entry) + + action = start_action( + action_type=u"mutable:upload:update_goal", + homeless_shares=len(homeless_shares), + ) + with action: + for i, server in enumerate(self.full_serverlist): + serverid = server.get_serverid() + if server in self.bad_servers: + Message.log( + message_type=u"mutable:upload:bad-server", + server_id=serverid, + ) + continue + # if we have >= 1 grid-managers, this checks that we have + # a valid certificate for this server + if not server.upload_permitted(): + Message.log( + message_type=u"mutable:upload:no-gm-certs", + server_id=serverid, + ) + continue + + entry = (len(old_assignments.get(server, [])), i, serverid, server) + serverlist.append(entry) serverlist.sort() if not serverlist: diff --git a/src/allmydata/mutable/repairer.py b/src/allmydata/mutable/repairer.py index 23af02203..4d1df410b 100644 --- a/src/allmydata/mutable/repairer.py +++ b/src/allmydata/mutable/repairer.py @@ -1,14 +1,6 @@ """ Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 from zope.interface import implementer from twisted.internet import defer diff --git a/src/allmydata/mutable/retrieve.py b/src/allmydata/mutable/retrieve.py index 32aaa72e5..45d7766ee 100644 --- a/src/allmydata/mutable/retrieve.py +++ b/src/allmydata/mutable/retrieve.py @@ -1,19 +1,11 @@ """ Ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - # Don't import bytes and str, to prevent API leakage - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, dict, list, object, range, max, min # noqa: F401 +from __future__ import annotations import time - from itertools import count + from zope.interface import implementer from twisted.internet import defer from twisted.python import failure @@ -28,11 +20,12 @@ from allmydata.interfaces import IRetrieveStatus, NotEnoughSharesError, \ from allmydata.util.assertutil import _assert, precondition from allmydata.util import hashutil, log, mathutil, deferredutil from allmydata.util.dictutil import DictOfSets +from allmydata.util.cputhreadpool import defer_to_thread from allmydata import hashtree, codec from allmydata.storage.server import si_b2a from allmydata.mutable.common import CorruptShareError, BadShareError, \ - UncoordinatedWriteError + UncoordinatedWriteError, decrypt_privkey from allmydata.mutable.layout import MDMFSlotReadProxy @implementer(IRetrieveStatus) @@ -742,7 +735,8 @@ class Retrieve(object): return None - def _validate_block(self, 
results, segnum, reader, server, started): + @deferredutil.async_to_deferred + async def _validate_block(self, results, segnum, reader, server, started): """ I validate a block from one share on a remote server. """ @@ -775,9 +769,9 @@ class Retrieve(object): "block hash tree failure: %s" % e) if self._version == MDMF_VERSION: - blockhash = hashutil.block_hash(salt + block) + blockhash = await defer_to_thread(hashutil.block_hash, salt + block) else: - blockhash = hashutil.block_hash(block) + blockhash = await defer_to_thread(hashutil.block_hash, block) # If this works without an error, then validation is # successful. try: @@ -879,11 +873,20 @@ class Retrieve(object): shares = shares[:self._required_shares] self.log("decoding segment %d" % segnum) if segnum == self._num_segments - 1: - d = defer.maybeDeferred(self._tail_decoder.decode, shares, shareids) + d = self._tail_decoder.decode(shares, shareids) else: - d = defer.maybeDeferred(self._segment_decoder.decode, shares, shareids) - def _process(buffers): - segment = b"".join(buffers) + d = self._segment_decoder.decode(shares, shareids) + + # For larger shares, this can take a few milliseconds. As such, we want + # to unblock the event loop. In newer Python b"".join() will release + # the GIL: https://github.com/python/cpython/issues/80232 + @deferredutil.async_to_deferred + async def _got_buffers(buffers): + return await defer_to_thread(lambda: b"".join(buffers)) + + d.addCallback(_got_buffers) + + def _process(segment): self.log(format="now decoding segment %(segnum)s of %(numsegs)s", segnum=segnum, numsegs=self._num_segments, @@ -901,8 +904,8 @@ class Retrieve(object): d.addCallback(_process) return d - - def _decrypt_segment(self, segment_and_salt): + @deferredutil.async_to_deferred + async def _decrypt_segment(self, segment_and_salt): """ I take a single segment and its salt, and decrypt it. I return the plaintext of the segment that is in my argument. 
@@ -911,9 +914,14 @@ class Retrieve(object): self._set_current_status("decrypting") self.log("decrypting segment %d" % self._current_segment) started = time.time() - key = hashutil.ssk_readkey_data_hash(salt, self._node.get_readkey()) - decryptor = aes.create_decryptor(key) - plaintext = aes.decrypt_data(decryptor, segment) + readkey = self._node.get_readkey() + + def decrypt(): + key = hashutil.ssk_readkey_data_hash(salt, readkey) + decryptor = aes.create_decryptor(key) + return aes.decrypt_data(decryptor, segment) + + plaintext = await defer_to_thread(decrypt) self._status.accumulate_decrypt_time(time.time() - started) return plaintext @@ -929,11 +937,20 @@ class Retrieve(object): reason, ) + @deferredutil.async_to_deferred + async def _try_to_validate_privkey(self, enc_privkey, reader, server): + node_writekey = self._node.get_writekey() - def _try_to_validate_privkey(self, enc_privkey, reader, server): - alleged_privkey_s = self._node._decrypt_privkey(enc_privkey) - alleged_writekey = hashutil.ssk_writekey_hash(alleged_privkey_s) - if alleged_writekey != self._node.get_writekey(): + def get_privkey(): + alleged_privkey_s = decrypt_privkey(node_writekey, enc_privkey) + alleged_writekey = hashutil.ssk_writekey_hash(alleged_privkey_s) + if alleged_writekey != node_writekey: + return None + privkey, _ = rsa.create_signing_keypair_from_string(alleged_privkey_s) + return privkey + + privkey = await defer_to_thread(get_privkey) + if privkey is None: self.log("invalid privkey from %s shnum %d" % (reader, reader.shnum), level=log.WEIRD, umid="YIw4tA") @@ -950,7 +967,6 @@ class Retrieve(object): # it's good self.log("got valid privkey from shnum %d on reader %s" % (reader.shnum, reader)) - privkey, _ = rsa.create_signing_keypair_from_string(alleged_privkey_s) self._node._populate_encprivkey(enc_privkey) self._node._populate_privkey(privkey) self._need_privkey = False diff --git a/src/allmydata/mutable/servermap.py b/src/allmydata/mutable/servermap.py index 
211b1fc16..99aa85d24 100644 --- a/src/allmydata/mutable/servermap.py +++ b/src/allmydata/mutable/servermap.py @@ -1,16 +1,8 @@ """ Ported to Python 3. """ -from __future__ import print_function -from __future__ import absolute_import -from __future__ import division -from __future__ import unicode_literals +from __future__ import annotations -from future.utils import PY2 -if PY2: - # Doesn't import str to prevent API leakage on Python 2 - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, max, min # noqa: F401 -from past.builtins import unicode from six import ensure_str import sys, time, copy @@ -29,7 +21,7 @@ from allmydata.storage.server import si_b2a from allmydata.interfaces import IServermapUpdaterStatus from allmydata.mutable.common import MODE_CHECK, MODE_ANYTHING, MODE_WRITE, \ - MODE_READ, MODE_REPAIR, CorruptShareError + MODE_READ, MODE_REPAIR, CorruptShareError, decrypt_privkey from allmydata.mutable.layout import SIGNED_PREFIX_LENGTH, MDMFSlotReadProxy @implementer(IServermapUpdaterStatus) @@ -203,8 +195,8 @@ class ServerMap(object): (seqnum, root_hash, IV, segsize, datalength, k, N, prefix, offsets_tuple) = verinfo print("[%s]: sh#%d seq%d-%s %d-of-%d len%d" % - (unicode(server.get_name(), "utf-8"), shnum, - seqnum, unicode(base32.b2a(root_hash)[:4], "utf-8"), k, N, + (str(server.get_name(), "utf-8"), shnum, + seqnum, str(base32.b2a(root_hash)[:4], "utf-8"), k, N, datalength), file=out) if self._problems: print("%d PROBLEMS" % len(self._problems), file=out) @@ -276,7 +268,7 @@ class ServerMap(object): """Take a versionid, return a string that describes it.""" (seqnum, root_hash, IV, segsize, datalength, k, N, prefix, offsets_tuple) = verinfo - return "seq%d-%s" % (seqnum, unicode(base32.b2a(root_hash)[:4], "utf-8")) + return "seq%d-%s" % (seqnum, str(base32.b2a(root_hash)[:4], "utf-8")) def summarize_versions(self): """Return a string describing which versions we 
know about.""" @@ -824,7 +816,7 @@ class ServermapUpdater(object): def notify_server_corruption(self, server, shnum, reason): - if isinstance(reason, unicode): + if isinstance(reason, str): reason = reason.encode("utf-8") ss = server.get_storage_server() ss.advise_corrupt_share( @@ -879,7 +871,7 @@ class ServermapUpdater(object): # ok, it's a valid verinfo. Add it to the list of validated # versions. self.log(" found valid version %d-%s from %s-sh%d: %d-%d/%d/%d" - % (seqnum, unicode(base32.b2a(root_hash)[:4], "utf-8"), + % (seqnum, str(base32.b2a(root_hash)[:4], "utf-8"), ensure_str(server.get_name()), shnum, k, n, segsize, datalen), parent=lp) @@ -951,9 +943,10 @@ class ServermapUpdater(object): writekey stored in my node. If it is valid, then I set the privkey and encprivkey properties of the node. """ - alleged_privkey_s = self._node._decrypt_privkey(enc_privkey) + node_writekey = self._node.get_writekey() + alleged_privkey_s = decrypt_privkey(node_writekey, enc_privkey) alleged_writekey = hashutil.ssk_writekey_hash(alleged_privkey_s) - if alleged_writekey != self._node.get_writekey(): + if alleged_writekey != node_writekey: self.log("invalid privkey from %r shnum %d" % (server.get_name(), shnum), parent=lp, level=log.WEIRD, umid="aJVccw") diff --git a/src/allmydata/node.py b/src/allmydata/node.py index 5a6f8c66f..601f64b93 100644 --- a/src/allmydata/node.py +++ b/src/allmydata/node.py @@ -4,16 +4,11 @@ a node for Tahoe-LAFS. Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals +from __future__ import annotations -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 from six import ensure_str, ensure_text +import json import datetime import os.path import re @@ -22,11 +17,7 @@ import errno from base64 import b32decode, b32encode from errno import ENOENT, EPERM from warnings import warn - -try: - from typing import Union -except ImportError: - pass +from typing import Union, Iterable import attr @@ -55,6 +46,8 @@ from allmydata.util.yamlutil import ( from . import ( __full_version__, ) +from .protocol_switch import create_tub_with_https_support + def _common_valid_config(): return configutil.ValidConfiguration({ @@ -119,8 +112,8 @@ def formatTimeTahoeStyle(self, when): """ d = datetime.datetime.utcfromtimestamp(when) if d.microsecond: - return d.isoformat(ensure_str(" "))[:-3]+"Z" - return d.isoformat(ensure_str(" ")) + ".000Z" + return d.isoformat(" ")[:-3]+"Z" + return d.isoformat(" ") + ".000Z" PRIV_README = """ This directory contains files which contain private data for the Tahoe node, @@ -179,7 +172,7 @@ def create_node_dir(basedir, readme_text): f.write(readme_text) -def read_config(basedir, portnumfile, generated_files=[], _valid_config=None): +def read_config(basedir, portnumfile, generated_files: Iterable = (), _valid_config=None): """ Read and validate configuration. @@ -207,14 +200,14 @@ def read_config(basedir, portnumfile, generated_files=[], _valid_config=None): config_path = FilePath(basedir).child("tahoe.cfg") try: - config_str = config_path.getContent() + config_bytes = config_path.getContent() except EnvironmentError as e: if e.errno != errno.ENOENT: raise # The file is missing, just create empty ConfigParser. 
config_str = u"" else: - config_str = config_str.decode("utf-8-sig") + config_str = config_bytes.decode("utf-8-sig") return config_from_string( basedir, @@ -278,8 +271,7 @@ def _error_about_old_config_files(basedir, generated_files): raise e -def ensure_text_and_abspath_expanduser_unicode(basedir): - # type: (Union[bytes, str]) -> str +def ensure_text_and_abspath_expanduser_unicode(basedir: Union[bytes, str]) -> str: return abspath_expanduser_unicode(ensure_text(basedir)) @@ -348,6 +340,19 @@ class _Config(object): "Unable to write config file '{}'".format(fn), ) + def enumerate_section(self, section): + """ + returns a dict containing all items in a configuration section. an + empty dict is returned if the section doesn't exist. + """ + answer = dict() + try: + for k in self.config.options(section): + answer[k] = self.config.get(section, k) + except configparser.NoSectionError: + pass + return answer + def items(self, section, default=_None): try: return self.config.items(section) @@ -482,6 +487,12 @@ class _Config(object): """ returns an absolute path inside the 'private' directory with any extra args join()-ed + + This exists for historical reasons. New code should ideally + not call this because it makes it harder for e.g. a SQL-based + _Config object to exist. Code that needs to call this method + should probably be a _Config method itself. See + e.g. get_grid_manager_certificates() """ return os.path.join(self._basedir, "private", *args) @@ -489,6 +500,12 @@ class _Config(object): """ returns an absolute path inside the config directory with any extra args join()-ed + + This exists for historical reasons. New code should ideally + not call this because it makes it harder for e.g. a SQL-based + _Config object to exist. Code that needs to call this method + should probably be a _Config method itself. See + e.g. 
get_grid_manager_certificates() """ # note: we re-expand here (_basedir already went through this # expanduser function) in case the path we're being asked for @@ -497,6 +514,35 @@ class _Config(object): os.path.join(self._basedir, *args) ) + def get_grid_manager_certificates(self): + """ + Load all Grid Manager certificates in the config. + + :returns: A list of all certificates. An empty list is + returned if there are none. + """ + grid_manager_certificates = [] + + cert_fnames = list(self.enumerate_section("grid_manager_certificates").values()) + for fname in cert_fnames: + fname = self.get_config_path(fname) + if not os.path.exists(fname): + raise ValueError( + "Grid Manager certificate file '{}' doesn't exist".format( + fname + ) + ) + with open(fname, 'r') as f: + cert = json.load(f) + if set(cert.keys()) != {"certificate", "signature"}: + raise ValueError( + "Unknown key in Grid Manager certificate '{}'".format( + fname + ) + ) + grid_manager_certificates.append(cert) + return grid_manager_certificates + def get_introducer_configuration(self): """ Get configuration for introducers. @@ -695,7 +741,7 @@ def create_connection_handlers(config, i2p_provider, tor_provider): def create_tub(tub_options, default_connection_handlers, foolscap_connection_handlers, - handler_overrides={}, **kwargs): + handler_overrides=None, force_foolscap=False, **kwargs): """ Create a Tub with the right options and handlers. It will be ephemeral unless the caller provides certFile= in kwargs @@ -705,8 +751,19 @@ def create_tub(tub_options, default_connection_handlers, foolscap_connection_han :param dict tub_options: every key-value pair in here will be set in the new Tub via `Tub.setOption` + + :param bool force_foolscap: If True, only allow Foolscap, not just HTTPS + storage protocol. 
""" - tub = Tub(**kwargs) + if handler_overrides is None: + handler_overrides = {} + # We listen simultaneously for both Foolscap and HTTPS on the same port, + # so we have to create a special Foolscap Tub for that to work: + if force_foolscap: + tub = Tub(**kwargs) + else: + tub = create_tub_with_https_support(**kwargs) + for (name, value) in list(tub_options.items()): tub.setOption(name, value) handlers = default_connection_handlers.copy() @@ -867,7 +924,7 @@ def tub_listen_on(i2p_provider, tor_provider, tub, tubport, location): def create_main_tub(config, tub_options, default_connection_handlers, foolscap_connection_handlers, i2p_provider, tor_provider, - handler_overrides={}, cert_filename="node.pem"): + handler_overrides=None, cert_filename="node.pem"): """ Creates a 'main' Foolscap Tub, typically for use as the top-level access point for a running Node. @@ -888,6 +945,8 @@ def create_main_tub(config, tub_options, :param tor_provider: None, or a _Provider instance if txtorcon + Tor are installed. """ + if handler_overrides is None: + handler_overrides = {} portlocation = _tub_portlocation( config, iputil.get_local_addresses_sync, @@ -896,14 +955,17 @@ def create_main_tub(config, tub_options, # FIXME? "node.pem" was the CERTFILE option/thing certfile = config.get_private_path("node.pem") - tub = create_tub( tub_options, default_connection_handlers, foolscap_connection_handlers, + force_foolscap=config.get_config( + "storage", "force_foolscap", default=False, boolean=True + ), handler_overrides=handler_overrides, certFile=certfile, ) + if portlocation is None: log.msg("Tub is not listening") else: @@ -919,18 +981,6 @@ def create_main_tub(config, tub_options, return tub -def create_control_tub(): - """ - Creates a Foolscap Tub for use by the control port. 
This is a - localhost-only ephemeral Tub, with no control over the listening - port or location - """ - control_tub = Tub() - portnum = iputil.listenOnUnused(control_tub) - log.msg("Control Tub location set to 127.0.0.1:%s" % (portnum,)) - return control_tub - - class Node(service.MultiService): """ This class implements common functionality of both Client nodes and Introducer nodes. @@ -938,7 +988,7 @@ class Node(service.MultiService): NODETYPE = "unknown NODETYPE" CERTFILE = "node.pem" - def __init__(self, config, main_tub, control_tub, i2p_provider, tor_provider): + def __init__(self, config, main_tub, i2p_provider, tor_provider): """ Initialize the node with the given configuration. Its base directory is the current directory by default. @@ -967,10 +1017,6 @@ class Node(service.MultiService): else: self.nodeid = self.short_nodeid = None - self.control_tub = control_tub - if self.control_tub is not None: - self.control_tub.setServiceParent(self) - self.log("Node constructed. " + __full_version__) iputil.increase_rlimits() diff --git a/src/allmydata/nodemaker.py b/src/allmydata/nodemaker.py index 6b0b77c5c..6e8700cff 100644 --- a/src/allmydata/nodemaker.py +++ b/src/allmydata/nodemaker.py @@ -1,17 +1,12 @@ """ -Ported to Python 3. +Create file nodes of various types. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 +from __future__ import annotations import weakref from zope.interface import implementer +from twisted.internet.defer import succeed from allmydata.util.assertutil import precondition from allmydata.interfaces import INodeMaker from allmydata.immutable.literal import LiteralFileNode @@ -22,6 +17,7 @@ from allmydata.mutable.publish import MutableData from allmydata.dirnode import DirectoryNode, pack_children from allmydata.unknown import UnknownNode from allmydata.blacklist import ProhibitedNode +from allmydata.crypto.rsa import PublicKey, PrivateKey from allmydata import uri @@ -126,18 +122,28 @@ class NodeMaker(object): return self._create_dirnode(filenode) return None - def create_mutable_file(self, contents=None, keysize=None, version=None): + def create_mutable_file(self, contents=None, version=None, keypair: tuple[PublicKey, PrivateKey] | None = None): if version is None: version = self.mutable_file_default n = MutableFileNode(self.storage_broker, self.secret_holder, self.default_encoding_parameters, self.history) - d = self.key_generator.generate(keysize) + if keypair is None: + d = self.key_generator.generate() + else: + d = succeed(keypair) d.addCallback(n.create_with_keys, contents, version=version) d.addCallback(lambda res: n) return d - def create_new_mutable_directory(self, initial_children={}, version=None): - # initial_children must have metadata (i.e. 
{} instead of None) + def create_new_mutable_directory( + self, + initial_children=None, + version=None, + *, + keypair: tuple[PublicKey, PrivateKey] | None = None, + ): + if initial_children is None: + initial_children = {} for (name, (node, metadata)) in initial_children.items(): precondition(isinstance(metadata, dict), "create_new_mutable_directory requires metadata to be a dict, not None", metadata) @@ -145,7 +151,8 @@ class NodeMaker(object): d = self.create_mutable_file(lambda n: MutableData(pack_children(initial_children, n.get_writekey())), - version=version) + version=version, + keypair=keypair) d.addCallback(self._create_dirnode) return d diff --git a/src/allmydata/protocol_switch.py b/src/allmydata/protocol_switch.py new file mode 100644 index 000000000..3b3268b79 --- /dev/null +++ b/src/allmydata/protocol_switch.py @@ -0,0 +1,219 @@ +""" +Support for listening with both HTTPS and Foolscap on the same port. + +The goal is to make the transition from Foolscap to HTTPS-based protocols as +simple as possible, with no extra configuration needed. Listening on the same +port means a user upgrading Tahoe-LAFS will automatically get HTTPS working +with no additional changes. + +Using ``create_tub_with_https_support()`` creates a new ``Tub`` that has its +``negotiationClass`` modified to be a new subclass tied to that specific +``Tub`` instance. Calling ``tub.negotiationClass.add_storage_server(...)`` +then adds relevant information for a storage server once it becomes available +later in the configuration process.
+""" + +from __future__ import annotations + +from itertools import chain +from typing import cast + +from twisted.internet.protocol import Protocol +from twisted.internet.interfaces import IDelayedCall, IReactorFromThreads +from twisted.internet.ssl import CertificateOptions +from twisted.web.server import Site +from twisted.protocols.tls import TLSMemoryBIOFactory +from twisted.internet import reactor + +from hyperlink import DecodedURL +from foolscap.negotiate import Negotiation +from foolscap.api import Tub + +from .storage.http_server import HTTPServer, build_nurl +from .storage.server import StorageServer + + +class _PretendToBeNegotiation(type): + """ + Metaclass that allows ``_FoolscapOrHttps`` to pretend to be a + ``Negotiation`` instance, since Foolscap does some checks like + ``assert isinstance(protocol, tub.negotiationClass)`` in its internals, + and sometimes that ``protocol`` is a ``_FoolscapOrHttps`` instance, but + sometimes it's a ``Negotiation`` instance. + """ + + def __instancecheck__(self, instance): + return issubclass(instance.__class__, self) or isinstance(instance, Negotiation) + + +class _FoolscapOrHttps(Protocol, metaclass=_PretendToBeNegotiation): + """ + Based on initial query, decide whether we're talking Foolscap or HTTP. + + Additionally, pretends to be a ``foolscap.negotiate.Negotiation`` instance, + since these are created by Foolscap's ``Tub``, by setting this to be the + tub's ``negotiationClass``. + + Do not instantiate directly, use ``create_tub_with_https_support(...)`` + instead. The way this class works is that a new subclass is created for a + specific ``Tub`` instance. + """ + + # These are class attributes; they will be set by + # create_tub_with_https_support() and add_storage_server(). 
+ + # The Twisted HTTPS protocol factory wrapping the storage server HTTP API: + https_factory: TLSMemoryBIOFactory + # The tub that created us: + tub: Tub + + @classmethod + def add_storage_server( + cls, storage_server: StorageServer, swissnum: bytes + ) -> set[DecodedURL]: + """ + Update a ``_FoolscapOrHttps`` subclass for a specific ``Tub`` instance + with the class attributes it requires for a specific storage server. + + Returns the resulting NURLs. + """ + # We need to be a subclass: + assert cls != _FoolscapOrHttps + # The tub instance must already be set: + assert hasattr(cls, "tub") + assert isinstance(cls.tub, Tub) + + # Tub.myCertificate is a twisted.internet.ssl.PrivateCertificate + # instance. + certificate_options = CertificateOptions( + privateKey=cls.tub.myCertificate.privateKey.original, + certificate=cls.tub.myCertificate.original, + ) + + http_storage_server = HTTPServer(cast(IReactorFromThreads, reactor), storage_server, swissnum) + cls.https_factory = TLSMemoryBIOFactory( + certificate_options, + False, + Site(http_storage_server.get_resource()), + ) + + storage_nurls = set() + # Individual hints can be in the form + # "tcp:host:port,tcp:host:port,tcp:host:port". + for location_hint in chain.from_iterable( + hints.split(",") for hints in cls.tub.locationHints + ): + if location_hint.startswith("tcp:") or location_hint.startswith("tor:"): + scheme, hostname, port = location_hint.split(":") + if scheme == "tcp": + subscheme = None + else: + subscheme = "tor" + # If we're listening on Tor, the hostname needs to have an + # .onion TLD. + assert hostname.endswith(".onion") + # The I2P scheme is not yet supported by the HTTP client, so we + # don't want to generate a NURL that won't work.
This will be + # fixed in https://tahoe-lafs.org/trac/tahoe-lafs/ticket/4037 + port = int(port) + storage_nurls.add( + build_nurl( + hostname, + port, + str(swissnum, "ascii"), + cls.tub.myCertificate.original.to_cryptography(), + subscheme + ) + ) + + return storage_nurls + + def __init__(self, *args, **kwargs): + self._foolscap: Negotiation = Negotiation(*args, **kwargs) + + def __setattr__(self, name, value): + if name in {"_foolscap", "_buffer", "transport", "__class__", "_timeout"}: + object.__setattr__(self, name, value) + else: + setattr(self._foolscap, name, value) + + def __getattr__(self, name): + return getattr(self._foolscap, name) + + def _convert_to_negotiation(self): + """ + Convert self to a ``Negotiation`` instance. + """ + self.__class__ = Negotiation # type: ignore + self.__dict__ = self._foolscap.__dict__ + + def initClient(self, *args, **kwargs): + # After creation, a Negotiation instance either has initClient() or + # initServer() called. Since this is a client, we're never going to do + # HTTP, so we can immediately become a Negotiation instance. + assert not hasattr(self, "_buffer") + self._convert_to_negotiation() + return self.initClient(*args, **kwargs) + + def connectionMade(self): + self._buffer: bytes = b"" + self._timeout: IDelayedCall = reactor.callLater( + 30, self.transport.abortConnection + ) + + def connectionLost(self, reason): + if self._timeout.active(): + self._timeout.cancel() + + def dataReceived(self, data: bytes) -> None: + """Handle incoming data. + + Once we've decided which protocol we are, update self.__class__, at + which point all methods will be called on the new class. + """ + self._buffer += data + if len(self._buffer) < 8: + return + + # Check if it looks like a Foolscap request. If so, it can handle this + # and later data, otherwise assume HTTPS. 
+ self._timeout.cancel() + if self._buffer.startswith(b"GET /id/"): + # We're a Foolscap Negotiation server protocol instance: + transport = self.transport + buf = self._buffer + self._convert_to_negotiation() + self.makeConnection(transport) + self.dataReceived(buf) + return + else: + # We're a HTTPS protocol instance, serving the storage protocol: + assert self.transport is not None + protocol = self.https_factory.buildProtocol(self.transport.getPeer()) + protocol.makeConnection(self.transport) + protocol.dataReceived(self._buffer) + + # Update the factory so it knows we're transforming to a new + # protocol object (we'll do that next) + value = self.https_factory.protocols.pop(protocol) + self.https_factory.protocols[self] = value + + # Transform self into the TLS protocol 🪄 + self.__class__ = protocol.__class__ + self.__dict__ = protocol.__dict__ + + +def create_tub_with_https_support(**kwargs) -> Tub: + """ + Create a new Tub that also supports HTTPS. + + This involves creating a new protocol switch class for the specific ``Tub`` + instance. + """ + the_tub = Tub(**kwargs) + + class FoolscapOrHttpForTub(_FoolscapOrHttps): + tub = the_tub + + the_tub.negotiationClass = FoolscapOrHttpForTub # type: ignore + return the_tub diff --git a/src/allmydata/scripts/admin.py b/src/allmydata/scripts/admin.py index a9feed0dd..34f6e2eaf 100644 --- a/src/allmydata/scripts/admin.py +++ b/src/allmydata/scripts/admin.py @@ -1,24 +1,29 @@ """ Ported to Python 3. 
""" -from __future__ import unicode_literals -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 from six import ensure_binary -try: - from allmydata.scripts.types_ import SubCommands -except ImportError: - pass - from twisted.python import usage -from allmydata.scripts.common import BaseOptions +from twisted.python.filepath import ( + FilePath, +) +from allmydata.scripts.common import ( + BaseOptions, + BasedirOptions, +) +from allmydata.storage import ( + crawler, + expirer, +) +from allmydata.scripts.types_ import SubCommands +from allmydata.client import read_config +from allmydata.grid_manager import ( + parse_grid_manager_certificate, +) +from allmydata.scripts.cli import _default_nodedir +from allmydata.util.encodingutil import argv_to_abspath +from allmydata.util import jsonbytes class GenerateKeypairOptions(BaseOptions): @@ -65,12 +70,155 @@ def derive_pubkey(options): print("public:", str(ed25519.string_from_verifying_key(public_key), "ascii"), file=out) return 0 + +class MigrateCrawlerOptions(BasedirOptions): + + def getSynopsis(self): + return "Usage: tahoe [global-options] admin migrate-crawler" + + def getUsage(self, width=None): + t = BasedirOptions.getUsage(self, width) + t += ( + "The crawler data is now stored as JSON to avoid" + " potential security issues with pickle files.\n\nIf" + " you are confident the state files in the 'storage/'" + " subdirectory of your node are trustworthy, run this" + " command to upgrade them to JSON.\n\nThe files are:" + " lease_checker.history, lease_checker.state, and" + " bucket_counter.state" + ) + return t + + +class AddGridManagerCertOptions(BaseOptions): + """ + Options for add-grid-manager-cert + """ + + optParameters = [ + ['filename', 'f', 
None, "Filename of the certificate ('-', a dash, for stdin)"], + ['name', 'n', None, "Name to give this certificate"], + ] + + def getSynopsis(self): + return "Usage: tahoe [global-options] admin add-grid-manager-cert [options]" + + def postOptions(self) -> None: + assert self.parent is not None + assert self.parent.parent is not None + + if self['name'] is None: + raise usage.UsageError( + "Must provide --name option" + ) + if self['filename'] is None: + raise usage.UsageError( + "Must provide --filename option" + ) + + data: str + if self['filename'] == '-': + print("reading certificate from stdin", file=self.parent.parent.stderr) # type: ignore[attr-defined] + data = self.parent.parent.stdin.read() # type: ignore[attr-defined] + if len(data) == 0: + raise usage.UsageError( + "Reading certificate from stdin failed" + ) + else: + with open(self['filename'], 'r') as f: + data = f.read() + + try: + self.certificate_data = parse_grid_manager_certificate(data) + except ValueError as e: + raise usage.UsageError( + "Error parsing certificate: {}".format(e) + ) + + def getUsage(self, width=None): + t = BaseOptions.getUsage(self, width) + t += ( + "Adds a Grid Manager certificate to a Storage Server.\n\n" + "The certificate will be copied into the base-dir and config\n" + "will be added to 'tahoe.cfg', which will be re-written. 
A\n" + "restart is required for changes to take effect.\n\n" + "The human who operates a Grid Manager would produce such a\n" + "certificate and communicate it securely to you.\n" + ) + return t + + +def migrate_crawler(options): + out = options.stdout + storage = FilePath(options['basedir']).child("storage") + + conversions = [ + (storage.child("lease_checker.state"), crawler._convert_pickle_state_to_json), + (storage.child("bucket_counter.state"), crawler._convert_pickle_state_to_json), + (storage.child("lease_checker.history"), expirer._convert_pickle_state_to_json), + ] + + for fp, converter in conversions: + existed = fp.exists() + newfp = crawler._upgrade_pickle_to_json(fp, converter) + if existed: + print("Converted '{}' to '{}'".format(fp.path, newfp.path), file=out) + else: + if newfp.exists(): + print("Already converted: '{}'".format(newfp.path), file=out) + else: + print("Not found: '{}'".format(fp.path), file=out) + + +def add_grid_manager_cert(options): + """ + Add a new Grid Manager certificate to our config + """ + # XXX is there really not already a function for this? 
+ if options.parent.parent['node-directory']: + nd = argv_to_abspath(options.parent.parent['node-directory']) + else: + nd = _default_nodedir + + config = read_config(nd, "portnum") + cert_fname = "{}.cert".format(options['name']) + cert_path = FilePath(config.get_config_path(cert_fname)) + cert_bytes = jsonbytes.dumps_bytes(options.certificate_data, indent=4) + b'\n' + cert_name = options['name'] + + if cert_path.exists(): + msg = "Already have certificate for '{}' (at {})".format( + options['name'], + cert_path.path, + ) + print(msg, file=options.stderr) + return 1 + + config.set_config("storage", "grid_management", "True") + config.set_config("grid_manager_certificates", cert_name, cert_fname) + + # write all the data out + with cert_path.open("wb") as f: + f.write(cert_bytes) + + cert_count = len(config.enumerate_section("grid_manager_certificates")) + print("There are now {} certificates".format(cert_count), + file=options.stderr) + + return 0 + + class AdminCommand(BaseOptions): subCommands = [ ("generate-keypair", None, GenerateKeypairOptions, "Generate a public/private keypair, write to stdout."), ("derive-pubkey", None, DerivePubkeyOptions, "Derive a public key from a private key."), + ("migrate-crawler", None, MigrateCrawlerOptions, + "Write the crawler-history data as JSON."), + ("add-grid-manager-cert", None, AddGridManagerCertOptions, + "Add a Grid Manager-provided certificate to a storage " + "server's config."), ] def postOptions(self): if not hasattr(self, 'subOptions'): @@ -85,10 +233,14 @@ each subcommand. 
""" return t + subDispatch = { "generate-keypair": print_keypair, "derive-pubkey": derive_pubkey, - } + "migrate-crawler": migrate_crawler, + "add-grid-manager-cert": add_grid_manager_cert, +} + def do_admin(options): so = options.subOptions @@ -98,10 +250,10 @@ def do_admin(options): return f(so) -subCommands = [ +subCommands : SubCommands = [ ("admin", None, AdminCommand, "admin subcommands: use 'tahoe admin' for a list"), - ] # type: SubCommands + ] dispatch = { "admin": do_admin, - } +} diff --git a/src/allmydata/scripts/backupdb.py b/src/allmydata/scripts/backupdb.py index c7827e56e..45c2bc026 100644 --- a/src/allmydata/scripts/backupdb.py +++ b/src/allmydata/scripts/backupdb.py @@ -1,14 +1,6 @@ """ Ported to Python 3. """ -from __future__ import unicode_literals -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 import os.path, sys, time, random, stat diff --git a/src/allmydata/scripts/cli.py b/src/allmydata/scripts/cli.py index 55975b8c5..6e1f28d11 100644 --- a/src/allmydata/scripts/cli.py +++ b/src/allmydata/scripts/cli.py @@ -1,22 +1,10 @@ """ Ported to Python 3. 
""" -from __future__ import unicode_literals -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - import os.path, re, fnmatch -try: - from allmydata.scripts.types_ import SubCommands, Parameters -except ImportError: - pass +from allmydata.scripts.types_ import SubCommands, Parameters from twisted.python import usage from allmydata.scripts.common import get_aliases, get_default_nodedir, \ @@ -29,14 +17,14 @@ NODEURL_RE=re.compile("http(s?)://([^:]*)(:([1-9][0-9]*))?") _default_nodedir = get_default_nodedir() class FileStoreOptions(BaseOptions): - optParameters = [ + optParameters : Parameters = [ ["node-url", "u", None, "Specify the URL of the Tahoe gateway node, such as " "'http://127.0.0.1:3456'. " "This overrides the URL found in the --node-directory ."], ["dir-cap", None, None, "Specify which dirnode URI should be used as the 'tahoe' alias."] - ] # type: Parameters + ] def postOptions(self): self["quiet"] = self.parent["quiet"] @@ -180,10 +168,22 @@ class GetOptions(FileStoreOptions): class PutOptions(FileStoreOptions): optFlags = [ ("mutable", "m", "Create a mutable file instead of an immutable one (like --format=SDMF)"), - ] + ] + optParameters = [ ("format", None, None, "Create a file with the given format: SDMF and MDMF for mutable, CHK (default) for immutable. (case-insensitive)"), - ] + + ("private-key-path", None, None, + "***Warning*** " + "It is possible to use this option to spoil the normal security properties of mutable objects. " + "It is also possible to corrupt or destroy data with this option. " + "Most users will not need this option and can ignore it. 
" + "For mutables only, " + "this gives a file containing a PEM-encoded 2048 bit RSA private key to use as the signature key for the mutable. " + "The private key must be handled at least as strictly as the resulting capability string. " + "A single private key must not be used for more than one mutable." + ), + ] def parseArgs(self, arg1=None, arg2=None): # see Examples below @@ -472,7 +472,7 @@ class DeepCheckOptions(FileStoreOptions): (which must be a directory), like 'tahoe check' but for multiple files. Optionally repair any problems found.""" -subCommands = [ +subCommands : SubCommands = [ ("mkdir", None, MakeDirectoryOptions, "Create a new directory."), ("add-alias", None, AddAliasOptions, "Add a new alias cap."), ("create-alias", None, CreateAliasOptions, "Create a new alias cap."), @@ -491,7 +491,7 @@ subCommands = [ ("check", None, CheckOptions, "Check a single file or directory."), ("deep-check", None, DeepCheckOptions, "Check all files/directories reachable from a starting point."), ("status", None, TahoeStatusCommand, "Various status information."), - ] # type: SubCommands + ] def mkdir(options): from allmydata.scripts import tahoe_mkdir diff --git a/src/allmydata/scripts/common.py b/src/allmydata/scripts/common.py index 0a9ab8714..d6ca8556d 100644 --- a/src/allmydata/scripts/common.py +++ b/src/allmydata/scripts/common.py @@ -4,29 +4,13 @@ Ported to Python 3. 
""" -from __future__ import unicode_literals -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 -else: - from typing import Union - +from typing import Union, Optional import os, sys, textwrap import codecs from os.path import join import urllib.parse -try: - from typing import Optional - from .types_ import Parameters -except ImportError: - pass - from yaml import ( safe_dump, ) @@ -37,6 +21,8 @@ from allmydata.util.assertutil import precondition from allmydata.util.encodingutil import quote_output, \ quote_local_unicode_path, argv_to_abspath from allmydata.scripts.default_nodedir import _default_nodedir +from .types_ import Parameters + def get_default_nodedir(): return _default_nodedir @@ -59,7 +45,7 @@ class BaseOptions(usage.Options): def opt_version(self): raise usage.UsageError("--version not allowed on subcommands") - description = None # type: Optional[str] + description : Optional[str] = None description_unwrapped = None # type: Optional[str] def __str__(self): @@ -80,10 +66,10 @@ class BaseOptions(usage.Options): class BasedirOptions(BaseOptions): default_nodedir = _default_nodedir - optParameters = [ + optParameters : Parameters = [ ["basedir", "C", None, "Specify which Tahoe base directory should be used. [default: %s]" % quote_local_unicode_path(_default_nodedir)], - ] # type: Parameters + ] def parseArgs(self, basedir=None): # This finds the node-directory option correctly even if we are in a subcommand. 
@@ -141,7 +127,9 @@ def write_introducer(basedir, petname, furl): """ if isinstance(furl, bytes): furl = furl.decode("utf-8") - basedir.child(b"private").child(b"introducers.yaml").setContent( + private = basedir.child(b"private") + private.makedirs(ignoreExistingDirectory=True) + private.child(b"introducers.yaml").setContent( safe_dump({ "introducers": { petname: { @@ -281,9 +269,8 @@ def get_alias(aliases, path_unicode, default): quote_output(alias)) return uri.from_string_dirnode(aliases[alias]).to_string(), path[colon+1:] -def escape_path(path): - # type: (Union[str,bytes]) -> str - u""" +def escape_path(path: Union[str, bytes]) -> str: + """ Return path quoted to US-ASCII, valid URL characters. >>> path = u'/føö/bar/☃' @@ -300,9 +287,4 @@ def escape_path(path): ]), "ascii" ) - # Eventually (i.e. as part of Python 3 port) we want this to always return - # Unicode strings. However, to reduce diff sizes in the short term it'll - # return native string (i.e. bytes) on Python 2. - if PY2: - result = result.encode("ascii").__native__() return result diff --git a/src/allmydata/scripts/common_http.py b/src/allmydata/scripts/common_http.py index 95099a2eb..f138b9c07 100644 --- a/src/allmydata/scripts/common_http.py +++ b/src/allmydata/scripts/common_http.py @@ -1,19 +1,11 @@ """ -Ported to Python 3. +Blocking HTTP client APIs. 
""" -from __future__ import unicode_literals -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 import os from io import BytesIO -from six.moves import urllib, http_client -import six +from http import client as http_client +import urllib import allmydata # for __full_version__ from allmydata.util.encodingutil import quote_output @@ -51,7 +43,7 @@ class BadResponse(object): def do_http(method, url, body=b""): if isinstance(body, bytes): body = BytesIO(body) - elif isinstance(body, six.text_type): + elif isinstance(body, str): raise TypeError("do_http body must be a bytestring, not unicode") else: # We must give a Content-Length header to twisted.web, otherwise it @@ -61,10 +53,17 @@ def do_http(method, url, body=b""): assert body.seek assert body.read scheme, host, port, path = parse_url(url) + + # For testing purposes, allow setting a timeout on HTTP requests. If this + # ever become a user-facing feature, this should probably be a CLI option? 
+ timeout = os.environ.get("__TAHOE_CLI_HTTP_TIMEOUT", None) + if timeout is not None: + timeout = float(timeout) + if scheme == "http": - c = http_client.HTTPConnection(host, port) + c = http_client.HTTPConnection(host, port, timeout=timeout, blocksize=65536) elif scheme == "https": - c = http_client.HTTPSConnection(host, port) + c = http_client.HTTPSConnection(host, port, timeout=timeout, blocksize=65536) else: raise ValueError("unknown scheme '%s', need http or https" % scheme) c.putrequest(method, path) @@ -85,7 +84,7 @@ def do_http(method, url, body=b""): return BadResponse(url, err) while True: - data = body.read(8192) + data = body.read(65536) if not data: break c.send(data) @@ -94,16 +93,14 @@ def do_http(method, url, body=b""): def format_http_success(resp): - # ensure_text() shouldn't be necessary when Python 2 is dropped. return quote_output( - "%s %s" % (resp.status, six.ensure_text(resp.reason)), + "%s %s" % (resp.status, resp.reason), quotemarks=False) def format_http_error(msg, resp): - # ensure_text() shouldn't be necessary when Python 2 is dropped. 
return quote_output( - "%s: %s %s\n%s" % (msg, resp.status, six.ensure_text(resp.reason), - six.ensure_text(resp.read())), + "%s: %s %s\n%r" % (msg, resp.status, resp.reason, + resp.read()), quotemarks=False) def check_http_error(resp, stderr): diff --git a/src/allmydata/scripts/create_node.py b/src/allmydata/scripts/create_node.py index 4959ed391..4357abb49 100644 --- a/src/allmydata/scripts/create_node.py +++ b/src/allmydata/scripts/create_node.py @@ -1,25 +1,16 @@ -# Ported to Python 3 -from __future__ import print_function -from __future__ import absolute_import -from __future__ import division -from __future__ import unicode_literals +from __future__ import annotations -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 +from typing import Optional import io import os -try: - from allmydata.scripts.types_ import ( - SubCommands, - Parameters, - Flags, - ) -except ImportError: - pass +from allmydata.scripts.types_ import ( + SubCommands, + Parameters, + Flags, +) from twisted.internet import reactor, defer from twisted.python.usage import UsageError @@ -33,12 +24,40 @@ from allmydata.scripts.common import ( write_introducer, ) from allmydata.scripts.default_nodedir import _default_nodedir +from allmydata.util import dictutil from allmydata.util.assertutil import precondition from allmydata.util.encodingutil import listdir_unicode, argv_to_unicode, quote_local_unicode_path, get_io_encoding -from allmydata.util import fileutil, i2p_provider, iputil, tor_provider, jsonbytes as json -from wormhole import wormhole +i2p_provider: Listener +tor_provider: Listener +from allmydata.util import fileutil, i2p_provider, tor_provider, jsonbytes as json + +from ..listeners import ListenerConfig, Listener, TCPProvider, StaticProvider + +def _get_listeners() -> dict[str, Listener]: + """ + Get all of the kinds of listeners 
we might be able to use. + """ + return { + "tor": tor_provider, + "i2p": i2p_provider, + "tcp": TCPProvider(), + "none": StaticProvider( + available=True, + hide_ip=False, + config=defer.succeed(None), + # This is supposed to be an IAddressFamily but we have none for + # this kind of provider. We could implement new client and server + # endpoint types that always fail and pass an IAddressFamily here + # that uses those. Nothing would ever even ask for them (at + # least, yet), let alone try to use them, so that's a lot of extra + # work for no practical result so I'm not doing it now. + address=None, # type: ignore[arg-type] + ), + } + +_LISTENERS = _get_listeners() dummy_tac = """ import sys @@ -51,7 +70,7 @@ def write_tac(basedir, nodetype): fileutil.write(os.path.join(basedir, "tahoe-%s.tac" % (nodetype,)), dummy_tac) -WHERE_OPTS = [ +WHERE_OPTS : Parameters = [ ("location", None, None, "Server location to advertise (e.g. tcp:example.org:12345)"), ("port", None, None, @@ -60,29 +79,29 @@ WHERE_OPTS = [ "Hostname to automatically set --location/--port when --listen=tcp"), ("listen", None, "tcp", "Comma-separated list of listener types (tcp,tor,i2p,none)."), -] # type: Parameters +] -TOR_OPTS = [ +TOR_OPTS : Parameters = [ ("tor-control-port", None, None, "Tor's control port endpoint descriptor string (e.g. tcp:127.0.0.1:9051 or unix:/var/run/tor/control)"), ("tor-executable", None, None, "The 'tor' executable to run (default is to search $PATH)."), -] # type: Parameters +] -TOR_FLAGS = [ +TOR_FLAGS : Flags = [ ("tor-launch", None, "Launch a tor instead of connecting to a tor control port."), -] # type: Flags +] -I2P_OPTS = [ +I2P_OPTS : Parameters = [ ("i2p-sam-port", None, None, "I2P's SAM API port endpoint descriptor string (e.g. 
tcp:127.0.0.1:7656)"), ("i2p-executable", None, None, "(future) The 'i2prouter' executable to run (default is to search $PATH)."), -] # type: Parameters +] -I2P_FLAGS = [ +I2P_FLAGS : Flags = [ ("i2p-launch", None, "(future) Launch an I2P router instead of connecting to a SAM API port."), -] # type: Flags +] def validate_where_options(o): if o['listen'] == "none": @@ -115,8 +134,11 @@ def validate_where_options(o): if o['listen'] != "none" and o.get('join', None) is None: listeners = o['listen'].split(",") for l in listeners: - if l not in ["tcp", "tor", "i2p"]: - raise UsageError("--listen= must be none, or one/some of: tcp, tor, i2p") + if l not in _LISTENERS: + raise UsageError( + "--listen= must be one/some of: " + f"{', '.join(sorted(_LISTENERS))}", + ) if 'tcp' in listeners and not o['hostname']: raise UsageError("--listen=tcp requires --hostname=") if 'tcp' not in listeners and o['hostname']: @@ -125,7 +147,7 @@ def validate_where_options(o): def validate_tor_options(o): use_tor = "tor" in o["listen"].split(",") if use_tor or any((o["tor-launch"], o["tor-control-port"])): - if tor_provider._import_txtorcon() is None: + if not _LISTENERS["tor"].is_available(): raise UsageError( "Specifying any Tor options requires the 'txtorcon' module" ) @@ -140,7 +162,7 @@ def validate_tor_options(o): def validate_i2p_options(o): use_i2p = "i2p" in o["listen"].split(",") if use_i2p or any((o["i2p-launch"], o["i2p-sam-port"])): - if i2p_provider._import_txi2p() is None: + if not _LISTENERS["i2p"].is_available(): raise UsageError( "Specifying any I2P options requires the 'txi2p' module" ) @@ -162,11 +184,17 @@ class _CreateBaseOptions(BasedirOptions): def postOptions(self): super(_CreateBaseOptions, self).postOptions() if self['hide-ip']: - if tor_provider._import_txtorcon() is None and i2p_provider._import_txi2p() is None: + ip_hiders = dictutil.filter(lambda v: v.can_hide_ip(), _LISTENERS) + available = dictutil.filter(lambda v: v.is_available(), ip_hiders) + if not 
available: raise UsageError( - "--hide-ip was specified but neither 'txtorcon' nor 'txi2p' " - "are installed.\nTo do so:\n pip install tahoe-lafs[tor]\nor\n" - " pip install tahoe-lafs[i2p]" + "--hide-ip was specified but no IP-hiding listener is installed.\n" + "Try one of these:\n" + + "".join([ + f"\tpip install tahoe-lafs[{name}]\n" + for name + in ip_hiders + ]) ) class CreateClientOptions(_CreateBaseOptions): @@ -235,8 +263,34 @@ class CreateIntroducerOptions(NoDefaultBasedirOptions): validate_i2p_options(self) -@defer.inlineCallbacks -def write_node_config(c, config): +def merge_config( + left: Optional[ListenerConfig], + right: Optional[ListenerConfig], +) -> Optional[ListenerConfig]: + """ + Merge two listener configurations into one configuration representing + both of them. + + If either is ``None`` then the result is ``None``. This supports the + "disable listeners" functionality. + + :raise ValueError: If the keys in the node configs overlap. + """ + if left is None or right is None: + return None + + overlap = set(left.node_config) & set(right.node_config) + if overlap: + raise ValueError(f"Node configs overlap: {overlap}") + + return ListenerConfig( + list(left.tub_ports) + list(right.tub_ports), + list(left.tub_locations) + list(right.tub_locations), + dict(list(left.node_config.items()) + list(right.node_config.items())), + ) + + +async def write_node_config(c, config): # this is shared between clients and introducers c.write("# -*- mode: conf; coding: {c.encoding} -*-\n".format(c=c)) c.write("\n") @@ -249,9 +303,10 @@ def write_node_config(c, config): if config["hide-ip"]: c.write("[connections]\n") - if tor_provider._import_txtorcon(): + if _LISTENERS["tor"].is_available(): c.write("tcp = tor\n") else: + # XXX What about i2p? 
c.write("tcp = disabled\n") c.write("\n") @@ -270,38 +325,23 @@ def write_node_config(c, config): c.write("web.port = %s\n" % (webport,)) c.write("web.static = public_html\n") - listeners = config['listen'].split(",") + listener_config = ListenerConfig([], [], {}) + for listener_name in config['listen'].split(","): + listener = _LISTENERS[listener_name] + listener_config = merge_config( + (await listener.create_config(reactor, config)), + listener_config, + ) - tor_config = {} - i2p_config = {} - tub_ports = [] - tub_locations = [] - if listeners == ["none"]: - c.write("tub.port = disabled\n") - c.write("tub.location = disabled\n") + if listener_config is None: + tub_ports = ["disabled"] + tub_locations = ["disabled"] else: - if "tor" in listeners: - (tor_config, tor_port, tor_location) = \ - yield tor_provider.create_config(reactor, config) - tub_ports.append(tor_port) - tub_locations.append(tor_location) - if "i2p" in listeners: - (i2p_config, i2p_port, i2p_location) = \ - yield i2p_provider.create_config(reactor, config) - tub_ports.append(i2p_port) - tub_locations.append(i2p_location) - if "tcp" in listeners: - if config["port"]: # --port/--location are a pair - tub_ports.append(config["port"]) - tub_locations.append(config["location"]) - else: - assert "hostname" in config - hostname = config["hostname"] - new_port = iputil.allocate_tcp_port() - tub_ports.append("tcp:%s" % new_port) - tub_locations.append("tcp:%s:%s" % (hostname, new_port)) - c.write("tub.port = %s\n" % ",".join(tub_ports)) - c.write("tub.location = %s\n" % ",".join(tub_locations)) + tub_ports = listener_config.tub_ports + tub_locations = listener_config.tub_locations + + c.write("tub.port = %s\n" % ",".join(tub_ports)) + c.write("tub.location = %s\n" % ",".join(tub_locations)) c.write("\n") c.write("#log_gatherer.furl =\n") @@ -311,17 +351,12 @@ def write_node_config(c, config): c.write("#ssh.authorized_keys_file = ~/.ssh/authorized_keys\n") c.write("\n") - if tor_config: - c.write("[tor]\n") 
- for key, value in list(tor_config.items()): - c.write("%s = %s\n" % (key, value)) - c.write("\n") - - if i2p_config: - c.write("[i2p]\n") - for key, value in list(i2p_config.items()): - c.write("%s = %s\n" % (key, value)) - c.write("\n") + if listener_config is not None: + for section, items in listener_config.node_config.items(): + c.write(f"[{section}]\n") + for k, v in items: + c.write(f"{k} = {v}\n") + c.write("\n") def write_client_config(c, config): @@ -377,7 +412,7 @@ def _get_config_via_wormhole(config): relay_url = config.parent['wormhole-server'] print("Connecting to '{}'".format(relay_url), file=out) - wh = wormhole.create( + wh = config.parent.wormhole.create( appid=config.parent['wormhole-invite-appid'], relay_url=relay_url, reactor=reactor, @@ -462,7 +497,7 @@ def create_node(config): fileutil.make_dirs(os.path.join(basedir, "private"), 0o700) cfg_name = os.path.join(basedir, "tahoe.cfg") with io.open(cfg_name, "w", encoding='utf-8') as c: - yield write_node_config(c, config) + yield defer.Deferred.fromCoroutine(write_node_config(c, config)) write_client_config(c, config) print("Node created in %s" % quote_local_unicode_path(basedir), file=out) @@ -505,17 +540,17 @@ def create_introducer(config): fileutil.make_dirs(os.path.join(basedir, "private"), 0o700) cfg_name = os.path.join(basedir, "tahoe.cfg") with io.open(cfg_name, "w", encoding='utf-8') as c: - yield write_node_config(c, config) + yield defer.Deferred.fromCoroutine(write_node_config(c, config)) print("Introducer created in %s" % quote_local_unicode_path(basedir), file=out) defer.returnValue(0) -subCommands = [ +subCommands : SubCommands = [ ("create-node", None, CreateNodeOptions, "Create a node that acts as a client, server or both."), ("create-client", None, CreateClientOptions, "Create a client node (with storage initially disabled)."), ("create-introducer", None, CreateIntroducerOptions, "Create an introducer node."), -] # type: SubCommands +] dispatch = { "create-node": create_node, 
diff --git a/src/allmydata/scripts/debug.py b/src/allmydata/scripts/debug.py index 2d6ba4602..b6eba842a 100644 --- a/src/allmydata/scripts/debug.py +++ b/src/allmydata/scripts/debug.py @@ -1,29 +1,26 @@ """ Ported to Python 3. """ -from __future__ import unicode_literals -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from future.utils import PY2, bchr -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 +from future.utils import bchr -try: - from allmydata.scripts.types_ import SubCommands -except ImportError: - pass - - -# do not import any allmydata modules at this level. Do that from inside -# individual functions instead. import struct, time, os, sys + from twisted.python import usage, failure from twisted.internet import defer from foolscap.logging import cli as foolscap_cli -from allmydata.scripts.common import BaseOptions +from allmydata.scripts.common import BaseOptions +from allmydata import uri +from allmydata.storage.mutable import MutableShareFile +from allmydata.storage.immutable import ShareFile +from allmydata.mutable.layout import unpack_share +from allmydata.mutable.layout import MDMFSlotReadProxy +from allmydata.mutable.common import NeedMoreDataError +from allmydata.immutable.layout import ReadBucketProxy +from allmydata.util import base32 +from allmydata.util.encodingutil import quote_output +from allmydata.scripts.types_ import SubCommands class DumpOptions(BaseOptions): def getSynopsis(self): @@ -56,13 +53,11 @@ def dump_share(options): # check the version, to see if we have a mutable or immutable share print("share filename: %s" % quote_output(options['filename']), file=out) - f = open(options['filename'], "rb") - prefix = f.read(32) - f.close() - if prefix == MutableShareFile.MAGIC: - return dump_mutable_share(options) - # otherwise assume it's immutable 
- return dump_immutable_share(options) + with open(options['filename'], "rb") as f: + if MutableShareFile.is_valid_header(f.read(32)): + return dump_mutable_share(options) + # otherwise assume it's immutable + return dump_immutable_share(options) def dump_immutable_share(options): from allmydata.storage.immutable import ShareFile @@ -170,7 +165,7 @@ def dump_immutable_lease_info(f, out): leases = list(f.get_leases()) if leases: for i,lease in enumerate(leases): - when = format_expiration_time(lease.expiration_time) + when = format_expiration_time(lease.get_expiration_time()) print(" Lease #%d: owner=%d, expire in %s" \ % (i, lease.owner_num, when), file=out) else: @@ -223,10 +218,10 @@ def dump_mutable_share(options): print(file=out) print(" Lease #%d:" % leasenum, file=out) print(" ownerid: %d" % lease.owner_num, file=out) - when = format_expiration_time(lease.expiration_time) + when = format_expiration_time(lease.get_expiration_time()) print(" expires in %s" % when, file=out) - print(" renew_secret: %s" % str(base32.b2a(lease.renew_secret), "utf-8"), file=out) - print(" cancel_secret: %s" % str(base32.b2a(lease.cancel_secret), "utf-8"), file=out) + print(" renew_secret: %s" % lease.present_renew_secret(), file=out) + print(" cancel_secret: %s" % lease.present_cancel_secret(), file=out) print(" secrets are for nodeid: %s" % idlib.nodeid_b2a(lease.nodeid), file=out) else: print("No leases.", file=out) @@ -712,125 +707,122 @@ def call(c, *args, **kwargs): return results[0] def describe_share(abs_sharefile, si_s, shnum_s, now, out): - from allmydata import uri - from allmydata.storage.mutable import MutableShareFile - from allmydata.storage.immutable import ShareFile - from allmydata.mutable.layout import unpack_share - from allmydata.mutable.common import NeedMoreDataError - from allmydata.immutable.layout import ReadBucketProxy - from allmydata.util import base32 - from allmydata.util.encodingutil import quote_output - import struct - - f = open(abs_sharefile, 
"rb") - prefix = f.read(32) - - if prefix == MutableShareFile.MAGIC: - # mutable share - m = MutableShareFile(abs_sharefile) - WE, nodeid = m._read_write_enabler_and_nodeid(f) - data_length = m._read_data_length(f) - expiration_time = min( [lease.expiration_time - for (i,lease) in m._enumerate_leases(f)] ) - expiration = max(0, expiration_time - now) - - share_type = "unknown" - f.seek(m.DATA_OFFSET) - version = f.read(1) - if version == b"\x00": - # this slot contains an SMDF share - share_type = "SDMF" - elif version == b"\x01": - share_type = "MDMF" - - if share_type == "SDMF": - f.seek(m.DATA_OFFSET) - data = f.read(min(data_length, 2000)) - - try: - pieces = unpack_share(data) - except NeedMoreDataError as e: - # retry once with the larger size - size = e.needed_bytes - f.seek(m.DATA_OFFSET) - data = f.read(min(data_length, size)) - pieces = unpack_share(data) - (seqnum, root_hash, IV, k, N, segsize, datalen, - pubkey, signature, share_hash_chain, block_hash_tree, - share_data, enc_privkey) = pieces - - print("SDMF %s %d/%d %d #%d:%s %d %s" % \ - (si_s, k, N, datalen, - seqnum, str(base32.b2a(root_hash), "utf-8"), - expiration, quote_output(abs_sharefile)), file=out) - elif share_type == "MDMF": - from allmydata.mutable.layout import MDMFSlotReadProxy - fake_shnum = 0 - # TODO: factor this out with dump_MDMF_share() - class ShareDumper(MDMFSlotReadProxy): - def _read(self, readvs, force_remote=False, queue=False): - data = [] - for (where,length) in readvs: - f.seek(m.DATA_OFFSET+where) - data.append(f.read(length)) - return defer.succeed({fake_shnum: data}) - - p = ShareDumper(None, "fake-si", fake_shnum) - def extract(func): - stash = [] - # these methods return Deferreds, but we happen to know that - # they run synchronously when not actually talking to a - # remote server - d = func() - d.addCallback(stash.append) - return stash[0] - - verinfo = extract(p.get_verinfo) - (seqnum, root_hash, salt_to_use, segsize, datalen, k, N, prefix, - offsets) = verinfo - 
print("MDMF %s %d/%d %d #%d:%s %d %s" % \ - (si_s, k, N, datalen, - seqnum, str(base32.b2a(root_hash), "utf-8"), - expiration, quote_output(abs_sharefile)), file=out) + with open(abs_sharefile, "rb") as f: + prefix = f.read(32) + if MutableShareFile.is_valid_header(prefix): + _describe_mutable_share(abs_sharefile, f, now, si_s, out) + elif ShareFile.is_valid_header(prefix): + _describe_immutable_share(abs_sharefile, now, si_s, out) else: - print("UNKNOWN mutable %s" % quote_output(abs_sharefile), file=out) + print("UNKNOWN really-unknown %s" % quote_output(abs_sharefile), file=out) - elif struct.unpack(">L", prefix[:4]) == (1,): - # immutable +def _describe_mutable_share(abs_sharefile, f, now, si_s, out): + # mutable share + m = MutableShareFile(abs_sharefile) + WE, nodeid = m._read_write_enabler_and_nodeid(f) + data_length = m._read_data_length(f) + expiration_time = min( [lease.get_expiration_time() + for (i,lease) in m._enumerate_leases(f)] ) + expiration = max(0, expiration_time - now) - class ImmediateReadBucketProxy(ReadBucketProxy): - def __init__(self, sf): - self.sf = sf - ReadBucketProxy.__init__(self, None, None, "") - def __repr__(self): - return "" - def _read(self, offset, size): - return defer.succeed(sf.read_share_data(offset, size)) + share_type = "unknown" + f.seek(m.DATA_OFFSET) + version = f.read(1) + if version == b"\x00": + # this slot contains an SMDF share + share_type = "SDMF" + elif version == b"\x01": + share_type = "MDMF" - # use a ReadBucketProxy to parse the bucket and find the uri extension - sf = ShareFile(abs_sharefile) - bp = ImmediateReadBucketProxy(sf) + if share_type == "SDMF": + f.seek(m.DATA_OFFSET) - expiration_time = min( [lease.expiration_time - for lease in sf.get_leases()] ) - expiration = max(0, expiration_time - now) + # Read at least the mutable header length, if possible. 
If there's + # less data than that in the share, don't try to read more (we won't + # be able to unpack the header in this case but we surely don't want + # to try to unpack bytes *following* the data section as if they were + # header data). Rather than 2000 we could use HEADER_LENGTH from + # allmydata/mutable/layout.py, probably. + data = f.read(min(data_length, 2000)) - UEB_data = call(bp.get_uri_extension) - unpacked = uri.unpack_extension_readable(UEB_data) + try: + pieces = unpack_share(data) + except NeedMoreDataError as e: + # retry once with the larger size + size = e.needed_bytes + f.seek(m.DATA_OFFSET) + data = f.read(min(data_length, size)) + pieces = unpack_share(data) + (seqnum, root_hash, IV, k, N, segsize, datalen, + pubkey, signature, share_hash_chain, block_hash_tree, + share_data, enc_privkey) = pieces - k = unpacked["needed_shares"] - N = unpacked["total_shares"] - filesize = unpacked["size"] - ueb_hash = unpacked["UEB_hash"] + print("SDMF %s %d/%d %d #%d:%s %d %s" % \ + (si_s, k, N, datalen, + seqnum, str(base32.b2a(root_hash), "utf-8"), + expiration, quote_output(abs_sharefile)), file=out) + elif share_type == "MDMF": + fake_shnum = 0 + # TODO: factor this out with dump_MDMF_share() + class ShareDumper(MDMFSlotReadProxy): + def _read(self, readvs, force_remote=False, queue=False): + data = [] + for (where,length) in readvs: + f.seek(m.DATA_OFFSET+where) + data.append(f.read(length)) + return defer.succeed({fake_shnum: data}) - print("CHK %s %d/%d %d %s %d %s" % (si_s, k, N, filesize, - str(ueb_hash, "utf-8"), expiration, - quote_output(abs_sharefile)), file=out) + p = ShareDumper(None, "fake-si", fake_shnum) + def extract(func): + stash = [] + # these methods return Deferreds, but we happen to know that + # they run synchronously when not actually talking to a + # remote server + d = func() + d.addCallback(stash.append) + return stash[0] + verinfo = extract(p.get_verinfo) + (seqnum, root_hash, salt_to_use, segsize, datalen, k, N, prefix, + 
offsets) = verinfo + print("MDMF %s %d/%d %d #%d:%s %d %s" % \ + (si_s, k, N, datalen, + seqnum, str(base32.b2a(root_hash), "utf-8"), + expiration, quote_output(abs_sharefile)), file=out) else: - print("UNKNOWN really-unknown %s" % quote_output(abs_sharefile), file=out) + print("UNKNOWN mutable %s" % quote_output(abs_sharefile), file=out) + + +def _describe_immutable_share(abs_sharefile, now, si_s, out): + class ImmediateReadBucketProxy(ReadBucketProxy): + def __init__(self, sf): + self.sf = sf + ReadBucketProxy.__init__(self, None, None, "") + def __repr__(self): + return "" + def _read(self, offset, size): + return defer.succeed(sf.read_share_data(offset, size)) + + # use a ReadBucketProxy to parse the bucket and find the uri extension + sf = ShareFile(abs_sharefile) + bp = ImmediateReadBucketProxy(sf) + + expiration_time = min(lease.get_expiration_time() + for lease in sf.get_leases()) + expiration = max(0, expiration_time - now) + + UEB_data = call(bp.get_uri_extension) + unpacked = uri.unpack_extension_readable(UEB_data) + + k = unpacked["needed_shares"] + N = unpacked["total_shares"] + filesize = unpacked["size"] + ueb_hash = unpacked["UEB_hash"] + + print("CHK %s %d/%d %d %s %d %s" % (si_s, k, N, filesize, + str(ueb_hash, "utf-8"), expiration, + quote_output(abs_sharefile)), file=out) - f.close() def catalog_shares(options): from allmydata.util.encodingutil import listdir_unicode, quote_output @@ -933,34 +925,35 @@ def corrupt_share(options): f.write(d) f.close() - f = open(fn, "rb") - prefix = f.read(32) - f.close() - if prefix == MutableShareFile.MAGIC: - # mutable - m = MutableShareFile(fn) - f = open(fn, "rb") - f.seek(m.DATA_OFFSET) - data = f.read(2000) - # make sure this slot contains an SMDF share - assert data[0:1] == b"\x00", "non-SDMF mutable shares not supported" - f.close() + with open(fn, "rb") as f: + prefix = f.read(32) - (version, ig_seqnum, ig_roothash, ig_IV, ig_k, ig_N, ig_segsize, - ig_datalen, offsets) = unpack_header(data) + if 
MutableShareFile.is_valid_header(prefix): + # mutable + m = MutableShareFile(fn) + with open(fn, "rb") as f: + f.seek(m.DATA_OFFSET) + # Read enough data to get a mutable header to unpack. + data = f.read(2000) + # make sure this slot contains an SMDF share + assert data[0:1] == b"\x00", "non-SDMF mutable shares not supported" + f.close() - assert version == 0, "we only handle v0 SDMF files" - start = m.DATA_OFFSET + offsets["share_data"] - end = m.DATA_OFFSET + offsets["enc_privkey"] - flip_bit(start, end) - else: - # otherwise assume it's immutable - f = ShareFile(fn) - bp = ReadBucketProxy(None, None, '') - offsets = bp._parse_offsets(f.read_share_data(0, 0x24)) - start = f._data_offset + offsets["data"] - end = f._data_offset + offsets["plaintext_hash_tree"] - flip_bit(start, end) + (version, ig_seqnum, ig_roothash, ig_IV, ig_k, ig_N, ig_segsize, + ig_datalen, offsets) = unpack_header(data) + + assert version == 0, "we only handle v0 SDMF files" + start = m.DATA_OFFSET + offsets["share_data"] + end = m.DATA_OFFSET + offsets["enc_privkey"] + flip_bit(start, end) + else: + # otherwise assume it's immutable + f = ShareFile(fn) + bp = ReadBucketProxy(None, None, '') + offsets = bp._parse_offsets(f.read_share_data(0, 0x24)) + start = f._data_offset + offsets["data"] + end = f._data_offset + offsets["plaintext_hash_tree"] + flip_bit(start, end) @@ -1073,9 +1066,9 @@ def do_debug(options): return f(so) -subCommands = [ +subCommands : SubCommands = [ ("debug", None, DebugCommand, "debug subcommands: use 'tahoe debug' for a list."), - ] # type: SubCommands + ] dispatch = { "debug": do_debug, diff --git a/src/allmydata/scripts/default_nodedir.py b/src/allmydata/scripts/default_nodedir.py index 00924b8f9..fff120140 100644 --- a/src/allmydata/scripts/default_nodedir.py +++ b/src/allmydata/scripts/default_nodedir.py @@ -2,17 +2,7 @@ Ported to Python 3. 
""" -from __future__ import unicode_literals -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - import sys -import six from allmydata.util.assertutil import precondition from allmydata.util.fileutil import abspath_expanduser_unicode @@ -22,10 +12,10 @@ if sys.platform == 'win32': from allmydata.windows import registry path = registry.get_base_dir_path() if path: - precondition(isinstance(path, six.text_type), path) + precondition(isinstance(path, str), path) _default_nodedir = abspath_expanduser_unicode(path) if _default_nodedir is None: - path = abspath_expanduser_unicode(u"~/.tahoe") - precondition(isinstance(path, six.text_type), path) + path = abspath_expanduser_unicode("~/.tahoe") + precondition(isinstance(path, str), path) _default_nodedir = path diff --git a/src/allmydata/scripts/runner.py b/src/allmydata/scripts/runner.py index 145ee6464..16f43e9d8 100644 --- a/src/allmydata/scripts/runner.py +++ b/src/allmydata/scripts/runner.py @@ -1,28 +1,14 @@ -from __future__ import print_function -from __future__ import absolute_import -from __future__ import division -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - import os, sys -from six.moves import StringIO -from past.builtins import unicode +from io import StringIO import six -try: - from allmydata.scripts.types_ import SubCommands -except ImportError: - pass - from twisted.python import usage from twisted.internet import defer, task, threads from allmydata.scripts.common import get_default_nodedir from allmydata.scripts import debug, 
create_node, cli, \ admin, tahoe_run, tahoe_invite +from allmydata.scripts.types_ import SubCommands from allmydata.util.encodingutil import quote_local_unicode_path, argv_to_unicode from allmydata.util.eliotutil import ( opt_eliot_destination, @@ -47,22 +33,23 @@ if _default_nodedir: NODEDIR_HELP += " [default for most commands: " + quote_local_unicode_path(_default_nodedir) + "]" -# XXX all this 'dispatch' stuff needs to be unified + fixed up -_control_node_dispatch = { - "run": tahoe_run.run, -} - -process_control_commands = [ +process_control_commands : SubCommands = [ ("run", None, tahoe_run.RunOptions, "run a node without daemonizing"), -] # type: SubCommands +] class Options(usage.Options): + """ + :ivar wormhole: An object exposing the magic-wormhole API (mainly a test + hook). + """ # unit tests can override these to point at StringIO instances stdin = sys.stdin stdout = sys.stdout stderr = sys.stderr + from wormhole import wormhole + subCommands = ( create_node.subCommands + admin.subCommands + process_control_commands @@ -78,8 +65,8 @@ class Options(usage.Options): ] optParameters = [ ["node-directory", "d", None, NODEDIR_HELP], - ["wormhole-server", None, u"ws://wormhole.tahoe-lafs.org:4000/v1", "The magic wormhole server to use.", six.text_type], - ["wormhole-invite-appid", None, u"tahoe-lafs.org/invite", "The appid to use on the wormhole server.", six.text_type], + ["wormhole-server", None, u"ws://wormhole.tahoe-lafs.org:4000/v1", "The magic wormhole server to use.", str], + ["wormhole-invite-appid", None, u"tahoe-lafs.org/invite", "The appid to use on the wormhole server.", str], ] def opt_version(self): @@ -118,28 +105,7 @@ def parse_options(argv, config=None): config = Options() try: config.parseOptions(argv) - except usage.error as e: - if six.PY2: - # On Python 2 the exception may hold non-ascii in a byte string. - # This makes it impossible to convert the exception to any kind of - # string using str() or unicode(). 
It could also hold non-ascii - # in a unicode string which still makes it difficult to convert it - # to a byte string later. - # - # So, reach inside and turn it into some entirely safe ascii byte - # strings that will survive being written to stdout without - # causing too much damage in the process. - # - # As a result, non-ascii will not be rendered correctly but - # instead as escape sequences. At least this can go away when - # we're done with Python 2 support. - raise usage.error(*( - arg.encode("ascii", errors="backslashreplace") - if isinstance(arg, unicode) - else arg.decode("utf-8").encode("ascii", errors="backslashreplace") - for arg - in e.args - )) + except usage.error: raise return config @@ -164,6 +130,8 @@ def parse_or_exit(config, argv, stdout, stderr): :return: ``config``, after using it to parse the argument list. """ try: + config.stdout = stdout + config.stderr = stderr parse_options(argv[1:], config=config) except usage.error as e: # `parse_options` may have the side-effect of initializing a @@ -189,6 +157,7 @@ def parse_or_exit(config, argv, stdout, stderr): return config def dispatch(config, + reactor, stdin=sys.stdin, stdout=sys.stdout, stderr=sys.stderr): command = config.subCommand so = config.subOptions @@ -197,11 +166,12 @@ def dispatch(config, so.stdout = stdout so.stderr = stderr so.stdin = stdin + config.stdin = stdin if command in create_dispatch: f = create_dispatch[command] - elif command in _control_node_dispatch: - f = _control_node_dispatch[command] + elif command == "run": + f = lambda config: tahoe_run.run(reactor, config) elif command in debug.dispatch: f = debug.dispatch[command] elif command in admin.dispatch: @@ -292,7 +262,7 @@ def _setup_coverage(reactor, argv): # can we put this _setup_coverage call after we hit # argument-parsing? # ensure_str() only necessary on Python 2. 
- if six.ensure_str('--coverage') not in sys.argv: + if '--coverage' not in sys.argv: return argv.remove('--coverage') @@ -355,7 +325,7 @@ def _run_with_reactor(reactor, config, argv, stdout, stderr): stderr, ) d.addCallback(_maybe_enable_eliot_logging, reactor) - d.addCallback(dispatch, stdout=stdout, stderr=stderr) + d.addCallback(dispatch, reactor, stdout=stdout, stderr=stderr) def _show_exception(f): # when task.react() notices a non-SystemExit exception, it does # log.err() with the failure and then exits with rc=1. We want this diff --git a/src/allmydata/scripts/slow_operation.py b/src/allmydata/scripts/slow_operation.py index 3c23fb533..9596fd805 100644 --- a/src/allmydata/scripts/slow_operation.py +++ b/src/allmydata/scripts/slow_operation.py @@ -1,14 +1,6 @@ """ Ported to Python 3. """ -from __future__ import unicode_literals -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -from future.utils import PY2, PY3 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 from six import ensure_str @@ -87,9 +79,7 @@ class SlowOperationRunner(object): if not data["finished"]: return False if self.options.get("raw"): - if PY3: - # need to write bytes! - stdout = stdout.buffer + stdout = stdout.buffer if is_printable_ascii(jdata): stdout.write(jdata) stdout.write(b"\n") diff --git a/src/allmydata/scripts/tahoe_add_alias.py b/src/allmydata/scripts/tahoe_add_alias.py index 8476aeb28..ac57879b0 100644 --- a/src/allmydata/scripts/tahoe_add_alias.py +++ b/src/allmydata/scripts/tahoe_add_alias.py @@ -1,14 +1,6 @@ """ Ported to Python 3. 
""" -from __future__ import unicode_literals -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 import os.path import codecs diff --git a/src/allmydata/scripts/tahoe_backup.py b/src/allmydata/scripts/tahoe_backup.py index b574f16e8..7ca79d393 100644 --- a/src/allmydata/scripts/tahoe_backup.py +++ b/src/allmydata/scripts/tahoe_backup.py @@ -1,14 +1,6 @@ """ Ported to Python 3. """ -from __future__ import unicode_literals -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 import os.path import time diff --git a/src/allmydata/scripts/tahoe_check.py b/src/allmydata/scripts/tahoe_check.py index 6bafe3d1a..c5ba07db9 100644 --- a/src/allmydata/scripts/tahoe_check.py +++ b/src/allmydata/scripts/tahoe_check.py @@ -1,16 +1,8 @@ """ Ported to Python 3. """ -from __future__ import unicode_literals -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - -from six import ensure_str, ensure_text +from six import ensure_text from urllib.parse import quote as url_quote import json @@ -176,7 +168,7 @@ class DeepCheckOutput(LineOnlyReceiver, object): # LIT files and directories do not have a "summary" field. 
summary = cr.get("summary", "Healthy (LIT)") # When Python 2 is dropped the ensure_text()/ensure_str() will be unnecessary. - print(ensure_text(ensure_str("%s: %s") % (quote_path(path), quote_output(summary, quotemarks=False)), + print(ensure_text("%s: %s" % (quote_path(path), quote_output(summary, quotemarks=False)), encoding=get_io_encoding()), file=stdout) # always print out corrupt shares @@ -254,13 +246,11 @@ class DeepCheckAndRepairOutput(LineOnlyReceiver, object): if not path: path = [""] # we don't seem to have a summary available, so build one - # When Python 2 is dropped the ensure_text/ensure_str crap can be - # dropped. if was_healthy: - summary = ensure_str("healthy") + summary = "healthy" else: - summary = ensure_str("not healthy") - print(ensure_text(ensure_str("%s: %s") % (quote_path(path), summary), + summary = "not healthy" + print(ensure_text("%s: %s" % (quote_path(path), summary), encoding=get_io_encoding()), file=stdout) # always print out corrupt shares diff --git a/src/allmydata/scripts/tahoe_cp.py b/src/allmydata/scripts/tahoe_cp.py index aae03291f..1e9726605 100644 --- a/src/allmydata/scripts/tahoe_cp.py +++ b/src/allmydata/scripts/tahoe_cp.py @@ -1,14 +1,6 @@ """ Ported to Python 3. """ -from __future__ import unicode_literals -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 import os.path from urllib.parse import quote as url_quote diff --git a/src/allmydata/scripts/tahoe_get.py b/src/allmydata/scripts/tahoe_get.py index 39f1686ce..8e688e432 100644 --- a/src/allmydata/scripts/tahoe_get.py +++ b/src/allmydata/scripts/tahoe_get.py @@ -1,14 +1,6 @@ """ Ported to Python 3. 
""" -from __future__ import unicode_literals -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -from future.utils import PY2, PY3 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 from urllib.parse import quote as url_quote from allmydata.scripts.common import get_alias, DEFAULT_ALIAS, escape_path, \ @@ -42,7 +34,7 @@ def get(options): outf = stdout # Make sure we can write bytes; on Python 3 stdout is Unicode by # default. - if PY3 and getattr(outf, "encoding", None) is not None: + if getattr(outf, "encoding", None) is not None: outf = outf.buffer while True: data = resp.read(4096) diff --git a/src/allmydata/scripts/tahoe_invite.py b/src/allmydata/scripts/tahoe_invite.py index 09d4cbd59..b44efdeb9 100644 --- a/src/allmydata/scripts/tahoe_invite.py +++ b/src/allmydata/scripts/tahoe_invite.py @@ -1,28 +1,14 @@ """ Ported to Python 3. 
""" -from __future__ import unicode_literals -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - -try: - from allmydata.scripts.types_ import SubCommands -except ImportError: - pass from twisted.python import usage from twisted.internet import defer, reactor -from wormhole import wormhole - from allmydata.util.encodingutil import argv_to_abspath from allmydata.util import jsonbytes as json from allmydata.scripts.common import get_default_nodedir, get_introducer_furl +from allmydata.scripts.types_ import SubCommands from allmydata.client import read_config @@ -50,7 +36,7 @@ def _send_config_via_wormhole(options, config): err = options.stderr relay_url = options.parent['wormhole-server'] print("Connecting to '{}'...".format(relay_url), file=out) - wh = wormhole.create( + wh = options.parent.wormhole.create( appid=options.parent['wormhole-invite-appid'], relay_url=relay_url, reactor=reactor, @@ -114,10 +100,10 @@ def invite(options): print("Completed successfully", file=out) -subCommands = [ +subCommands : SubCommands = [ ("invite", None, InviteOptions, "Invite a new node to this grid"), -] # type: SubCommands +] dispatch = { "invite": invite, diff --git a/src/allmydata/scripts/tahoe_ls.py b/src/allmydata/scripts/tahoe_ls.py index 5a7136d77..d38fe060c 100644 --- a/src/allmydata/scripts/tahoe_ls.py +++ b/src/allmydata/scripts/tahoe_ls.py @@ -1,14 +1,6 @@ """ Ported to Python 3. 
""" -from __future__ import unicode_literals -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 from six import ensure_text diff --git a/src/allmydata/scripts/tahoe_manifest.py b/src/allmydata/scripts/tahoe_manifest.py index b55075eef..ebff2e893 100644 --- a/src/allmydata/scripts/tahoe_manifest.py +++ b/src/allmydata/scripts/tahoe_manifest.py @@ -1,16 +1,6 @@ """ Ported to Python 3. """ -from __future__ import unicode_literals -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -from future.utils import PY2, PY3 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - -from six import ensure_str from urllib.parse import quote as url_quote import json @@ -62,8 +52,7 @@ class ManifestStreamer(LineOnlyReceiver, object): # use Twisted to split this into lines self.in_error = False # Writing bytes, so need binary stdout. - if PY3: - stdout = stdout.buffer + stdout = stdout.buffer while True: chunk = resp.read(100) if not chunk: @@ -105,8 +94,7 @@ class ManifestStreamer(LineOnlyReceiver, object): if vc: print(quote_output(vc, quotemarks=False), file=stdout) else: - # ensure_str() only necessary for Python 2. - print(ensure_str("%s %s") % ( + print("%s %s" % ( quote_output(d["cap"], quotemarks=False), quote_path(d["path"], quotemarks=False)), file=stdout) diff --git a/src/allmydata/scripts/tahoe_mkdir.py b/src/allmydata/scripts/tahoe_mkdir.py index 85fe12554..8a9dc6262 100644 --- a/src/allmydata/scripts/tahoe_mkdir.py +++ b/src/allmydata/scripts/tahoe_mkdir.py @@ -1,14 +1,6 @@ """ Ported to Python 3. 
""" -from __future__ import unicode_literals -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 from urllib.parse import quote as url_quote from allmydata.scripts.common_http import do_http, check_http_error diff --git a/src/allmydata/scripts/tahoe_mv.py b/src/allmydata/scripts/tahoe_mv.py index d921047a8..016c0e725 100644 --- a/src/allmydata/scripts/tahoe_mv.py +++ b/src/allmydata/scripts/tahoe_mv.py @@ -1,14 +1,6 @@ """ Ported to Python 3. """ -from __future__ import unicode_literals -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 import re from urllib.parse import quote as url_quote diff --git a/src/allmydata/scripts/tahoe_put.py b/src/allmydata/scripts/tahoe_put.py index 1ea45e8ea..c04b6b4bc 100644 --- a/src/allmydata/scripts/tahoe_put.py +++ b/src/allmydata/scripts/tahoe_put.py @@ -1,23 +1,32 @@ """ -Ported to Python 3. +Implement the ``tahoe put`` command. 
""" -from __future__ import unicode_literals -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 +from __future__ import annotations from io import BytesIO from urllib.parse import quote as url_quote +from base64 import urlsafe_b64encode +from cryptography.hazmat.primitives.serialization import load_pem_private_key + +from twisted.python.filepath import FilePath + +from allmydata.crypto.rsa import PrivateKey, der_string_from_signing_key from allmydata.scripts.common_http import do_http, format_http_success, format_http_error from allmydata.scripts.common import get_alias, DEFAULT_ALIAS, escape_path, \ UnknownAliasError from allmydata.util.encodingutil import quote_output +def load_private_key(path: str) -> str: + """ + Load a private key from a file and return it in a format appropriate + to include in the HTTP request. 
+ """ + privkey = load_pem_private_key(FilePath(path).getContent(), password=None) + assert isinstance(privkey, PrivateKey) + derbytes = der_string_from_signing_key(privkey) + return urlsafe_b64encode(derbytes).decode("ascii") + def put(options): """ @param verbosity: 0, 1, or 2, meaning quiet, verbose, or very verbose @@ -29,6 +38,10 @@ def put(options): from_file = options.from_file to_file = options.to_file mutable = options['mutable'] + if options["private-key-path"] is None: + private_key = None + else: + private_key = load_private_key(options["private-key-path"]) format = options['format'] if options['quiet']: verbosity = 0 @@ -79,6 +92,12 @@ def put(options): queryargs = [] if mutable: queryargs.append("mutable=true") + if private_key is not None: + queryargs.append(f"private-key={private_key}") + else: + if private_key is not None: + raise Exception("Can only supply a private key for mutables.") + if format: queryargs.append("format=%s" % format) if queryargs: @@ -92,10 +111,7 @@ def put(options): if verbosity > 0: print("waiting for file data on stdin..", file=stderr) # We're uploading arbitrary files, so this had better be bytes: - if PY2: - stdinb = stdin - else: - stdinb = stdin.buffer + stdinb = stdin.buffer data = stdinb.read() infileobj = BytesIO(data) diff --git a/src/allmydata/scripts/tahoe_run.py b/src/allmydata/scripts/tahoe_run.py index 01f1a354c..d7b570faa 100644 --- a/src/allmydata/scripts/tahoe_run.py +++ b/src/allmydata/scripts/tahoe_run.py @@ -1,14 +1,6 @@ """ Ported to Python 3. 
""" -from __future__ import unicode_literals -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 __all__ = [ "RunOptions", @@ -19,49 +11,65 @@ import os, sys from allmydata.scripts.common import BasedirOptions from twisted.scripts import twistd from twisted.python import usage +from twisted.python.filepath import FilePath from twisted.python.reflect import namedAny -from twisted.internet.defer import maybeDeferred +from twisted.python.failure import Failure +from twisted.internet.defer import maybeDeferred, Deferred +from twisted.internet.protocol import Protocol +from twisted.internet.stdio import StandardIO +from twisted.internet.error import ReactorNotRunning from twisted.application.service import Service from allmydata.scripts.default_nodedir import _default_nodedir from allmydata.util.encodingutil import listdir_unicode, quote_local_unicode_path from allmydata.util.configutil import UnknownConfigError from allmydata.util.deferredutil import HookMixin - +from allmydata.util.pid import ( + parse_pidfile, + check_pid_process, + cleanup_pidfile, + ProcessInTheWay, + InvalidPidFile, +) +from allmydata.storage.crawler import ( + MigratePickleFileError, +) +from allmydata.storage_client import ( + MissingPlugin, +) from allmydata.node import ( PortAssignmentRequired, PrivacyError, ) + def get_pidfile(basedir): """ Returns the path to the PID file. :param basedir: the node's base directory :returns: the path to the PID file """ - return os.path.join(basedir, u"twistd.pid") + return os.path.join(basedir, u"running.process") + def get_pid_from_pidfile(pidfile): """ Tries to read and return the PID stored in the node's PID file - (twistd.pid). 
+ :param pidfile: try to read this PID file :returns: A numeric PID on success, ``None`` if PID file absent or inaccessible, ``-1`` if PID file invalid. """ try: - with open(pidfile, "r") as f: - pid = f.read() + pid, _ = parse_pidfile(pidfile) except EnvironmentError: return None - - try: - pid = int(pid) - except ValueError: + except InvalidPidFile: return -1 return pid + def identify_node_type(basedir): """ :return unicode: None or one of: 'client' or 'introducer'. @@ -91,6 +99,11 @@ class RunOptions(BasedirOptions): " [default: %s]" % quote_local_unicode_path(_default_nodedir)), ] + optFlags = [ + ("allow-stdin-close", None, + 'Do not exit when stdin closes ("tahoe run" otherwise will exit).'), + ] + def parseArgs(self, basedir=None, *twistd_args): # This can't handle e.g. 'tahoe run --reactor=foo', since # '--reactor=foo' looks like an option to the tahoe subcommand, not to @@ -143,9 +156,12 @@ class DaemonizeTheRealService(Service, HookMixin): "running": None, } self.stderr = options.parent.stderr + self._close_on_stdin_close = False if options["allow-stdin-close"] else True def startService(self): + from twisted.internet import reactor + def start(): node_to_instance = { u"client": lambda: maybeDeferred(namedAny("allmydata.client.create_client"), self.basedir), @@ -164,35 +180,104 @@ class DaemonizeTheRealService(Service, HookMixin): self.stderr.write("\ntub.port cannot be 0: you must choose.\n\n") elif reason.check(PrivacyError): self.stderr.write("\n{}\n\n".format(reason.value)) + elif reason.check(MigratePickleFileError): + self.stderr.write( + "Error\nAt least one 'pickle' format file exists.\n" + "The file is {}\n" + "You must either delete the pickle-format files" + " or migrate them using the command:\n" + " tahoe admin migrate-crawler --basedir {}\n\n" + .format( + reason.value.args[0].path, + self.basedir, + ) + ) + elif reason.check(MissingPlugin): + self.stderr.write( + "Missing Plugin\n" + "The configuration requests a plugin:\n" + "\n {}\n\n" + 
"...which cannot be found.\n" + "This typically means that some software hasn't been installed or the plugin couldn't be instantiated.\n\n" + .format( + reason.value.plugin_name, + ) + ) else: - self.stderr.write("\nUnknown error\n") + self.stderr.write("\nUnknown error, here's the traceback:\n") reason.printTraceback(self.stderr) reactor.stop() d = service_factory() def created(srv): - srv.setServiceParent(self.parent) + if self.parent is not None: + srv.setServiceParent(self.parent) + # exiting on stdin-closed facilitates cleanup when run + # as a subprocess + if self._close_on_stdin_close: + on_stdin_close(reactor, reactor.stop) d.addCallback(created) d.addErrback(handle_config_error) d.addBoth(self._call_hook, 'running') return d - from twisted.internet import reactor reactor.callWhenRunning(start) class DaemonizeTahoeNodePlugin(object): tapname = "tahoenode" - def __init__(self, nodetype, basedir): + def __init__(self, nodetype, basedir, allow_stdin_close): self.nodetype = nodetype self.basedir = basedir + self.allow_stdin_close = allow_stdin_close def makeService(self, so): + so["allow-stdin-close"] = self.allow_stdin_close return DaemonizeTheRealService(self.nodetype, self.basedir, so) -def run(config, runApp=twistd.runApp): +def on_stdin_close(reactor, fn): + """ + Arrange for the function `fn` to run when our stdin closes + """ + when_closed_d = Deferred() + + class WhenClosed(Protocol): + """ + Notify a Deferred when our connection is lost .. as this is passed + to twisted's StandardIO class, it is used to detect our parent + going away. + """ + + def connectionLost(self, reason): + when_closed_d.callback(None) + + def on_close(arg): + try: + fn() + except ReactorNotRunning: + pass + except Exception: + # for our "exit" use-case failures will _mostly_ just be + # ReactorNotRunning (because we're already shutting down + # when our stdin closes) but no matter what "bad thing" + # happens we just want to ignore it .. 
although other + # errors might be interesting so we'll log those + print(Failure()) + return arg + + when_closed_d.addBoth(on_close) + # we don't need to do anything with this instance because it gets + # hooked into the reactor and thus remembered .. but we return it + # for Windows testing purposes. + return StandardIO( + proto=WhenClosed(), + reactor=reactor, + ) + + +def run(reactor, config, runApp=twistd.runApp): """ Runs a Tahoe-LAFS node in the foreground. @@ -213,10 +298,15 @@ def run(config, runApp=twistd.runApp): print("%s is not a recognizable node directory" % quoted_basedir, file=err) return 1 - twistd_args = ["--nodaemon", "--rundir", basedir] + twistd_args = [ + # ensure twistd machinery does not daemonize. + "--nodaemon", + "--rundir", basedir, + ] if sys.platform != "win32": - pidfile = get_pidfile(basedir) - twistd_args.extend(["--pidfile", pidfile]) + # turn off Twisted's pid-file to use our own -- but not on + # windows, because twistd doesn't know about pidfiles there + twistd_args.extend(["--pidfile", None]) twistd_args.extend(config.twistd_args) twistd_args.append("DaemonizeTahoeNode") # point at our DaemonizeTahoeNodePlugin @@ -230,12 +320,22 @@ def run(config, runApp=twistd.runApp): print(config, file=err) print("tahoe %s: usage error from twistd: %s\n" % (config.subcommand_name, ue), file=err) return 1 - twistd_config.loadedPlugins = {"DaemonizeTahoeNode": DaemonizeTahoeNodePlugin(nodetype, basedir)} + twistd_config.loadedPlugins = { + "DaemonizeTahoeNode": DaemonizeTahoeNodePlugin(nodetype, basedir, config["allow-stdin-close"]) + } - # handle invalid PID file (twistd might not start otherwise) - if sys.platform != "win32" and get_pid_from_pidfile(pidfile) == -1: - print("found invalid PID file in %s - deleting it" % basedir, file=err) - os.remove(pidfile) + # our own pid-style file contains PID and process creation time + pidfile = FilePath(get_pidfile(config['basedir'])) + try: + check_pid_process(pidfile) + except (ProcessInTheWay, 
InvalidPidFile) as e: + print("ERROR: {}".format(e), file=err) + return 1 + else: + reactor.addSystemEventTrigger( + "after", "shutdown", + lambda: cleanup_pidfile(pidfile) + ) # We always pass --nodaemon so twistd.runApp does not daemonize. print("running node in %s" % (quoted_basedir,), file=out) diff --git a/src/allmydata/scripts/tahoe_status.py b/src/allmydata/scripts/tahoe_status.py index 250bfdea3..ef8da35c0 100644 --- a/src/allmydata/scripts/tahoe_status.py +++ b/src/allmydata/scripts/tahoe_status.py @@ -1,14 +1,6 @@ """ Ported to Python 3. """ -from __future__ import unicode_literals -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 import os from sys import stdout as _sys_stdout @@ -32,13 +24,12 @@ def print(*args, **kwargs): encoding error handler and then write the result whereas builtin print uses the "strict" encoding error handler. """ - from past.builtins import unicode out = kwargs.pop("file", None) if out is None: out = _sys_stdout encoding = out.encoding or "ascii" def ensafe(o): - if isinstance(o, unicode): + if isinstance(o, str): return o.encode(encoding, errors="replace").decode(encoding) return o return _print( diff --git a/src/allmydata/scripts/tahoe_unlink.py b/src/allmydata/scripts/tahoe_unlink.py index 5bdebb960..8531ce059 100644 --- a/src/allmydata/scripts/tahoe_unlink.py +++ b/src/allmydata/scripts/tahoe_unlink.py @@ -1,14 +1,6 @@ """ Ported to Python 3. 
""" -from __future__ import unicode_literals -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 from urllib.parse import quote as url_quote from allmydata.scripts.common_http import do_http, format_http_success, format_http_error diff --git a/src/allmydata/scripts/tahoe_webopen.py b/src/allmydata/scripts/tahoe_webopen.py index dbec31e87..011677b4e 100644 --- a/src/allmydata/scripts/tahoe_webopen.py +++ b/src/allmydata/scripts/tahoe_webopen.py @@ -1,14 +1,6 @@ """ Ported to Python 3. """ -from __future__ import unicode_literals -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 from urllib.parse import quote as url_quote diff --git a/src/allmydata/scripts/types_.py b/src/allmydata/scripts/types_.py index 1bed6e11e..e2a5c2f1e 100644 --- a/src/allmydata/scripts/types_.py +++ b/src/allmydata/scripts/types_.py @@ -2,8 +2,6 @@ Type definitions used by modules in this package. """ -# Python 3 only - from typing import List, Tuple, Type, Sequence, Any from twisted.python.usage import Options diff --git a/src/allmydata/stats.py b/src/allmydata/stats.py index 13ed8817c..f6361b074 100644 --- a/src/allmydata/stats.py +++ b/src/allmydata/stats.py @@ -1,54 +1,39 @@ """ Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - from time import clock as process_time -else: - from time import process_time +from collections import deque +from time import process_time import time +from typing import Deque, Tuple from twisted.application import service from twisted.application.internet import TimerService from zope.interface import implementer -from foolscap.api import eventually from allmydata.util import log, dictutil from allmydata.interfaces import IStatsProducer @implementer(IStatsProducer) class CPUUsageMonitor(service.MultiService): - HISTORY_LENGTH = 15 - POLL_INTERVAL = 60 # type: float + HISTORY_LENGTH: int = 15 + POLL_INTERVAL: float = 60 + initial_cpu: float = 0.0 def __init__(self): service.MultiService.__init__(self) - # we don't use process_time() here, because the constructor is run by - # the twistd parent process (as it loads the .tac file), whereas the - # rest of the program will be run by the child process, after twistd - # forks. Instead, set self.initial_cpu as soon as the reactor starts - # up. 
- self.initial_cpu = 0.0 # just in case - eventually(self._set_initial_cpu) - self.samples = [] + self.samples: Deque[Tuple[float, float]] = deque([], self.HISTORY_LENGTH + 1) # we provide 1min, 5min, and 15min moving averages TimerService(self.POLL_INTERVAL, self.check).setServiceParent(self) - def _set_initial_cpu(self): + def startService(self): self.initial_cpu = process_time() + return super().startService() def check(self): now_wall = time.time() now_cpu = process_time() self.samples.append( (now_wall, now_cpu) ) - while len(self.samples) > self.HISTORY_LENGTH+1: - self.samples.pop(0) def _average_N_minutes(self, size): if len(self.samples) < size+1: diff --git a/src/allmydata/storage/common.py b/src/allmydata/storage/common.py index e5563647f..c76e01052 100644 --- a/src/allmydata/storage/common.py +++ b/src/allmydata/storage/common.py @@ -1,14 +1,6 @@ """ Ported to Python 3. """ -from __future__ import unicode_literals -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -from future.utils import PY2, PY3 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 import os.path from allmydata.util import base32 @@ -16,11 +8,22 @@ from allmydata.util import base32 # Backwards compatibility. 
from allmydata.interfaces import DataTooLargeError # noqa: F401 -class UnknownMutableContainerVersionError(Exception): - pass -class UnknownImmutableContainerVersionError(Exception): +class UnknownContainerVersionError(Exception): + def __init__(self, filename, version): + self.filename = filename + self.version = version + + def __str__(self): + return "sharefile {!r} had unexpected version {!r}".format( + self.filename, + self.version, + ) + +class UnknownMutableContainerVersionError(UnknownContainerVersionError): pass +class UnknownImmutableContainerVersionError(UnknownContainerVersionError): + pass def si_b2a(storageindex): return base32.b2a(storageindex) @@ -28,13 +31,15 @@ def si_b2a(storageindex): def si_a2b(ascii_storageindex): return base32.a2b(ascii_storageindex) +def si_to_human_readable(storageindex: bytes) -> str: + """Create human-readable string of storage index.""" + return str(base32.b2a(storageindex), "ascii") + def storage_index_to_dir(storageindex): """Convert storage index to directory path. Returns native string. """ sia = si_b2a(storageindex) - if PY3: - # On Python 3 we expect paths to be unicode. - sia = sia.decode("ascii") + sia = sia.decode("ascii") return os.path.join(sia[:2], sia) diff --git a/src/allmydata/storage/crawler.py b/src/allmydata/storage/crawler.py index bd4f4f432..613f04bfb 100644 --- a/src/allmydata/storage/crawler.py +++ b/src/allmydata/storage/crawler.py @@ -4,30 +4,180 @@ Crawl the storage server shares. Ported to Python 3. """ -from __future__ import unicode_literals -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -from future.utils import PY2, PY3 -if PY2: - # We don't import bytes, object, dict, and list just in case they're used, - # so as not to create brittle pickles with random magic objects. 
- from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, range, str, max, min # noqa: F401 - -import os, time, struct -try: - import cPickle as pickle -except ImportError: - import pickle # type: ignore +import os +import time +import json +import struct from twisted.internet import reactor from twisted.application import service +from twisted.python.filepath import FilePath from allmydata.storage.common import si_b2a from allmydata.util import fileutil class TimeSliceExceeded(Exception): pass + +class MigratePickleFileError(Exception): + """ + A pickle-format file exists (the FilePath to the file will be the + single arg). + """ + pass + + +def _convert_cycle_data(state): + """ + :param dict state: cycle-to-date or history-item state + + :return dict: the state in the JSON form + """ + + def _convert_expiration_mode(value): + # original is a 4-tuple, with the last element being a 2-tuple + # .. convert both to lists + return [ + value[0], + value[1], + value[2], + list(value[3]), + ] + + def _convert_lease_age(value): + # if we're in cycle-to-date, this is a dict + if isinstance(value, dict): + return { + "{},{}".format(k[0], k[1]): v + for k, v in value.items() + } + # otherwise, it's a history-item and they're 3-tuples + return [ + list(v) + for v in value + ] + + converters = { + "configured-expiration-mode": _convert_expiration_mode, + "cycle-start-finish-times": list, + "lease-age-histogram": _convert_lease_age, + "corrupt-shares": lambda value: [ + list(x) + for x in value + ], + "leases-per-share-histogram": lambda value: { + str(k): v + for k, v in value.items() + }, + } + return { + k: converters.get(k, lambda z: z)(v) + for k, v in state.items() + } + + +def _convert_pickle_state_to_json(state): + """ + :param dict state: the pickled state + + :return dict: the state in the JSON form + """ + assert state["version"] == 1, "Only known version is 1" + + converters = { + "cycle-to-date": _convert_cycle_data, + } + 
return { + k: converters.get(k, lambda x: x)(v) + for k, v in state.items() + } + + +def _upgrade_pickle_to_json(state_path, convert_pickle): + """ + :param FilePath state_path: the filepath to ensure is json + + :param Callable[dict] convert_pickle: function to change + pickle-style state into JSON-style state + + :returns FilePath: the local path where the state is stored + + If this state is pickle, convert to the JSON format and return the + JSON path. + """ + json_state_path = state_path.siblingExtension(".json") + + # if there's no file there at all, we're done because there's + # nothing to upgrade + if not state_path.exists(): + return json_state_path + + # upgrade the pickle data to JSON + import pickle + with state_path.open("rb") as f: + state = pickle.load(f) + new_state = convert_pickle(state) + _dump_json_to_file(new_state, json_state_path) + + # we've written the JSON, delete the pickle + state_path.remove() + return json_state_path + + +def _confirm_json_format(fp): + """ + :param FilePath fp: the original (pickle) name of a state file + + This confirms that we do _not_ have the pickle-version of a + state-file and _do_ either have nothing, or the JSON version. If + the pickle-version exists, an exception is raised. + + :returns FilePath: the JSON name of a state file + """ + if fp.path.endswith(".json"): + return fp + jsonfp = fp.siblingExtension(".json") + if fp.exists(): + raise MigratePickleFileError(fp) + return jsonfp + + +def _dump_json_to_file(js, afile): + """ + Dump the JSON object `js` to the FilePath `afile` + """ + with afile.open("wb") as f: + data = json.dumps(js) + f.write(data.encode("utf8")) + + +class _LeaseStateSerializer(object): + """ + Read and write state for LeaseCheckingCrawler. This understands + how to read the legacy pickle format files and upgrade them to the + new JSON format (which will occur automatically). 
+ """ + + def __init__(self, state_path): + self._path = _confirm_json_format(FilePath(state_path)) + + def load(self): + """ + :returns: deserialized JSON state + """ + with self._path.open("rb") as f: + return json.load(f) + + def save(self, data): + """ + Serialize the given data as JSON into the state-path + :returns: None + """ + tmpfile = self._path.siblingExtension(".tmp") + _dump_json_to_file(data, tmpfile) + fileutil.move_into_place(tmpfile.path, self._path.path) + return None + + class ShareCrawler(service.MultiService): """A ShareCrawler subclass is attached to a StorageServer, and periodically walks all of its shares, processing each one in some @@ -90,12 +240,10 @@ class ShareCrawler(service.MultiService): self.allowed_cpu_percentage = allowed_cpu_percentage self.server = server self.sharedir = server.sharedir - self.statefile = statefile + self._state_serializer = _LeaseStateSerializer(statefile) self.prefixes = [si_b2a(struct.pack(">H", i << (16-10)))[:2] for i in range(2**10)] - if PY3: - # On Python 3 we expect the paths to be unicode, not bytes. - self.prefixes = [p.decode("ascii") for p in self.prefixes] + self.prefixes = [p.decode("ascii") for p in self.prefixes] self.prefixes.sort() self.timer = None self.bucket_cache = (None, []) @@ -213,8 +361,7 @@ class ShareCrawler(service.MultiService): # of the last bucket to be processed, or # None if we are sleeping between cycles try: - with open(self.statefile, "rb") as f: - state = pickle.load(f) + state = self._state_serializer.load() except Exception: state = {"version": 1, "last-cycle-finished": None, @@ -250,12 +397,7 @@ class ShareCrawler(service.MultiService): else: last_complete_prefix = self.prefixes[lcpi] self.state["last-complete-prefix"] = last_complete_prefix - tmpfile = self.statefile + ".tmp" - with open(tmpfile, "wb") as f: - # Newer protocols won't work in Python 2; when it is dropped, - # protocol v4 can be used (added in Python 3.4). 
- pickle.dump(self.state, f, protocol=2) - fileutil.move_into_place(tmpfile, self.statefile) + self._state_serializer.save(self.get_state()) def startService(self): # arrange things to look like we were just sleeping, so diff --git a/src/allmydata/storage/expirer.py b/src/allmydata/storage/expirer.py index 7c6cd8218..c0968fd39 100644 --- a/src/allmydata/storage/expirer.py +++ b/src/allmydata/storage/expirer.py @@ -1,19 +1,65 @@ -from __future__ import division -from __future__ import absolute_import -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - # We omit anything that might end up in pickle, just in case. - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, range, str, max, min # noqa: F401 - -import time, os, pickle, struct -from allmydata.storage.crawler import ShareCrawler +import json +import time +import os +import struct +from allmydata.storage.crawler import ( + ShareCrawler, + _confirm_json_format, + _convert_cycle_data, + _dump_json_to_file, +) from allmydata.storage.shares import get_share_file from allmydata.storage.common import UnknownMutableContainerVersionError, \ UnknownImmutableContainerVersionError from twisted.python import log as twlog +from twisted.python.filepath import FilePath + + +def _convert_pickle_state_to_json(state): + """ + Convert a pickle-serialized crawler-history state to the new JSON + format. + + :param dict state: the pickled state + + :return dict: the state in the JSON form + """ + return { + str(k): _convert_cycle_data(v) + for k, v in state.items() + } + + +class _HistorySerializer(object): + """ + Serialize the 'history' file of the lease-crawler state. This is + "storage/lease_checker.history" for the pickle or + "storage/lease_checker.history.json" for the new JSON format. 
+ """ + + def __init__(self, history_path): + self._path = _confirm_json_format(FilePath(history_path)) + + if not self._path.exists(): + _dump_json_to_file({}, self._path) + + def load(self): + """ + Deserialize the existing data. + + :return dict: the existing history state + """ + with self._path.open("rb") as f: + history = json.load(f) + return history + + def save(self, new_history): + """ + Serialize the existing data as JSON. + """ + _dump_json_to_file(new_history, self._path) + return None + class LeaseCheckingCrawler(ShareCrawler): """I examine the leases on all shares, determining which are still valid @@ -63,7 +109,7 @@ class LeaseCheckingCrawler(ShareCrawler): override_lease_duration, # used if expiration_mode=="age" cutoff_date, # used if expiration_mode=="cutoff-date" sharetypes): - self.historyfile = historyfile + self._history_serializer = _HistorySerializer(historyfile) self.expiration_enabled = expiration_enabled self.mode = mode self.override_lease_duration = None @@ -91,14 +137,6 @@ class LeaseCheckingCrawler(ShareCrawler): for k in so_far: self.state["cycle-to-date"].setdefault(k, so_far[k]) - # initialize history - if not os.path.exists(self.historyfile): - history = {} # cyclenum -> dict - with open(self.historyfile, "wb") as f: - # Newer protocols won't work in Python 2; when it is dropped, - # protocol v4 can be used (added in Python 3.4). 
- pickle.dump(history, f, protocol=2) - def create_empty_cycle_dict(self): recovered = self.create_empty_recovered_dict() so_far = {"corrupt-shares": [], @@ -142,7 +180,7 @@ class LeaseCheckingCrawler(ShareCrawler): struct.error): twlog.msg("lease-checker error processing %s" % sharefile) twlog.err() - which = (storage_index_b32, shnum) + which = [storage_index_b32, shnum] self.state["cycle-to-date"]["corrupt-shares"].append(which) wks = (1, 1, 1, "unknown") would_keep_shares.append(wks) @@ -212,7 +250,7 @@ class LeaseCheckingCrawler(ShareCrawler): num_valid_leases_configured += 1 so_far = self.state["cycle-to-date"] - self.increment(so_far["leases-per-share-histogram"], num_leases, 1) + self.increment(so_far["leases-per-share-histogram"], str(num_leases), 1) self.increment_space("examined", s, sharetype) would_keep_share = [1, 1, 1, sharetype] @@ -291,12 +329,14 @@ class LeaseCheckingCrawler(ShareCrawler): start = self.state["current-cycle-start-time"] now = time.time() - h["cycle-start-finish-times"] = (start, now) + h["cycle-start-finish-times"] = [start, now] h["expiration-enabled"] = self.expiration_enabled - h["configured-expiration-mode"] = (self.mode, - self.override_lease_duration, - self.cutoff_date, - self.sharetypes_to_expire) + h["configured-expiration-mode"] = [ + self.mode, + self.override_lease_duration, + self.cutoff_date, + self.sharetypes_to_expire, + ] s = self.state["cycle-to-date"] @@ -314,16 +354,12 @@ class LeaseCheckingCrawler(ShareCrawler): # copy() needs to become a deepcopy h["space-recovered"] = s["space-recovered"].copy() - with open(self.historyfile, "rb") as f: - history = pickle.load(f) - history[cycle] = h + history = self._history_serializer.load() + history[str(cycle)] = h while len(history) > 10: - oldcycles = sorted(history.keys()) - del history[oldcycles[0]] - with open(self.historyfile, "wb") as f: - # Newer protocols won't work in Python 2; when it is dropped, - # protocol v4 can be used (added in Python 3.4). 
- pickle.dump(history, f, protocol=2) + oldcycles = sorted(int(k) for k in history.keys()) + del history[str(oldcycles[0])] + self._history_serializer.save(history) def get_state(self): """In addition to the crawler state described in @@ -392,9 +428,7 @@ class LeaseCheckingCrawler(ShareCrawler): progress = self.get_progress() state = ShareCrawler.get_state(self) # does a shallow copy - with open(self.historyfile, "rb") as f: - history = pickle.load(f) - state["history"] = history + state["history"] = self._history_serializer.load() if not progress["cycle-in-progress"]: del state["cycle-to-date"] @@ -406,10 +440,12 @@ class LeaseCheckingCrawler(ShareCrawler): lah = so_far["lease-age-histogram"] so_far["lease-age-histogram"] = self.convert_lease_age_histogram(lah) so_far["expiration-enabled"] = self.expiration_enabled - so_far["configured-expiration-mode"] = (self.mode, - self.override_lease_duration, - self.cutoff_date, - self.sharetypes_to_expire) + so_far["configured-expiration-mode"] = [ + self.mode, + self.override_lease_duration, + self.cutoff_date, + self.sharetypes_to_expire, + ] so_far_sr = so_far["space-recovered"] remaining_sr = {} diff --git a/src/allmydata/storage/http_client.py b/src/allmydata/storage/http_client.py new file mode 100644 index 000000000..f0570a4d6 --- /dev/null +++ b/src/allmydata/storage/http_client.py @@ -0,0 +1,1262 @@ +""" +HTTP client that talks to the HTTP storage server. 
+""" + +from __future__ import annotations + + +from typing import ( + Union, + Optional, + Sequence, + Mapping, + BinaryIO, + cast, + TypedDict, + Set, + Dict, + Callable, + ClassVar, +) +from base64 import b64encode +from io import BytesIO +from os import SEEK_END + +from attrs import define, asdict, frozen, field +from eliot import start_action, register_exception_extractor +from eliot.twisted import DeferredContext + +from pycddl import Schema +from collections_extended import RangeMap +from werkzeug.datastructures import Range, ContentRange +from twisted.web.http_headers import Headers +from twisted.web import http +from twisted.web.iweb import IPolicyForHTTPS, IResponse, IAgent +from twisted.internet.defer import Deferred, succeed +from twisted.internet.interfaces import ( + IOpenSSLClientConnectionCreator, + IReactorTime, + IDelayedCall, +) +from twisted.internet.ssl import CertificateOptions +from twisted.protocols.tls import TLSMemoryBIOProtocol +from twisted.web.client import Agent, HTTPConnectionPool +from zope.interface import implementer +from hyperlink import DecodedURL +import treq +from treq.client import HTTPClient +from treq.testing import StubTreq +from OpenSSL import SSL +from werkzeug.http import parse_content_range_header + +from .http_common import ( + swissnum_auth_header, + Secrets, + get_content_type, + CBOR_MIME_TYPE, + get_spki_hash, + response_is_not_html, +) +from ..interfaces import VersionMessage +from .common import si_b2a, si_to_human_readable +from ..util.hashutil import timing_safe_compare +from ..util.deferredutil import async_to_deferred +from ..util.tor_provider import _Provider as TorProvider +from ..util.cputhreadpool import defer_to_thread +from ..util.cbor import dumps + +try: + from txtorcon import Tor # type: ignore +except ImportError: + + class Tor: # type: ignore[no-redef] + pass + + +def _encode_si(si: bytes) -> str: + """Encode the storage index into Unicode string.""" + return str(si_b2a(si), "ascii") + + +class 
ClientException(Exception): + """An unexpected response code from the server.""" + + def __init__( + self, code: int, message: Optional[str] = None, body: Optional[bytes] = None + ): + Exception.__init__(self, code, message, body) + self.code = code + self.message = message + self.body = body + + +register_exception_extractor(ClientException, lambda e: {"response_code": e.code}) + + +# Schemas for server responses. +# +# Tags are of the form #6.nnn, where the number is documented at +# https://www.iana.org/assignments/cbor-tags/cbor-tags.xhtml. Notably, #6.258 +# indicates a set. +_SCHEMAS: Mapping[str, Schema] = { + "get_version": Schema( + # Note that the single-quoted (`'`) string keys in this schema + # represent *byte* strings - per the CDDL specification. Text strings + # are represented using strings with *double* quotes (`"`). + """ + response = {'http://allmydata.org/tahoe/protocols/storage/v1' => { + 'maximum-immutable-share-size' => uint + 'maximum-mutable-share-size' => uint + 'available-space' => uint + } + 'application-version' => bstr + } + """ + ), + "allocate_buckets": Schema( + """ + response = { + already-have: #6.258([0*256 uint]) + allocated: #6.258([0*256 uint]) + } + """ + ), + "immutable_write_share_chunk": Schema( + """ + response = { + required: [0* {begin: uint, end: uint}] + } + """ + ), + "list_shares": Schema( + """ + response = #6.258([0*256 uint]) + """ + ), + "mutable_read_test_write": Schema( + """ + response = { + "success": bool, + "data": {0*256 share_number: [0* bstr]} + } + share_number = uint + """ + ), + "mutable_list_shares": Schema( + """ + response = #6.258([0*256 uint]) + """ + ), +} + + +@define +class _LengthLimitedCollector: + """ + Collect data using ``treq.collect()``, with limited length. 
+ """ + + remaining_length: int + timeout_on_silence: IDelayedCall + f: BytesIO = field(factory=BytesIO) + + def __call__(self, data: bytes) -> None: + self.timeout_on_silence.reset(60) + self.remaining_length -= len(data) + if self.remaining_length < 0: + raise ValueError("Response length was too long") + self.f.write(data) + + +def limited_content( + response: IResponse, + clock: IReactorTime, + max_length: int = 30 * 1024 * 1024, +) -> Deferred[BinaryIO]: + """ + Like ``treq.content()``, but limit data read from the response to a set + length. If the response is longer than the max allowed length, the result + fails with a ``ValueError``. + + A potentially useful future improvement would be using a temporary file to + store the content; since filesystem buffering means that would use memory + for small responses and disk for large responses. + + This will time out if no data is received for 60 seconds; so long as a + trickle of data continues to arrive, it will continue to run. + """ + result_deferred = succeed(None) + + # Sadly, addTimeout() won't work because we need access to the IDelayedCall + # in order to reset it on each data chunk received. + timeout = clock.callLater(60, result_deferred.cancel) + collector = _LengthLimitedCollector(max_length, timeout) + + with start_action( + action_type="allmydata:storage:http-client:limited-content", + max_length=max_length, + ).context(): + d = DeferredContext(result_deferred) + + # Make really sure everything gets called in Deferred context, treq might + # call collector directly... 
+ d.addCallback(lambda _: treq.collect(response, collector)) + + def done(_: object) -> BytesIO: + timeout.cancel() + collector.f.seek(0) + return collector.f + + def failed(f): + if timeout.active(): + timeout.cancel() + return f + + result = d.addCallbacks(done, failed) + return result.addActionFinish() + + +@define +class ImmutableCreateResult(object): + """Result of creating a storage index for an immutable.""" + + already_have: set[int] + allocated: set[int] + + +class _TLSContextFactory(CertificateOptions): + """ + Create a context that validates the way Tahoe-LAFS wants to: based on a + pinned certificate hash, rather than a certificate authority. + + Originally implemented as part of Foolscap. To comply with the license, + here's the original licensing terms: + + Copyright (c) 2006-2008 Brian Warner + + Permission is hereby granted, free of charge, to any person obtaining a + copy of this software and associated documentation files (the "Software"), + to deal in the Software without restriction, including without limitation + the rights to use, copy, modify, merge, publish, distribute, sublicense, + and/or sell copies of the Software, and to permit persons to whom the + Software is furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL + THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + DEALINGS IN THE SOFTWARE. 
+    """
+
+    def __init__(self, expected_spki_hash: bytes):
+        self.expected_spki_hash = expected_spki_hash
+        CertificateOptions.__init__(self)
+
+    def getContext(self) -> SSL.Context:
+        def always_validate(conn, cert, errno, depth, preverify_ok):
+            # This function is called to validate the certificate received by
+            # the other end. OpenSSL calls it multiple times, for each errno
+            # for each certificate.
+
+            # We do not care about certificate authorities or revocation
+            # lists, we just want to know that the certificate has a valid
+            # signature and follow the chain back to one which is
+            # self-signed. We need to protect against forged signatures, but
+            # not the usual TLS concerns about invalid CAs or revoked
+            # certificates.
+            things_are_ok = (
+                SSL.X509VerificationCodes.OK,
+                SSL.X509VerificationCodes.ERR_CERT_NOT_YET_VALID,
+                SSL.X509VerificationCodes.ERR_CERT_HAS_EXPIRED,
+                SSL.X509VerificationCodes.ERR_DEPTH_ZERO_SELF_SIGNED_CERT,
+                SSL.X509VerificationCodes.ERR_SELF_SIGNED_CERT_IN_CHAIN,
+            )
+            # TODO can we do this once instead of multiple times?
+            if errno in things_are_ok and timing_safe_compare(
+                get_spki_hash(cert.to_cryptography()), self.expected_spki_hash
+            ):
+                return 1
+            # TODO: log the details of the error, because otherwise they get
+            # lost in the PyOpenSSL exception that will eventually be raised
+            # (possibly OpenSSL.SSL.Error: certificate verify failed)
+            return 0
+
+        ctx = CertificateOptions.getContext(self)
+
+        # VERIFY_PEER means we ask the other end for their certificate.
+        ctx.set_verify(SSL.VERIFY_PEER, always_validate)
+        return ctx
+
+
+@implementer(IPolicyForHTTPS)
+@implementer(IOpenSSLClientConnectionCreator)
+@define
+class _StorageClientHTTPSPolicy:
+    """
+    A HTTPS policy that ensures the SPKI hash of the public key matches a known
+    hash, i.e. pinning-based validation.
+    """
+
+    expected_spki_hash: bytes
+
+    # IPolicyForHTTPS
+    def creatorForNetloc(self, hostname: str, port: int) -> _StorageClientHTTPSPolicy:
+        return self
+
+    # IOpenSSLClientConnectionCreator
+    def clientConnectionForTLS(
+        self, tlsProtocol: TLSMemoryBIOProtocol
+    ) -> SSL.Connection:
+        return SSL.Connection(
+            _TLSContextFactory(self.expected_spki_hash).getContext(), None
+        )
+
+
+@define
+class StorageClientFactory:
+    """
+    Create ``StorageClient`` instances, using appropriate
+    ``twisted.web.iweb.IAgent`` for different connection methods: normal TCP,
+    Tor, and eventually I2P.
+
+    There is some caching involved since there might be shared setup work, e.g.
+    connecting to the local Tor service only needs to happen once.
+    """
+
+    _default_connection_handlers: dict[str, str]
+    _tor_provider: Optional[TorProvider]
+    # Cache the Tor instance created by the provider, if relevant.
+    _tor_instance: Optional[Tor] = None
+
+    # If set, we're doing unit testing and we should call this with any
+    # HTTPConnectionPool that gets passed/created to ``create_agent()``.
+    TEST_MODE_REGISTER_HTTP_POOL: ClassVar[
+        Optional[Callable[[HTTPConnectionPool], None]]
+    ] = None
+
+    @classmethod
+    def start_test_mode(cls, callback: Callable[[HTTPConnectionPool], None]) -> None:
+        """Switch to testing mode.
+
+        In testing mode we register the pool with the test system using the given
+        callback so it can Do Things, most notably killing off idle HTTP
+        connections at test shutdown and, in some tests, in the middle of the
+        test.
+ """ + cls.TEST_MODE_REGISTER_HTTP_POOL = callback + + @classmethod + def stop_test_mode(cls) -> None: + """Stop testing mode.""" + cls.TEST_MODE_REGISTER_HTTP_POOL = None + + async def _create_agent( + self, + nurl: DecodedURL, + reactor: object, + tls_context_factory: IPolicyForHTTPS, + pool: HTTPConnectionPool, + ) -> IAgent: + """Create a new ``IAgent``, possibly using Tor.""" + if self.TEST_MODE_REGISTER_HTTP_POOL is not None: + self.TEST_MODE_REGISTER_HTTP_POOL(pool) + + # TODO default_connection_handlers should really be an object, not a + # dict, so we can ask "is this using Tor" without poking at a + # dictionary with arbitrary strings... See + # https://tahoe-lafs.org/trac/tahoe-lafs/ticket/4032 + handler = self._default_connection_handlers["tcp"] + + if handler == "tcp": + return Agent(reactor, tls_context_factory, pool=pool) + if handler == "tor" or nurl.scheme == "pb+tor": + assert self._tor_provider is not None + if self._tor_instance is None: + self._tor_instance = await self._tor_provider.get_tor_instance(reactor) + return self._tor_instance.web_agent( + pool=pool, tls_context_factory=tls_context_factory + ) + else: + # I2P support will be added here. 
See + # https://tahoe-lafs.org/trac/tahoe-lafs/ticket/4037 + raise RuntimeError(f"Unsupported tcp connection handler: {handler}") + + async def create_storage_client( + self, + nurl: DecodedURL, + reactor: IReactorTime, + pool: Optional[HTTPConnectionPool] = None, + ) -> StorageClient: + """Create a new ``StorageClient`` for the given NURL.""" + assert nurl.fragment == "v=1" + assert nurl.scheme in ("pb", "pb+tor") + if pool is None: + pool = HTTPConnectionPool(reactor) + pool.maxPersistentPerHost = 10 + + certificate_hash = nurl.user.encode("ascii") + agent = await self._create_agent( + nurl, + reactor, + _StorageClientHTTPSPolicy(expected_spki_hash=certificate_hash), + pool, + ) + treq_client = HTTPClient(agent) + https_url = DecodedURL().replace(scheme="https", host=nurl.host, port=nurl.port) + swissnum = nurl.path[0].encode("ascii") + response_check = lambda _: None + if self.TEST_MODE_REGISTER_HTTP_POOL is not None: + response_check = response_is_not_html + + return StorageClient( + https_url, + swissnum, + treq_client, + pool, + reactor, + response_check, + ) + + +@define(hash=True) +class StorageClient(object): + """ + Low-level HTTP client that talks to the HTTP storage server. + + Create using a ``StorageClientFactory`` instance. + """ + + # The URL should be a HTTPS URL ("https://...") + _base_url: DecodedURL + _swissnum: bytes + _treq: Union[treq, StubTreq, HTTPClient] + _pool: HTTPConnectionPool + _clock: IReactorTime + # Are we running unit tests? 
+ _analyze_response: Callable[[IResponse], None] = lambda _: None + + def relative_url(self, path: str) -> DecodedURL: + """Get a URL relative to the base URL.""" + return self._base_url.click(path) + + def _get_headers(self, headers: Optional[Headers]) -> Headers: + """Return the basic headers to be used by default.""" + if headers is None: + headers = Headers() + headers.addRawHeader( + "Authorization", + swissnum_auth_header(self._swissnum), + ) + return headers + + @async_to_deferred + async def request( + self, + method: str, + url: DecodedURL, + lease_renew_secret: Optional[bytes] = None, + lease_cancel_secret: Optional[bytes] = None, + upload_secret: Optional[bytes] = None, + write_enabler_secret: Optional[bytes] = None, + headers: Optional[Headers] = None, + message_to_serialize: object = None, + timeout: float = 60, + **kwargs, + ) -> IResponse: + """ + Like ``treq.request()``, but with optional secrets that get translated + into corresponding HTTP headers. + + If ``message_to_serialize`` is set, it will be serialized (by default + with CBOR) and set as the request body. It should not be mutated + during execution of this function! + + Default timeout is 60 seconds. 
+ """ + with start_action( + action_type="allmydata:storage:http-client:request", + method=method, + url=url.to_text(), + timeout=timeout, + ) as ctx: + response = await self._request( + method, + url, + lease_renew_secret, + lease_cancel_secret, + upload_secret, + write_enabler_secret, + headers, + message_to_serialize, + timeout, + **kwargs, + ) + ctx.add_success_fields(response_code=response.code) + return response + + async def _request( + self, + method: str, + url: DecodedURL, + lease_renew_secret: Optional[bytes] = None, + lease_cancel_secret: Optional[bytes] = None, + upload_secret: Optional[bytes] = None, + write_enabler_secret: Optional[bytes] = None, + headers: Optional[Headers] = None, + message_to_serialize: object = None, + timeout: float = 60, + **kwargs, + ) -> IResponse: + """The implementation of request().""" + headers = self._get_headers(headers) + + # Add secrets: + for secret, value in [ + (Secrets.LEASE_RENEW, lease_renew_secret), + (Secrets.LEASE_CANCEL, lease_cancel_secret), + (Secrets.UPLOAD, upload_secret), + (Secrets.WRITE_ENABLER, write_enabler_secret), + ]: + if value is None: + continue + headers.addRawHeader( + "X-Tahoe-Authorization", + b"%s %s" % (secret.value.encode("ascii"), b64encode(value).strip()), + ) + + # Note we can accept CBOR: + headers.addRawHeader("Accept", CBOR_MIME_TYPE) + + # If there's a request message, serialize it and set the Content-Type + # header: + if message_to_serialize is not None: + if "data" in kwargs: + raise TypeError( + "Can't use both `message_to_serialize` and `data` " + "as keyword arguments at the same time" + ) + kwargs["data"] = await defer_to_thread(dumps, message_to_serialize) + headers.addRawHeader("Content-Type", CBOR_MIME_TYPE) + + response = await self._treq.request( + method, url, headers=headers, timeout=timeout, **kwargs + ) + self._analyze_response(response) + + return response + + async def decode_cbor(self, response: IResponse, schema: Schema) -> object: + """Given HTTP response, 
return decoded CBOR body.""" + with start_action(action_type="allmydata:storage:http-client:decode-cbor"): + if response.code > 199 and response.code < 300: + content_type = get_content_type(response.headers) + if content_type == CBOR_MIME_TYPE: + f = await limited_content(response, self._clock) + data = f.read() + + def validate_and_decode(): + return schema.validate_cbor(data, True) + + return await defer_to_thread(validate_and_decode) + else: + raise ClientException( + -1, + "Server didn't send CBOR, content type is {}".format( + content_type + ), + ) + else: + data = ( + await limited_content(response, self._clock, max_length=10_000) + ).read() + raise ClientException(response.code, response.phrase, data) + + def shutdown(self) -> Deferred[object]: + """Shutdown any connections.""" + return self._pool.closeCachedConnections() + + +@define(hash=True) +class StorageClientGeneral(object): + """ + High-level HTTP APIs that aren't immutable- or mutable-specific. + """ + + _client: StorageClient + + @async_to_deferred + async def get_version(self) -> VersionMessage: + """ + Return the version metadata for the server. + """ + with start_action( + action_type="allmydata:storage:http-client:get-version", + ): + return await self._get_version() + + async def _get_version(self) -> VersionMessage: + """Implementation of get_version().""" + url = self._client.relative_url("/storage/v1/version") + response = await self._client.request("GET", url) + decoded_response = cast( + Dict[bytes, object], + await self._client.decode_cbor(response, _SCHEMAS["get_version"]), + ) + # Add some features we know are true because the HTTP API + # specification requires them and because other parts of the storage + # client implementation assumes they will be present. 
+ cast( + Dict[bytes, object], + decoded_response[b"http://allmydata.org/tahoe/protocols/storage/v1"], + ).update( + { + b"tolerates-immutable-read-overrun": True, + b"delete-mutable-shares-with-zero-length-writev": True, + b"fills-holes-with-zero-bytes": True, + b"prevents-read-past-end-of-share-data": True, + } + ) + return decoded_response + + @async_to_deferred + async def add_or_renew_lease( + self, storage_index: bytes, renew_secret: bytes, cancel_secret: bytes + ) -> None: + """ + Add or renew a lease. + + If the renewal secret matches an existing lease, it is renewed. + Otherwise a new lease is added. + """ + with start_action( + action_type="allmydata:storage:http-client:add-or-renew-lease", + storage_index=si_to_human_readable(storage_index), + ): + return await self._add_or_renew_lease( + storage_index, renew_secret, cancel_secret + ) + + async def _add_or_renew_lease( + self, storage_index: bytes, renew_secret: bytes, cancel_secret: bytes + ) -> None: + url = self._client.relative_url( + "/storage/v1/lease/{}".format(_encode_si(storage_index)) + ) + response = await self._client.request( + "PUT", + url, + lease_renew_secret=renew_secret, + lease_cancel_secret=cancel_secret, + ) + + if response.code == http.NO_CONTENT: + return + else: + raise ClientException(response.code) + + +@define +class UploadProgress(object): + """ + Progress of immutable upload, per the server. + """ + + # True when upload has finished. + finished: bool + # Remaining ranges to upload. + required: RangeMap + + +@async_to_deferred +async def read_share_chunk( + client: StorageClient, + share_type: str, + storage_index: bytes, + share_number: int, + offset: int, + length: int, +) -> bytes: + """ + Download a chunk of data from a share. 
+ + TODO https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3857 Failed downloads + should be transparently retried and redownloaded by the implementation a + few times so that if a failure percolates up, the caller can assume the + failure isn't a short-term blip. + + NOTE: the underlying HTTP protocol is somewhat more flexible than this API, + insofar as it doesn't always require a range. In practice a range is + always provided by the current callers. + """ + url = client.relative_url( + "/storage/v1/{}/{}/{}".format( + share_type, _encode_si(storage_index), share_number + ) + ) + # The default 60 second timeout is for getting the response, so it doesn't + # include the time it takes to download the body... so we will deal + # with that later, via limited_content(). + response = await client.request( + "GET", + url, + headers=Headers( + # Ranges in HTTP are _inclusive_, Python's convention is exclusive, + # but Range constructor does the conversion for us. + {"range": [Range("bytes", [(offset, offset + length)]).to_header()]} + ), + unbuffered=True, # Don't buffer the response in memory. + ) + + if response.code == http.NO_CONTENT: + return b"" + + content_type = get_content_type(response.headers) + if content_type != "application/octet-stream": + raise ValueError( + f"Content-type was wrong: {content_type}, should be application/octet-stream" + ) + + if response.code == http.PARTIAL_CONTENT: + content_range = parse_content_range_header( + response.headers.getRawHeaders("content-range")[0] or "" + ) + if ( + content_range is None + or content_range.stop is None + or content_range.start is None + ): + raise ValueError( + "Content-Range was missing, invalid, or in format we don't support" + ) + supposed_length = content_range.stop - content_range.start + if supposed_length > length: + raise ValueError("Server sent more than we asked for?!") + # It might also send less than we asked for. That's (probably) OK, e.g. + # if we went past the end of the file. 
+ body = await limited_content(response, client._clock, supposed_length) + body.seek(0, SEEK_END) + actual_length = body.tell() + if actual_length != supposed_length: + # Most likely a mutable that got changed out from under us, but + # conceivably could be a bug... + raise ValueError( + f"Length of response sent from server ({actual_length}) " + + f"didn't match Content-Range header ({supposed_length})" + ) + body.seek(0) + return body.read() + else: + # Technically HTTP allows sending an OK with full body under these + # circumstances, but the server is not designed to do that so we ignore + # that possibility for now... + raise ClientException(response.code) + + +@async_to_deferred +async def advise_corrupt_share( + client: StorageClient, + share_type: str, + storage_index: bytes, + share_number: int, + reason: str, +) -> None: + assert isinstance(reason, str) + url = client.relative_url( + "/storage/v1/{}/{}/{}/corrupt".format( + share_type, _encode_si(storage_index), share_number + ) + ) + message = {"reason": reason} + response = await client.request("POST", url, message_to_serialize=message) + if response.code == http.OK: + return + else: + raise ClientException( + response.code, + ) + + +@define(hash=True) +class StorageClientImmutables(object): + """ + APIs for interacting with immutables. + """ + + _client: StorageClient + + @async_to_deferred + async def create( + self, + storage_index: bytes, + share_numbers: set[int], + allocated_size: int, + upload_secret: bytes, + lease_renew_secret: bytes, + lease_cancel_secret: bytes, + ) -> ImmutableCreateResult: + """ + Create a new storage index for an immutable. + + TODO https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3857 retry + internally on failure, to ensure the operation fully succeeded. If + sufficient number of failures occurred, the result may fire with an + error, but there's no expectation that user code needs to have a + recovery codepath; it will most likely just report an error to the + user. 
+ + Result fires when creating the storage index succeeded, if creating the + storage index failed the result will fire with an exception. + """ + with start_action( + action_type="allmydata:storage:http-client:immutable:create", + storage_index=si_to_human_readable(storage_index), + share_numbers=share_numbers, + allocated_size=allocated_size, + ) as ctx: + result = await self._create( + storage_index, + share_numbers, + allocated_size, + upload_secret, + lease_renew_secret, + lease_cancel_secret, + ) + ctx.add_success_fields( + already_have=result.already_have, allocated=result.allocated + ) + return result + + async def _create( + self, + storage_index: bytes, + share_numbers: set[int], + allocated_size: int, + upload_secret: bytes, + lease_renew_secret: bytes, + lease_cancel_secret: bytes, + ) -> ImmutableCreateResult: + """Implementation of create().""" + url = self._client.relative_url( + "/storage/v1/immutable/" + _encode_si(storage_index) + ) + message = {"share-numbers": share_numbers, "allocated-size": allocated_size} + + response = await self._client.request( + "POST", + url, + lease_renew_secret=lease_renew_secret, + lease_cancel_secret=lease_cancel_secret, + upload_secret=upload_secret, + message_to_serialize=message, + ) + decoded_response = cast( + Mapping[str, Set[int]], + await self._client.decode_cbor(response, _SCHEMAS["allocate_buckets"]), + ) + return ImmutableCreateResult( + already_have=decoded_response["already-have"], + allocated=decoded_response["allocated"], + ) + + @async_to_deferred + async def abort_upload( + self, storage_index: bytes, share_number: int, upload_secret: bytes + ) -> None: + """Abort the upload.""" + with start_action( + action_type="allmydata:storage:http-client:immutable:abort-upload", + storage_index=si_to_human_readable(storage_index), + share_number=share_number, + ): + return await self._abort_upload(storage_index, share_number, upload_secret) + + async def _abort_upload( + self, storage_index: bytes, 
share_number: int, upload_secret: bytes + ) -> None: + """Implementation of ``abort_upload()``.""" + url = self._client.relative_url( + "/storage/v1/immutable/{}/{}/abort".format( + _encode_si(storage_index), share_number + ) + ) + response = await self._client.request( + "PUT", + url, + upload_secret=upload_secret, + ) + + if response.code == http.OK: + return + else: + raise ClientException( + response.code, + ) + + @async_to_deferred + async def write_share_chunk( + self, + storage_index: bytes, + share_number: int, + upload_secret: bytes, + offset: int, + data: bytes, + ) -> UploadProgress: + """ + Upload a chunk of data for a specific share. + + TODO https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3857 The + implementation should retry failed uploads transparently a number of + times, so that if a failure percolates up, the caller can assume the + failure isn't a short-term blip. + + Result fires when the upload succeeded, with a boolean indicating + whether the _complete_ share (i.e. all chunks, not just this one) has + been uploaded. 
+ """ + with start_action( + action_type="allmydata:storage:http-client:immutable:write-share-chunk", + storage_index=si_to_human_readable(storage_index), + share_number=share_number, + offset=offset, + data_len=len(data), + ) as ctx: + result = await self._write_share_chunk( + storage_index, share_number, upload_secret, offset, data + ) + ctx.add_success_fields(finished=result.finished) + return result + + async def _write_share_chunk( + self, + storage_index: bytes, + share_number: int, + upload_secret: bytes, + offset: int, + data: bytes, + ) -> UploadProgress: + """Implementation of ``write_share_chunk()``.""" + url = self._client.relative_url( + "/storage/v1/immutable/{}/{}".format( + _encode_si(storage_index), share_number + ) + ) + response = await self._client.request( + "PATCH", + url, + upload_secret=upload_secret, + data=data, + headers=Headers( + { + "content-range": [ + ContentRange("bytes", offset, offset + len(data)).to_header() + ] + } + ), + ) + + if response.code == http.OK: + # Upload is still unfinished. + finished = False + elif response.code == http.CREATED: + # Upload is done! + finished = True + else: + raise ClientException( + response.code, + ) + body = cast( + Mapping[str, Sequence[Mapping[str, int]]], + await self._client.decode_cbor( + response, _SCHEMAS["immutable_write_share_chunk"] + ), + ) + remaining = RangeMap() + for chunk in body["required"]: + remaining.set(True, chunk["begin"], chunk["end"]) + return UploadProgress(finished=finished, required=remaining) + + @async_to_deferred + async def read_share_chunk( + self, storage_index: bytes, share_number: int, offset: int, length: int + ) -> bytes: + """ + Download a chunk of data from a share. 
+ """ + with start_action( + action_type="allmydata:storage:http-client:immutable:read-share-chunk", + storage_index=si_to_human_readable(storage_index), + share_number=share_number, + offset=offset, + length=length, + ) as ctx: + result = await read_share_chunk( + self._client, "immutable", storage_index, share_number, offset, length + ) + ctx.add_success_fields(data_len=len(result)) + return result + + @async_to_deferred + async def list_shares(self, storage_index: bytes) -> Set[int]: + """ + Return the set of shares for a given storage index. + """ + with start_action( + action_type="allmydata:storage:http-client:immutable:list-shares", + storage_index=si_to_human_readable(storage_index), + ) as ctx: + result = await self._list_shares(storage_index) + ctx.add_success_fields(shares=result) + return result + + async def _list_shares(self, storage_index: bytes) -> Set[int]: + """Implementation of ``list_shares()``.""" + url = self._client.relative_url( + "/storage/v1/immutable/{}/shares".format(_encode_si(storage_index)) + ) + response = await self._client.request( + "GET", + url, + ) + if response.code == http.OK: + return cast( + Set[int], + await self._client.decode_cbor(response, _SCHEMAS["list_shares"]), + ) + else: + raise ClientException(response.code) + + @async_to_deferred + async def advise_corrupt_share( + self, + storage_index: bytes, + share_number: int, + reason: str, + ) -> None: + """Indicate a share has been corrupted, with a human-readable message.""" + with start_action( + action_type="allmydata:storage:http-client:immutable:advise-corrupt-share", + storage_index=si_to_human_readable(storage_index), + share_number=share_number, + reason=reason, + ): + await advise_corrupt_share( + self._client, "immutable", storage_index, share_number, reason + ) + + +@frozen +class WriteVector: + """Data to write to a chunk.""" + + offset: int + data: bytes + + +@frozen +class TestVector: + """Checks to make on a chunk before writing to it.""" + + offset: int + 
size: int + specimen: bytes + + +@frozen +class ReadVector: + """ + Reads to do on chunks, as part of a read/test/write operation. + """ + + offset: int + size: int + + +@frozen +class TestWriteVectors: + """Test and write vectors for a specific share.""" + + test_vectors: Sequence[TestVector] = field(factory=list) + write_vectors: Sequence[WriteVector] = field(factory=list) + new_length: Optional[int] = None + + def asdict(self) -> dict: + """Return dictionary suitable for sending over CBOR.""" + d = asdict(self) + d["test"] = d.pop("test_vectors") + d["write"] = d.pop("write_vectors") + d["new-length"] = d.pop("new_length") + return d + + +@frozen +class ReadTestWriteResult: + """Result of sending read-test-write vectors.""" + + success: bool + # Map share numbers to reads corresponding to the request's list of + # ReadVectors: + reads: Mapping[int, Sequence[bytes]] + + +# Result type for mutable read/test/write HTTP response. Can't just use +# dict[int,list[bytes]] because on Python 3.8 that will error out. +MUTABLE_RTW = TypedDict( + "MUTABLE_RTW", {"success": bool, "data": Mapping[int, Sequence[bytes]]} +) + + +@frozen +class StorageClientMutables: + """ + APIs for interacting with mutables. + """ + + _client: StorageClient + + @async_to_deferred + async def read_test_write_chunks( + self, + storage_index: bytes, + write_enabler_secret: bytes, + lease_renew_secret: bytes, + lease_cancel_secret: bytes, + testwrite_vectors: dict[int, TestWriteVectors], + read_vector: list[ReadVector], + ) -> ReadTestWriteResult: + """ + Read, test, and possibly write chunks to a particular mutable storage + index. + + Reads are done before writes. + + Given a mapping between share numbers and test/write vectors, the tests + are done and if they are valid the writes are done. 
+ """ + with start_action( + action_type="allmydata:storage:http-client:mutable:read-test-write", + storage_index=si_to_human_readable(storage_index), + ): + return await self._read_test_write_chunks( + storage_index, + write_enabler_secret, + lease_renew_secret, + lease_cancel_secret, + testwrite_vectors, + read_vector, + ) + + async def _read_test_write_chunks( + self, + storage_index: bytes, + write_enabler_secret: bytes, + lease_renew_secret: bytes, + lease_cancel_secret: bytes, + testwrite_vectors: dict[int, TestWriteVectors], + read_vector: list[ReadVector], + ) -> ReadTestWriteResult: + """Implementation of ``read_test_write_chunks()``.""" + url = self._client.relative_url( + "/storage/v1/mutable/{}/read-test-write".format(_encode_si(storage_index)) + ) + message = { + "test-write-vectors": { + share_number: twv.asdict() + for (share_number, twv) in testwrite_vectors.items() + }, + "read-vector": [asdict(r) for r in read_vector], + } + response = await self._client.request( + "POST", + url, + write_enabler_secret=write_enabler_secret, + lease_renew_secret=lease_renew_secret, + lease_cancel_secret=lease_cancel_secret, + message_to_serialize=message, + ) + if response.code == http.OK: + result = cast( + MUTABLE_RTW, + await self._client.decode_cbor( + response, _SCHEMAS["mutable_read_test_write"] + ), + ) + return ReadTestWriteResult(success=result["success"], reads=result["data"]) + else: + raise ClientException(response.code, (await response.content())) + + @async_to_deferred + async def read_share_chunk( + self, + storage_index: bytes, + share_number: int, + offset: int, + length: int, + ) -> bytes: + """ + Download a chunk of data from a share. 
+ """ + with start_action( + action_type="allmydata:storage:http-client:mutable:read-share-chunk", + storage_index=si_to_human_readable(storage_index), + share_number=share_number, + offset=offset, + length=length, + ) as ctx: + result = await read_share_chunk( + self._client, "mutable", storage_index, share_number, offset, length + ) + ctx.add_success_fields(data_len=len(result)) + return result + + @async_to_deferred + async def list_shares(self, storage_index: bytes) -> Set[int]: + """ + List the share numbers for a given storage index. + """ + with start_action( + action_type="allmydata:storage:http-client:mutable:list-shares", + storage_index=si_to_human_readable(storage_index), + ) as ctx: + result = await self._list_shares(storage_index) + ctx.add_success_fields(shares=result) + return result + + async def _list_shares(self, storage_index: bytes) -> Set[int]: + """Implementation of ``list_shares()``.""" + url = self._client.relative_url( + "/storage/v1/mutable/{}/shares".format(_encode_si(storage_index)) + ) + response = await self._client.request("GET", url) + if response.code == http.OK: + return cast( + Set[int], + await self._client.decode_cbor( + response, + _SCHEMAS["mutable_list_shares"], + ), + ) + else: + raise ClientException(response.code) + + @async_to_deferred + async def advise_corrupt_share( + self, + storage_index: bytes, + share_number: int, + reason: str, + ) -> None: + """Indicate a share has been corrupted, with a human-readable message.""" + with start_action( + action_type="allmydata:storage:http-client:mutable:advise-corrupt-share", + storage_index=si_to_human_readable(storage_index), + share_number=share_number, + reason=reason, + ): + await advise_corrupt_share( + self._client, "mutable", storage_index, share_number, reason + ) diff --git a/src/allmydata/storage/http_common.py b/src/allmydata/storage/http_common.py new file mode 100644 index 000000000..d59cab541 --- /dev/null +++ b/src/allmydata/storage/http_common.py @@ -0,0 +1,74 
@@ +""" +Common HTTP infrastructure for the storage server. +""" + +from enum import Enum +from base64 import urlsafe_b64encode, b64encode +from hashlib import sha256 +from typing import Optional + +from cryptography.x509 import Certificate +from cryptography.hazmat.primitives.serialization import Encoding, PublicFormat + +from werkzeug.http import parse_options_header +from twisted.web.http_headers import Headers +from twisted.web.iweb import IResponse + +CBOR_MIME_TYPE = "application/cbor" + + +def get_content_type(headers: Headers) -> Optional[str]: + """ + Get the content type from the HTTP ``Content-Type`` header. + + Returns ``None`` if no content-type was set. + """ + values = headers.getRawHeaders("content-type", [None]) or [None] + content_type = parse_options_header(values[0])[0] or None + return content_type + + +def response_is_not_html(response: IResponse) -> None: + """ + During tests, this is registered so we can ensure the web server + doesn't give us text/html. + + HTML is never correct except in 404, but it's the default for + Twisted's web server so we assert nothing unexpected happened. + """ + if response.code != 404: + assert get_content_type(response.headers) != "text/html" + + +def swissnum_auth_header(swissnum: bytes) -> bytes: + """Return value for ``Authorization`` header.""" + return b"Tahoe-LAFS " + b64encode(swissnum).strip() + + +class Secrets(Enum): + """Different kinds of secrets the client may send.""" + + LEASE_RENEW = "lease-renew-secret" + LEASE_CANCEL = "lease-cancel-secret" + UPLOAD = "upload-secret" + WRITE_ENABLER = "write-enabler" + + +def get_spki(certificate: Certificate) -> bytes: + """ + Get the bytes making up the DER encoded representation of the + `SubjectPublicKeyInfo` (RFC 7469) for the given certificate. 
+ """ + return certificate.public_key().public_bytes( + Encoding.DER, PublicFormat.SubjectPublicKeyInfo + ) + +def get_spki_hash(certificate: Certificate) -> bytes: + """ + Get the public key hash, as per RFC 7469: base64 of sha256 of the public + key encoded in DER + Subject Public Key Info format. + + We use the URL-safe base64 variant, since this is typically found in NURLs. + """ + spki_bytes = get_spki(certificate) + return urlsafe_b64encode(sha256(spki_bytes).digest()).strip().rstrip(b"=") diff --git a/src/allmydata/storage/http_server.py b/src/allmydata/storage/http_server.py new file mode 100644 index 000000000..2e1a6a413 --- /dev/null +++ b/src/allmydata/storage/http_server.py @@ -0,0 +1,1171 @@ +""" +HTTP server for storage. +""" + +from __future__ import annotations + +from typing import ( + Any, + Callable, + Union, + cast, + Optional, + TypeVar, + Sequence, + Protocol, + Dict, +) +from typing_extensions import ParamSpec, Concatenate +from functools import wraps +from base64 import b64decode +import binascii +from tempfile import TemporaryFile +from os import SEEK_END, SEEK_SET +import mmap + +from eliot import start_action +from cryptography.x509 import Certificate as CryptoCertificate +from zope.interface import implementer +from klein import Klein, KleinRenderable +from klein.resource import KleinResource +from twisted.web import http +from twisted.internet.interfaces import ( + IListeningPort, + IStreamServerEndpoint, + IPullProducer, + IProtocolFactory, +) +from twisted.internet.address import IPv4Address, IPv6Address +from twisted.internet.defer import Deferred +from twisted.internet.ssl import CertificateOptions, Certificate, PrivateCertificate +from twisted.internet.interfaces import IReactorFromThreads +from twisted.web.server import Site, Request +from twisted.web.iweb import IRequest +from twisted.protocols.tls import TLSMemoryBIOFactory +from twisted.python.filepath import FilePath +from twisted.python.failure import Failure + +from attrs 
import define, field, Factory +from werkzeug.http import ( + parse_range_header, + parse_content_range_header, + parse_accept_header, +) +from werkzeug.routing import BaseConverter, ValidationError +from werkzeug.datastructures import ContentRange +from hyperlink import DecodedURL +from cryptography.x509 import load_pem_x509_certificate + + +from pycddl import Schema, ValidationError as CDDLValidationError +from .server import StorageServer +from .http_common import ( + swissnum_auth_header, + Secrets, + get_content_type, + CBOR_MIME_TYPE, + get_spki_hash, +) + +from .common import si_a2b +from .immutable import BucketWriter, ConflictingWriteError +from ..util.hashutil import timing_safe_compare +from ..util.base32 import rfc3548_alphabet +from ..util.deferredutil import async_to_deferred +from ..util.cputhreadpool import defer_to_thread +from ..util import cbor +from ..interfaces import BadWriteEnablerError + + +class ClientSecretsException(Exception): + """The client did not send the appropriate secrets.""" + + +def _extract_secrets( + header_values: Sequence[str], required_secrets: set[Secrets] +) -> dict[Secrets, bytes]: + """ + Given list of values of ``X-Tahoe-Authorization`` headers, and required + secrets, return dictionary mapping secrets to decoded values. + + If too few secrets were given, or too many, a ``ClientSecretsException`` is + raised; its text is sent in the HTTP response. 
+ """ + string_key_to_enum = {e.value: e for e in Secrets} + result = {} + try: + for header_value in header_values: + string_key, string_value = header_value.strip().split(" ", 1) + key = string_key_to_enum[string_key] + value = b64decode(string_value) + if value == b"": + raise ClientSecretsException( + "Failed to decode secret {}".format(string_key) + ) + if key in (Secrets.LEASE_CANCEL, Secrets.LEASE_RENEW) and len(value) != 32: + raise ClientSecretsException("Lease secrets must be 32 bytes long") + result[key] = value + except (ValueError, KeyError): + raise ClientSecretsException("Bad header value(s): {}".format(header_values)) + if result.keys() != required_secrets: + raise ClientSecretsException( + "Expected {} in X-Tahoe-Authorization headers, got {}".format( + [r.value for r in required_secrets], list(result.keys()) + ) + ) + return result + + +class BaseApp(Protocol): + """Protocol for ``HTTPServer`` and testing equivalent.""" + + _swissnum: bytes + + +P = ParamSpec("P") +T = TypeVar("T") +SecretsDict = Dict[Secrets, bytes] +App = TypeVar("App", bound=BaseApp) + + +def _authorization_decorator( + required_secrets: set[Secrets], +) -> Callable[ + [Callable[Concatenate[App, Request, SecretsDict, P], T]], + Callable[Concatenate[App, Request, P], T], +]: + """ + 1. Check the ``Authorization`` header matches server swissnum. + 2. Extract ``X-Tahoe-Authorization`` headers and pass them in. + 3. Log the request and response. + """ + + def decorator( + f: Callable[Concatenate[App, Request, SecretsDict, P], T] + ) -> Callable[Concatenate[App, Request, P], T]: + @wraps(f) + def route( + self: App, + request: Request, + *args: P.args, + **kwargs: P.kwargs, + ) -> T: + # Don't set text/html content type by default. 
+ # None is actually supported, see https://github.com/twisted/twisted/issues/11902 + request.defaultContentType = None # type: ignore[assignment] + + with start_action( + action_type="allmydata:storage:http-server:handle-request", + method=request.method, + path=request.path, + ) as ctx: + try: + # Check Authorization header: + try: + auth_header = request.requestHeaders.getRawHeaders( + "Authorization", [""] + )[0].encode("utf-8") + except UnicodeError: + raise _HTTPError(http.BAD_REQUEST, "Bad Authorization header") + if not timing_safe_compare( + auth_header, + swissnum_auth_header(self._swissnum), + ): + raise _HTTPError( + http.UNAUTHORIZED, "Wrong Authorization header" + ) + + # Check secrets: + authorization = request.requestHeaders.getRawHeaders( + "X-Tahoe-Authorization", [] + ) + try: + secrets = _extract_secrets(authorization, required_secrets) + except ClientSecretsException as e: + raise _HTTPError(http.BAD_REQUEST, str(e)) + + # Run the business logic: + result = f(self, request, secrets, *args, **kwargs) + except _HTTPError as e: + # This isn't an error necessarily for logging purposes, + # it's an implementation detail, an easier way to set + # response codes. + ctx.add_success_fields(response_code=e.code) + ctx.finish() + raise + else: + ctx.add_success_fields(response_code=request.code) + return result + + return route + + return decorator + + +def _authorized_route( + klein_app: Klein, + required_secrets: set[Secrets], + url: str, + *route_args: Any, + branch: bool = False, + **route_kwargs: Any, +) -> Callable[ + [ + Callable[ + Concatenate[App, Request, SecretsDict, P], + KleinRenderable, + ] + ], + Callable[..., KleinRenderable], +]: + """ + Like Klein's @route, but with additional support for checking the + ``Authorization`` header as well as ``X-Tahoe-Authorization`` headers. The + latter will get passed in as second argument to wrapped functions, a + dictionary mapping a ``Secret`` value to the uploaded secret. 
+ + :param required_secrets: Set of required ``Secret`` types. + """ + + def decorator( + f: Callable[ + Concatenate[App, Request, SecretsDict, P], + KleinRenderable, + ] + ) -> Callable[..., KleinRenderable]: + @klein_app.route(url, *route_args, branch=branch, **route_kwargs) # type: ignore[arg-type] + @_authorization_decorator(required_secrets) + @wraps(f) + def handle_route( + app: App, + request: Request, + secrets: SecretsDict, + *args: P.args, + **kwargs: P.kwargs, + ) -> KleinRenderable: + return f(app, request, secrets, *args, **kwargs) + + return handle_route + + return decorator + + +@define +class StorageIndexUploads(object): + """ + In-progress upload to storage index. + """ + + # Map share number to BucketWriter + shares: dict[int, BucketWriter] = Factory(dict) + + # Map share number to the upload secret (different shares might have + # different upload secrets). + upload_secrets: dict[int, bytes] = Factory(dict) + + +@define +class UploadsInProgress(object): + """ + Keep track of uploads for storage indexes. 
+ """ + + # Map storage index to corresponding uploads-in-progress + _uploads: dict[bytes, StorageIndexUploads] = Factory(dict) + + # Map BucketWriter to (storage index, share number) + _bucketwriters: dict[BucketWriter, tuple[bytes, int]] = Factory(dict) + + def add_write_bucket( + self, + storage_index: bytes, + share_number: int, + upload_secret: bytes, + bucket: BucketWriter, + ): + """Add a new ``BucketWriter`` to be tracked.""" + si_uploads = self._uploads.setdefault(storage_index, StorageIndexUploads()) + si_uploads.shares[share_number] = bucket + si_uploads.upload_secrets[share_number] = upload_secret + self._bucketwriters[bucket] = (storage_index, share_number) + + def get_write_bucket( + self, storage_index: bytes, share_number: int, upload_secret: bytes + ) -> BucketWriter: + """Get the given in-progress immutable share upload.""" + self.validate_upload_secret(storage_index, share_number, upload_secret) + try: + return self._uploads[storage_index].shares[share_number] + except (KeyError, IndexError): + raise _HTTPError(http.NOT_FOUND) + + def remove_write_bucket(self, bucket: BucketWriter) -> None: + """Stop tracking the given ``BucketWriter``.""" + try: + storage_index, share_number = self._bucketwriters.pop(bucket) + except KeyError: + # This is probably a BucketWriter created by Foolscap, so just + # ignore it. + return + uploads_index = self._uploads[storage_index] + uploads_index.shares.pop(share_number) + uploads_index.upload_secrets.pop(share_number) + if not uploads_index.shares: + self._uploads.pop(storage_index) + + def validate_upload_secret( + self, storage_index: bytes, share_number: int, upload_secret: bytes + ) -> None: + """ + Raise an unauthorized-HTTP-response exception if the given + storage_index+share_number have a different upload secret than the + given one. + + If the given upload doesn't exist at all, nothing happens. 
+ """ + if storage_index in self._uploads: + in_progress = self._uploads[storage_index] + # For pre-existing upload, make sure password matches. + if share_number in in_progress.upload_secrets and not timing_safe_compare( + in_progress.upload_secrets[share_number], upload_secret + ): + raise _HTTPError(http.UNAUTHORIZED) + + +class StorageIndexConverter(BaseConverter): + """Parser/validator for storage index URL path segments.""" + + regex = "[" + str(rfc3548_alphabet, "ascii") + "]{26}" + + def to_python(self, value: str) -> bytes: + try: + return si_a2b(value.encode("ascii")) + except (AssertionError, binascii.Error, ValueError): + raise ValidationError("Invalid storage index") + + +class _HTTPError(Exception): + """ + Raise from ``HTTPServer`` endpoint to return the given HTTP response code. + """ + + def __init__(self, code: int, body: Optional[str] = None): + Exception.__init__(self, (code, body)) + self.code = code + self.body = body + + +# CDDL schemas. +# +# Tags are of the form #6.nnn, where the number is documented at +# https://www.iana.org/assignments/cbor-tags/cbor-tags.xhtml. Notably, #6.258 +# indicates a set. +# +# Somewhat arbitrary limits are set to reduce e.g. number of shares, number of +# vectors, etc.. These may need to be iterated on in future revisions of the +# code. +_SCHEMAS = { + "allocate_buckets": Schema( + """ + request = { + share-numbers: #6.258([0*256 uint]) + allocated-size: uint + } + """ + ), + "advise_corrupt_share": Schema( + """ + request = { + reason: tstr .size (1..32765) + } + """ + ), + "mutable_read_test_write": Schema( + """ + request = { + "test-write-vectors": { + 0*256 share_number : { + "test": [0*30 {"offset": uint, "size": uint, "specimen": bstr}] + "write": [* {"offset": uint, "data": bstr}] + "new-length": uint / null + } + } + "read-vector": [0*30 {"offset": uint, "size": uint}] + } + share_number = uint + """ + ), +} + + +# Callable that takes offset and length, returns the data at that range. 
+ReadData = Callable[[int, int], bytes] + + +@implementer(IPullProducer) +@define +class _ReadAllProducer: + """ + Producer that calls a read function repeatedly to read all the data, and + writes to a request. + """ + + request: Request + read_data: ReadData + result: Deferred = Factory(Deferred) + start: int = field(default=0) + + @classmethod + def produce_to(cls, request: Request, read_data: ReadData) -> Deferred[bytes]: + """ + Create and register the producer, returning ``Deferred`` that should be + returned from a HTTP server endpoint. + """ + producer = cls(request, read_data) + request.registerProducer(producer, False) + return producer.result + + def resumeProducing(self) -> None: + data = self.read_data(self.start, 65536) + if not data: + self.request.unregisterProducer() + d = self.result + del self.result + d.callback(b"") + return + self.request.write(data) + self.start += len(data) + + def pauseProducing(self) -> None: + pass + + def stopProducing(self) -> None: + pass + + +@implementer(IPullProducer) +@define +class _ReadRangeProducer: + """ + Producer that calls a read function to read a range of data, and writes to + a request. + """ + + request: Optional[Request] + read_data: ReadData + result: Optional[Deferred[bytes]] + start: int + remaining: int + + def resumeProducing(self) -> None: + if self.result is None or self.request is None: + return + + to_read = min(self.remaining, 65536) + data = self.read_data(self.start, to_read) + assert len(data) <= to_read + + if not data and self.remaining > 0: + d, self.result = self.result, None + d.errback( + ValueError( + f"Should be {self.remaining} bytes left, but we got an empty read" + ) + ) + self.stopProducing() + return + + if len(data) > self.remaining: + d, self.result = self.result, None + d.errback( + ValueError( + f"Should be {self.remaining} bytes left, but we got more than that ({len(data)})!" 
+ ) + ) + self.stopProducing() + return + + self.start += len(data) + self.remaining -= len(data) + assert self.remaining >= 0 + + self.request.write(data) + + if self.remaining == 0: + self.stopProducing() + + def pauseProducing(self) -> None: + pass + + def stopProducing(self) -> None: + if self.request is not None: + self.request.unregisterProducer() + self.request = None + if self.result is not None: + d = self.result + self.result = None + d.callback(b"") + + +def read_range( + request: Request, read_data: ReadData, share_length: int +) -> Union[Deferred[bytes], bytes]: + """ + Read an optional ``Range`` header, reads data appropriately via the given + callable, writes the data to the request. + + Only parses a subset of ``Range`` headers that we support: must be set, + bytes only, only a single range, the end must be explicitly specified. + Raises a ``_HTTPError(http.REQUESTED_RANGE_NOT_SATISFIABLE)`` if parsing is + not possible or the header isn't set. + + Takes a function that will do the actual reading given the start offset and + a length to read. + + The resulting data is written to the request. + """ + + def read_data_with_error_handling(offset: int, length: int) -> bytes: + try: + return read_data(offset, length) + except _HTTPError as e: + request.setResponseCode(e.code) + # Empty read means we're done. 
+            return b""
+
+    if request.getHeader("range") is None:
+        return _ReadAllProducer.produce_to(request, read_data_with_error_handling)
+
+    range_header = parse_range_header(request.getHeader("range"))
+    if (
+        range_header is None  # failed to parse
+        or range_header.units != "bytes"
+        or len(range_header.ranges) > 1  # more than one range
+        or range_header.ranges[0][1] is None  # range without end
+    ):
+        raise _HTTPError(http.REQUESTED_RANGE_NOT_SATISFIABLE)
+
+    offset, end = range_header.ranges[0]
+    assert end is not None  # should've exited in block above this if so
+
+    # If we're being asked to read beyond the length of the share, just read
+    # less:
+    end = min(end, share_length)
+    if offset >= end:
+        # Basically we'd need to return an empty body. However, the
+        # Content-Range header can't actually represent empty lengths... so
+        # (mis)use 204 response code to indicate that.
+        raise _HTTPError(http.NO_CONTENT)
+
+    request.setResponseCode(http.PARTIAL_CONTENT)
+
+    # Actual conversion from Python's exclusive ranges to inclusive ranges is
+    # handled by werkzeug.
+ request.setHeader( + "content-range", + ContentRange("bytes", offset, end).to_header(), + ) + + d: Deferred[bytes] = Deferred() + request.registerProducer( + _ReadRangeProducer( + request, read_data_with_error_handling, d, offset, end - offset + ), + False, + ) + return d + + +def _add_error_handling(app: Klein) -> None: + """Add exception handlers to a Klein app.""" + + @app.handle_errors(_HTTPError) + def _http_error(self: Any, request: IRequest, failure: Failure) -> KleinRenderable: + """Handle ``_HTTPError`` exceptions.""" + assert isinstance(failure.value, _HTTPError) + request.setResponseCode(failure.value.code) + if failure.value.body is not None: + return failure.value.body + else: + return b"" + + @app.handle_errors(CDDLValidationError) + def _cddl_validation_error( + self: Any, request: IRequest, failure: Failure + ) -> KleinRenderable: + """Handle CDDL validation errors.""" + request.setResponseCode(http.BAD_REQUEST) + return str(failure.value).encode("utf-8") + + +async def read_encoded( + reactor, request, schema: Schema, max_size: int = 1024 * 1024 +) -> Any: + """ + Read encoded request body data, decoding it with CBOR by default. + + Somewhat arbitrarily, limit body size to 1MiB by default. + """ + content_type = get_content_type(request.requestHeaders) + if content_type is None: + content_type = CBOR_MIME_TYPE + if content_type != CBOR_MIME_TYPE: + raise _HTTPError(http.UNSUPPORTED_MEDIA_TYPE) + + # Make sure it's not too large: + request.content.seek(0, SEEK_END) + size = request.content.tell() + if size > max_size: + raise _HTTPError(http.REQUEST_ENTITY_TOO_LARGE) + request.content.seek(0, SEEK_SET) + + # We don't want to load the whole message into memory, cause it might + # be quite large. The CDDL validator takes a read-only bytes-like + # thing. Luckily, for large request bodies twisted.web will buffer the + # data in a file, so we can use mmap() to get a memory view. 
The CDDL + # validator will not make a copy, so it won't increase memory usage + # beyond that. + try: + fd = request.content.fileno() + except (ValueError, OSError): + fd = -1 + if fd >= 0: + # It's a file, so we can use mmap() to save memory. + message = mmap.mmap(fd, 0, access=mmap.ACCESS_READ) + else: + message = request.content.read() + + # Pycddl will release the GIL when validating larger documents, so + # let's take advantage of multiple CPUs: + decoded = await defer_to_thread(schema.validate_cbor, message, True) + return decoded + +class HTTPServer(BaseApp): + """ + A HTTP interface to the storage server. + """ + + _app = Klein() + _app.url_map.converters["storage_index"] = StorageIndexConverter + _add_error_handling(_app) + + def __init__( + self, + reactor: IReactorFromThreads, + storage_server: StorageServer, + swissnum: bytes, + ): + self._reactor = reactor + self._storage_server = storage_server + self._swissnum = swissnum + # Maps storage index to StorageIndexUploads: + self._uploads = UploadsInProgress() + + # When an upload finishes successfully, gets aborted, or times out, + # make sure it gets removed from our tracking datastructure: + self._storage_server.register_bucket_writer_close_handler( + self._uploads.remove_write_bucket + ) + + def get_resource(self) -> KleinResource: + """Return twisted.web ``Resource`` for this object.""" + return self._app.resource() + + def _send_encoded(self, request: Request, data: object) -> Deferred[bytes]: + """ + Return encoded data suitable for writing as the HTTP body response, by + default using CBOR. + + Also sets the appropriate ``Content-Type`` header on the response. 
+ """ + accept_headers = request.requestHeaders.getRawHeaders("accept") or [ + CBOR_MIME_TYPE + ] + accept = parse_accept_header(accept_headers[0]) + if accept.best == CBOR_MIME_TYPE: + request.setHeader("Content-Type", CBOR_MIME_TYPE) + f = TemporaryFile() + cbor.dump(data, f) # type: ignore + + def read_data(offset: int, length: int) -> bytes: + f.seek(offset) + return f.read(length) + + return _ReadAllProducer.produce_to(request, read_data) + else: + # TODO Might want to optionally send JSON someday: + # https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3861 + raise _HTTPError(http.NOT_ACCEPTABLE) + + ##### Generic APIs ##### + + @_authorized_route(_app, set(), "/storage/v1/version", methods=["GET"]) + def version(self, request: Request, authorization: SecretsDict) -> KleinRenderable: + """Return version information.""" + return self._send_encoded(request, self._get_version()) + + def _get_version(self) -> dict[bytes, Any]: + """ + Get the HTTP version of the storage server's version response. + + This differs from the Foolscap version by omitting certain obsolete + fields. + """ + v = self._storage_server.get_version() + v1_identifier = b"http://allmydata.org/tahoe/protocols/storage/v1" + v1 = v[v1_identifier] + return { + v1_identifier: { + b"maximum-immutable-share-size": v1[b"maximum-immutable-share-size"], + b"maximum-mutable-share-size": v1[b"maximum-mutable-share-size"], + b"available-space": v1[b"available-space"], + }, + b"application-version": v[b"application-version"], + } + + ##### Immutable APIs ##### + + @_authorized_route( + _app, + {Secrets.LEASE_RENEW, Secrets.LEASE_CANCEL, Secrets.UPLOAD}, + "/storage/v1/immutable/", + methods=["POST"], + ) + @async_to_deferred + async def allocate_buckets( + self, request: Request, authorization: SecretsDict, storage_index: bytes + ) -> KleinRenderable: + """Allocate buckets.""" + upload_secret = authorization[Secrets.UPLOAD] + # It's just a list of up to ~256 shares, shouldn't use many bytes. 
+ info = await read_encoded( + self._reactor, request, _SCHEMAS["allocate_buckets"], max_size=8192 + ) + + # We do NOT validate the upload secret for existing bucket uploads. + # Another upload may be happening in parallel, with a different upload + # key. That's fine! If a client tries to _write_ to that upload, they + # need to have an upload key. That does mean we leak the existence of + # these parallel uploads, but if you know storage index you can + # download them once upload finishes, so it's not a big deal to leak + # that information. + + already_got, sharenum_to_bucket = self._storage_server.allocate_buckets( + storage_index, + renew_secret=authorization[Secrets.LEASE_RENEW], + cancel_secret=authorization[Secrets.LEASE_CANCEL], + sharenums=info["share-numbers"], + allocated_size=info["allocated-size"], + ) + for share_number, bucket in sharenum_to_bucket.items(): + self._uploads.add_write_bucket( + storage_index, share_number, upload_secret, bucket + ) + + return await self._send_encoded( + request, + {"already-have": set(already_got), "allocated": set(sharenum_to_bucket)}, + ) + + @_authorized_route( + _app, + {Secrets.UPLOAD}, + "/storage/v1/immutable///abort", + methods=["PUT"], + ) + def abort_share_upload( + self, + request: Request, + authorization: SecretsDict, + storage_index: bytes, + share_number: int, + ) -> KleinRenderable: + """Abort an in-progress immutable share upload.""" + try: + bucket = self._uploads.get_write_bucket( + storage_index, share_number, authorization[Secrets.UPLOAD] + ) + except _HTTPError as e: + if e.code == http.NOT_FOUND: + # It may be we've already uploaded this, in which case error + # should be method not allowed (405). + try: + self._storage_server.get_buckets(storage_index)[share_number] + except KeyError: + pass + else: + # Already uploaded, so we can't abort. 
+ raise _HTTPError(http.NOT_ALLOWED) + raise + + # Abort the upload; this should close it which will eventually result + # in self._uploads.remove_write_bucket() being called. + bucket.abort() + + return b"" + + @_authorized_route( + _app, + {Secrets.UPLOAD}, + "/storage/v1/immutable//", + methods=["PATCH"], + ) + def write_share_data( + self, + request: Request, + authorization: SecretsDict, + storage_index: bytes, + share_number: int, + ) -> KleinRenderable: + """Write data to an in-progress immutable upload.""" + content_range = parse_content_range_header(request.getHeader("content-range")) + if content_range is None or content_range.units != "bytes": + request.setResponseCode(http.REQUESTED_RANGE_NOT_SATISFIABLE) + return b"" + + bucket = self._uploads.get_write_bucket( + storage_index, share_number, authorization[Secrets.UPLOAD] + ) + offset = content_range.start or 0 + # We don't support an unspecified stop for the range: + assert content_range.stop is not None + # Missing body makes no sense: + assert request.content is not None + remaining = content_range.stop - offset + finished = False + + while remaining > 0: + data = request.content.read(min(remaining, 65536)) + assert data, "uploaded data length doesn't match range" + try: + finished = bucket.write(offset, data) + except ConflictingWriteError: + request.setResponseCode(http.CONFLICT) + return b"" + remaining -= len(data) + offset += len(data) + + if finished: + bucket.close() + request.setResponseCode(http.CREATED) + else: + request.setResponseCode(http.OK) + + required = [] + for start, end, _ in bucket.required_ranges().ranges(): + required.append({"begin": start, "end": end}) + return self._send_encoded(request, {"required": required}) + + @_authorized_route( + _app, + set(), + "/storage/v1/immutable//shares", + methods=["GET"], + ) + def list_shares( + self, request: Request, authorization: SecretsDict, storage_index: bytes + ) -> KleinRenderable: + """ + List shares for the given storage index. 
+ """ + share_numbers = set(self._storage_server.get_buckets(storage_index).keys()) + return self._send_encoded(request, share_numbers) + + @_authorized_route( + _app, + set(), + "/storage/v1/immutable//", + methods=["GET"], + ) + def read_share_chunk( + self, + request: Request, + authorization: SecretsDict, + storage_index: bytes, + share_number: int, + ) -> KleinRenderable: + """Read a chunk for an already uploaded immutable.""" + request.setHeader("content-type", "application/octet-stream") + try: + bucket = self._storage_server.get_buckets(storage_index)[share_number] + except KeyError: + request.setResponseCode(http.NOT_FOUND) + return b"" + + return read_range(request, bucket.read, bucket.get_length()) + + @_authorized_route( + _app, + {Secrets.LEASE_RENEW, Secrets.LEASE_CANCEL}, + "/storage/v1/lease/", + methods=["PUT"], + ) + def add_or_renew_lease( + self, request: Request, authorization: SecretsDict, storage_index: bytes + ) -> KleinRenderable: + """Update the lease for an immutable or mutable share.""" + if not list(self._storage_server.get_shares(storage_index)): + raise _HTTPError(http.NOT_FOUND) + + # Checking of the renewal secret is done by the backend. + self._storage_server.add_lease( + storage_index, + authorization[Secrets.LEASE_RENEW], + authorization[Secrets.LEASE_CANCEL], + ) + + request.setResponseCode(http.NO_CONTENT) + return b"" + + @_authorized_route( + _app, + set(), + "/storage/v1/immutable///corrupt", + methods=["POST"], + ) + @async_to_deferred + async def advise_corrupt_share_immutable( + self, + request: Request, + authorization: SecretsDict, + storage_index: bytes, + share_number: int, + ) -> KleinRenderable: + """Indicate that given share is corrupt, with a text reason.""" + try: + bucket = self._storage_server.get_buckets(storage_index)[share_number] + except KeyError: + raise _HTTPError(http.NOT_FOUND) + + # The reason can be a string with explanation, so in theory it could be + # longish? 
+ info = await read_encoded( + self._reactor, + request, + _SCHEMAS["advise_corrupt_share"], + max_size=32768, + ) + bucket.advise_corrupt_share(info["reason"].encode("utf-8")) + return b"" + + ##### Mutable APIs ##### + + @_authorized_route( + _app, + {Secrets.LEASE_RENEW, Secrets.LEASE_CANCEL, Secrets.WRITE_ENABLER}, + "/storage/v1/mutable//read-test-write", + methods=["POST"], + ) + @async_to_deferred + async def mutable_read_test_write( + self, request: Request, authorization: SecretsDict, storage_index: bytes + ) -> KleinRenderable: + """Read/test/write combined operation for mutables.""" + rtw_request = await read_encoded( + self._reactor, + request, + _SCHEMAS["mutable_read_test_write"], + max_size=2**48, + ) + secrets = ( + authorization[Secrets.WRITE_ENABLER], + authorization[Secrets.LEASE_RENEW], + authorization[Secrets.LEASE_CANCEL], + ) + try: + success, read_data = self._storage_server.slot_testv_and_readv_and_writev( + storage_index, + secrets, + { + k: ( + [ + (d["offset"], d["size"], b"eq", d["specimen"]) + for d in v["test"] + ], + [(d["offset"], d["data"]) for d in v["write"]], + v["new-length"], + ) + for (k, v) in rtw_request["test-write-vectors"].items() + }, + [(d["offset"], d["size"]) for d in rtw_request["read-vector"]], + ) + except BadWriteEnablerError: + raise _HTTPError(http.UNAUTHORIZED) + return await self._send_encoded( + request, {"success": success, "data": read_data} + ) + + @_authorized_route( + _app, + set(), + "/storage/v1/mutable//", + methods=["GET"], + ) + def read_mutable_chunk( + self, + request: Request, + authorization: SecretsDict, + storage_index: bytes, + share_number: int, + ) -> KleinRenderable: + """Read a chunk from a mutable.""" + request.setHeader("content-type", "application/octet-stream") + + try: + share_length = self._storage_server.get_mutable_share_length( + storage_index, share_number + ) + except KeyError: + raise _HTTPError(http.NOT_FOUND) + + def read_data(offset, length): + try: + return 
self._storage_server.slot_readv( + storage_index, [share_number], [(offset, length)] + )[share_number][0] + except KeyError: + raise _HTTPError(http.NOT_FOUND) + + return read_range(request, read_data, share_length) + + @_authorized_route( + _app, + set(), + "/storage/v1/mutable//shares", + methods=["GET"], + ) + def enumerate_mutable_shares(self, request, authorization, storage_index): + """List mutable shares for a storage index.""" + shares = self._storage_server.enumerate_mutable_shares(storage_index) + return self._send_encoded(request, shares) + + @_authorized_route( + _app, + set(), + "/storage/v1/mutable///corrupt", + methods=["POST"], + ) + @async_to_deferred + async def advise_corrupt_share_mutable( + self, + request: Request, + authorization: SecretsDict, + storage_index: bytes, + share_number: int, + ) -> KleinRenderable: + """Indicate that given share is corrupt, with a text reason.""" + if share_number not in { + shnum for (shnum, _) in self._storage_server.get_shares(storage_index) + }: + raise _HTTPError(http.NOT_FOUND) + + # The reason can be a string with explanation, so in theory it could be + # longish? + info = await read_encoded( + self._reactor, request, _SCHEMAS["advise_corrupt_share"], max_size=32768 + ) + self._storage_server.advise_corrupt_share( + b"mutable", storage_index, share_number, info["reason"].encode("utf-8") + ) + return b"" + + +@implementer(IStreamServerEndpoint) +@define +class _TLSEndpointWrapper(object): + """ + Wrap an existing endpoint with the server-side storage TLS policy. This is + useful because not all Tahoe-LAFS endpoints might be plain TCP+TLS, for + example there's Tor and i2p. 
+ """ + + endpoint: IStreamServerEndpoint + context_factory: CertificateOptions + + @classmethod + def from_paths( + cls: type[_TLSEndpointWrapper], + endpoint: IStreamServerEndpoint, + private_key_path: FilePath, + cert_path: FilePath, + ) -> "_TLSEndpointWrapper": + """ + Create an endpoint with the given private key and certificate paths on + the filesystem. + """ + certificate = Certificate.loadPEM(cert_path.getContent()).original + private_key = PrivateCertificate.loadPEM( + cert_path.getContent() + b"\n" + private_key_path.getContent() + ).privateKey.original + certificate_options = CertificateOptions( + privateKey=private_key, certificate=certificate + ) + return cls(endpoint=endpoint, context_factory=certificate_options) + + def listen(self, factory: IProtocolFactory) -> Deferred[IListeningPort]: + return self.endpoint.listen( + TLSMemoryBIOFactory(self.context_factory, False, factory) + ) + + +def build_nurl( + hostname: str, + port: int, + swissnum: str, + certificate: CryptoCertificate, + subscheme: Optional[str] = None, +) -> DecodedURL: + """ + Construct a HTTPS NURL, given the hostname, port, server swissnum, and x509 + certificate for the server. Clients can then connect to the server using + this NURL. + """ + scheme = "pb" + if subscheme is not None: + scheme = f"{scheme}+{subscheme}" + return DecodedURL().replace( + fragment="v=1", # how we know this NURL is HTTP-based (i.e. not Foolscap) + host=hostname, + port=port, + path=(swissnum,), + userinfo=( + str( + get_spki_hash(certificate), + "ascii", + ), + ), + scheme=scheme, + ) + + +def listen_tls( + server: HTTPServer, + hostname: str, + endpoint: IStreamServerEndpoint, + private_key_path: FilePath, + cert_path: FilePath, +) -> Deferred[tuple[DecodedURL, IListeningPort]]: + """ + Start a HTTPS storage server on the given port, return the NURL and the + listening port. 
+ + The hostname is the external IP or hostname clients will connect to, used + to constrtuct the NURL; it does not modify what interfaces the server + listens on. + + This will likely need to be updated eventually to handle Tor/i2p. + """ + endpoint = _TLSEndpointWrapper.from_paths(endpoint, private_key_path, cert_path) + + def get_nurl(listening_port: IListeningPort) -> DecodedURL: + address = cast(Union[IPv4Address, IPv6Address], listening_port.getHost()) + return build_nurl( + hostname, + address.port, + str(server._swissnum, "ascii"), + load_pem_x509_certificate(cert_path.getContent()), + ) + + return endpoint.listen(Site(server.get_resource())).addCallback( + lambda listening_port: (get_nurl(listening_port), listening_port) + ) diff --git a/src/allmydata/storage/immutable.py b/src/allmydata/storage/immutable.py index b8b18f140..9cb6cc6ee 100644 --- a/src/allmydata/storage/immutable.py +++ b/src/allmydata/storage/immutable.py @@ -2,15 +2,6 @@ Ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2, bytes_to_native_str -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - import os, stat, struct, time from collections_extended import RangeMap @@ -21,27 +12,32 @@ from zope.interface import implementer from allmydata.interfaces import ( RIBucketWriter, RIBucketReader, ConflictingWriteError, DataTooLargeError, + NoSpace, ) from allmydata.util import base32, fileutil, log from allmydata.util.assertutil import precondition -from allmydata.util.hashutil import timing_safe_compare -from allmydata.storage.lease import LeaseInfo from allmydata.storage.common import UnknownImmutableContainerVersionError +from .immutable_schema import ( + NEWEST_SCHEMA_VERSION, + schema_from_version, +) + + # each 
share file (in storage/shares/$SI/$SHNUM) contains lease information # and share data. The share data is accessed by RIBucketWriter.write and # RIBucketReader.read . The lease information is not accessible through these # interfaces. # The share file has the following layout: -# 0x00: share file version number, four bytes, current version is 1 +# 0x00: share file version number, four bytes, current version is 2 # 0x04: share data length, four bytes big-endian = A # See Footnote 1 below. # 0x08: number of leases, four bytes big-endian # 0x0c: beginning of share data (see immutable.layout.WriteBucketProxy) # A+0x0c = B: first lease. Lease format is: # B+0x00: owner number, 4 bytes big-endian, 0 is reserved for no-owner -# B+0x04: renew secret, 32 bytes (SHA256) -# B+0x24: cancel secret, 32 bytes (SHA256) +# B+0x04: renew secret, 32 bytes (SHA256 + blake2b) # See Footnote 2 below. +# B+0x24: cancel secret, 32 bytes (SHA256 + blake2b) # B+0x44: expiration time, 4 bytes big-endian seconds-since-epoch # B+0x48: next lease, or end of record @@ -53,13 +49,126 @@ from allmydata.storage.common import UnknownImmutableContainerVersionError # then the value stored in this field will be the actual share data length # modulo 2**32. +# Footnote 2: The change between share file version number 1 and 2 is that +# storage of lease secrets is changed from plaintext to hashed. This change +# protects the secrets from compromises of local storage on the server: if a +# plaintext cancel secret is somehow exfiltrated from the storage server, an +# attacker could use it to cancel that lease and potentially cause user data +# to be discarded before intended by the real owner. As of this comment, +# lease cancellation is disabled because there have been at least two bugs +# which leak the persisted value of the cancellation secret. If lease secrets +# were stored hashed instead of plaintext then neither of these bugs would +# have allowed an attacker to learn a usable cancel secret. 
+# +# Clients are free to construct these secrets however they like. The +# Tahoe-LAFS client uses a SHA256-based construction. The server then uses +# blake2b to hash these values for storage so that it retains no persistent +# copy of the original secret. +# + +def _fix_lease_count_format(lease_count_format): + """ + Turn a single character struct format string into a format string suitable + for use in encoding and decoding the lease count value inside a share + file, if possible. + + :param str lease_count_format: A single character format string like + ``"B"`` or ``"L"``. + + :raise ValueError: If the given format string is not suitable for use + encoding and decoding a lease count. + + :return str: A complete format string which can safely be used to encode + and decode lease counts in a share file. + """ + if len(lease_count_format) != 1: + raise ValueError( + "Cannot construct ShareFile with lease_count_format={!r}; " + "format must accept a single value".format( + lease_count_format, + ), + ) + # Make it big-endian with standard size so all platforms agree on the + # result. + fixed = ">" + lease_count_format + if struct.calcsize(fixed) > 4: + # There is only room for at most 4 bytes in the share file format so + # we can't allow any larger formats. + raise ValueError( + "Cannot construct ShareFile with lease_count_format={!r}; " + "size must be smaller than size of '>L'".format( + lease_count_format, + ), + ) + return fixed + + class ShareFile(object): + """ + Support interaction with persistent storage of a share. + + :ivar str _lease_count_format: The format string which is used to encode + and decode the lease count inside the share file. As stated in the + comment in this module there is room for at most 4 bytes in this part + of the file. A format string that works on fewer bytes is allowed to + restrict the number of leases allowed in the share file to a smaller + number than could be supported by using the full 4 bytes. 
This is + mostly of interest for testing. + """ LEASE_SIZE = struct.calcsize(">L32s32sL") sharetype = "immutable" - def __init__(self, filename, max_size=None, create=False): - """ If max_size is not None then I won't allow more than max_size to be written to me. If create=True and max_size must not be None. """ + @classmethod + def is_valid_header(cls, header): + # type: (bytes) -> bool + """ + Determine if the given bytes constitute a valid header for this type of + container. + + :param header: Some bytes from the beginning of a container. + + :return: ``True`` if the bytes could belong to this container, + ``False`` otherwise. + """ + (version,) = struct.unpack(">L", header[:4]) + return schema_from_version(version) is not None + + def __init__( + self, + filename, + max_size=None, + create=False, + lease_count_format="L", + schema=NEWEST_SCHEMA_VERSION, + ): + """ + Initialize a ``ShareFile``. + + :param Optional[int] max_size: If given, the maximum number of bytes + that this ``ShareFile`` will accept to be stored. + + :param bool create: If ``True``, create the file (and fail if it + exists already). ``max_size`` must not be ``None`` in this case. + If ``False``, open an existing file for reading. + + :param str lease_count_format: A format character to use to encode and + decode the number of leases in the share file. There are only 4 + bytes available in the file so the format must be 4 bytes or + smaller. If different formats are used at different times with + the same share file, the result will likely be nonsense. + + This parameter is intended for the test suite to use to be able to + exercise values near the maximum encodeable value without having + to create billions of leases. + + :raise ValueError: If the encoding of ``lease_count_format`` is too + large or if it is not a single format character. 
+ """ + precondition((max_size is not None) or (not create), max_size, create) + + self._lease_count_format = _fix_lease_count_format(lease_count_format) + self._lease_count_size = struct.calcsize(self._lease_count_format) self.home = filename self._max_size = max_size if create: @@ -67,31 +176,30 @@ class ShareFile(object): # it. Also construct the metadata. assert not os.path.exists(self.home) fileutil.make_dirs(os.path.dirname(self.home)) - # The second field -- the four-byte share data length -- is no - # longer used as of Tahoe v1.3.0, but we continue to write it in - # there in case someone downgrades a storage server from >= - # Tahoe-1.3.0 to < Tahoe-1.3.0, or moves a share file from one - # server to another, etc. We do saturation -- a share data length - # larger than 2**32-1 (what can fit into the field) is marked as - # the largest length that can fit into the field. That way, even - # if this does happen, the old < v1.3.0 server will still allow - # clients to read the first part of the share. + self._schema = schema with open(self.home, 'wb') as f: - f.write(struct.pack(">LLL", 1, min(2**32-1, max_size), 0)) + f.write(self._schema.header(max_size)) self._lease_offset = max_size + 0x0c self._num_leases = 0 else: with open(self.home, 'rb') as f: filesize = os.path.getsize(self.home) (version, unused, num_leases) = struct.unpack(">LLL", f.read(0xc)) - if version != 1: - msg = "sharefile %s had version %d but we wanted 1" % \ - (filename, version) - raise UnknownImmutableContainerVersionError(msg) + self._schema = schema_from_version(version) + if self._schema is None: + raise UnknownImmutableContainerVersionError(filename, version) self._num_leases = num_leases self._lease_offset = filesize - (num_leases * self.LEASE_SIZE) + self._length = filesize - 0xc - (num_leases * self.LEASE_SIZE) + self._data_offset = 0xc + def get_length(self): + """ + Return the length of the data in the share, if we're reading. 
+ """ + return self._length + def unlink(self): os.unlink(self.home) @@ -122,16 +230,25 @@ class ShareFile(object): offset = self._lease_offset + lease_number * self.LEASE_SIZE f.seek(offset) assert f.tell() == offset - f.write(lease_info.to_immutable_data()) + f.write(self._schema.lease_serializer.serialize(lease_info)) def _read_num_leases(self, f): f.seek(0x08) - (num_leases,) = struct.unpack(">L", f.read(4)) + (num_leases,) = struct.unpack( + self._lease_count_format, + f.read(self._lease_count_size), + ) return num_leases def _write_num_leases(self, f, num_leases): + self._write_encoded_num_leases( + f, + struct.pack(self._lease_count_format, num_leases), + ) + + def _write_encoded_num_leases(self, f, encoded_num_leases): f.seek(0x08) - f.write(struct.pack(">L", num_leases)) + f.write(encoded_num_leases) def _truncate_leases(self, f, num_leases): f.truncate(self._lease_offset + num_leases * self.LEASE_SIZE) @@ -144,34 +261,63 @@ class ShareFile(object): for i in range(num_leases): data = f.read(self.LEASE_SIZE) if data: - yield LeaseInfo().from_immutable_data(data) + yield self._schema.lease_serializer.unserialize(data) def add_lease(self, lease_info): with open(self.home, 'rb+') as f: num_leases = self._read_num_leases(f) + # Before we write the new lease record, make sure we can encode + # the new lease count. + new_lease_count = struct.pack(self._lease_count_format, num_leases + 1) self._write_lease_record(f, num_leases, lease_info) - self._write_num_leases(f, num_leases+1) + self._write_encoded_num_leases(f, new_lease_count) - def renew_lease(self, renew_secret, new_expire_time): + def renew_lease(self, renew_secret, new_expire_time, allow_backdate=False): + # type: (bytes, int, bool) -> None + """ + Update the expiration time on an existing lease. + + :param allow_backdate: If ``True`` then allow the new expiration time + to be before the current expiration time. Otherwise, make no + change when this is the case. 
+ + :raise IndexError: If there is no lease matching the given renew + secret. + """ for i,lease in enumerate(self.get_leases()): - if timing_safe_compare(lease.renew_secret, renew_secret): + if lease.is_renew_secret(renew_secret): # yup. See if we need to update the owner time. - if new_expire_time > lease.expiration_time: + if allow_backdate or new_expire_time > lease.get_expiration_time(): # yes - lease.expiration_time = new_expire_time + lease = lease.renew(new_expire_time) with open(self.home, 'rb+') as f: self._write_lease_record(f, i, lease) return raise IndexError("unable to renew non-existent lease") - def add_or_renew_lease(self, lease_info): + def add_or_renew_lease(self, available_space, lease_info): + """ + Renew an existing lease if possible, otherwise allocate a new one. + + :param int available_space: The maximum number of bytes of storage to + commit in this operation. If more than this number of bytes is + required, raise ``NoSpace`` instead. + + :param LeaseInfo lease_info: The details of the lease to renew or add. + + :raise NoSpace: If more than ``available_space`` bytes is required to + complete the operation. In this case, no lease is added. + + :return: ``None`` + """ try: self.renew_lease(lease_info.renew_secret, - lease_info.expiration_time) + lease_info.get_expiration_time()) except IndexError: + if lease_info.immutable_size() > available_space: + raise NoSpace() self.add_lease(lease_info) - def cancel_lease(self, cancel_secret): """Remove a lease with the given cancel_secret. If the last lease is cancelled, the file will be removed. 
Return the number of bytes that @@ -183,7 +329,7 @@ class ShareFile(object): leases = list(self.get_leases()) num_leases_removed = 0 for i,lease in enumerate(leases): - if timing_safe_compare(lease.cancel_secret, cancel_secret): + if lease.is_cancel_secret(cancel_secret): leases[i] = None num_leases_removed += 1 if not num_leases_removed: @@ -205,10 +351,12 @@ class ShareFile(object): return space_freed -@implementer(RIBucketWriter) -class BucketWriter(Referenceable): # type: ignore # warner/foolscap#78 +class BucketWriter(object): + """ + Keep track of the process of writing to a ShareFile. + """ - def __init__(self, ss, incominghome, finalhome, max_size, lease_info): + def __init__(self, ss, incominghome, finalhome, max_size, lease_info, clock): self.ss = ss self.incominghome = incominghome self.finalhome = finalhome @@ -220,15 +368,34 @@ class BucketWriter(Referenceable): # type: ignore # warner/foolscap#78 # added by simultaneous uploaders self._sharefile.add_lease(lease_info) self._already_written = RangeMap() + self._clock = clock + self._timeout = clock.callLater(30 * 60, self._abort_due_to_timeout) + + def required_ranges(self): # type: () -> RangeMap + """ + Return which ranges still need to be written. + """ + result = RangeMap() + result.set(True, 0, self._max_size) + for start, end, _ in self._already_written.ranges(): + result.delete(start, end) + return result def allocated_size(self): return self._max_size - def remote_write(self, offset, data): - start = time.time() + def write(self, offset, data): # type: (int, bytes) -> bool + """ + Write data at given offset, return whether the upload is complete. + """ + # Delay the timeout, since we received data; if we get an + # AlreadyCancelled error, that means there's a bug in the client and + # write() was called after close(). 
+ self._timeout.reset(30 * 60) + start = self._clock.seconds() precondition(not self.closed) if self.throw_out_all_data: - return + return False # Make sure we're not conflicting with existing data: end = offset + len(data) @@ -243,12 +410,23 @@ class BucketWriter(Referenceable): # type: ignore # warner/foolscap#78 self._sharefile.write_share_data(offset, data) self._already_written.set(True, offset, end) - self.ss.add_latency("write", time.time() - start) + self.ss.add_latency("write", self._clock.seconds() - start) self.ss.count("write") + return self._is_finished() - def remote_close(self): + def _is_finished(self): + """ + Return whether the whole thing has been written. + """ + return sum([mr.stop - mr.start for mr in self._already_written.ranges()]) == self._max_size + + def close(self): + # This can't actually be enabled, because it's not backwards compatible + # with old Foolscap clients. + # assert self._is_finished() precondition(not self.closed) - start = time.time() + self._timeout.cancel() + start = self._clock.seconds() fileutil.make_dirs(os.path.dirname(self.finalhome)) fileutil.rename(self.incominghome, self.finalhome) @@ -281,20 +459,25 @@ class BucketWriter(Referenceable): # type: ignore # warner/foolscap#78 filelen = os.stat(self.finalhome)[stat.ST_SIZE] self.ss.bucket_writer_closed(self, filelen) - self.ss.add_latency("close", time.time() - start) + self.ss.add_latency("close", self._clock.seconds() - start) self.ss.count("close") def disconnected(self): if not self.closed: - self._abort() + self.abort() - def remote_abort(self): + def _abort_due_to_timeout(self): + """ + Called if we run out of time. 
+ """ + log.msg("storage: aborting sharefile %s due to timeout" % self.incominghome, + facility="tahoe.storage", level=log.UNUSUAL) + self.abort() + + def abort(self): log.msg("storage: aborting sharefile %s" % self.incominghome, facility="tahoe.storage", level=log.UNUSUAL) - self._abort() self.ss.count("abort") - - def _abort(self): if self.closed: return @@ -312,9 +495,33 @@ class BucketWriter(Referenceable): # type: ignore # warner/foolscap#78 self.closed = True self.ss.bucket_writer_closed(self, 0) + # Cancel timeout if it wasn't already cancelled. + if self._timeout.active(): + self._timeout.cancel() -@implementer(RIBucketReader) -class BucketReader(Referenceable): # type: ignore # warner/foolscap#78 + +@implementer(RIBucketWriter) +class FoolscapBucketWriter(Referenceable): # type: ignore # warner/foolscap#78 + """ + Foolscap-specific BucketWriter. + """ + def __init__(self, bucket_writer): + self._bucket_writer = bucket_writer + + def remote_write(self, offset, data): + self._bucket_writer.write(offset, data) + + def remote_close(self): + return self._bucket_writer.close() + + def remote_abort(self): + return self._bucket_writer.abort() + + +class BucketReader(object): + """ + Manage the process for reading from a ``ShareFile``. 
+ """ def __init__(self, ss, sharefname, storage_index=None, shnum=None): self.ss = ss @@ -324,20 +531,40 @@ class BucketReader(Referenceable): # type: ignore # warner/foolscap#78 def __repr__(self): return "<%s %s %s>" % (self.__class__.__name__, - bytes_to_native_str( - base32.b2a(self.storage_index[:8])[:12] - ), + base32.b2a(self.storage_index[:8])[:12].decode(), self.shnum) - def remote_read(self, offset, length): + def read(self, offset, length): start = time.time() data = self._share_file.read_share_data(offset, length) self.ss.add_latency("read", time.time() - start) self.ss.count("read") return data + def advise_corrupt_share(self, reason): + return self.ss.advise_corrupt_share(b"immutable", + self.storage_index, + self.shnum, + reason) + + def get_length(self): + """ + Return the length of the data in the share. + """ + return self._share_file.get_length() + + +@implementer(RIBucketReader) +class FoolscapBucketReader(Referenceable): # type: ignore # warner/foolscap#78 + """ + Foolscap wrapper for ``BucketReader`` + """ + + def __init__(self, bucket_reader): + self._bucket_reader = bucket_reader + + def remote_read(self, offset, length): + return self._bucket_reader.read(offset, length) + def remote_advise_corrupt_share(self, reason): - return self.ss.remote_advise_corrupt_share(b"immutable", - self.storage_index, - self.shnum, - reason) + return self._bucket_reader.advise_corrupt_share(reason) diff --git a/src/allmydata/storage/immutable_schema.py b/src/allmydata/storage/immutable_schema.py new file mode 100644 index 000000000..2798ea0cb --- /dev/null +++ b/src/allmydata/storage/immutable_schema.py @@ -0,0 +1,63 @@ +""" +Ported to Python 3. +""" + +import struct + +import attr + +from .lease_schema import ( + v1_immutable, + v2_immutable, +) + +@attr.s(frozen=True) +class _Schema(object): + """ + Implement encoding and decoding for multiple versions of the immutable + container schema. 
+ + :ivar int version: the version number of the schema this object supports + + :ivar lease_serializer: an object that is responsible for lease + serialization and unserialization + """ + version = attr.ib() + lease_serializer = attr.ib() + + def header(self, max_size): + # type: (int) -> bytes + """ + Construct a container header. + + :param max_size: the maximum size the container can hold + + :return: the header bytes + """ + # The second field -- the four-byte share data length -- is no longer + # used as of Tahoe v1.3.0, but we continue to write it in there in + # case someone downgrades a storage server from >= Tahoe-1.3.0 to < + # Tahoe-1.3.0, or moves a share file from one server to another, + # etc. We do saturation -- a share data length larger than 2**32-1 + # (what can fit into the field) is marked as the largest length that + # can fit into the field. That way, even if this does happen, the old + # < v1.3.0 server will still allow clients to read the first part of + # the share. + return struct.pack(">LLL", self.version, min(2**32 - 1, max_size), 0) + +ALL_SCHEMAS = { + _Schema(version=2, lease_serializer=v2_immutable), + _Schema(version=1, lease_serializer=v1_immutable), +} +ALL_SCHEMA_VERSIONS = {schema.version for schema in ALL_SCHEMAS} +NEWEST_SCHEMA_VERSION = max(ALL_SCHEMAS, key=lambda schema: schema.version) + +def schema_from_version(version): + # (int) -> Optional[type] + """ + Find the schema object that corresponds to a certain version number. + """ + for schema in ALL_SCHEMAS: + if schema.version == version: + return schema + return None diff --git a/src/allmydata/storage/lease.py b/src/allmydata/storage/lease.py index 187f32406..4a8b10d01 100644 --- a/src/allmydata/storage/lease.py +++ b/src/allmydata/storage/lease.py @@ -2,63 +2,379 @@ Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - import struct, time +import attr + +from zope.interface import ( + Interface, + implementer, +) + +from twisted.python.components import ( + proxyForInterface, +) + +from allmydata.util.hashutil import timing_safe_compare +from allmydata.util import base32 + +# struct format for representation of a lease in an immutable share +IMMUTABLE_FORMAT = ">L32s32sL" + +# struct format for representation of a lease in a mutable share +MUTABLE_FORMAT = ">LL32s32s20s" + + +class ILeaseInfo(Interface): + """ + Represent a marker attached to a share that indicates that share should be + retained for some amount of time. + + Typically clients will create and renew leases on their shares as a way to + inform storage servers that there is still interest in those shares. A + share may have more than one lease. If all leases on a share have + expiration times in the past then the storage server may take this as a + strong hint that no one is interested in the share anymore and therefore + the share may be deleted to reclaim the space. + """ + def renew(new_expire_time): + """ + Create a new ``ILeaseInfo`` with the given expiration time. + + :param Union[int, float] new_expire_time: The expiration time the new + ``ILeaseInfo`` will have. + + :return: The new ``ILeaseInfo`` provider with the new expiration time. 
+ """ + + def get_expiration_time(): + """ + :return Union[int, float]: this lease's expiration time + """ + + def get_grant_renew_time_time(): + """ + :return Union[int, float]: a guess about the last time this lease was + renewed + """ + + def get_age(): + """ + :return Union[int, float]: a guess about how long it has been since this + lease was renewed + """ + + def to_immutable_data(): + """ + :return bytes: a serialized representation of this lease suitable for + inclusion in an immutable container + """ + + def to_mutable_data(): + """ + :return bytes: a serialized representation of this lease suitable for + inclusion in a mutable container + """ + + def immutable_size(): + """ + :return int: the size of the serialized representation of this lease in an + immutable container + """ + + def mutable_size(): + """ + :return int: the size of the serialized representation of this lease in a + mutable container + """ + + def is_renew_secret(candidate_secret): + """ + :return bool: ``True`` if the given byte string is this lease's renew + secret, ``False`` otherwise + """ + + def present_renew_secret(): + """ + :return str: Text which could reasonably be shown to a person representing + this lease's renew secret. + """ + + def is_cancel_secret(candidate_secret): + """ + :return bool: ``True`` if the given byte string is this lease's cancel + secret, ``False`` otherwise + """ + + def present_cancel_secret(): + """ + :return str: Text which could reasonably be shown to a person representing + this lease's cancel secret. 
+ """ + + +@implementer(ILeaseInfo) +@attr.s(frozen=True) class LeaseInfo(object): - def __init__(self, owner_num=None, renew_secret=None, cancel_secret=None, - expiration_time=None, nodeid=None): - self.owner_num = owner_num - self.renew_secret = renew_secret - self.cancel_secret = cancel_secret - self.expiration_time = expiration_time - if nodeid is not None: - assert isinstance(nodeid, bytes) - assert len(nodeid) == 20 - self.nodeid = nodeid + """ + Represent the details of one lease, a marker which is intended to inform + the storage server how long to store a particular share. + """ + owner_num = attr.ib(default=None) + + # Don't put secrets into the default string representation. This makes it + # slightly less likely the secrets will accidentally be leaked to + # someplace they're not meant to be. + renew_secret = attr.ib(default=None, repr=False) + cancel_secret = attr.ib(default=None, repr=False) + + _expiration_time = attr.ib(default=None) + + nodeid = attr.ib(default=None) + + @nodeid.validator + def _validate_nodeid(self, attribute, value): + if value is not None: + if not isinstance(value, bytes): + raise ValueError( + "nodeid value must be bytes, not {!r}".format(value), + ) + if len(value) != 20: + raise ValueError( + "nodeid value must be 20 bytes long, not {!r}".format(value), + ) + return None def get_expiration_time(self): - return self.expiration_time + # type: () -> float + """ + Retrieve a POSIX timestamp representing the time at which this lease is + set to expire. + """ + return self._expiration_time + + def renew(self, new_expire_time): + # type: (float) -> LeaseInfo + """ + Create a new lease the same as this one but with a new expiration time. + + :param new_expire_time: The new expiration time. + + :return: The new lease info. + """ + return attr.assoc( + self, + # MyPy is unhappy with this; long-term solution is likely switch to + # new @frozen attrs API, with type annotations. 
+ _expiration_time=new_expire_time, # type: ignore[call-arg] + ) + + def is_renew_secret(self, candidate_secret): + # type: (bytes) -> bool + """ + Check a string to see if it is the correct renew secret. + + :return: ``True`` if it is the correct renew secret, ``False`` + otherwise. + """ + return timing_safe_compare(self.renew_secret, candidate_secret) + + def present_renew_secret(self): + # type: () -> str + """ + Return the renew secret, base32-encoded. + """ + return str(base32.b2a(self.renew_secret), "utf-8") + + def is_cancel_secret(self, candidate_secret): + # type: (bytes) -> bool + """ + Check a string to see if it is the correct cancel secret. + + :return: ``True`` if it is the correct cancel secret, ``False`` + otherwise. + """ + return timing_safe_compare(self.cancel_secret, candidate_secret) + + def present_cancel_secret(self): + # type: () -> str + """ + Return the cancel secret, base32-encoded. + """ + return str(base32.b2a(self.cancel_secret), "utf-8") def get_grant_renew_time_time(self): # hack, based upon fixed 31day expiration period - return self.expiration_time - 31*24*60*60 + return self._expiration_time - 31*24*60*60 def get_age(self): return time.time() - self.get_grant_renew_time_time() - def from_immutable_data(self, data): - (self.owner_num, - self.renew_secret, - self.cancel_secret, - self.expiration_time) = struct.unpack(">L32s32sL", data) - self.nodeid = None - return self + @classmethod + def from_immutable_data(cls, data): + """ + Create a new instance from the encoded data given. + + :param data: A lease serialized using the immutable-share-file format. + """ + names = [ + "owner_num", + "renew_secret", + "cancel_secret", + "expiration_time", + ] + values = struct.unpack(IMMUTABLE_FORMAT, data) + return cls(nodeid=None, **dict(zip(names, values))) + + def immutable_size(self): + """ + :return int: The size, in bytes, of the representation of this lease in an + immutable share file. 
+ """ + return struct.calcsize(IMMUTABLE_FORMAT) + + def mutable_size(self): + """ + :return int: The size, in bytes, of the representation of this lease in a + mutable share file. + """ + return struct.calcsize(MUTABLE_FORMAT) def to_immutable_data(self): - return struct.pack(">L32s32sL", + return struct.pack(IMMUTABLE_FORMAT, self.owner_num, self.renew_secret, self.cancel_secret, - int(self.expiration_time)) + int(self._expiration_time)) def to_mutable_data(self): - return struct.pack(">LL32s32s20s", + return struct.pack(MUTABLE_FORMAT, self.owner_num, - int(self.expiration_time), + int(self._expiration_time), self.renew_secret, self.cancel_secret, self.nodeid) - def from_mutable_data(self, data): - (self.owner_num, - self.expiration_time, - self.renew_secret, self.cancel_secret, - self.nodeid) = struct.unpack(">LL32s32s20s", data) - return self + @classmethod + def from_mutable_data(cls, data): + """ + Create a new instance from the encoded data given. + + :param data: A lease serialized using the mutable-share-file format. + """ + names = [ + "owner_num", + "expiration_time", + "renew_secret", + "cancel_secret", + "nodeid", + ] + values = struct.unpack(MUTABLE_FORMAT, data) + return cls(**dict(zip(names, values))) + + +@attr.s(frozen=True) +class HashedLeaseInfo(proxyForInterface(ILeaseInfo, "_lease_info")): # type: ignore # unsupported dynamic base class + """ + A ``HashedLeaseInfo`` wraps lease information in which the secrets have + been hashed. + """ + _lease_info = attr.ib() + _hash = attr.ib() + + # proxyForInterface will take care of forwarding all methods on ILeaseInfo + # to `_lease_info`. Here we override a few of those methods to adjust + # their behavior to make them suitable for use with hashed secrets. + + def renew(self, new_expire_time): + # Preserve the HashedLeaseInfo wrapper around the renewed LeaseInfo. 
+ return attr.assoc( + self, + _lease_info=super(HashedLeaseInfo, self).renew(new_expire_time), + ) + + def is_renew_secret(self, candidate_secret): + # type: (bytes) -> bool + """ + Hash the candidate secret and compare the result to the stored hashed + secret. + """ + return super(HashedLeaseInfo, self).is_renew_secret(self._hash(candidate_secret)) + + def present_renew_secret(self): + # type: () -> str + """ + Present the hash of the secret with a marker indicating it is a hash. + """ + return u"hash:" + super(HashedLeaseInfo, self).present_renew_secret() + + def is_cancel_secret(self, candidate_secret): + # type: (bytes) -> bool + """ + Hash the candidate secret and compare the result to the stored hashed + secret. + """ + if isinstance(candidate_secret, _HashedCancelSecret): + # Someone read it off of this object in this project - probably + # the lease crawler - and is just trying to use it to identify + # which lease it wants to operate on. Avoid re-hashing the value. + # + # It is important that this codepath is only availably internally + # for this process to talk to itself. If it were to be exposed to + # clients over the network, they could just provide the hashed + # value to avoid having to ever learn the original value. + hashed_candidate = candidate_secret.hashed_value + else: + # It is not yet hashed so hash it. + hashed_candidate = self._hash(candidate_secret) + + return super(HashedLeaseInfo, self).is_cancel_secret(hashed_candidate) + + def present_cancel_secret(self): + # type: () -> str + """ + Present the hash of the secret with a marker indicating it is a hash. 
+ """ + return u"hash:" + super(HashedLeaseInfo, self).present_cancel_secret() + + @property + def owner_num(self): + return self._lease_info.owner_num + + @property + def nodeid(self): + return self._lease_info.nodeid + + @property + def cancel_secret(self): + """ + Give back an opaque wrapper around the hashed cancel secret which can + later be presented for a succesful equality comparison. + """ + # We don't *have* the cancel secret. We hashed it and threw away the + # original. That's good. It does mean that some code that runs + # in-process with the storage service (LeaseCheckingCrawler) runs into + # some difficulty. That code wants to cancel leases and does so using + # the same interface that faces storage clients (or would face them, + # if lease cancellation were exposed). + # + # Since it can't use the hashed secret to cancel a lease (that's the + # point of the hashing) and we don't have the unhashed secret to give + # it, instead we give it a marker that `cancel_lease` will recognize. + # On recognizing it, if the hashed value given matches the hashed + # value stored it is considered a match and the lease can be + # cancelled. + # + # This isn't great. Maybe the internal and external consumers of + # cancellation should use different interfaces. + return _HashedCancelSecret(self._lease_info.cancel_secret) + + +@attr.s(frozen=True) +class _HashedCancelSecret(object): + """ + ``_HashedCancelSecret`` is a marker type for an already-hashed lease + cancel secret that lets internal lease cancellers bypass the hash-based + protection that's imposed on external lease cancellers. + + :ivar bytes hashed_value: The already-hashed secret. + """ + hashed_value = attr.ib() diff --git a/src/allmydata/storage/lease_schema.py b/src/allmydata/storage/lease_schema.py new file mode 100644 index 000000000..ba7dc991a --- /dev/null +++ b/src/allmydata/storage/lease_schema.py @@ -0,0 +1,125 @@ +""" +Ported to Python 3. 
+""" + +from typing import Union + +import attr + +from nacl.hash import blake2b +from nacl.encoding import RawEncoder + +from .lease import ( + LeaseInfo, + HashedLeaseInfo, +) + +@attr.s(frozen=True) +class CleartextLeaseSerializer(object): + """ + Serialize and unserialize leases with cleartext secrets. + """ + _to_data = attr.ib() + _from_data = attr.ib() + + def serialize(self, lease): + # type: (LeaseInfo) -> bytes + """ + Represent the given lease as bytes with cleartext secrets. + """ + if isinstance(lease, LeaseInfo): + return self._to_data(lease) + raise ValueError( + "ShareFile v1 schema only supports LeaseInfo, not {!r}".format( + lease, + ), + ) + + def unserialize(self, data): + # type: (bytes) -> LeaseInfo + """ + Load a lease with cleartext secrets from the given bytes representation. + """ + # In v1 of the immutable schema lease secrets are stored plaintext. + # So load the data into a plain LeaseInfo which works on plaintext + # secrets. + return self._from_data(data) + +@attr.s(frozen=True) +class HashedLeaseSerializer(object): + _to_data = attr.ib() + _from_data = attr.ib() + + @classmethod + def _hash_secret(cls, secret): + # type: (bytes) -> bytes + """ + Hash a lease secret for storage. + """ + return blake2b(secret, digest_size=32, encoder=RawEncoder) + + @classmethod + def _hash_lease_info(cls, lease_info): + # type: (LeaseInfo) -> HashedLeaseInfo + """ + Hash the cleartext lease info secrets into a ``HashedLeaseInfo``. + """ + if not isinstance(lease_info, LeaseInfo): + # Provide a little safety against misuse, especially an attempt to + # re-hash an already-hashed lease info which is represented as a + # different type. + raise TypeError( + "Can only hash LeaseInfo, not {!r}".format(lease_info), + ) + + # Hash the cleartext secrets in the lease info and wrap the result in + # a new type. 
+ return HashedLeaseInfo( + attr.assoc( + lease_info, + renew_secret=cls._hash_secret(lease_info.renew_secret), + cancel_secret=cls._hash_secret(lease_info.cancel_secret), + ), + cls._hash_secret, + ) + + def serialize(self, lease: Union[LeaseInfo, HashedLeaseInfo]) -> bytes: + if isinstance(lease, LeaseInfo): + # v2 of the immutable schema stores lease secrets hashed. If + # we're given a LeaseInfo then it holds plaintext secrets. Hash + # them before trying to serialize. + lease = self._hash_lease_info(lease) + if isinstance(lease, HashedLeaseInfo): + return self._to_data(lease) + raise ValueError( + "ShareFile v2 schema cannot represent lease {!r}".format( + lease, + ), + ) + + def unserialize(self, data): + # type: (bytes) -> HashedLeaseInfo + # In v2 of the immutable schema lease secrets are stored hashed. Wrap + # a LeaseInfo in a HashedLeaseInfo so it can supply the correct + # interpretation for those values. + return HashedLeaseInfo(self._from_data(data), self._hash_secret) + +v1_immutable = CleartextLeaseSerializer( + LeaseInfo.to_immutable_data, + LeaseInfo.from_immutable_data, +) + +v2_immutable = HashedLeaseSerializer( + HashedLeaseInfo.to_immutable_data, + LeaseInfo.from_immutable_data, +) + +v1_mutable = CleartextLeaseSerializer( + LeaseInfo.to_mutable_data, + LeaseInfo.from_mutable_data, +) + +v2_mutable = HashedLeaseSerializer( + HashedLeaseInfo.to_mutable_data, + LeaseInfo.from_mutable_data, +) diff --git a/src/allmydata/storage/mutable.py b/src/allmydata/storage/mutable.py index 2ef0c3215..d13a68020 100644 --- a/src/allmydata/storage/mutable.py +++ b/src/allmydata/storage/mutable.py @@ -2,18 +2,12 @@ Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - import os, stat, struct -from allmydata.interfaces import BadWriteEnablerError +from allmydata.interfaces import ( + BadWriteEnablerError, + NoSpace, +) from allmydata.util import idlib, log from allmydata.util.assertutil import precondition from allmydata.util.hashutil import timing_safe_compare @@ -21,7 +15,10 @@ from allmydata.storage.lease import LeaseInfo from allmydata.storage.common import UnknownMutableContainerVersionError, \ DataTooLargeError from allmydata.mutable.layout import MAX_MUTABLE_SHARE_SIZE - +from .mutable_schema import ( + NEWEST_SCHEMA_VERSION, + schema_from_header, +) # the MutableShareFile is like the ShareFile, but used for mutable data. It # has a different layout. See docs/mutable.txt for more details. @@ -61,26 +58,34 @@ class MutableShareFile(object): # our sharefiles share with a recognizable string, plus some random # binary data to reduce the chance that a regular text file will look # like a sharefile. - MAGIC = b"Tahoe mutable container v1\n" + b"\x75\x09\x44\x03\x8e" - assert len(MAGIC) == 32 - assert isinstance(MAGIC, bytes) MAX_SIZE = MAX_MUTABLE_SHARE_SIZE # TODO: decide upon a policy for max share size - def __init__(self, filename, parent=None): + @classmethod + def is_valid_header(cls, header): + # type: (bytes) -> bool + """ + Determine if the given bytes constitute a valid header for this type of + container. + + :param header: Some bytes from the beginning of a container. + + :return: ``True`` if the bytes could belong to this container, + ``False`` otherwise. 
+ """ + return schema_from_header(header) is not None + + def __init__(self, filename, parent=None, schema=NEWEST_SCHEMA_VERSION): self.home = filename if os.path.exists(self.home): # we don't cache anything, just check the magic with open(self.home, 'rb') as f: - data = f.read(self.HEADER_SIZE) - (magic, - write_enabler_nodeid, write_enabler, - data_length, extra_least_offset) = \ - struct.unpack(">32s20s32sQQ", data) - if magic != self.MAGIC: - msg = "sharefile %s had magic '%r' but we wanted '%r'" % \ - (filename, magic, self.MAGIC) - raise UnknownMutableContainerVersionError(msg) + header = f.read(self.HEADER_SIZE) + self._schema = schema_from_header(header) + if self._schema is None: + raise UnknownMutableContainerVersionError(filename, header) + else: + self._schema = schema self.parent = parent # for logging def log(self, *args, **kwargs): @@ -88,23 +93,8 @@ class MutableShareFile(object): def create(self, my_nodeid, write_enabler): assert not os.path.exists(self.home) - data_length = 0 - extra_lease_offset = (self.HEADER_SIZE - + 4 * self.LEASE_SIZE - + data_length) - assert extra_lease_offset == self.DATA_OFFSET # true at creation - num_extra_leases = 0 with open(self.home, 'wb') as f: - header = struct.pack( - ">32s20s32sQQ", - self.MAGIC, my_nodeid, write_enabler, - data_length, extra_lease_offset, - ) - leases = (b"\x00" * self.LEASE_SIZE) * 4 - f.write(header + leases) - # data goes here, empty after creation - f.write(struct.pack(">L", num_extra_leases)) - # extra leases go here, none at creation + f.write(self._schema.header(my_nodeid, write_enabler)) def unlink(self): os.unlink(self.home) @@ -120,6 +110,7 @@ class MutableShareFile(object): def _read_share_data(self, f, offset, length): precondition(offset >= 0) + precondition(length >= 0) data_length = self._read_data_length(f) if offset+length > data_length: # reads beyond the end of the data are truncated. 
Reads that @@ -236,7 +227,7 @@ class MutableShareFile(object): + (lease_number-4)*self.LEASE_SIZE) f.seek(offset) assert f.tell() == offset - f.write(lease_info.to_mutable_data()) + f.write(self._schema.lease_serializer.serialize(lease_info)) def _read_lease_record(self, f, lease_number): # returns a LeaseInfo instance, or None @@ -253,7 +244,7 @@ class MutableShareFile(object): f.seek(offset) assert f.tell() == offset data = f.read(self.LEASE_SIZE) - lease_info = LeaseInfo().from_mutable_data(data) + lease_info = self._schema.lease_serializer.unserialize(data) if lease_info.owner_num == 0: return None return lease_info @@ -288,7 +279,19 @@ class MutableShareFile(object): except IndexError: return - def add_lease(self, lease_info): + def add_lease(self, available_space, lease_info): + """ + Add a new lease to this share. + + :param int available_space: The maximum number of bytes of storage to + commit in this operation. If more than this number of bytes is + required, raise ``NoSpace`` instead. + + :raise NoSpace: If more than ``available_space`` bytes is required to + complete the operation. In this case, no lease is added. + + :return: ``None`` + """ precondition(lease_info.owner_num != 0) # 0 means "no lease here" with open(self.home, 'rb+') as f: num_lease_slots = self._get_num_lease_slots(f) @@ -296,17 +299,30 @@ class MutableShareFile(object): if empty_slot is not None: self._write_lease_record(f, empty_slot, lease_info) else: + if lease_info.mutable_size() > available_space: + raise NoSpace() self._write_lease_record(f, num_lease_slots, lease_info) - def renew_lease(self, renew_secret, new_expire_time): + def renew_lease(self, renew_secret, new_expire_time, allow_backdate=False): + # type: (bytes, int, bool) -> None + """ + Update the expiration time on an existing lease. + + :param allow_backdate: If ``True`` then allow the new expiration time + to be before the current expiration time. Otherwise, make no + change when this is the case. 
+ + :raise IndexError: If there is no lease matching the given renew + secret. + """ accepting_nodeids = set() with open(self.home, 'rb+') as f: for (leasenum,lease) in self._enumerate_leases(f): - if timing_safe_compare(lease.renew_secret, renew_secret): + if lease.is_renew_secret(renew_secret): # yup. See if we need to update the owner time. - if new_expire_time > lease.expiration_time: + if allow_backdate or new_expire_time > lease.get_expiration_time(): # yes - lease.expiration_time = new_expire_time + lease = lease.renew(new_expire_time) self._write_lease_record(f, leasenum, lease) return accepting_nodeids.add(lease.nodeid) @@ -320,13 +336,13 @@ class MutableShareFile(object): msg += " ." raise IndexError(msg) - def add_or_renew_lease(self, lease_info): + def add_or_renew_lease(self, available_space, lease_info): precondition(lease_info.owner_num != 0) # 0 means "no lease here" try: self.renew_lease(lease_info.renew_secret, - lease_info.expiration_time) + lease_info.get_expiration_time()) except IndexError: - self.add_lease(lease_info) + self.add_lease(available_space, lease_info) def cancel_lease(self, cancel_secret): """Remove any leases with the given cancel_secret. 
If the last lease @@ -346,7 +362,7 @@ class MutableShareFile(object): with open(self.home, 'rb+') as f: for (leasenum,lease) in self._enumerate_leases(f): accepting_nodeids.add(lease.nodeid) - if timing_safe_compare(lease.cancel_secret, cancel_secret): + if lease.is_cancel_secret(cancel_secret): self._write_lease_record(f, leasenum, blank_lease) modified += 1 else: @@ -377,7 +393,7 @@ class MutableShareFile(object): write_enabler_nodeid, write_enabler, data_length, extra_least_offset) = \ struct.unpack(">32s20s32sQQ", data) - assert magic == self.MAGIC + assert self.is_valid_header(data) return (write_enabler, write_enabler_nodeid) def readv(self, readv): @@ -387,11 +403,14 @@ class MutableShareFile(object): datav.append(self._read_share_data(f, offset, length)) return datav -# def remote_get_length(self): -# f = open(self.home, 'rb') -# data_length = self._read_data_length(f) -# f.close() -# return data_length + def get_length(self): + """ + Return the length of the data in the share. + """ + f = open(self.home, 'rb') + data_length = self._read_data_length(f) + f.close() + return data_length def check_write_enabler(self, write_enabler, si_s): with open(self.home, 'rb+') as f: @@ -454,4 +473,3 @@ def create_mutable_sharefile(filename, my_nodeid, write_enabler, parent): ms.create(my_nodeid, write_enabler) del ms return MutableShareFile(filename, parent) - diff --git a/src/allmydata/storage/mutable_schema.py b/src/allmydata/storage/mutable_schema.py new file mode 100644 index 000000000..389d743f4 --- /dev/null +++ b/src/allmydata/storage/mutable_schema.py @@ -0,0 +1,135 @@ +""" +Ported to Python 3. +""" + +import struct + +import attr + +from ..util.hashutil import ( + tagged_hash, +) +from .lease import ( + LeaseInfo, +) +from .lease_schema import ( + v1_mutable, + v2_mutable, +) + +def _magic(version): + # type: (int) -> bytes + """ + Compute a "magic" header string for a container of the given version. + + :param version: The version number of the container. 
+ """ + # Make it easy for people to recognize + human_readable = u"Tahoe mutable container v{:d}\n".format(version).encode("ascii") + # But also keep the chance of accidental collision low + if version == 1: + # It's unclear where this byte sequence came from. It may have just + # been random. In any case, preserve it since it is the magic marker + # in all v1 share files. + random_bytes = b"\x75\x09\x44\x03\x8e" + else: + # For future versions, use a reproducable scheme. + random_bytes = tagged_hash( + b"allmydata_mutable_container_header", + human_readable, + truncate_to=5, + ) + magic = human_readable + random_bytes + assert len(magic) == 32 + if version > 1: + # The chance of collision is pretty low but let's just be sure about + # it. + assert magic != _magic(version - 1) + + return magic + +def _header(magic, extra_lease_offset, nodeid, write_enabler): + # type: (bytes, int, bytes, bytes) -> bytes + """ + Construct a container header. + + :param nodeid: A unique identifier for the node holding this + container. + + :param write_enabler: A secret shared with the client used to + authorize changes to the contents of this container. + """ + fixed_header = struct.pack( + ">32s20s32sQQ", + magic, + nodeid, + write_enabler, + # data length, initially the container is empty + 0, + extra_lease_offset, + ) + blank_leases = b"\x00" * LeaseInfo().mutable_size() * 4 + extra_lease_count = struct.pack(">L", 0) + + return b"".join([ + fixed_header, + # share data will go in between the next two items eventually but + # for now there is none. + blank_leases, + extra_lease_count, + ]) + + +_HEADER_FORMAT = ">32s20s32sQQ" + +# This size excludes leases +_HEADER_SIZE = struct.calcsize(_HEADER_FORMAT) + +_EXTRA_LEASE_OFFSET = _HEADER_SIZE + 4 * LeaseInfo().mutable_size() + + +@attr.s(frozen=True) +class _Schema(object): + """ + Implement encoding and decoding for the mutable container. 
+ + :ivar int version: the version number of the schema this object supports + + :ivar lease_serializer: an object that is responsible for lease + serialization and unserialization + """ + version = attr.ib() + lease_serializer = attr.ib() + _magic = attr.ib() + + @classmethod + def for_version(cls, version, lease_serializer): + return cls(version, lease_serializer, magic=_magic(version)) + + def magic_matches(self, candidate_magic): + # type: (bytes) -> bool + """ + Return ``True`` if a candidate string matches the expected magic string + from a mutable container header, ``False`` otherwise. + """ + return candidate_magic[:len(self._magic)] == self._magic + + def header(self, nodeid, write_enabler): + return _header(self._magic, _EXTRA_LEASE_OFFSET, nodeid, write_enabler) + +ALL_SCHEMAS = { + _Schema.for_version(version=2, lease_serializer=v2_mutable), + _Schema.for_version(version=1, lease_serializer=v1_mutable), +} +ALL_SCHEMA_VERSIONS = {schema.version for schema in ALL_SCHEMAS} +NEWEST_SCHEMA_VERSION = max(ALL_SCHEMAS, key=lambda schema: schema.version) + +def schema_from_header(header): + # (int) -> Optional[type] + """ + Find the schema object that corresponds to a certain version number. + """ + for schema in ALL_SCHEMAS: + if schema.magic_matches(header): + return schema + return None diff --git a/src/allmydata/storage/server.py b/src/allmydata/storage/server.py index 041783a4e..9e9fb4b47 100644 --- a/src/allmydata/storage/server.py +++ b/src/allmydata/storage/server.py @@ -1,25 +1,16 @@ """ Ported to Python 3. """ -from __future__ import division -from __future__ import absolute_import -from __future__ import print_function -from __future__ import unicode_literals +from __future__ import annotations -from future.utils import bytes_to_native_str, PY2 -if PY2: - # Omit open() to get native behavior where open("w") always accepts native - # strings. Omit bytes so we don't leak future's custom bytes. 
- from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, pow, round, super, dict, list, object, range, str, max, min # noqa: F401 -else: - from typing import Dict +from typing import Iterable, Any -import os, re, struct, time -import six +import os, re from foolscap.api import Referenceable from foolscap.ipb import IRemoteReference from twisted.application import service +from twisted.internet import reactor from zope.interface import implementer from allmydata.interfaces import RIStorageServer, IStatsProducer @@ -32,7 +23,10 @@ from allmydata.storage.lease import LeaseInfo from allmydata.storage.mutable import MutableShareFile, EmptyShare, \ create_mutable_sharefile from allmydata.mutable.layout import MAX_MUTABLE_SHARE_SIZE -from allmydata.storage.immutable import ShareFile, BucketWriter, BucketReader +from allmydata.storage.immutable import ( + ShareFile, BucketWriter, BucketReader, FoolscapBucketWriter, + FoolscapBucketReader, +) from allmydata.storage.crawler import BucketCountingCrawler from allmydata.storage.expirer import LeaseCheckingCrawler @@ -55,9 +49,15 @@ NUM_RE=re.compile("^[0-9]+$") DEFAULT_RENEWAL_TIME = 31 * 24 * 60 * 60 -@implementer(RIStorageServer, IStatsProducer) -class StorageServer(service.MultiService, Referenceable): - name = 'storage' +@implementer(IStatsProducer) +class StorageServer(service.MultiService): + """ + Implement the business logic for the storage server. + """ + # The type in Twisted for services is wrong in 22.10... 
+ # https://github.com/twisted/twisted/issues/10135 + name = 'storage' # type: ignore[assignment] + # only the tests change this to anything else LeaseCheckerClass = LeaseCheckingCrawler def __init__(self, storedir, nodeid, reserved_space=0, @@ -68,7 +68,7 @@ class StorageServer(service.MultiService, Referenceable): expiration_override_lease_duration=None, expiration_cutoff_date=None, expiration_sharetypes=("mutable", "immutable"), - get_current_time=time.time): + clock=reactor): service.MultiService.__init__(self) assert isinstance(nodeid, bytes) assert len(nodeid) == 20 @@ -78,9 +78,9 @@ class StorageServer(service.MultiService, Referenceable): sharedir = os.path.join(storedir, "shares") fileutil.make_dirs(sharedir) self.sharedir = sharedir - # we don't actually create the corruption-advisory dir until necessary self.corruption_advisory_dir = os.path.join(storedir, "corruption-advisories") + fileutil.make_dirs(self.corruption_advisory_dir) self.reserved_space = int(reserved_space) self.no_storage = discard_storage self.readonly_storage = readonly_storage @@ -119,18 +119,19 @@ class StorageServer(service.MultiService, Referenceable): expiration_cutoff_date, expiration_sharetypes) self.lease_checker.setServiceParent(self) - self._get_current_time = get_current_time - - # Currently being-written Bucketwriters. For Foolscap, lifetime is tied - # to connection: when disconnection happens, the BucketWriters are - # removed. For HTTP, this makes no sense, so there will be - # timeout-based cleanup; see - # https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3807. 
+ self._clock = clock # Map in-progress filesystem path -> BucketWriter: self._bucket_writers = {} # type: Dict[str,BucketWriter] - # Canaries and disconnect markers for BucketWriters created via Foolscap: - self._bucket_writer_disconnect_markers = {} # type: Dict[BucketWriter,(IRemoteReference, object)] + + # These callables will be called with BucketWriters that closed: + self._call_on_bucket_writer_close = [] + + def stopService(self): + # Cancel any in-progress uploads: + for bw in list(self._bucket_writers.values()): + bw.disconnected() + return service.MultiService.stopService(self) def __repr__(self): return "" % (idlib.shortnodeid_b2a(self.my_nodeid),) @@ -253,7 +254,7 @@ class StorageServer(service.MultiService, Referenceable): space += bw.allocated_size() return space - def remote_get_version(self): + def get_version(self): remaining_space = self.get_available_space() if remaining_space is None: # We're on a platform that has no API to get disk stats. @@ -274,19 +275,24 @@ class StorageServer(service.MultiService, Referenceable): } return version - def _allocate_buckets(self, storage_index, + def allocate_buckets(self, storage_index, renew_secret, cancel_secret, sharenums, allocated_size, - owner_num=0): + owner_num=0, renew_leases=True): """ Generic bucket allocation API. + + :param bool renew_leases: If and only if this is ``True`` then renew a + secret-matching lease on (or, if none match, add a new lease to) + existing shares in this bucket. Any *new* shares are given a new + lease regardless. """ # owner_num is not for clients to set, but rather it should be # curried into the PersonalStorageServer instance that is dedicated # to a particular owner. 
- start = self._get_current_time() + start = self._clock.seconds() self.count("allocate") - alreadygot = set() + alreadygot = {} bucketwriters = {} # k: shnum, v: BucketWriter si_dir = storage_index_to_dir(storage_index) si_s = si_b2a(storage_index) @@ -297,7 +303,7 @@ class StorageServer(service.MultiService, Referenceable): # goes into the share files themselves. It could also be put into a # separate database. Note that the lease should not be added until # the BucketWriter has been closed. - expire_time = self._get_current_time() + DEFAULT_RENEWAL_TIME + expire_time = self._clock.seconds() + DEFAULT_RENEWAL_TIME lease_info = LeaseInfo(owner_num, renew_secret, cancel_secret, expire_time, self.my_nodeid) @@ -317,10 +323,10 @@ class StorageServer(service.MultiService, Referenceable): # they asked about: this will save them a lot of work. Add or update # leases for all of them: if they want us to hold shares for this # file, they'll want us to hold leases for this file. - for (shnum, fn) in self._get_bucket_shares(storage_index): - alreadygot.add(shnum) - sf = ShareFile(fn) - sf.add_or_renew_lease(lease_info) + for (shnum, fn) in self.get_shares(storage_index): + alreadygot[shnum] = ShareFile(fn) + if renew_leases: + self._add_or_renew_leases(alreadygot.values(), lease_info) for shnum in sharenums: incominghome = os.path.join(self.incomingdir, si_dir, "%d" % shnum) @@ -337,8 +343,12 @@ class StorageServer(service.MultiService, Referenceable): elif (not limited) or (remaining_space >= max_space_per_bucket): # ok! we need to create the new share file. 
bw = BucketWriter(self, incominghome, finalhome, - max_space_per_bucket, lease_info) + max_space_per_bucket, lease_info, + clock=self._clock) if self.no_storage: + # Really this should be done by having a separate class for + # this situation; see + # https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3862 bw.throw_out_all_data = True bucketwriters[shnum] = bw self._bucket_writers[incominghome] = bw @@ -351,61 +361,47 @@ class StorageServer(service.MultiService, Referenceable): if bucketwriters: fileutil.make_dirs(os.path.join(self.sharedir, si_dir)) - self.add_latency("allocate", self._get_current_time() - start) - return alreadygot, bucketwriters - - def remote_allocate_buckets(self, storage_index, - renew_secret, cancel_secret, - sharenums, allocated_size, - canary, owner_num=0): - """Foolscap-specific ``allocate_buckets()`` API.""" - alreadygot, bucketwriters = self._allocate_buckets( - storage_index, renew_secret, cancel_secret, sharenums, allocated_size, - owner_num=owner_num, - ) - # Abort BucketWriters if disconnection happens. - for bw in bucketwriters.values(): - disconnect_marker = canary.notifyOnDisconnect(bw.disconnected) - self._bucket_writer_disconnect_markers[bw] = (canary, disconnect_marker) - return alreadygot, bucketwriters + self.add_latency("allocate", self._clock.seconds() - start) + return set(alreadygot), bucketwriters def _iter_share_files(self, storage_index): - for shnum, filename in self._get_bucket_shares(storage_index): + for shnum, filename in self.get_shares(storage_index): with open(filename, 'rb') as f: header = f.read(32) - if header[:32] == MutableShareFile.MAGIC: + if MutableShareFile.is_valid_header(header): sf = MutableShareFile(filename, self) # note: if the share has been migrated, the renew_lease() # call will throw an exception, with information to help the # client update the lease. 
- elif header[:4] == struct.pack(">L", 1): + elif ShareFile.is_valid_header(header): sf = ShareFile(filename) else: continue # non-sharefile yield sf - def remote_add_lease(self, storage_index, renew_secret, cancel_secret, - owner_num=1): - start = self._get_current_time() + def add_lease(self, storage_index, renew_secret, cancel_secret, owner_num=1): + start = self._clock.seconds() self.count("add-lease") - new_expire_time = self._get_current_time() + DEFAULT_RENEWAL_TIME + new_expire_time = self._clock.seconds() + DEFAULT_RENEWAL_TIME lease_info = LeaseInfo(owner_num, renew_secret, cancel_secret, new_expire_time, self.my_nodeid) - for sf in self._iter_share_files(storage_index): - sf.add_or_renew_lease(lease_info) - self.add_latency("add-lease", self._get_current_time() - start) + self._add_or_renew_leases( + self._iter_share_files(storage_index), + lease_info, + ) + self.add_latency("add-lease", self._clock.seconds() - start) return None - def remote_renew_lease(self, storage_index, renew_secret): - start = self._get_current_time() + def renew_lease(self, storage_index, renew_secret): + start = self._clock.seconds() self.count("renew") - new_expire_time = self._get_current_time() + DEFAULT_RENEWAL_TIME + new_expire_time = self._clock.seconds() + DEFAULT_RENEWAL_TIME found_buckets = False for sf in self._iter_share_files(storage_index): found_buckets = True sf.renew_lease(renew_secret, new_expire_time) - self.add_latency("renew", self._get_current_time() - start) + self.add_latency("renew", self._clock.seconds() - start) if not found_buckets: raise IndexError("no such lease to renew") @@ -413,14 +409,21 @@ class StorageServer(service.MultiService, Referenceable): if self.stats_provider: self.stats_provider.count('storage_server.bytes_added', consumed_size) del self._bucket_writers[bw.incominghome] - if bw in self._bucket_writer_disconnect_markers: - canary, disconnect_marker = self._bucket_writer_disconnect_markers.pop(bw) - 
canary.dontNotifyOnDisconnect(disconnect_marker) + for handler in self._call_on_bucket_writer_close: + handler(bw) - def _get_bucket_shares(self, storage_index): - """Return a list of (shnum, pathname) tuples for files that hold + def register_bucket_writer_close_handler(self, handler): + """ + The handler will be called with any ``BucketWriter`` that closes. + """ + self._call_on_bucket_writer_close.append(handler) + + def get_shares(self, storage_index) -> Iterable[tuple[int, str]]: + """ + Return an iterable of (shnum, pathname) tuples for files that hold shares for this storage_index. In each tuple, 'shnum' will always be - the integer form of the last component of 'pathname'.""" + the integer form of the last component of 'pathname'. + """ storagedir = os.path.join(self.sharedir, storage_index_to_dir(storage_index)) try: for f in os.listdir(storagedir): @@ -431,16 +434,19 @@ class StorageServer(service.MultiService, Referenceable): # Commonly caused by there being no buckets at all. pass - def remote_get_buckets(self, storage_index): - start = self._get_current_time() + def get_buckets(self, storage_index): + """ + Get ``BucketReaders`` for an immutable. 
+ """ + start = self._clock.seconds() self.count("get") si_s = si_b2a(storage_index) log.msg("storage: get_buckets %r" % si_s) bucketreaders = {} # k: sharenum, v: BucketReader - for shnum, filename in self._get_bucket_shares(storage_index): + for shnum, filename in self.get_shares(storage_index): bucketreaders[shnum] = BucketReader(self, filename, storage_index, shnum) - self.add_latency("get", self._get_current_time() - start) + self.add_latency("get", self._clock.seconds() - start) return bucketreaders def get_leases(self, storage_index): @@ -454,7 +460,7 @@ class StorageServer(service.MultiService, Referenceable): # since all shares get the same lease data, we just grab the leases # from the first share try: - shnum, filename = next(self._get_bucket_shares(storage_index)) + shnum, filename = next(self.get_shares(storage_index)) sf = ShareFile(filename) return sf.get_leases() except StopIteration: @@ -468,7 +474,7 @@ class StorageServer(service.MultiService, Referenceable): :return: An iterable of the leases attached to this slot. """ - for _, share_filename in self._get_bucket_shares(storage_index): + for _, share_filename in self.get_shares(storage_index): share = MutableShareFile(share_filename) return share.get_leases() return [] @@ -579,10 +585,8 @@ class StorageServer(service.MultiService, Referenceable): else: if sharenum not in shares: # allocate a new share - allocated_size = 2000 # arbitrary, really share = self._allocate_slot_share(bucketdir, secrets, sharenum, - allocated_size, owner_num=0) shares[sharenum] = share shares[sharenum].writev(datav, new_length) @@ -601,7 +605,7 @@ class StorageServer(service.MultiService, Referenceable): :return LeaseInfo: Information for a new lease for a share. 
""" ownerid = 1 # TODO - expire_time = self._get_current_time() + DEFAULT_RENEWAL_TIME + expire_time = self._clock.seconds() + DEFAULT_RENEWAL_TIME lease_info = LeaseInfo(ownerid, renew_secret, cancel_secret, expire_time, self.my_nodeid) @@ -611,13 +615,13 @@ class StorageServer(service.MultiService, Referenceable): """ Put the given lease onto the given shares. - :param dict[int, MutableShareFile] shares: The shares to put the lease - onto. + :param Iterable[Union[MutableShareFile, ShareFile]] shares: The shares + to put the lease onto. :param LeaseInfo lease_info: The lease to put on the shares. """ - for share in six.viewvalues(shares): - share.add_or_renew_lease(lease_info) + for share in shares: + share.add_or_renew_lease(self.get_available_space(), lease_info) def slot_testv_and_readv_and_writev( # type: ignore # warner/foolscap#78 self, @@ -625,19 +629,21 @@ class StorageServer(service.MultiService, Referenceable): secrets, test_and_write_vectors, read_vector, - renew_leases, + renew_leases=True, ): """ Read data from shares and conditionally write some data to them. :param bool renew_leases: If and only if this is ``True`` and the test - vectors pass then shares in this slot will also have an updated - lease applied to them. + vectors pass then shares mentioned in ``test_and_write_vectors`` + that still exist after the changes are made will also have a + secret-matching lease renewed (or, if none match, a new lease + added). See ``allmydata.interfaces.RIStorageServer`` for details about other parameters and return value. 
""" - start = self._get_current_time() + start = self._clock.seconds() self.count("writev") si_s = si_b2a(storage_index) log.msg("storage: slot_writev %r" % si_s) @@ -675,26 +681,14 @@ class StorageServer(service.MultiService, Referenceable): ) if renew_leases: lease_info = self._make_lease_info(renew_secret, cancel_secret) - self._add_or_renew_leases(remaining_shares, lease_info) + self._add_or_renew_leases(remaining_shares.values(), lease_info) # all done - self.add_latency("writev", self._get_current_time() - start) + self.add_latency("writev", self._clock.seconds() - start) return (testv_is_good, read_data) - def remote_slot_testv_and_readv_and_writev(self, storage_index, - secrets, - test_and_write_vectors, - read_vector): - return self.slot_testv_and_readv_and_writev( - storage_index, - secrets, - test_and_write_vectors, - read_vector, - renew_leases=True, - ) - def _allocate_slot_share(self, bucketdir, secrets, sharenum, - allocated_size, owner_num=0): + owner_num=0): (write_enabler, renew_secret, cancel_secret) = secrets my_nodeid = self.my_nodeid fileutil.make_dirs(bucketdir) @@ -703,8 +697,23 @@ class StorageServer(service.MultiService, Referenceable): self) return share - def remote_slot_readv(self, storage_index, shares, readv): - start = self._get_current_time() + def enumerate_mutable_shares(self, storage_index: bytes) -> set[int]: + """Return all share numbers for the given mutable.""" + si_dir = storage_index_to_dir(storage_index) + # shares exist if there is a file for them + bucketdir = os.path.join(self.sharedir, si_dir) + if not os.path.isdir(bucketdir): + return set() + result = set() + for sharenum_s in os.listdir(bucketdir): + try: + result.add(int(sharenum_s)) + except ValueError: + continue + return result + + def slot_readv(self, storage_index, shares, readv): + start = self._clock.seconds() self.count("readv") si_s = si_b2a(storage_index) lp = log.msg("storage: slot_readv %r %r" % (si_s, shares), @@ -713,7 +722,7 @@ class 
StorageServer(service.MultiService, Referenceable): # shares exist if there is a file for them bucketdir = os.path.join(self.sharedir, si_dir) if not os.path.isdir(bucketdir): - self.add_latency("readv", self._get_current_time() - start) + self.add_latency("readv", self._clock.seconds() - start) return {} datavs = {} for sharenum_s in os.listdir(bucketdir): @@ -727,33 +736,222 @@ class StorageServer(service.MultiService, Referenceable): datavs[sharenum] = msf.readv(readv) log.msg("returning shares %s" % (list(datavs.keys()),), facility="tahoe.storage", level=log.NOISY, parent=lp) - self.add_latency("readv", self._get_current_time() - start) + self.add_latency("readv", self._clock.seconds() - start) return datavs - def remote_advise_corrupt_share(self, share_type, storage_index, shnum, - reason): - # This is a remote API, I believe, so this has to be bytes for legacy - # protocol backwards compatibility reasons. + def _share_exists(self, storage_index, shnum): + """ + Check local share storage to see if a matching share exists. + + :param bytes storage_index: The storage index to inspect. + :param int shnum: The share number to check for. + + :return bool: ``True`` if a share with the given number exists at the + given storage index, ``False`` otherwise. + """ + for existing_sharenum, ignored in self.get_shares(storage_index): + if existing_sharenum == shnum: + return True + return False + + def advise_corrupt_share(self, share_type, storage_index, shnum, + reason): + # Previously this had to be bytes for legacy protocol backwards + # compatibility reasons. Now that Foolscap layer has been abstracted + # out, we can probably refactor this to be unicode... 
assert isinstance(share_type, bytes) assert isinstance(reason, bytes), "%r is not bytes" % (reason,) - fileutil.make_dirs(self.corruption_advisory_dir) - now = time_format.iso_utc(sep="T") + si_s = si_b2a(storage_index) - # windows can't handle colons in the filename - fn = os.path.join( - self.corruption_advisory_dir, - ("%s--%s-%d" % (now, str(si_s, "utf-8"), shnum)).replace(":","") - ) - with open(fn, "w") as f: - f.write("report: Share Corruption\n") - f.write("type: %s\n" % bytes_to_native_str(share_type)) - f.write("storage_index: %s\n" % bytes_to_native_str(si_s)) - f.write("share_number: %d\n" % shnum) - f.write("\n") - f.write(bytes_to_native_str(reason)) - f.write("\n") + + if not self._share_exists(storage_index, shnum): + log.msg( + format=( + "discarding client corruption claim for %(si)s/%(shnum)d " + "which I do not have" + ), + si=si_s, + shnum=shnum, + ) + return + log.msg(format=("client claims corruption in (%(share_type)s) " + "%(si)s-%(shnum)d: %(reason)s"), share_type=share_type, si=si_s, shnum=shnum, reason=reason, level=log.SCARY, umid="SGx2fA") + + report = render_corruption_report(share_type, si_s, shnum, reason) + if len(report) > self.get_available_space(): + return None + + now = time_format.iso_utc(sep="T") + report_path = get_corruption_report_path( + self.corruption_advisory_dir, + now, + si_s.decode("utf8"), + shnum, + ) + with open(report_path, "w", encoding="utf-8") as f: + f.write(report) + return None + + def get_immutable_share_length(self, storage_index: bytes, share_number: int) -> int: + """Returns the length (in bytes) of an immutable.""" + si_dir = storage_index_to_dir(storage_index) + path = os.path.join(self.sharedir, si_dir, str(share_number)) + return ShareFile(path).get_length() + + def get_mutable_share_length(self, storage_index: bytes, share_number: int) -> int: + """Returns the length (in bytes) of a mutable.""" + si_dir = storage_index_to_dir(storage_index) + path = os.path.join(self.sharedir, si_dir, 
str(share_number)) + if not os.path.exists(path): + raise KeyError("No such storage index or share number") + return MutableShareFile(path).get_length() + + +@implementer(RIStorageServer) +class FoolscapStorageServer(Referenceable): # type: ignore # warner/foolscap#78 + """ + A filesystem-based implementation of ``RIStorageServer``. + + For Foolscap, BucketWriter lifetime is tied to connection: when + disconnection happens, the BucketWriters are removed. + """ + name = 'storage' + + def __init__(self, storage_server): # type: (StorageServer) -> None + self._server = storage_server + + # Canaries and disconnect markers for BucketWriters created via Foolscap: + self._bucket_writer_disconnect_markers : dict[BucketWriter, tuple[IRemoteReference, Any]] = {} + + self._server.register_bucket_writer_close_handler(self._bucket_writer_closed) + + def _bucket_writer_closed(self, bw): + if bw in self._bucket_writer_disconnect_markers: + canary, disconnect_marker = self._bucket_writer_disconnect_markers.pop(bw) + canary.dontNotifyOnDisconnect(disconnect_marker) + + def remote_get_version(self): + return self._server.get_version() + + def remote_allocate_buckets(self, storage_index, + renew_secret, cancel_secret, + sharenums, allocated_size, + canary, owner_num=0): + """Foolscap-specific ``allocate_buckets()`` API.""" + alreadygot, bucketwriters = self._server.allocate_buckets( + storage_index, renew_secret, cancel_secret, sharenums, allocated_size, + owner_num=owner_num, renew_leases=True, + ) + + # Abort BucketWriters if disconnection happens. 
+ for bw in bucketwriters.values(): + disconnect_marker = canary.notifyOnDisconnect(bw.disconnected) + self._bucket_writer_disconnect_markers[bw] = (canary, disconnect_marker) + + # Wrap BucketWriters with Foolscap adapter: + bucketwriters = { + k: FoolscapBucketWriter(bw) + for (k, bw) in bucketwriters.items() + } + + return alreadygot, bucketwriters + + def remote_add_lease(self, storage_index, renew_secret, cancel_secret, + owner_num=1): + return self._server.add_lease(storage_index, renew_secret, cancel_secret) + + def remote_renew_lease(self, storage_index, renew_secret): + return self._server.renew_lease(storage_index, renew_secret) + + def remote_get_buckets(self, storage_index): + return { + k: FoolscapBucketReader(bucket) + for (k, bucket) in self._server.get_buckets(storage_index).items() + } + + def remote_slot_testv_and_readv_and_writev(self, storage_index, + secrets, + test_and_write_vectors, + read_vector): + return self._server.slot_testv_and_readv_and_writev( + storage_index, + secrets, + test_and_write_vectors, + read_vector, + renew_leases=True, + ) + + def remote_slot_readv(self, storage_index, shares, readv): + return self._server.slot_readv(storage_index, shares, readv) + + def remote_advise_corrupt_share(self, share_type, storage_index, shnum, + reason): + return self._server.advise_corrupt_share(share_type, storage_index, shnum, + reason) + + +CORRUPTION_REPORT_FORMAT = """\ +report: Share Corruption +type: {type} +storage_index: {storage_index} +share_number: {share_number} + +{reason} + +""" + +def render_corruption_report( + share_type: bytes, + si_s: bytes, + shnum: int, + reason: bytes +) -> str: + """ + Create a string that explains a corruption report using freeform text. + + :param bytes share_type: The type of the share which the report is about. + + :param bytes si_s: The encoded representation of the storage index which + the report is about. + + :param int shnum: The share number which the report is about. 
+ + :param bytes reason: The reason given by the client for the corruption + report. + """ + return CORRUPTION_REPORT_FORMAT.format( + type=share_type.decode(), + storage_index=si_s.decode(), + share_number=shnum, + reason=reason.decode(), + ) + +def get_corruption_report_path( + base_dir: str, + now: str, + si_s: str, + shnum: int +) -> str: + """ + Determine the path to which a certain corruption report should be written. + + :param str base_dir: The directory beneath which to construct the path. + + :param str now: The time of the report. + + :param str si_s: The encoded representation of the storage index which the + report is about. + + :param int shnum: The share number which the report is about. + + :return str: A path to which the report can be written. + """ + # windows can't handle colons in the filename + return os.path.join( + base_dir, + ("%s--%s-%d" % (now, si_s, shnum)).replace(":","") + ) diff --git a/src/allmydata/storage/shares.py b/src/allmydata/storage/shares.py index ec6c0a501..6c9526b47 100644 --- a/src/allmydata/storage/shares.py +++ b/src/allmydata/storage/shares.py @@ -2,23 +2,13 @@ Ported to Python 3. 
""" -from __future__ import unicode_literals -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - from allmydata.storage.mutable import MutableShareFile from allmydata.storage.immutable import ShareFile def get_share_file(filename): with open(filename, "rb") as f: prefix = f.read(32) - if prefix == MutableShareFile.MAGIC: + if MutableShareFile.is_valid_header(prefix): return MutableShareFile(filename) # otherwise assume it's immutable return ShareFile(filename) - diff --git a/src/allmydata/storage_client.py b/src/allmydata/storage_client.py index 526e4e70d..9e6f94f47 100644 --- a/src/allmydata/storage_client.py +++ b/src/allmydata/storage_client.py @@ -5,10 +5,6 @@ the foolscap-based server implemented in src/allmydata/storage/*.py . Ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals # roadmap: # @@ -34,31 +30,41 @@ from __future__ import unicode_literals # # 6: implement other sorts of IStorageClient classes: S3, etc -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 -from six import ensure_text +from __future__ import annotations -import re, time, hashlib - -# On Python 2 this will be the backport. 
+from typing import Union, Callable, Any, Optional, cast, Dict, Iterable +from os import urandom +import re +import time +import hashlib +from io import StringIO from configparser import NoSectionError +import json import attr +from attr import define +from hyperlink import DecodedURL +from twisted.web.client import HTTPConnectionPool from zope.interface import ( Attribute, Interface, implementer, ) -from twisted.internet import defer +from twisted.python.failure import Failure +from twisted.web import http +from twisted.internet.task import LoopingCall +from twisted.internet import defer, reactor +from twisted.internet.interfaces import IReactorTime from twisted.application import service +from twisted.logger import Logger from twisted.plugin import ( getPlugins, ) from eliot import ( log_call, ) -from foolscap.api import eventually +from foolscap.ipb import IRemoteReference +from foolscap.api import eventually, RemoteException from foolscap.reconnector import ( ReconnectionInfo, ) @@ -68,13 +74,34 @@ from allmydata.interfaces import ( IServer, IStorageServer, IFoolscapStoragePlugin, + VersionMessage ) +from allmydata.grid_manager import ( + create_grid_manager_verifier, SignedCertificate +) +from allmydata.crypto import ( + ed25519, +) +from allmydata.util.tor_provider import _Provider as TorProvider from allmydata.util import log, base32, connection_status from allmydata.util.assertutil import precondition from allmydata.util.observer import ObserverList from allmydata.util.rrefutil import add_version_to_remote_reference from allmydata.util.hashutil import permute_server_hash from allmydata.util.dictutil import BytesKeyDict, UnicodeKeyDict +from allmydata.util.deferredutil import async_to_deferred, race +from allmydata.util.attrs_provides import provides +from allmydata.storage.http_client import ( + StorageClient, StorageClientImmutables, StorageClientGeneral, + ClientException as HTTPClientException, StorageClientMutables, + ReadVector, TestWriteVectors, 
WriteVector, TestVector, ClientException, + StorageClientFactory +) +from .node import _Config + +_log = Logger() + +ANONYMOUS_STORAGE_NURLS = "anonymous-storage-NURLs" # who is responsible for de-duplication? @@ -100,15 +127,21 @@ class StorageClientConfig(object): :ivar preferred_peers: An iterable of the server-ids (``bytes``) of the storage servers where share placement is preferred, in order of - decreasing preference. See the *[client]peers.preferred* - documentation for details. + decreasing preference. See the *[client]peers.preferred* documentation + for details. :ivar dict[unicode, dict[unicode, unicode]] storage_plugins: A mapping from names of ``IFoolscapStoragePlugin`` configured in *tahoe.cfg* to the respective configuration. + + :ivar list[ed25519.VerifyKey] grid_manager_keys: with no keys in + this list, we'll upload to any storage server. Otherwise, we will + only upload to a storage-server that has a valid certificate + signed by at least one of these keys. """ - preferred_peers = attr.ib(default=()) - storage_plugins = attr.ib(default=attr.Factory(dict)) + preferred_peers : Iterable[bytes] = attr.ib(default=()) + storage_plugins : dict[str, dict[str, str]] = attr.ib(default=attr.Factory(dict)) + grid_manager_keys : list[ed25519.Ed25519PublicKey] = attr.ib(default=attr.Factory(list)) @classmethod def from_node_config(cls, config): @@ -140,11 +173,44 @@ class StorageClientConfig(object): plugin_config = [] storage_plugins[plugin_name] = dict(plugin_config) + grid_manager_keys = [] + for name, gm_key in config.enumerate_section('grid_managers').items(): + grid_manager_keys.append( + ed25519.verifying_key_from_string(gm_key.encode("ascii")) + ) + + return cls( preferred_peers, storage_plugins, + grid_manager_keys, ) + def get_configured_storage_plugins(self) -> dict[str, IFoolscapStoragePlugin]: + """ + :returns: a mapping from names to instances for all available + plugins + + :raises MissingPlugin: if the configuration asks for a plugin + for which 
there is no corresponding instance (e.g. it is + not installed). + """ + plugins = { + plugin.name: plugin + for plugin + in getPlugins(IFoolscapStoragePlugin) + } + + # mypy doesn't like "str" in place of Any ... + configured: Dict[Any, IFoolscapStoragePlugin] = dict() + for plugin_name in self.storage_plugins: + try: + plugin = plugins[plugin_name] + except KeyError: + raise MissingPlugin(plugin_name) + configured[plugin_name] = plugin + return configured + @implementer(IStorageBroker) class StorageFarmBroker(service.MultiService): @@ -169,10 +235,15 @@ class StorageFarmBroker(service.MultiService): self, permute_peers, tub_maker, - node_config, + node_config: _Config, storage_client_config=None, + default_connection_handlers=None, + tor_provider: Optional[TorProvider]=None, ): service.MultiService.__init__(self) + if default_connection_handlers is None: + default_connection_handlers = {"tcp": "tcp"} + assert permute_peers # False not implemented yet self.permute_peers = permute_peers self._tub_maker = tub_maker @@ -188,10 +259,12 @@ class StorageFarmBroker(service.MultiService): # own Reconnector, and will give us a RemoteReference when we ask # them for it. 
self.servers = BytesKeyDict() - self._static_server_ids = set() # ignore announcements for these + self._static_server_ids : set[bytes] = set() # ignore announcements for these self.introducer_client = None - self._threshold_listeners = [] # tuples of (threshold, Deferred) + self._threshold_listeners : list[tuple[float,defer.Deferred[Any]]]= [] # tuples of (threshold, Deferred) self._connected_high_water_mark = 0 + self._tor_provider = tor_provider + self._default_connection_handlers = default_connection_handlers @log_call(action_type=u"storage-client:broker:set-static-servers") def set_static_servers(self, servers): @@ -199,7 +272,6 @@ class StorageFarmBroker(service.MultiService): # doesn't really matter but it makes the logging behavior more # predictable and easier to test (and at least one test does depend on # this sorted order). - servers = {ensure_text(key): value for (key, value) in servers.items()} for (server_id, server) in sorted(servers.items()): try: storage_server = self._make_storage_server( @@ -244,6 +316,16 @@ class StorageFarmBroker(service.MultiService): in self.storage_client_config.storage_plugins.items() }) + @staticmethod + def _should_we_use_http(node_config: _Config, announcement: dict) -> bool: + """ + Given an announcement dictionary and config, return whether we should + connect to storage server over HTTP. + """ + return not node_config.get_config( + "client", "force_foolscap", default=False, boolean=True, + ) and len(announcement.get(ANONYMOUS_STORAGE_NURLS, [])) > 0 + @log_call( action_type=u"storage-client:broker:make-storage-server", include_args=["server_id"], @@ -263,6 +345,23 @@ class StorageFarmBroker(service.MultiService): by the given announcement. 
""" assert isinstance(server_id, bytes) + gm_verifier = create_grid_manager_verifier( + self.storage_client_config.grid_manager_keys, + [SignedCertificate.load(StringIO(json.dumps(data))) for data in server["ann"].get("grid-manager-certificates", [])], + "pub-{}".format(str(server_id, "ascii")).encode("ascii"), # server_id is v0- not pub-v0-key .. for reasons? + ) + + if self._should_we_use_http(self.node_config, server["ann"]): + s = HTTPNativeStorageServer( + server_id, + server["ann"], + grid_manager_verifier=gm_verifier, + default_connection_handlers=self._default_connection_handlers, + tor_provider=self._tor_provider + ) + s.on_status_changed(lambda _: self._got_connection()) + return s + handler_overrides = server.get("connections", {}) s = NativeStorageServer( server_id, @@ -271,6 +370,7 @@ class StorageFarmBroker(service.MultiService): handler_overrides, self.node_config, self.storage_client_config, + gm_verifier, ) s.on_status_changed(lambda _: self._got_connection()) return s @@ -419,11 +519,26 @@ class StorageFarmBroker(service.MultiService): for dsc in list(self.servers.values()): dsc.try_to_connect() - def get_servers_for_psi(self, peer_selection_index): + def get_servers_for_psi(self, peer_selection_index, for_upload=False): + """ + :param for_upload: used to determine if we should include any + servers that are invalid according to Grid Manager + processing. When for_upload is True and we have any Grid + Manager keys configured, any storage servers with invalid or + missing certificates will be excluded. 
+ """ # return a list of server objects (IServers) assert self.permute_peers == True connected_servers = self.get_connected_servers() preferred_servers = frozenset(s for s in connected_servers if s.get_longname() in self.preferred_peers) + if for_upload: + # print("upload processing: {}".format([srv.upload_permitted() for srv in connected_servers])) + connected_servers = [ + srv + for srv in connected_servers + if srv.upload_permitted() + ] + def _permuted(server): seed = server.get_permutation_seed() is_unpreferred = server not in preferred_servers @@ -524,6 +639,45 @@ class IFoolscapStorageServer(Interface): """ +def _parse_announcement(server_id: bytes, furl: bytes, ann: dict) -> tuple[str, bytes, bytes, bytes, bytes]: + """ + Parse the furl and announcement, return: + + (nickname, permutation_seed, tubid, short_description, long_description) + """ + m = re.match(br'pb://(\w+)@', furl) + assert m, furl + tubid_s = m.group(1).lower() + tubid = base32.a2b(tubid_s) + if "permutation-seed-base32" in ann: + seed = ann["permutation-seed-base32"] + if isinstance(seed, str): + seed = seed.encode("utf-8") + ps = base32.a2b(seed) + elif re.search(br'^v0-[0-9a-zA-Z]{52}$', server_id): + ps = base32.a2b(server_id[3:]) + else: + log.msg("unable to parse serverid '%(server_id)s as pubkey, " + "hashing it to get permutation-seed, " + "may not converge with other clients", + server_id=server_id, + facility="tahoe.storage_broker", + level=log.UNUSUAL, umid="qu86tw") + ps = hashlib.sha256(server_id).digest() + permutation_seed = ps + + assert server_id + long_description = server_id + if server_id.startswith(b"v0-"): + # remove v0- prefix from abbreviated name + short_description = server_id[3:3+8] + else: + short_description = server_id[:8] + nickname = ann.get("nickname", "") + + return (nickname, permutation_seed, tubid, short_description, long_description) + + @implementer(IFoolscapStorageServer) @attr.s(frozen=True) class _FoolscapStorage(object): @@ -534,7 +688,7 @@ class 
_FoolscapStorage(object): permutation_seed = attr.ib() tubid = attr.ib() - storage_server = attr.ib(validator=attr.validators.provides(IStorageServer)) + storage_server = attr.ib(validator=provides(IStorageServer)) _furl = attr.ib() _short_description = attr.ib() @@ -560,50 +714,21 @@ class _FoolscapStorage(object): {"permutation-seed-base32": "...", "nickname": "...", + "grid-manager-certificates": [..], } - *nickname* is optional. + *nickname* and *grid-manager-certificates* are optional. The furl will be a Unicode string on Python 3; on Python 2 it will be either a native (bytes) string or a Unicode string. """ - furl = furl.encode("utf-8") - m = re.match(br'pb://(\w+)@', furl) - assert m, furl - tubid_s = m.group(1).lower() - tubid = base32.a2b(tubid_s) - if "permutation-seed-base32" in ann: - seed = ann["permutation-seed-base32"] - if isinstance(seed, str): - seed = seed.encode("utf-8") - ps = base32.a2b(seed) - elif re.search(br'^v0-[0-9a-zA-Z]{52}$', server_id): - ps = base32.a2b(server_id[3:]) - else: - log.msg("unable to parse serverid '%(server_id)s as pubkey, " - "hashing it to get permutation-seed, " - "may not converge with other clients", - server_id=server_id, - facility="tahoe.storage_broker", - level=log.UNUSUAL, umid="qu86tw") - ps = hashlib.sha256(server_id).digest() - permutation_seed = ps - - assert server_id - long_description = server_id - if server_id.startswith(b"v0-"): - # remove v0- prefix from abbreviated name - short_description = server_id[3:3+8] - else: - short_description = server_id[:8] - nickname = ann.get("nickname", "") - + (nickname, permutation_seed, tubid, short_description, long_description) = _parse_announcement(server_id, furl.encode("utf-8"), ann) return cls( nickname=nickname, permutation_seed=permutation_seed, tubid=tubid, storage_server=storage_server, - furl=furl, + furl=furl.encode("utf-8"), short_description=short_description, long_description=long_description, ) @@ -613,6 +738,7 @@ class _FoolscapStorage(object): 
@implementer(IFoolscapStorageServer) +@define class _NullStorage(object): """ Abstraction for *not* communicating with a storage server of a type with @@ -626,7 +752,7 @@ class _NullStorage(object): lease_seed = hashlib.sha256(b"").digest() name = "" - longname = "" + longname: str = "" def connect_to(self, tub, got_connection): return NonReconnector() @@ -645,8 +771,6 @@ class NonReconnector(object): def getReconnectionInfo(self): return ReconnectionInfo() -_null_storage = _NullStorage() - class AnnouncementNotMatched(Exception): """ @@ -655,6 +779,18 @@ class AnnouncementNotMatched(Exception): """ +@attr.s(auto_exc=True) +class MissingPlugin(Exception): + """ + A particular plugin was requested but is missing + """ + + plugin_name = attr.ib() + + def __str__(self): + return "Missing plugin '{}'".format(self.plugin_name) + + def _storage_from_foolscap_plugin(node_config, config, announcement, get_rref): """ Construct an ``IStorageServer`` from the most locally-preferred plugin @@ -662,27 +798,124 @@ def _storage_from_foolscap_plugin(node_config, config, announcement, get_rref): :param allmydata.node._Config node_config: The node configuration to pass to the plugin. + + :param dict announcement: The storage announcement for the storage + server we should build """ - plugins = { - plugin.name: plugin - for plugin - in getPlugins(IFoolscapStoragePlugin) - } storage_options = announcement.get(u"storage-options", []) - for plugin_name, plugin_config in list(config.storage_plugins.items()): + plugins = config.get_configured_storage_plugins() + + # for every storage-option that we have enabled locally (in order + # of preference), see if the announcement asks for such a thing. + # if it does, great: we return that storage-client + # otherwise we've run out of options... 
+ + for options in storage_options: try: - plugin = plugins[plugin_name] + plugin = plugins[options[u"name"]] except KeyError: - raise ValueError("{} not installed".format(plugin_name)) - for option in storage_options: - if plugin_name == option[u"name"]: - furl = option[u"storage-server-FURL"] - return furl, plugin.get_storage_client( - node_config, - option, - get_rref, - ) - raise AnnouncementNotMatched() + # we didn't configure this kind of plugin locally, so + # consider the next announced option + continue + + furl = options[u"storage-server-FURL"] + return furl, plugin.get_storage_client( + node_config, + options, + get_rref, + ) + + # none of the storage options in the announcement are configured + # locally; we can't make a storage-client. + plugin_names = ", ".join(sorted(option["name"] for option in storage_options)) + raise AnnouncementNotMatched(plugin_names) + + +def _available_space_from_version(version): + if version is None: + return None + protocol_v1_version = version.get(b'http://allmydata.org/tahoe/protocols/storage/v1', BytesKeyDict()) + available_space = protocol_v1_version.get(b'available-space') + if available_space is None: + available_space = protocol_v1_version.get(b'maximum-immutable-share-size', None) + return available_space + + +def _make_storage_system( + node_config: _Config, + config: StorageClientConfig, + ann: dict, + server_id: bytes, + get_rref: Callable[[], Optional[IRemoteReference]], +) -> IFoolscapStorageServer: + """ + Create an object for interacting with the storage server described by + the given announcement. + + :param node_config: The node configuration to pass to any configured + storage plugins. + + :param config: Configuration specifying desired storage client behavior. + + :param ann: The storage announcement from the storage server we are meant + to communicate with. + + :param server_id: The unique identifier for the server. 
+ + :param get_rref: A function which returns a remote reference to the + server-side object which implements this storage system, if one is + available (otherwise None). + + :return: An object enabling communication via Foolscap with the server + which generated the announcement. + """ + unmatched = None + # Try to match the announcement against a plugin. + try: + furl, storage_server = _storage_from_foolscap_plugin( + node_config, + config, + ann, + # Pass in an accessor for our _rref attribute. The value of + # the attribute may change over time as connections are lost + # and re-established. The _StorageServer should always be + # able to get the most up-to-date value. + get_rref, + ) + except AnnouncementNotMatched as e: + # show a more-specific error to the user for this server + # (Note this will only be shown if the server _doesn't_ offer + # anonymous service, which will match below) + unmatched = _NullStorage('{}: missing plugin "{}"'.format(server_id.decode("utf8"), str(e))) + else: + return _FoolscapStorage.from_announcement( + server_id, + furl, + ann, + storage_server, + ) + + # Try to match the announcement against the anonymous access scheme. + try: + furl = ann[u"anonymous-storage-FURL"] + except KeyError: + # Nope + pass + else: + # See comment above for the _storage_from_foolscap_plugin case + # about passing in get_rref. + storage_server = _StorageServer(get_rref=get_rref) + return _FoolscapStorage.from_announcement( + server_id, + furl, + ann, + storage_server, + ) + + # Nothing matched so we can't talk to this server. 
(There should + # not be a way to get here without this local being valid) + assert unmatched is not None, "Expected unmatched plugin error" + return unmatched @implementer(IServer) @@ -712,7 +945,8 @@ class NativeStorageServer(service.MultiService): "application-version": "unknown: no get_version()", }) - def __init__(self, server_id, ann, tub_maker, handler_overrides, node_config, config=StorageClientConfig()): + def __init__(self, server_id, ann, tub_maker, handler_overrides, node_config, config=None, + grid_manager_verifier=None): service.MultiService.__init__(self) assert isinstance(server_id, bytes) self._server_id = server_id @@ -720,7 +954,12 @@ class NativeStorageServer(service.MultiService): self._tub_maker = tub_maker self._handler_overrides = handler_overrides - self._storage = self._make_storage_system(node_config, config, ann) + if config is None: + config = StorageClientConfig() + + self._grid_manager_verifier = grid_manager_verifier + + self._storage = _make_storage_system(node_config, config, ann, self._server_id, self.get_rref) self.last_connect_time = None self.last_loss_time = None @@ -730,62 +969,20 @@ class NativeStorageServer(service.MultiService): self._trigger_cb = None self._on_status_changed = ObserverList() - def _make_storage_system(self, node_config, config, ann): + def upload_permitted(self): """ - :param allmydata.node._Config node_config: The node configuration to pass - to any configured storage plugins. + If our client is configured with Grid Manager public-keys, we will + only upload to storage servers that have a currently-valid + certificate signed by at least one of the Grid Managers we + accept. - :param StorageClientConfig config: Configuration specifying desired - storage client behavior. - - :param dict ann: The storage announcement from the storage server we - are meant to communicate with. - - :return IFoolscapStorageServer: An object enabling communication via - Foolscap with the server which generated the announcement. 
+ :return: True if we should use this server for uploads, False + otherwise. """ - # Try to match the announcement against a plugin. - try: - furl, storage_server = _storage_from_foolscap_plugin( - node_config, - config, - ann, - # Pass in an accessor for our _rref attribute. The value of - # the attribute may change over time as connections are lost - # and re-established. The _StorageServer should always be - # able to get the most up-to-date value. - self.get_rref, - ) - except AnnouncementNotMatched: - # Nope. - pass - else: - return _FoolscapStorage.from_announcement( - self._server_id, - furl, - ann, - storage_server, - ) - - # Try to match the announcement against the anonymous access scheme. - try: - furl = ann[u"anonymous-storage-FURL"] - except KeyError: - # Nope - pass - else: - # See comment above for the _storage_from_foolscap_plugin case - # about passing in get_rref. - storage_server = _StorageServer(get_rref=self.get_rref) - return _FoolscapStorage.from_announcement( - self._server_id, - furl, - ann, - storage_server, - ) - - # Nothing matched so we can't talk to this server. 
- return _null_storage + # if we have no Grid Manager keys configured, choice is easy + if self._grid_manager_verifier is None: + return True + return self._grid_manager_verifier() def get_permutation_seed(self): return self._storage.permutation_seed @@ -843,13 +1040,7 @@ class NativeStorageServer(service.MultiService): def get_available_space(self): version = self.get_version() - if version is None: - return None - protocol_v1_version = version.get(b'http://allmydata.org/tahoe/protocols/storage/v1', BytesKeyDict()) - available_space = protocol_v1_version.get(b'available-space') - if available_space is None: - available_space = protocol_v1_version.get(b'maximum-immutable-share-size', None) - return available_space + return _available_space_from_version(version) def start_connecting(self, trigger_cb): self._tub = self._tub_maker(self._handler_overrides) @@ -911,6 +1102,277 @@ class NativeStorageServer(service.MultiService): # used when the broker wants us to hurry up self._reconnector.reset() + +@async_to_deferred +async def _pick_a_http_server( + reactor, + nurls: list[DecodedURL], + request: Callable[[object, DecodedURL], defer.Deferred[object]] +) -> DecodedURL: + """Pick the first server we successfully send a request to. + + Fires with ``None`` if no server was found, or with the ``DecodedURL`` of + the first successfully-connected server. + """ + requests = [] + for nurl in nurls: + def to_nurl(_: object, nurl: DecodedURL=nurl) -> DecodedURL: + return nurl + + requests.append(request(reactor, nurl).addCallback(to_nurl)) + + queries: defer.Deferred[tuple[int, DecodedURL]] = race(requests) + _, nurl = await queries + return nurl + + +@implementer(IServer) +class HTTPNativeStorageServer(service.MultiService): + """ + Like ``NativeStorageServer``, but for HTTP clients. + + The notion of being "connected" is less meaningful for HTTP; we just poll + occasionally, and if we've succeeded at last poll, we assume we're + "connected". 
+ """ + + def __init__(self, server_id: bytes, announcement, default_connection_handlers: dict[str,str], reactor=reactor, grid_manager_verifier=None, tor_provider: Optional[TorProvider]=None): + service.MultiService.__init__(self) + assert isinstance(server_id, bytes) + self._server_id = server_id + self.announcement = announcement + self._on_status_changed = ObserverList() + self._reactor = reactor + self._grid_manager_verifier = grid_manager_verifier + self._storage_client_factory = StorageClientFactory( + default_connection_handlers, tor_provider + ) + + furl = announcement["anonymous-storage-FURL"].encode("utf-8") + ( + self._nickname, + self._permutation_seed, + self._tubid, + self._short_description, + self._long_description + ) = _parse_announcement(server_id, furl, announcement) + self._nurls = [ + DecodedURL.from_text(u) + for u in announcement[ANONYMOUS_STORAGE_NURLS] + ] + self._istorage_server : Optional[_HTTPStorageServer] = None + + self._connection_status = connection_status.ConnectionStatus.unstarted() + self._version = None + self._last_connect_time = None + self._connecting_deferred : Optional[defer.Deferred[object]]= None + + def get_permutation_seed(self): + return self._permutation_seed + + def get_name(self): + return self._short_description + + def get_longname(self): + return self._long_description + + def get_tubid(self): + return self._tubid + + def get_lease_seed(self): + # Apparently this is what Foolscap version above does?! 
+ return self._tubid + + def get_foolscap_write_enabler_seed(self): + return self._tubid + + def get_nickname(self): + return self._nickname + + def on_status_changed(self, status_changed): + """ + :param status_changed: a callable taking a single arg (the + NativeStorageServer) that is notified when we become connected + """ + return self._on_status_changed.subscribe(status_changed) + + def upload_permitted(self): + """ + If our client is configured with Grid Manager public-keys, we will + only upload to storage servers that have a currently-valid + certificate signed by at least one of the Grid Managers we + accept. + + :return: True if we should use this server for uploads, False + otherwise. + """ + # if we have no Grid Manager keys configured, choice is easy + if self._grid_manager_verifier is None: + return True + return self._grid_manager_verifier() + + # Special methods used by copy.copy() and copy.deepcopy(). When those are + # used in allmydata.immutable.filenode to copy CheckResults during + # repair, we want it to treat the IServer instances as singletons, and + # not attempt to duplicate them.. 
+ def __copy__(self): + return self + + def __deepcopy__(self, memodict): + return self + + def __repr__(self): + return "" % self.get_name() + + def get_serverid(self): + return self._server_id + + def get_version(self): + return self._version + + def get_announcement(self): + return self.announcement + + def get_connection_status(self): + return self._connection_status + + def is_connected(self): + return self._connection_status.connected + + def get_available_space(self): + version = self.get_version() + return _available_space_from_version(version) + + def start_connecting(self, trigger_cb): + self._lc = LoopingCall(self._connect) + self._lc.start(1, True) + + def _got_version(self, version): + self._last_connect_time = time.time() + self._version = version + self._connection_status = connection_status.ConnectionStatus( + True, "connected", [], self._last_connect_time, self._last_connect_time + ) + self._on_status_changed.notify(self) + + def _failed_to_connect(self, reason): + self._connection_status = connection_status.ConnectionStatus( + False, f"failure: {reason}", [], self._last_connect_time, self._last_connect_time + ) + self._on_status_changed.notify(self) + + def get_storage_server(self): + """ + See ``IServer.get_storage_server``. + """ + if self._connection_status.summary == "unstarted": + return None + return self._istorage_server + + def stop_connecting(self): + self._lc.stop() + if self._connecting_deferred is not None: + self._connecting_deferred.cancel() + + def try_to_connect(self): + self._connect() + + def _connect(self) -> defer.Deferred[object]: + """ + Try to connect to a working storage server. + + If called while a previous ``_connect()`` is already running, it will + just return the same ``Deferred``. + + ``LoopingCall.stop()`` doesn't cancel ``Deferred``s, unfortunately: + https://github.com/twisted/twisted/issues/11814. Thus we want to store + the ``Deferred`` so we can cancel it when necessary. 
+ + We also want to return it so that loop iterations take it into account, + and a new iteration doesn't start while we're in the middle of the + previous one. + """ + # Conceivably try_to_connect() was called on this before, in which case + # we already are in the middle of connecting. So in that case just + # return whatever is in progress: + if self._connecting_deferred is not None: + return self._connecting_deferred + + def done(_): + self._connecting_deferred = None + + connecting = self._pick_server_and_get_version() + # Set a short timeout since we're relying on this for server liveness. + connecting = connecting.addTimeout(5, self._reactor).addCallbacks( + self._got_version, self._failed_to_connect + ).addBoth(done) + self._connecting_deferred = connecting + return connecting + + @async_to_deferred + async def _pick_server_and_get_version(self): + """ + Minimal implementation of connection logic: pick a server, get its + version. This doesn't deal with errors much, so as to minimize + statefulness. It does change ``self._istorage_server``, so possibly + more refactoring would be useful to remove even that much statefulness. + """ + async def get_istorage_server() -> _HTTPStorageServer: + if self._istorage_server is not None: + return self._istorage_server + + # We haven't selected a server yet, so let's do so. + + # TODO This is somewhat inefficient on startup: it takes two successful + # version() calls before we are live talking to a server, it could only + # be one. See https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3992 + + @async_to_deferred + async def request(reactor, nurl: DecodedURL): + # Since we're just using this one off to check if the NURL + # works, no need for persistent pool or other fanciness. 
+ pool = HTTPConnectionPool(reactor, persistent=False) + pool.retryAutomatically = False + storage_client = await self._storage_client_factory.create_storage_client( + nurl, reactor, pool + ) + return await StorageClientGeneral(storage_client).get_version() + + nurl = await _pick_a_http_server(reactor, self._nurls, request) + + # If we've gotten this far, we've found a working NURL. + storage_client = await self._storage_client_factory.create_storage_client( + nurl, cast(IReactorTime, reactor), None + ) + self._istorage_server = _HTTPStorageServer.from_http_client(storage_client) + return self._istorage_server + + try: + storage_server = await get_istorage_server() + + # Get the version from the remote server. + version = await storage_server.get_version() + return version + except Exception as e: + log.msg(f"Failed to connect to a HTTP storage server: {e}", level=log.CURIOUS) + raise + + def stopService(self): + if self._connecting_deferred is not None: + self._connecting_deferred.cancel() + + result = service.MultiService.stopService(self) + if self._lc.running: + self._lc.stop() + self._failed_to_connect("shut down") + + if self._istorage_server is not None: + client_shutting_down = self._istorage_server._http_client.shutdown() + result.addCallback(lambda _: client_shutting_down) + + return result + + class UnknownServerTypeError(Exception): pass @@ -1024,3 +1486,249 @@ class _StorageServer(object): shnum, reason, ).addErrback(log.err, "Error from remote call to advise_corrupt_share") + + + +@attr.s(hash=True) +class _FakeRemoteReference(object): + """ + Emulate a Foolscap RemoteReference, calling a local object instead. 
+ """ + local_object = attr.ib(type=object) + + @defer.inlineCallbacks + def callRemote(self, action, *args, **kwargs): + try: + result = yield getattr(self.local_object, action)(*args, **kwargs) + defer.returnValue(result) + except HTTPClientException as e: + raise RemoteException((e.code, e.message, e.body)) + + +@attr.s +class _HTTPBucketWriter(object): + """ + Emulate a ``RIBucketWriter``, but use HTTP protocol underneath. + """ + client = attr.ib(type=StorageClientImmutables) + storage_index = attr.ib(type=bytes) + share_number = attr.ib(type=int) + upload_secret = attr.ib(type=bytes) + finished = attr.ib(type=defer.Deferred[bool], factory=defer.Deferred) + + def abort(self): + return self.client.abort_upload(self.storage_index, self.share_number, + self.upload_secret) + + @defer.inlineCallbacks + def write(self, offset, data): + result = yield self.client.write_share_chunk( + self.storage_index, self.share_number, self.upload_secret, offset, data + ) + if result.finished: + self.finished.callback(True) + defer.returnValue(None) + + def close(self): + # We're not _really_ closed until all writes have succeeded and we + # finished writing all the data. + return self.finished + + +def _ignore_404(failure: Failure) -> Optional[Failure]: + """ + Useful for advise_corrupt_share(), since it swallows unknown share numbers + in Foolscap. + """ + if failure.check(HTTPClientException) and failure.value.code == http.NOT_FOUND: + return None + else: + return failure + + +@attr.s(hash=True) +class _HTTPBucketReader(object): + """ + Emulate a ``RIBucketReader``, but use HTTP protocol underneath. 
+ """ + client = attr.ib(type=StorageClientImmutables) + storage_index = attr.ib(type=bytes) + share_number = attr.ib(type=int) + + def read(self, offset, length): + return self.client.read_share_chunk( + self.storage_index, self.share_number, offset, length + ) + + def advise_corrupt_share(self, reason): + return self.client.advise_corrupt_share( + self.storage_index, self.share_number, + str(reason, "utf-8", errors="backslashreplace") + ).addErrback(_ignore_404) + + +# WORK IN PROGRESS, for now it doesn't actually implement whole thing. +@implementer(IStorageServer) # type: ignore +@attr.s +class _HTTPStorageServer(object): + """ + Talk to remote storage server over HTTP. + """ + _http_client = attr.ib(type=StorageClient) + + @staticmethod + def from_http_client(http_client: StorageClient) -> _HTTPStorageServer: + """ + Create an ``IStorageServer`` from a HTTP ``StorageClient``. + """ + return _HTTPStorageServer(http_client=http_client) + + def get_version(self) -> defer.Deferred[VersionMessage]: + return StorageClientGeneral(self._http_client).get_version() + + @defer.inlineCallbacks + def allocate_buckets( + self, + storage_index, + renew_secret, + cancel_secret, + sharenums, + allocated_size, + canary + ): + upload_secret = urandom(20) + immutable_client = StorageClientImmutables(self._http_client) + result = immutable_client.create( + storage_index, sharenums, allocated_size, upload_secret, renew_secret, + cancel_secret + ) + result = yield result + defer.returnValue( + (result.already_have, { + share_num: _FakeRemoteReference(_HTTPBucketWriter( + client=immutable_client, + storage_index=storage_index, + share_number=share_num, + upload_secret=upload_secret + )) + for share_num in result.allocated + }) + ) + + @defer.inlineCallbacks + def get_buckets( + self, + storage_index + ): + immutable_client = StorageClientImmutables(self._http_client) + share_numbers = yield immutable_client.list_shares( + storage_index + ) + defer.returnValue({ + share_num: 
_FakeRemoteReference(_HTTPBucketReader( + immutable_client, storage_index, share_num + )) + for share_num in share_numbers + }) + + @async_to_deferred + async def add_lease( + self, + storage_index, + renew_secret, + cancel_secret + ): + client = StorageClientGeneral(self._http_client) + try: + await client.add_or_renew_lease( + storage_index, renew_secret, cancel_secret + ) + except ClientException as e: + if e.code == http.NOT_FOUND: + # Silently do nothing, as is the case for the Foolscap client + return + raise + + def advise_corrupt_share( + self, + share_type, + storage_index, + shnum, + reason: bytes + ): + if share_type == b"immutable": + client : Union[StorageClientImmutables, StorageClientMutables] = StorageClientImmutables(self._http_client) + elif share_type == b"mutable": + client = StorageClientMutables(self._http_client) + else: + raise ValueError("Unknown share type") + return client.advise_corrupt_share( + storage_index, shnum, str(reason, "utf-8", errors="backslashreplace") + ).addErrback(_ignore_404) + + @defer.inlineCallbacks + def slot_readv(self, storage_index, shares, readv): + mutable_client = StorageClientMutables(self._http_client) + pending_reads = {} + reads = {} + # If shares list is empty, that means list all shares, so we need + # to do a query to get that. 
+ if not shares: + shares = yield mutable_client.list_shares(storage_index) + + # Start all the queries in parallel: + for share_number in shares: + share_reads = defer.gatherResults( + [ + mutable_client.read_share_chunk( + storage_index, share_number, offset, length + ) + for (offset, length) in readv + ] + ) + pending_reads[share_number] = share_reads + + # Wait for all the queries to finish: + for share_number, pending_result in pending_reads.items(): + reads[share_number] = yield pending_result + + return reads + + @defer.inlineCallbacks + def slot_testv_and_readv_and_writev( + self, + storage_index, + secrets, + tw_vectors, + r_vector, + ): + mutable_client = StorageClientMutables(self._http_client) + we_secret, lr_secret, lc_secret = secrets + client_tw_vectors = {} + for share_num, (test_vector, data_vector, new_length) in tw_vectors.items(): + client_test_vectors = [ + TestVector(offset=offset, size=size, specimen=specimen) + for (offset, size, specimen) in test_vector + ] + client_write_vectors = [ + WriteVector(offset=offset, data=data) for (offset, data) in data_vector + ] + client_tw_vectors[share_num] = TestWriteVectors( + test_vectors=client_test_vectors, + write_vectors=client_write_vectors, + new_length=new_length + ) + client_read_vectors = [ + ReadVector(offset=offset, size=size) + for (offset, size) in r_vector + ] + try: + client_result = yield mutable_client.read_test_write_chunks( + storage_index, we_secret, lr_secret, lc_secret, client_tw_vectors, + client_read_vectors, + ) + except ClientException as e: + if e.code == http.UNAUTHORIZED: + raise RemoteException("Unauthorized write, possibly you passed the wrong write enabler?") + raise + return (client_result.success, client_result.reads) diff --git a/src/allmydata/test/__init__.py b/src/allmydata/test/__init__.py index 893aa15ce..6779aa527 100644 --- a/src/allmydata/test/__init__.py +++ b/src/allmydata/test/__init__.py @@ -15,14 +15,6 @@ some side-effects which make things better when the 
test suite runs. Ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 from traceback import extract_stack, format_list diff --git a/src/allmydata/test/_win_subprocess.py b/src/allmydata/test/_win_subprocess.py deleted file mode 100644 index bf9767e73..000000000 --- a/src/allmydata/test/_win_subprocess.py +++ /dev/null @@ -1,210 +0,0 @@ -""" -This module is only necessary on Python 2. Once Python 2 code is dropped, it -can be deleted. -""" - -from future.utils import PY3 -if PY3: - raise RuntimeError("Just use subprocess.Popen") - -# This is necessary to pacify flake8 on Python 3, while we're still supporting -# Python 2. -from past.builtins import unicode - -# -*- coding: utf-8 -*- - -## Copyright (C) 2021 Valentin Lab -## -## Redistribution and use in source and binary forms, with or without -## modification, are permitted provided that the following conditions -## are met: -## -## 1. Redistributions of source code must retain the above copyright -## notice, this list of conditions and the following disclaimer. -## -## 2. Redistributions in binary form must reproduce the above -## copyright notice, this list of conditions and the following -## disclaimer in the documentation and/or other materials provided -## with the distribution. -## -## 3. Neither the name of the copyright holder nor the names of its -## contributors may be used to endorse or promote products derived -## from this software without specific prior written permission. 
-## -## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -## "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -## LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -## FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -## COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, -## INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -## (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -## SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) -## HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, -## STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) -## ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED -## OF THE POSSIBILITY OF SUCH DAMAGE. -## - -## issue: https://bugs.python.org/issue19264 - -# See allmydata/windows/fixups.py -import sys -assert sys.platform == "win32" - -import os -import ctypes -import subprocess -import _subprocess -from ctypes import byref, windll, c_char_p, c_wchar_p, c_void_p, \ - Structure, sizeof, c_wchar, WinError -from ctypes.wintypes import BYTE, WORD, LPWSTR, BOOL, DWORD, LPVOID, \ - HANDLE - - -## -## Types -## - -CREATE_UNICODE_ENVIRONMENT = 0x00000400 -LPCTSTR = c_char_p -LPTSTR = c_wchar_p -LPSECURITY_ATTRIBUTES = c_void_p -LPBYTE = ctypes.POINTER(BYTE) - -class STARTUPINFOW(Structure): - _fields_ = [ - ("cb", DWORD), ("lpReserved", LPWSTR), - ("lpDesktop", LPWSTR), ("lpTitle", LPWSTR), - ("dwX", DWORD), ("dwY", DWORD), - ("dwXSize", DWORD), ("dwYSize", DWORD), - ("dwXCountChars", DWORD), ("dwYCountChars", DWORD), - ("dwFillAtrribute", DWORD), ("dwFlags", DWORD), - ("wShowWindow", WORD), ("cbReserved2", WORD), - ("lpReserved2", LPBYTE), ("hStdInput", HANDLE), - ("hStdOutput", HANDLE), ("hStdError", HANDLE), - ] - -LPSTARTUPINFOW = ctypes.POINTER(STARTUPINFOW) - - -class PROCESS_INFORMATION(Structure): - _fields_ = [ - ("hProcess", HANDLE), ("hThread", HANDLE), - 
("dwProcessId", DWORD), ("dwThreadId", DWORD), - ] - -LPPROCESS_INFORMATION = ctypes.POINTER(PROCESS_INFORMATION) - - -class DUMMY_HANDLE(ctypes.c_void_p): - - def __init__(self, *a, **kw): - super(DUMMY_HANDLE, self).__init__(*a, **kw) - self.closed = False - - def Close(self): - if not self.closed: - windll.kernel32.CloseHandle(self) - self.closed = True - - def __int__(self): - return self.value - - -CreateProcessW = windll.kernel32.CreateProcessW -CreateProcessW.argtypes = [ - LPCTSTR, LPTSTR, LPSECURITY_ATTRIBUTES, - LPSECURITY_ATTRIBUTES, BOOL, DWORD, LPVOID, LPCTSTR, - LPSTARTUPINFOW, LPPROCESS_INFORMATION, -] -CreateProcessW.restype = BOOL - - -## -## Patched functions/classes -## - -def CreateProcess(executable, args, _p_attr, _t_attr, - inherit_handles, creation_flags, env, cwd, - startup_info): - """Create a process supporting unicode executable and args for win32 - - Python implementation of CreateProcess using CreateProcessW for Win32 - - """ - - si = STARTUPINFOW( - dwFlags=startup_info.dwFlags, - wShowWindow=startup_info.wShowWindow, - cb=sizeof(STARTUPINFOW), - ## XXXvlab: not sure of the casting here to ints. 
- hStdInput=int(startup_info.hStdInput), - hStdOutput=int(startup_info.hStdOutput), - hStdError=int(startup_info.hStdError), - ) - - wenv = None - if env is not None: - ## LPCWSTR seems to be c_wchar_p, so let's say CWSTR is c_wchar - env = (unicode("").join([ - unicode("%s=%s\0") % (k, v) - for k, v in env.items()])) + unicode("\0") - wenv = (c_wchar * len(env))() - wenv.value = env - - pi = PROCESS_INFORMATION() - creation_flags |= CREATE_UNICODE_ENVIRONMENT - - if CreateProcessW(executable, args, None, None, - inherit_handles, creation_flags, - wenv, cwd, byref(si), byref(pi)): - return (DUMMY_HANDLE(pi.hProcess), DUMMY_HANDLE(pi.hThread), - pi.dwProcessId, pi.dwThreadId) - raise WinError() - - -class Popen(subprocess.Popen): - """This superseeds Popen and corrects a bug in cPython 2.7 implem""" - - def _execute_child(self, args, executable, preexec_fn, close_fds, - cwd, env, universal_newlines, - startupinfo, creationflags, shell, to_close, - p2cread, p2cwrite, - c2pread, c2pwrite, - errread, errwrite): - """Code from part of _execute_child from Python 2.7 (9fbb65e) - - There are only 2 little changes concerning the construction of - the the final string in shell mode: we preempt the creation of - the command string when shell is True, because original function - will try to encode unicode args which we want to avoid to be able to - sending it as-is to ``CreateProcess``. 
- - """ - if not isinstance(args, subprocess.types.StringTypes): - args = subprocess.list2cmdline(args) - - if startupinfo is None: - startupinfo = subprocess.STARTUPINFO() - if shell: - startupinfo.dwFlags |= _subprocess.STARTF_USESHOWWINDOW - startupinfo.wShowWindow = _subprocess.SW_HIDE - comspec = os.environ.get("COMSPEC", unicode("cmd.exe")) - args = unicode('{} /c "{}"').format(comspec, args) - if (_subprocess.GetVersion() >= 0x80000000 or - os.path.basename(comspec).lower() == "command.com"): - w9xpopen = self._find_w9xpopen() - args = unicode('"%s" %s') % (w9xpopen, args) - creationflags |= _subprocess.CREATE_NEW_CONSOLE - - cp = _subprocess.CreateProcess - _subprocess.CreateProcess = CreateProcess - try: - super(Popen, self)._execute_child( - args, executable, - preexec_fn, close_fds, cwd, env, universal_newlines, - startupinfo, creationflags, False, to_close, p2cread, - p2cwrite, c2pread, c2pwrite, errread, errwrite, - ) - finally: - _subprocess.CreateProcess = cp diff --git a/src/allmydata/test/blocking.py b/src/allmydata/test/blocking.py new file mode 100644 index 000000000..6b6c05e5a --- /dev/null +++ b/src/allmydata/test/blocking.py @@ -0,0 +1,40 @@ +import sys +import traceback +import signal +import threading + +from twisted.internet import reactor + + +def print_stacks(): + print("Uh oh, something is blocking the event loop!") + current_thread = threading.get_ident() + for thread_id, frame in sys._current_frames().items(): + if thread_id == current_thread: + traceback.print_stack(frame, limit=10) + break + + +def catch_blocking_in_event_loop(test=None): + """ + Print tracebacks if the event loop is blocked for more than a short amount + of time. 
+ """ + signal.signal(signal.SIGALRM, lambda *args: print_stacks()) + + current_scheduled = [None] + + def cancel_and_rerun(): + signal.setitimer(signal.ITIMER_REAL, 0) + signal.setitimer(signal.ITIMER_REAL, 0.015) + current_scheduled[0] = reactor.callLater(0.01, cancel_and_rerun) + + cancel_and_rerun() + + def cleanup(): + signal.signal(signal.SIGALRM, signal.SIG_DFL) + signal.setitimer(signal.ITIMER_REAL, 0) + current_scheduled[0].cancel() + + if test is not None: + test.addCleanup(cleanup) diff --git a/src/allmydata/test/certs.py b/src/allmydata/test/certs.py new file mode 100644 index 000000000..9e6640386 --- /dev/null +++ b/src/allmydata/test/certs.py @@ -0,0 +1,66 @@ +"""Utilities for generating TLS certificates.""" + +import datetime + +from cryptography import x509 +from cryptography.x509.oid import NameOID +from cryptography.hazmat.primitives.asymmetric import rsa +from cryptography.hazmat.primitives import serialization, hashes + +from twisted.python.filepath import FilePath + + +def cert_to_file(path: FilePath, cert) -> FilePath: + """ + Write the given certificate to a file on disk. Returns the path. + """ + path.setContent(cert.public_bytes(serialization.Encoding.PEM)) + return path + + +def private_key_to_file(path: FilePath, private_key) -> FilePath: + """ + Write the given key to a file on disk. Returns the path. 
+ """ + path.setContent( + private_key.private_bytes( + encoding=serialization.Encoding.PEM, + format=serialization.PrivateFormat.TraditionalOpenSSL, + encryption_algorithm=serialization.NoEncryption(), + ) + ) + return path + + +def generate_private_key(): + """Create a RSA private key.""" + return rsa.generate_private_key(public_exponent=65537, key_size=2048) + + +def generate_certificate( + private_key, + expires_days: int = 10, + valid_in_days: int = 0, + org_name: str = "Yoyodyne", +): + """Generate a certificate from a RSA private key.""" + subject = issuer = x509.Name( + [x509.NameAttribute(NameOID.ORGANIZATION_NAME, org_name)] + ) + starts = datetime.datetime.utcnow() + datetime.timedelta(days=valid_in_days) + expires = datetime.datetime.utcnow() + datetime.timedelta(days=expires_days) + return ( + x509.CertificateBuilder() + .subject_name(subject) + .issuer_name(issuer) + .public_key(private_key.public_key()) + .serial_number(x509.random_serial_number()) + .not_valid_before(min(starts, expires)) + .not_valid_after(expires) + .add_extension( + x509.SubjectAlternativeName([x509.DNSName("localhost")]), + critical=False, + # Sign our certificate with our private key + ) + .sign(private_key, hashes.SHA256()) + ) diff --git a/src/allmydata/test/cli/common.py b/src/allmydata/test/cli/common.py index ed066c6b6..351b48baa 100644 --- a/src/allmydata/test/cli/common.py +++ b/src/allmydata/test/cli/common.py @@ -1,14 +1,6 @@ """ Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 from six import ensure_str, ensure_text diff --git a/src/allmydata/test/cli/test_admin.py b/src/allmydata/test/cli/test_admin.py new file mode 100644 index 000000000..a6370d6e4 --- /dev/null +++ b/src/allmydata/test/cli/test_admin.py @@ -0,0 +1,245 @@ +""" +Ported to Python 3. +""" + +# We're going to override stdin/stderr, so want to match their behavior on respective Python versions. +from io import StringIO + +from twisted.python.usage import ( + UsageError, +) +from twisted.python.filepath import ( + FilePath, +) + +from testtools.matchers import ( + Contains, +) + +from allmydata.scripts.admin import ( + migrate_crawler, + add_grid_manager_cert, +) +from allmydata.scripts.runner import ( + Options, +) +from allmydata.util import jsonbytes as json +from ..common import ( + SyncTestCase, +) + + +class AdminMigrateCrawler(SyncTestCase): + """ + Tests related to 'tahoe admin migrate-crawler' + """ + + def test_already(self): + """ + We've already migrated; don't do it again. + """ + + root = FilePath(self.mktemp()) + storage = root.child("storage") + storage.makedirs() + with storage.child("lease_checker.state.json").open("w") as f: + f.write(b"{}\n") + + top = Options() + top.parseOptions([ + "admin", "migrate-crawler", + "--basedir", storage.parent().path, + ]) + options = top.subOptions + while hasattr(options, "subOptions"): + options = options.subOptions + options.stdout = StringIO() + migrate_crawler(options) + + self.assertThat( + options.stdout.getvalue(), + Contains("Already converted:"), + ) + + def test_usage(self): + """ + We've already migrated; don't do it again. 
+ """ + + root = FilePath(self.mktemp()) + storage = root.child("storage") + storage.makedirs() + with storage.child("lease_checker.state.json").open("w") as f: + f.write(b"{}\n") + + top = Options() + top.parseOptions([ + "admin", "migrate-crawler", + "--basedir", storage.parent().path, + ]) + options = top.subOptions + while hasattr(options, "subOptions"): + options = options.subOptions + self.assertThat( + str(options), + Contains("security issues with pickle") + ) + + +fake_cert = { + "certificate": "{\"expires\":1601687822,\"public_key\":\"pub-v0-cbq6hcf3pxcz6ouoafrbktmkixkeuywpcpbcomzd3lqbkq4nmfga\",\"version\":1}", + "signature": "fvjd3uvvupf2v6tnvkwjd473u3m3inyqkwiclhp7balmchkmn3px5pei3qyfjnhymq4cjcwvbpqmcwwnwswdtrfkpnlaxuih2zbdmda" +} + + +class AddCertificateOptions(SyncTestCase): + """ + Tests for 'tahoe admin add-grid-manager-cert' option validation + """ + def setUp(self): + self.tahoe = Options() + return super(AddCertificateOptions, self).setUp() + + def test_parse_no_data(self): + """ + When no data is passed to stdin an error is produced + """ + self.tahoe.stdin = StringIO("") + self.tahoe.stderr = StringIO() # suppress message + + with self.assertRaises(UsageError) as ctx: + self.tahoe.parseOptions( + [ + "admin", "add-grid-manager-cert", + "--name", "random-name", + "--filename", "-", + ] + ) + + self.assertIn( + "Reading certificate from stdin failed", + str(ctx.exception) + ) + + def test_read_cert_file(self): + """ + A certificate can be read from a file + """ + tmp = self.mktemp() + with open(tmp, "wb") as f: + f.write(json.dumps_bytes(fake_cert)) + + # certificate should be loaded + self.tahoe.parseOptions( + [ + "admin", "add-grid-manager-cert", + "--name", "random-name", + "--filename", tmp, + ] + ) + opts = self.tahoe.subOptions.subOptions + self.assertEqual( + fake_cert, + opts.certificate_data + ) + + def test_bad_certificate(self): + """ + Unparseable data produces an error + """ + self.tahoe.stdin = StringIO("{}") + self.tahoe.stderr 
= StringIO() # suppress message + + with self.assertRaises(UsageError) as ctx: + self.tahoe.parseOptions( + [ + "admin", "add-grid-manager-cert", + "--name", "random-name", + "--filename", "-", + ] + ) + + self.assertIn( + "Grid Manager certificate must contain", + str(ctx.exception) + ) + + +class AddCertificateCommand(SyncTestCase): + """ + Tests for 'tahoe admin add-grid-manager-cert' operation + """ + + def setUp(self): + self.tahoe = Options() + self.node_path = FilePath(self.mktemp()) + self.node_path.makedirs() + with self.node_path.child("tahoe.cfg").open("w") as f: + f.write(b"# minimal test config\n") + return super(AddCertificateCommand, self).setUp() + + def test_add_one(self): + """ + Adding a certificate succeeds + """ + self.tahoe.stdin = StringIO(json.dumps(fake_cert)) + self.tahoe.stderr = StringIO() + self.tahoe.parseOptions( + [ + "--node-directory", self.node_path.path, + "admin", "add-grid-manager-cert", + "--name", "zero", + "--filename", "-", + ] + ) + self.tahoe.subOptions.subOptions.stdin = self.tahoe.stdin + self.tahoe.subOptions.subOptions.stderr = self.tahoe.stderr + rc = add_grid_manager_cert(self.tahoe.subOptions.subOptions) + + self.assertEqual(rc, 0) + self.assertEqual( + {"zero.cert", "tahoe.cfg"}, + set(self.node_path.listdir()) + ) + self.assertIn( + "There are now 1 certificates", + self.tahoe.stderr.getvalue() + ) + + def test_add_two(self): + """ + An error message is produced when adding a certificate with a + duplicate name. 
+ """ + self.tahoe.stdin = StringIO(json.dumps(fake_cert)) + self.tahoe.stderr = StringIO() + self.tahoe.parseOptions( + [ + "--node-directory", self.node_path.path, + "admin", "add-grid-manager-cert", + "--name", "zero", + "--filename", "-", + ] + ) + self.tahoe.subOptions.subOptions.stdin = self.tahoe.stdin + self.tahoe.subOptions.subOptions.stderr = self.tahoe.stderr + rc = add_grid_manager_cert(self.tahoe.subOptions.subOptions) + self.assertEqual(rc, 0) + + self.tahoe.stdin = StringIO(json.dumps(fake_cert)) + self.tahoe.parseOptions( + [ + "--node-directory", self.node_path.path, + "admin", "add-grid-manager-cert", + "--name", "zero", + "--filename", "-", + ] + ) + self.tahoe.subOptions.subOptions.stdin = self.tahoe.stdin + self.tahoe.subOptions.subOptions.stderr = self.tahoe.stderr + rc = add_grid_manager_cert(self.tahoe.subOptions.subOptions) + self.assertEqual(rc, 1) + self.assertIn( + "Already have certificate for 'zero'", + self.tahoe.stderr.getvalue() + ) diff --git a/src/allmydata/test/cli/test_alias.py b/src/allmydata/test/cli/test_alias.py index a3ee595b8..bbbafcabc 100644 --- a/src/allmydata/test/cli/test_alias.py +++ b/src/allmydata/test/cli/test_alias.py @@ -1,14 +1,6 @@ """ Ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 import json diff --git a/src/allmydata/test/cli/test_backup.py b/src/allmydata/test/cli/test_backup.py index df598b811..7ff1a14d0 100644 --- a/src/allmydata/test/cli/test_backup.py +++ b/src/allmydata/test/cli/test_backup.py @@ -1,17 +1,9 @@ """ Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 import os.path -from six.moves import cStringIO as StringIO +from io import StringIO from datetime import timedelta import re @@ -20,7 +12,7 @@ from twisted.python.monkey import MonkeyPatcher from allmydata.util import fileutil from allmydata.util.fileutil import abspath_expanduser_unicode -from allmydata.util.encodingutil import get_io_encoding, unicode_to_argv +from allmydata.util.encodingutil import unicode_to_argv from allmydata.util.namespace import Namespace from allmydata.scripts import cli, backupdb from ..common_util import StallMixin @@ -372,8 +364,6 @@ class Backup(GridTestMixin, CLITestMixin, StallMixin, unittest.TestCase): nice_doc = u"nice_d\u00F8c.lyx" try: doc_pattern_arg_unicode = doc_pattern_arg = u"*d\u00F8c*" - if PY2: - doc_pattern_arg = doc_pattern_arg.encode(get_io_encoding()) except UnicodeEncodeError: raise unittest.SkipTest("A non-ASCII command argument could not be encoded on this platform.") @@ -429,10 +419,7 @@ class Backup(GridTestMixin, CLITestMixin, StallMixin, unittest.TestCase): else: return original_open(name, *args, **kwargs) - if PY2: - from allmydata.scripts import cli as module_to_patch - else: - import builtins as module_to_patch + import builtins as module_to_patch patcher = MonkeyPatcher((module_to_patch, 'open', call_file)) patcher.runWithPatches(parse_options, basedir, "backup", ['--exclude-from-utf-8', unicode_to_argv(exclude_file), 'from', 'to']) self.failUnless(ns.called) diff --git a/src/allmydata/test/cli/test_backupdb.py b/src/allmydata/test/cli/test_backupdb.py index 665382dc8..53cc3225a 100644 --- a/src/allmydata/test/cli/test_backupdb.py +++ 
b/src/allmydata/test/cli/test_backupdb.py @@ -1,19 +1,10 @@ """ Ported to Python 3. """ -from __future__ import print_function -from __future__ import absolute_import -from __future__ import division -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - # Don't import future bytes so we don't break a couple of tests - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, dict, list, object, range, str, max, min # noqa: F401 import sys import os.path, time -from six.moves import cStringIO as StringIO +from io import StringIO from twisted.trial import unittest from allmydata.util import fileutil diff --git a/src/allmydata/test/cli/test_check.py b/src/allmydata/test/cli/test_check.py index 472105ca1..c895451ea 100644 --- a/src/allmydata/test/cli/test_check.py +++ b/src/allmydata/test/cli/test_check.py @@ -1,17 +1,9 @@ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 from six import ensure_text import os.path import json from twisted.trial import unittest -from six.moves import cStringIO as StringIO +from io import StringIO from allmydata import uri from allmydata.util import base32 @@ -424,7 +416,7 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase): def _stash_uri(n): self.uriList.append(n.get_uri()) d.addCallback(_stash_uri) - d = c0.create_dirnode() + d.addCallback(lambda _: c0.create_dirnode()) d.addCallback(_stash_uri) d.addCallback(lambda ign: self.do_cli("check", self.uriList[0], self.uriList[1])) diff --git a/src/allmydata/test/cli/test_cli.py b/src/allmydata/test/cli/test_cli.py index 72eb011d0..432437b61 100644 --- a/src/allmydata/test/cli/test_cli.py +++ 
b/src/allmydata/test/cli/test_cli.py @@ -1,16 +1,8 @@ """ Ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - -from six.moves import cStringIO as StringIO +from io import StringIO import re from six import ensure_text diff --git a/src/allmydata/test/cli/test_cp.py b/src/allmydata/test/cli/test_cp.py index fff50f331..2751dc055 100644 --- a/src/allmydata/test/cli/test_cp.py +++ b/src/allmydata/test/cli/test_cp.py @@ -1,14 +1,6 @@ """ Ported to Python 3. """ -from __future__ import print_function -from __future__ import absolute_import -from __future__ import division -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 import os.path, json from twisted.trial import unittest @@ -17,8 +9,7 @@ from twisted.internet import defer from allmydata.scripts import cli from allmydata.util import fileutil -from allmydata.util.encodingutil import (quote_output, get_io_encoding, - unicode_to_output, to_bytes) +from allmydata.util.encodingutil import (quote_output, unicode_to_output, to_bytes) from allmydata.util.assertutil import _assert from ..no_network import GridTestMixin from .common import CLITestMixin @@ -77,8 +68,6 @@ class Cp(GridTestMixin, CLITestMixin, unittest.TestCase): self.failUnlessIn("files whose names could not be converted", err) else: self.failUnlessReallyEqual(rc, 0) - if PY2: - out = out.decode(get_io_encoding()) self.failUnlessReallyEqual(out, u"Metallica\n\u00C4rtonwall\n\u00C4rtonwall-2\n") self.assertEqual(len(err), 0, err) 
d.addCallback(_check) @@ -235,8 +224,6 @@ class Cp(GridTestMixin, CLITestMixin, unittest.TestCase): self.failUnlessIn("files whose names could not be converted", err) else: self.failUnlessReallyEqual(rc, 0) - if PY2: - out = out.decode(get_io_encoding()) self.failUnlessReallyEqual(out, u"\u00C4rtonwall\n") self.assertEqual(len(err), 0, err) d.addCallback(_check) diff --git a/src/allmydata/test/cli/test_create.py b/src/allmydata/test/cli/test_create.py index 282f26163..406aebd48 100644 --- a/src/allmydata/test/cli/test_create.py +++ b/src/allmydata/test/cli/test_create.py @@ -1,31 +1,92 @@ """ Ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 +from __future__ import annotations import os -import mock + +from typing import Any + from twisted.trial import unittest from twisted.internet import defer, reactor from twisted.python import usage from allmydata.util import configutil +from allmydata.util import tor_provider, i2p_provider from ..common_util import run_cli, parse_cli +from ..common import ( + disable_modules, +) from ...scripts import create_node +from ...listeners import ListenerConfig, StaticProvider from ... import client - def read_config(basedir): tahoe_cfg = os.path.join(basedir, "tahoe.cfg") config = configutil.get_config(tahoe_cfg) return config +class MergeConfigTests(unittest.TestCase): + """ + Tests for ``create_node.merge_config``. + """ + def test_disable_left(self) -> None: + """ + If the left argument to ``create_node.merge_config`` is ``None`` + then the return value is ``None``. 
+ """ + conf = ListenerConfig([], [], {}) + self.assertEqual(None, create_node.merge_config(None, conf)) + + def test_disable_right(self) -> None: + """ + If the right argument to ``create_node.merge_config`` is ``None`` + then the return value is ``None``. + """ + conf = ListenerConfig([], [], {}) + self.assertEqual(None, create_node.merge_config(conf, None)) + + def test_disable_both(self) -> None: + """ + If both arguments to ``create_node.merge_config`` are ``None`` + then the return value is ``None``. + """ + self.assertEqual(None, create_node.merge_config(None, None)) + + def test_overlapping_keys(self) -> None: + """ + If there are any keys in the ``node_config`` of the left and right + parameters that are shared then ``ValueError`` is raised. + """ + left = ListenerConfig([], [], {"foo": [("b", "ar")]}) + right = ListenerConfig([], [], {"foo": [("ba", "z")]}) + self.assertRaises(ValueError, lambda: create_node.merge_config(left, right)) + + def test_merge(self) -> None: + """ + ``create_node.merge_config`` returns a ``ListenerConfig`` that has + all of the ports, locations, and node config from each of the two + ``ListenerConfig`` values given. 
+ """ + left = ListenerConfig( + ["left-port"], + ["left-location"], + {"left": [("f", "oo")]}, + ) + right = ListenerConfig( + ["right-port"], + ["right-location"], + {"right": [("ba", "r")]}, + ) + result = create_node.merge_config(left, right) + self.assertEqual( + ListenerConfig( + ["left-port", "right-port"], + ["left-location", "right-location"], + {"left": [("f", "oo")], "right": [("ba", "r")]}, + ), + result, + ) + class Config(unittest.TestCase): def test_client_unrecognized_options(self): tests = [ @@ -47,7 +108,14 @@ class Config(unittest.TestCase): e = self.assertRaises(usage.UsageError, parse_cli, verb, *args) self.assertIn("option %s not recognized" % (option,), str(e)) - def test_create_client_config(self): + async def test_create_client_config(self): + """ + ``create_node.write_client_config`` writes a configuration file + that can be parsed. + + TODO Maybe we should test that we can recover the given configuration + from the parse, too. + """ d = self.mktemp() os.mkdir(d) fname = os.path.join(d, 'tahoe.cfg') @@ -61,7 +129,7 @@ class Config(unittest.TestCase): "shares-happy": "1", "shares-total": "1", } - create_node.write_node_config(f, opts) + await create_node.write_node_config(f, opts) create_node.write_client_config(f, opts) # should succeed, no exceptions @@ -105,11 +173,12 @@ class Config(unittest.TestCase): @defer.inlineCallbacks def test_client_hide_ip_no_i2p_txtorcon(self): - # hmm, I must be doing something weird, these don't work as - # @mock.patch decorators for some reason - txi2p = mock.patch('allmydata.util.i2p_provider._import_txi2p', return_value=None) - txtorcon = mock.patch('allmydata.util.tor_provider._import_txtorcon', return_value=None) - with txi2p, txtorcon: + """ + The ``create-client`` sub-command tells the user to install the necessary + dependencies if they have neither tor nor i2p support installed and + they request network location privacy with the ``--hide-ip`` flag. 
+ """ + with disable_modules("txi2p", "txtorcon"): basedir = self.mktemp() rc, out, err = yield run_cli("create-client", "--hide-ip", basedir) self.assertTrue(rc != 0, out) @@ -118,8 +187,7 @@ class Config(unittest.TestCase): @defer.inlineCallbacks def test_client_i2p_option_no_txi2p(self): - txi2p = mock.patch('allmydata.util.i2p_provider._import_txi2p', return_value=None) - with txi2p: + with disable_modules("txi2p"): basedir = self.mktemp() rc, out, err = yield run_cli("create-node", "--listen=i2p", "--i2p-launch", basedir) self.assertTrue(rc != 0) @@ -127,8 +195,7 @@ class Config(unittest.TestCase): @defer.inlineCallbacks def test_client_tor_option_no_txtorcon(self): - txtorcon = mock.patch('allmydata.util.tor_provider._import_txtorcon', return_value=None) - with txtorcon: + with disable_modules("txtorcon"): basedir = self.mktemp() rc, out, err = yield run_cli("create-node", "--listen=tor", "--tor-launch", basedir) self.assertTrue(rc != 0) @@ -145,9 +212,7 @@ class Config(unittest.TestCase): @defer.inlineCallbacks def test_client_hide_ip_no_txtorcon(self): - txtorcon = mock.patch('allmydata.util.tor_provider._import_txtorcon', - return_value=None) - with txtorcon: + with disable_modules("txtorcon"): basedir = self.mktemp() rc, out, err = yield run_cli("create-client", "--hide-ip", basedir) self.assertEqual(0, rc) @@ -250,7 +315,7 @@ class Config(unittest.TestCase): parse_cli, "create-node", "--listen=tcp,none", basedir) - self.assertEqual(str(e), "--listen= must be none, or one/some of: tcp, tor, i2p") + self.assertEqual(str(e), "--listen=tcp requires --hostname=") def test_node_listen_bad(self): basedir = self.mktemp() @@ -258,7 +323,7 @@ class Config(unittest.TestCase): parse_cli, "create-node", "--listen=XYZZY,tcp", basedir) - self.assertEqual(str(e), "--listen= must be none, or one/some of: tcp, tor, i2p") + self.assertEqual(str(e), "--listen= must be one/some of: i2p, none, tcp, tor") def test_node_listen_tor_hostname(self): e = 
self.assertRaises(usage.UsageError, @@ -292,27 +357,20 @@ class Config(unittest.TestCase): self.assertIn("To avoid clobbering anything, I am going to quit now", err) @defer.inlineCallbacks - def test_node_slow_tor(self): - basedir = self.mktemp() + def test_node_slow(self): + """ + A node can be created using a listener type that returns an + unfired Deferred from its ``create_config`` method. + """ d = defer.Deferred() - with mock.patch("allmydata.util.tor_provider.create_config", - return_value=d): - d2 = run_cli("create-node", "--listen=tor", basedir) - d.callback(({}, "port", "location")) - rc, out, err = yield d2 - self.assertEqual(rc, 0) - self.assertIn("Node created", out) - self.assertEqual(err, "") + slow = StaticProvider(True, False, d, None) + create_node._LISTENERS["xxyzy"] = slow + self.addCleanup(lambda: create_node._LISTENERS.pop("xxyzy")) - @defer.inlineCallbacks - def test_node_slow_i2p(self): basedir = self.mktemp() - d = defer.Deferred() - with mock.patch("allmydata.util.i2p_provider.create_config", - return_value=d): - d2 = run_cli("create-node", "--listen=i2p", basedir) - d.callback(({}, "port", "location")) - rc, out, err = yield d2 + d2 = run_cli("create-node", "--listen=xxyzy", basedir) + d.callback(None) + rc, out, err = yield d2 self.assertEqual(rc, 0) self.assertIn("Node created", out) self.assertEqual(err, "") @@ -353,19 +411,43 @@ class Config(unittest.TestCase): self.assertIn("is not empty", err) self.assertIn("To avoid clobbering anything, I am going to quit now", err) +def fake_config(testcase: unittest.TestCase, module: Any, result: Any) -> list[tuple]: + """ + Monkey-patch a fake configuration function into the given module. + + :param testcase: The test case to use to do the monkey-patching. + + :param module: The module into which to patch the fake function. + + :param result: The return value for the fake function. + + :return: A list of tuples of the arguments the fake function was called + with. 
+ """ + calls = [] + def fake_config(reactor, cli_config): + calls.append((reactor, cli_config)) + return result + testcase.patch(module, "create_config", fake_config) + return calls + class Tor(unittest.TestCase): def test_default(self): basedir = self.mktemp() - tor_config = {"abc": "def"} + tor_config = {"tor": [("abc", "def")]} tor_port = "ghi" tor_location = "jkl" - config_d = defer.succeed( (tor_config, tor_port, tor_location) ) - with mock.patch("allmydata.util.tor_provider.create_config", - return_value=config_d) as co: - rc, out, err = self.successResultOf( - run_cli("create-node", "--listen=tor", basedir)) - self.assertEqual(len(co.mock_calls), 1) - args = co.mock_calls[0][1] + config_d = defer.succeed( + ListenerConfig([tor_port], [tor_location], tor_config) + ) + + calls = fake_config(self, tor_provider, config_d) + rc, out, err = self.successResultOf( + run_cli("create-node", "--listen=tor", basedir), + ) + + self.assertEqual(len(calls), 1) + args = calls[0] self.assertIdentical(args[0], reactor) self.assertIsInstance(args[1], create_node.CreateNodeOptions) self.assertEqual(args[1]["listen"], "tor") @@ -375,33 +457,41 @@ class Tor(unittest.TestCase): self.assertEqual(cfg.get("node", "tub.location"), "jkl") def test_launch(self): + """ + The ``--tor-launch`` command line option sets ``tor-launch`` to + ``True``. 
+ """ basedir = self.mktemp() - tor_config = {"abc": "def"} - tor_port = "ghi" - tor_location = "jkl" - config_d = defer.succeed( (tor_config, tor_port, tor_location) ) - with mock.patch("allmydata.util.tor_provider.create_config", - return_value=config_d) as co: - rc, out, err = self.successResultOf( - run_cli("create-node", "--listen=tor", "--tor-launch", - basedir)) - args = co.mock_calls[0][1] + config_d = defer.succeed(None) + + calls = fake_config(self, tor_provider, config_d) + rc, out, err = self.successResultOf( + run_cli( + "create-node", "--listen=tor", "--tor-launch", + basedir, + ), + ) + args = calls[0] self.assertEqual(args[1]["listen"], "tor") self.assertEqual(args[1]["tor-launch"], True) self.assertEqual(args[1]["tor-control-port"], None) def test_control_port(self): + """ + The ``--tor-control-port`` command line parameter's value is + passed along as the ``tor-control-port`` value. + """ basedir = self.mktemp() - tor_config = {"abc": "def"} - tor_port = "ghi" - tor_location = "jkl" - config_d = defer.succeed( (tor_config, tor_port, tor_location) ) - with mock.patch("allmydata.util.tor_provider.create_config", - return_value=config_d) as co: - rc, out, err = self.successResultOf( - run_cli("create-node", "--listen=tor", "--tor-control-port=mno", - basedir)) - args = co.mock_calls[0][1] + config_d = defer.succeed(None) + + calls = fake_config(self, tor_provider, config_d) + rc, out, err = self.successResultOf( + run_cli( + "create-node", "--listen=tor", "--tor-control-port=mno", + basedir, + ), + ) + args = calls[0] self.assertEqual(args[1]["listen"], "tor") self.assertEqual(args[1]["tor-launch"], False) self.assertEqual(args[1]["tor-control-port"], "mno") @@ -430,16 +520,17 @@ class Tor(unittest.TestCase): class I2P(unittest.TestCase): def test_default(self): basedir = self.mktemp() - i2p_config = {"abc": "def"} + i2p_config = {"i2p": [("abc", "def")]} i2p_port = "ghi" i2p_location = "jkl" - dest_d = defer.succeed( (i2p_config, i2p_port, 
i2p_location) ) - with mock.patch("allmydata.util.i2p_provider.create_config", - return_value=dest_d) as co: - rc, out, err = self.successResultOf( - run_cli("create-node", "--listen=i2p", basedir)) - self.assertEqual(len(co.mock_calls), 1) - args = co.mock_calls[0][1] + dest_d = defer.succeed(ListenerConfig([i2p_port], [i2p_location], i2p_config)) + + calls = fake_config(self, i2p_provider, dest_d) + rc, out, err = self.successResultOf( + run_cli("create-node", "--listen=i2p", basedir), + ) + self.assertEqual(len(calls), 1) + args = calls[0] self.assertIdentical(args[0], reactor) self.assertIsInstance(args[1], create_node.CreateNodeOptions) self.assertEqual(args[1]["listen"], "i2p") @@ -457,16 +548,16 @@ class I2P(unittest.TestCase): def test_sam_port(self): basedir = self.mktemp() - i2p_config = {"abc": "def"} - i2p_port = "ghi" - i2p_location = "jkl" - dest_d = defer.succeed( (i2p_config, i2p_port, i2p_location) ) - with mock.patch("allmydata.util.i2p_provider.create_config", - return_value=dest_d) as co: - rc, out, err = self.successResultOf( - run_cli("create-node", "--listen=i2p", "--i2p-sam-port=mno", - basedir)) - args = co.mock_calls[0][1] + dest_d = defer.succeed(None) + + calls = fake_config(self, i2p_provider, dest_d) + rc, out, err = self.successResultOf( + run_cli( + "create-node", "--listen=i2p", "--i2p-sam-port=mno", + basedir, + ), + ) + args = calls[0] self.assertEqual(args[1]["listen"], "i2p") self.assertEqual(args[1]["i2p-launch"], False) self.assertEqual(args[1]["i2p-sam-port"], "mno") diff --git a/src/allmydata/test/cli/test_create_alias.py b/src/allmydata/test/cli/test_create_alias.py index 176bf7576..02978deca 100644 --- a/src/allmydata/test/cli/test_create_alias.py +++ b/src/allmydata/test/cli/test_create_alias.py @@ -1,16 +1,8 @@ """ Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - -from six.moves import StringIO +from io import StringIO import os.path from twisted.trial import unittest from urllib.parse import quote as url_quote diff --git a/src/allmydata/test/cli/test_grid_manager.py b/src/allmydata/test/cli/test_grid_manager.py new file mode 100644 index 000000000..b44b322d2 --- /dev/null +++ b/src/allmydata/test/cli/test_grid_manager.py @@ -0,0 +1,317 @@ +""" +Tests for the grid manager CLI. +""" + +import os +from io import ( + BytesIO, +) +from unittest import ( + skipIf, +) + +from twisted.trial.unittest import ( + TestCase, +) +from allmydata.cli.grid_manager import ( + grid_manager, +) + +import click.testing + +# these imports support the tests for `tahoe *` subcommands +from ..common_util import ( + run_cli, +) +from ..common import ( + superuser, +) +from twisted.internet.defer import ( + inlineCallbacks, +) +from twisted.python.filepath import ( + FilePath, +) +from twisted.python.runtime import ( + platform, +) +from allmydata.util import jsonbytes as json + +class GridManagerCommandLine(TestCase): + """ + Test the mechanics of the `grid-manager` command + """ + + def setUp(self): + self.runner = click.testing.CliRunner() + super(GridManagerCommandLine, self).setUp() + + def invoke_and_check(self, *args, **kwargs): + """Invoke a command with the runner and ensure it succeeded.""" + result = self.runner.invoke(*args, **kwargs) + if result.exception is not None: + raise result.exc_info[1].with_traceback(result.exc_info[2]) + self.assertEqual(result.exit_code, 0, result) + return result + + def test_create(self): + """ + Create a new grid-manager + """ + with 
self.runner.isolated_filesystem(): + result = self.invoke_and_check(grid_manager, ["--config", "foo", "create"]) + self.assertEqual(["foo"], os.listdir(".")) + self.assertEqual(["config.json"], os.listdir("./foo")) + result = self.invoke_and_check(grid_manager, ["--config", "foo", "public-identity"]) + self.assertTrue(result.output.startswith("pub-v0-")) + + def test_load_invalid(self): + """ + An invalid config is reported to the user + """ + with self.runner.isolated_filesystem(): + with open("config.json", "wb") as f: + f.write(json.dumps_bytes({"not": "valid"})) + result = self.runner.invoke(grid_manager, ["--config", ".", "public-identity"]) + self.assertNotEqual(result.exit_code, 0) + self.assertIn( + "Error loading Grid Manager", + result.output, + ) + + def test_create_already(self): + """ + It's an error to create a new grid-manager in an existing + directory. + """ + with self.runner.isolated_filesystem(): + result = self.invoke_and_check(grid_manager, ["--config", "foo", "create"]) + result = self.runner.invoke(grid_manager, ["--config", "foo", "create"]) + self.assertEqual(1, result.exit_code) + self.assertIn( + "Can't create", + result.stdout, + ) + + def test_create_stdout(self): + """ + Create a new grid-manager with no files + """ + with self.runner.isolated_filesystem(): + result = self.invoke_and_check(grid_manager, ["--config", "-", "create"]) + self.assertEqual([], os.listdir(".")) + config = json.loads(result.output) + self.assertEqual( + {"private_key", "grid_manager_config_version"}, + set(config.keys()), + ) + + def test_list_stdout(self): + """ + Load Grid Manager without files (using 'list' subcommand, but any will do) + """ + config = { + "storage_servers": { + "storage0": { + "public_key": "pub-v0-cbq6hcf3pxcz6ouoafrbktmkixkeuywpcpbcomzd3lqbkq4nmfga" + } + }, + "private_key": "priv-v0-6uinzyaxy3zvscwgsps5pxcfezhrkfb43kvnrbrhhfzyduyqnniq", + "grid_manager_config_version": 0 + } + result = self.invoke_and_check( + grid_manager, 
["--config", "-", "list"], + input=BytesIO(json.dumps_bytes(config)), + ) + self.assertEqual(result.exit_code, 0) + self.assertEqual( + "storage0: pub-v0-cbq6hcf3pxcz6ouoafrbktmkixkeuywpcpbcomzd3lqbkq4nmfga\n", + result.output, + ) + + def test_add_and_sign(self): + """ + Add a new storage-server and sign a certificate for it + """ + pubkey = "pub-v0-cbq6hcf3pxcz6ouoafrbktmkixkeuywpcpbcomzd3lqbkq4nmfga" + with self.runner.isolated_filesystem(): + self.invoke_and_check(grid_manager, ["--config", "foo", "create"]) + self.invoke_and_check(grid_manager, ["--config", "foo", "add", "storage0", pubkey]) + result = self.invoke_and_check(grid_manager, ["--config", "foo", "sign", "storage0", "10"]) + sigcert = json.loads(result.output) + self.assertEqual({"certificate", "signature"}, set(sigcert.keys())) + cert = json.loads(sigcert['certificate']) + self.assertEqual(cert["public_key"], pubkey) + + def test_add_and_sign_second_cert(self): + """ + Add a new storage-server and sign two certificates. 
+ """ + pubkey = "pub-v0-cbq6hcf3pxcz6ouoafrbktmkixkeuywpcpbcomzd3lqbkq4nmfga" + with self.runner.isolated_filesystem(): + self.invoke_and_check(grid_manager, ["--config", "foo", "create"]) + self.invoke_and_check(grid_manager, ["--config", "foo", "add", "storage0", pubkey]) + self.invoke_and_check(grid_manager, ["--config", "foo", "sign", "storage0", "10"]) + self.invoke_and_check(grid_manager, ["--config", "foo", "sign", "storage0", "10"]) + # we should now have two certificates stored + self.assertEqual( + set(FilePath("foo").listdir()), + {'storage0.cert.1', 'storage0.cert.0', 'config.json'}, + ) + + def test_add_twice(self): + """ + An error is reported trying to add an existing server + """ + pubkey0 = "pub-v0-cbq6hcf3pxcz6ouoafrbktmkixkeuywpcpbcomzd3lqbkq4nmfga" + pubkey1 = "pub-v0-5ysc55trfvfvg466v46j4zmfyltgus3y2gdejifctv7h4zkuyveq" + with self.runner.isolated_filesystem(): + self.invoke_and_check(grid_manager, ["--config", "foo", "create"]) + self.invoke_and_check(grid_manager, ["--config", "foo", "add", "storage0", pubkey0]) + result = self.runner.invoke(grid_manager, ["--config", "foo", "add", "storage0", pubkey1]) + self.assertNotEquals(result.exit_code, 0) + self.assertIn( + "A storage-server called 'storage0' already exists", + result.output, + ) + + def test_add_list_remove(self): + """ + Add a storage server, list it, remove it. 
+ """ + pubkey = "pub-v0-cbq6hcf3pxcz6ouoafrbktmkixkeuywpcpbcomzd3lqbkq4nmfga" + with self.runner.isolated_filesystem(): + self.invoke_and_check(grid_manager, ["--config", "foo", "create"]) + self.invoke_and_check(grid_manager, ["--config", "foo", "add", "storage0", pubkey]) + self.invoke_and_check(grid_manager, ["--config", "foo", "sign", "storage0", "1"]) + + result = self.invoke_and_check(grid_manager, ["--config", "foo", "list"]) + names = [ + line.split(':')[0] + for line in result.output.strip().split('\n') + if not line.startswith(" ") # "cert" lines start with whitespace + ] + self.assertEqual(names, ["storage0"]) + + self.invoke_and_check(grid_manager, ["--config", "foo", "remove", "storage0"]) + + result = self.invoke_and_check(grid_manager, ["--config", "foo", "list"]) + self.assertEqual(result.output.strip(), "") + + def test_remove_missing(self): + """ + Error reported when removing non-existant server + """ + with self.runner.isolated_filesystem(): + self.invoke_and_check(grid_manager, ["--config", "foo", "create"]) + result = self.runner.invoke(grid_manager, ["--config", "foo", "remove", "storage0"]) + self.assertNotEquals(result.exit_code, 0) + self.assertIn( + "No storage-server called 'storage0' exists", + result.output, + ) + + def test_sign_missing(self): + """ + Error reported when signing non-existant server + """ + with self.runner.isolated_filesystem(): + self.invoke_and_check(grid_manager, ["--config", "foo", "create"]) + result = self.runner.invoke(grid_manager, ["--config", "foo", "sign", "storage0", "42"]) + self.assertNotEquals(result.exit_code, 0) + self.assertIn( + "No storage-server called 'storage0' exists", + result.output, + ) + + @skipIf(platform.isWindows(), "We don't know how to set permissions on Windows.") + @skipIf(superuser, "cannot test as superuser with all permissions") + def test_sign_bad_perms(self): + """ + Error reported if we can't create certificate file + """ + pubkey = 
"pub-v0-cbq6hcf3pxcz6ouoafrbktmkixkeuywpcpbcomzd3lqbkq4nmfga" + with self.runner.isolated_filesystem(): + self.invoke_and_check(grid_manager, ["--config", "foo", "create"]) + self.invoke_and_check(grid_manager, ["--config", "foo", "add", "storage0", pubkey]) + # make the directory un-writable (so we can't create a new cert) + os.chmod("foo", 0o550) + result = self.runner.invoke(grid_manager, ["--config", "foo", "sign", "storage0", "42"]) + self.assertEquals(result.exit_code, 1) + self.assertIn( + "Permission denied", + result.output, + ) + + +class TahoeAddGridManagerCert(TestCase): + """ + Test `tahoe admin add-grid-manager-cert` subcommand + """ + + @inlineCallbacks + def test_help(self): + """ + some kind of help is printed + """ + code, out, err = yield run_cli("admin", "add-grid-manager-cert") + self.assertEqual(err, "") + self.assertNotEqual(0, code) + + @inlineCallbacks + def test_no_name(self): + """ + error to miss --name option + """ + code, out, err = yield run_cli( + "admin", "add-grid-manager-cert", "--filename", "-", + stdin=b"the cert", + ) + self.assertIn( + "Must provide --name", + out + ) + + @inlineCallbacks + def test_no_filename(self): + """ + error to miss --name option + """ + code, out, err = yield run_cli( + "admin", "add-grid-manager-cert", "--name", "foo", + stdin=b"the cert", + ) + self.assertIn( + "Must provide --filename", + out + ) + + @inlineCallbacks + def test_add_one(self): + """ + we can add a certificate + """ + nodedir = self.mktemp() + fake_cert = b"""{"certificate": "", "signature": ""}""" + + code, out, err = yield run_cli( + "--node-directory", nodedir, + "admin", "add-grid-manager-cert", "-f", "-", "--name", "foo", + stdin=fake_cert, + ignore_stderr=True, + ) + nodepath = FilePath(nodedir) + with nodepath.child("tahoe.cfg").open("r") as f: + config_data = f.read() + + self.assertIn("tahoe.cfg", nodepath.listdir()) + self.assertIn( + b"foo = foo.cert", + config_data, + ) + self.assertIn("foo.cert", nodepath.listdir()) + 
with nodepath.child("foo.cert").open("r") as f: + self.assertEqual( + json.load(f), + json.loads(fake_cert) + ) diff --git a/src/allmydata/test/cli/test_invite.py b/src/allmydata/test/cli/test_invite.py index 20d012995..1302e5970 100644 --- a/src/allmydata/test/cli/test_invite.py +++ b/src/allmydata/test/cli/test_invite.py @@ -1,77 +1,126 @@ """ -Ported to Pythn 3. +Tests for ``tahoe invite``. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 +from __future__ import annotations -import os -import mock import json +import os +from functools import partial from os.path import join +from typing import Callable, Optional, Sequence, TypeVar, Union, Coroutine, Any, Tuple, cast, Generator -try: - from typing import Optional, Sequence -except ImportError: - pass - -from twisted.trial import unittest from twisted.internet import defer +from twisted.trial import unittest + +from ...client import read_config +from ...scripts import runner +from ...util.jsonbytes import dumps_bytes from ..common_util import run_cli from ..no_network import GridTestMixin from .common import CLITestMixin -from ...client import ( - read_config, -) +from .wormholetesting import MemoryWormholeServer, TestingHelper, memory_server, IWormhole -class _FakeWormhole(object): - def __init__(self, outgoing_messages): - self.messages = [] - for o in outgoing_messages: - assert isinstance(o, bytes) - self._outgoing = outgoing_messages +# Logically: +# JSONable = dict[str, Union[JSONable, None, int, float, str, list[JSONable]]] +# +# But practically: +JSONable = Union[dict, None, int, float, str, list] - def get_code(self): - return defer.succeed(u"6-alarmist-tuba") - def set_code(self, code): - 
self._code = code +async def open_wormhole() -> tuple[Callable, IWormhole, str]: + """ + Create a new in-memory wormhole server, open one end of a wormhole, and + return it and related info. - def get_welcome(self): - return defer.succeed( - { - u"welcome": {}, - } + :return: A three-tuple allowing use of the wormhole. The first element is + a callable like ``run_cli`` but which will run commands so that they + use the in-memory wormhole server instead of a real one. The second + element is the open wormhole. The third element is the wormhole's + code. + """ + server = MemoryWormholeServer() + options = runner.Options() + options.wormhole = server + reactor = object() + + wormhole = server.create( + "tahoe-lafs.org/invite", + "ws://wormhole.tahoe-lafs.org:4000/v1", + reactor, + ) + code = await wormhole.get_code() + + return (partial(run_cli, options=options), wormhole, code) + + +def make_simple_peer( + reactor, + server: MemoryWormholeServer, + helper: TestingHelper, + messages: Sequence[JSONable], +) -> Callable[[], Coroutine[defer.Deferred[IWormhole], Any, IWormhole]]: + """ + Make a wormhole peer that just sends the given messages. + + The returned function returns an awaitable that fires with the peer's end + of the wormhole. + """ + async def peer() -> IWormhole: + # Run the client side of the invitation by manually pumping a + # message through the wormhole. + + # First, wait for the server to create the wormhole at all. + wormhole = await helper.wait_for_wormhole( + "tahoe-lafs.org/invite", + "ws://wormhole.tahoe-lafs.org:4000/v1", ) + # Then read out its code and open the other side of the wormhole. 
+ code = await wormhole.when_code() + other_end = server.create( + "tahoe-lafs.org/invite", + "ws://wormhole.tahoe-lafs.org:4000/v1", + reactor, + ) + other_end.set_code(code) + send_messages(other_end, messages) + return other_end - def allocate_code(self): - return None - - def send_message(self, msg): - assert isinstance(msg, bytes) - self.messages.append(msg) - - def get_message(self): - return defer.succeed(self._outgoing.pop(0)) - - def close(self): - return defer.succeed(None) + return peer -def _create_fake_wormhole(outgoing_messages): - outgoing_messages = [ - m.encode("utf-8") if isinstance(m, str) else m - for m in outgoing_messages - ] - return _FakeWormhole(outgoing_messages) +def send_messages(wormhole: IWormhole, messages: Sequence[JSONable]) -> None: + """ + Send a list of message through a wormhole. + """ + for msg in messages: + wormhole.send_message(dumps_bytes(msg)) +A = TypeVar("A") +B = TypeVar("B") + +def concurrently( + client: Callable[[], Union[ + Coroutine[defer.Deferred[A], Any, A], + Generator[defer.Deferred[A], Any, A], + ]], + server: Callable[[], Union[ + Coroutine[defer.Deferred[B], Any, B], + Generator[defer.Deferred[B], Any, B], + ]], +) -> defer.Deferred[Tuple[A, B]]: + """ + Run two asynchronous functions concurrently and asynchronously return a + tuple of both their results. 
+ """ + result = defer.gatherResults([ + defer.Deferred.fromCoroutine(client()), + defer.Deferred.fromCoroutine(server()), + ]).addCallback(tuple) # type: ignore + return cast(defer.Deferred[Tuple[A, B]], result) + class Join(GridTestMixin, CLITestMixin, unittest.TestCase): @defer.inlineCallbacks @@ -86,41 +135,39 @@ class Join(GridTestMixin, CLITestMixin, unittest.TestCase): successfully join after an invite """ node_dir = self.mktemp() + run_cli, wormhole, code = yield defer.Deferred.fromCoroutine(open_wormhole()) + send_messages(wormhole, [ + {u"abilities": {u"server-v1": {}}}, + { + u"shares-needed": 1, + u"shares-happy": 1, + u"shares-total": 1, + u"nickname": u"somethinghopefullyunique", + u"introducer": u"pb://foo", + }, + ]) - with mock.patch('allmydata.scripts.create_node.wormhole') as w: - fake_wh = _create_fake_wormhole([ - json.dumps({u"abilities": {u"server-v1": {}}}), - json.dumps({ - u"shares-needed": 1, - u"shares-happy": 1, - u"shares-total": 1, - u"nickname": u"somethinghopefullyunique", - u"introducer": u"pb://foo", - }), - ]) - w.create = mock.Mock(return_value=fake_wh) + rc, out, err = yield run_cli( + "create-client", + "--join", code, + node_dir, + ) - rc, out, err = yield run_cli( - "create-client", - "--join", "1-abysmal-ant", - node_dir, - ) + self.assertEqual(0, rc) - self.assertEqual(0, rc) + config = read_config(node_dir, u"") + self.assertIn( + "pb://foo", + set( + furl + for (furl, cache) + in config.get_introducer_configuration().values() + ), + ) - config = read_config(node_dir, u"") - self.assertIn( - "pb://foo", - set( - furl - for (furl, cache) - in config.get_introducer_configuration().values() - ), - ) - - with open(join(node_dir, 'tahoe.cfg'), 'r') as f: - config = f.read() - self.assertIn(u"somethinghopefullyunique", config) + with open(join(node_dir, 'tahoe.cfg'), 'r') as f: + config = f.read() + self.assertIn(u"somethinghopefullyunique", config) @defer.inlineCallbacks def test_create_node_illegal_option(self): @@ -128,30 
+175,28 @@ class Join(GridTestMixin, CLITestMixin, unittest.TestCase): Server sends JSON with unknown/illegal key """ node_dir = self.mktemp() + run_cli, wormhole, code = yield defer.Deferred.fromCoroutine(open_wormhole()) + send_messages(wormhole, [ + {u"abilities": {u"server-v1": {}}}, + { + u"shares-needed": 1, + u"shares-happy": 1, + u"shares-total": 1, + u"nickname": u"somethinghopefullyunique", + u"introducer": u"pb://foo", + u"something-else": u"not allowed", + }, + ]) - with mock.patch('allmydata.scripts.create_node.wormhole') as w: - fake_wh = _create_fake_wormhole([ - json.dumps({u"abilities": {u"server-v1": {}}}), - json.dumps({ - u"shares-needed": 1, - u"shares-happy": 1, - u"shares-total": 1, - u"nickname": u"somethinghopefullyunique", - u"introducer": u"pb://foo", - u"something-else": u"not allowed", - }), - ]) - w.create = mock.Mock(return_value=fake_wh) + rc, out, err = yield run_cli( + "create-client", + "--join", code, + node_dir, + ) - rc, out, err = yield run_cli( - "create-client", - "--join", "1-abysmal-ant", - node_dir, - ) - - # should still succeed -- just ignores the not-whitelisted - # "something-else" option - self.assertEqual(0, rc) + # should still succeed -- just ignores the not-whitelisted + # "something-else" option + self.assertEqual(0, rc) class Invite(GridTestMixin, CLITestMixin, unittest.TestCase): @@ -168,8 +213,7 @@ class Invite(GridTestMixin, CLITestMixin, unittest.TestCase): intro_dir, ) - def _invite_success(self, extra_args=(), tahoe_config=None): - # type: (Sequence[bytes], Optional[bytes]) -> defer.Deferred + async def _invite_success(self, extra_args: Sequence[bytes] = (), tahoe_config: Optional[bytes] = None) -> str: """ Exercise an expected-success case of ``tahoe invite``. 
@@ -190,53 +234,58 @@ class Invite(GridTestMixin, CLITestMixin, unittest.TestCase): with open(join(intro_dir, "tahoe.cfg"), "wb") as fobj_cfg: fobj_cfg.write(tahoe_config) - with mock.patch('allmydata.scripts.tahoe_invite.wormhole') as w: - fake_wh = _create_fake_wormhole([ - json.dumps({u"abilities": {u"client-v1": {}}}), - ]) - w.create = mock.Mock(return_value=fake_wh) + wormhole_server, helper = memory_server() + options = runner.Options() + options.wormhole = wormhole_server + reactor = object() - extra_args = tuple(extra_args) - - d = run_cli( + async def server(): + # Run the server side of the invitation process using the CLI. + rc, out, err = await run_cli( "-d", intro_dir, "invite", - *(extra_args + ("foo",)) + *tuple(extra_args) + ("foo",), + options=options, ) - def done(result): - rc, out, err = result - self.assertEqual(2, len(fake_wh.messages)) - self.assertEqual( - json.loads(fake_wh.messages[0]), - { - "abilities": - { - "server-v1": {} - }, - }, - ) - invite = json.loads(fake_wh.messages[1]) - self.assertEqual( - invite["nickname"], "foo", - ) - self.assertEqual( - invite["introducer"], "pb://fooblam", - ) - return invite - d.addCallback(done) - return d + # Send a proper client abilities message. + client = make_simple_peer(reactor, wormhole_server, helper, [{u"abilities": {u"client-v1": {}}}]) + other_end, _ = await concurrently(client, server) + + # Check the server's messages. First, it should announce its + # abilities correctly. + server_abilities = json.loads(await other_end.when_received()) + self.assertEqual( + server_abilities, + { + "abilities": + { + "server-v1": {} + }, + }, + ) + + # Second, it should have an invitation with a nickname and introducer + # furl. 
+ invite = json.loads(await other_end.when_received()) + self.assertEqual( + invite["nickname"], "foo", + ) + self.assertEqual( + invite["introducer"], "pb://fooblam", + ) + return invite @defer.inlineCallbacks def test_invite_success(self): """ successfully send an invite """ - invite = yield self._invite_success(( + invite = yield defer.Deferred.fromCoroutine(self._invite_success(( "--shares-needed", "1", "--shares-happy", "2", "--shares-total", "3", - )) + ))) self.assertEqual( invite["shares-needed"], "1", ) @@ -253,12 +302,12 @@ class Invite(GridTestMixin, CLITestMixin, unittest.TestCase): If ``--shares-{needed,happy,total}`` are not given on the command line then the invitation is generated using the configured values. """ - invite = yield self._invite_success(tahoe_config=b""" + invite = yield defer.Deferred.fromCoroutine(self._invite_success(tahoe_config=b""" [client] shares.needed = 2 shares.happy = 4 shares.total = 6 -""") +""")) self.assertEqual( invite["shares-needed"], "2", ) @@ -277,22 +326,20 @@ shares.total = 6 """ intro_dir = os.path.join(self.basedir, "introducer") - with mock.patch('allmydata.scripts.tahoe_invite.wormhole') as w: - fake_wh = _create_fake_wormhole([ - json.dumps({u"abilities": {u"client-v1": {}}}), - ]) - w.create = mock.Mock(return_value=fake_wh) + options = runner.Options() + options.wormhole = None - rc, out, err = yield run_cli( - "-d", intro_dir, - "invite", - "--shares-needed", "1", - "--shares-happy", "1", - "--shares-total", "1", - "foo", - ) - self.assertNotEqual(rc, 0) - self.assertIn(u"Can't find introducer FURL", out + err) + rc, out, err = yield run_cli( + "-d", intro_dir, + "invite", + "--shares-needed", "1", + "--shares-happy", "1", + "--shares-total", "1", + "foo", + options=options, + ) + self.assertNotEqual(rc, 0) + self.assertIn(u"Can't find introducer FURL", out + err) @defer.inlineCallbacks def test_invite_wrong_client_abilities(self): @@ -306,23 +353,28 @@ shares.total = 6 with open(join(priv_dir, 
"introducer.furl"), "w") as f: f.write("pb://fooblam\n") - with mock.patch('allmydata.scripts.tahoe_invite.wormhole') as w: - fake_wh = _create_fake_wormhole([ - json.dumps({u"abilities": {u"client-v9000": {}}}), - ]) - w.create = mock.Mock(return_value=fake_wh) + wormhole_server, helper = memory_server() + options = runner.Options() + options.wormhole = wormhole_server + reactor = object() - rc, out, err = yield run_cli( + async def server(): + rc, out, err = await run_cli( "-d", intro_dir, "invite", "--shares-needed", "1", "--shares-happy", "1", "--shares-total", "1", "foo", + options=options, ) self.assertNotEqual(rc, 0) self.assertIn(u"No 'client-v1' in abilities", out + err) + # Send some surprising client abilities. + client = make_simple_peer(reactor, wormhole_server, helper, [{u"abilities": {u"client-v9000": {}}}]) + yield concurrently(client, server) + @defer.inlineCallbacks def test_invite_no_client_abilities(self): """ @@ -335,23 +387,30 @@ shares.total = 6 with open(join(priv_dir, "introducer.furl"), "w") as f: f.write("pb://fooblam\n") - with mock.patch('allmydata.scripts.tahoe_invite.wormhole') as w: - fake_wh = _create_fake_wormhole([ - json.dumps({}), - ]) - w.create = mock.Mock(return_value=fake_wh) + wormhole_server, helper = memory_server() + options = runner.Options() + options.wormhole = wormhole_server + reactor = object() - rc, out, err = yield run_cli( + async def server(): + # Run the server side of the invitation process using the CLI. + rc, out, err = await run_cli( "-d", intro_dir, "invite", "--shares-needed", "1", "--shares-happy", "1", "--shares-total", "1", "foo", + options=options, ) self.assertNotEqual(rc, 0) self.assertIn(u"No 'abilities' from client", out + err) + # Send a no-abilities message through to the server. 
+ client = make_simple_peer(reactor, wormhole_server, helper, [{}]) + yield concurrently(client, server) + + @defer.inlineCallbacks def test_invite_wrong_server_abilities(self): """ @@ -364,26 +423,25 @@ shares.total = 6 with open(join(priv_dir, "introducer.furl"), "w") as f: f.write("pb://fooblam\n") - with mock.patch('allmydata.scripts.create_node.wormhole') as w: - fake_wh = _create_fake_wormhole([ - json.dumps({u"abilities": {u"server-v9000": {}}}), - json.dumps({ - "shares-needed": "1", - "shares-total": "1", - "shares-happy": "1", - "nickname": "foo", - "introducer": "pb://fooblam", - }), - ]) - w.create = mock.Mock(return_value=fake_wh) + run_cli, wormhole, code = yield defer.Deferred.fromCoroutine(open_wormhole()) + send_messages(wormhole, [ + {u"abilities": {u"server-v9000": {}}}, + { + "shares-needed": "1", + "shares-total": "1", + "shares-happy": "1", + "nickname": "foo", + "introducer": "pb://fooblam", + }, + ]) - rc, out, err = yield run_cli( - "create-client", - "--join", "1-alarmist-tuba", - "foo", - ) - self.assertNotEqual(rc, 0) - self.assertIn("Expected 'server-v1' in server abilities", out + err) + rc, out, err = yield run_cli( + "create-client", + "--join", code, + "foo", + ) + self.assertNotEqual(rc, 0) + self.assertIn("Expected 'server-v1' in server abilities", out + err) @defer.inlineCallbacks def test_invite_no_server_abilities(self): @@ -397,26 +455,25 @@ shares.total = 6 with open(join(priv_dir, "introducer.furl"), "w") as f: f.write("pb://fooblam\n") - with mock.patch('allmydata.scripts.create_node.wormhole') as w: - fake_wh = _create_fake_wormhole([ - json.dumps({}), - json.dumps({ - "shares-needed": "1", - "shares-total": "1", - "shares-happy": "1", - "nickname": "bar", - "introducer": "pb://fooblam", - }), - ]) - w.create = mock.Mock(return_value=fake_wh) + run_cli, wormhole, code = yield defer.Deferred.fromCoroutine(open_wormhole()) + send_messages(wormhole, [ + {}, + { + "shares-needed": "1", + "shares-total": "1", + "shares-happy": 
"1", + "nickname": "bar", + "introducer": "pb://fooblam", + }, + ]) - rc, out, err = yield run_cli( - "create-client", - "--join", "1-alarmist-tuba", - "bar", - ) - self.assertNotEqual(rc, 0) - self.assertIn("Expected 'abilities' in server introduction", out + err) + rc, out, err = yield run_cli( + "create-client", + "--join", code, + "bar", + ) + self.assertNotEqual(rc, 0) + self.assertIn("Expected 'abilities' in server introduction", out + err) @defer.inlineCallbacks def test_invite_no_nick(self): @@ -425,13 +482,16 @@ shares.total = 6 """ intro_dir = os.path.join(self.basedir, "introducer") - with mock.patch('allmydata.scripts.tahoe_invite.wormhole'): - rc, out, err = yield run_cli( - "-d", intro_dir, - "invite", - "--shares-needed", "1", - "--shares-happy", "1", - "--shares-total", "1", - ) - self.assertTrue(rc) - self.assertIn(u"Provide a single argument", out + err) + options = runner.Options() + options.wormhole = None + + rc, out, err = yield run_cli( + "-d", intro_dir, + "invite", + "--shares-needed", "1", + "--shares-happy", "1", + "--shares-total", "1", + options=options, + ) + self.assertTrue(rc) + self.assertIn(u"Provide a single argument", out + err) diff --git a/src/allmydata/test/cli/test_list.py b/src/allmydata/test/cli/test_list.py index 1206579f1..55f0952fe 100644 --- a/src/allmydata/test/cli/test_list.py +++ b/src/allmydata/test/cli/test_list.py @@ -1,15 +1,6 @@ """ Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2, PY3 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 -from six import ensure_str from twisted.trial import unittest from twisted.internet import defer @@ -18,7 +9,7 @@ from allmydata.immutable import upload from allmydata.interfaces import MDMF_VERSION, SDMF_VERSION from allmydata.mutable.publish import MutableData from ..no_network import GridTestMixin -from allmydata.util.encodingutil import quote_output, get_io_encoding +from allmydata.util.encodingutil import quote_output from .common import CLITestMixin @@ -32,10 +23,6 @@ class List(GridTestMixin, CLITestMixin, unittest.TestCase): good_arg = u"g\u00F6\u00F6d" good_out = u"g\u00F6\u00F6d" - # On Python 2 we get bytes, so we need encoded version. On Python 3 - # stdio is unicode so can leave unchanged. 
- good_out_encoded = good_out if PY3 else good_out.encode(get_io_encoding()) - d = c0.create_dirnode() def _stash_root_and_create_file(n): self.rootnode = n @@ -58,7 +45,7 @@ class List(GridTestMixin, CLITestMixin, unittest.TestCase): (rc, out, err) = args self.failUnlessReallyEqual(rc, 0) self.assertEqual(len(err), 0, err) - expected = sorted([ensure_str("0share"), ensure_str("1share"), good_out_encoded]) + expected = sorted(["0share", "1share", good_out]) self.assertEqual(sorted(out.splitlines()), expected) d.addCallback(_check1) d.addCallback(lambda ign: self.do_cli("ls", "missing")) @@ -91,8 +78,8 @@ class List(GridTestMixin, CLITestMixin, unittest.TestCase): # listing a file (as dir/filename) should have the edge metadata, # including the filename self.failUnlessReallyEqual(rc, 0) - self.failUnlessIn(good_out_encoded, out) - self.failIfIn(ensure_str("-r-- %d -" % len(small)), out, + self.failUnlessIn(good_out, out) + self.failIfIn("-r-- %d -" % len(small), out, "trailing hyphen means unknown date") if good_arg is not None: diff --git a/src/allmydata/test/cli/test_mv.py b/src/allmydata/test/cli/test_mv.py index 0bb9ba369..183e94725 100644 --- a/src/allmydata/test/cli/test_mv.py +++ b/src/allmydata/test/cli/test_mv.py @@ -1,14 +1,6 @@ """ Ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 import os.path from twisted.trial import unittest diff --git a/src/allmydata/test/cli/test_put.py b/src/allmydata/test/cli/test_put.py index 03306ab71..c5f32a553 100644 --- a/src/allmydata/test/cli/test_put.py +++ b/src/allmydata/test/cli/test_put.py @@ -1,19 +1,18 @@ """ -Ported to Python 3. +Tests for the ``tahoe put`` CLI tool. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 +from __future__ import annotations +from typing import Callable, Awaitable, TypeVar, Any import os.path from twisted.trial import unittest from twisted.python import usage +from twisted.python.filepath import FilePath +from cryptography.hazmat.primitives.serialization import load_pem_private_key + +from allmydata.crypto.rsa import PrivateKey +from allmydata.uri import from_string from allmydata.util import fileutil from allmydata.scripts.common import get_aliases from allmydata.scripts import cli @@ -22,6 +21,9 @@ from ..common_util import skip_if_cannot_represent_filename from allmydata.util.encodingutil import get_io_encoding from allmydata.util.fileutil import abspath_expanduser_unicode from .common import CLITestMixin +from allmydata.mutable.common import derive_mutable_keys + +T = TypeVar("T") class Put(GridTestMixin, CLITestMixin, unittest.TestCase): @@ -215,6 +217,65 @@ class Put(GridTestMixin, CLITestMixin, unittest.TestCase): return d + async def test_unlinked_mutable_specified_private_key(self) -> None: + """ + A new unlinked mutable can be created using a specified private + key. + """ + self.basedir = "cli/Put/unlinked-mutable-with-key" + await self._test_mutable_specified_key( + lambda do_cli, pempath, datapath: do_cli( + "put", "--mutable", "--private-key-path", pempath.path, + stdin=datapath.getContent(), + ), + ) + + async def test_linked_mutable_specified_private_key(self) -> None: + """ + A new linked mutable can be created using a specified private key. 
+ """ + self.basedir = "cli/Put/linked-mutable-with-key" + await self._test_mutable_specified_key( + lambda do_cli, pempath, datapath: do_cli( + "put", "--mutable", "--private-key-path", pempath.path, datapath.path, + ), + ) + + async def _test_mutable_specified_key( + self, + run: Callable[[Any, FilePath, FilePath], Awaitable[tuple[int, bytes, bytes]]], + ) -> None: + """ + A helper for testing mutable creation. + + :param run: A function to do the creation. It is called with + ``self.do_cli`` and the path to a private key PEM file and a data + file. It returns whatever ``do_cli`` returns. + """ + self.set_up_grid(oneshare=True) + + pempath = FilePath(__file__).parent().sibling("data").child("openssl-rsa-2048.txt") + datapath = FilePath(self.basedir).child("data") + datapath.setContent(b"Hello world" * 1024) + + (rc, out, err) = await run(self.do_cli, pempath, datapath) + self.assertEqual(rc, 0, (out, err)) + cap = from_string(out.strip()) + # The capability is derived from the key we specified. + privkey = load_pem_private_key(pempath.getContent(), password=None) + assert isinstance(privkey, PrivateKey) + pubkey = privkey.public_key() + writekey, _, fingerprint = derive_mutable_keys((pubkey, privkey)) + self.assertEqual( + (writekey, fingerprint), + (cap.writekey, cap.fingerprint), + ) + # Also the capability we were given actually refers to the data we + # uploaded. + (rc, out, err) = await self.do_cli("get", out.strip()) + self.assertEqual(rc, 0, (out, err)) + self.assertEqual(out, datapath.getContent().decode("ascii")) + def test_mutable(self): # echo DATA1 | tahoe put --mutable - uploaded.txt # echo DATA2 | tahoe put - uploaded.txt # should modify-in-place diff --git a/src/allmydata/test/cli/test_run.py b/src/allmydata/test/cli/test_run.py index 28613e8c1..18ee8f67d 100644 --- a/src/allmydata/test/cli/test_run.py +++ b/src/allmydata/test/cli/test_run.py @@ -1,40 +1,34 @@ """ Tests for ``allmydata.scripts.tahoe_run``. - -Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 +from __future__ import annotations -from six.moves import ( +import re +from io import ( StringIO, ) -from testtools import ( - skipIf, -) +from hypothesis.strategies import text +from hypothesis import given, assume from testtools.matchers import ( Contains, Equals, - HasLength, ) -from twisted.python.runtime import ( - platform, -) from twisted.python.filepath import ( FilePath, ) from twisted.internet.testing import ( MemoryReactor, ) +from twisted.python.failure import ( + Failure, +) +from twisted.internet.error import ( + ConnectionDone, +) from twisted.internet.test.modulehelpers import ( AlternateReactor, ) @@ -44,6 +38,10 @@ from ...scripts.tahoe_run import ( RunOptions, run, ) +from ...util.pid import ( + check_pid_process, + InvalidPidFile, +) from ...scripts.runner import ( parse_options @@ -147,11 +145,96 @@ class DaemonizeTheRealServiceTests(SyncTestCase): ) +class DaemonizeStopTests(SyncTestCase): + """ + Tests relating to stopping the daemon + """ + def setUp(self): + self.nodedir = FilePath(self.mktemp()) + self.nodedir.makedirs() + config = "" + self.nodedir.child("tahoe.cfg").setContent(config.encode("ascii")) + self.nodedir.child("tahoe-client.tac").touch() + + # arrange to know when reactor.stop() is called + self.reactor = MemoryReactor() + self.stop_calls = [] + + def record_stop(): + self.stop_calls.append(object()) + self.reactor.stop = record_stop + + super().setUp() + + def _make_daemon(self, extra_argv: list[str]) -> DaemonizeTheRealService: + """ + Create the daemonization service. + + :param extra_argv: Extra arguments to pass between ``run`` and the + node path. 
+ """ + options = parse_options(["run"] + extra_argv + [self.nodedir.path]) + options.stdout = StringIO() + options.stderr = StringIO() + options.stdin = StringIO() + run_options = options.subOptions + return DaemonizeTheRealService( + "client", + self.nodedir.path, + run_options, + ) + + def _run_daemon(self) -> None: + """ + Simulate starting up the reactor so the daemon plugin can do its + stuff. + """ + # We happen to know that the service uses reactor.callWhenRunning + # to schedule all its work (though I couldn't tell you *why*). + # Make sure those scheduled calls happen. + waiting = self.reactor.whenRunningHooks[:] + del self.reactor.whenRunningHooks[:] + for f, a, k in waiting: + f(*a, **k) + + def _close_stdin(self) -> None: + """ + Simulate closing the daemon plugin's stdin. + """ + # there should be a single reader: our StandardIO process + # reader for stdin. Simulate it closing. + for r in self.reactor.getReaders(): + r.connectionLost(Failure(ConnectionDone())) + + def test_stop_on_stdin_close(self): + """ + We stop when stdin is closed. + """ + with AlternateReactor(self.reactor): + service = self._make_daemon([]) + service.startService() + self._run_daemon() + self._close_stdin() + self.assertEqual(len(self.stop_calls), 1) + + def test_allow_stdin_close(self): + """ + If --allow-stdin-close is specified then closing stdin doesn't + stop the process + """ + with AlternateReactor(self.reactor): + service = self._make_daemon(["--allow-stdin-close"]) + service.startService() + self._run_daemon() + self._close_stdin() + self.assertEqual(self.stop_calls, []) + + class RunTests(SyncTestCase): """ Tests for ``run``. 
""" - @skipIf(platform.isWindows(), "There are no PID files on Windows.") + def test_non_numeric_pid(self): """ If the pidfile exists but does not contain a numeric value, a complaint to @@ -159,7 +242,7 @@ class RunTests(SyncTestCase): """ basedir = FilePath(self.mktemp()).asTextMode() basedir.makedirs() - basedir.child(u"twistd.pid").setContent(b"foo") + basedir.child(u"running.process").setContent(b"foo") basedir.child(u"tahoe-client.tac").setContent(b"") config = RunOptions() @@ -168,17 +251,30 @@ class RunTests(SyncTestCase): config['basedir'] = basedir.path config.twistd_args = [] + reactor = MemoryReactor() + runs = [] - result_code = run(config, runApp=runs.append) + result_code = run(reactor, config, runApp=runs.append) self.assertThat( config.stderr.getvalue(), Contains("found invalid PID file in"), ) - self.assertThat( - runs, - HasLength(1), - ) - self.assertThat( - result_code, - Equals(0), - ) + # because the pidfile is invalid we shouldn't get to the + # .run() call itself. + self.assertThat(runs, Equals([])) + self.assertThat(result_code, Equals(1)) + + good_file_content_re = re.compile(r"\s*[0-9]*\s[0-9]*\s*", re.M) + + @given(text()) + def test_pidfile_contents(self, content): + """ + invalid contents for a pidfile raise errors + """ + assume(not self.good_file_content_re.match(content)) + pidfile = FilePath("pidfile") + pidfile.setContent(content.encode("utf8")) + + with self.assertRaises(InvalidPidFile): + with check_pid_process(pidfile): + pass diff --git a/src/allmydata/test/cli/test_status.py b/src/allmydata/test/cli/test_status.py index a015391e2..a3921b442 100644 --- a/src/allmydata/test/cli/test_status.py +++ b/src/allmydata/test/cli/test_status.py @@ -1,14 +1,7 @@ """ Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 from six import ensure_text import os diff --git a/src/allmydata/test/cli/wormholetesting.py b/src/allmydata/test/cli/wormholetesting.py new file mode 100644 index 000000000..3bcad1ebf --- /dev/null +++ b/src/allmydata/test/cli/wormholetesting.py @@ -0,0 +1,313 @@ +""" +An in-memory implementation of some of the magic-wormhole interfaces for +use by automated tests. + +For example:: + + async def peerA(mw): + wormhole = mw.create("myapp", "wss://myserver", reactor) + code = await wormhole.get_code() + print(f"I have a code: {code}") + message = await wormhole.when_received() + print(f"I have a message: {message}") + + async def local_peerB(helper, mw): + peerA_wormhole = await helper.wait_for_wormhole("myapp", "wss://myserver") + code = await peerA_wormhole.when_code() + + peerB_wormhole = mw.create("myapp", "wss://myserver") + peerB_wormhole.set_code(code) + + peerB_wormhole.send_message("Hello, peer A") + + # Run peerA against local_peerB with pure in-memory message passing. + server, helper = memory_server() + run(gather(peerA(server), local_peerB(helper, server))) + + # Run peerA against a peerB somewhere out in the world, using a real + # wormhole relay server somewhere. 
+ import wormhole + run(peerA(wormhole)) +""" + +from __future__ import annotations + +__all__ = ['MemoryWormholeServer', 'TestingHelper', 'memory_server', 'IWormhole'] + +from typing import Iterator, Optional, List, Tuple, Any, TextIO +from inspect import getfullargspec +from itertools import count +from sys import stderr + +from attrs import frozen, define, field, Factory +from twisted.internet.defer import Deferred, DeferredQueue, succeed +from wormhole._interfaces import IWormhole +from wormhole.wormhole import create +from zope.interface import implementer + +WormholeCode = str +WormholeMessage = bytes +AppId = str +RelayURL = str +ApplicationKey = Tuple[RelayURL, AppId] + +@define +class MemoryWormholeServer(object): + """ + A factory for in-memory wormholes. + + :ivar _apps: Wormhole state arranged by the application id and relay URL + it belongs to. + + :ivar _waiters: Observers waiting for a wormhole to be created for a + specific application id and relay URL combination. + """ + _apps: dict[ApplicationKey, _WormholeApp] = field(default=Factory(dict)) + _waiters: dict[ApplicationKey, Deferred[IWormhole]] = field(default=Factory(dict)) + + def create( + self, + appid: str, + relay_url: str, + reactor: Any, + # Unfortunately we need a mutable default to match the real API + versions: Any={}, # noqa: B006 + delegate: Optional[Any]=None, + journal: Optional[Any]=None, + tor: Optional[Any]=None, + timing: Optional[Any]=None, + stderr: TextIO=stderr, + _eventual_queue: Optional[Any]=None, + _enable_dilate: bool=False, + ) -> _MemoryWormhole: + """ + Create a wormhole. It will be able to connect to other wormholes created + by this instance (and constrained by the normal appid/relay_url + rules). 
+ """ + if tor is not None: + raise ValueError("Cannot deal with Tor right now.") + if _enable_dilate: + raise ValueError("Cannot deal with dilation right now.") + + key = (relay_url, appid) + wormhole = _MemoryWormhole(self._view(key)) + if key in self._waiters: + self._waiters.pop(key).callback(wormhole) + return wormhole + + def _view(self, key: ApplicationKey) -> _WormholeServerView: + """ + Created a view onto this server's state that is limited by a certain + appid/relay_url pair. + """ + return _WormholeServerView(self, key) + + +@frozen +class TestingHelper(object): + """ + Provide extra functionality for interacting with an in-memory wormhole + implementation. + + This is intentionally a separate API so that it is not confused with + proper public interface of the real wormhole implementation. + """ + _server: MemoryWormholeServer + + async def wait_for_wormhole(self, appid: AppId, relay_url: RelayURL) -> IWormhole: + """ + Wait for a wormhole to appear at a specific location. + + :param appid: The appid that the resulting wormhole will have. + + :param relay_url: The URL of the relay at which the resulting wormhole + will presume to be created. + + :return: The first wormhole to be created which matches the given + parameters. + """ + key = (relay_url, appid) + if key in self._server._waiters: + raise ValueError(f"There is already a waiter for {key}") + d : Deferred[IWormhole] = Deferred() + self._server._waiters[key] = d + wormhole = await d + return wormhole + + +def _verify() -> None: + """ + Roughly confirm that the in-memory wormhole creation function matches the + interface of the real implementation. + """ + # Poor man's interface verification. + + a = getfullargspec(create) + b = getfullargspec(MemoryWormholeServer.create) + # I know it has a `self` argument at the beginning. That's okay. 
+ b = b._replace(args=b.args[1:]) + + # Just compare the same information to check function signature + assert a.varkw == b.varkw + assert a.args == b.args + assert a.varargs == b.varargs + assert a.kwonlydefaults == b.kwonlydefaults + assert a.defaults == b.defaults + + +_verify() + + +@define +class _WormholeApp(object): + """ + Represent a collection of wormholes that belong to the same + appid/relay_url scope. + """ + wormholes: dict[WormholeCode, IWormhole] = field(default=Factory(dict)) + _waiting: dict[WormholeCode, List[Deferred[_MemoryWormhole]]] = field(default=Factory(dict)) + _counter: Iterator[int] = field(default=Factory(count)) + + def allocate_code(self, wormhole: IWormhole, code: Optional[WormholeCode]) -> WormholeCode: + """ + Allocate a new code for the given wormhole. + + This also associates the given wormhole with the code for future + lookup. + + Code generation logic is trivial and certainly not good enough for any + real use. It is sufficient for automated testing, though. + """ + if code is None: + code = "{}-persnickety-tardigrade".format(next(self._counter)) + self.wormholes.setdefault(code, []).append(wormhole) + try: + waiters = self._waiting.pop(code) + except KeyError: + pass + else: + for w in waiters: + w.callback(wormhole) + + return code + + def wait_for_wormhole(self, code: WormholeCode) -> Deferred[_MemoryWormhole]: + """ + Return a ``Deferred`` which fires with the next wormhole to be associated + with the given code. This is used to let the first end of a wormhole + rendezvous with the second end. + """ + d : Deferred[_MemoryWormhole] = Deferred() + self._waiting.setdefault(code, []).append(d) + return d + + +@frozen +class _WormholeServerView(object): + """ + Present an interface onto the server to be consumed by individual + wormholes. 
+ """ + _server: MemoryWormholeServer + _key: ApplicationKey + + def allocate_code(self, wormhole: _MemoryWormhole, code: Optional[WormholeCode]) -> WormholeCode: + """ + Allocate a new code for the given wormhole in the scope associated with + this view. + """ + app = self._server._apps.setdefault(self._key, _WormholeApp()) + return app.allocate_code(wormhole, code) + + def wormhole_by_code(self, code: WormholeCode, exclude: object) -> Deferred[IWormhole]: + """ + Retrieve all wormholes previously associated with a code. + """ + app = self._server._apps[self._key] + wormholes = app.wormholes[code] + try: + [wormhole] = list(wormhole for wormhole in wormholes if wormhole != exclude) + except ValueError: + return app.wait_for_wormhole(code) + return succeed(wormhole) + + +@implementer(IWormhole) +@define +class _MemoryWormhole(object): + """ + Represent one side of a wormhole as conceived by ``MemoryWormholeServer``. + """ + + _view: _WormholeServerView + _code: Optional[WormholeCode] = None + _payload: DeferredQueue[WormholeMessage] = field(default=Factory(DeferredQueue)) + _waiting_for_code: list[Deferred[WormholeCode]] = field(default=Factory(list)) + + def allocate_code(self) -> None: + if self._code is not None: + raise ValueError( + "allocate_code used with a wormhole which already has a code" + ) + self._code = self._view.allocate_code(self, None) + waiters = self._waiting_for_code + self._waiting_for_code = [] + for d in waiters: + d.callback(self._code) + + def set_code(self, code: WormholeCode) -> None: + if self._code is None: + self._code = code + self._view.allocate_code(self, code) + else: + raise ValueError("set_code used with a wormhole which already has a code") + + def when_code(self) -> Deferred[WormholeCode]: + if self._code is None: + d : Deferred[WormholeCode] = Deferred() + self._waiting_for_code.append(d) + return d + return succeed(self._code) + + def get_welcome(self) -> Deferred[str]: + return succeed("welcome") + + def send_message(self, 
payload: WormholeMessage) -> None: + self._payload.put(payload) + + def when_received(self) -> Deferred[WormholeMessage]: + if self._code is None: + raise ValueError( + "This implementation requires set_code or allocate_code " + "before when_received." + ) + d = self._view.wormhole_by_code(self._code, exclude=self) + + def got_wormhole(wormhole: _MemoryWormhole) -> Deferred[WormholeMessage]: + msg: Deferred[WormholeMessage] = wormhole._payload.get() + return msg + + d.addCallback(got_wormhole) + return d + + get_message = when_received + + def close(self) -> None: + pass + + # 0.9.2 compatibility + def get_code(self) -> Deferred[WormholeCode]: + if self._code is None: + self.allocate_code() + return self.when_code() + + get = when_received + + +def memory_server() -> tuple[MemoryWormholeServer, TestingHelper]: + """ + Create a paired in-memory wormhole server and testing helper. + """ + server = MemoryWormholeServer() + return server, TestingHelper(server) diff --git a/src/allmydata/test/cli_node_api.py b/src/allmydata/test/cli_node_api.py index 410796be2..bed4cfd55 100644 --- a/src/allmydata/test/cli_node_api.py +++ b/src/allmydata/test/cli_node_api.py @@ -1,14 +1,6 @@ """ Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 __all__ = [ "CLINodeAPI", @@ -134,7 +126,7 @@ class CLINodeAPI(object): @property def twistd_pid_file(self): - return self.basedir.child(u"twistd.pid") + return self.basedir.child(u"running.process") @property def node_url_file(self): diff --git a/src/allmydata/test/common.py b/src/allmydata/test/common.py index 0f2dc7c62..4ec3c2300 100644 --- a/src/allmydata/test/common.py +++ b/src/allmydata/test/common.py @@ -1,14 +1,8 @@ """ -Ported to Python 3. +Functionality related to a lot of the test suite. """ -from __future__ import print_function -from __future__ import absolute_import -from __future__ import division -from __future__ import unicode_literals +from __future__ import annotations -from future.utils import PY2, native_str -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 from past.builtins import chr as byteschr __all__ = [ @@ -28,6 +22,7 @@ __all__ = [ import sys import os, random, struct +from contextlib import contextmanager import six import tempfile from tempfile import mktemp @@ -87,6 +82,7 @@ from allmydata.interfaces import ( SDMF_VERSION, MDMF_VERSION, IAddressFamily, + NoSpace, ) from allmydata.check_results import CheckResults, CheckAndRepairResults, \ DeepCheckResults, DeepCheckAndRepairResults @@ -109,29 +105,21 @@ from allmydata.scripts.common import ( from ..crypto import ( ed25519, + rsa, ) from .eliotutil import ( EliotLoggedRunTest, ) from .common_util import ShouldFailMixin # noqa: F401 -if sys.platform == "win32" and PY2: - # Python 2.7 doesn't have 
good options for launching a process with - # non-ASCII in its command line. So use this alternative that does a - # better job. However, only use it on Windows because it doesn't work - # anywhere else. - from ._win_subprocess import ( - Popen, - ) -else: - from subprocess import ( - Popen, - ) from subprocess import ( + Popen, PIPE, ) -TEST_RSA_KEY_SIZE = 522 +# Is the process running as an OS user with elevated privileges (ie, root)? +# We only know how to determine this for POSIX systems. +superuser = getattr(os, "getuid", lambda: -1)() == 0 EMPTY_CLIENT_CONFIG = config_from_string( "/dev/null", @@ -139,6 +127,42 @@ EMPTY_CLIENT_CONFIG = config_from_string( "" ) +@attr.s +class FakeDisk(object): + """ + Just enough of a disk to be able to report free / used information. + """ + total = attr.ib() + used = attr.ib() + + def use(self, num_bytes): + """ + Mark some amount of available bytes as used (and no longer available). + + :param int num_bytes: The number of bytes to use. + + :raise NoSpace: If there are fewer bytes available than ``num_bytes``. 
+ + :return: ``None`` + """ + if num_bytes > self.total - self.used: + raise NoSpace() + self.used += num_bytes + + @property + def available(self): + return self.total - self.used + + def get_disk_stats(self, whichdir, reserved_space): + avail = self.available + return { + 'total': self.total, + 'free_for_root': avail, + 'free_for_nonroot': avail, + 'used': self.used, + 'avail': avail - reserved_space, + } + @attr.s class MemoryIntroducerClient(object): @@ -153,8 +177,8 @@ class MemoryIntroducerClient(object): sequencer = attr.ib() cache_filepath = attr.ib() - subscribed_to = attr.ib(default=attr.Factory(list)) - published_announcements = attr.ib(default=attr.Factory(list)) + subscribed_to : list[Subscription] = attr.ib(default=attr.Factory(list)) + published_announcements : list[Announcement] = attr.ib(default=attr.Factory(list)) def setServiceParent(self, parent): @@ -262,13 +286,17 @@ class UseNode(object): plugin_config = attr.ib() storage_plugin = attr.ib() basedir = attr.ib(validator=attr.validators.instance_of(FilePath)) - introducer_furl = attr.ib(validator=attr.validators.instance_of(native_str), + introducer_furl = attr.ib(validator=attr.validators.instance_of(str), converter=six.ensure_str) - node_config = attr.ib(default=attr.Factory(dict)) + node_config : dict[bytes,bytes] = attr.ib(default=attr.Factory(dict)) config = attr.ib(default=None) + reactor = attr.ib(default=None) def setUp(self): + self.assigner = SameProcessStreamEndpointAssigner() + self.assigner.setUp() + def format_config_items(config): return "\n".join( " = ".join((key, value)) @@ -279,34 +307,54 @@ class UseNode(object): if self.plugin_config is None: plugin_config_section = "" else: - plugin_config_section = """ -[storageclient.plugins.{storage_plugin}] -{config} -""".format( - storage_plugin=self.storage_plugin, - config=format_config_items(self.plugin_config), -) + plugin_config_section = ( + "[storageclient.plugins.{storage_plugin}]\n" + "{config}\n").format( + 
storage_plugin=self.storage_plugin, + config=format_config_items(self.plugin_config), + ) + + if self.storage_plugin is None: + plugins = "" + else: + plugins = "storage.plugins = {}".format(self.storage_plugin) write_introducer( self.basedir, "default", self.introducer_furl, ) + + node_config = self.node_config.copy() + if "tub.port" not in node_config: + if "tub.location" in node_config: + raise ValueError( + "UseNode fixture does not support specifying tub.location " + "without tub.port" + ) + + # Don't use the normal port auto-assignment logic. It produces + # collisions and makes tests fail spuriously. + tub_location, tub_endpoint = self.assigner.assign(self.reactor) + node_config.update({ + "tub.port": tub_endpoint, + "tub.location": tub_location, + }) + self.config = config_from_string( self.basedir.asTextMode().path, "tub.port", -""" -[node] -{node_config} - -[client] -storage.plugins = {storage_plugin} -{plugin_config_section} -""".format( - storage_plugin=self.storage_plugin, - node_config=format_config_items(self.node_config), - plugin_config_section=plugin_config_section, -) + "[node]\n" + "{node_config}\n" + "\n" + "[client]\n" + "{plugins}\n" + "{plugin_config_section}\n" + .format( + plugins=plugins, + node_config=format_config_items(node_config), + plugin_config_section=plugin_config_section, + ) ) def create_node(self): @@ -316,7 +364,7 @@ storage.plugins = {storage_plugin} ) def cleanUp(self): - pass + self.assigner.tearDown() def getDetails(self): @@ -582,15 +630,28 @@ class FakeMutableFileNode(object): # type: ignore # incomplete implementation MUTABLE_SIZELIMIT = 10000 - def __init__(self, storage_broker, secret_holder, - default_encoding_parameters, history, all_contents): + _public_key: rsa.PublicKey | None + _private_key: rsa.PrivateKey | None + + def __init__(self, + storage_broker, + secret_holder, + default_encoding_parameters, + history, + all_contents, + keypair: tuple[rsa.PublicKey, rsa.PrivateKey] | None + ): self.all_contents = 
all_contents - self.file_types = {} # storage index => MDMF_VERSION or SDMF_VERSION - self.init_from_cap(make_mutable_file_cap()) + self.file_types: dict[bytes, int] = {} # storage index => MDMF_VERSION or SDMF_VERSION + self.init_from_cap(make_mutable_file_cap(keypair)) self._k = default_encoding_parameters['k'] self._segsize = default_encoding_parameters['max_segment_size'] - def create(self, contents, key_generator=None, keysize=None, - version=SDMF_VERSION): + if keypair is None: + self._public_key = self._private_key = None + else: + self._public_key, self._private_key = keypair + + def create(self, contents, version=SDMF_VERSION): if version == MDMF_VERSION and \ isinstance(self.my_uri, (uri.ReadonlySSKFileURI, uri.WriteableSSKFileURI)): @@ -786,9 +847,28 @@ class FakeMutableFileNode(object): # type: ignore # incomplete implementation return defer.succeed(consumer) -def make_mutable_file_cap(): - return uri.WriteableSSKFileURI(writekey=os.urandom(16), - fingerprint=os.urandom(32)) +def make_mutable_file_cap( + keypair: tuple[rsa.PublicKey, rsa.PrivateKey] | None = None, +) -> uri.WriteableSSKFileURI: + """ + Create a local representation of a mutable object. + + :param keypair: If None, a random keypair will be generated for the new + object. Otherwise, this is the keypair for that object. 
+ """ + if keypair is None: + writekey = os.urandom(16) + fingerprint = os.urandom(32) + else: + pubkey, privkey = keypair + pubkey_s = rsa.der_string_from_verifying_key(pubkey) + privkey_s = rsa.der_string_from_signing_key(privkey) + writekey = hashutil.ssk_writekey_hash(privkey_s) + fingerprint = hashutil.ssk_pubkey_fingerprint_hash(pubkey_s) + + return uri.WriteableSSKFileURI( + writekey=writekey, fingerprint=fingerprint, + ) def make_mdmf_mutable_file_cap(): return uri.WriteableMDMFFileURI(writekey=os.urandom(16), @@ -818,7 +898,7 @@ def create_mutable_filenode(contents, mdmf=False, all_contents=None): encoding_params['max_segment_size'] = 128*1024 filenode = FakeMutableFileNode(None, None, encoding_params, None, - all_contents) + all_contents, None) filenode.init_from_cap(cap) if mdmf: filenode.create(MutableData(contents), version=MDMF_VERSION) @@ -1068,7 +1148,7 @@ def _corrupt_offset_of_uri_extension_to_force_short_read(data, debug=False): def _corrupt_mutable_share_data(data, debug=False): prefix = data[:32] - assert prefix == MutableShareFile.MAGIC, "This function is designed to corrupt mutable shares of v1, and the magic number doesn't look right: %r vs %r" % (prefix, MutableShareFile.MAGIC) + assert MutableShareFile.is_valid_header(prefix), "This function is designed to corrupt mutable shares of v1, and the magic number doesn't look right: %r vs %r" % (prefix, MutableShareFile.MAGIC) data_offset = MutableShareFile.DATA_OFFSET sharetype = data[data_offset:data_offset+1] assert sharetype == b"\x00", "non-SDMF mutable shares not supported" @@ -1213,6 +1293,29 @@ class ConstantAddresses(object): raise Exception("{!r} has no client endpoint.") return self._handler +@contextmanager +def disable_modules(*names): + """ + A context manager which makes modules appear to be missing while it is + active. + + :param *names: The names of the modules to disappear. Only top-level + modules are supported (that is, "." is not allowed in any names). 
+ This is an implementation shortcoming which could be lifted if + desired. + """ + if any("." in name for name in names): + raise ValueError("Names containing '.' are not supported.") + missing = object() + modules = list(sys.modules.get(n, missing) for n in names) + for n in names: + sys.modules[n] = None + yield + for n, original in zip(names, modules): + if original is missing: + del sys.modules[n] + else: + sys.modules[n] = original class _TestCaseMixin(object): """ @@ -1249,6 +1352,26 @@ class _TestCaseMixin(object): def assertRaises(self, *a, **kw): return self._dummyCase.assertRaises(*a, **kw) + def failUnless(self, *args, **kwargs): + """Backwards compatibility method.""" + self.assertTrue(*args, **kwargs) + + def failIf(self, *args, **kwargs): + """Backwards compatibility method.""" + self.assertFalse(*args, **kwargs) + + def failIfEqual(self, *args, **kwargs): + """Backwards compatibility method.""" + self.assertNotEqual(*args, **kwargs) + + def failUnlessEqual(self, *args, **kwargs): + """Backwards compatibility method.""" + self.assertEqual(*args, **kwargs) + + def failUnlessReallyEqual(self, *args, **kwargs): + """Backwards compatibility method.""" + self.assertReallyEqual(*args, **kwargs) + class SyncTestCase(_TestCaseMixin, TestCase): """ @@ -1304,7 +1427,4 @@ class TrialTestCase(_TrialTestCase): you try to turn that Exception instance into a string. """ - if six.PY2: - if isinstance(msg, six.text_type): - return super(TrialTestCase, self).fail(msg.encode("utf8")) return super(TrialTestCase, self).fail(msg) diff --git a/src/allmydata/test/common_storage.py b/src/allmydata/test/common_storage.py new file mode 100644 index 000000000..7adcafa43 --- /dev/null +++ b/src/allmydata/test/common_storage.py @@ -0,0 +1,60 @@ + +def upload_immutable(storage_server, storage_index, renew_secret, cancel_secret, shares): + """ + Synchronously upload some immutable shares to a ``StorageServer``. 
+ + :param allmydata.storage.server.StorageServer storage_server: The storage + server object to use to perform the upload. + + :param bytes storage_index: The storage index for the immutable shares. + + :param bytes renew_secret: The renew secret for the implicitly created lease. + :param bytes cancel_secret: The cancel secret for the implicitly created lease. + + :param dict[int, bytes] shares: A mapping from share numbers to share data + to upload. The data for all shares must be of the same length. + + :return: ``None`` + """ + already, writers = storage_server.allocate_buckets( + storage_index, + renew_secret, + cancel_secret, + shares.keys(), + len(next(iter(shares.values()))), + ) + for shnum, writer in writers.items(): + writer.write(0, shares[shnum]) + writer.close() + + +def upload_mutable(storage_server, storage_index, secrets, shares): + """ + Synchronously upload some mutable shares to a ``StorageServer``. + + :param allmydata.storage.server.StorageServer storage_server: The storage + server object to use to perform the upload. + + :param bytes storage_index: The storage index for the immutable shares. + + :param secrets: A three-tuple of a write enabler, renew secret, and cancel + secret. + + :param dict[int, bytes] shares: A mapping from share numbers to share data + to upload. + + :return: ``None`` + """ + test_and_write_vectors = { + sharenum: ([], [(0, data)], None) + for sharenum, data + in shares.items() + } + read_vector = [] + + storage_server.slot_testv_and_readv_and_writev( + storage_index, + secrets, + test_and_write_vectors, + read_vector, + ) diff --git a/src/allmydata/test/common_system.py b/src/allmydata/test/common_system.py index 9d14c8642..27c11f660 100644 --- a/src/allmydata/test/common_system.py +++ b/src/allmydata/test/common_system.py @@ -5,22 +5,14 @@ in ``allmydata.test.test_system``. Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - # Don't import bytes since it causes issues on (so far unported) modules on Python 2. - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, dict, list, object, range, max, min, str # noqa: F401 - +from typing import Optional import os from functools import partial from twisted.internet import reactor from twisted.internet import defer from twisted.internet.defer import inlineCallbacks +from twisted.internet.task import deferLater from twisted.application import service from foolscap.api import flushEventualQueue @@ -28,13 +20,18 @@ from foolscap.api import flushEventualQueue from allmydata import client from allmydata.introducer.server import create_introducer from allmydata.util import fileutil, log, pollmixin +from allmydata.util.deferredutil import async_to_deferred +from allmydata.storage import http_client +from allmydata.storage_client import ( + NativeStorageServer, + HTTPNativeStorageServer, +) from twisted.python.filepath import ( FilePath, ) from .common import ( - TEST_RSA_KEY_SIZE, SameProcessStreamEndpointAssigner, ) @@ -643,9 +640,58 @@ def _render_section_values(values): )) +@async_to_deferred +async def spin_until_cleanup_done(value=None, timeout=10): + """ + At the end of the test, spin until the reactor has no more DelayedCalls + and file descriptors (or equivalents) registered. This prevents dirty + reactor errors, while also not hard-coding a fixed amount of time, so it + can finish faster on faster computers. + + There is also a timeout: if it takes more than 10 seconds (by default) for + the remaining reactor state to clean itself up, the presumption is that it + will never get cleaned up and the spinning stops. + + Make sure to run as last thing in tearDown. 
+ """ + def num_fds(): + if hasattr(reactor, "handles"): + # IOCP! + return len(reactor.handles) + else: + # Normal reactor; having internal readers still registered is fine, + # that's not our code. + return len( + set(reactor.getReaders()) - set(reactor._internalReaders) + ) + len(reactor.getWriters()) + + for i in range(timeout * 1000): + # There's a single DelayedCall for AsynchronousDeferredRunTest's + # timeout... + if (len(reactor.getDelayedCalls()) < 2 and num_fds() == 0): + break + await deferLater(reactor, 0.001) + return value + + class SystemTestMixin(pollmixin.PollMixin, testutil.StallMixin): + # If set to True, use Foolscap for storage protocol. If set to False, HTTP + # will be used when possible. If set to None, this suggests a bug in the + # test code. + FORCE_FOOLSCAP_FOR_STORAGE : Optional[bool] = None + + # If True, reduce the timeout on connections: + REDUCE_HTTP_CLIENT_TIMEOUT : bool = True + def setUp(self): + if os.getenv("TAHOE_DEBUG_BLOCKING") == "1": + from .blocking import catch_blocking_in_event_loop + catch_blocking_in_event_loop(self) + + self._http_client_pools = [] + http_client.StorageClientFactory.start_test_mode(self._got_new_http_connection_pool) + self.addCleanup(http_client.StorageClientFactory.stop_test_mode) self.port_assigner = SameProcessStreamEndpointAssigner() self.port_assigner.setUp() self.addCleanup(self.port_assigner.tearDown) @@ -653,10 +699,38 @@ class SystemTestMixin(pollmixin.PollMixin, testutil.StallMixin): self.sparent = service.MultiService() self.sparent.startService() + def _got_new_http_connection_pool(self, pool): + # Make sure the pool closes cached connections quickly: + pool.cachedConnectionTimeout = 0.1 + # Register the pool for shutdown later: + self._http_client_pools.append(pool) + # Disable retries: + pool.retryAutomatically = False + # Make a much more aggressive timeout for connections, we're connecting + # locally after all... 
and also make sure it's lower than the delay we + # add in tearDown, to prevent dirty reactor issues. + getConnection = pool.getConnection + + def getConnectionWithTimeout(*args, **kwargs): + d = getConnection(*args, **kwargs) + d.addTimeout(1, reactor) + return d + + if self.REDUCE_HTTP_CLIENT_TIMEOUT: + pool.getConnection = getConnectionWithTimeout + + def close_idle_http_connections(self): + """Close all HTTP client connections that are just hanging around.""" + return defer.gatherResults( + [pool.closeCachedConnections() for pool in self._http_client_pools] + ) + def tearDown(self): log.msg("shutting down SystemTest services") d = self.sparent.stopService() d.addBoth(flush_but_dont_ignore) + d.addBoth(lambda x: self.close_idle_http_connections().addCallback(lambda _: x)) + d.addBoth(spin_until_cleanup_done) return d def getdir(self, subdir): @@ -672,11 +746,14 @@ class SystemTestMixin(pollmixin.PollMixin, testutil.StallMixin): """ iv_dir = self.getdir("introducer") if not os.path.isdir(iv_dir): - _, port_endpoint = self.port_assigner.assign(reactor) + _, web_port_endpoint = self.port_assigner.assign(reactor) + main_location_hint, main_port_endpoint = self.port_assigner.assign(reactor) introducer_config = ( u"[node]\n" u"nickname = introducer \N{BLACK SMILING FACE}\n" + - u"web.port = {}\n".format(port_endpoint) + u"web.port = {}\n".format(web_port_endpoint) + + u"tub.port = {}\n".format(main_port_endpoint) + + u"tub.location = {}\n".format(main_location_hint) ).encode("utf-8") fileutil.make_dirs(iv_dir) @@ -712,35 +789,44 @@ class SystemTestMixin(pollmixin.PollMixin, testutil.StallMixin): :return: A ``Deferred`` that fires when the nodes have connected to each other. 
""" + self.assertIn( + self.FORCE_FOOLSCAP_FOR_STORAGE, (True, False), + "You forgot to set FORCE_FOOLSCAP_FOR_STORAGE on {}".format(self.__class__) + ) self.numclients = NUMCLIENTS self.introducer = yield self._create_introducer() self.add_service(self.introducer) self.introweb_url = self._get_introducer_web() - yield self._set_up_client_nodes() + yield self._set_up_client_nodes(self.FORCE_FOOLSCAP_FOR_STORAGE) + native_server = next(iter(self.clients[0].storage_broker.get_known_servers())) + if self.FORCE_FOOLSCAP_FOR_STORAGE: + expected_storage_server_class = NativeStorageServer + else: + expected_storage_server_class = HTTPNativeStorageServer + self.assertIsInstance(native_server, expected_storage_server_class) @inlineCallbacks - def _set_up_client_nodes(self): + def _set_up_client_nodes(self, force_foolscap): q = self.introducer self.introducer_furl = q.introducer_url self.clients = [] basedirs = [] for i in range(self.numclients): - basedirs.append((yield self._set_up_client_node(i))) + basedirs.append((yield self._set_up_client_node(i, force_foolscap))) # start clients[0], wait for it's tub to be ready (at which point it # will have registered the helper furl). 
c = yield client.create_client(basedirs[0]) c.setServiceParent(self.sparent) self.clients.append(c) - c.set_default_mutable_keysize(TEST_RSA_KEY_SIZE) with open(os.path.join(basedirs[0],"private","helper.furl"), "r") as f: helper_furl = f.read() self.helper_furl = helper_furl - if self.numclients >= 4: - with open(os.path.join(basedirs[3], 'tahoe.cfg'), 'a+') as f: + if self.numclients >= 2: + with open(os.path.join(basedirs[1], 'tahoe.cfg'), 'a+') as f: f.write( "[client]\n" "helper.furl = {}\n".format(helper_furl) @@ -751,32 +837,33 @@ class SystemTestMixin(pollmixin.PollMixin, testutil.StallMixin): c = yield client.create_client(basedirs[i]) c.setServiceParent(self.sparent) self.clients.append(c) - c.set_default_mutable_keysize(TEST_RSA_KEY_SIZE) log.msg("STARTING") yield self.wait_for_connections() log.msg("CONNECTED") # now find out where the web port was self.webish_url = self.clients[0].getServiceNamed("webish").getURL() - if self.numclients >=4: + if self.numclients >=2: # and the helper-using webport - self.helper_webish_url = self.clients[3].getServiceNamed("webish").getURL() + self.helper_webish_url = self.clients[1].getServiceNamed("webish").getURL() - def _generate_config(self, which, basedir): + def _generate_config(self, which, basedir, force_foolscap=False): config = {} - except1 = set(range(self.numclients)) - {1} + allclients = set(range(self.numclients)) + except1 = allclients - {1} feature_matrix = { ("client", "nickname"): except1, - # client 1 has to auto-assign an address. - ("node", "tub.port"): except1, - ("node", "tub.location"): except1, + # Auto-assigning addresses is extremely failure prone and not + # amenable to automated testing in _this_ manner. 
+ ("node", "tub.port"): allclients, + ("node", "tub.location"): allclients, # client 0 runs a webserver and a helper - # client 3 runs a webserver but no helper - ("node", "web.port"): {0, 3}, + # client 1 runs a webserver but no helper + ("node", "web.port"): {0, 1}, ("node", "timeout.keepalive"): {0}, - ("node", "timeout.disconnect"): {3}, + ("node", "timeout.disconnect"): {1}, ("helper", "enabled"): {0}, } @@ -789,6 +876,8 @@ class SystemTestMixin(pollmixin.PollMixin, testutil.StallMixin): sethelper = partial(setconf, config, which, "helper") setnode("nickname", u"client %d \N{BLACK SMILING FACE}" % (which,)) + setconf(config, which, "storage", "force_foolscap", str(force_foolscap)) + setconf(config, which, "client", "force_foolscap", str(force_foolscap)) tub_location_hint, tub_port_endpoint = self.port_assigner.assign(reactor) setnode("tub.port", tub_port_endpoint) @@ -806,17 +895,16 @@ class SystemTestMixin(pollmixin.PollMixin, testutil.StallMixin): " furl: %s\n") % self.introducer_furl iyaml_fn = os.path.join(basedir, "private", "introducers.yaml") fileutil.write(iyaml_fn, iyaml) - return _render_config(config) - def _set_up_client_node(self, which): + def _set_up_client_node(self, which, force_foolscap): basedir = self.getdir("client%d" % (which,)) fileutil.make_dirs(os.path.join(basedir, "private")) if len(SYSTEM_TEST_CERTS) > (which + 1): f = open(os.path.join(basedir, "private", "node.pem"), "w") f.write(SYSTEM_TEST_CERTS[which + 1]) f.close() - config = self._generate_config(which, basedir) + config = self._generate_config(which, basedir, force_foolscap) fileutil.write(os.path.join(basedir, 'tahoe.cfg'), config) return basedir @@ -833,7 +921,6 @@ class SystemTestMixin(pollmixin.PollMixin, testutil.StallMixin): def _stopped(res): new_c = yield client.create_client(self.getdir("client%d" % num)) self.clients[num] = new_c - new_c.set_default_mutable_keysize(TEST_RSA_KEY_SIZE) new_c.setServiceParent(self.sparent) d.addCallback(_stopped) d.addCallback(lambda 
res: self.wait_for_connections()) @@ -852,7 +939,13 @@ class SystemTestMixin(pollmixin.PollMixin, testutil.StallMixin): # connection-lost code basedir = FilePath(self.getdir("client%d" % client_num)) basedir.makedirs() - config = "[client]\n" + config = ( + "[node]\n" + "tub.location = {}\n" + "tub.port = {}\n" + "[client]\n" + ).format(*self.port_assigner.assign(reactor)) + if helper_furl: config += "helper.furl = %s\n" % helper_furl basedir.child("tahoe.cfg").setContent(config.encode("utf-8")) @@ -866,7 +959,6 @@ class SystemTestMixin(pollmixin.PollMixin, testutil.StallMixin): c = yield client.create_client(basedir.path) self.clients.append(c) - c.set_default_mutable_keysize(TEST_RSA_KEY_SIZE) self.numclients += 1 if add_to_sparent: c.setServiceParent(self.sparent) diff --git a/src/allmydata/test/common_util.py b/src/allmydata/test/common_util.py index d2d20916d..d52cb8afa 100644 --- a/src/allmydata/test/common_util.py +++ b/src/allmydata/test/common_util.py @@ -1,15 +1,6 @@ """ Ported to Python 3. 
""" -from __future__ import print_function -from __future__ import absolute_import -from __future__ import division -from __future__ import unicode_literals - -from future.utils import PY2, PY3, bchr, binary_type -from future.builtins import str as future_str -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, dict, list, object, range, str, max, min # noqa: F401 import os import sys @@ -19,8 +10,6 @@ from functools import ( partial, ) from random import randrange -if PY2: - from StringIO import StringIO from io import ( TextIOWrapper, BytesIO, @@ -34,6 +23,9 @@ from ..util.assertutil import precondition from ..scripts import runner from allmydata.util.encodingutil import unicode_platform, get_filesystem_encoding, argv_type, unicode_to_argv +def bchr(s): + return bytes([s]) + def skip_if_cannot_represent_filename(u): precondition(isinstance(u, str)) @@ -69,13 +61,16 @@ def run_cli_native(verb, *args, **kwargs): Most code should prefer ``run_cli_unicode`` which deals with all the necessary encoding considerations. - :param native_str verb: The command to run. For example, + :param runner.Options options: The options instance to use to parse the + given arguments. + + :param str verb: The command to run. For example, ``"create-node"``. - :param [native_str] args: The arguments to pass to the command. For + :param [str] args: The arguments to pass to the command. For example, ``("--hostname=localhost",)``. - :param [native_str] nodeargs: Extra arguments to pass to the Tahoe + :param [str] nodeargs: Extra arguments to pass to the Tahoe executable before ``verb``. :param bytes|unicode stdin: Text or bytes to pass to the command via stdin. @@ -88,6 +83,7 @@ def run_cli_native(verb, *args, **kwargs): matching native behavior. If True, stdout/stderr are returned as bytes. 
""" + options = kwargs.pop("options", runner.Options()) nodeargs = kwargs.pop("nodeargs", []) encoding = kwargs.pop("encoding", None) or getattr(sys.stdout, "encoding") or "utf-8" return_bytes = kwargs.pop("return_bytes", False) @@ -103,22 +99,7 @@ def run_cli_native(verb, *args, **kwargs): ) argv = ["tahoe"] + nodeargs + [verb] + list(args) stdin = kwargs.get("stdin", "") - if PY2: - # The original behavior, the Python 2 behavior, is to accept either - # bytes or unicode and try to automatically encode or decode as - # necessary. This works okay for ASCII and if LANG is set - # appropriately. These aren't great constraints so we should move - # away from this behavior. - # - # The encoding attribute doesn't change StringIO behavior on Python 2, - # but it's there for realism of the emulation. - stdin = StringIO(stdin) - stdin.encoding = encoding - stdout = StringIO() - stdout.encoding = encoding - stderr = StringIO() - stderr.encoding = encoding - else: + if True: # The new behavior, the Python 3 behavior, is to accept unicode and # encode it using a specific encoding. 
For older versions of Python 3, # the encoding is determined from LANG (bad) but for newer Python 3, @@ -130,29 +111,31 @@ def run_cli_native(verb, *args, **kwargs): stdin = TextIOWrapper(BytesIO(stdin), encoding) stdout = TextIOWrapper(BytesIO(), encoding) stderr = TextIOWrapper(BytesIO(), encoding) + options.stdin = stdin d = defer.succeed(argv) d.addCallback( partial( runner.parse_or_exit, - runner.Options(), + options, ), stdout=stdout, stderr=stderr, ) d.addCallback( runner.dispatch, + reactor, stdin=stdin, stdout=stdout, stderr=stderr, ) def _done(rc, stdout=stdout, stderr=stderr): - if return_bytes and PY3: + if return_bytes: stdout = stdout.buffer stderr = stderr.buffer return 0, _getvalue(stdout), _getvalue(stderr) def _err(f, stdout=stdout, stderr=stderr): f.trap(SystemExit) - if return_bytes and PY3: + if return_bytes: stdout = stdout.buffer stderr = stderr.buffer return f.value.code, _getvalue(stdout), _getvalue(stderr) @@ -182,18 +165,14 @@ def run_cli_unicode(verb, argv, nodeargs=None, stdin=None, encoding=None): if nodeargs is None: nodeargs = [] precondition( - all(isinstance(arg, future_str) for arg in [verb] + nodeargs + argv), + all(isinstance(arg, str) for arg in [verb] + nodeargs + argv), "arguments to run_cli_unicode must be unicode", verb=verb, nodeargs=nodeargs, argv=argv, ) codec = encoding or "ascii" - if PY2: - encode = lambda t: None if t is None else t.encode(codec) - else: - # On Python 3 command-line parsing expects Unicode! - encode = lambda t: t + encode = lambda t: t d = run_cli_native( encode(verb), nodeargs=list(encode(arg) for arg in nodeargs), @@ -238,7 +217,7 @@ def flip_bit(good, which): def flip_one_bit(s, offset=0, size=None): """ flip one random bit of the string s, in a byte greater than or equal to offset and less than offset+size. 
""" - precondition(isinstance(s, binary_type)) + precondition(isinstance(s, bytes)) if size is None: size=len(s)-offset i = randrange(offset, offset+size) @@ -250,13 +229,9 @@ def flip_one_bit(s, offset=0, size=None): class ReallyEqualMixin(object): def failUnlessReallyEqual(self, a, b, msg=None): self.assertEqual(a, b, msg) - # Make sure unicode strings are a consistent type. Specifically there's - # Future newstr (backported Unicode type) vs. Python 2 native unicode - # type. They're equal, and _logically_ the same type, but have - # different types in practice. - if a.__class__ == future_str: + if a.__class__ == str: a = str(a) - if b.__class__ == future_str: + if b.__class__ == str: b = str(b) self.assertEqual(type(a), type(b), "a :: %r (%s), b :: %r (%s), %r" % (a, type(a), b, type(b), msg)) diff --git a/src/allmydata/test/common_web.py b/src/allmydata/test/common_web.py index bd55a9fe9..1f8a58d96 100644 --- a/src/allmydata/test/common_web.py +++ b/src/allmydata/test/common_web.py @@ -1,14 +1,6 @@ """ Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 from six import ensure_str diff --git a/src/allmydata/test/data/lease_checker.history.txt b/src/allmydata/test/data/lease_checker.history.txt new file mode 100644 index 000000000..0c27a5ad0 --- /dev/null +++ b/src/allmydata/test/data/lease_checker.history.txt @@ -0,0 +1,501 @@ +(dp0 +I363 +(dp1 +Vconfigured-expiration-mode +p2 +(S'age' +p3 +NN(S'immutable' +p4 +S'mutable' +p5 +tp6 +tp7 +sVexpiration-enabled +p8 +I00 +sVleases-per-share-histogram +p9 +(dp10 +I1 +I39774 +ssVlease-age-histogram +p11 +(lp12 +(I0 +I86400 +I3125 +tp13 +a(I345600 +I432000 +I4175 +tp14 +a(I950400 +I1036800 +I141 +tp15 +a(I1036800 +I1123200 +I345 +tp16 +a(I1123200 +I1209600 +I81 +tp17 +a(I1296000 +I1382400 +I1832 +tp18 +a(I1555200 +I1641600 +I390 +tp19 +a(I1728000 +I1814400 +I12 +tp20 +a(I2073600 +I2160000 +I84 +tp21 +a(I2160000 +I2246400 +I228 +tp22 +a(I2246400 +I2332800 +I75 +tp23 +a(I2592000 +I2678400 +I644 +tp24 +a(I2678400 +I2764800 +I273 +tp25 +a(I2764800 +I2851200 +I94 +tp26 +a(I2851200 +I2937600 +I97 +tp27 +a(I3196800 +I3283200 +I143 +tp28 +a(I3283200 +I3369600 +I48 +tp29 +a(I4147200 +I4233600 +I374 +tp30 +a(I4320000 +I4406400 +I534 +tp31 +a(I5270400 +I5356800 +I1005 +tp32 +a(I6739200 +I6825600 +I8704 +tp33 +a(I6825600 +I6912000 +I3986 +tp34 +a(I6912000 +I6998400 +I7592 +tp35 +a(I6998400 +I7084800 +I2607 +tp36 +a(I7689600 +I7776000 +I35 +tp37 +a(I8035200 +I8121600 +I33 +tp38 +a(I8294400 +I8380800 +I54 +tp39 +a(I8640000 +I8726400 +I45 +tp40 +a(I8726400 +I8812800 +I27 +tp41 +a(I8812800 +I8899200 +I12 +tp42 +a(I9763200 +I9849600 +I77 +tp43 +a(I9849600 +I9936000 +I91 +tp44 +a(I9936000 +I10022400 +I1210 +tp45 +a(I10022400 
+I10108800 +I45 +tp46 +a(I10108800 +I10195200 +I186 +tp47 +a(I10368000 +I10454400 +I113 +tp48 +a(I10972800 +I11059200 +I21 +tp49 +a(I11232000 +I11318400 +I5 +tp50 +a(I11318400 +I11404800 +I19 +tp51 +a(I11404800 +I11491200 +I238 +tp52 +a(I11491200 +I11577600 +I159 +tp53 +a(I11750400 +I11836800 +I1 +tp54 +a(I11836800 +I11923200 +I32 +tp55 +a(I11923200 +I12009600 +I192 +tp56 +a(I12009600 +I12096000 +I222 +tp57 +a(I12096000 +I12182400 +I18 +tp58 +a(I12182400 +I12268800 +I224 +tp59 +a(I12268800 +I12355200 +I9 +tp60 +a(I12355200 +I12441600 +I9 +tp61 +a(I12441600 +I12528000 +I10 +tp62 +a(I12528000 +I12614400 +I6 +tp63 +a(I12614400 +I12700800 +I6 +tp64 +a(I12700800 +I12787200 +I18 +tp65 +a(I12787200 +I12873600 +I6 +tp66 +a(I12873600 +I12960000 +I62 +tp67 +asVcycle-start-finish-times +p68 +(F1634446505.241972 +F1634446666.055401 +tp69 +sVspace-recovered +p70 +(dp71 +Vexamined-buckets-immutable +p72 +I17896 +sVconfigured-buckets-mutable +p73 +I0 +sVexamined-shares-mutable +p74 +I2473 +sVoriginal-shares-mutable +p75 +I1185 +sVconfigured-buckets-immutable +p76 +I0 +sVoriginal-shares-immutable +p77 +I27457 +sVoriginal-diskbytes-immutable +p78 +I2810982400 +sVexamined-shares-immutable +p79 +I37301 +sVoriginal-buckets +p80 +I14047 +sVactual-shares-immutable +p81 +I0 +sVconfigured-shares +p82 +I0 +sVoriginal-buckets-mutable +p83 +I691 +sVactual-diskbytes +p84 +I4096 +sVactual-shares-mutable +p85 +I0 +sVconfigured-buckets +p86 +I1 +sVexamined-buckets-unknown +p87 +I14 +sVactual-sharebytes +p88 +I0 +sVoriginal-shares +p89 +I28642 +sVactual-buckets-immutable +p90 +I0 +sVoriginal-sharebytes +p91 +I2695552941 +sVexamined-sharebytes-immutable +p92 +I2754798505 +sVactual-shares +p93 +I0 +sVactual-sharebytes-immutable +p94 +I0 +sVoriginal-diskbytes +p95 +I2818981888 +sVconfigured-diskbytes-mutable +p96 +I0 +sVconfigured-sharebytes-immutable +p97 +I0 +sVconfigured-shares-mutable +p98 +I0 +sVactual-diskbytes-immutable +p99 +I0 +sVconfigured-diskbytes-immutable +p100 +I0 
+sVoriginal-diskbytes-mutable +p101 +I7995392 +sVactual-sharebytes-mutable +p102 +I0 +sVconfigured-sharebytes +p103 +I0 +sVexamined-shares +p104 +I39774 +sVactual-diskbytes-mutable +p105 +I0 +sVactual-buckets +p106 +I1 +sVoriginal-buckets-immutable +p107 +I13355 +sVconfigured-sharebytes-mutable +p108 +I0 +sVexamined-sharebytes +p109 +I2763646972 +sVoriginal-sharebytes-immutable +p110 +I2692076909 +sVoriginal-sharebytes-mutable +p111 +I3476032 +sVactual-buckets-mutable +p112 +I0 +sVexamined-buckets-mutable +p113 +I1286 +sVconfigured-shares-immutable +p114 +I0 +sVexamined-diskbytes +p115 +I2854801408 +sVexamined-diskbytes-mutable +p116 +I12161024 +sVexamined-sharebytes-mutable +p117 +I8848467 +sVexamined-buckets +p118 +I19197 +sVconfigured-diskbytes +p119 +I4096 +sVexamined-diskbytes-immutable +p120 +I2842640384 +ssVcorrupt-shares +p121 +(lp122 +(V2dn6xnlnsqwtnapwxfdivpm3s4 +p123 +I3 +tp124 +a(g123 +I0 +tp125 +a(V2rrzthwsrrxolevmwdvbdy3rqi +p126 +I3 +tp127 +a(g126 +I0 +tp128 +a(V2skfngcto6h7eqmn4uo7ntk3ne +p129 +I3 +tp130 +a(g129 +I0 +tp131 +a(V32d5swqpqx2mwix7xmqzvhdwje +p132 +I3 +tp133 +a(g132 +I0 +tp134 +a(V5mmayp66yflmpon3o6unsnbaca +p135 +I3 +tp136 +a(g135 +I0 +tp137 +a(V6ixhpvbtre7fnrl6pehlrlflc4 +p138 +I3 +tp139 +a(g138 +I0 +tp140 +a(Vewzhvswjsz4vp2bqkb6mi3bz2u +p141 +I3 +tp142 +a(g141 +I0 +tp143 +a(Vfu7pazf6ogavkqj6z4q5qqex3u +p144 +I3 +tp145 +a(g144 +I0 +tp146 +a(Vhbyjtqvpcimwxiyqbcbbdn2i4a +p147 +I3 +tp148 +a(g147 +I0 +tp149 +a(Vpmcjbdkbjdl26k3e6yja77femq +p150 +I3 +tp151 +a(g150 +I0 +tp152 +a(Vr6swof4v2uttbiiqwj5pi32cm4 +p153 +I3 +tp154 +a(g153 +I0 +tp155 +a(Vt45v5akoktf53evc2fi6gwnv6y +p156 +I3 +tp157 +a(g156 +I0 +tp158 +a(Vy6zb4faar3rdvn3e6pfg4wlotm +p159 +I3 +tp160 +a(g159 +I0 +tp161 +a(Vz3yghutvqoqbchjao4lndnrh3a +p162 +I3 +tp163 +a(g162 +I0 +tp164 +ass. 
\ No newline at end of file diff --git a/src/allmydata/test/data/lease_checker.state.txt b/src/allmydata/test/data/lease_checker.state.txt new file mode 100644 index 000000000..b32554434 --- /dev/null +++ b/src/allmydata/test/data/lease_checker.state.txt @@ -0,0 +1,545 @@ +(dp1 +S'last-complete-prefix' +p2 +NsS'version' +p3 +I1 +sS'current-cycle-start-time' +p4 +F1635003106.611748 +sS'last-cycle-finished' +p5 +I312 +sS'cycle-to-date' +p6 +(dp7 +Vleases-per-share-histogram +p8 +(dp9 +I1 +I36793 +sI2 +I1 +ssVspace-recovered +p10 +(dp11 +Vexamined-buckets-immutable +p12 +I17183 +sVconfigured-buckets-mutable +p13 +I0 +sVexamined-shares-mutable +p14 +I1796 +sVoriginal-shares-mutable +p15 +I1563 +sVconfigured-buckets-immutable +p16 +I0 +sVoriginal-shares-immutable +p17 +I27926 +sVoriginal-diskbytes-immutable +p18 +I431149056 +sVexamined-shares-immutable +p19 +I34998 +sVoriginal-buckets +p20 +I14661 +sVactual-shares-immutable +p21 +I0 +sVconfigured-shares +p22 +I0 +sVoriginal-buckets-immutable +p23 +I13761 +sVactual-diskbytes +p24 +I4096 +sVactual-shares-mutable +p25 +I0 +sVconfigured-buckets +p26 +I1 +sVexamined-buckets-unknown +p27 +I14 +sVactual-sharebytes +p28 +I0 +sVoriginal-shares +p29 +I29489 +sVoriginal-sharebytes +p30 +I312664812 +sVexamined-sharebytes-immutable +p31 +I383801602 +sVactual-shares +p32 +I0 +sVactual-sharebytes-immutable +p33 +I0 +sVoriginal-diskbytes +p34 +I441643008 +sVconfigured-diskbytes-mutable +p35 +I0 +sVconfigured-sharebytes-immutable +p36 +I0 +sVconfigured-shares-mutable +p37 +I0 +sVactual-diskbytes-immutable +p38 +I0 +sVconfigured-diskbytes-immutable +p39 +I0 +sVoriginal-diskbytes-mutable +p40 +I10489856 +sVactual-sharebytes-mutable +p41 +I0 +sVconfigured-sharebytes +p42 +I0 +sVexamined-shares +p43 +I36794 +sVactual-diskbytes-mutable +p44 +I0 +sVactual-buckets +p45 +I1 +sVoriginal-buckets-mutable +p46 +I899 +sVconfigured-sharebytes-mutable +p47 +I0 +sVexamined-sharebytes +p48 +I390369660 +sVoriginal-sharebytes-immutable +p49 +I308125753 
+sVoriginal-sharebytes-mutable +p50 +I4539059 +sVactual-buckets-mutable +p51 +I0 +sVexamined-diskbytes-mutable +p52 +I9154560 +sVexamined-buckets-mutable +p53 +I1043 +sVconfigured-shares-immutable +p54 +I0 +sVexamined-diskbytes +p55 +I476598272 +sVactual-buckets-immutable +p56 +I0 +sVexamined-sharebytes-mutable +p57 +I6568058 +sVexamined-buckets +p58 +I18241 +sVconfigured-diskbytes +p59 +I4096 +sVexamined-diskbytes-immutable +p60 +I467443712 +ssVcorrupt-shares +p61 +(lp62 +(V2dn6xnlnsqwtnapwxfdivpm3s4 +p63 +I4 +tp64 +a(g63 +I1 +tp65 +a(V2rrzthwsrrxolevmwdvbdy3rqi +p66 +I4 +tp67 +a(g66 +I1 +tp68 +a(V2skfngcto6h7eqmn4uo7ntk3ne +p69 +I4 +tp70 +a(g69 +I1 +tp71 +a(V32d5swqpqx2mwix7xmqzvhdwje +p72 +I4 +tp73 +a(g72 +I1 +tp74 +a(V5mmayp66yflmpon3o6unsnbaca +p75 +I4 +tp76 +a(g75 +I1 +tp77 +a(V6ixhpvbtre7fnrl6pehlrlflc4 +p78 +I4 +tp79 +a(g78 +I1 +tp80 +a(Vewzhvswjsz4vp2bqkb6mi3bz2u +p81 +I4 +tp82 +a(g81 +I1 +tp83 +a(Vfu7pazf6ogavkqj6z4q5qqex3u +p84 +I4 +tp85 +a(g84 +I1 +tp86 +a(Vhbyjtqvpcimwxiyqbcbbdn2i4a +p87 +I4 +tp88 +a(g87 +I1 +tp89 +a(Vpmcjbdkbjdl26k3e6yja77femq +p90 +I4 +tp91 +a(g90 +I1 +tp92 +a(Vr6swof4v2uttbiiqwj5pi32cm4 +p93 +I4 +tp94 +a(g93 +I1 +tp95 +a(Vt45v5akoktf53evc2fi6gwnv6y +p96 +I4 +tp97 +a(g96 +I1 +tp98 +a(Vy6zb4faar3rdvn3e6pfg4wlotm +p99 +I4 +tp100 +a(g99 +I1 +tp101 +a(Vz3yghutvqoqbchjao4lndnrh3a +p102 +I4 +tp103 +a(g102 +I1 +tp104 +asVlease-age-histogram +p105 +(dp106 +(I45619200 +I45705600 +tp107 +I4 +s(I12441600 +I12528000 +tp108 +I78 +s(I11923200 +I12009600 +tp109 +I89 +s(I33436800 +I33523200 +tp110 +I7 +s(I37411200 +I37497600 +tp111 +I4 +s(I38361600 +I38448000 +tp112 +I5 +s(I4665600 +I4752000 +tp113 +I256 +s(I11491200 +I11577600 +tp114 +I20 +s(I10713600 +I10800000 +tp115 +I183 +s(I42076800 +I42163200 +tp116 +I4 +s(I47865600 +I47952000 +tp117 +I7 +s(I3110400 +I3196800 +tp118 +I328 +s(I5788800 +I5875200 +tp119 +I954 +s(I9331200 +I9417600 +tp120 +I12 +s(I7430400 +I7516800 +tp121 +I7228 +s(I1555200 +I1641600 +tp122 +I492 +s(I37929600 +I38016000 +tp123 
+I3 +s(I38880000 +I38966400 +tp124 +I3 +s(I12528000 +I12614400 +tp125 +I193 +s(I10454400 +I10540800 +tp126 +I1239 +s(I11750400 +I11836800 +tp127 +I7 +s(I950400 +I1036800 +tp128 +I4435 +s(I44409600 +I44496000 +tp129 +I13 +s(I12787200 +I12873600 +tp130 +I218 +s(I10368000 +I10454400 +tp131 +I117 +s(I3283200 +I3369600 +tp132 +I86 +s(I7516800 +I7603200 +tp133 +I993 +s(I42336000 +I42422400 +tp134 +I33 +s(I46310400 +I46396800 +tp135 +I1 +s(I39052800 +I39139200 +tp136 +I51 +s(I7603200 +I7689600 +tp137 +I2004 +s(I10540800 +I10627200 +tp138 +I16 +s(I36374400 +I36460800 +tp139 +I3 +s(I3369600 +I3456000 +tp140 +I79 +s(I12700800 +I12787200 +tp141 +I25 +s(I4838400 +I4924800 +tp142 +I386 +s(I10972800 +I11059200 +tp143 +I122 +s(I8812800 +I8899200 +tp144 +I57 +s(I38966400 +I39052800 +tp145 +I61 +s(I3196800 +I3283200 +tp146 +I628 +s(I9244800 +I9331200 +tp147 +I73 +s(I30499200 +I30585600 +tp148 +I5 +s(I12009600 +I12096000 +tp149 +I329 +s(I12960000 +I13046400 +tp150 +I8 +s(I12614400 +I12700800 +tp151 +I210 +s(I3801600 +I3888000 +tp152 +I32 +s(I10627200 +I10713600 +tp153 +I43 +s(I44928000 +I45014400 +tp154 +I2 +s(I8208000 +I8294400 +tp155 +I38 +s(I8640000 +I8726400 +tp156 +I32 +s(I7344000 +I7430400 +tp157 +I12689 +s(I49075200 +I49161600 +tp158 +I19 +s(I2764800 +I2851200 +tp159 +I76 +s(I2592000 +I2678400 +tp160 +I40 +s(I2073600 +I2160000 +tp161 +I388 +s(I37497600 +I37584000 +tp162 +I11 +s(I1641600 +I1728000 +tp163 +I78 +s(I12873600 +I12960000 +tp164 +I5 +s(I1814400 +I1900800 +tp165 +I1860 +s(I40176000 +I40262400 +tp166 +I1 +s(I3715200 +I3801600 +tp167 +I104 +s(I2332800 +I2419200 +tp168 +I12 +s(I2678400 +I2764800 +tp169 +I278 +s(I12268800 +I12355200 +tp170 +I2 +s(I28771200 +I28857600 +tp171 +I6 +s(I41990400 +I42076800 +tp172 +I10 +sssS'last-complete-bucket' +p173 +NsS'current-cycle' +p174 +Ns. 
\ No newline at end of file diff --git a/src/allmydata/test/data/openssl-rsa-2048-2.txt b/src/allmydata/test/data/openssl-rsa-2048-2.txt new file mode 100644 index 000000000..dd3174209 --- /dev/null +++ b/src/allmydata/test/data/openssl-rsa-2048-2.txt @@ -0,0 +1,27 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIEowIBAAKCAQEAygMjLBKayDEioOZap2syJhUlqI7Dkk4zV5TfVxlQFO7bR410 +eJRJY1rHGIeZxQPjytsSJvqlYEJrvvVNdhi6XN/6NA3RFL6pDTHkYyM3qbrXqlYC +HUlkS2JAZzIFRizl6nG11yIbHjPsoG+vGSjGSzVIiOP4NeIssYLpoASTIppdZxy+ +syZ6zSmPhZu7W9X73aupLjFrIZpjeKfO2+GfUwEzAH0HckLIgJpQ+vK3sqbSik/2 +1oZK33M8uvtdmba7D3uJXmxWMTJ7oyFLDpDOMl7HSUv1lZY2O2qiDPYfGDUM1BRp +6blxE+BA2INr9NO4A4H8pzhikFnaFnkpH/AxowIDAQABAoIBABprXJ8386w42NmI +JtT8bPuUCm/H9AXfWlGa87aVZebG8kCiXFgktJBc3+ryWQbuIk12ZyJX52b2aNb5 +h97pDv50gGlsYSrAYKWMH91jTrVQ7UGmq/IelhJR0DBu10e9OXh21JxFJpzFl63H +zXOR5JUTa+ATSHPrl4LDp0A5OPDuWbBWa64yx7gUI9/tljbndplCrPjmIE6+h10M +sqxW5oJpLnZpWc73QQUTuPIr+A7fLgGJYHnyCFUu9OW4ZnxNEI3/wNHPvoxkYuHN +2qVonFESiAx9mBv7JzQ7X2KIB8doY3KL6S7sAKi/i/aP7EDJ9QEtl3BR3M8/XP8E +KJVORWECgYEA8Vbw75+aVMxHUl9BJc1zESxqVvr+R0NBqMO47CBj39sTJkXY37O3 +A7j4dzCorI0NaB7Jr+AI2ZZu9CaR31Y2mhAGbNLBPK8yn0Z7iWyDIqOW1OpMDs35 +h2CI1pFLjx1a3PzhsQdzZ68izWKYBdTs2scaFz/ntaPwwPEwORaMDZECgYEA1kie +YfMRJ2GwzvbR35WvEMhVxhnmA6yuRL15Pkb1WDR3iWGM0ld/u3N4sRVCx1nU4wk/ +MMqCRdm4JaxqzR/hl8+/sp3Aai15ecqR+F+ecwbbB2XKVHfi1nqClivYnB+GgCh1 +bQYUd9LT80sIQdBEW5MBdbMFnOkt+1sSpjf1wfMCgYBAavlyrIJQQhqDdSN5iKY/ +HkDgKKy4rs4W0u9IL7kY5mvtGlWyGFEwcC35+oX7UMcUVKt3A3C5S3sgNi9XkraO +VtqwL20e2pDDjNeqrcku9MVs3YEhrn79UJoV08B8WdSICgPf8eIu+cNrWPbFD7mN +B/oB3K/nfvPjPD2n70nA0QKBgGWJN3NWR9SPV8ZZ8gyt0qxzISGjd/hZxKHR3jeC +TBMlmVbBoIay61WZW6EdX+0yRcvmv8iQzLXoendvgZP8/VqAGGe8lEY7kgoB0LUO +Kfh7USHqO7tWq2fR2TrrP9KKpaLoiOvGK8CzZ7cq4Ji+5QU3XUO2NnypiR5Hg0i7 +z3m9AoGBAIEXtoSR9OTwdmrdIQn3vsaFOkN5pyYfvAvdeZ+7wwMg/ZOwhStwctbI +Um7XqocXU+8f/gjczgLgMJj+zqr+QDH5n4vSTUMPeN0gIugI9UwWnc2rhbRCgDdY +W6SwPQGDuGoUa5PxjggkyevUUmtXvGG9jnkt9kozQOA0lOF1vbw/ +-----END RSA PRIVATE KEY----- diff --git 
a/src/allmydata/test/data/openssl-rsa-2048-3.txt b/src/allmydata/test/data/openssl-rsa-2048-3.txt new file mode 100644 index 000000000..2c423dc1f --- /dev/null +++ b/src/allmydata/test/data/openssl-rsa-2048-3.txt @@ -0,0 +1,27 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIEowIBAAKCAQEAoa9i8v9YIzb+3yRHyXLm4j1eWK9lQc6lFwoQhik8y+joD+5A +v73OlDZAcn6vzlU72vwrJ1f4o54nEVm0rhNrhwCsiHCdxxEDEoqZ8w/19vc4hWj4 +SYwGirhcnyb2ysZSV8v9Lm5HiFe5zZM4jzCzf2rzt0YRlZZj9nhSglaiHZ9BE2e0 +vzOl6GePDz6yS4jbh2RsPsDQtqXNOqZwfGUd+iTsbSxXcm8+rNrT1VAbx6+1Sr0r +aDyc/jp8S1JwJ0ofJLsU3Pb6DYazFf12CNTsrKF1L0hAsbN8v2DSunZIQqQLQGfp +0hnNO9V8q9FjvVu8XY/HhgoTvtESU3vuq+BnIwIDAQABAoIBAGpWDP+/y9mtK8bZ +95SXyx10Ov6crD2xiIY0ilWR/XgmP6lqio8QaDK104D5rOpIyErnmgIQK2iAdTVG +CDyMbSWm3dIGLt5jY9/n5AQltSCtyzCCrvi/7PWC9vd9Csal1DYF5QeKY+VZvMtl +Tcduwj7EunEI1jvJYwkQbUNncsuDi+88/JNwa8DJp1IrR4goxNflGl7mNzfq49re +lhSyezfLSTZKDa3A6sYnNFAAOy82iXZuLXCqKuwRuaiFFilB0R0/egzBSUeBwMJk +sS+SvHHXwv9HsYt4pYiiZFm8HxB4NKYtdpHpvJVJcG9vOXjewnA5YHWVDJsrBfu6 +0kPgbcECgYEA0bqfX2Vc6DizwjWVn9yVlckjQNGTnwf/B9eGW2MgTn6YADe0yjFm +KCtr34hEZc/hv3kBnoLOqSvZJiser8ve3SmwxfmpjEfJdIgA5J5DbCEGBiDm9PMy +0lYsfjykzYykehdasb8f4xd+SPMuTC/CFb1MCTlohex7qn7Xt9IskBECgYEAxVtF +iXwFJPQUil2bSFGnxtaI/8ijypLOkP3CyuVnEcbMt74jDt1hdooRxjQ9VVlg7r7i +EvebPKMukWxdVcQ/38i97oB/oN7MIH0QBCDWTdTQokuNQSEknGLouj6YtLAWRcyJ +9DDENSaGtP42le5dD60hZc732jN09fGxNa6gN/MCgYB5ux98CGJ3q0mzBNUW17q/ +GOLsYXiUitidHZyveIas6M+i+LJn1WpdEG7pbLd+fL2kHEEzVutKx9efTtHd6bAu +oF8pWfLuKFCm4bXa/H1XyocrkXdcX7h0222xy9NAN0zUTK/okW2Zqu4yu2t47xNw ++NGkXPztFsjkugDNgiE5cQKBgQDDy/BqHPORnOIAACw9jF1SpKcYdPsiz5FGQawO +1ZbzCPMzW9y2M6YtD3/gzxUGZv0G/7OUs7h8aTybJBJZM7FXGHZud2ent0J2/Px1 +zAow/3DZgvEp63LCAFL5635ezM/cAbff3r3aKVW9nPOUvf3vvokC01oMTb68/kMc +ihoERwKBgFsoRUrgGPSfG1UZt8BpIXbG/8qfoy/Vy77BRqvJ6ZpdM9RPqdAl7Sih +cdqfxs8w0NVvj+gvM/1CGO0J9lZW2f1J81haIoyUpiITFdoyzLKXLhMSbaF4Y7Hn +yC/N5w3cCLa2LLKoLG8hagFDlXBGSmpT1zgKBk4YxNn6CLdMSzPR +-----END RSA PRIVATE KEY----- diff --git a/src/allmydata/test/data/openssl-rsa-2048-4.txt 
b/src/allmydata/test/data/openssl-rsa-2048-4.txt new file mode 100644 index 000000000..534ae30bc --- /dev/null +++ b/src/allmydata/test/data/openssl-rsa-2048-4.txt @@ -0,0 +1,27 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIEowIBAAKCAQEA2PL5Ry2BGuuUtRJa20WS0fwBOqVIVSXDVuSvZFYTT1Xji19J +q+ohHcFnIIYHAq0zQG+NgNjK5rogY/5TfbwIhfwLufleeAdL9jXTfxan0o/wwFA1 +DAIHcYsTEYI2dfQe4acOLFY6/Hh6iXCbHvSzzUnEmYkgwCAZvc0v/lD8pMnz/6gQ +2nJnAASfFovcAvfr1T+MZzLJGQem3f2IFp1frurQyFmzFRtZMO5B9PDSsFG4yJVf +cz0iSP8wlc9QydImmJGRvu4xEOkx/55B/XaUdb6CIGpCTkLsDOlImvZt9UHDSgXq +qcE/T7SYMIXqbep64tJw9enjomH+n1KVh9UA2wIDAQABAoIBABCSTrQ/J5N010EV +i9cf810S0M03/tRyM/+ZLESPxp3Sw7TLrIbzNWBee5AibLqpnDaZzsc+yBDjusGo +lZwPFt+VJxgnki288PJ3nhYhFuSglhU6izLFnOfxZZ16wsozwYAfEJgWZh8O3N1O +uqqcqndN4TSRIu1KBm1XFQlqCkJT/stzYjO4k1vhgZT4pqhYRdx7q7FAap4v+sNs +Svhm1blvOXlyeumAbFBdGFttpTxIOGRzI1bp00jcLK4rgssTTxNyEiVu4oJhQY/k +0CptSUzpGio8DZ0/8bNnKCkw8YATUWJZQgSmKraRwAYMMR/SZa7WqjEc2KRTj6xQ +pHmYwZECgYEA700a/7ur8+EwTSulLgDveAOtTV0xEbhuq6cJQgNrEp2rbFqie6FX +g/YJKzEpEnUvj/yOzhEcw3CdQDUaxndlqY87QIhUWMcsnfMPsM1FjhmfksR8s3TF +WZNqa0RAKmcRoLohGclSvRV2OVU8+10mLUwJfR86Nl5+auR3LxWLyB8CgYEA6BaR +r+Z7oTlgkdEDVhnQ58Msktv58y28N+VIbYS79bV01jqUUlogm5uTvdvq5nyENXHx +gnK88mVzWYBMk83D01HlOC5DhpspTVEQQG2V/If6KZa56mxiHP3Mab9jLew9w/kA +g6l/04ATSA8g4i2H/Bz0eEyPEBt6o/+SO0Xv38UCgYEAyTTLvrrNmgF922UXPdcL +gp2U2bfBymSIqUuJPTgij0SDHlgWxlyieRImI2ryXdKqayav7BP3W10U2yfLm5RI +pokICPqX8Q2HNkdoqf/uu8xPn9gWAc3tIaQRlp+MVBrVd48IxeXA67tf7FT/MVrg +/rUwRUQ8bfqF0NrIW46COYECgYAYDJamGoT/DNoD4hutZVlvWpsY0LCS0U9qn1ik ++Jcde+MSe9l4uxwb48AocUxi+84bV6ZF9Su9FmQghxnoSu8ay6ar7qdSoGtkNp0v +f+uF0nVKr/Kt5vM3u9jdsFZPoOY5k2jJO9wiB2h4FBE9PqiTqFBw0sYUTjSkH8yA +VdvoXQKBgFqCC8Y82eVf0/ORGTgG/KhZ72WFQKHyAeryvoLuadZ6JAI6qW9U1l9P +18SMnCO+opGN5GH2Qx7gdg17KzWzTW1gnbv0QUPNnnYEJU8VYMelNuKa8tmNgFH7 +inAwsxbbWoR08ai4exzbJrNrLpDRg5ih2wMtknN6D8m+EAvBC/Gj +-----END RSA PRIVATE KEY----- diff --git a/src/allmydata/test/data/openssl-rsa-2048.txt b/src/allmydata/test/data/openssl-rsa-2048.txt new file 
mode 100644 index 000000000..8f989f42c --- /dev/null +++ b/src/allmydata/test/data/openssl-rsa-2048.txt @@ -0,0 +1,28 @@ +-----BEGIN PRIVATE KEY----- +MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDF1MeXulDWFO05 +YXCh8aqNc1dS1ddJRzsti4BOWuDOepUc0oCaSIcC5aR7XJ+vhX7a02mTIwvLcuEH +8sxx0BJU4jCDpRI6aAqaKJxwZx1e6AcVFJDl7vzymhvWhqHuKh0jTvwM2zONWTwV +V8m2PbDdxu0Prwdx+Mt2sDT6xHEhJj5fI/GUDUEdkhLJF6DQSulFRqqd0qP7qcI9 +fSHZbM7MywfzqFUe8J1+tk4fBh2v7gNzN1INpzh2mDtLPAtxr4ZPtEb/0D0U4PsP +CniOHP0U8sF3VY0+K5qoCQr92cLRJvT/vLpQGVNUTFdFrtbqDoFxUCyEH4FUqRDX +2mVrPo2xAgMBAAECggEAA0Ev1y5/1NTPbgytBeIIH3d+v9hwKDbHecVoMwnOVeFJ +BZpONrOToovhAc1NXH2wj4SvwYWfpJ1HR9piDAuLeKlnuUu4ffzfE0gQok4E+v4r +2yg9ZcYBs/NOetAYVwbq960tiv/adFRr71E0WqbfS3fBx8q2L3Ujkkhd98PudUhQ +izbrTvkT7q00OPCWGwgWepMlLEowUWwZehGI0MlbONg7SbRraZZmG586Iy0tpC3e +AM7wC1/ORzFqcRgTIxXizQ5RHL7S0OQPLhbEJbuwPonNjze3p0EP4wNBELZTaVOd +xeA22Py4Bh/d1q3aEgbwR7tLyA8YfEzshTaY6oV8AQKBgQD0uFo8pyWk0AWXfjzn +jV4yYyPWy8pJA6YfAJAST8m7B/JeYgGlfHxTlNZiB40DsJq08tOZv3HAubgMpFIa +reuDxPqo6/Quwdy4Syu+AFhY48KIuwuoegG/L+5qcQLE69r1w71ZV6wUvLmXYX2I +Y6nYz+OdpD1JrMIr6Js60XURsQKBgQDO8yWl7ufIDKMbQpbs0PgUQsH4FtzGcP4J +j/7/8GfhKYt6rPsrojPHUbAi1+25xBVOuhm0Zx2ku2t+xPIMJoS+15EcER1Z2iHZ +Zci9UGpJpUxGcUhG7ETF1HZv0xKHcEOl9eIIOcAP9Vd9DqnGk85gy6ti6MHe/5Tn +IMD36OQ8AQKBgQDwqE7NMM67KnslRNaeG47T3F0FQbm3XehCuqnz6BUJYcI+gQD/ +fdFB3K+LDcPmKgmqAtaGbxdtoPXXMM0xQXHHTrH15rxmMu1dK0dj/TDkkW7gSZko +YHtRSdCbSnGfuBXG9GxD7QzkA8g7j3sE4oXIGoDLqRVAW61DwubMy+jlsQKBgGNB ++Zepi1/Gt+BWQt8YpzPIhRIBnShMf3uEphCJdLlo3K4dE2btKBp8UpeTq0CDDJky +5ytAndYp0jf+K/2p59dEuyOUDdjPp5aGnA446JGkB35tzPW/Uoj0C049FVEChl+u +HBhH4peE285uXv2QXNbOOMh6zKmxOfDVI9iDyhwBAoGBAIXq2Ar0zDXXaL3ncEKo +pXt9BZ8OpJo2pvB1t2VPePOwEQ0wdT+H62fKNY47NiF9+LyS541/ps5Qhv6AmiKJ +Z7I0Vb6+sxQljYH/LNW+wc2T/pIAi/7sNcmnlBtZfoVwt99bk2CyoRALPLWHYCkh +c7Tty2bZzDZy6aCX+FGRt5N/ +-----END PRIVATE KEY----- diff --git a/src/allmydata/test/data/pycryptopp-rsa-1024-priv.txt b/src/allmydata/test/data/pycryptopp-rsa-1024-priv.txt new file mode 100644 
index 000000000..6f5e67950 --- /dev/null +++ b/src/allmydata/test/data/pycryptopp-rsa-1024-priv.txt @@ -0,0 +1 @@ +MIICdQIBADANBgkqhkiG9w0BAQEFAASCAl8wggJbAgEAAoGBAJLEAfZueLuT4vUQ1+c8ZM9dJ/LA29CYgA5toaMklQjbVQ2Skywvw1wEkRjhMpjQAx5+lpLTE2xCtqtfkHooMRNnquOxoh0o1Xya60jUHze7VB5QMV7BMKeUTff1hQqpIgw/GLvJRtar53cVY+SYf4SXx2/slDbVr8BI3DPwdeNtAgERAoGABzHD3GTJrteQJRxu+cQ3I0NPwx2IQ/Nlplq1GZDaIQ/FbJY+bhZrdXOswnl4cOcPNjNhu+c1qHGznv0ntayjCGgJ9dDySGqknDau+ezZcBO1JrIpPOABS7MVMst79mn47vB2+t8w5krrBYahAVp/L5kY8k+Pr9AU+L9mbevFW9MCQQDA+bAeMRNBfGc4gvoVV8ecovE1KRksFDlkaDVEOc76zNW6JZazHhQF/zIoMkV81rrg5UBntw3WR3R8A3l9osgDAkEAwrLQICJ3zjsJBt0xEkCBv9tK6IvSIc7MUQIc4J2Y1hiSjqsnTRACRy3UMsODfx/Lg7ITlDbABCLfv3v4D39jzwJBAKpFuYQNLxuqALlkgk8RN6hTiYlCYYE/BXa2TR4U4848RBy3wTSiEarwO1Ck0+afWZlCwFuDZo/kshMSH+dTZS8CQQC3PuIAIHDCGXHoV7W200zwzmSeoba2aEfTxcDTZyZvJi+VVcqi4eQGwbioP4rR/86aEQNeUaWpijv/g7xK0j/RAkBbt2U9bFFcja10KIpgw2bBxDU/c67h4+38lkrBUnM9XVBZxjbtQbnkkeAfOgQDiq3oBDBrHF3/Q8XM0CzZJBWS \ No newline at end of file diff --git a/src/allmydata/test/data/pycryptopp-rsa-32768-priv.txt b/src/allmydata/test/data/pycryptopp-rsa-32768-priv.txt new file mode 100644 index 000000000..d949f3f60 --- /dev/null +++ b/src/allmydata/test/data/pycryptopp-rsa-32768-priv.txt @@ -0,0 +1 @@ 
+MIJIQQIBADANBgkqhkiG9w0BAQEFAASCSCswgkgnAgEAAoIQAQC3x9r2dfYoTp7oIMsPdOhyNK5CB3TOtiaxhf3EkGAIaLWTXUVbxvOkiSu3Tca9VqFVnN7EkbT790uDjh4rviGeZF8oplVN+FDxKfcg5tXWv4ec9LnOUUAVRUnrUQA2azkOT+ozXQwZnJwUYr210VoV8D0MkrvOzNgGpb8aErDhW8SwrJcoYkObIE7n3C3zEMaEIyA1OFWSJDiXNGnBDvO54t1/y+/o4IuzLWWG7TPx8hnV+jcHRoxJTX2MZusJ7kugvxhgB0+avwXFTQr6ogvPNcUXak0+aLInLRtkYJ+0DYqo1hLAh8EBY/cLrhZM5LGGC4BAwGgUwsx3KKeOeduNnob3s/1rZpvZGwbGtfiWYQwDB8q68j3Ypf2Qvn7hPwicdOr0Dwe4TXJQ4yRHPeQaToOBUjtTJnrHsKDZET6i+jQ9e07Ct+yYrUwZjiaSXJYU/gCyPCui7L37NasXBJ00f1Ogm3gt4uxl3abO8mO1nKSWM+HbFBEyyO0apT+sSwYj6IL7cyCSJtWYMD4APdW5rXSArhyiaHV+xNbVUXAdBrZSNuwet925hTOf4IQD9uqfzeV3HIoiUCxn5GKYPZy01Kft+DExuDbJjMmES2GhfPWRIFB5MN0UdjlagDHLzFraQUcLTDKlxL0iZ+uV4Itv5dQyaf93Szu2LD1jnkvZOV5GN1RxTmZCH1FIPYCNwS6mIRG/4aPWA0HCZX8HzSMOBshAS6wECaoLWxv8D3K4Tm1rp/EgP7NZRxTj2ToOostJtjzTrVb3f3+zaT5svxD1Exw8tA1fZNRThIDKZXVSSLDYaiRDAUg7xEMD2eDCvNQasjAwX5Tnw7R4M/CZoZhgYVwIE+vHQTh8H+M/J8CNLxPT4N3fuXCqT8YoJVUOmKHe0kE5Rtd87X2BQY5SSx6LFMRRSVdBBpWB6cwLo8egehYAScEDQh0ht/ssaraWZ2LGt5hZL0I5V58iS/6C4IOu+1ry75g6mecWoHD0fBQELB3Q3Qi6c6Hik/jgTLQHb5UMqKj/MDSdTWuxwH2dYU5H4EGAkbfufBoxw9hIpdeS7/aDulvRKtFVPfi/pxmrd1lxQCBA4ionRe4IOY0E9i419TOgMtGgZxNlEXtp445MbeIlurxIDIX8N+RGHWljGR/9K6sjbgtGKyKLUxg51DZeuDKQGdyKXtIIkZ+Od9HN+3Mv0Ch5B9htIRV9hE6oLWLT+grqJCFAOD3olGgrRXByDsd8YouahYfjqb4KNCOyFPS3j5MdUpq+fiLrG3O98/L/xtmXxw+ekl95EGAnlwiCwULsjzVjHJDzSc68cldMnzNqLwhwWXpc0iswCWCQVFce/d1KlWqrtwq2ThH2pX3BJ5Pnu+KMISNNC/tagLe9vjmrh6ZhEks7hefn0srytJdivGDFqMs/ISmcld0U/0ZqE05b7BpErpfVrG9kb5QxWBTpaEb2O0pRsaYRcllFuNF6Nl/jPDBnn4BMYnOFnn9OKGPEDUeV/6CYP9x+Wi96M5Ni6vtv+zw9Xg8drslS5DJazXQFbJ0aqW3EgalUJVV0NgykB6Hr4pxTzrwo0+R/ro32DEj5OfjjU7TB4fYie0eax8tpdvzcWJRZ/c5b/Dg1yK+hbiMg9aTctHAsYJkOvMpxvull20IuV2sErWZ7KZhId19AFOnEQ6ILlHRwUf35AyEVmUL5BqLl137EeEVShEmage4+E/N6PdKzJdJGl1AQGyb7NTD86m0Jj2+8qu6zsBgyUfiJqZ17fixKV6l9HGJKSmY9If2XrX/IhNZ5dvqSmODJ1ZRGC5gjJcxcdHp2Q1179SlNmXiR/7DMcprL/+iVhRyxzM2GEJ78q9jS6j/Z+0vLzdNOPo1KxD191ogYjl5ck9gnHAkbaiANaK4rrfMytDkNm0JRua4p0mVyVHWZWwatoMhJxVl3+9x37OkF24ICTJZ
4LSKDLJxi9WCQbhgACIA1mjcW0P+4AszpbuSXOQkPtT+MQ0IxHMzX261yHAIPbGsbSzoTy+PWJywFdMDy5afXDTNpmMfpzWkw2fhBQasNoGHl2CwFftJdr4WWxuN6mSwhNVHJTw1xe4A5fa6bjip5kmrLQK85YF4Ron0OIOofjcCzvjKCkNkGVKBhRiqBoqV6Pzz1XauVHFhFgZZNWXI+le+Fg9SJojeDtFQp5w6dZKBJMxV2uNPqV0U4VOtvAas2+Ul4zIJDB/FJyDX8POrsR+VkW7via64xM1hQlOZ5ispEOUvmO/NWkAsJM0n3S7qgud6NaFqOofQZcbh5r1z2uIrXwUIb85m2t/sPJBI1J/Dql4dmzgfn/q6Siqi8FeDoma/lQBZWyEeGz+/ckHdw/BGPx5FZlc8xLegNrQj4sVkUZXVAjNoUguA5HT9GcAmE5FeOHdHtD0bdTaNFkQbKdi3yUlGA1GZeyPThwfBaizgX3i6oOtGguX3HQMQtExip5xR2vsiYJsbWXuzlKEws8GwXoiJo8xEh+TPavxxtZ7dDdnJY1mUhKTVGLBCqCrJ+uhWdWuHKvC9x++V5NO6WQrUiG/o8oOwkpWyH7GC/VtulpxkoJlxAej3JxlHn91cN4PstDo4goOhQBi9k2A5rsmvjGG75BOKlqvhaQ6BPOa+9F5D5H0RhT0hw43TZmJri+0Ba2WT3FigcHHYGtx4UJfyqfg7d+WXvpIynC7i3SIN3N7atg3EsWwPuzDKE6ycjWTD6ToKmYLMnDgl4PzOEBFstG12OdcuQwhk2Dy5uEdxqGfViy3fV+Muev0yAkE/pRwutgQjQdw0OPXyGoqchYx33/cHq1fDWmkXZab8wuVThcx3He30UI4rr3MMff0gxdnJt3e6YcHHF0R8fGwkVC03zWXI2hfqHq+rNQkBnIbbRnepKvJylmcHn8KVJ13Nm2iHRTw7B8r6fE6LsmUJndh/M2Poa1AtxfGBniMIfqtV0RuT7UR1nDI0C8Lnx7E2KTw1MXCLh4xzGr5wZ+4T5FTeUnzd6yc7EEduLxktqh7RpmnBBPRNIufI9ztPTmRPXgF7r9PxI8MI09Sr2HQq2ZmEs6G0w8l8WMiABvlG/YQd+UHGn29acrzSYp6AfggjuUV7PrCC4flKk5IGBNdUtUqFxBRUuvn0ln7HayAAYLJuVMNv9daBwqMpp3Faor/0K+jC0FhIan3R6wBpKSuJo/6jZJoSlSCLGCkFqM9ks3sgD5cDvxahV7HNOv7AisDws2LsVATHbF0HFeoEA7lp6NzjK5dgqd+9rA95U0c7w31E1E9GbmzLADC/0eSDKEkdKGIJ4mP1erpBOc+cdJ2tVP5e6cZ7KNhzjYf19tORINCTrPAp9/aLXnoHgtLp3ozkFS/dGowLZ6Q5XInPBchgiI4TVHDDxGpwMAZp3G3yM1QDptd3pxRSv4m97QIOa7ma9l3TCK8RA/bs/akYoZnxM92GvG/3FQdws1y3Lz2NjoikVSaX0TS1t16TupL3PQioaeRJLnTZu0WGR20WLL6kEBz6cHJC3ZN9Zilnoje8lEm/7/WYOCt490+w4KS24aJcgDPzV7Z1npXy19p3ywEY0AJND8uurWeTEHIBJNxMPU2OMGd0bGa2S0yr/dfbIz3FmD06noX7/XKMjQ+gW8EBXAA7s8TA2RE0HbD8IGKlg3CCIaYsS4BbvK0B71qHhe/yM8qnUo5+vv1UpbioYVBI77UfiqqUDUAIIg+apIKJjU352GqXiEovXGR6Jeag+ufzPkPq9BqvyIfW0+3r2/wp4nIu7Z9XM6iU1Lj1j/wM1goktBnDfY6hbjHA0acQFCgUrzeGqyzYSe9kufDTSw7ePbx2rLG+fXa9qwqVwY0iBjJ8Hu6xIFmvesHwq0ySH0IqyI/Y53ee2hhju0xWAz8GishuMv4/apVLWQ4MbmG788ybGRxePWqYx/KI8M1fUvZGRXmtwAqEIaakewUV
pL3QhawB4eR074Yhl5gY/ElwlcxNboUVayqJwgh4BO+/2tAutTDCtkzdLMjH4JoDpMNsf4GiLVvlSahU76B+oOlttcIm69oRB5BklrgbPCwqbQldsvvP3nHuFxBAlunefMMGZFbTd59JbO5UAkAHQ7XRw3MWDq8B3V1uCF59r4uXc+kvYFS/y8DTpQGKtO0RQx5yIonoNCbJjYWtx+zMACXoXWkrH03IQJMKmPM3IMbtMDMxIdqjD1hdaQ4dAnVcCq7ZvcbIThtCHX0+Vqo9eHoqA2kBtZLRq5rq4GG8Jm7o9mrpuVTLvym0goJuK2KQbF39CxlTG8eIIRKFQNhKC1XtuTGiIQzd14UsHWHhqhWo8uXHGhAvkl3ga8+5bDuJRhJ3ndsNE/tnq/VlJf329ATseDCLmVEDRiqe7CJeeyvMLgN0oE0lGZkmf2iYfRpB0zdkj6EpVdVZs2f/vRTp7S0ldwvV0pTDj5dzboY+nhd2hzR1+EnLPuUbVGqotTz8BWkxo9DpoGkA//5ZMeCkqFtKh3f7/UAWC5EyBZpjoPN3JGtEOdBRLX9pKrvY6tqpwaiGAHA85LywmB3UoudiGyifKe3ydIlMltsSpgc8IESwQaku2+ZlvZklm8N8KVl+ctF+n58bYS0ex63FfYoJEbUzJMcyC8Gse7zfC5MFX7nVQPWRrJ6waRu+r33KKllmKp1pqtTH1SO0N3WTP8W/npELnG6A9RnnsbtXO1WhN1HuyT5yv9KRaVPq+2EkoweAEq/Q1SGtJBX0hxWaK2UDRb4VRMHC1uDF/CVMCcfvTOQ8/ihWgrZtroDQ8J8TU0ICZVCdz3duvw5/C0eCLB5szT1EsMY2x1hKpnfS21Y7SCpG3SYv2Ii47kCex1A35Et/7MMwilelxgrwDCsXyObkepVwdrBwV6YF2qd+jMj+H4mCfhempxwCSlhXgwhS0svSPmPPAJOU4gSmcVktfs/CyqCKLzpGxHXjdcA41/gWVCeYDdjOEirh9rUIy8KlIspI+3y+XNdWrRfH9UkYQsjH7mwvixOQfc3NUvMLOSnCe4bLZ1gR4mIiaGwR15YT+Tl3AkfHu3Ic062iPlWON5Sn6ZOBE1FnGi25YOiBCdDkF1vGdzPb2SLBnucVnEqKfBB3/0KcMrT6bDApKrPxfVQfx7YJnKO6T8nddFdPne2sr2Joz+QJ4DR7nnSBvu0VEZTXLAr+K7OOSJwlE76WYT/oHDHM4LivUit0ChnsUegNFwD7zO6nz3OWYzDaB+XzVr0c5wtpZP1IYRCs20L5jOc2P1dzV7WHErHJ8/VhDZ76d//2SCCdjv5kTfwXXHsfWRK8jMV+TZSmKlKgq+pDd9Um8Ao5ShvGqMz6TThFihNrXUL2xCEXJ1ki7xL3fTTCgK/SlMt7NYeOv5xqIdQdc7tSjYt9y76UbY6bVe+i1H3ppaYh2+oBaSDyzbInglXpHEWS4yJfh7kJxXV5P2u+LeOIzmz3xpZJJCiRjdW/Bl6jbAgERAoIQABPRyc9I9OY6rL6uNAQtPDR5Idnxvsr/kLjKr3IPkeLKCYrfZFezkr7rp9oK5b8V5DjrRTNQ9+j6CqdJDUr96ocK0wvpx/HR/rCYmqave3QFmKoGUEXvqgxVRrd+sjgQlTY/1X4CgU4OYSVV8VJaV4TgLr2XWoc+P3Qq+QBNT0+E4IF8BkMZp+sVDYdvloYib8L0urBn9SZZPVGPsQ1KZZQL6rXwWJ4iQUMCYsrJRFjWWB6a++UtQVMzBgKXpeV2j69z+xlqM0Bf5QO1fCoWfsOFzHh8Z7PoJ0p/2EmR8xryZsvu7fGgNXEXVF4fUrf6i52DwAb7ptUP/PPAnp5sg5lP11byyIGLEM6hCEKbJ1uC77oNY6q/xWowBMHOROYYXcqZKGWdOo7bLPSlC3EYPj8SgaIGW7spy/xv6TCB3BaYeRWwb2VQEfxjAK1sMVYPASBhqr3jWgoKeOFdoYJ7el2BLqprHod
1Vbqr+2ahq2Fjt2WIGt3mjmdb8WnGht3f7xfzbX+CYGATPzEKOOHojQJ0lpptITSm336cwdW//4qo4XdMMo/cnO5cKzbjgbAdI1eCIEaSIvmpRgs0PNQuzSKPZ3GBqvPLFPeePeOZsq+IdNXs5YqPTw7BdJ3Wm/VZzZACBSbdjP3Mbr/yG+qEIx2i0x6I690twqy+fxdKy/HHcRGcjiBMODROq+cpxRROjxHqd9/8udNQqjqcg6j/iMzOiQv0FQ9+iEyEzk/jjF8rmFlp9FtSKe4FJ+ZgNfKFAdhDVt+cu5MpW5NZJ1wKkOM2xEzSKZlYrXx1MQbEqsUb6uopkHWoS435jsGrkzgjbDUTN2SW21o/xaiSJn7/27oUiezK7sKqK70Sf2ixdqXQXwBC6sBItE6aK/VFR+r8YcU0ysxzj7WhJB+CDNatv4d4M0oFZkXB9wZ7GIPD282KqAUM+TUOqMnpLKftZAEpRGC5ck/keBU+J7/vGO//HUKOjtPsqYPPV6qY1Pc6jrUn5RkIxzc+qo5lSoae3DL/e/7a+SCKN97Elac/bOtTRy/of4jYf8HgNQVd56NxQeoy+fUboH11jwuz3BSrHmBLnbljxz42gglBRFY4Zw0Vh35KISziV9yXqj+a+72dj1iOXCc0w/27E3gQERaex5m+8eGTxKb1R32HKV9Ww94UYDdkLZwW3g7sG6uXO9+tjJY2uZk8GHFxyYlCUB8a0URVNVMYdKDHqTuhrFLOv/CWjCBg92VB19bwSGFWEfwUroQlZa9nU6FHp0a9SgpLvq2VSeReOppoSngAuft8vxNUDXeDRfZfwf4jtUdp14zLE3QvSU83RKy+Wv/4jC/Y2ro7SqZ6wAWIlYr9Js1ixbOyeXu7e99D8sjWZbB3QMD5zYpsW416jOxZ0OXKrRZ9om+B6CtGgugjxZri8us9VpZXw9Q5TDcW88Ym6Dersajy71qnndzvo0K2FJBW7EMi64J/2lr70yAJADNU9z90B3BK0X5junIBbp88MfJNKVjrm7VV4DVVk5YdmpMqxWUVW/xj51ARIxmu2boXSpUxHs9ZXAoF1C/OoIVcM/7/tOtOERzUFFRClGsw6yeTEPvPlYY6eKnKQJputuCMD/+qbhj6kpxjclAnfEJMr+Wa/QnOLp+0/Lvz9gh5hyMdgYCBIaPe1rJ7TglrqsdcoIjHObvMm2OjeYdZUAHB+Hgozu0H82XC+OD57wax1n4fw+YktMtgobt2YRENRAcyYReehwfMKM0ahR6GVIdRCXQ4RggEbyQUoTArKSS13JpliMLNEhwocFsahqxazDm//tadLKCPEjnuKrWGXEwiHpJBOLas/J2HhQEQ3XKMDCAGz+QIfkjxGvbhYARpBTgf2AWNoj1BzWwPWn1vUQk8v7osEoP0s2kaSencOFlPfRzkVowKJAnR5IZ/xv6lau7bjqsOnMutoKjJ3lWUzvjhuvAHUh7AG/t/Uubn0ZdZalVIvDR4xcjcRdQSsyxcVKg5cw9V7e8fOFocHlb/JKYUqWaG7edondhueTNK9n4YAwjgykPhcj7+aJOWJAP6tTlqIt10lC09mHIkgfGdEU7gGmODgXMj6C5bW51TGKi38mtAs4YwCiUJ/m1x+yGFP3LBsB0jswMxSIL1/5B9djzeqbYRoZAUoBuS/qPzDtSNqOO7ZLmCb2YL6vV1x9nCEUkmIvEyDNB83MxZeMMv3cIp8VXPx8X5U78sLfqTHlq8dZnhvGs9zwVOUk729bfGLuk9ZQxHuFwoodFOUMLTdgJGPaXWjEaY/rdzKnuN5GDhtJ7MDqipVFd4O7PUNCjeqQo9hJAbPRaCXh7cweIWcBkVl/0df+Y4vGtmvQEyt4wvQyYYCCVE3J5m1UK60Uf/DB3OtM08Xcr/DiRG6zdIUVcdpQzRBRIJLUoP5vDp/jj4qpoh+bsR4uIQpvU1ityWixGiAAMVZuuvnJ+G/A7mc5naLN+hH6wELoqRxDb
UqNerfxulkEKIpPwiZ3l5AI5O8yLiG2Pu9tPj0QoTz5neBDDNyx2EyAlQh6Be7hSZyWqOuS5YWbs+h+XVmsNdQaY0CKDsX5NjgmtYeh1KF+RPYTs44982RosMVUnijKP5LrtM945zk38/RZ5qR/Wn66Qm2ToKEiTnw5wQFFx86/lZPeFDQKpsxx+qi9rf7pxVALvl+p7vehLrNajnFDAh5DvsNlWkID/jgipuNSFIN6TsLuMvRAbqWWJBpOOVaE9Mj174Lv+/C75EJPVMUAkzvBpr2scTNl9sSixXgdFsc1TZ3zXs+vV4AKuYjw3Gq6dmnAj6Qu0XaYfgnGZqz4lzYJIff2mP1AAPHN7rCfnlza03cAppazc1WvTqIC22Gx1Sn906cdcG8LUobdx08sXTVxi6wgyqfQUuU+JbCpH4eoHFpUMifXmGHRHciQCytE/UIOKTPX1JNFnRKmEM5DYhfD8/wi5nHgNS/L6zHqpsrWfu5UyvumZJ7XA/djiZ37x7JdpTVj/8EgIn146AYRoVlS+V1xWDOz6c1BG9BUN8ZWdpY/Y4W65owEN19CNg9eKWizEQD8TH7X5rz874WVlrsEuBOTN9feYylhT0uyJCAPWX/ARhwX2iTSVsIemAGwI8tvoqq9u8vXU/j0+EtiFYjBm+GTo/E/GqLjSsEIc+B7RnARWTjfMNqNu49DoGVLUtvQWAoZlYqGLGpvis7PlO1tNIRbhaXcSXasBbO6DpASLBZwGTfZzpm3D2OC60v52f22uwJx/2tHRUILWXgbmc7/kWnkb1FZbpUSfrkxiLcX6cK+3RLT//Pnbk9wva+noJ/aVFb9ldBkkAk4iX5XYHSTWf2IdPe5Lz1bBB2Y3WtFo0MR1LKf46yQncL+FbzWTLRSHPY3UeRhVg3FHkH6MnXYpov8hHwZ4FrJaT7LMmdj13DL3HF5lwwYzvkclyUJ2taQCwnXPlgXvWRgmYfNblc98/yn3m3wWzx5rS4gGFHqBkJYwTqW2cGuRDVZ0V3t3+UfzqIJmK8nXpm0GKjZT50PfMjsS6+uVgTHaQ38HDFvpBM/1z2Sh2fcGfbkxVBWt8Wwl0Xntt6tYYamFGfqR+8W6VRVQJitb6uZZiA+wcbO+kfZOw55VGHld/USRiRv8QuxGe95TZV47f1CcCJzZhWqiaNH65DLsLAja7DeNwxd6CHaDAik6S6rD0FyZ9PQPaICPPI4/xAo/0ZVnd/yEc8OI+3yM4Ks+YgQ02Gnrl1z9lv2Y9zytEPBDFy8iWYtiyXZ8i4U7AXOGd5i4h3jKPlW7h0OkRKiSSh4TgO7dD+5Sxk5kAMUo9nxumcCmTBWL6i6yRnsKmS0nkIyZI4wuEihk4Icof6JsPqrvXxc9VgQ6QWQ0FgAeubKbqIFgV58l2JK4Qfv3JKYrKMS/n/BCjRVZh3DfkTcZzQg+m9Ytcze7bv52bN0S2xrDITaw4q0IKPgmXI5Nwb4HA2t4p0iBHgoqtMbU2tkoVyh16EVnCwnS/IhHi4HTlcKSNDCWp52NXf0cWGjgxDV2ds37QYD6JoLz6Jf+NIUElPQ/CySdVnfcTHK6h1xjG3K5OoeIboMqJ0WxKdRm+Eu/2OpC2T/x4i0YxM6pthPXUQ+tYnjYd4csTbjE9aAVexoM+ARW6WJj/utUp0VvRQOiFRTLDVNJfzG1YUDXq3u0cAWkezq9q8bny97HBHP5vnjzymajF89NHP+bjZrvPNigJOXSPybJPPFLhTPZGjryD+78fT0VrvMHkXutC/Yqa2OEXe+jYXOhx5phxknCngScLmIudX2c/fXXxxoLeJHD9Hjv2ASlDszSEuBFDawPEMuQaNf6sjTi3PLgOaVZDID+NAh9sw3RqcnQjMcyR6ojGxkDpzxj5VBNHxbPXNuAUXPNkl8KfkAgwbP1qBWbyHAzUBg0+rBcRBjnD+WHkhiJRqKW7RMyyGMgpk7E2p75ZsdtjDX1uzxJ99QT+q3qEoM8qfAM
niuUoxeVX4WWaL+eS3aDhE9hJtz2qVJjx/oYu+X6tSjSoY/3OHlum80NLM5h/tVBXi8kSFmtV9NkiGPXT3OVpEodhhCXBZOblOTOkolbawoROX1tJNXpNAJCxz5d7jkjPM/VUoBrvtXcfMBJOGyAgrfCu/qZ787tsi49ZwMKPjW7SAWzgzsVVynVS3SyPfUs69um4QESoW5rMqbnh0jTRCiCGAjK/2jDjhqpA3r395j0TDlQh9goCzwzYfEyFEAPspF73GcEcR2eb64S0bRjT/SUrPrRFUSV0MhFefwXwd+mv2VcF7Zr8GzlR9fOpngy3xrC7GkyeSz2jNSwIkpssLpvXPbG4mzXs4WBFDcDb0hZmFHvU+fLI1+Do9lQ3KbSyCXxA3VoveSEv7spX+9EGJpHjesN8cPcjChjVozfOzGWDXw9xRAFVbE/eLLrik+ftGqzmqm1zNSbXInJqfFmgeJAH95eS7j6r/kqO6b38rKtMIRMWj/2xtArTtpqmEbF7JgQNM56dIsKgf+Iea3XeV2A5wa/d1EMj7omPTUezw5beqBExgShFc5xkibXHuSTLD/ibQTya42F514GH+1CpmXJ2MtoQMBv5mxJ5l+HynS6i11kfku33m6CMPzv9H7vsO+0OMgK9zf7qOIPIN6tpOkHXJPy6ytHkPNJoQ1SStUawwwddGGOVu0u/IfaCp47sLMqIoUAF1kZSt3laLGeW0Y3/Mbdb5j5NwK+36XuWUvJs+eHIKRvc7KqcW8Ww+ReglXFdc9HGmUOHV6t7hQ6YT059ThcDZQf0JasLJwFPAo9BfHL2sgBUdF4rRt0jLBVNaXbcwO+tg374KIf7dHcKKkPQ9HT0fzkBu0+SlsEJfpqMklksImd6Ls1clJSORvKAnzcPvSbxA2vcGg++Lu2vdqSzQXD+2BegqE95A7h0Dd7VH6AvuqosfLpuarI5Hs+FX4H6vpxMa9lb8RTIi2lAI70CgggBALr8nb9910Az4BdF02PCn0uM5oa1W94D2wQN9sW88ivd2pXMRlht4y0546P96ud8Daxtv1acT2henrCw1S3I9CpR/0HDoKywEzPgN3JQsJhDfsvEhRCrKnU9miwvjCe38nlkMG9PVZmVTjlvt5UWihzbTnjv9nBSnQ6fhz4QqqRBAi8Lcmc6IKuz7CuROsY4lNCHW1xLcVoKJOTOMV1DUKCXn36K4bkiYE0lhWCtAZQBVHkJWupZpogjd5mr9qy8IfXF91iIPKw02XLgNiclPX6q4r3m98aMD0c/slvsIH0r5fphjLdoQHYPt4Mp+Vum1cGk+ogmpcwSJnBJ1qbrFvlBmcGb5LoMd9z4qhvWwWVOKw565kyWkaB5WO4v1KFx67KVdPszzAUF8u2Ac5RIPY+4Db8hvTCovDH2y3q3mBynYJX2FjHS+3Q02E66thuzHfbxHIKHSazq5gJWzr+hYfal+5kZxOfydFMIC+jdRmFajNmoKFM2LOUlZMVAHPVTK40DshixVjakvEMUCJyDHURyydgDbs9W0ElSYq9mVMXF/2m11KY0Eptzvuh1LkFHIfDOdUCjKOrsd7JeUqF860WPgxHUnAas5HKBTM2xNXEyAsQXtQk1jU/CxKgLr3WDLF4eQ76a/BO3SeGhytpasDKUMQiqXyN7v1gJeBQoyiFitC1oHUVVTg7EgJfN0B0dFWKL8iyYItWB7xKtXHPsedU9EWRfghBAxoAqf8GLW0905DMHdnIQKg/43iaKWNqmNqCVRMKQnShA6GN6tOxtvaVV4WRNtwtEuOP2U42cNA702e0qFtmWDBjARuee1qhJCuklkYdDFKrzn0MXT/5xxNCtGVLeZCFPWw0uDUQu+HjD8Izc42fnVGS8fLwGLjj0Ajnn/MtVusCHvUFJSPLG8qsCXBuhsywmtpZGKKe2EP+KKphBFfExQQJWXR9tbBGIcygK9c6wj3Tnrwii8D3oIGvEgnNYWUL0pRVSs6tpRwzXwK1el1w
AoU7rUQ16UoJQx01tWEvxN7wTsbo/V3IHp8F/UAMNnK1GQDZqn/NDR1Ln70yT56kqXsNf88WI38eox55vtOCePiFmpHddvRuMZrmSu9FFQtd2rK4eDMrDuGxFJh63+n53iLFlCBbNcc1XV5CP99B3STPSzYHPS9n0aCoiDL5kJ96LelFEkFqr9gOhG/3JpW7rGw30Mv1rFN4dFKn58dSyfi2tHbz2geuIVG5BEhujxvhYg53CC8v1agYd2zlSPQnCKU2efI47iXbGw66l1ACwLWsI21pR/HVt4YyjKwy8IWJoNPPN0AjcDq1Czis6kUXfmLRDks7DciEdhOqT49zQyn4hNebkFg+VCs3Y1JfMilRYdCH5aJJn6g6w9wqE/qCx6wQuq/7Y5ImEpKEYme40uqJMjO2oekz1FhsZ8PWSku+d+Srus0pQkB8MMjHoFrAtXi0QWY1y0wo6Ci1kM6T9wbVLmF8hXkfqhEdB+RcyNqQeGquNxM6rU2JKvy/HLwO+zTD53CQC1ToYV2+5MCRr9+N2/CbcifMUN4VIEn1Eej0zwHF/yN2Dc+UYWiyEQtlG14z2hlkDP0CPGq4tt8VdftJ+HvCw8DXvTWTnLnn1Zp8JOcQmEeP99YAYcjKhKnol+34BK6OqlAPxBhpdin+TRG05T1CoGS4qDFCdS/mIdCVFv9g2/QS1SdUQIS52zaRHnQQCSCWEa+ZSTfRHd58wlVwt58M3tCbGyNiM6wA90GWFA+zPn5OSuWleAC/cHp8uaJ5p1tC2CPYxbU19N/pQmg+fwNTBO24wUN+3zJXC++eGtiFofpQjnDWXLH27+oIG+YuutaWh1jf4Jsf3HybnAmDBUf4D39zprOur24+buf+h5uDddADdFHnQ8GHo7txQ0pEU1Q5L6tUw7JY4zVLZ7PF04Bl/XLIRHwb9hGoAEGsblcahUXa6SWq6oQmyoNO5l91ZDZk2ovSdq0kMrEB543Y6Uo8UvPIDgOwvcVhjrx2BDy7H0YG8rMIerCI+4mXi+xrU5Akhyom5b8TFqsEmZN5lvrsdcNtYc4/d7qnkbVYBZlx2MyeDC+ch5f1yVBY1cLnpjFFHFUXZFmpzUrhXPc20vgeXnQQgqQtV5fbQDYUGz5KIe8d1wVGIVMut1rmRa9/dspSJMmE24mNe/K11eSymPBI+oSmwmo2KobIOb4otMXXGiNmwVSN8Yv22FoF3u2zgpx6esCfGLLScnsXOpCf0f7aP4aqwqN5yeypzAlhF3+yakuuv0m/dHUEhxuOqStrxEG8ShJv5tkHsM3V1WLRkpBAadXPy6gSysA265grR8BX4LbUZnFqvoDDNrvRSweNv2HddvI2fcgltJ/fIcEu8Qk/WNLUUWJXdMRbaUwO9IPvhQULFEUCLdqvK5bB5oDnUQQ3FTq7Lspp/naoolLMn7k6K5gx2IxQnpq9+iTCzU/vrKL+O7Mi86AHJxPCr9tk/MPEqzaH1SjA8zrPqdZdtyngTEMn5ZPiHV2zMUuWPJ2xXT2zrpyx7mVXJdl0SE2gbnOTs2/5wFPy9aTKynFtKxZB1y1iWEAlBWsTnoS8FE+6CBZH01xww9GRjoMi9xDee+wXV/olDo/dROj4RPYSvIeB3tIxorxRR17YjyzZPssKDGTvfzKM8kqYYNE/BqEKBKLCz0bhPCCWxu3JaVJomTVJTrFy9JzmBMy2O3sgLRDl6X7vkqOm1AoIIAQD7nE/lfkcKEttlB0HLSKT+yDGo8kJAR4zKmi5fZpVgWYK30Aib5HFTA9BHVZElnhTeNyvYMdSO1FdtNsa7tQ1/0rD985d/GLXe/f25PAbEsmgnFMmc9zSmpLIZ5vTxIC7Bk73mqwwgZZxSNvpqurbUO+787vMn2wKC74fJHC6NF5FMFrCypu4B5RLs6C9fGjRKab1vW2mi2967gCZrB1celCcgkBzN6XA7tvjDozDz7JU+x7ugmBx+6MKpsLc/FPrRgEhwWdPIsV6R+vO
qRugeTBtr+NyvFhAa639l/e9EQwpEbVJgbNg5okOZliYDF4UM7YADgv0aKJtir+4xN5Cka7Jb8vyYIAcchy4cjz8IDNK3SuvhRmPTbEOs/xwZpoN3YqUiARI0RvYznaByKpOJSpxzqqP1W/026K6n0KagIjyQht6p5ElpsXlIgcH0fwpXseNYl2pQAzj0jAGFaJNYSBdgyQdZkoiUDprKUm9dZfDL8m9FFpoDV+BuJmxDe2XUpLfDhTnF5n/F9wYjmd4Vhfui0HA6kh0dLvOS0EZEvz4mT6zD7Sxx+T4uyZJE9nq1KOEpQTW27mzJad4jXJkiYe5C33DSEdOpwVAu8pIYFxmcj9uuNHoK2hpYcst/wYuNzgAHB9LuJaJRFLZSXN+IVyBWU2S8iejIVYzAKhm7Pj72hIE25Z7oQE/MniMQeUgmoIlqxbSpnWho+K4koZGNIyiGv3N9XFTjN9YCdWSC4AVuyfyKa8c8Wl1cWggnOwhj1CkFeMCK+f02a64kupllLUL5I2bzC2drmjpdEGB8m7KaCWl+W86pWKHKltns7u6Z0TlEPCk2Y2+ypD7GEicZSbMwAPt5jpTfxoMk2h9ICzgDbFPaJTtAsYNMiAYz9Sa+w0ELdSYoGD1OqN/ZkPE/sGRcXfAk4efEkfRDbCU0hiH2HMbKFLhH63/RfGSbgeYSGDHTs66JOJ3htSh1arYOmkwBB5v33cnVCmRiUGgE4QijTnMmYLKH42txfzD6fU1TJKUr2woazXiPvpS53tgSbO/zmBUE6fiFIaOGpT0iHXhx38sDX21VPVY4zwkYvmFNKliwgnZTZiThCNF8e1r4W5SlOyoCm+cc6UnPB1XOYx/Nd1W7Njm46rL4rsfZ2w18vATLl4ofn+6M1dgN39FO6ueKvZzxHUH1Gp2J3Z1cphfke3+O8NKi0BmIe+TjfuTzCt6l/rkr0UjKqXqYF1OedZe0kwkIRDmY6cY+gQlIdIFOaefF/3bBu95mAozWMTtZZGAPrf1QM52AJ/0fZKjoBvvZTVbeP6TnuulOcahtZVGDs3Q2Io9d5Y/c/adXwEyizH19Z8dV/ImY9JdmXDB80wDodoo0/uL8Ig/2NslKCu4KtxjzLwgKHhsz2wWgjagn3AGkD6nlVdElCPwRMdHW0v1Ld5RzZG+oXD88tXe91cLH7YY6k44pB86gD2EauwqDPSk1Q0TPy+Fj8sLEWwg/prsVZWMvwLvGCRRCCUWiDJhuWT1dzOxHTcbLJSAqSTaRDccvIrFR9YdqqmZtinnSwzByzOG0xY4uO3j4EhK3GVpi6L8zgoEqP4F1vU1EwPn/W7VfsLggBBRhG06yk+R4zOBtUNOHi3Ra/P/D7smXKmgR5hnz8tfObTgCO6FdIZAnP7DbS4bw1eykk55rG9x/k76Kd9iB6PtlnTl2gqaCcx/JX09lhWNbXL0NL9J1T+aEyJiHZyViVcHBKjXaUSlf8yYbuFMSV82iT/LgYLSmEb+tsS3bm6Sa1r4uoOrET40Dky88Oru7hoZ49f1HJrGLhoRlDO4rCnXV7QABqwAE5qJCDZ0Kx1Vvs0WrK1yypHAjbmK9O4+98Ih+65HhdXoR5Ds2Yj1ovv+d9NWBMEQpLEpOdtEoZ6xqAr1DDgdPVg5wSPtEavKOEfQWfPERqCQC/oqcO9rMbwEZGx3wcJyIZZ6jbupWGcHmSu3bvb0sJjdX69wQGL9Gl5WzR3xrqMYDX/ObNKml0QM0//SX0+j3FhMzMzwzqDc79a0FnXjjMBloIRVWsFdGqt5ZF8fXSEkHejycJDbyXZ2amxtPN9LgOZ6GvboFEnoEpslW4shx2+zO3Q/u0YYbaLGZu5zKumObpau92s8clYwC37htg/IT/JYLUVvSx6HaWj3GaVfvFlQ2/oH+Pk3MOVAyx1GXpZoOtjcs44/U1fKVIIAn0jX4g//wcsdt9jdbdU1PD6UpH5VlH8xJ3fNWxr37R8nIw8HzBnbrgm6PWH1
wiWzbZSR5dAn5WUv8MS8JxMKC+QyNjZ6/kgfO1Yt0PV1EPJ4ji6A0F+akKWlYVXdbgGVQyISsje66u4fncZOMHgVwlF3X2sNe+ybRMUTysPsTAmRm2YUvIX6b0IGL+CcSWMKM7PeCyX+utfIn2IWZ0Wa5mjN56TRFBx0b9Xdnq9gLbx+HaUHSLERJloYg8jfeshmUIha6qfb7ywtLBixXcJTQUYtlXkQJ5pzXyYNWqv5gKShjAxsOMxvg/AvXw1g2TKjq/vZs7X+lIbghfEilIu8UUn1r2Lkwak0AI4si1prjsqNCaxduiZGGjeKiOlDA9c+72AmrGj8hbgCyzOq8mAYTlvadCUH2GRmQQnGVvw2pxoHpFFFBx1ZPWmmU44lnBjlWxPfQ2Ic9u1yLYHEnUVYTxDKHK5bT8940F86YFfjozWK67PFKWju0iuriL7cvbi8yxyeiTwKCCABX/mhaHRoAGGl0XRgu8izYQk5dgoWVp3YgBpI+74EFlZQKQgL8b/JvosV6WV97/iSNYNKDHGGahuFEFvroXpEE20rxxXjJvEVFlrCuRBbePeFQ1PNTI19GOxtgFmASsOqTenElUoKioJ1INJKggxPRWCTtnhmeRP6deD+kvIyJiAEHFHISdbUFgdiM+QyZhAnLiv3RFHGTyInVFbzgmCXxOEsOX3lIEC1RexGW6AC+Hr5XE3YT7fQD1HSEjSjJwfHdEd3PTyucVRsI4ftdtyv/X3nCxwswQekSeFPvBbTvnC/9WxULA+IZcM7UT/zf1go9AlfHmbdvF5meQN17ueyxiEhbHC9mnHSkOMiFkjzkYQUz/ZmNdAhLhGYVvCfTgOdjGSf9vgWoAsysADZj5cKd/EK0TBzLmrLqVgVm7PxJuC1zvxmA28GgGN5DKrANCP8Ky9EuXchRX3tMZRX/03llAtDAhJjln0XMuH4TOvPxlAYMEuXMzjM+qC9r4e+CgX3oAb04y+xV8ytq3EBJpxzU6rlWmDQlVgeqCKbpIRjViloToNyKctuUcrQxKBXEXbWef0Y8iQQyUSlE4RfThhRc+D2uCbLV9wIXxGBgy9zp+Wq2ob6a7AZDpvMh52GgtjL/HU0OZw02dF8AxJuyDI8m3FNPXzvUdngpbd4nmrl5H2PZIe+oKCS7p8QLM6064IKIulPYwBBkeWFyM3bNI/0ZDa3U4aaePJmluaWIQZRhoGtjTs5Ty18WkztdbkfubFXxNy9qnmgS8V5M7nNCFYZr7C3U2UcUXJM+GZC7HFS7voSr15JIRpxH4gM/0kblyAUibAg/pxjI6x3FOCWk6j6AUXVULGta+CrZBpzUys9H47x+hhCpXc1clO9ninAazS45Xhyb7Bul5YY81zFjMHIyW3ajl2NgEjfOPyIwziYd5qqiAILL2vFqgv6lYKtTi4F8QWSdgEOCTuj1AWH/A9MFiabM3kgfgi+RkFSM5j+NkrUGSqGUtQCdm+noOZA9UzCc6CmNJjhYgb0MWgsIfBK1aRaYBmfZEgAZm5aQmCGQbSVRNosibkq2S0WKIkswx+V3vBjiLFl5IT5WSjrfyZnAvYWPqB90dBUGpLq5xYP2tyD/ZaMOVl5xmPS/b70VVkdTFpK8dF6u+coe+COx3G1BAPbwLyHSI4Ta8xbBQd0u4meGfQKOjMFv+nJZI1UdOtyMOWK+ch1Cq9HCVeLJMRisWttYTRJWwD3v4thf+wS3lZRXNcJe8fVRs/5hDPVlEj331ZDxQ9kjT3ZInw1kb/GrmBRCOmoQMQncJJ4iSXBRiNl9wTVODt5y8p9wW/l4/tUjGGs6vJuGpjd7tqD4RiMzsVT8JATcZdxMOSImx300FwrXxh14zDJcUjLSR/MTibbiZe4VvnXyBef3XlervqD9sdrN6p9/0d6qyq65j1LhbyauEt2AFVl+nkhCkGNQG1AVXFSJ4NOgnAt4D7Plm4mK8d6hgQqnlbIynRFSMoGXqrRSuBYf4VGAdZTFpvruKZK
O7bxNX/wuzpTG/l8I+nR69L1oIDmGNnit4cfvxWO3GoTJp6b81gsVKLexavCW2e5wFYOoK/9yHTu8j4AZYY3VIX9Ic3uWInWJe1O2laC0wDW9eQTuL/3g8X3yqqAB2tWyDebSn6e67cr4x5NhBLqASgWimpECey0adDrVCgSggA+dZRV6fA2niJpsAhSonzO+P7/ScTc6b/SYGjao1gQpq7nh/vioPphvcMOQvYRt0eH4Z5Xwjk9ZmzfpvxNGrdkVaBpXrXWs/+JGAJRwFrylg6uRxSs/xuxL9PBFtmegv5x3Z4Tx5SojnYKoTCiSzyFPCuF7uAEeeReGmGlY5m999oVwwcDwxKjiShh44IIbNSXTuOjgJgi8voJhFKq+rZyC7Y3MosnbCdLe0oX5cXgDSiAx4emb0L70D63dhNdBSMRAzIfrKilZGtk5CqcJs5vmJBTTDC7OOZBDVQ2fELUj2hc4p2F3S8ro1oC1hfbx8FEBDoioatCFGOPID+bXZlK285umMC93t2jQhlM6C4GtHSUEp7r7S/PvRq6pLpwwiGw7CKAKc4BXfPa81igg3qEjCRfeRywkkUpd7P6Yh9cUZKh0JawCXY7bi4WLCjzbEvq6M7BXU34O/uqgQJAtv3mYLLMkc4RRPytT4TzIUxuN5uKuJOkx9yZViprAy3Nb/kyzoPuIOFgzPIrhO54w1bqvWMMv6MW4cw7sf/G5vIuz+aNfRS5HqGlgSL2cFoEllTrxeU6JQRQqy8t/kD5nhJIA55++zc9j8yAHk/sJY0DzJulv33tQYstVofdkSEmUFmmAYMrNVB8BnguDd2fKLOpeyfSw1stu7y5DsBjNrzi+/q2wZr2naA+Fly3FEXGHySjJUGwWz9LuCYgGevfZyUT9aTsi5eufmlIG1/PJoQFy5Xud4TGAVGPB2BMs9/b1DZpbMcYW54M5Dq2eqrsfCgTLZ+jNIwopJJuDzLybSC+EA3RvbzYRrMdCCvgzQbgU7t6+9WTggPoS39Fcq7LSFqB277kIzIXQm6hD+zKECmHmPN9ruEvZ5EWdalz5ZCj+NSe2xXjW7+Pd8HYg3Sx81IllU2azy+C26QDiGjbYqbvNU7DOLvY3rQjUAXVJWkIusxfVsQmO8biXxE9iNoDPNEARvQzqhNyExrr5kMmVDbgbD5+c9/BeI2tmV7SUp9cEkQKCCAEA3gJknVFNvZgq/soq/qmChnRoDYp2sTAS0OJlJwApcyHNsT8Wp6tzDQNdbB5S5PTlPIsIkZVhMrtcMzBU//oa+FB+DUBYfzPrxMH9/cuNgGEuuRJXin/FC4JCy4+M1MILI0YgB8QZwjuJ7jCCmmiDM7xpdcPHfYUCN0vSKeuwmpxTBubYJSnhELsQsur8nzU9MpmJB+c/Fzp5PAepbX7yhGSa/p1Gl5G9Yd2uUkSyuRwLN2Tw2P6vu0XY8BRlc+VVx+mpVBMGKY1xj91tlj6QkzQYMhfRx6oONd7Z0nal8O/b4gYbgkHr9p47paKaArpmVrNw9AoqnpxM3ps7lNaszU/3uosbHND3N0oZoLqhBxpfkquE1dSyb0Fo4/An2mW/SzjsDvHi4tUzlvR+gtpF8ZwvsVpUbxTue74/wT+iFNLqJSu1aLpe5MnFXhgjm38nPlGqe1hs3TAFFAMQZqeREakFkaJRx4FLVXZMWCqef5Yu0hIl8aqH5NURUiHnDl3SUjb8f1dvNiW8CQcjiNMPQCrtFzBjBoDsgyltgYqYWsbcfCgvBzquvur6ocDqeRW3kMm3nN8vZSy6V11pprsdtOz/aC6QuVsGDkEooeUXfqr4exWFmbXVGKJTezgc+EFdBKa0uujJLHuPOHuv7lHyaT3RPRxn8abcdIe4bVJS8II3jjiuP39P+hqgw5qXaON75djxuJBUHTCJTZAhL2FiT1tB4E6TFEJpBLjL5A06kZh9Q6MqH8iCnqoWJE9wmxX4WBWNm2qLxeujMASotv7/0b6GY1t49JG
XfQ+c6LQY5mtDPJ7knKtb/tW77v2THFpaD0AjeHFRile86OtGcoh82hPaV4hla0GSaxiR1TjubL6a1dgNwHs0SCQojtJf0331AqxIc4V8BUKQcpUBv/hcZV9nnMtba9ZjFtsi0hQg0/3huwpVDKje1gwHXnzRPesWTDN3QlM/pkEDxydf7yHr7sRhLhXF2rSjB0Vnogq2Imw0zFRHfDc0HYxt3J1nc5u8ssX7JrI2F6Y9M4oKwh764xTTuNF79UbqV1nqo/s18OzTr8V25Nu60r2mblxTUhFk6bvz5wmzsv/GL/i41z+qnudlCkNDL3qAoQoT8uhaxSpJPNK1DplB/YPLF6lG7WbtyGmp4NEBZzLDbTUoDD3060e9Pi7VxbBnX8wwptKZ6FZRUSGsyWsUNU40pZp+qp0kXfqIOBz9vUAxK0o+/qsrqe9Jn1SPf8O6Wb82c2LL9KMIrpmuY2jwUJa1LNUS2xxhixxUwop2GZb0YgUqozqzJxU4ko+I4jgoF8MKGAnu9x0pzo9IbABgYeisHVhIXHx/2vCq9i5klyofDn12h36FIthMGGiYEKSqKcOzuyFIMkXhGINwXhpwgWXbxFfXyeZnMjqYCTr/UeJPIK2THjsEckGyUaW/OKPqDQYZrgmHxZ5+sGgrJKBQQlIuyXb7U9I2c8yNxZW1L9IDG/RRgBQWVkfSQA4qV0+0vcvGlJ7E+GV3cGzbyzxYAq4Jwk3vNF63rSpGVsRCGyPv9LR4fsV6jMpX7NLlRSbIv2Gm+QDjVkOL/Ot7h82BByj5wj2eh/WRSrpUvdgp/iG3oPn6JRkU4w8gYHR+aIobX1e0f7STwZ3jWxZTIor6pxUTTUOsf1nZjAsFdjOVLtrf3IJAfKAc6QnkXA9krtyhleUUlb6S65LBsa5zO3WyBVHT/JOblK/phDiGlZc/GofnMfgRZkec+k8Dgd7f4wIt6ZHWTYKBRzzWTfav/gHNeZBNdG/eNL6pmb4ano4tLP46arruihMVIMH8WSmG2q7gcXbDxTyHi9qPKzkwNq/h+SW18WJ+9/qBEDQ5AVKsAfJaUd7qIUmJ040lL/xUTV075bnpkBuHb5+M29JAFJe2P2vULBtv3Jc56pq/lri35sSME9eniAzUexzUp/iT4Y8fFib8TJ+ZLQ5ezHDs4o8yngXg7xDUF+V8IGazHUMDICtl+IpeuViut68EH4jR7KldLsO5syRkJU+2lpaeh+7HUwXzBPRbm0iO42h8PW5rIDxp1iQKruVtnS+e5B/0P0OLD/JFReX2TWAEWMWGBHm29Quil/VHc4XQ8sMODvUb+hEVLUsv/iv9iVXx48ERGTiotz3e9zgv84SEZFbYjM5DhG2+CWwCS24OEmgYWM2P8G7OSuwa0RmmDPshoBQVf4+ZzuBxFBPVRLC0pvvdJMow2DpTRcKCq9CS4MG0QS1AH2QRCuT9VsrYTueWGxi75+Sq6tOcSR0CEM/MkLgz/KPeNcu6r8ywuSKbIYDZtoAvwOrZ0swTEE4F05yeVJ0CmTxaQa2GkpLPaxPSpMOWCHNF9Bp9RTeeGNAVzEcEIf5L5TK/ayA6eN4MGob3PjByTlNtxOTn5cIHkYSd1mROIyh14hMeZ4gPTXNqWwZG3G8tHz1GKDTflZkb9a6Wm0iUd2xPaiStKFpQSlm7zxyRfm7b1K6hbIQvs8ulciXVr1dz4w9Y62+cGyDbox4JikfONtmKEcsroixC2JVSgqVIHYvHoMR4mXX2Mft2Occ04gE+iCbE5wcIheYFncStlNeLvFGSjCQvw9y9PJ6wLI482gAaaivJIFgGxsvu6DRDu4XrwF1ISoH6KALeSRlMJ+ZdKQUAxFDJLnGPXew7GFoGXNygE6IexiWV/swbq/VLl2BM4IvboDzAhtERI3zLRPMLfEg4OOjAO9zmvGMCgggBAI0U6B4zfbor2UG5zkmlHcBbOc+a3/N4PNZLwcWfMcS6hzsU8v7fgM1sOz03K4E
EPr1ULSI/Tq71XsIcaGPt124quX2O6wzplYsDYy40MBeeKry7xsaLnGo5UCqvCprelYx2zGUY/fuz2UJxbeMyM9m9uTBZ8h3rOuioGQgmRDhI+ACcti4kMKg1W0nqd2pZ69tgCEGt3H2puq9SmukNm41xYE3YkMvo5e7yjlWVcdQ93K3x3dPP8mtr6ckkKMhOoxDB3tsd69LTxXc3ebhD1u/pGhqyAvpXcPaN0TqjhNMKdnn+G+g7BfOjmO0FsF4ElRO5d/O7KrUs/E6vfvE4m46KeWlE1plG8C6Ukx/Af6UwCHtWTMQihLfskuIMz67o/YDOnJ7miGb146yd3E1nOjydRwUoSeVPYzLCL4R7aO8DCdKbmVnQyh/xUBSM1m+MWH/UyqFQMx+vFMseDoPjx/+G2ZvKa/GXNRoThXonVpAFFXUzEU2DzIzxa75FWUNU4Nhc9h3HLsYCG4hYYb2ab45cQD3uOjHIS1VB6tXKLbwBfIQFH9bi3wnUdmGBnRHAU3NEvflxmNFCejBZoLsbqp/niVr1BIzvmmZHOR0di07sVkKdoRGBFuLuS53UPOBndoQJre+SESEyVNwdN8jnDFsCQ3k4KZbS9d85MgoCagtNA9XaZ0kvQtwP7zBqVAwEeCn/cJG2yKbVMXOstGGW4TexTHiGlSCT0Q9lSAYPLJqT96x8vL5JoUeGaIL1h7b1hdwR14LZgp0nmROKzCKovASkuMaPvSDv8kA3TLG9mJD0flp8cB2y3+njj3j8O8aY/RHx3qNJwIR8djGnmcpw5hjzFA6rbx0zj0UCc69ogNTbdeh9Ia5Z9RMdsEUkBLj1+AABk5AV90xv8wAUjxzpflhR+fz51wsvAL9CIPwvJIvbzSHZEPOgKiW1zwOkO8NOrG1GdyPYgD2JseLxfZQ3pivqfcOekLJ+X8ZT3VN+wqojz99lXFUDA4IU5WSRYVROmWypZm5LfhulX13+REGgDs0sGNmjqCNcsQ6UW6NFFIK6dh6OmVnuKW1+lSG097xlhv64IDabYM/wf9kH34QLyyZvI0OVVoUnKuiebZMExAZJ8NzxTyM6ol/J8wIHRuSHXwu812AVgUdIDGdswNF2PjapNrb/6TRXZeP6BtizlHWoMlpJp9QaNOqhNPj3uONB7P8EJrS0u23SXunTz+GIKzGP4x/a1hDburtYmoKUrls+rF2eufbTypANSJf5u7niVnXaQn2Mpy07FeeeptyYi2hWgXOrWjtsUy9OLRgR4TKyzKtj3rJX+jRJ/SgNv59VQta83JN0Xw+4qIJPWhYHvgSAdp1EugnSK70PvoLN7T3OX3Ox3HjtKcZR/ClR9w2hpoWbGEMmqeiUug64aYFQ6UyBeKWEUpPT3rd7Cusu57WiSoj7OsXX5vWlUz23Dmz/UqJq91qo7UorjlueIkyMgPpaKfEeF6FM5i/lkBBPlB0rD8l5RaJ7c6EgD/6ahcyM7bteQIpL/7P8G9VcWD/45D1HqOhc0DXenSJuZBnA50IMLaT77bomcEtEigMCiBjKpTCSjNJ/CL9aJe+1EOQpYL8kEH7ZHrUlQtO8tnOCM3tQ+0d72g0zo35pPTbwgEOdH7BAqs4z/EEdjmEaE15VdmVeDXYUUnl2XSX4TO45G2l5O7wzLwEvYWRx+cez6ro+Hv4f5MkPeQvyqLzKwuwocOG6GD4TzsUl3w/h3Tw+kEkbzPW2UijeygLSYad44jmfwTQCwee/DzPbiGXv8a3Zo1KrT0+RLgKQ4K5/RLqLFfcHZtgKqSIFKbPaNPoBs9YWNkR82Be75bYtyAWuaNj/taw9h06hHBlN9JAlXE46wUjZ8ScG9Lw+pI9SxW+k5sWzrOjCv0rH6wGF2XwEjU7zTXe9njj4zPj+Jgsc1Q01ThYUNfAXG0M1cm9SteVEovgRXT14nv3yqgyOMW2Q/REGqNuyRvrbxjfwfk7ZbvVF6mDR5ayB0qdnH5YlEbDfE/MmbEQ1UQvEkbMZsyr
CzNjUoG/DxThsuCARZt1P9OpDSYmcG1LL4TgFfSZIF40QfHeJJjZhwotCwrBSWCkThF/TAHO6MFYaUvX0iofIMzIjdojuf7eTLU2dVaxDLoYWorvKl18T1zo9ESws0Ro453sXTzvQbyGJaDhYQbAhkYwvzX3D1tq4r4iBDqTlJKGsX59z2G1m5K48dIAqpjysknMyDeCz2MyfpKbj1ja0GzuNbtv0X48PMR+6PTMc25zatNU93aDR30fE1BEtjRgUrUuZzMSC0FkAkuqWTuN0mK7kYZZ8Uv3fSa0pOGh/uEyuIZ2+slOobCeqiG9hgmOnjvPAY/DXRTu+sSsRyeICSfLaawja3ZGhpz/fTFKygSY8O8Iolyg1MeyPrIz3eNndkd7RlBifbN+RZD8pNHJhljHnBRvO579Kn5eBey9cih0/DCXrqiJrxz2/rulNezKuLsY3m+l//IqzA38kpR5sbHEDoO+0HZcNTpU7hsc+3yj806eZ0SvJdDLxjiOoebLBLo6JebfOmaBAjplam8GLLuoJfH0DlwJkAEUEQvcx4Y0AbUAL3CmQUHWHiGrlCrWml7nlIyEhLj7Uj32z9lRXxBBrH5obgwl8RWpmCAti7K4ryFSveRMo0A67wR3APYYvF1DoSbIRABn2ikQVvPrcjiXDNwkx \ No newline at end of file diff --git a/src/allmydata/test/data/spki-hash-test-vectors.yaml b/src/allmydata/test/data/spki-hash-test-vectors.yaml new file mode 100644 index 000000000..33837a1ea --- /dev/null +++ b/src/allmydata/test/data/spki-hash-test-vectors.yaml @@ -0,0 +1,80 @@ +vector: +- expected-hash: >- + JIj6ezHkdSBlHhrnezAgIC_mrVQHy4KAFyL-8ZNPGPM + expected-spki: >- + MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAv9vqtA8Toy9D6xLGq41iUafSiAXnuirWxML2ct/LAcGJzATg6JctmJxxZQL7vkmaFFPBF6Y39bOGbbECM2iQYn2Qemj5fl3IzKTnYLqzryGM0ZwwnNbPyetSe/sksAIYRLzn49d6l+AHR+DjGyvoLzIyGUTn41MTDafMNtPgWx1i+65lFW3GHYpEmugu4bjeUPizNja2LrqwvwFuYXwmKxbIMdioCoRvDGX9SI3/euFstuR4rbOEUDxniYRF5g6reP8UMF30zJzF5j0kyDg8Z5b1XpKFNZAeyRYxcs9wJCqVlP6BLPDnvNVpMXodnWLeTK+r6YWvGadGVufkYNC1PwIDAQAB + certificate: | + -----BEGIN CERTIFICATE----- + MIIDWTCCAkECFCf+I+3oEhTfqt+6ruH4qQ4Wst1DMA0GCSqGSIb3DQEBCwUAMGkx + CzAJBgNVBAYTAlpaMRAwDgYDVQQIDAdOb3doZXJlMRQwEgYDVQQHDAtFeGFtcGxl + dG93bjEcMBoGA1UECgwTRGVmYXVsdCBDb21wYW55IEx0ZDEUMBIGA1UEAwwLZXhh + bXBsZS5jb20wHhcNMjIwMzAyMTUyNTQ3WhcNMjMwMzAyMTUyNTQ3WjBpMQswCQYD + VQQGEwJaWjEQMA4GA1UECAwHTm93aGVyZTEUMBIGA1UEBwwLRXhhbXBsZXRvd24x + HDAaBgNVBAoME0RlZmF1bHQgQ29tcGFueSBMdGQxFDASBgNVBAMMC2V4YW1wbGUu + Y29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAv9vqtA8Toy9D6xLG + 
q41iUafSiAXnuirWxML2ct/LAcGJzATg6JctmJxxZQL7vkmaFFPBF6Y39bOGbbEC + M2iQYn2Qemj5fl3IzKTnYLqzryGM0ZwwnNbPyetSe/sksAIYRLzn49d6l+AHR+Dj + GyvoLzIyGUTn41MTDafMNtPgWx1i+65lFW3GHYpEmugu4bjeUPizNja2LrqwvwFu + YXwmKxbIMdioCoRvDGX9SI3/euFstuR4rbOEUDxniYRF5g6reP8UMF30zJzF5j0k + yDg8Z5b1XpKFNZAeyRYxcs9wJCqVlP6BLPDnvNVpMXodnWLeTK+r6YWvGadGVufk + YNC1PwIDAQABMA0GCSqGSIb3DQEBCwUAA4IBAQByrhn78GSS3dJ0pJ6czmhMX5wH + +fauCtt1+Wbn+ctTodTycS+pfULO4gG7wRzhl8KNoOqLmWMjyA2A3mon8kdkD+0C + i8McpoPaGS2wQcqC28Ud6kP9YO81YFyTl4nHVKQ0nmplT+eoLDTCIWMVxHHzxIgs + 2ybUluAc+THSjpGxB6kWSAJeg3N+f2OKr+07Yg9LiQ2b8y0eZarpiuuuXCzWeWrQ + PudP0aniyq/gbPhxq0tYF628IBvhDAnr/2kqEmVF2TDr2Sm/Y3PDBuPY6MeIxjnr + ox5zO3LrQmQw11OaIAs2/kviKAoKTFFxeyYcpS5RuKNDZfHQCXlLwt9bySxG + -----END CERTIFICATE----- + +- expected-hash: >- + jIvdTaNKVK_iyt2EOMb0PwF23vpY3yfsQwbr5V2Rt1k + expected-spki: >- + MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxMjhLl8R6KX+/itDHCT/T7LQM1i9F6LHe3TW0KWY2FKC2Ov6sJi1pn4NM2qrlW3EUPhX4l0Ru0VE9ZJuwQB1nzFkZIP70Kr8MLmYBoDjWWXsxTiNG4Lj3ydMxBMq/LLSpgHYgb3+Hh+OQeByboW1nVWWm8+QjZNXHhMvRhJmYvyFi0VWoITe/L5R0ubMtGwZ5mal/z9OnvYcE+Jb4PUxiujDhhvAxr4acHscPDn8e4+HBswDSvIHwyxKkE/w6G0yiw736YUbGmxsThSqRqilujh3dAdIVJJxlxhHwrdUkdK/Eq96SOx/BB6M/M8n8KrRNgwuF25MsabRPphgT/l4M46ddyq4209skSnoa1uJdzfx7HQuWep2n0Nagu6WtcKtrzPI3/BKiOMzOcTNOI63VavCtn995CYY9aUoTpz/x/rlp/5TPM1KiaYMBaq+MneBtqlHyYEQUZP9l8QNtvMUO7nLYaYZhcs/QA+qmpJnxcK07njvmw6gh2oLXuvbUbohPVq/3dmRBdJh4tOZWtJsjFP0XYe41Hhw/sUSWXlJAPghLXBBbgAkkeyK5KatuvD7Lpfs/iuz17No1mo8MhLr3+EnzZ1JBuRo8Nksw4FX5ivZmJxt/HQ2UcQ9HZLejIZJbYBEpUu5hvaC0rOmWDWfftLAjD7DzDPu+u46ZNGa8ykCAwEAAQ== + certificate: | + -----BEGIN CERTIFICATE----- + MIIFazCCA1OgAwIBAgIUWcQFI0lueRJyK4txfA/Ydn0bPRIwDQYJKoZIhvcNAQEL + BQAwRTELMAkGA1UEBhMCQVUxEzARBgNVBAgMClNvbWUtU3RhdGUxITAfBgNVBAoM + GEludGVybmV0IFdpZGdpdHMgUHR5IEx0ZDAeFw0yMzA4MjIxMjUxNDFaFw0yNDA4 + MjExMjUxNDFaMEUxCzAJBgNVBAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRlMSEw + HwYDVQQKDBhJbnRlcm5ldCBXaWRnaXRzIFB0eSBMdGQwggIiMA0GCSqGSIb3DQEB + 
AQUAA4ICDwAwggIKAoICAQDEyOEuXxHopf7+K0McJP9PstAzWL0Xosd7dNbQpZjY + UoLY6/qwmLWmfg0zaquVbcRQ+FfiXRG7RUT1km7BAHWfMWRkg/vQqvwwuZgGgONZ + ZezFOI0bguPfJ0zEEyr8stKmAdiBvf4eH45B4HJuhbWdVZabz5CNk1ceEy9GEmZi + /IWLRVaghN78vlHS5sy0bBnmZqX/P06e9hwT4lvg9TGK6MOGG8DGvhpwexw8Ofx7 + j4cGzANK8gfDLEqQT/DobTKLDvfphRsabGxOFKpGqKW6OHd0B0hUknGXGEfCt1SR + 0r8Sr3pI7H8EHoz8zyfwqtE2DC4XbkyxptE+mGBP+Xgzjp13KrjbT2yRKehrW4l3 + N/HsdC5Z6nafQ1qC7pa1wq2vM8jf8EqI4zM5xM04jrdVq8K2f33kJhj1pShOnP/H + +uWn/lM8zUqJpgwFqr4yd4G2qUfJgRBRk/2XxA228xQ7ucthphmFyz9AD6qakmfF + wrTueO+bDqCHagte69tRuiE9Wr/d2ZEF0mHi05la0myMU/Rdh7jUeHD+xRJZeUkA + +CEtcEFuACSR7Irkpq268Psul+z+K7PXs2jWajwyEuvf4SfNnUkG5Gjw2SzDgVfm + K9mYnG38dDZRxD0dkt6MhkltgESlS7mG9oLSs6ZYNZ9+0sCMPsPMM+767jpk0Zrz + KQIDAQABo1MwUTAdBgNVHQ4EFgQUl/JLslQ7ISm+9JR1dMaq2I54KAIwHwYDVR0j + BBgwFoAUl/JLslQ7ISm+9JR1dMaq2I54KAIwDwYDVR0TAQH/BAUwAwEB/zANBgkq + hkiG9w0BAQsFAAOCAgEAwcorbUP98LPyDmOdTe/Y9yLWSgD/xJV/L1oQpB8HhbXA + J3mEnlXtPMNFZULSdHxJycexeHe1tiDcFgatQv/YwURHW67s0TFHBXTvSitWz9tU + CL/t7pEIdKgzbUL2yQry7voWVUaXOf7//l/4P9x2/egn78L6+KuRek6umtIECsN0 + HoOiZzqTrXn2WNtnU1Br9m0cxFFzMzP/g2Rd9MUKjIDag7DLfvRCmTMK8825vTJI + L3nzGfWk5R+ZWO4BudfvQWpI7iMj2/7lRWxYvmS+SSJh+DFwYwV+4CaCPecXVI2x + cD/M3uKTLhUMWo1Ge0qQWhl/qwtJ6FNaxp86yiX8x8EHYB0bDZgH4xMQE0/6o0Vg + vKpy/IrEwnN8WM8yYLpm9kTe9H+jM/NEOxPMh4uid/FLmi7KN549UItAzUS3h7zP + gP4cpSW+3Dgj0l7C58RIWxwABIIJZMH/2wMT/PeNg2pqDjhkoPDg8rwsvaFn6T0u + 1A6pJFnVtWGUuyxJESVYBq4vNSLH68v/xkajxl62uWPDkpgAqWuj5TOUP0e/1Uj5 + wqF/jNlRhLMw10r0U40AYkzQjgN2Q4jasqUKsZyhDa8F8861BHsSvFPrASLy4UrZ + 9Tb4DMYXTNZOY6v1iQerRk4ujx/lTjlwuaX9FsirbkuLv/xF346uEl0jBYR7eMo= + -----END CERTIFICATE----- + +- expected-hash: >- + nG1UHCwz7nXHp2zMCiSfxRbCY29OK3RockkeOiw-t8A + expected-spki: >- + MCowBQYDK2VwAyEA6gbCgxeb9kkSDo4WbB76aTvBWnpyzColUKDxyDhPu94= + certificate: | + -----BEGIN CERTIFICATE----- + MIIBnzCCAVGgAwIBAgIUBM5d9fmVxhjKQod7TLp6Bb2vEd4wBQYDK2VwMEUxCzAJ + BgNVBAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRlMSEwHwYDVQQKDBhJbnRlcm5l + 
dCBXaWRnaXRzIFB0eSBMdGQwHhcNMjMwODIyMTI1NjE0WhcNMjQwODIxMTI1NjE0 + WjBFMQswCQYDVQQGEwJBVTETMBEGA1UECAwKU29tZS1TdGF0ZTEhMB8GA1UECgwY + SW50ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMCowBQYDK2VwAyEA6gbCgxeb9kkSDo4W + bB76aTvBWnpyzColUKDxyDhPu96jUzBRMB0GA1UdDgQWBBQC8cbPWjZilcD4FSU/ + J1sSNYwpAjAfBgNVHSMEGDAWgBQC8cbPWjZilcD4FSU/J1sSNYwpAjAPBgNVHRMB + Af8EBTADAQH/MAUGAytlcANBAGfmvq0a+Ip6nDBlj1tOpyJzcl1J+wj+4N72V23z + H1c75cXDrl9DMOqLwNVK9YD2wmaxPyEWO4tdth560Nir4QM= + -----END CERTIFICATE----- diff --git a/src/allmydata/test/eliotutil.py b/src/allmydata/test/eliotutil.py index 1685744fd..b1351abf0 100644 --- a/src/allmydata/test/eliotutil.py +++ b/src/allmydata/test/eliotutil.py @@ -3,18 +3,6 @@ Tools aimed at the interaction between tests and Eliot. Ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -# Python 2 compatibility -# Can't use `builtins.str` because it's not JSON encodable: -# `exceptions.TypeError: is not JSON-encodeable` -from past.builtins import unicode as str -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, max, min # noqa: F401 from six import ensure_text @@ -23,11 +11,7 @@ __all__ = [ "EliotLoggedRunTest", ] -try: - from typing import Callable -except ImportError: - pass - +from typing import Callable from functools import ( partial, wraps, @@ -42,10 +26,10 @@ from zope.interface import ( from eliot import ( ActionType, Field, - MemoryLogger, ILogger, ) from eliot.testing import ( + MemoryLogger, swap_logger, check_for_errors, ) @@ -54,8 +38,9 @@ from twisted.python.monkey import ( MonkeyPatcher, ) -from ..util.jsonbytes import AnyBytesJSONEncoder - +from ..util.jsonbytes import ( + AnyBytesJSONEncoder +) _NAME = Field.for_types( u"name", @@ -71,14 +56,6 @@ RUN_TEST = ActionType( ) -# On Python 3, we want to 
use our custom JSON encoder when validating messages -# can be encoded to JSON: -if PY2: - _memory_logger = MemoryLogger -else: - _memory_logger = lambda: MemoryLogger(encoder=AnyBytesJSONEncoder) - - @attr.s class EliotLoggedRunTest(object): """ @@ -155,8 +132,8 @@ class EliotLoggedRunTest(object): def with_logging( - test_id, # type: str - test_method, # type: Callable + test_id: str, + test_method: Callable, ): """ Decorate a test method with additional log-related behaviors. @@ -170,7 +147,7 @@ def with_logging( """ @wraps(test_method) def run_with_logging(*args, **kwargs): - validating_logger = _memory_logger() + validating_logger = MemoryLogger(encoder=AnyBytesJSONEncoder) original = swap_logger(None) try: swap_logger(_TwoLoggers(original, validating_logger)) diff --git a/src/allmydata/test/matchers.py b/src/allmydata/test/matchers.py index 3359a7ed5..fc746aed0 100644 --- a/src/allmydata/test/matchers.py +++ b/src/allmydata/test/matchers.py @@ -3,16 +3,9 @@ Testtools-style matchers useful to the Tahoe-LAFS test suite. Ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 import attr +from hyperlink import DecodedURL from testtools.matchers import ( Mismatch, @@ -95,6 +88,7 @@ def matches_storage_announcement(basedir, anonymous=True, options=None): } if anonymous: announcement[u"anonymous-storage-FURL"] = matches_furl() + announcement[u"anonymous-storage-NURLs"] = matches_nurls() if options: announcement[u"storage-options"] = MatchesListwise(options) return MatchesStructure( @@ -112,6 +106,16 @@ def matches_furl(): return AfterPreprocessing(decode_furl, Always()) +def matches_nurls(): + """ + Matches a sequence of NURLs. 
+ """ + return AfterPreprocessing( + lambda nurls: [DecodedURL.from_text(u) for u in nurls], + Always() + ) + + def matches_base32(): """ Match any base32 encoded byte string. diff --git a/src/allmydata/test/mutable/test_checker.py b/src/allmydata/test/mutable/test_checker.py index 11ba776fd..8018c5d05 100644 --- a/src/allmydata/test/mutable/test_checker.py +++ b/src/allmydata/test/mutable/test_checker.py @@ -1,23 +1,16 @@ """ Ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - -from twisted.trial import unittest +from ..common import AsyncTestCase from foolscap.api import flushEventualQueue from allmydata.monitor import Monitor from allmydata.mutable.common import CorruptShareError from .util import PublishMixin, corrupt, CheckerMixin -class Checker(unittest.TestCase, CheckerMixin, PublishMixin): +class Checker(AsyncTestCase, CheckerMixin, PublishMixin): def setUp(self): + super(Checker, self).setUp() return self.publish_one() diff --git a/src/allmydata/test/mutable/test_datahandle.py b/src/allmydata/test/mutable/test_datahandle.py index 1819cba01..6ddbb61b3 100644 --- a/src/allmydata/test/mutable/test_datahandle.py +++ b/src/allmydata/test/mutable/test_datahandle.py @@ -1,20 +1,15 @@ """ Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - -from twisted.trial import unittest +from ..common import SyncTestCase from allmydata.mutable.publish import MutableData +from testtools.matchers import Equals, HasLength -class DataHandle(unittest.TestCase): + +class DataHandle(SyncTestCase): def setUp(self): + super(DataHandle, self).setUp() self.test_data = b"Test Data" * 50000 self.uploadable = MutableData(self.test_data) @@ -26,13 +21,13 @@ class DataHandle(unittest.TestCase): data = b"".join(data) start = i end = i + chunk_size - self.failUnlessEqual(data, self.test_data[start:end]) + self.assertThat(data, Equals(self.test_data[start:end])) def test_datahandle_get_size(self): actual_size = len(self.test_data) size = self.uploadable.get_size() - self.failUnlessEqual(size, actual_size) + self.assertThat(size, Equals(actual_size)) def test_datahandle_get_size_out_of_order(self): @@ -40,14 +35,14 @@ class DataHandle(unittest.TestCase): # disturbing the location of the seek pointer. chunk_size = 100 data = self.uploadable.read(chunk_size) - self.failUnlessEqual(b"".join(data), self.test_data[:chunk_size]) + self.assertThat(b"".join(data), Equals(self.test_data[:chunk_size])) # Now get the size. size = self.uploadable.get_size() - self.failUnlessEqual(size, len(self.test_data)) + self.assertThat(self.test_data, HasLength(size)) # Now get more data. We should be right where we left off. 
more_data = self.uploadable.read(chunk_size) start = chunk_size end = chunk_size * 2 - self.failUnlessEqual(b"".join(more_data), self.test_data[start:end]) + self.assertThat(b"".join(more_data), Equals(self.test_data[start:end])) diff --git a/src/allmydata/test/mutable/test_different_encoding.py b/src/allmydata/test/mutable/test_different_encoding.py index a5165532c..8efb0bf82 100644 --- a/src/allmydata/test/mutable/test_different_encoding.py +++ b/src/allmydata/test/mutable/test_different_encoding.py @@ -1,20 +1,13 @@ """ Ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - -from twisted.trial import unittest +from ..common import AsyncTestCase from .util import FakeStorage, make_nodemaker -class DifferentEncoding(unittest.TestCase): +class DifferentEncoding(AsyncTestCase): def setUp(self): + super(DifferentEncoding, self).setUp() self._storage = s = FakeStorage() self.nodemaker = make_nodemaker(s) diff --git a/src/allmydata/test/mutable/test_exceptions.py b/src/allmydata/test/mutable/test_exceptions.py index 6a9b2b575..1b83f7eb4 100644 --- a/src/allmydata/test/mutable/test_exceptions.py +++ b/src/allmydata/test/mutable/test_exceptions.py @@ -2,21 +2,14 @@ Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - -from twisted.trial import unittest +from ..common import SyncTestCase from allmydata.mutable.common import NeedMoreDataError, UncoordinatedWriteError -class Exceptions(unittest.TestCase): + +class Exceptions(SyncTestCase): def test_repr(self): nmde = NeedMoreDataError(100, 50, 100) - self.failUnless("NeedMoreDataError" in repr(nmde), repr(nmde)) + self.assertTrue("NeedMoreDataError" in repr(nmde), msg=repr(nmde)) + self.assertTrue("NeedMoreDataError" in repr(nmde), msg=repr(nmde)) ucwe = UncoordinatedWriteError() - self.failUnless("UncoordinatedWriteError" in repr(ucwe), repr(ucwe)) + self.assertTrue("UncoordinatedWriteError" in repr(ucwe), msg=repr(ucwe)) diff --git a/src/allmydata/test/mutable/test_filehandle.py b/src/allmydata/test/mutable/test_filehandle.py index 8db02f3fd..78597f774 100644 --- a/src/allmydata/test/mutable/test_filehandle.py +++ b/src/allmydata/test/mutable/test_filehandle.py @@ -1,22 +1,16 @@ """ Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 import os from io import BytesIO -from twisted.trial import unittest +from ..common import SyncTestCase from allmydata.mutable.publish import MutableFileHandle -class FileHandle(unittest.TestCase): + +class FileHandle(SyncTestCase): def setUp(self): + super(FileHandle, self).setUp() self.test_data = b"Test Data" * 50000 self.sio = BytesIO(self.test_data) self.uploadable = MutableFileHandle(self.sio) diff --git a/src/allmydata/test/mutable/test_filenode.py b/src/allmydata/test/mutable/test_filenode.py index de03afc5a..82f1e5072 100644 --- a/src/allmydata/test/mutable/test_filenode.py +++ b/src/allmydata/test/mutable/test_filenode.py @@ -1,18 +1,17 @@ """ Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - -from six.moves import cStringIO as StringIO +from io import StringIO from twisted.internet import defer, reactor -from twisted.trial import unittest +from ..common import AsyncBrokenTestCase +from testtools.matchers import ( + Equals, + Contains, + HasLength, + Is, + IsInstance, +) from allmydata import uri, client from allmydata.util.consumer import MemoryConsumer from allmydata.interfaces import SDMF_VERSION, MDMF_VERSION, DownloadStopped @@ -23,18 +22,20 @@ from allmydata.mutable.publish import MutableData from ..test_download import PausingConsumer, PausingAndStoppingConsumer, \ StoppingConsumer, ImmediatelyStoppingConsumer from .. import common_util as testutil +from ...crypto.rsa import create_signing_keypair from .util import ( FakeStorage, make_nodemaker_with_peers, make_peer, ) -class Filenode(unittest.TestCase, testutil.ShouldFailMixin): +class Filenode(AsyncBrokenTestCase, testutil.ShouldFailMixin): # this used to be in Publish, but we removed the limit. Some of # these tests test whether the new code correctly allows files # larger than the limit. 
OLD_MAX_SEGMENT_SIZE = 3500000 def setUp(self): + super(Filenode, self).setUp() self._storage = FakeStorage() self._peers = list( make_peer(self._storage, n) @@ -48,25 +49,35 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin): def test_create(self): d = self.nodemaker.create_mutable_file() def _created(n): - self.failUnless(isinstance(n, MutableFileNode)) - self.failUnlessEqual(n.get_storage_index(), n._storage_index) + self.assertThat(n, IsInstance(MutableFileNode)) + self.assertThat(n.get_storage_index(), Equals(n._storage_index)) sb = self.nodemaker.storage_broker peer0 = sorted(sb.get_all_serverids())[0] shnums = self._storage._peers[peer0].keys() - self.failUnlessEqual(len(shnums), 1) + self.assertThat(shnums, HasLength(1)) d.addCallback(_created) return d + async def test_create_with_keypair(self): + """ + An SDMF can be created using a given keypair. + """ + (priv, pub) = create_signing_keypair(2048) + node = await self.nodemaker.create_mutable_file(keypair=(pub, priv)) + self.assertThat( + (node.get_privkey(), node.get_pubkey()), + Equals((priv, pub)), + ) def test_create_mdmf(self): d = self.nodemaker.create_mutable_file(version=MDMF_VERSION) def _created(n): - self.failUnless(isinstance(n, MutableFileNode)) - self.failUnlessEqual(n.get_storage_index(), n._storage_index) + self.assertThat(n, IsInstance(MutableFileNode)) + self.assertThat(n.get_storage_index(), Equals(n._storage_index)) sb = self.nodemaker.storage_broker peer0 = sorted(sb.get_all_serverids())[0] shnums = self._storage._peers[peer0].keys() - self.failUnlessEqual(len(shnums), 1) + self.assertThat(shnums, HasLength(1)) d.addCallback(_created) return d @@ -80,7 +91,7 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin): d.addCallback(lambda ignored, v=v: self.nodemaker.create_mutable_file(version=v)) def _created(n): - self.failUnless(isinstance(n, MutableFileNode)) + self.assertThat(n, IsInstance(MutableFileNode)) self._node = n return n d.addCallback(_created) @@ -89,19 
+100,19 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin): d.addCallback(lambda ignored: self._node.download_best_version()) d.addCallback(lambda contents: - self.failUnlessEqual(contents, b"Contents" * 50000)) + self.assertThat(contents, Equals(b"Contents" * 50000))) return d def test_max_shares(self): self.nodemaker.default_encoding_parameters['n'] = 255 d = self.nodemaker.create_mutable_file(version=SDMF_VERSION) def _created(n): - self.failUnless(isinstance(n, MutableFileNode)) - self.failUnlessEqual(n.get_storage_index(), n._storage_index) + self.assertThat(n, IsInstance(MutableFileNode)) + self.assertThat(n.get_storage_index(), Equals(n._storage_index)) sb = self.nodemaker.storage_broker num_shares = sum([len(self._storage._peers[x].keys()) for x \ in sb.get_all_serverids()]) - self.failUnlessEqual(num_shares, 255) + self.assertThat(num_shares, Equals(255)) self._node = n return n d.addCallback(_created) @@ -113,7 +124,7 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin): self._node.download_best_version()) # ...and check to make sure everything went okay. 
d.addCallback(lambda contents: - self.failUnlessEqual(b"contents" * 50000, contents)) + self.assertThat(b"contents" * 50000, Equals(contents))) return d def test_max_shares_mdmf(self): @@ -121,12 +132,12 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin): self.nodemaker.default_encoding_parameters['n'] = 255 d = self.nodemaker.create_mutable_file(version=MDMF_VERSION) def _created(n): - self.failUnless(isinstance(n, MutableFileNode)) - self.failUnlessEqual(n.get_storage_index(), n._storage_index) + self.assertThat(n, IsInstance(MutableFileNode)) + self.assertThat(n.get_storage_index(), Equals(n._storage_index)) sb = self.nodemaker.storage_broker num_shares = sum([len(self._storage._peers[x].keys()) for x \ in sb.get_all_serverids()]) - self.failUnlessEqual(num_shares, 255) + self.assertThat(num_shares, Equals(255)) self._node = n return n d.addCallback(_created) @@ -135,20 +146,20 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin): d.addCallback(lambda ignored: self._node.download_best_version()) d.addCallback(lambda contents: - self.failUnlessEqual(contents, b"contents" * 50000)) + self.assertThat(contents, Equals(b"contents" * 50000))) return d def test_mdmf_filenode_cap(self): # Test that an MDMF filenode, once created, returns an MDMF URI. 
d = self.nodemaker.create_mutable_file(version=MDMF_VERSION) def _created(n): - self.failUnless(isinstance(n, MutableFileNode)) + self.assertThat(n, IsInstance(MutableFileNode)) cap = n.get_cap() - self.failUnless(isinstance(cap, uri.WriteableMDMFFileURI)) + self.assertThat(cap, IsInstance(uri.WriteableMDMFFileURI)) rcap = n.get_readcap() - self.failUnless(isinstance(rcap, uri.ReadonlyMDMFFileURI)) + self.assertThat(rcap, IsInstance(uri.ReadonlyMDMFFileURI)) vcap = n.get_verify_cap() - self.failUnless(isinstance(vcap, uri.MDMFVerifierURI)) + self.assertThat(vcap, IsInstance(uri.MDMFVerifierURI)) d.addCallback(_created) return d @@ -158,13 +169,13 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin): # filenode given an MDMF cap. d = self.nodemaker.create_mutable_file(version=MDMF_VERSION) def _created(n): - self.failUnless(isinstance(n, MutableFileNode)) + self.assertThat(n, IsInstance(MutableFileNode)) s = n.get_uri() - self.failUnless(s.startswith(b"URI:MDMF")) + self.assertTrue(s.startswith(b"URI:MDMF")) n2 = self.nodemaker.create_from_cap(s) - self.failUnless(isinstance(n2, MutableFileNode)) - self.failUnlessEqual(n.get_storage_index(), n2.get_storage_index()) - self.failUnlessEqual(n.get_uri(), n2.get_uri()) + self.assertThat(n2, IsInstance(MutableFileNode)) + self.assertThat(n.get_storage_index(), Equals(n2.get_storage_index())) + self.assertThat(n.get_uri(), Equals(n2.get_uri())) d.addCallback(_created) return d @@ -172,13 +183,13 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin): def test_create_from_mdmf_readcap(self): d = self.nodemaker.create_mutable_file(version=MDMF_VERSION) def _created(n): - self.failUnless(isinstance(n, MutableFileNode)) + self.assertThat(n, IsInstance(MutableFileNode)) s = n.get_readonly_uri() n2 = self.nodemaker.create_from_cap(s) - self.failUnless(isinstance(n2, MutableFileNode)) + self.assertThat(n2, IsInstance(MutableFileNode)) # Check that it's a readonly node - self.failUnless(n2.is_readonly()) + 
self.assertTrue(n2.is_readonly()) d.addCallback(_created) return d @@ -191,10 +202,10 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin): d = self.nodemaker.create_mutable_file(version=MDMF_VERSION) def _created(n): self.uri = n.get_uri() - self.failUnlessEqual(n._protocol_version, MDMF_VERSION) + self.assertThat(n._protocol_version, Equals(MDMF_VERSION)) n2 = self.nodemaker.create_from_cap(self.uri) - self.failUnlessEqual(n2._protocol_version, MDMF_VERSION) + self.assertThat(n2._protocol_version, Equals(MDMF_VERSION)) d.addCallback(_created) return d @@ -203,14 +214,14 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin): n = MutableFileNode(None, None, {"k": 3, "n": 10}, None) calls = [] def _callback(*args, **kwargs): - self.failUnlessEqual(args, (4,) ) - self.failUnlessEqual(kwargs, {"foo": 5}) + self.assertThat(args, Equals((4,))) + self.assertThat(kwargs, Equals({"foo": 5})) calls.append(1) return 6 d = n._do_serialized(_callback, 4, foo=5) def _check_callback(res): - self.failUnlessEqual(res, 6) - self.failUnlessEqual(calls, [1]) + self.assertThat(res, Equals(6)) + self.assertThat(calls, Equals([1])) d.addCallback(_check_callback) def _errback(): @@ -227,26 +238,26 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin): d.addCallback(lambda res: n.get_servermap(MODE_READ)) d.addCallback(lambda smap: smap.dump(StringIO())) d.addCallback(lambda sio: - self.failUnless("3-of-10" in sio.getvalue())) + self.assertTrue("3-of-10" in sio.getvalue())) d.addCallback(lambda res: n.overwrite(MutableData(b"contents 1"))) - d.addCallback(lambda res: self.failUnlessIdentical(res, None)) + d.addCallback(lambda res: self.assertThat(res, Is(None))) d.addCallback(lambda res: n.download_best_version()) - d.addCallback(lambda res: self.failUnlessEqual(res, b"contents 1")) + d.addCallback(lambda res: self.assertThat(res, Equals(b"contents 1"))) d.addCallback(lambda res: n.get_size_of_best_version()) d.addCallback(lambda size: - 
self.failUnlessEqual(size, len(b"contents 1"))) + self.assertThat(size, Equals(len(b"contents 1")))) d.addCallback(lambda res: n.overwrite(MutableData(b"contents 2"))) d.addCallback(lambda res: n.download_best_version()) - d.addCallback(lambda res: self.failUnlessEqual(res, b"contents 2")) + d.addCallback(lambda res: self.assertThat(res, Equals(b"contents 2"))) d.addCallback(lambda res: n.get_servermap(MODE_WRITE)) d.addCallback(lambda smap: n.upload(MutableData(b"contents 3"), smap)) d.addCallback(lambda res: n.download_best_version()) - d.addCallback(lambda res: self.failUnlessEqual(res, b"contents 3")) + d.addCallback(lambda res: self.assertThat(res, Equals(b"contents 3"))) d.addCallback(lambda res: n.get_servermap(MODE_ANYTHING)) d.addCallback(lambda smap: n.download_version(smap, smap.best_recoverable_version())) - d.addCallback(lambda res: self.failUnlessEqual(res, b"contents 3")) + d.addCallback(lambda res: self.assertThat(res, Equals(b"contents 3"))) # test a file that is large enough to overcome the # mapupdate-to-retrieve data caching (i.e. make the shares larger # than the default readsize, which is 2000 bytes). A 15kB file @@ -254,7 +265,7 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin): d.addCallback(lambda res: n.overwrite(MutableData(b"large size file" * 1000))) d.addCallback(lambda res: n.download_best_version()) d.addCallback(lambda res: - self.failUnlessEqual(res, b"large size file" * 1000)) + self.assertThat(res, Equals(b"large size file" * 1000))) return d d.addCallback(_created) return d @@ -268,7 +279,7 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin): n.get_servermap(MODE_READ)) def _then(servermap): dumped = servermap.dump(StringIO()) - self.failUnlessIn("3-of-10", dumped.getvalue()) + self.assertThat(dumped.getvalue(), Contains("3-of-10")) d.addCallback(_then) # Now overwrite the contents with some new contents. 
We want # to make them big enough to force the file to be uploaded @@ -280,7 +291,7 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin): d.addCallback(lambda ignored: n.download_best_version()) d.addCallback(lambda data: - self.failUnlessEqual(data, big_contents)) + self.assertThat(data, Equals(big_contents))) # Overwrite the contents again with some new contents. As # before, they need to be big enough to force multiple # segments, so that we make the downloader deal with @@ -292,7 +303,7 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin): d.addCallback(lambda ignored: n.download_best_version()) d.addCallback(lambda data: - self.failUnlessEqual(data, bigger_contents)) + self.assertThat(data, Equals(bigger_contents))) return d d.addCallback(_created) return d @@ -323,7 +334,7 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin): # Now we'll retrieve it into a pausing consumer. c = PausingConsumer() d = version.read(c) - d.addCallback(lambda ign: self.failUnlessEqual(c.size, len(data))) + d.addCallback(lambda ign: self.assertThat(c.size, Equals(len(data)))) c2 = PausingAndStoppingConsumer() d.addCallback(lambda ign: @@ -360,14 +371,14 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin): self.uri = node.get_uri() # also confirm that the cap has no extension fields pieces = self.uri.split(b":") - self.failUnlessEqual(len(pieces), 4) + self.assertThat(pieces, HasLength(4)) return node.overwrite(MutableData(b"contents1" * 100000)) def _then(ignored): node = self.nodemaker.create_from_cap(self.uri) return node.download_best_version() def _downloaded(data): - self.failUnlessEqual(data, b"contents1" * 100000) + self.assertThat(data, Equals(b"contents1" * 100000)) d.addCallback(_created) d.addCallback(_then) d.addCallback(_downloaded) @@ -397,11 +408,11 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin): d = self.nodemaker.create_mutable_file(upload1) def _created(n): d = n.download_best_version() - 
d.addCallback(lambda res: self.failUnlessEqual(res, b"contents 1")) + d.addCallback(lambda res: self.assertThat(res, Equals(b"contents 1"))) upload2 = MutableData(b"contents 2") d.addCallback(lambda res: n.overwrite(upload2)) d.addCallback(lambda res: n.download_best_version()) - d.addCallback(lambda res: self.failUnlessEqual(res, b"contents 2")) + d.addCallback(lambda res: self.assertThat(res, Equals(b"contents 2"))) return d d.addCallback(_created) return d @@ -415,15 +426,15 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin): def _created(n): d = n.download_best_version() d.addCallback(lambda data: - self.failUnlessEqual(data, initial_contents)) + self.assertThat(data, Equals(initial_contents))) uploadable2 = MutableData(initial_contents + b"foobarbaz") d.addCallback(lambda ignored: n.overwrite(uploadable2)) d.addCallback(lambda ignored: n.download_best_version()) d.addCallback(lambda data: - self.failUnlessEqual(data, initial_contents + - b"foobarbaz")) + self.assertThat(data, Equals(initial_contents + + b"foobarbaz"))) return d d.addCallback(_created) return d @@ -431,33 +442,33 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin): def test_create_with_initial_contents_function(self): data = b"initial contents" def _make_contents(n): - self.failUnless(isinstance(n, MutableFileNode)) + self.assertThat(n, IsInstance(MutableFileNode)) key = n.get_writekey() - self.failUnless(isinstance(key, bytes), key) - self.failUnlessEqual(len(key), 16) # AES key size + self.assertTrue(isinstance(key, bytes), key) + self.assertThat(key, HasLength(16)) # AES key size return MutableData(data) d = self.nodemaker.create_mutable_file(_make_contents) def _created(n): return n.download_best_version() d.addCallback(_created) - d.addCallback(lambda data2: self.failUnlessEqual(data2, data)) + d.addCallback(lambda data2: self.assertThat(data2, Equals(data))) return d def test_create_mdmf_with_initial_contents_function(self): data = b"initial contents" * 100000 def 
_make_contents(n): - self.failUnless(isinstance(n, MutableFileNode)) + self.assertThat(n, IsInstance(MutableFileNode)) key = n.get_writekey() - self.failUnless(isinstance(key, bytes), key) - self.failUnlessEqual(len(key), 16) + self.assertTrue(isinstance(key, bytes), key) + self.assertThat(key, HasLength(16)) return MutableData(data) d = self.nodemaker.create_mutable_file(_make_contents, version=MDMF_VERSION) d.addCallback(lambda n: n.download_best_version()) d.addCallback(lambda data2: - self.failUnlessEqual(data2, data)) + self.assertThat(data2, Equals(data))) return d @@ -476,7 +487,7 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin): d = n.get_servermap(MODE_READ) d.addCallback(lambda servermap: servermap.best_recoverable_version()) d.addCallback(lambda verinfo: - self.failUnlessEqual(verinfo[0], expected_seqnum, which)) + self.assertThat(verinfo[0], Equals(expected_seqnum), which)) return d def test_modify(self): @@ -513,36 +524,36 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin): def _created(n): d = n.modify(_modifier) d.addCallback(lambda res: n.download_best_version()) - d.addCallback(lambda res: self.failUnlessEqual(res, b"line1line2")) + d.addCallback(lambda res: self.assertThat(res, Equals(b"line1line2"))) d.addCallback(lambda res: self.failUnlessCurrentSeqnumIs(n, 2, "m")) d.addCallback(lambda res: n.modify(_non_modifier)) d.addCallback(lambda res: n.download_best_version()) - d.addCallback(lambda res: self.failUnlessEqual(res, b"line1line2")) + d.addCallback(lambda res: self.assertThat(res, Equals(b"line1line2"))) d.addCallback(lambda res: self.failUnlessCurrentSeqnumIs(n, 2, "non")) d.addCallback(lambda res: n.modify(_none_modifier)) d.addCallback(lambda res: n.download_best_version()) - d.addCallback(lambda res: self.failUnlessEqual(res, b"line1line2")) + d.addCallback(lambda res: self.assertThat(res, Equals(b"line1line2"))) d.addCallback(lambda res: self.failUnlessCurrentSeqnumIs(n, 2, "none")) d.addCallback(lambda res: 
self.shouldFail(ValueError, "error_modifier", None, n.modify, _error_modifier)) d.addCallback(lambda res: n.download_best_version()) - d.addCallback(lambda res: self.failUnlessEqual(res, b"line1line2")) + d.addCallback(lambda res: self.assertThat(res, Equals(b"line1line2"))) d.addCallback(lambda res: self.failUnlessCurrentSeqnumIs(n, 2, "err")) d.addCallback(lambda res: n.download_best_version()) - d.addCallback(lambda res: self.failUnlessEqual(res, b"line1line2")) + d.addCallback(lambda res: self.assertThat(res, Equals(b"line1line2"))) d.addCallback(lambda res: self.failUnlessCurrentSeqnumIs(n, 2, "big")) d.addCallback(lambda res: n.modify(_ucw_error_modifier)) - d.addCallback(lambda res: self.failUnlessEqual(len(calls), 2)) + d.addCallback(lambda res: self.assertThat(calls, HasLength(2))) d.addCallback(lambda res: n.download_best_version()) - d.addCallback(lambda res: self.failUnlessEqual(res, - b"line1line2line3")) + d.addCallback(lambda res: self.assertThat(res, + Equals(b"line1line2line3"))) d.addCallback(lambda res: self.failUnlessCurrentSeqnumIs(n, 3, "ucw")) def _reset_ucw_error_modifier(res): @@ -557,10 +568,10 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin): # will only be one larger than the previous test, not two (i.e. 4 # instead of 5). 
d.addCallback(lambda res: n.modify(_ucw_error_non_modifier)) - d.addCallback(lambda res: self.failUnlessEqual(len(calls), 2)) + d.addCallback(lambda res: self.assertThat(calls, HasLength(2))) d.addCallback(lambda res: n.download_best_version()) - d.addCallback(lambda res: self.failUnlessEqual(res, - b"line1line2line3")) + d.addCallback(lambda res: self.assertThat(res, + Equals(b"line1line2line3"))) d.addCallback(lambda res: self.failUnlessCurrentSeqnumIs(n, 4, "ucw")) d.addCallback(lambda res: n.modify(_toobig_modifier)) return d @@ -596,7 +607,7 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin): def _created(n): d = n.modify(_modifier) d.addCallback(lambda res: n.download_best_version()) - d.addCallback(lambda res: self.failUnlessEqual(res, b"line1line2")) + d.addCallback(lambda res: self.assertThat(res, Equals(b"line1line2"))) d.addCallback(lambda res: self.failUnlessCurrentSeqnumIs(n, 2, "m")) d.addCallback(lambda res: @@ -605,7 +616,7 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin): n.modify, _ucw_error_modifier, _backoff_stopper)) d.addCallback(lambda res: n.download_best_version()) - d.addCallback(lambda res: self.failUnlessEqual(res, b"line1line2")) + d.addCallback(lambda res: self.assertThat(res, Equals(b"line1line2"))) d.addCallback(lambda res: self.failUnlessCurrentSeqnumIs(n, 2, "stop")) def _reset_ucw_error_modifier(res): @@ -615,8 +626,8 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin): d.addCallback(lambda res: n.modify(_ucw_error_modifier, _backoff_pauser)) d.addCallback(lambda res: n.download_best_version()) - d.addCallback(lambda res: self.failUnlessEqual(res, - b"line1line2line3")) + d.addCallback(lambda res: self.assertThat(res, + Equals(b"line1line2line3"))) d.addCallback(lambda res: self.failUnlessCurrentSeqnumIs(n, 3, "pause")) d.addCallback(lambda res: @@ -625,8 +636,8 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin): n.modify, _always_ucw_error_modifier, giveuper.delay)) 
d.addCallback(lambda res: n.download_best_version()) - d.addCallback(lambda res: self.failUnlessEqual(res, - b"line1line2line3")) + d.addCallback(lambda res: self.assertThat(res, + Equals(b"line1line2line3"))) d.addCallback(lambda res: self.failUnlessCurrentSeqnumIs(n, 3, "giveup")) return d @@ -641,23 +652,23 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin): d.addCallback(lambda res: n.get_servermap(MODE_READ)) d.addCallback(lambda smap: smap.dump(StringIO())) d.addCallback(lambda sio: - self.failUnless("3-of-10" in sio.getvalue())) + self.assertTrue("3-of-10" in sio.getvalue())) d.addCallback(lambda res: n.overwrite(MutableData(b"contents 1"))) - d.addCallback(lambda res: self.failUnlessIdentical(res, None)) + d.addCallback(lambda res: self.assertThat(res, Is(None))) d.addCallback(lambda res: n.download_best_version()) - d.addCallback(lambda res: self.failUnlessEqual(res, b"contents 1")) + d.addCallback(lambda res: self.assertThat(res, Equals(b"contents 1"))) d.addCallback(lambda res: n.overwrite(MutableData(b"contents 2"))) d.addCallback(lambda res: n.download_best_version()) - d.addCallback(lambda res: self.failUnlessEqual(res, b"contents 2")) + d.addCallback(lambda res: self.assertThat(res, Equals(b"contents 2"))) d.addCallback(lambda res: n.get_servermap(MODE_WRITE)) d.addCallback(lambda smap: n.upload(MutableData(b"contents 3"), smap)) d.addCallback(lambda res: n.download_best_version()) - d.addCallback(lambda res: self.failUnlessEqual(res, b"contents 3")) + d.addCallback(lambda res: self.assertThat(res, Equals(b"contents 3"))) d.addCallback(lambda res: n.get_servermap(MODE_ANYTHING)) d.addCallback(lambda smap: n.download_version(smap, smap.best_recoverable_version())) - d.addCallback(lambda res: self.failUnlessEqual(res, b"contents 3")) + d.addCallback(lambda res: self.assertThat(res, Equals(b"contents 3"))) return d d.addCallback(_created) return d @@ -673,14 +684,14 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin): return 
n.get_servermap(MODE_READ) d.addCallback(_created) d.addCallback(lambda ignored: - self.failUnlessEqual(self.n.get_size(), 0)) + self.assertThat(self.n.get_size(), Equals(0))) d.addCallback(lambda ignored: self.n.overwrite(MutableData(b"foobarbaz"))) d.addCallback(lambda ignored: - self.failUnlessEqual(self.n.get_size(), 9)) + self.assertThat(self.n.get_size(), Equals(9))) d.addCallback(lambda ignored: self.nodemaker.create_mutable_file(MutableData(b"foobarbaz"))) d.addCallback(_created) d.addCallback(lambda ignored: - self.failUnlessEqual(self.n.get_size(), 9)) + self.assertThat(self.n.get_size(), Equals(9))) return d diff --git a/src/allmydata/test/mutable/test_interoperability.py b/src/allmydata/test/mutable/test_interoperability.py index 5d7414907..deb20bb17 100644 --- a/src/allmydata/test/mutable/test_interoperability.py +++ b/src/allmydata/test/mutable/test_interoperability.py @@ -1,24 +1,17 @@ """ Ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 import os, base64 -from twisted.trial import unittest +from ..common import AsyncTestCase +from testtools.matchers import HasLength from allmydata import uri from allmydata.storage.common import storage_index_to_dir from allmydata.util import fileutil from .. 
import common_util as testutil from ..no_network import GridTestMixin -class Interoperability(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin): +class Interoperability(GridTestMixin, AsyncTestCase, testutil.ShouldFailMixin): sdmf_old_shares = {} sdmf_old_shares[0] = b"VGFob2UgbXV0YWJsZSBjb250YWluZXIgdjEKdQlEA47ESLbTdKdpLJXCpBxd5OH239tl5hvAiz1dvGdE5rIOpf8cbfxbPcwNF+Y5dM92uBVbmV6KAAAAAAAAB/wAAAAAAAAJ0AAAAAFOWSw7jSx7WXzaMpdleJYXwYsRCV82jNA5oex9m2YhXSnb2POh+vvC1LE1NAfRc9GOb2zQG84Xdsx1Jub2brEeKkyt0sRIttN0p2kslcKkHF3k4fbf22XmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABamJprL6ecrsOoFKdrXUmWveLq8nzEGDOjFnyK9detI3noX3uyK2MwSnFdAfyN0tuAwoAAAAAAAAAFQAAAAAAAAAVAAABjwAAAo8AAAMXAAADNwAAAAAAAAM+AAAAAAAAB/wwggEgMA0GCSqGSIb3DQEBAQUAA4IBDQAwggEIAoIBAQC1IkainlJF12IBXBQdpRK1zXB7a26vuEYqRmQM09YjC6sQjCs0F2ICk8n9m/2Kw4l16eIEboB2Au9pODCE+u/dEAakEFh4qidTMn61rbGUbsLK8xzuWNW22ezzz9/nPia0HDrulXt51/FYtfnnAuD1RJGXJv/8tDllE9FL/18TzlH4WuB6Fp8FTgv7QdbZAfWJHDGFIpVCJr1XxOCsSZNFJIqGwZnD2lsChiWw5OJDbKd8otqN1hIbfHyMyfMOJ/BzRzvZXaUt4Dv5nf93EmQDWClxShRwpuX/NkZ5B2K9OFonFTbOCexm/MjMAdCBqebKKaiHFkiknUCn9eJQpZ5bAgERgV50VKj+AVTDfgTpqfO2vfo4wrufi6ZBb8QV7hllhUFBjYogQ9C96dnS7skv0s+cqFuUjwMILr5/rsbEmEMGvl0T0ytyAbtlXuowEFVj/YORNknM4yjY72YUtEPTlMpk0Cis7aIgTvu5qWMPER26PMApZuRqiwRsGIkaJIvOVOTHHjFYe3/YzdMkc7OZtqRMfQLtwVl2/zKQQV8b/a9vaT6q3mRLRd4P3esaAFe/+7sR/t+9tmB+a8kxtKM6kmaVQJMbXJZ4aoHGfeLX0m35Rcvu2Bmph7QfSDjk/eaE3q55zYSoGWShmlhlw4Kwg84sMuhmcVhLvo0LovR8bKmbdgACtTh7+7gs/l5w1lOkgbF6w7rkXLNslK7L2KYF4SPFLUcABOOLy8EETxh7h7/z9d62EiPu9CNpRrCOLxUhn+JUS+DuAAhgcAb/adrQFrhlrRNoRpvjDuxmFebA4F0qCyqWssm61AAQ/EX4eC/1+hGOQ/h4EiKUkqxdsfzdcPlDvd11SGWZ0VHsUclZChTzuBAU2zLTXm+cG8IFhO50ly6Ey/DB44Nt
MKVaVzO0nU8DE0Wua7Lx6Bnad5n91qmHAnwSEJE5YIhQM634omd6cq9Wk4seJCUIn+ucoknrpxp0IR9QMxpKSMRHRUg2K8ZegnY3YqFunRZKCfsq9ufQEKgjZN12AFqi551KPBdn4/3V5HK6xTv0P4robSsE/BvuIfByvRf/W7ZrDx+CFC4EEcsBOACOZCrkhhqd5TkYKbe9RA+vs56+9N5qZGurkxcoKviiyEncxvTuShD65DK/6x6kMDMgQv/EdZDI3x9GtHTnRBYXwDGnPJ19w+q2zC3e2XarbxTGYQIPEC5mYx0gAA0sbjf018NGfwBhl6SB54iGsa8uLvR3jHv6OSRJgwxL6j7P0Ts4Hv2EtO12P0Lv21pwi3JC1O/WviSrKCvrQD5lMHL9Uym3hwFi2zu0mqwZvxOAbGy7kfOPXkLYKOHTZLthzKj3PsdjeceWBfYIvPGKYcd6wDr36d1aXSYS4IWeApTS2AQ2lu0DUcgSefAvsA8NkgOklvJY1cjTMSg6j6cxQo48Bvl8RAWGLbr4h2S/8KwDGxwLsSv0Gop/gnFc3GzCsmL0EkEyHHWkCA8YRXCghfW80KLDV495ff7yF5oiwK56GniqowZ3RG9Jxp5MXoJQgsLV1VMQFMAmsY69yz8eoxRH3wl9L0dMyndLulhWWzNwPMQ2I0yAWdzA/pksVmwTJTFenB3MHCiWc5rEwJ3yofe6NZZnZQrYyL9r1TNnVwfTwRUiykPiLSk4x9Mi6DX7RamDAxc8u3gDVfjPsTOTagBOEGUWlGAL54KE/E6sgCQ5DEAt12chk8AxbjBFLPgV+/idrzS0lZHOL+IVBI9D0i3Bq1yZcSIqcjZB0M3IbxbPm4gLAYOWEiTUN2ecsEHHg9nt6rhgffVoqSbCCFPbpC0xf7WOC3+BQORIZECOCC7cUAciXq3xn+GuxpFE40RWRJeKAK7bBQ21X89ABIXlQFkFddZ9kRvlZ2Pnl0oeF+2pjnZu0Yc2czNfZEQF2P7BKIdLrgMgxG89snxAY8qAYTCKyQw6xTG87wkjDcpy1wzsZLP3WsOuO7cAm7b27xU0jRKq8Cw4d1hDoyRG+RdS53F8RFJzVMaNNYgxU2tfRwUvXpTRXiOheeRVvh25+YGVnjakUXjx/dSDnOw4ETHGHD+7styDkeSfc3BdSZxswzc6OehgMI+xsCxeeRym15QUm9hxvg8X7Bfz/0WulgFwgzrm11TVynZYOmvyHpiZKoqQyQyKahIrfhwuchCr7lMsZ4a+umIkNkKxCLZnI+T7jd+eGFMgKItjz3kTTxRl3IhaJG3LbPmwRUJynMxQKdMi4Uf0qy0U7+i8hIJ9m50QXc+3tw2bwDSbx22XYJ9Wf14gxx5G5SPTb1JVCbhe4fxNt91xIxCow2zk62tzbYfRe6dfmDmgYHkv2PIEtMJZK8iKLDjFfu2ZUxsKT2A5g1q17og6o9MeXeuFS3mzJXJYFQZd+3UzlFR9qwkFkby9mg5y4XSeMvRLOHPt/H/r5SpEqBE6a9MadZYt61FBV152CUEzd43ihXtrAa0XH9HdsiySBcWI1SpM3mv9rRP0DiLjMUzHw/K1D8TE2f07zW4t/9kvE11tFj/NpICixQAAAAA=" sdmf_old_shares[1] = 
b"VGFob2UgbXV0YWJsZSBjb250YWluZXIgdjEKdQlEA47ESLbTdKdpLJXCpBxd5OH239tl5hvAiz1dvGdE5rIOpf8cbfxbPcwNF+Y5dM92uBVbmV6KAAAAAAAAB/wAAAAAAAAJ0AAAAAFOWSw7jSx7WXzaMpdleJYXwYsRCV82jNA5oex9m2YhXSnb2POh+vvC1LE1NAfRc9GOb2zQG84Xdsx1Jub2brEeKkyt0sRIttN0p2kslcKkHF3k4fbf22XmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABamJprL6ecrsOoFKdrXUmWveLq8nzEGDOjFnyK9detI3noX3uyK2MwSnFdAfyN0tuAwoAAAAAAAAAFQAAAAAAAAAVAAABjwAAAo8AAAMXAAADNwAAAAAAAAM+AAAAAAAAB/wwggEgMA0GCSqGSIb3DQEBAQUAA4IBDQAwggEIAoIBAQC1IkainlJF12IBXBQdpRK1zXB7a26vuEYqRmQM09YjC6sQjCs0F2ICk8n9m/2Kw4l16eIEboB2Au9pODCE+u/dEAakEFh4qidTMn61rbGUbsLK8xzuWNW22ezzz9/nPia0HDrulXt51/FYtfnnAuD1RJGXJv/8tDllE9FL/18TzlH4WuB6Fp8FTgv7QdbZAfWJHDGFIpVCJr1XxOCsSZNFJIqGwZnD2lsChiWw5OJDbKd8otqN1hIbfHyMyfMOJ/BzRzvZXaUt4Dv5nf93EmQDWClxShRwpuX/NkZ5B2K9OFonFTbOCexm/MjMAdCBqebKKaiHFkiknUCn9eJQpZ5bAgERgV50VKj+AVTDfgTpqfO2vfo4wrufi6ZBb8QV7hllhUFBjYogQ9C96dnS7skv0s+cqFuUjwMILr5/rsbEmEMGvl0T0ytyAbtlXuowEFVj/YORNknM4yjY72YUtEPTlMpk0Cis7aIgTvu5qWMPER26PMApZuRqiwRsGIkaJIvOVOTHHjFYe3/YzdMkc7OZtqRMfQLtwVl2/zKQQV8b/a9vaT6q3mRLRd4P3esaAFe/+7sR/t+9tmB+a8kxtKM6kmaVQJMbXJZ4aoHGfeLX0m35Rcvu2Bmph7QfSDjk/eaE3q55zYSoGWShmlhlw4Kwg84sMuhmcVhLvo0LovR8bKmbdgACtTh7+7gs/l5w1lOkgbF6w7rkXLNslK7L2KYF4SPFLUcABOOLy8EETxh7h7/z9d62EiPu9CNpRrCOLxUhn+JUS+DuAAhgcAb/adrQFrhlrRNoRpvjDuxmFebA4F0qCyqWssm61AAP7FHJWQoU87gQFNsy015vnBvCBYTudJcuhMvwweODbTD8Rfh4L/X6EY5D+HgSIpSSrF2x/N1w+UO93XVIZZnRUeePDXEwhqYDE0Wua7Lx6Bnad5n91qmHAnwSEJE5YIhQM634omd6cq9Wk4seJCUIn+ucoknrpxp0IR9QMxpKSMRHRUg2K8ZegnY3YqFunRZKCfsq9ufQEKgjZN12AFqi551KPBdn4/3V5HK6xTv0P4robSsE/BvuIfByvRf/W7ZrDx+CFC4EEcsBOACOZCrkhhqd5TkYKbe9RA+vs56+9N5qZGurkxcoKviiyEncxvTuShD65DK/6x6kMDMgQv/EdZDI3x9GtHTnRBYXwDGnPJ
19w+q2zC3e2XarbxTGYQIPEC5mYx0gAA0sbjf018NGfwBhl6SB54iGsa8uLvR3jHv6OSRJgwxL6j7P0Ts4Hv2EtO12P0Lv21pwi3JC1O/WviSrKCvrQD5lMHL9Uym3hwFi2zu0mqwZvxOAbGy7kfOPXkLYKOHTZLthzKj3PsdjeceWBfYIvPGKYcd6wDr36d1aXSYS4IWeApTS2AQ2lu0DUcgSefAvsA8NkgOklvJY1cjTMSg6j6cxQo48Bvl8RAWGLbr4h2S/8KwDGxwLsSv0Gop/gnFc3GzCsmL0EkEyHHWkCA8YRXCghfW80KLDV495ff7yF5oiwK56GniqowZ3RG9Jxp5MXoJQgsLV1VMQFMAmsY69yz8eoxRH3wl9L0dMyndLulhWWzNwPMQ2I0yAWdzA/pksVmwTJTFenB3MHCiWc5rEwJ3yofe6NZZnZQrYyL9r1TNnVwfTwRUiykPiLSk4x9Mi6DX7RamDAxc8u3gDVfjPsTOTagBOEGUWlGAL54KE/E6sgCQ5DEAt12chk8AxbjBFLPgV+/idrzS0lZHOL+IVBI9D0i3Bq1yZcSIqcjZB0M3IbxbPm4gLAYOWEiTUN2ecsEHHg9nt6rhgffVoqSbCCFPbpC0xf7WOC3+BQORIZECOCC7cUAciXq3xn+GuxpFE40RWRJeKAK7bBQ21X89ABIXlQFkFddZ9kRvlZ2Pnl0oeF+2pjnZu0Yc2czNfZEQF2P7BKIdLrgMgxG89snxAY8qAYTCKyQw6xTG87wkjDcpy1wzsZLP3WsOuO7cAm7b27xU0jRKq8Cw4d1hDoyRG+RdS53F8RFJzVMaNNYgxU2tfRwUvXpTRXiOheeRVvh25+YGVnjakUXjx/dSDnOw4ETHGHD+7styDkeSfc3BdSZxswzc6OehgMI+xsCxeeRym15QUm9hxvg8X7Bfz/0WulgFwgzrm11TVynZYOmvyHpiZKoqQyQyKahIrfhwuchCr7lMsZ4a+umIkNkKxCLZnI+T7jd+eGFMgKItjz3kTTxRl3IhaJG3LbPmwRUJynMxQKdMi4Uf0qy0U7+i8hIJ9m50QXc+3tw2bwDSbx22XYJ9Wf14gxx5G5SPTb1JVCbhe4fxNt91xIxCow2zk62tzbYfRe6dfmDmgYHkv2PIEtMJZK8iKLDjFfu2ZUxsKT2A5g1q17og6o9MeXeuFS3mzJXJYFQZd+3UzlFR9qwkFkby9mg5y4XSeMvRLOHPt/H/r5SpEqBE6a9MadZYt61FBV152CUEzd43ihXtrAa0XH9HdsiySBcWI1SpM3mv9rRP0DiLjMUzHw/K1D8TE2f07zW4t/9kvE11tFj/NpICixQAAAAA=" @@ -53,7 +46,7 @@ class Interoperability(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixi sharedata) # ...and verify that the shares are there. 
shares = self.find_uri_shares(self.sdmf_old_cap) - assert len(shares) == 10 + self.assertThat(shares, HasLength(10)) def test_new_downloader_can_read_old_shares(self): self.basedir = "mutable/Interoperability/new_downloader_can_read_old_shares" @@ -62,5 +55,5 @@ class Interoperability(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixi nm = self.g.clients[0].nodemaker n = nm.create_from_cap(self.sdmf_old_cap) d = n.download_best_version() - d.addCallback(self.failUnlessEqual, self.sdmf_old_contents) + d.addCallback(self.assertEqual, self.sdmf_old_contents) return d diff --git a/src/allmydata/test/mutable/test_multiple_encodings.py b/src/allmydata/test/mutable/test_multiple_encodings.py index 12c5be051..7f9699a07 100644 --- a/src/allmydata/test/mutable/test_multiple_encodings.py +++ b/src/allmydata/test/mutable/test_multiple_encodings.py @@ -1,16 +1,9 @@ """ Ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - -from twisted.trial import unittest +from ..common import AsyncTestCase +from testtools.matchers import Equals from allmydata.interfaces import SDMF_VERSION from allmydata.monitor import Monitor from foolscap.logging import log @@ -20,8 +13,9 @@ from allmydata.mutable.servermap import ServerMap, ServermapUpdater from ..common_util import DevNullDictionary from .util import FakeStorage, make_nodemaker -class MultipleEncodings(unittest.TestCase): +class MultipleEncodings(AsyncTestCase): def setUp(self): + super(MultipleEncodings, self).setUp() self.CONTENTS = b"New contents go here" self.uploadable = MutableData(self.CONTENTS) self._storage = FakeStorage() @@ -159,6 +153,6 @@ class MultipleEncodings(unittest.TestCase): 
d.addCallback(lambda res: fn3.download_best_version()) def _retrieved(new_contents): # the current specified behavior is "first version recoverable" - self.failUnlessEqual(new_contents, contents1) + self.assertThat(new_contents, Equals(contents1)) d.addCallback(_retrieved) return d diff --git a/src/allmydata/test/mutable/test_multiple_versions.py b/src/allmydata/test/mutable/test_multiple_versions.py index 460cde4b3..2062b01d4 100644 --- a/src/allmydata/test/mutable/test_multiple_versions.py +++ b/src/allmydata/test/mutable/test_multiple_versions.py @@ -1,24 +1,18 @@ """ Ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - -from twisted.trial import unittest +from ..common import AsyncTestCase +from testtools.matchers import Equals, HasLength from allmydata.monitor import Monitor from allmydata.mutable.common import MODE_CHECK, MODE_READ from .util import PublishMixin, CheckerMixin -class MultipleVersions(unittest.TestCase, PublishMixin, CheckerMixin): +class MultipleVersions(AsyncTestCase, PublishMixin, CheckerMixin): def setUp(self): + super(MultipleVersions, self).setUp() return self.publish_multiple() def test_multiple_versions(self): @@ -26,7 +20,7 @@ class MultipleVersions(unittest.TestCase, PublishMixin, CheckerMixin): # should get the latest one self._set_versions(dict([(i,2) for i in (0,2,4,6,8)])) d = self._fn.download_best_version() - d.addCallback(lambda res: self.failUnlessEqual(res, self.CONTENTS[4])) + d.addCallback(lambda res: self.assertThat(res, Equals(self.CONTENTS[4]))) # and the checker should report problems d.addCallback(lambda res: self._fn.check(Monitor())) d.addCallback(self.check_bad, "test_multiple_versions") 
@@ -35,23 +29,23 @@ class MultipleVersions(unittest.TestCase, PublishMixin, CheckerMixin): d.addCallback(lambda res: self._set_versions(dict([(i,2) for i in range(10)]))) d.addCallback(lambda res: self._fn.download_best_version()) - d.addCallback(lambda res: self.failUnlessEqual(res, self.CONTENTS[2])) + d.addCallback(lambda res: self.assertThat(res, Equals(self.CONTENTS[2]))) # if exactly one share is at version 3, we should still get v2 d.addCallback(lambda res: self._set_versions({0:3})) d.addCallback(lambda res: self._fn.download_best_version()) - d.addCallback(lambda res: self.failUnlessEqual(res, self.CONTENTS[2])) + d.addCallback(lambda res: self.assertThat(res, Equals(self.CONTENTS[2]))) # but the servermap should see the unrecoverable version. This # depends upon the single newer share being queried early. d.addCallback(lambda res: self._fn.get_servermap(MODE_READ)) def _check_smap(smap): - self.failUnlessEqual(len(smap.unrecoverable_versions()), 1) + self.assertThat(smap.unrecoverable_versions(), HasLength(1)) newer = smap.unrecoverable_newer_versions() - self.failUnlessEqual(len(newer), 1) + self.assertThat(newer, HasLength(1)) verinfo, health = list(newer.items())[0] - self.failUnlessEqual(verinfo[0], 4) - self.failUnlessEqual(health, (1,3)) - self.failIf(smap.needs_merge()) + self.assertThat(verinfo[0], Equals(4)) + self.assertThat(health, Equals((1,3))) + self.assertThat(smap.needs_merge(), Equals(False)) d.addCallback(_check_smap) # if we have a mix of two parallel versions (s4a and s4b), we could # recover either @@ -60,13 +54,13 @@ class MultipleVersions(unittest.TestCase, PublishMixin, CheckerMixin): 1:4,3:4,5:4,7:4,9:4})) d.addCallback(lambda res: self._fn.get_servermap(MODE_READ)) def _check_smap_mixed(smap): - self.failUnlessEqual(len(smap.unrecoverable_versions()), 0) + self.assertThat(smap.unrecoverable_versions(), HasLength(0)) newer = smap.unrecoverable_newer_versions() - self.failUnlessEqual(len(newer), 0) - 
self.failUnless(smap.needs_merge()) + self.assertThat(newer, HasLength(0)) + self.assertTrue(smap.needs_merge()) d.addCallback(_check_smap_mixed) d.addCallback(lambda res: self._fn.download_best_version()) - d.addCallback(lambda res: self.failUnless(res == self.CONTENTS[3] or + d.addCallback(lambda res: self.assertTrue(res == self.CONTENTS[3] or res == self.CONTENTS[4])) return d @@ -86,12 +80,12 @@ class MultipleVersions(unittest.TestCase, PublishMixin, CheckerMixin): d = self._fn.modify(_modify) d.addCallback(lambda res: self._fn.download_best_version()) expected = self.CONTENTS[2] + b" modified" - d.addCallback(lambda res: self.failUnlessEqual(res, expected)) + d.addCallback(lambda res: self.assertThat(res, Equals(expected))) # and the servermap should indicate that the outlier was replaced too d.addCallback(lambda res: self._fn.get_servermap(MODE_CHECK)) def _check_smap(smap): - self.failUnlessEqual(smap.highest_seqnum(), 5) - self.failUnlessEqual(len(smap.unrecoverable_versions()), 0) - self.failUnlessEqual(len(smap.recoverable_versions()), 1) + self.assertThat(smap.highest_seqnum(), Equals(5)) + self.assertThat(smap.unrecoverable_versions(), HasLength(0)) + self.assertThat(smap.recoverable_versions(), HasLength(1)) d.addCallback(_check_smap) return d diff --git a/src/allmydata/test/mutable/test_problems.py b/src/allmydata/test/mutable/test_problems.py index 86a367596..d94668ff4 100644 --- a/src/allmydata/test/mutable/test_problems.py +++ b/src/allmydata/test/mutable/test_problems.py @@ -1,17 +1,10 @@ """ Ported to Python 3. 
""" -from __future__ import print_function -from __future__ import absolute_import -from __future__ import division -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 import os, base64 -from twisted.trial import unittest +from ..common import AsyncTestCase +from testtools.matchers import HasLength from twisted.internet import defer from foolscap.logging import log from allmydata import uri @@ -25,7 +18,6 @@ from allmydata.mutable.common import \ NotEnoughServersError from allmydata.mutable.publish import MutableData from allmydata.storage.common import storage_index_to_dir -from ..common import TEST_RSA_KEY_SIZE from ..no_network import GridTestMixin from .. import common_util as testutil from ..common_util import DevNullDictionary @@ -61,7 +53,7 @@ class FirstServerGetsDeleted(object): return (True, {}) return retval -class Problems(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin): +class Problems(GridTestMixin, AsyncTestCase, testutil.ShouldFailMixin): def do_publish_surprise(self, version): self.basedir = "mutable/Problems/test_publish_surprise_%s" % version self.set_up_grid() @@ -198,8 +190,8 @@ class Problems(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin): def _overwritten_again(smap): # Make sure that all shares were updated by making sure that # there aren't any other versions in the sharemap. 
- self.failUnlessEqual(len(smap.recoverable_versions()), 1) - self.failUnlessEqual(len(smap.unrecoverable_versions()), 0) + self.assertThat(smap.recoverable_versions(), HasLength(1)) + self.assertThat(smap.unrecoverable_versions(), HasLength(0)) d.addCallback(_overwritten_again) return d @@ -218,7 +210,7 @@ class Problems(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin): # use #467 static-server-selection to disable permutation and force # the choice of server for share[0]. - d = nm.key_generator.generate(TEST_RSA_KEY_SIZE) + d = nm.key_generator.generate() def _got_key(keypair): (pubkey, privkey) = keypair nm.key_generator = SameKeyGenerator(pubkey, privkey) @@ -240,7 +232,7 @@ class Problems(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin): # that ought to work def _got_node(n): d = n.download_best_version() - d.addCallback(lambda res: self.failUnlessEqual(res, b"contents 1")) + d.addCallback(lambda res: self.assertEquals(res, b"contents 1")) # now break the second peer def _break_peer1(res): self.g.break_server(self.server1.get_serverid()) @@ -248,7 +240,7 @@ class Problems(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin): d.addCallback(lambda res: n.overwrite(MutableData(b"contents 2"))) # that ought to work too d.addCallback(lambda res: n.download_best_version()) - d.addCallback(lambda res: self.failUnlessEqual(res, b"contents 2")) + d.addCallback(lambda res: self.assertEquals(res, b"contents 2")) def _explain_error(f): print(f) if f.check(NotEnoughServersError): @@ -280,7 +272,7 @@ class Problems(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin): d = nm.create_mutable_file(MutableData(b"contents 1")) def _created(n): d = n.download_best_version() - d.addCallback(lambda res: self.failUnlessEqual(res, b"contents 1")) + d.addCallback(lambda res: self.assertEquals(res, b"contents 1")) # now break one of the remaining servers def _break_second_server(res): self.g.break_server(peerids[1]) @@ -288,7 +280,7 @@ class 
Problems(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin): d.addCallback(lambda res: n.overwrite(MutableData(b"contents 2"))) # that ought to work too d.addCallback(lambda res: n.download_best_version()) - d.addCallback(lambda res: self.failUnlessEqual(res, b"contents 2")) + d.addCallback(lambda res: self.assertEquals(res, b"contents 2")) return d d.addCallback(_created) return d @@ -419,7 +411,7 @@ class Problems(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin): return self._node.download_version(servermap, ver) d.addCallback(_then) d.addCallback(lambda data: - self.failUnlessEqual(data, CONTENTS)) + self.assertEquals(data, CONTENTS)) return d def test_1654(self): diff --git a/src/allmydata/test/mutable/test_repair.py b/src/allmydata/test/mutable/test_repair.py index fb1caa974..dd2b435e5 100644 --- a/src/allmydata/test/mutable/test_repair.py +++ b/src/allmydata/test/mutable/test_repair.py @@ -1,16 +1,9 @@ """ Ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - -from twisted.trial import unittest +from ..common import AsyncTestCase +from testtools.matchers import Equals, HasLength from allmydata.interfaces import IRepairResults, ICheckAndRepairResults from allmydata.monitor import Monitor from allmydata.mutable.common import MODE_CHECK @@ -19,7 +12,7 @@ from allmydata.mutable.repairer import MustForceRepairError from ..common import ShouldFailMixin from .util import PublishMixin -class Repair(unittest.TestCase, PublishMixin, ShouldFailMixin): +class Repair(AsyncTestCase, PublishMixin, ShouldFailMixin): def get_shares(self, s): all_shares = {} # maps (peerid, shnum) to share data @@ -40,8 +33,8 @@ class 
Repair(unittest.TestCase, PublishMixin, ShouldFailMixin): d.addCallback(lambda res: self._fn.check(Monitor())) d.addCallback(lambda check_results: self._fn.repair(check_results)) def _check_results(rres): - self.failUnless(IRepairResults.providedBy(rres)) - self.failUnless(rres.get_successful()) + self.assertThat(IRepairResults.providedBy(rres), Equals(True)) + self.assertThat(rres.get_successful(), Equals(True)) # TODO: examine results self.copy_shares() @@ -50,11 +43,11 @@ class Repair(unittest.TestCase, PublishMixin, ShouldFailMixin): new_shares = self.old_shares[1] # TODO: this really shouldn't change anything. When we implement # a "minimal-bandwidth" repairer", change this test to assert: - #self.failUnlessEqual(new_shares, initial_shares) + #self.assertThat(new_shares, Equals(initial_shares)) # all shares should be in the same place as before - self.failUnlessEqual(set(initial_shares.keys()), - set(new_shares.keys())) + self.assertThat(set(initial_shares.keys()), + Equals(set(new_shares.keys()))) # but they should all be at a newer seqnum. The IV will be # different, so the roothash will be too. 
for key in initial_shares: @@ -70,19 +63,19 @@ class Repair(unittest.TestCase, PublishMixin, ShouldFailMixin): IV1, k1, N1, segsize1, datalen1, o1) = unpack_header(new_shares[key]) - self.failUnlessEqual(version0, version1) - self.failUnlessEqual(seqnum0+1, seqnum1) - self.failUnlessEqual(k0, k1) - self.failUnlessEqual(N0, N1) - self.failUnlessEqual(segsize0, segsize1) - self.failUnlessEqual(datalen0, datalen1) + self.assertThat(version0, Equals(version1)) + self.assertThat(seqnum0+1, Equals(seqnum1)) + self.assertThat(k0, Equals(k1)) + self.assertThat(N0, Equals(N1)) + self.assertThat(segsize0, Equals(segsize1)) + self.assertThat(datalen0, Equals(datalen1)) d.addCallback(_check_results) return d def failIfSharesChanged(self, ignored=None): old_shares = self.old_shares[-2] current_shares = self.old_shares[-1] - self.failUnlessEqual(old_shares, current_shares) + self.assertThat(old_shares, Equals(current_shares)) def _test_whether_repairable(self, publisher, nshares, expected_result): @@ -96,12 +89,12 @@ class Repair(unittest.TestCase, PublishMixin, ShouldFailMixin): d.addCallback(_delete_some_shares) d.addCallback(lambda ign: self._fn.check(Monitor())) def _check(cr): - self.failIf(cr.is_healthy()) - self.failUnlessEqual(cr.is_recoverable(), expected_result) + self.assertThat(cr.is_healthy(), Equals(False)) + self.assertThat(cr.is_recoverable(), Equals(expected_result)) return cr d.addCallback(_check) d.addCallback(lambda check_results: self._fn.repair(check_results)) - d.addCallback(lambda crr: self.failUnlessEqual(crr.get_successful(), expected_result)) + d.addCallback(lambda crr: self.assertThat(crr.get_successful(), Equals(expected_result))) return d def test_unrepairable_0shares(self): @@ -136,7 +129,7 @@ class Repair(unittest.TestCase, PublishMixin, ShouldFailMixin): del shares[peerid][shnum] d.addCallback(_delete_some_shares) d.addCallback(lambda ign: self._fn.check_and_repair(Monitor())) - d.addCallback(lambda crr: 
self.failUnlessEqual(crr.get_repair_successful(), expected_result)) + d.addCallback(lambda crr: self.assertThat(crr.get_repair_successful(), Equals(expected_result))) return d def test_unrepairable_0shares_checkandrepair(self): @@ -181,13 +174,13 @@ class Repair(unittest.TestCase, PublishMixin, ShouldFailMixin): self._fn.repair(check_results, force=True)) # this should give us 10 shares of the highest roothash def _check_repair_results(rres): - self.failUnless(rres.get_successful()) + self.assertThat(rres.get_successful(), Equals(True)) pass # TODO d.addCallback(_check_repair_results) d.addCallback(lambda res: self._fn.get_servermap(MODE_CHECK)) def _check_smap(smap): - self.failUnlessEqual(len(smap.recoverable_versions()), 1) - self.failIf(smap.unrecoverable_versions()) + self.assertThat(smap.recoverable_versions(), HasLength(1)) + self.assertThat(smap.unrecoverable_versions(), HasLength(0)) # now, which should have won? roothash_s4a = self.get_roothash_for(3) roothash_s4b = self.get_roothash_for(4) @@ -196,9 +189,9 @@ class Repair(unittest.TestCase, PublishMixin, ShouldFailMixin): else: expected_contents = self.CONTENTS[3] new_versionid = smap.best_recoverable_version() - self.failUnlessEqual(new_versionid[0], 5) # seqnum 5 + self.assertThat(new_versionid[0], Equals(5)) # seqnum 5 d2 = self._fn.download_version(smap, new_versionid) - d2.addCallback(self.failUnlessEqual, expected_contents) + d2.addCallback(self.assertEqual, expected_contents) return d2 d.addCallback(_check_smap) return d @@ -216,19 +209,19 @@ class Repair(unittest.TestCase, PublishMixin, ShouldFailMixin): d.addCallback(lambda check_results: self._fn.repair(check_results)) # this should give us 10 shares of v3 def _check_repair_results(rres): - self.failUnless(rres.get_successful()) + self.assertThat(rres.get_successful(), Equals(True)) pass # TODO d.addCallback(_check_repair_results) d.addCallback(lambda res: self._fn.get_servermap(MODE_CHECK)) def _check_smap(smap): - 
self.failUnlessEqual(len(smap.recoverable_versions()), 1) - self.failIf(smap.unrecoverable_versions()) + self.assertThat(smap.recoverable_versions(), HasLength(1)) + self.assertThat(smap.unrecoverable_versions(), HasLength(0)) # now, which should have won? expected_contents = self.CONTENTS[3] new_versionid = smap.best_recoverable_version() - self.failUnlessEqual(new_versionid[0], 5) # seqnum 5 + self.assertThat(new_versionid[0], Equals(5)) # seqnum 5 d2 = self._fn.download_version(smap, new_versionid) - d2.addCallback(self.failUnlessEqual, expected_contents) + d2.addCallback(self.assertEquals, expected_contents) return d2 d.addCallback(_check_smap) return d @@ -256,12 +249,12 @@ class Repair(unittest.TestCase, PublishMixin, ShouldFailMixin): d.addCallback(_get_readcap) d.addCallback(lambda res: self._fn3.check_and_repair(Monitor())) def _check_results(crr): - self.failUnless(ICheckAndRepairResults.providedBy(crr)) + self.assertThat(ICheckAndRepairResults.providedBy(crr), Equals(True)) # we should detect the unhealthy, but skip over mutable-readcap # repairs until #625 is fixed - self.failIf(crr.get_pre_repair_results().is_healthy()) - self.failIf(crr.get_repair_attempted()) - self.failIf(crr.get_post_repair_results().is_healthy()) + self.assertThat(crr.get_pre_repair_results().is_healthy(), Equals(False)) + self.assertThat(crr.get_repair_attempted(), Equals(False)) + self.assertThat(crr.get_post_repair_results().is_healthy(), Equals(False)) d.addCallback(_check_results) return d @@ -281,6 +274,6 @@ class Repair(unittest.TestCase, PublishMixin, ShouldFailMixin): d.addCallback(lambda ign: self._fn2.check(Monitor())) d.addCallback(lambda check_results: self._fn2.repair(check_results)) def _check(crr): - self.failUnlessEqual(crr.get_successful(), True) + self.assertThat(crr.get_successful(), Equals(True)) d.addCallback(_check) return d diff --git a/src/allmydata/test/mutable/test_roundtrip.py b/src/allmydata/test/mutable/test_roundtrip.py index 79292b000..405219347 
100644 --- a/src/allmydata/test/mutable/test_roundtrip.py +++ b/src/allmydata/test/mutable/test_roundtrip.py @@ -1,17 +1,10 @@ """ Ported to Python 3. """ -from __future__ import print_function -from __future__ import absolute_import -from __future__ import division -from __future__ import unicode_literals -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - -from six.moves import cStringIO as StringIO -from twisted.trial import unittest +from io import StringIO +from ..common import AsyncTestCase +from testtools.matchers import Equals, HasLength, Contains from twisted.internet import defer from allmydata.util import base32, consumer @@ -23,8 +16,9 @@ from allmydata.mutable.retrieve import Retrieve from .util import PublishMixin, make_storagebroker, corrupt from .. import common_util as testutil -class Roundtrip(unittest.TestCase, testutil.ShouldFailMixin, PublishMixin): +class Roundtrip(AsyncTestCase, testutil.ShouldFailMixin, PublishMixin): def setUp(self): + super(Roundtrip, self).setUp() return self.publish_one() def make_servermap(self, mode=MODE_READ, oldmap=None, sb=None): @@ -73,11 +67,11 @@ class Roundtrip(unittest.TestCase, testutil.ShouldFailMixin, PublishMixin): def _do_retrieve(servermap): self._smap = servermap #self.dump_servermap(servermap) - self.failUnlessEqual(len(servermap.recoverable_versions()), 1) + self.assertThat(servermap.recoverable_versions(), HasLength(1)) return self.do_download(servermap) d.addCallback(_do_retrieve) def _retrieved(new_contents): - self.failUnlessEqual(new_contents, self.CONTENTS) + self.assertThat(new_contents, Equals(self.CONTENTS)) d.addCallback(_retrieved) # we should be able to re-use the same servermap, both with and # without updating it. 
@@ -132,10 +126,10 @@ class Roundtrip(unittest.TestCase, testutil.ShouldFailMixin, PublishMixin): # back empty d = self.make_servermap(sb=sb2) def _check_servermap(servermap): - self.failUnlessEqual(servermap.best_recoverable_version(), None) - self.failIf(servermap.recoverable_versions()) - self.failIf(servermap.unrecoverable_versions()) - self.failIf(servermap.all_servers()) + self.assertThat(servermap.best_recoverable_version(), Equals(None)) + self.assertFalse(servermap.recoverable_versions()) + self.assertFalse(servermap.unrecoverable_versions()) + self.assertFalse(servermap.all_servers()) d.addCallback(_check_servermap) return d @@ -154,7 +148,7 @@ class Roundtrip(unittest.TestCase, testutil.ShouldFailMixin, PublishMixin): self._fn._storage_broker = self._storage_broker return self._fn.download_best_version() def _retrieved(new_contents): - self.failUnlessEqual(new_contents, self.CONTENTS) + self.assertThat(new_contents, Equals(self.CONTENTS)) d.addCallback(_restore) d.addCallback(_retrieved) return d @@ -178,13 +172,13 @@ class Roundtrip(unittest.TestCase, testutil.ShouldFailMixin, PublishMixin): # should be noted in the servermap's list of problems. 
if substring: allproblems = [str(f) for f in servermap.get_problems()] - self.failUnlessIn(substring, "".join(allproblems)) + self.assertThat("".join(allproblems), Contains(substring)) return servermap if should_succeed: d1 = self._fn.download_version(servermap, ver, fetch_privkey) d1.addCallback(lambda new_contents: - self.failUnlessEqual(new_contents, self.CONTENTS)) + self.assertThat(new_contents, Equals(self.CONTENTS))) else: d1 = self.shouldFail(NotEnoughSharesError, "_corrupt_all(offset=%s)" % (offset,), @@ -207,7 +201,7 @@ class Roundtrip(unittest.TestCase, testutil.ShouldFailMixin, PublishMixin): # and the dump should mention the problems s = StringIO() dump = servermap.dump(s).getvalue() - self.failUnless("30 PROBLEMS" in dump, dump) + self.assertTrue("30 PROBLEMS" in dump, msg=dump) d.addCallback(_check_servermap) return d @@ -299,8 +293,8 @@ class Roundtrip(unittest.TestCase, testutil.ShouldFailMixin, PublishMixin): # in NotEnoughSharesError, since each share will look invalid def _check(res): f = res[0] - self.failUnless(f.check(NotEnoughSharesError)) - self.failUnless("uncoordinated write" in str(f)) + self.assertThat(f.check(NotEnoughSharesError), HasLength(1)) + self.assertThat("uncoordinated write" in str(f), Equals(True)) return self._test_corrupt_all(1, "ran out of servers", corrupt_early=False, failure_checker=_check) @@ -309,7 +303,7 @@ class Roundtrip(unittest.TestCase, testutil.ShouldFailMixin, PublishMixin): def test_corrupt_all_block_late(self): def _check(res): f = res[0] - self.failUnless(f.check(NotEnoughSharesError)) + self.assertTrue(f.check(NotEnoughSharesError)) return self._test_corrupt_all("share_data", "block hash tree failure", corrupt_early=False, failure_checker=_check) @@ -330,9 +324,9 @@ class Roundtrip(unittest.TestCase, testutil.ShouldFailMixin, PublishMixin): shnums_to_corrupt=list(range(0, N-k))) d.addCallback(lambda res: self.make_servermap()) def _do_retrieve(servermap): - self.failUnless(servermap.get_problems()) - 
self.failUnless("pubkey doesn't match fingerprint" - in str(servermap.get_problems()[0])) + self.assertTrue(servermap.get_problems()) + self.assertThat("pubkey doesn't match fingerprint" + in str(servermap.get_problems()[0]), Equals(True)) ver = servermap.best_recoverable_version() r = Retrieve(self._fn, self._storage_broker, servermap, ver) c = consumer.MemoryConsumer() @@ -340,7 +334,7 @@ class Roundtrip(unittest.TestCase, testutil.ShouldFailMixin, PublishMixin): d.addCallback(_do_retrieve) d.addCallback(lambda mc: b"".join(mc.chunks)) d.addCallback(lambda new_contents: - self.failUnlessEqual(new_contents, self.CONTENTS)) + self.assertThat(new_contents, Equals(self.CONTENTS))) return d @@ -355,11 +349,11 @@ class Roundtrip(unittest.TestCase, testutil.ShouldFailMixin, PublishMixin): self.make_servermap()) def _do_retrieve(servermap): ver = servermap.best_recoverable_version() - self.failUnless(ver) + self.assertTrue(ver) return self._fn.download_best_version() d.addCallback(_do_retrieve) d.addCallback(lambda new_contents: - self.failUnlessEqual(new_contents, self.CONTENTS)) + self.assertThat(new_contents, Equals(self.CONTENTS))) return d diff --git a/src/allmydata/test/mutable/test_servermap.py b/src/allmydata/test/mutable/test_servermap.py index e8f933977..eaf2eddbc 100644 --- a/src/allmydata/test/mutable/test_servermap.py +++ b/src/allmydata/test/mutable/test_servermap.py @@ -2,16 +2,8 @@ Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - -from twisted.trial import unittest +from ..common import AsyncTestCase +from testtools.matchers import Equals, NotEquals, HasLength from twisted.internet import defer from allmydata.monitor import Monitor from allmydata.mutable.common import \ @@ -20,8 +12,9 @@ from allmydata.mutable.publish import MutableData from allmydata.mutable.servermap import ServerMap, ServermapUpdater from .util import PublishMixin -class Servermap(unittest.TestCase, PublishMixin): +class Servermap(AsyncTestCase, PublishMixin): def setUp(self): + super(Servermap, self).setUp() return self.publish_one() def make_servermap(self, mode=MODE_CHECK, fn=None, sb=None, @@ -42,17 +35,17 @@ class Servermap(unittest.TestCase, PublishMixin): return d def failUnlessOneRecoverable(self, sm, num_shares): - self.failUnlessEqual(len(sm.recoverable_versions()), 1) - self.failUnlessEqual(len(sm.unrecoverable_versions()), 0) + self.assertThat(sm.recoverable_versions(), HasLength(1)) + self.assertThat(sm.unrecoverable_versions(), HasLength(0)) best = sm.best_recoverable_version() - self.failIfEqual(best, None) - self.failUnlessEqual(sm.recoverable_versions(), set([best])) - self.failUnlessEqual(len(sm.shares_available()), 1) - self.failUnlessEqual(sm.shares_available()[best], (num_shares, 3, 10)) + self.assertThat(best, NotEquals(None)) + self.assertThat(sm.recoverable_versions(), Equals(set([best]))) + self.assertThat(sm.shares_available(), HasLength(1)) + self.assertThat(sm.shares_available()[best], Equals((num_shares, 3, 10))) shnum, servers = list(sm.make_sharemap().items())[0] server = list(servers)[0] - 
self.failUnlessEqual(sm.version_on_server(server, shnum), best) - self.failUnlessEqual(sm.version_on_server(server, 666), None) + self.assertThat(sm.version_on_server(server, shnum), Equals(best)) + self.assertThat(sm.version_on_server(server, 666), Equals(None)) return sm def test_basic(self): @@ -117,7 +110,7 @@ class Servermap(unittest.TestCase, PublishMixin): v = sm.best_recoverable_version() vm = sm.make_versionmap() shares = list(vm[v]) - self.failUnlessEqual(len(shares), 6) + self.assertThat(shares, HasLength(6)) self._corrupted = set() # mark the first 5 shares as corrupt, then update the servermap. # The map should not have the marked shares it in any more, and @@ -135,18 +128,17 @@ class Servermap(unittest.TestCase, PublishMixin): shares = list(vm[v]) for (server, shnum) in self._corrupted: server_shares = sm.debug_shares_on_server(server) - self.failIf(shnum in server_shares, - "%d was in %s" % (shnum, server_shares)) - self.failUnlessEqual(len(shares), 5) + self.assertFalse(shnum in server_shares, "%d was in %s" % (shnum, server_shares)) + self.assertThat(shares, HasLength(5)) d.addCallback(_check_map) return d def failUnlessNoneRecoverable(self, sm): - self.failUnlessEqual(len(sm.recoverable_versions()), 0) - self.failUnlessEqual(len(sm.unrecoverable_versions()), 0) + self.assertThat(sm.recoverable_versions(), HasLength(0)) + self.assertThat(sm.unrecoverable_versions(), HasLength(0)) best = sm.best_recoverable_version() - self.failUnlessEqual(best, None) - self.failUnlessEqual(len(sm.shares_available()), 0) + self.assertThat(best, Equals(None)) + self.assertThat(sm.shares_available(), HasLength(0)) def test_no_shares(self): self._storage._peers = {} # delete all shares @@ -168,12 +160,12 @@ class Servermap(unittest.TestCase, PublishMixin): return d def failUnlessNotQuiteEnough(self, sm): - self.failUnlessEqual(len(sm.recoverable_versions()), 0) - self.failUnlessEqual(len(sm.unrecoverable_versions()), 1) + self.assertThat(sm.recoverable_versions(), 
HasLength(0)) + self.assertThat(sm.unrecoverable_versions(), HasLength(1)) best = sm.best_recoverable_version() - self.failUnlessEqual(best, None) - self.failUnlessEqual(len(sm.shares_available()), 1) - self.failUnlessEqual(list(sm.shares_available().values())[0], (2,3,10) ) + self.assertThat(best, Equals(None)) + self.assertThat(sm.shares_available(), HasLength(1)) + self.assertThat(list(sm.shares_available().values())[0], Equals((2,3,10))) return sm def test_not_quite_enough_shares(self): @@ -193,7 +185,7 @@ class Servermap(unittest.TestCase, PublishMixin): d.addCallback(lambda res: ms(mode=MODE_CHECK)) d.addCallback(lambda sm: self.failUnlessNotQuiteEnough(sm)) d.addCallback(lambda sm: - self.failUnlessEqual(len(sm.make_sharemap()), 2)) + self.assertThat(sm.make_sharemap(), HasLength(2))) d.addCallback(lambda res: ms(mode=MODE_ANYTHING)) d.addCallback(lambda sm: self.failUnlessNotQuiteEnough(sm)) d.addCallback(lambda res: ms(mode=MODE_WRITE)) @@ -216,7 +208,7 @@ class Servermap(unittest.TestCase, PublishMixin): # Calling make_servermap also updates the servermap in the mode # that we specify, so we just need to see what it says. 
def _check_servermap(sm): - self.failUnlessEqual(len(sm.recoverable_versions()), 1) + self.assertThat(sm.recoverable_versions(), HasLength(1)) d.addCallback(_check_servermap) return d @@ -229,10 +221,10 @@ class Servermap(unittest.TestCase, PublishMixin): self.make_servermap(mode=MODE_WRITE, update_range=(1, 2))) def _check_servermap(sm): # 10 shares - self.failUnlessEqual(len(sm.update_data), 10) + self.assertThat(sm.update_data, HasLength(10)) # one version for data in sm.update_data.values(): - self.failUnlessEqual(len(data), 1) + self.assertThat(data, HasLength(1)) d.addCallback(_check_servermap) return d @@ -244,5 +236,5 @@ class Servermap(unittest.TestCase, PublishMixin): d.addCallback(lambda ignored: self.make_servermap(mode=MODE_CHECK)) d.addCallback(lambda servermap: - self.failUnlessEqual(len(servermap.recoverable_versions()), 1)) + self.assertThat(servermap.recoverable_versions(), HasLength(1))) return d diff --git a/src/allmydata/test/mutable/test_update.py b/src/allmydata/test/mutable/test_update.py index da5d53e4c..37a4aa6b7 100644 --- a/src/allmydata/test/mutable/test_update.py +++ b/src/allmydata/test/mutable/test_update.py @@ -1,21 +1,18 @@ """ Ported to Python 3. 
""" -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 import re -from twisted.trial import unittest +from ..common import AsyncTestCase +from testtools.matchers import ( + Equals, + IsInstance, + GreaterThan, +) from twisted.internet import defer from allmydata.interfaces import MDMF_VERSION from allmydata.mutable.filenode import MutableFileNode -from allmydata.mutable.publish import MutableData, DEFAULT_MAX_SEGMENT_SIZE +from allmydata.mutable.publish import MutableData, DEFAULT_MUTABLE_MAX_SEGMENT_SIZE from ..no_network import GridTestMixin from .. import common_util as testutil @@ -25,7 +22,7 @@ from .. import common_util as testutil # this up. SEGSIZE = 128*1024 -class Update(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin): +class Update(GridTestMixin, AsyncTestCase, testutil.ShouldFailMixin): def setUp(self): GridTestMixin.setUp(self) self.basedir = self.mktemp() @@ -35,14 +32,14 @@ class Update(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin): # self.data should be at least three segments long. 
td = b"testdata " self.data = td*(int(3*SEGSIZE//len(td))+10) # currently about 400kB - assert len(self.data) > 3*SEGSIZE + self.assertThat(len(self.data), GreaterThan(3*SEGSIZE)) self.small_data = b"test data" * 10 # 90 B; SDMF def do_upload_sdmf(self): d = self.nm.create_mutable_file(MutableData(self.small_data)) def _then(n): - assert isinstance(n, MutableFileNode) + self.assertThat(n, IsInstance(MutableFileNode)) self.sdmf_node = n d.addCallback(_then) return d @@ -51,7 +48,7 @@ class Update(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin): d = self.nm.create_mutable_file(MutableData(self.data), version=MDMF_VERSION) def _then(n): - assert isinstance(n, MutableFileNode) + self.assertThat(n, IsInstance(MutableFileNode)) self.mdmf_node = n d.addCallback(_then) return d @@ -175,7 +172,7 @@ class Update(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin): # long -- this is 7 segments in the default segment size. So we # need to add 2 segments worth of data to push it over a # power-of-two boundary. 
- segment = b"a" * DEFAULT_MAX_SEGMENT_SIZE + segment = b"a" * DEFAULT_MUTABLE_MAX_SEGMENT_SIZE new_data = self.data + (segment * 2) d0 = self.do_upload_mdmf() def _run(ign): @@ -185,7 +182,7 @@ class Update(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin): len(self.data))) d.addCallback(lambda ign: self.mdmf_node.download_best_version()) d.addCallback(lambda results: - self.failUnlessEqual(results, new_data)) + self.assertThat(results, Equals(new_data))) return d d0.addCallback(_run) return d0 @@ -201,7 +198,7 @@ class Update(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin): len(self.small_data))) d.addCallback(lambda ign: self.sdmf_node.download_best_version()) d.addCallback(lambda results: - self.failUnlessEqual(results, new_data)) + self.assertThat(results, Equals(new_data))) return d d0.addCallback(_run) return d0 @@ -221,15 +218,15 @@ class Update(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin): replace_offset)) d.addCallback(lambda ign: self.mdmf_node.download_best_version()) d.addCallback(lambda results: - self.failUnlessEqual(results, new_data)) + self.assertThat(results, Equals(new_data))) return d d0.addCallback(_run) return d0 def test_multiple_segment_replace(self): - replace_offset = 2 * DEFAULT_MAX_SEGMENT_SIZE + replace_offset = 2 * DEFAULT_MUTABLE_MAX_SEGMENT_SIZE new_data = self.data[:replace_offset] - new_segment = b"a" * DEFAULT_MAX_SEGMENT_SIZE + new_segment = b"a" * DEFAULT_MUTABLE_MAX_SEGMENT_SIZE new_data += 2 * new_segment new_data += b"replaced" rest_offset = len(new_data) @@ -242,7 +239,7 @@ class Update(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin): replace_offset)) d.addCallback(lambda ignored: self.mdmf_node.download_best_version()) d.addCallback(lambda results: - self.failUnlessEqual(results, new_data)) + self.assertThat(results, Equals(new_data))) return d d0.addCallback(_run) return d0 diff --git a/src/allmydata/test/mutable/test_version.py b/src/allmydata/test/mutable/test_version.py 
index 042305c24..c91c1d4f1 100644 --- a/src/allmydata/test/mutable/test_version.py +++ b/src/allmydata/test/mutable/test_version.py @@ -1,20 +1,19 @@ """ -Ported to Python 3. +Tests related to the way ``allmydata.mutable`` handles different versions +of data for an object. """ -from __future__ import print_function -from __future__ import absolute_import -from __future__ import division -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 +from io import StringIO import os -from six.moves import cStringIO as StringIO +from typing import Optional -from twisted.internet import defer -from twisted.trial import unittest +from ..common import AsyncTestCase +from testtools.matchers import ( + Equals, + IsInstance, + HasLength, + Contains, +) from allmydata import uri from allmydata.interfaces import SDMF_VERSION, MDMF_VERSION @@ -29,7 +28,7 @@ from ..no_network import GridTestMixin from .util import PublishMixin from .. 
import common_util as testutil -class Version(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin, \ +class Version(GridTestMixin, AsyncTestCase, testutil.ShouldFailMixin, \ PublishMixin): def setUp(self): GridTestMixin.setUp(self) @@ -41,343 +40,273 @@ class Version(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin, \ self.small_data = b"test data" * 10 # 90 B; SDMF - def do_upload_mdmf(self, data=None): + async def do_upload_mdmf(self, data: Optional[bytes] = None) -> MutableFileNode: if data is None: data = self.data - d = self.nm.create_mutable_file(MutableData(data), - version=MDMF_VERSION) - def _then(n): - assert isinstance(n, MutableFileNode) - assert n._protocol_version == MDMF_VERSION - self.mdmf_node = n - return n - d.addCallback(_then) - return d + n = await self.nm.create_mutable_file(MutableData(data), + version=MDMF_VERSION) + self.assertThat(n, IsInstance(MutableFileNode)) + self.assertThat(n._protocol_version, Equals(MDMF_VERSION)) + self.mdmf_node = n + return n - def do_upload_sdmf(self, data=None): + async def do_upload_sdmf(self, data: Optional[bytes] = None) -> MutableFileNode: if data is None: data = self.small_data - d = self.nm.create_mutable_file(MutableData(data)) - def _then(n): - assert isinstance(n, MutableFileNode) - assert n._protocol_version == SDMF_VERSION - self.sdmf_node = n - return n - d.addCallback(_then) - return d + n = await self.nm.create_mutable_file(MutableData(data)) + self.assertThat(n, IsInstance(MutableFileNode)) + self.assertThat(n._protocol_version, Equals(SDMF_VERSION)) + self.sdmf_node = n + return n - def do_upload_empty_sdmf(self): - d = self.nm.create_mutable_file(MutableData(b"")) - def _then(n): - assert isinstance(n, MutableFileNode) - self.sdmf_zero_length_node = n - assert n._protocol_version == SDMF_VERSION - return n - d.addCallback(_then) - return d + async def do_upload_empty_sdmf(self) -> MutableFileNode: + n = await self.nm.create_mutable_file(MutableData(b"")) + self.assertThat(n, 
IsInstance(MutableFileNode)) + self.sdmf_zero_length_node = n + self.assertThat(n._protocol_version, Equals(SDMF_VERSION)) + return n - def do_upload(self): - d = self.do_upload_mdmf() - d.addCallback(lambda ign: self.do_upload_sdmf()) - return d + async def do_upload(self) -> MutableFileNode: + await self.do_upload_mdmf() + return await self.do_upload_sdmf() - def test_debug(self): - d = self.do_upload_mdmf() - def _debug(n): - fso = debug.FindSharesOptions() - storage_index = base32.b2a(n.get_storage_index()) - fso.si_s = str(storage_index, "utf-8") # command-line options are unicode on Python 3 - fso.nodedirs = [os.path.dirname(abspath_expanduser_unicode(str(storedir))) - for (i,ss,storedir) - in self.iterate_servers()] - fso.stdout = StringIO() - fso.stderr = StringIO() - debug.find_shares(fso) - sharefiles = fso.stdout.getvalue().splitlines() - expected = self.nm.default_encoding_parameters["n"] - self.failUnlessEqual(len(sharefiles), expected) + async def test_debug(self) -> None: + n = await self.do_upload_mdmf() + fso = debug.FindSharesOptions() + storage_index = base32.b2a(n.get_storage_index()) + fso.si_s = str(storage_index, "utf-8") # command-line options are unicode on Python 3 + fso.nodedirs = [os.path.dirname(abspath_expanduser_unicode(str(storedir))) + for (i,ss,storedir) + in self.iterate_servers()] + # This attribute isn't defined on FindSharesOptions but `find_shares()` + # definitely expects it... 
+ fso.stdout = StringIO() # type: ignore[attr-defined] + debug.find_shares(fso) + sharefiles = fso.stdout.getvalue().splitlines() # type: ignore[attr-defined] + expected = self.nm.default_encoding_parameters["n"] + self.assertThat(sharefiles, HasLength(expected)) - do = debug.DumpOptions() - do["filename"] = sharefiles[0] - do.stdout = StringIO() - debug.dump_share(do) - output = do.stdout.getvalue() - lines = set(output.splitlines()) - self.failUnless("Mutable slot found:" in lines, output) - self.failUnless(" share_type: MDMF" in lines, output) - self.failUnless(" num_extra_leases: 0" in lines, output) - self.failUnless(" MDMF contents:" in lines, output) - self.failUnless(" seqnum: 1" in lines, output) - self.failUnless(" required_shares: 3" in lines, output) - self.failUnless(" total_shares: 10" in lines, output) - self.failUnless(" segsize: 131073" in lines, output) - self.failUnless(" datalen: %d" % len(self.data) in lines, output) - vcap = str(n.get_verify_cap().to_string(), "utf-8") - self.failUnless(" verify-cap: %s" % vcap in lines, output) - cso = debug.CatalogSharesOptions() - cso.nodedirs = fso.nodedirs - cso.stdout = StringIO() - cso.stderr = StringIO() - debug.catalog_shares(cso) - shares = cso.stdout.getvalue().splitlines() - oneshare = shares[0] # all shares should be MDMF - self.failIf(oneshare.startswith("UNKNOWN"), oneshare) - self.failUnless(oneshare.startswith("MDMF"), oneshare) - fields = oneshare.split() - self.failUnlessEqual(fields[0], "MDMF") - self.failUnlessEqual(fields[1].encode("ascii"), storage_index) - self.failUnlessEqual(fields[2], "3/10") - self.failUnlessEqual(fields[3], "%d" % len(self.data)) - self.failUnless(fields[4].startswith("#1:"), fields[3]) - # the rest of fields[4] is the roothash, which depends upon - # encryption salts and is not constant. fields[5] is the - # remaining time on the longest lease, which is timing dependent. - # The rest of the line is the quoted pathname to the share. 
- d.addCallback(_debug) - return d + # This attribute isn't defined on DebugOptions but `dump_share()` + # definitely expects it... + do = debug.DumpOptions() + do["filename"] = sharefiles[0] + do.stdout = StringIO() # type: ignore[attr-defined] + debug.dump_share(do) + output = do.stdout.getvalue() # type: ignore[attr-defined] + lines = set(output.splitlines()) + self.assertTrue("Mutable slot found:" in lines, output) + self.assertTrue(" share_type: MDMF" in lines, output) + self.assertTrue(" num_extra_leases: 0" in lines, output) + self.assertTrue(" MDMF contents:" in lines, output) + self.assertTrue(" seqnum: 1" in lines, output) + self.assertTrue(" required_shares: 3" in lines, output) + self.assertTrue(" total_shares: 10" in lines, output) + self.assertTrue(" segsize: 131073" in lines, output) + self.assertTrue(" datalen: %d" % len(self.data) in lines, output) + vcap = str(n.get_verify_cap().to_string(), "utf-8") + self.assertTrue(" verify-cap: %s" % vcap in lines, output) + cso = debug.CatalogSharesOptions() + cso.nodedirs = fso.nodedirs + # Definitely not options on CatalogSharesOptions, but the code does use + # stdout and stderr... + cso.stdout = StringIO() # type: ignore[attr-defined] + cso.stderr = StringIO() # type: ignore[attr-defined] + debug.catalog_shares(cso) + shares = cso.stdout.getvalue().splitlines() # type: ignore[attr-defined] + oneshare = shares[0] # all shares should be MDMF + self.failIf(oneshare.startswith("UNKNOWN"), oneshare) + self.assertTrue(oneshare.startswith("MDMF"), oneshare) + fields = oneshare.split() + self.assertThat(fields[0], Equals("MDMF")) + self.assertThat(fields[1].encode("ascii"), Equals(storage_index)) + self.assertThat(fields[2], Equals("3/10")) + self.assertThat(fields[3], Equals("%d" % len(self.data))) + self.assertTrue(fields[4].startswith("#1:"), fields[3]) + # the rest of fields[4] is the roothash, which depends upon + # encryption salts and is not constant. 
fields[5] is the + # remaining time on the longest lease, which is timing dependent. + # The rest of the line is the quoted pathname to the share. + + async def test_get_sequence_number(self) -> None: + await self.do_upload() + bv = await self.mdmf_node.get_best_readable_version() + self.assertThat(bv.get_sequence_number(), Equals(1)) + bv = await self.sdmf_node.get_best_readable_version() + self.assertThat(bv.get_sequence_number(), Equals(1)) - def test_get_sequence_number(self): - d = self.do_upload() - d.addCallback(lambda ign: self.mdmf_node.get_best_readable_version()) - d.addCallback(lambda bv: - self.failUnlessEqual(bv.get_sequence_number(), 1)) - d.addCallback(lambda ignored: - self.sdmf_node.get_best_readable_version()) - d.addCallback(lambda bv: - self.failUnlessEqual(bv.get_sequence_number(), 1)) # Now update. The sequence number in both cases should be 1 in # both cases. - def _do_update(ignored): - new_data = MutableData(b"foo bar baz" * 100000) - new_small_data = MutableData(b"foo bar baz" * 10) - d1 = self.mdmf_node.overwrite(new_data) - d2 = self.sdmf_node.overwrite(new_small_data) - dl = gatherResults([d1, d2]) - return dl - d.addCallback(_do_update) - d.addCallback(lambda ignored: - self.mdmf_node.get_best_readable_version()) - d.addCallback(lambda bv: - self.failUnlessEqual(bv.get_sequence_number(), 2)) - d.addCallback(lambda ignored: - self.sdmf_node.get_best_readable_version()) - d.addCallback(lambda bv: - self.failUnlessEqual(bv.get_sequence_number(), 2)) - return d + new_data = MutableData(b"foo bar baz" * 100000) + new_small_data = MutableData(b"foo bar baz" * 10) + d1 = self.mdmf_node.overwrite(new_data) + d2 = self.sdmf_node.overwrite(new_small_data) + await gatherResults([d1, d2]) + bv = await self.mdmf_node.get_best_readable_version() + self.assertThat(bv.get_sequence_number(), Equals(2)) + bv = await self.sdmf_node.get_best_readable_version() + self.assertThat(bv.get_sequence_number(), Equals(2)) - - def test_cap_after_upload(self): + 
async def test_cap_after_upload(self) -> None: # If we create a new mutable file and upload things to it, and # it's an MDMF file, we should get an MDMF cap back from that # file and should be able to use that. # That's essentially what MDMF node is, so just check that. - d = self.do_upload_mdmf() - def _then(ign): - mdmf_uri = self.mdmf_node.get_uri() - cap = uri.from_string(mdmf_uri) - self.failUnless(isinstance(cap, uri.WriteableMDMFFileURI)) - readonly_mdmf_uri = self.mdmf_node.get_readonly_uri() - cap = uri.from_string(readonly_mdmf_uri) - self.failUnless(isinstance(cap, uri.ReadonlyMDMFFileURI)) - d.addCallback(_then) - return d + await self.do_upload_mdmf() + mdmf_uri = self.mdmf_node.get_uri() + cap = uri.from_string(mdmf_uri) + self.assertTrue(isinstance(cap, uri.WriteableMDMFFileURI)) + readonly_mdmf_uri = self.mdmf_node.get_readonly_uri() + cap = uri.from_string(readonly_mdmf_uri) + self.assertTrue(isinstance(cap, uri.ReadonlyMDMFFileURI)) - def test_mutable_version(self): + async def test_mutable_version(self) -> None: # assert that getting parameters from the IMutableVersion object # gives us the same data as getting them from the filenode itself - d = self.do_upload() - d.addCallback(lambda ign: self.mdmf_node.get_best_mutable_version()) - def _check_mdmf(bv): - n = self.mdmf_node - self.failUnlessEqual(bv.get_writekey(), n.get_writekey()) - self.failUnlessEqual(bv.get_storage_index(), n.get_storage_index()) - self.failIf(bv.is_readonly()) - d.addCallback(_check_mdmf) - d.addCallback(lambda ign: self.sdmf_node.get_best_mutable_version()) - def _check_sdmf(bv): - n = self.sdmf_node - self.failUnlessEqual(bv.get_writekey(), n.get_writekey()) - self.failUnlessEqual(bv.get_storage_index(), n.get_storage_index()) - self.failIf(bv.is_readonly()) - d.addCallback(_check_sdmf) - return d + await self.do_upload() + bv = await self.mdmf_node.get_best_mutable_version() + n = self.mdmf_node + self.assertThat(bv.get_writekey(), Equals(n.get_writekey())) + 
self.assertThat(bv.get_storage_index(), Equals(n.get_storage_index())) + self.assertFalse(bv.is_readonly()) + + bv = await self.sdmf_node.get_best_mutable_version() + n = self.sdmf_node + self.assertThat(bv.get_writekey(), Equals(n.get_writekey())) + self.assertThat(bv.get_storage_index(), Equals(n.get_storage_index())) + self.assertFalse(bv.is_readonly()) - def test_get_readonly_version(self): - d = self.do_upload() - d.addCallback(lambda ign: self.mdmf_node.get_best_readable_version()) - d.addCallback(lambda bv: self.failUnless(bv.is_readonly())) + async def test_get_readonly_version(self) -> None: + await self.do_upload() + bv = await self.mdmf_node.get_best_readable_version() + self.assertTrue(bv.is_readonly()) # Attempting to get a mutable version of a mutable file from a # filenode initialized with a readcap should return a readonly # version of that same node. - d.addCallback(lambda ign: self.mdmf_node.get_readonly()) - d.addCallback(lambda ro: ro.get_best_mutable_version()) - d.addCallback(lambda v: self.failUnless(v.is_readonly())) + ro = self.mdmf_node.get_readonly() + v = await ro.get_best_mutable_version() + self.assertTrue(v.is_readonly()) - d.addCallback(lambda ign: self.sdmf_node.get_best_readable_version()) - d.addCallback(lambda bv: self.failUnless(bv.is_readonly())) + bv = await self.sdmf_node.get_best_readable_version() + self.assertTrue(bv.is_readonly()) - d.addCallback(lambda ign: self.sdmf_node.get_readonly()) - d.addCallback(lambda ro: ro.get_best_mutable_version()) - d.addCallback(lambda v: self.failUnless(v.is_readonly())) - return d + ro = self.sdmf_node.get_readonly() + v = await ro.get_best_mutable_version() + self.assertTrue(v.is_readonly()) - def test_toplevel_overwrite(self): + async def test_toplevel_overwrite(self) -> None: new_data = MutableData(b"foo bar baz" * 100000) new_small_data = MutableData(b"foo bar baz" * 10) - d = self.do_upload() - d.addCallback(lambda ign: self.mdmf_node.overwrite(new_data)) - d.addCallback(lambda 
ignored: - self.mdmf_node.download_best_version()) - d.addCallback(lambda data: - self.failUnlessEqual(data, b"foo bar baz" * 100000)) - d.addCallback(lambda ignored: - self.sdmf_node.overwrite(new_small_data)) - d.addCallback(lambda ignored: - self.sdmf_node.download_best_version()) - d.addCallback(lambda data: - self.failUnlessEqual(data, b"foo bar baz" * 10)) - return d + await self.do_upload() + await self.mdmf_node.overwrite(new_data) + data = await self.mdmf_node.download_best_version() + self.assertThat(data, Equals(b"foo bar baz" * 100000)) + await self.sdmf_node.overwrite(new_small_data) + data = await self.sdmf_node.download_best_version() + self.assertThat(data, Equals(b"foo bar baz" * 10)) - def test_toplevel_modify(self): - d = self.do_upload() + async def test_toplevel_modify(self) -> None: + await self.do_upload() def modifier(old_contents, servermap, first_time): return old_contents + b"modified" - d.addCallback(lambda ign: self.mdmf_node.modify(modifier)) - d.addCallback(lambda ignored: - self.mdmf_node.download_best_version()) - d.addCallback(lambda data: - self.failUnlessIn(b"modified", data)) - d.addCallback(lambda ignored: - self.sdmf_node.modify(modifier)) - d.addCallback(lambda ignored: - self.sdmf_node.download_best_version()) - d.addCallback(lambda data: - self.failUnlessIn(b"modified", data)) - return d + await self.mdmf_node.modify(modifier) + data = await self.mdmf_node.download_best_version() + self.assertThat(data, Contains(b"modified")) + await self.sdmf_node.modify(modifier) + data = await self.sdmf_node.download_best_version() + self.assertThat(data, Contains(b"modified")) - def test_version_modify(self): + async def test_version_modify(self) -> None: # TODO: When we can publish multiple versions, alter this test # to modify a version other than the best usable version, then # test to see that the best recoverable version is that. 
- d = self.do_upload() + await self.do_upload() def modifier(old_contents, servermap, first_time): return old_contents + b"modified" - d.addCallback(lambda ign: self.mdmf_node.modify(modifier)) - d.addCallback(lambda ignored: - self.mdmf_node.download_best_version()) - d.addCallback(lambda data: - self.failUnlessIn(b"modified", data)) - d.addCallback(lambda ignored: - self.sdmf_node.modify(modifier)) - d.addCallback(lambda ignored: - self.sdmf_node.download_best_version()) - d.addCallback(lambda data: - self.failUnlessIn(b"modified", data)) - return d + await self.mdmf_node.modify(modifier) + data = await self.mdmf_node.download_best_version() + self.assertThat(data, Contains(b"modified")) + await self.sdmf_node.modify(modifier) + data = await self.sdmf_node.download_best_version() + self.assertThat(data, Contains(b"modified")) - def test_download_version(self): - d = self.publish_multiple() + async def test_download_version(self) -> None: + await self.publish_multiple() # We want to have two recoverable versions on the grid. - d.addCallback(lambda res: - self._set_versions({0:0,2:0,4:0,6:0,8:0, - 1:1,3:1,5:1,7:1,9:1})) + self._set_versions({0:0,2:0,4:0,6:0,8:0, + 1:1,3:1,5:1,7:1,9:1}) # Now try to download each version. We should get the plaintext # associated with that version. 
- d.addCallback(lambda ignored: - self._fn.get_servermap(mode=MODE_READ)) - def _got_servermap(smap): - versions = smap.recoverable_versions() - assert len(versions) == 2 + smap = await self._fn.get_servermap(mode=MODE_READ) + versions = smap.recoverable_versions() + assert len(versions) == 2 - self.servermap = smap - self.version1, self.version2 = versions - assert self.version1 != self.version2 + self.servermap = smap + self.version1, self.version2 = versions + assert self.version1 != self.version2 - self.version1_seqnum = self.version1[0] - self.version2_seqnum = self.version2[0] - self.version1_index = self.version1_seqnum - 1 - self.version2_index = self.version2_seqnum - 1 + self.version1_seqnum = self.version1[0] + self.version2_seqnum = self.version2[0] + self.version1_index = self.version1_seqnum - 1 + self.version2_index = self.version2_seqnum - 1 - d.addCallback(_got_servermap) - d.addCallback(lambda ignored: - self._fn.download_version(self.servermap, self.version1)) - d.addCallback(lambda results: - self.failUnlessEqual(self.CONTENTS[self.version1_index], - results)) - d.addCallback(lambda ignored: - self._fn.download_version(self.servermap, self.version2)) - d.addCallback(lambda results: - self.failUnlessEqual(self.CONTENTS[self.version2_index], - results)) - return d + results = await self._fn.download_version(self.servermap, self.version1) + self.assertThat(self.CONTENTS[self.version1_index], + Equals(results)) + results = await self._fn.download_version(self.servermap, self.version2) + self.assertThat(self.CONTENTS[self.version2_index], + Equals(results)) - def test_download_nonexistent_version(self): - d = self.do_upload_mdmf() - d.addCallback(lambda ign: self.mdmf_node.get_servermap(mode=MODE_WRITE)) - def _set_servermap(servermap): - self.servermap = servermap - d.addCallback(_set_servermap) - d.addCallback(lambda ignored: - self.shouldFail(UnrecoverableFileError, "nonexistent version", - None, - self.mdmf_node.download_version, self.servermap, 
- "not a version")) - return d + async def test_download_nonexistent_version(self) -> None: + await self.do_upload_mdmf() + servermap = await self.mdmf_node.get_servermap(mode=MODE_WRITE) + await self.shouldFail(UnrecoverableFileError, "nonexistent version", + None, + self.mdmf_node.download_version, servermap, + "not a version") - def _test_partial_read(self, node, expected, modes, step): - d = node.get_best_readable_version() + async def _test_partial_read(self, node, expected, modes, step) -> None: + version = await node.get_best_readable_version() for (name, offset, length) in modes: - d.addCallback(self._do_partial_read, name, expected, offset, length) + await self._do_partial_read(version, name, expected, offset, length) # then read the whole thing, but only a few bytes at a time, and see # that the results are what we expect. - def _read_data(version): - c = consumer.MemoryConsumer() - d2 = defer.succeed(None) - for i in range(0, len(expected), step): - d2.addCallback(lambda ignored, i=i: version.read(c, i, step)) - d2.addCallback(lambda ignored: - self.failUnlessEqual(expected, b"".join(c.chunks))) - return d2 - d.addCallback(_read_data) - return d - - def _do_partial_read(self, version, name, expected, offset, length): c = consumer.MemoryConsumer() - d = version.read(c, offset, length) + for i in range(0, len(expected), step): + await version.read(c, i, step) + self.assertThat(expected, Equals(b"".join(c.chunks))) + + async def _do_partial_read(self, version, name, expected, offset, length) -> None: + c = consumer.MemoryConsumer() + await version.read(c, offset, length) if length is None: expected_range = expected[offset:] else: expected_range = expected[offset:offset+length] - d.addCallback(lambda ignored: b"".join(c.chunks)) - def _check(results): - if results != expected_range: - print("read([%d]+%s) got %d bytes, not %d" % \ - (offset, length, len(results), len(expected_range))) - print("got: %s ... 
%s" % (results[:20], results[-20:])) - print("exp: %s ... %s" % (expected_range[:20], expected_range[-20:])) - self.fail("results[%s] != expected_range" % name) - return version # daisy-chained to next call - d.addCallback(_check) - return d + results = b"".join(c.chunks) + if results != expected_range: + print("read([%d]+%s) got %d bytes, not %d" % \ + (offset, length, len(results), len(expected_range))) + print("got: %r ... %r" % (results[:20], results[-20:])) + print("exp: %r ... %r" % (expected_range[:20], expected_range[-20:])) + self.fail("results[%s] != expected_range" % name) - def test_partial_read_mdmf_0(self): + async def test_partial_read_mdmf_0(self) -> None: data = b"" - d = self.do_upload_mdmf(data=data) + result = await self.do_upload_mdmf(data=data) modes = [("all1", 0,0), ("all2", 0,None), ] - d.addCallback(self._test_partial_read, data, modes, 1) - return d + await self._test_partial_read(result, data, modes, 1) - def test_partial_read_mdmf_large(self): + async def test_partial_read_mdmf_large(self) -> None: segment_boundary = mathutil.next_multiple(128 * 1024, 3) modes = [("start_on_segment_boundary", segment_boundary, 50), ("ending_one_byte_after_segment_boundary", segment_boundary-50, 51), @@ -387,20 +316,18 @@ class Version(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin, \ ("complete_file1", 0, len(self.data)), ("complete_file2", 0, None), ] - d = self.do_upload_mdmf() - d.addCallback(self._test_partial_read, self.data, modes, 10000) - return d + result = await self.do_upload_mdmf() + await self._test_partial_read(result, self.data, modes, 10000) - def test_partial_read_sdmf_0(self): + async def test_partial_read_sdmf_0(self) -> None: data = b"" modes = [("all1", 0,0), ("all2", 0,None), ] - d = self.do_upload_sdmf(data=data) - d.addCallback(self._test_partial_read, data, modes, 1) - return d + result = await self.do_upload_sdmf(data=data) + await self._test_partial_read(result, data, modes, 1) - def test_partial_read_sdmf_2(self): 
+ async def test_partial_read_sdmf_2(self) -> None: data = b"hi" modes = [("one_byte", 0, 1), ("last_byte", 1, 1), @@ -408,11 +335,10 @@ class Version(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin, \ ("complete_file", 0, 2), ("complete_file2", 0, None), ] - d = self.do_upload_sdmf(data=data) - d.addCallback(self._test_partial_read, data, modes, 1) - return d + result = await self.do_upload_sdmf(data=data) + await self._test_partial_read(result, data, modes, 1) - def test_partial_read_sdmf_90(self): + async def test_partial_read_sdmf_90(self) -> None: modes = [("start_at_middle", 50, 40), ("start_at_middle2", 50, None), ("zero_length_at_start", 0, 0), @@ -421,11 +347,10 @@ class Version(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin, \ ("complete_file1", 0, None), ("complete_file2", 0, 90), ] - d = self.do_upload_sdmf() - d.addCallback(self._test_partial_read, self.small_data, modes, 10) - return d + result = await self.do_upload_sdmf() + await self._test_partial_read(result, self.small_data, modes, 10) - def test_partial_read_sdmf_100(self): + async def test_partial_read_sdmf_100(self) -> None: data = b"test data "*10 modes = [("start_at_middle", 50, 50), ("start_at_middle2", 50, None), @@ -434,42 +359,30 @@ class Version(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin, \ ("complete_file1", 0, 100), ("complete_file2", 0, None), ] - d = self.do_upload_sdmf(data=data) - d.addCallback(self._test_partial_read, data, modes, 10) - return d + result = await self.do_upload_sdmf(data=data) + await self._test_partial_read(result, data, modes, 10) + async def _test_read_and_download(self, node, expected) -> None: + version = await node.get_best_readable_version() + c = consumer.MemoryConsumer() + await version.read(c) + self.assertThat(expected, Equals(b"".join(c.chunks))) - def _test_read_and_download(self, node, expected): - d = node.get_best_readable_version() - def _read_data(version): - c = consumer.MemoryConsumer() - c2 = 
consumer.MemoryConsumer() - d2 = defer.succeed(None) - d2.addCallback(lambda ignored: version.read(c)) - d2.addCallback(lambda ignored: - self.failUnlessEqual(expected, b"".join(c.chunks))) + c2 = consumer.MemoryConsumer() + await version.read(c2, offset=0, size=len(expected)) + self.assertThat(expected, Equals(b"".join(c2.chunks))) - d2.addCallback(lambda ignored: version.read(c2, offset=0, - size=len(expected))) - d2.addCallback(lambda ignored: - self.failUnlessEqual(expected, b"".join(c2.chunks))) - return d2 - d.addCallback(_read_data) - d.addCallback(lambda ignored: node.download_best_version()) - d.addCallback(lambda data: self.failUnlessEqual(expected, data)) - return d + data = await node.download_best_version() + self.assertThat(expected, Equals(data)) - def test_read_and_download_mdmf(self): - d = self.do_upload_mdmf() - d.addCallback(self._test_read_and_download, self.data) - return d + async def test_read_and_download_mdmf(self) -> None: + result = await self.do_upload_mdmf() + await self._test_read_and_download(result, self.data) - def test_read_and_download_sdmf(self): - d = self.do_upload_sdmf() - d.addCallback(self._test_read_and_download, self.small_data) - return d + async def test_read_and_download_sdmf(self) -> None: + result = await self.do_upload_sdmf() + await self._test_read_and_download(result, self.small_data) - def test_read_and_download_sdmf_zero_length(self): - d = self.do_upload_empty_sdmf() - d.addCallback(self._test_read_and_download, b"") - return d + async def test_read_and_download_sdmf_zero_length(self) -> None: + result = await self.do_upload_empty_sdmf() + await self._test_read_and_download(result, b"") diff --git a/src/allmydata/test/mutable/util.py b/src/allmydata/test/mutable/util.py index dac61a6e3..fd1fc2970 100644 --- a/src/allmydata/test/mutable/util.py +++ b/src/allmydata/test/mutable/util.py @@ -1,16 +1,8 @@ """ Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals -from future.utils import PY2, bchr -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - -from past.builtins import long +from future.utils import bchr from io import BytesIO import attr @@ -25,7 +17,6 @@ from allmydata.storage_client import StorageFarmBroker from allmydata.mutable.layout import MDMFSlotReadProxy from allmydata.mutable.publish import MutableData from ..common import ( - TEST_RSA_KEY_SIZE, EMPTY_CLIENT_CONFIG, ) @@ -136,8 +127,8 @@ class FakeStorageServer(object): continue vector = response[shnum] = [] for (offset, length) in readv: - assert isinstance(offset, (int, long)), offset - assert isinstance(length, (int, long)), length + assert isinstance(offset, int), offset + assert isinstance(length, int), length vector.append(shares[shnum][offset:offset+length]) return response d.addCallback(_read) @@ -287,7 +278,7 @@ def make_storagebroker_with_peers(peers): return storage_broker -def make_nodemaker(s=None, num_peers=10, keysize=TEST_RSA_KEY_SIZE): +def make_nodemaker(s=None, num_peers=10): """ Make a ``NodeMaker`` connected to some number of fake storage servers. @@ -298,20 +289,20 @@ def make_nodemaker(s=None, num_peers=10, keysize=TEST_RSA_KEY_SIZE): the node maker. """ storage_broker = make_storagebroker(s, num_peers) - return make_nodemaker_with_storage_broker(storage_broker, keysize) + return make_nodemaker_with_storage_broker(storage_broker) -def make_nodemaker_with_peers(peers, keysize=TEST_RSA_KEY_SIZE): +def make_nodemaker_with_peers(peers): """ Make a ``NodeMaker`` connected to the given storage servers. :param list peers: The storage servers to associate with the node maker. 
""" storage_broker = make_storagebroker_with_peers(peers) - return make_nodemaker_with_storage_broker(storage_broker, keysize) + return make_nodemaker_with_storage_broker(storage_broker) -def make_nodemaker_with_storage_broker(storage_broker, keysize): +def make_nodemaker_with_storage_broker(storage_broker): """ Make a ``NodeMaker`` using the given storage broker. @@ -319,8 +310,6 @@ def make_nodemaker_with_storage_broker(storage_broker, keysize): """ sh = client.SecretHolder(b"lease secret", b"convergence secret") keygen = client.KeyGenerator() - if keysize: - keygen.set_default_keysize(keysize) nodemaker = NodeMaker(storage_broker, sh, None, None, None, {"k": 3, "n": 10}, SDMF_VERSION, keygen) diff --git a/src/allmydata/test/no_network.py b/src/allmydata/test/no_network.py index 2f75f9274..dbf994ee0 100644 --- a/src/allmydata/test/no_network.py +++ b/src/allmydata/test/no_network.py @@ -1,31 +1,24 @@ """ -Ported to Python 3. +This contains a test harness that creates a full Tahoe grid in a single +process (actually in a single MultiService) which does not use the network. +It does not use an Introducer, and there are no foolscap Tubs. Each storage +server puts real shares on disk, but is accessed through loopback +RemoteReferences instead of over serialized SSL. It is not as complete as +the common.SystemTestMixin framework (which does use the network), but +should be considerably faster: on my laptop, it takes 50-80ms to start up, +whereas SystemTestMixin takes close to 2s. + +This should be useful for tests which want to examine and/or manipulate the +uploaded shares, checker/verifier/repairer tests, etc. The clients have no +Tubs, so it is not useful for tests that involve a Helper. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals -# This contains a test harness that creates a full Tahoe grid in a single -# process (actually in a single MultiService) which does not use the network. -# It does not use an Introducer, and there are no foolscap Tubs. Each storage -# server puts real shares on disk, but is accessed through loopback -# RemoteReferences instead of over serialized SSL. It is not as complete as -# the common.SystemTestMixin framework (which does use the network), but -# should be considerably faster: on my laptop, it takes 50-80ms to start up, -# whereas SystemTestMixin takes close to 2s. +from __future__ import annotations -# This should be useful for tests which want to examine and/or manipulate the -# uploaded shares, checker/verifier/repairer tests, etc. The clients have no -# Tubs, so it is not useful for tests that involve a Helper or the -# control.furl . 
- -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 -from past.builtins import unicode from six import ensure_text +from typing import Callable + import os from base64 import b32encode from functools import ( @@ -46,7 +39,9 @@ from allmydata.util.assertutil import _assert from allmydata import uri as tahoe_uri from allmydata.client import _Client -from allmydata.storage.server import StorageServer, storage_index_to_dir +from allmydata.storage.server import ( + StorageServer, storage_index_to_dir, FoolscapStorageServer, +) from allmydata.util import fileutil, idlib, hashutil from allmydata.util.hashutil import permute_server_hash from allmydata.util.fileutil import abspath_expanduser_unicode @@ -55,7 +50,6 @@ from allmydata.storage_client import ( _StorageServer, ) from .common import ( - TEST_RSA_KEY_SIZE, SameProcessStreamEndpointAssigner, ) @@ -190,6 +184,10 @@ class NoNetworkServer(object): return self def __deepcopy__(self, memodict): return self + + def upload_permitted(self): + return True + def get_serverid(self): return self.serverid def get_permutation_seed(self): @@ -220,7 +218,7 @@ class NoNetworkServer(object): @implementer(IStorageBroker) class NoNetworkStorageBroker(object): # type: ignore # missing many methods - def get_servers_for_psi(self, peer_selection_index): + def get_servers_for_psi(self, peer_selection_index, for_upload=True): def _permuted(server): seed = server.get_permutation_seed() return permute_server_hash(peer_selection_index, seed) @@ -242,7 +240,7 @@ def create_no_network_client(basedir): :return: a Deferred yielding an instance of _Client subclass which does no actual networking but has the same API. 
""" - basedir = abspath_expanduser_unicode(unicode(basedir)) + basedir = abspath_expanduser_unicode(str(basedir)) fileutil.make_dirs(os.path.join(basedir, "private"), 0o700) from allmydata.client import read_config @@ -251,11 +249,10 @@ def create_no_network_client(basedir): client = _NoNetworkClient( config, main_tub=None, - control_tub=None, i2p_provider=None, tor_provider=None, introducer_clients=[], - storage_farm_broker=storage_broker, + storage_farm_broker=storage_broker ) # this is a (pre-existing) reference-cycle and also a bad idea, see: # https://tahoe-lafs.org/trac/tahoe-lafs/ticket/2949 @@ -274,8 +271,6 @@ class _NoNetworkClient(_Client): # type: ignore # tahoe-lafs/ticket/3573 pass def init_introducer_client(self): pass - def create_control_tub(self): - pass def create_log_tub(self): pass def setup_logging(self): @@ -284,8 +279,6 @@ class _NoNetworkClient(_Client): # type: ignore # tahoe-lafs/ticket/3573 service.MultiService.startService(self) def stopService(self): return service.MultiService.stopService(self) - def init_control(self): - pass def init_helper(self): pass def init_key_gen(self): @@ -392,7 +385,6 @@ class NoNetworkGrid(service.MultiService): if not c: c = yield create_no_network_client(clientdir) - c.set_default_mutable_keysize(TEST_RSA_KEY_SIZE) c.nodeid = clientid c.short_nodeid = b32encode(clientid).lower()[:8] @@ -418,7 +410,7 @@ class NoNetworkGrid(service.MultiService): ss.setServiceParent(middleman) serverid = ss.my_nodeid self.servers_by_number[i] = ss - wrapper = wrap_storage_server(ss) + wrapper = wrap_storage_server(FoolscapStorageServer(ss)) self.wrappers_by_id[serverid] = wrapper self.proxies_by_id[serverid] = NoNetworkServer(serverid, wrapper) self.rebuild_serverlist() @@ -484,7 +476,21 @@ class GridTestMixin(object): ]) def set_up_grid(self, num_clients=1, num_servers=10, - client_config_hooks={}, oneshare=False): + client_config_hooks=None, oneshare=False): + """ + Create a Tahoe-LAFS storage grid. 
+ + :param num_clients: See ``NoNetworkGrid`` + :param num_servers: See `NoNetworkGrid`` + :param client_config_hooks: See ``NoNetworkGrid`` + + :param bool oneshare: If ``True`` then the first client node is + configured with ``n == k == happy == 1``. + + :return: ``None`` + """ + if client_config_hooks is None: + client_config_hooks = {} # self.basedir must be set port_assigner = SameProcessStreamEndpointAssigner() port_assigner.setUp() @@ -562,7 +568,15 @@ class GridTestMixin(object): pass return sorted(shares) - def copy_shares(self, uri): + def copy_shares(self, uri: bytes) -> dict[bytes, bytes]: + """ + Read all of the share files for the given capability from the storage area + of the storage servers created by ``set_up_grid``. + + :param bytes uri: A Tahoe-LAFS data capability. + + :return: A ``dict`` mapping share file names to share file contents. + """ shares = {} for (shnum, serverid, sharefile) in self.find_uri_shares(uri): with open(sharefile, "rb") as f: @@ -606,11 +620,15 @@ class GridTestMixin(object): with open(i_sharefile, "wb") as f: f.write(corruptdata) - def corrupt_all_shares(self, uri, corruptor, debug=False): + def corrupt_all_shares(self, uri: bytes, corruptor: Callable[[bytes, bool], bytes], debug: bool=False): + """ + Apply ``corruptor`` to the contents of all share files associated with a + given capability and replace the share file contents with its result. + """ for (i_shnum, i_serverid, i_sharefile) in self.find_uri_shares(uri): with open(i_sharefile, "rb") as f: sharedata = f.read() - corruptdata = corruptor(sharedata, debug=debug) + corruptdata = corruptor(sharedata, debug) with open(i_sharefile, "wb") as f: f.write(corruptdata) diff --git a/src/allmydata/test/storage_plugin.py b/src/allmydata/test/storage_plugin.py index d3f1ec7c9..46088903f 100644 --- a/src/allmydata/test/storage_plugin.py +++ b/src/allmydata/test/storage_plugin.py @@ -4,15 +4,7 @@ functionality. Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 -from future.utils import native_str, native_str_to_bytes from six import ensure_str import attr @@ -47,7 +39,7 @@ from allmydata.util.jsonbytes import ( class RIDummy(RemoteInterface): - __remote_name__ = native_str("RIDummy.tahoe.allmydata.com") + __remote_name__ = "RIDummy.tahoe.allmydata.com" def just_some_method(): """ @@ -94,7 +86,7 @@ class DummyStorage(object): """ items = configuration.items(self._client_section_name, []) resource = Data( - native_str_to_bytes(dumps(dict(items))), + dumps(dict(items)).encode("utf-8"), ensure_str("text/json"), ) # Give it some dynamic stuff too. @@ -112,7 +104,7 @@ class GetCounter(Resource, object): value = 0 def render_GET(self, request): self.value += 1 - return native_str_to_bytes(dumps({"value": self.value})) + return dumps({"value": self.value}).encode("utf-8") @implementer(RIDummy) diff --git a/src/allmydata/test/strategies.py b/src/allmydata/test/strategies.py index c0f558ef6..a15e40d9a 100644 --- a/src/allmydata/test/strategies.py +++ b/src/allmydata/test/strategies.py @@ -3,19 +3,12 @@ Hypothesis strategies use for testing Tahoe-LAFS. Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 from hypothesis.strategies import ( one_of, builds, binary, + integers, ) from ..uri import ( @@ -25,6 +18,11 @@ from ..uri import ( MDMFDirectoryURI, ) +from allmydata.util.base32 import ( + b2a, +) + + def write_capabilities(): """ Build ``IURI`` providers representing all kinds of write capabilities. @@ -119,3 +117,28 @@ def dir2_mdmf_capabilities(): MDMFDirectoryURI, mdmf_capabilities(), ) + + +def offsets(min_value=0, max_value=2 ** 16): + """ + Build ``int`` values that could be used as valid offsets into a sequence + (such as share data in a share file). + """ + return integers(min_value, max_value) + +def lengths(min_value=1, max_value=2 ** 16): + """ + Build ``int`` values that could be used as valid lengths of data (such as + share data in a share file). + """ + return integers(min_value, max_value) + + +def base32text(): + """ + Build text()s that are valid base32 + """ + return builds( + lambda b: str(b2a(b), "ascii"), + binary(), + ) diff --git a/src/allmydata/test/test_abbreviate.py b/src/allmydata/test/test_abbreviate.py index 3ef1e96a6..082dadf4f 100644 --- a/src/allmydata/test/test_abbreviate.py +++ b/src/allmydata/test/test_abbreviate.py @@ -3,14 +3,6 @@ Tests for allmydata.util.abbreviate. Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 from datetime import timedelta diff --git a/src/allmydata/test/test_auth.py b/src/allmydata/test/test_auth.py index d5198d326..696ad1709 100644 --- a/src/allmydata/test/test_auth.py +++ b/src/allmydata/test/test_auth.py @@ -1,14 +1,17 @@ """ Ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals -from future.utils import PY2 -if PY2: - from future.builtins import str # noqa: F401 +from typing import Literal + +from hypothesis import ( + given, +) +from hypothesis.strategies import ( + text, + characters, + lists, +) from twisted.trial import unittest from twisted.python import filepath @@ -38,25 +41,187 @@ dBSD8940XU3YW+oeq8e+p3yQ2GinHfeJ3BYQyNQLuMAJ -----END RSA PRIVATE KEY----- """) -DUMMY_ACCOUNTS = u"""\ -alice herpassword URI:DIR2:aaaaaaaaaaaaaaaaaaaaaaaaaa:1111111111111111111111111111111111111111111111111111 -bob sekrit URI:DIR2:bbbbbbbbbbbbbbbbbbbbbbbbbb:2222222222222222222222222222222222222222222222222222 +DUMMY_KEY_DSA = keys.Key.fromString("""\ +-----BEGIN OPENSSH PRIVATE KEY----- +b3BlbnNzaC1rZXktdjEAAAAABG5vbmUAAAAEbm9uZQAAAAAAAAABAAABsQAAAAdzc2gtZH +NzAAAAgQDKMh/ELaiP21LYRBuPbUy7dUhv/XZwV7aS1LzxSP+KaJvtDOei8X76XEAfkqX+ +aGh9eup+BLkezrV6LlpO9uPzhY8ChlKpkvw5PZKv/2agSrVxZyG7yEzHNtSBQXE6qNMwIk +N/ycXLGCqyAhQSzRhLz9ETNaslRDLo7YyVWkiuAQAAABUA5nTatFKux5EqZS4EarMWFRBU +i1UAAACAFpkkK+JsPixSTPyn0DNMoGKA0Klqy8h61Ds6pws+4+aJQptUBshpwNw1ypo7MO ++goDZy3wwdWtURTPGMgesNdEfxp8L2/kqE4vpMK0myoczCqOiWMeNB/x1AStbSkBI8WmHW +2htgsC01xbaix/FrA3edK8WEyv+oIxlbV1FkrPkAAACANb0EpCc8uoR4/32rO2JLsbcLBw 
+H5wc2khe7AKkIa9kUknRIRvoCZUtXF5XuXXdRmnpVEm2KcsLdtZjip43asQcqgt0Kz3nuF +kAf7bI98G1waFUimcCSPsal4kCmW2HC11sg/BWOt5qczX/0/3xVxpo6juUeBq9ncnFTvPX +5fOlEAAAHoJkFqHiZBah4AAAAHc3NoLWRzcwAAAIEAyjIfxC2oj9tS2EQbj21Mu3VIb/12 +cFe2ktS88Uj/imib7QznovF++lxAH5Kl/mhofXrqfgS5Hs61ei5aTvbj84WPAoZSqZL8OT +2Sr/9moEq1cWchu8hMxzbUgUFxOqjTMCJDf8nFyxgqsgIUEs0YS8/REzWrJUQy6O2MlVpI +rgEAAAAVAOZ02rRSrseRKmUuBGqzFhUQVItVAAAAgBaZJCvibD4sUkz8p9AzTKBigNCpas +vIetQ7OqcLPuPmiUKbVAbIacDcNcqaOzDvoKA2ct8MHVrVEUzxjIHrDXRH8afC9v5KhOL6 +TCtJsqHMwqjoljHjQf8dQErW0pASPFph1tobYLAtNcW2osfxawN3nSvFhMr/qCMZW1dRZK +z5AAAAgDW9BKQnPLqEeP99qztiS7G3CwcB+cHNpIXuwCpCGvZFJJ0SEb6AmVLVxeV7l13U +Zp6VRJtinLC3bWY4qeN2rEHKoLdCs957hZAH+2yPfBtcGhVIpnAkj7GpeJAplthwtdbIPw +VjreanM1/9P98VcaaOo7lHgavZ3JxU7z1+XzpRAAAAFQC7360pZLbv7PFt4BPFJ8zAHxAe +QwAAAA5leGFya3VuQGJhcnlvbgECAwQ= +-----END OPENSSH PRIVATE KEY----- +""") -# dennis password URI:DIR2:aaaaaaaaaaaaaaaaaaaaaaaaaa:1111111111111111111111111111111111111111111111111111 +ACCOUNTS = u"""\ +# dennis {key} URI:DIR2:aaaaaaaaaaaaaaaaaaaaaaaaaa:1111111111111111111111111111111111111111111111111111 carol {key} URI:DIR2:cccccccccccccccccccccccccc:3333333333333333333333333333333333333333333333333333 """.format(key=str(DUMMY_KEY.public().toString("openssh"), "ascii")).encode("ascii") +# Python str.splitlines considers NEXT LINE, LINE SEPARATOR, and PARAGRAPH +# separator to be line separators, too. However, file.readlines() does not... +LINE_SEPARATORS = ( + '\x0a', # line feed + '\x0b', # vertical tab + '\x0c', # form feed + '\x0d', # carriage return +) + +SURROGATES: Literal["Cs"] = "Cs" + + +class AccountFileParserTests(unittest.TestCase): + """ + Tests for ``load_account_file`` and its helper functions. + """ + @given(lists( + text(alphabet=characters( + blacklist_categories=( + # Surrogates are an encoding trick to help out UTF-16. + # They're not necessary to represent any non-surrogate code + # point in unicode. They're also not legal individually but + # only in pairs. 
+ SURROGATES, + ), + # Exclude all our line separators too. + blacklist_characters=("\n", "\r"), + )), + )) + def test_ignore_comments(self, lines): + """ + ``auth.content_lines`` filters out lines beginning with `#` and empty + lines. + """ + expected = set() + + # It's not clear that real files and StringIO behave sufficiently + # similarly to use the latter instead of the former here. In + # particular, they seem to have distinct and incompatible + # line-splitting rules. + bufpath = self.mktemp() + with open(bufpath, "wt", encoding="utf-8") as buf: + for line in lines: + stripped = line.strip() + is_content = stripped and not stripped.startswith("#") + if is_content: + expected.add(stripped) + buf.write(line + "\n") + + with auth.open_account_file(bufpath) as buf: + actual = set(auth.content_lines(buf)) + + self.assertEqual(expected, actual) + + def test_parse_accounts(self): + """ + ``auth.parse_accounts`` accepts an iterator of account lines and returns + an iterator of structured account data. + """ + alice_key = DUMMY_KEY.public().toString("openssh").decode("utf-8") + alice_cap = "URI:DIR2:aaaa:1111" + + bob_key = DUMMY_KEY_DSA.public().toString("openssh").decode("utf-8") + bob_cap = "URI:DIR2:aaaa:2222" + self.assertEqual( + list(auth.parse_accounts([ + "alice {} {}".format(alice_key, alice_cap), + "bob {} {}".format(bob_key, bob_cap), + ])), + [ + ("alice", DUMMY_KEY.public(), alice_cap), + ("bob", DUMMY_KEY_DSA.public(), bob_cap), + ], + ) + + def test_parse_accounts_rejects_passwords(self): + """ + The iterator returned by ``auth.parse_accounts`` raises ``ValueError`` + when processing reaches a line that has what looks like a password + instead of an ssh key. 
+ """ + with self.assertRaises(ValueError): + list(auth.parse_accounts(["alice apassword URI:DIR2:aaaa:1111"])) + + def test_create_account_maps(self): + """ + ``auth.create_account_maps`` accepts an iterator of structured account + data and returns two mappings: one from account name to rootcap, the + other from account name to public keys. + """ + alice_cap = "URI:DIR2:aaaa:1111" + alice_key = DUMMY_KEY.public() + bob_cap = "URI:DIR2:aaaa:2222" + bob_key = DUMMY_KEY_DSA.public() + accounts = [ + ("alice", alice_key, alice_cap), + ("bob", bob_key, bob_cap), + ] + self.assertEqual( + auth.create_account_maps(accounts), + ({ + b"alice": alice_cap.encode("utf-8"), + b"bob": bob_cap.encode("utf-8"), + }, + { + b"alice": [alice_key], + b"bob": [bob_key], + }), + ) + + def test_load_account_file(self): + """ + ``auth.load_account_file`` accepts an iterator of serialized account lines + and returns two mappings: one from account name to rootcap, the other + from account name to public keys. + """ + alice_key = DUMMY_KEY.public().toString("openssh").decode("utf-8") + alice_cap = "URI:DIR2:aaaa:1111" + + bob_key = DUMMY_KEY_DSA.public().toString("openssh").decode("utf-8") + bob_cap = "URI:DIR2:aaaa:2222" + + accounts = [ + "alice {} {}".format(alice_key, alice_cap), + "bob {} {}".format(bob_key, bob_cap), + "# carol {} {}".format(alice_key, alice_cap), + ] + + self.assertEqual( + auth.load_account_file(accounts), + ({ + b"alice": alice_cap.encode("utf-8"), + b"bob": bob_cap.encode("utf-8"), + }, + { + b"alice": [DUMMY_KEY.public()], + b"bob": [DUMMY_KEY_DSA.public()], + }), + ) + + class AccountFileCheckerKeyTests(unittest.TestCase): """ Tests for key handling done by allmydata.frontends.auth.AccountFileChecker. 
""" def setUp(self): self.account_file = filepath.FilePath(self.mktemp()) - self.account_file.setContent(DUMMY_ACCOUNTS) + self.account_file.setContent(ACCOUNTS) abspath = abspath_expanduser_unicode(str(self.account_file.path)) self.checker = auth.AccountFileChecker(None, abspath) - def test_unknown_user_ssh(self): + def test_unknown_user(self): """ AccountFileChecker.requestAvatarId returns a Deferred that fires with UnauthorizedLogin if called with an SSHPrivateKey object with a @@ -67,67 +232,6 @@ class AccountFileCheckerKeyTests(unittest.TestCase): avatarId = self.checker.requestAvatarId(key_credentials) return self.assertFailure(avatarId, error.UnauthorizedLogin) - def test_unknown_user_password(self): - """ - AccountFileChecker.requestAvatarId returns a Deferred that fires with - UnauthorizedLogin if called with an SSHPrivateKey object with a - username not present in the account file. - - We use a commented out user, so we're also checking that comments are - skipped. - """ - key_credentials = credentials.UsernamePassword(b"dennis", b"password") - d = self.checker.requestAvatarId(key_credentials) - return self.assertFailure(d, error.UnauthorizedLogin) - - def test_password_auth_user_with_ssh_key(self): - """ - AccountFileChecker.requestAvatarId returns a Deferred that fires with - UnauthorizedLogin if called with an SSHPrivateKey object for a username - only associated with a password in the account file. - """ - key_credentials = credentials.SSHPrivateKey( - b"alice", b"md5", None, None, None) - avatarId = self.checker.requestAvatarId(key_credentials) - return self.assertFailure(avatarId, error.UnauthorizedLogin) - - def test_password_auth_user_with_correct_password(self): - """ - AccountFileChecker.requestAvatarId returns a Deferred that fires with - the user if the correct password is given. 
- """ - key_credentials = credentials.UsernamePassword(b"alice", b"herpassword") - d = self.checker.requestAvatarId(key_credentials) - def authenticated(avatarId): - self.assertEqual( - (b"alice", - b"URI:DIR2:aaaaaaaaaaaaaaaaaaaaaaaaaa:1111111111111111111111111111111111111111111111111111"), - (avatarId.username, avatarId.rootcap)) - return d - - def test_password_auth_user_with_correct_hashed_password(self): - """ - AccountFileChecker.requestAvatarId returns a Deferred that fires with - the user if the correct password is given in hashed form. - """ - key_credentials = credentials.UsernameHashedPassword(b"alice", b"herpassword") - d = self.checker.requestAvatarId(key_credentials) - def authenticated(avatarId): - self.assertEqual( - (b"alice", - b"URI:DIR2:aaaaaaaaaaaaaaaaaaaaaaaaaa:1111111111111111111111111111111111111111111111111111"), - (avatarId.username, avatarId.rootcap)) - return d - - def test_password_auth_user_with_wrong_password(self): - """ - AccountFileChecker.requestAvatarId returns a Deferred that fires with - UnauthorizedLogin if the wrong password is given. - """ - key_credentials = credentials.UsernamePassword(b"alice", b"WRONG") - avatarId = self.checker.requestAvatarId(key_credentials) - return self.assertFailure(avatarId, error.UnauthorizedLogin) - def test_unrecognized_key(self): """ AccountFileChecker.requestAvatarId returns a Deferred that fires with diff --git a/src/allmydata/test/test_base32.py b/src/allmydata/test/test_base32.py index 0b9a018b9..83625371f 100644 --- a/src/allmydata/test/test_base32.py +++ b/src/allmydata/test/test_base32.py @@ -3,14 +3,6 @@ Tests for allmydata.util.base32. Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 import base64 diff --git a/src/allmydata/test/test_base62.py b/src/allmydata/test/test_base62.py index 8bbb6dfeb..d77eaef9c 100644 --- a/src/allmydata/test/test_base62.py +++ b/src/allmydata/test/test_base62.py @@ -4,15 +4,6 @@ Tests for allmydata.util.base62. Ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - from past.builtins import chr as byteschr import random, unittest diff --git a/src/allmydata/test/test_checker.py b/src/allmydata/test/test_checker.py index f56ecd089..f116606db 100644 --- a/src/allmydata/test/test_checker.py +++ b/src/allmydata/test/test_checker.py @@ -2,16 +2,6 @@ Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - - import json import os.path, shutil @@ -773,13 +763,13 @@ class AddLease(GridTestMixin, unittest.TestCase): d.addCallback(_check_cr, "mutable-normal") really_did_break = [] - # now break the server's remote_add_lease call + # now break the server's add_lease call def _break_add_lease(ign): def broken_add_lease(*args, **kwargs): really_did_break.append(1) raise KeyError("intentional failure, should be ignored") - assert self.g.servers_by_number[0].remote_add_lease - self.g.servers_by_number[0].remote_add_lease = broken_add_lease + assert self.g.servers_by_number[0].add_lease + self.g.servers_by_number[0].add_lease = broken_add_lease d.addCallback(_break_add_lease) # and confirm that the files still look healthy diff --git a/src/allmydata/test/test_client.py b/src/allmydata/test/test_client.py index fd2837f1d..57748d5fa 100644 --- a/src/allmydata/test/test_client.py +++ b/src/allmydata/test/test_client.py @@ -1,16 +1,7 @@ -""" -Ported to Python 3. 
-""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals +from __future__ import annotations -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - -import os, sys +import os +from unittest import skipIf from functools import ( partial, ) @@ -41,11 +32,13 @@ from twisted.internet import defer from twisted.python.filepath import ( FilePath, ) +from twisted.python.runtime import platform from testtools.matchers import ( Equals, AfterPreprocessing, MatchesListwise, MatchesDict, + ContainsDict, Always, Is, raises, @@ -72,6 +65,7 @@ from allmydata.util import ( fileutil, encodingutil, configutil, + jsonbytes as json, ) from allmydata.util.eliotutil import capture_logging from allmydata.util.fileutil import abspath_expanduser_unicode @@ -83,12 +77,14 @@ from allmydata.scripts.common import ( from foolscap.api import flushEventualQueue import allmydata.test.common_util as testutil from .common import ( + superuser, EMPTY_CLIENT_CONFIG, SyncTestCase, AsyncBrokenTestCase, UseTestPlugins, MemoryIntroducerClient, get_published_announcements, + UseNode, ) from .matchers import ( MatchesSameElements, @@ -152,12 +148,12 @@ class Basic(testutil.ReallyEqualMixin, unittest.TestCase): yield client.create_client(basedir) self.assertIn("[client]helper.furl", str(ctx.exception)) + # if somebody knows a clever way to do this (cause + # EnvironmentError when reading a file that really exists), on + # windows, please fix this + @skipIf(platform.isWindows(), "We don't know how to set permissions on Windows.") + @skipIf(superuser, "cannot test as superuser with all permissions") def test_unreadable_config(self): - if sys.platform == "win32": - # if somebody knows a clever way to do this (cause - # EnvironmentError when reading a file that 
really exists), on - # windows, please fix this - raise unittest.SkipTest("can't make unreadable files on windows") basedir = "test_client.Basic.test_unreadable_config" os.mkdir(basedir) fn = os.path.join(basedir, "tahoe.cfg") @@ -600,7 +596,7 @@ class Basic(testutil.ReallyEqualMixin, unittest.TestCase): "enabled = true\n") c = yield client.create_client(basedir) ss = c.getServiceNamed("storage") - verdict = ss.remote_get_version() + verdict = ss.get_version() self.failUnlessReallyEqual(verdict[b"application-version"], allmydata.__full_version__.encode("ascii")) self.failIfEqual(str(allmydata.__version__), "unknown") @@ -854,6 +850,7 @@ class StorageClients(SyncTestCase): actionType=u"storage-client:broker:set-static-servers", succeeded=True, ), + encoder_=json.AnyBytesJSONEncoder ) def test_static_servers(self, logger): """ @@ -888,6 +885,7 @@ class StorageClients(SyncTestCase): actionType=u"storage-client:broker:make-storage-server", succeeded=False, ), + encoder_=json.AnyBytesJSONEncoder ) def test_invalid_static_server(self, logger): """ @@ -953,13 +951,14 @@ class Run(unittest.TestCase, testutil.StallMixin): @defer.inlineCallbacks def test_reloadable(self): - basedir = FilePath("test_client.Run.test_reloadable") - private = basedir.child("private") - private.makedirs() + from twisted.internet import reactor + dummy = "pb://wl74cyahejagspqgy4x5ukrvfnevlknt@127.0.0.1:58889/bogus" - write_introducer(basedir, "someintroducer", dummy) - basedir.child("tahoe.cfg").setContent(BASECONFIG. encode("ascii")) - c1 = yield client.create_client(basedir.path) + fixture = UseNode(None, None, FilePath(self.mktemp()), dummy, reactor=reactor) + fixture.setUp() + self.addCleanup(fixture.cleanUp) + + c1 = yield fixture.create_node() c1.setServiceParent(self.sparent) # delay to let the service start up completely. 
I'm not entirely sure @@ -981,7 +980,7 @@ class Run(unittest.TestCase, testutil.StallMixin): # also change _check_exit_trigger to use it instead of a raw # reactor.stop, also instrument the shutdown event in an # attribute that we can check.) - c2 = yield client.create_client(basedir.path) + c2 = yield fixture.create_node() c2.setServiceParent(self.sparent) yield c2.disownServiceParent() @@ -1506,3 +1505,45 @@ enabled = {storage_enabled} ), ), ) + + def test_announcement_includes_grid_manager(self): + """ + When Grid Manager is enabled certificates are included in the + announcement + """ + fake_cert = { + "certificate": "{\"expires\":1601687822,\"public_key\":\"pub-v0-cbq6hcf3pxcz6ouoafrbktmkixkeuywpcpbcomzd3lqbkq4nmfga\",\"version\":1}", + "signature": "fvjd3uvvupf2v6tnvkwjd473u3m3inyqkwiclhp7balmchkmn3px5pei3qyfjnhymq4cjcwvbpqmcwwnwswdtrfkpnlaxuih2zbdmda", + } + with self.basedir.child("zero.cert").open("w") as f: + f.write(json.dumps_bytes(fake_cert)) + with self.basedir.child("gm0.cert").open("w") as f: + f.write(json.dumps_bytes(fake_cert)) + + config = client.config_from_string( + self.basedir.path, + "tub.port", + self.get_config( + storage_enabled=True, + more_storage="grid_management = True", + more_sections=( + "[grid_managers]\n" + "gm0 = pub-v0-ibpbsexcjfbv3ni7gwlclgn6mldaqnqd5mrtan2fnq2b27xnovca\n" + "[grid_manager_certificates]\n" + "foo = zero.cert\n" + ) + ), + ) + + self.assertThat( + client.create_client_from_config( + config, + _introducer_factory=MemoryIntroducerClient, + ), + succeeded(AfterPreprocessing( + lambda client: get_published_announcements(client)[0].ann, + ContainsDict({ + "grid-manager-certificates": Equals([fake_cert]), + }), + )), + ) diff --git a/src/allmydata/test/test_codec.py b/src/allmydata/test/test_codec.py index ee64e2bf2..59595f760 100644 --- a/src/allmydata/test/test_codec.py +++ b/src/allmydata/test/test_codec.py @@ -3,14 +3,6 @@ Tests for allmydata.codec. Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 import os from twisted.trial import unittest diff --git a/src/allmydata/test/test_common_util.py b/src/allmydata/test/test_common_util.py index 55986d123..01982473a 100644 --- a/src/allmydata/test/test_common_util.py +++ b/src/allmydata/test/test_common_util.py @@ -1,29 +1,93 @@ """ This module has been ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 +import sys import random -import unittest +from hypothesis import given +from hypothesis.strategies import lists, sampled_from +from testtools.matchers import Equals +from twisted.python.reflect import ( + ModuleNotFound, + namedAny, +) + +from .common import ( + SyncTestCase, + disable_modules, +) from allmydata.test.common_util import flip_one_bit -class TestFlipOneBit(unittest.TestCase): +class TestFlipOneBit(SyncTestCase): def setUp(self): - random.seed(42) # I tried using version=1 on PY3 to avoid the if below, to no avail. + super(TestFlipOneBit, self).setUp() + # I tried using version=1 on PY3 to avoid the if below, to no avail. 
+ random.seed(42) def test_accepts_byte_string(self): actual = flip_one_bit(b'foo') - self.assertEqual(actual, b'fno' if PY2 else b'fom') + self.assertEqual(actual, b'fom') def test_rejects_unicode_string(self): self.assertRaises(AssertionError, flip_one_bit, u'foo') + + + +def some_existing_modules(): + """ + Build the names of modules (as native strings) that exist and can be + imported. + """ + candidates = sorted( + name + for name + in sys.modules + if "." not in name + and sys.modules[name] is not None + ) + return sampled_from(candidates) + +class DisableModulesTests(SyncTestCase): + """ + Tests for ``disable_modules``. + """ + def setup_example(self): + return sys.modules.copy() + + def teardown_example(self, safe_modules): + sys.modules.update(safe_modules) + + @given(lists(some_existing_modules(), unique=True)) + def test_importerror(self, module_names): + """ + While the ``disable_modules`` context manager is active any import of the + modules identified by the names passed to it result in ``ImportError`` + being raised. + """ + def get_modules(): + return list( + namedAny(name) + for name + in module_names + ) + before_modules = get_modules() + + with disable_modules(*module_names): + for name in module_names: + with self.assertRaises(ModuleNotFound): + namedAny(name) + + after_modules = get_modules() + self.assertThat(before_modules, Equals(after_modules)) + + def test_dotted_names_rejected(self): + """ + If names with "." in them are passed to ``disable_modules`` then + ``ValueError`` is raised. + """ + with self.assertRaises(ValueError): + with disable_modules("foo.bar"): + pass diff --git a/src/allmydata/test/test_configutil.py b/src/allmydata/test/test_configutil.py index 1b8fb5029..a4e7f56ea 100644 --- a/src/allmydata/test/test_configutil.py +++ b/src/allmydata/test/test_configutil.py @@ -3,15 +3,6 @@ Tests for allmydata.util.configutil. Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - # Omitted dict, cause worried about interactions. - from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, list, object, range, str, max, min # noqa: F401 import os.path from configparser import ( diff --git a/src/allmydata/test/test_connection_status.py b/src/allmydata/test/test_connection_status.py index 2bd8bf6ab..da41f5a47 100644 --- a/src/allmydata/test/test_connection_status.py +++ b/src/allmydata/test/test_connection_status.py @@ -1,25 +1,46 @@ """ Tests for allmydata.util.connection_status. - -Port to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals -from future.utils import PY2 -if PY2: - from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 +from __future__ import annotations -import mock +from typing import Optional -from twisted.trial import unittest +from foolscap.reconnector import ReconnectionInfo, Reconnector +from foolscap.info import ConnectionInfo from ..util import connection_status +from .common import SyncTestCase -class Status(unittest.TestCase): - def test_hint_statuses(self): +def reconnector(info: ReconnectionInfo) -> Reconnector: + rc = Reconnector(None, None, (), {}) # type: ignore[no-untyped-call] + rc._reconnectionInfo = info + return rc + +def connection_info( + statuses: dict[str, str], + handlers: dict[str, str], + winningHint: Optional[str], + establishedAt: Optional[int], +) -> ConnectionInfo: + ci = ConnectionInfo() # type: ignore[no-untyped-call] + ci.connectorStatuses = statuses + ci.connectionHandlers = handlers + ci.winningHint = winningHint + ci.establishedAt 
= establishedAt + return ci + +def reconnection_info( + state: str, + connection_info: ConnectionInfo, +) -> ReconnectionInfo: + ri = ReconnectionInfo() # type: ignore[no-untyped-call] + ri.state = state + ri.connectionInfo = connection_info + return ri + +class Status(SyncTestCase): + def test_hint_statuses(self) -> None: ncs = connection_status._hint_statuses(["h2","h1"], {"h1": "hand1", "h4": "hand4"}, {"h1": "st1", "h2": "st2", @@ -27,17 +48,10 @@ class Status(unittest.TestCase): self.assertEqual(ncs, {"h1 via hand1": "st1", "h2": "st2"}) - def test_reconnector_connected(self): - ci = mock.Mock() - ci.connectorStatuses = {"h1": "st1"} - ci.connectionHandlers = {"h1": "hand1"} - ci.winningHint = "h1" - ci.establishedAt = 120 - ri = mock.Mock() - ri.state = "connected" - ri.connectionInfo = ci - rc = mock.Mock - rc.getReconnectionInfo = mock.Mock(return_value=ri) + def test_reconnector_connected(self) -> None: + ci = connection_info({"h1": "st1"}, {"h1": "hand1"}, "h1", 120) + ri = reconnection_info("connected", ci) + rc = reconnector(ri) cs = connection_status.from_foolscap_reconnector(rc, 123) self.assertEqual(cs.connected, True) self.assertEqual(cs.summary, "Connected to h1 via hand1") @@ -45,17 +59,10 @@ class Status(unittest.TestCase): self.assertEqual(cs.last_connection_time, 120) self.assertEqual(cs.last_received_time, 123) - def test_reconnector_connected_others(self): - ci = mock.Mock() - ci.connectorStatuses = {"h1": "st1", "h2": "st2"} - ci.connectionHandlers = {"h1": "hand1"} - ci.winningHint = "h1" - ci.establishedAt = 120 - ri = mock.Mock() - ri.state = "connected" - ri.connectionInfo = ci - rc = mock.Mock - rc.getReconnectionInfo = mock.Mock(return_value=ri) + def test_reconnector_connected_others(self) -> None: + ci = connection_info({"h1": "st1", "h2": "st2"}, {"h1": "hand1"}, "h1", 120) + ri = reconnection_info("connected", ci) + rc = reconnector(ri) cs = connection_status.from_foolscap_reconnector(rc, 123) self.assertEqual(cs.connected, True) 
self.assertEqual(cs.summary, "Connected to h1 via hand1") @@ -63,18 +70,11 @@ class Status(unittest.TestCase): self.assertEqual(cs.last_connection_time, 120) self.assertEqual(cs.last_received_time, 123) - def test_reconnector_connected_listener(self): - ci = mock.Mock() - ci.connectorStatuses = {"h1": "st1", "h2": "st2"} - ci.connectionHandlers = {"h1": "hand1"} + def test_reconnector_connected_listener(self) -> None: + ci = connection_info({"h1": "st1", "h2": "st2"}, {"h1": "hand1"}, None, 120) ci.listenerStatus = ("listener1", "successful") - ci.winningHint = None - ci.establishedAt = 120 - ri = mock.Mock() - ri.state = "connected" - ri.connectionInfo = ci - rc = mock.Mock - rc.getReconnectionInfo = mock.Mock(return_value=ri) + ri = reconnection_info("connected", ci) + rc = reconnector(ri) cs = connection_status.from_foolscap_reconnector(rc, 123) self.assertEqual(cs.connected, True) self.assertEqual(cs.summary, "Connected via listener (listener1)") @@ -83,15 +83,10 @@ class Status(unittest.TestCase): self.assertEqual(cs.last_connection_time, 120) self.assertEqual(cs.last_received_time, 123) - def test_reconnector_connecting(self): - ci = mock.Mock() - ci.connectorStatuses = {"h1": "st1", "h2": "st2"} - ci.connectionHandlers = {"h1": "hand1"} - ri = mock.Mock() - ri.state = "connecting" - ri.connectionInfo = ci - rc = mock.Mock - rc.getReconnectionInfo = mock.Mock(return_value=ri) + def test_reconnector_connecting(self) -> None: + ci = connection_info({"h1": "st1", "h2": "st2"}, {"h1": "hand1"}, None, None) + ri = reconnection_info("connecting", ci) + rc = reconnector(ri) cs = connection_status.from_foolscap_reconnector(rc, 123) self.assertEqual(cs.connected, False) self.assertEqual(cs.summary, "Trying to connect") @@ -100,19 +95,13 @@ class Status(unittest.TestCase): self.assertEqual(cs.last_connection_time, None) self.assertEqual(cs.last_received_time, 123) - def test_reconnector_waiting(self): - ci = mock.Mock() - ci.connectorStatuses = {"h1": "st1", "h2": 
"st2"} - ci.connectionHandlers = {"h1": "hand1"} - ri = mock.Mock() - ri.state = "waiting" + def test_reconnector_waiting(self) -> None: + ci = connection_info({"h1": "st1", "h2": "st2"}, {"h1": "hand1"}, None, None) + ri = reconnection_info("waiting", ci) ri.lastAttempt = 10 ri.nextAttempt = 20 - ri.connectionInfo = ci - rc = mock.Mock - rc.getReconnectionInfo = mock.Mock(return_value=ri) - with mock.patch("time.time", return_value=12): - cs = connection_status.from_foolscap_reconnector(rc, 5) + rc = reconnector(ri) + cs = connection_status.from_foolscap_reconnector(rc, 5, time=lambda: 12) self.assertEqual(cs.connected, False) self.assertEqual(cs.summary, "Reconnecting in 8 seconds (last attempt 2s ago)") diff --git a/src/allmydata/test/test_connections.py b/src/allmydata/test/test_connections.py index 5816afdab..8cc985816 100644 --- a/src/allmydata/test/test_connections.py +++ b/src/allmydata/test/test_connections.py @@ -1,14 +1,6 @@ """ Ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 from twisted.trial import unittest from twisted.internet import reactor diff --git a/src/allmydata/test/test_consumer.py b/src/allmydata/test/test_consumer.py index a689de462..085f6b7ac 100644 --- a/src/allmydata/test/test_consumer.py +++ b/src/allmydata/test/test_consumer.py @@ -4,21 +4,18 @@ Tests for allmydata.util.consumer. Ported to Python 3. 
""" -from __future__ import unicode_literals -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - from zope.interface import implementer -from twisted.trial.unittest import TestCase from twisted.internet.interfaces import IPushProducer, IPullProducer from allmydata.util.consumer import MemoryConsumer +from .common import ( + SyncTestCase, +) +from testtools.matchers import ( + Equals, +) + @implementer(IPushProducer) @implementer(IPullProducer) @@ -33,6 +30,12 @@ class Producer(object): self.consumer = consumer self.done = False + def stopProducing(self): + pass + + def pauseProducing(self): + pass + def resumeProducing(self): """Kick off streaming.""" self.iterate() @@ -50,7 +53,7 @@ class Producer(object): self.consumer.unregisterProducer() -class MemoryConsumerTests(TestCase): +class MemoryConsumerTests(SyncTestCase): """Tests for MemoryConsumer.""" def test_push_producer(self): @@ -60,14 +63,14 @@ class MemoryConsumerTests(TestCase): consumer = MemoryConsumer() producer = Producer(consumer, [b"abc", b"def", b"ghi"]) consumer.registerProducer(producer, True) - self.assertEqual(consumer.chunks, [b"abc"]) + self.assertThat(consumer.chunks, Equals([b"abc"])) producer.iterate() producer.iterate() - self.assertEqual(consumer.chunks, [b"abc", b"def", b"ghi"]) - self.assertEqual(consumer.done, False) + self.assertThat(consumer.chunks, Equals([b"abc", b"def", b"ghi"])) + self.assertFalse(consumer.done) producer.iterate() - self.assertEqual(consumer.chunks, [b"abc", b"def", b"ghi"]) - self.assertEqual(consumer.done, True) + self.assertThat(consumer.chunks, Equals([b"abc", b"def", b"ghi"])) + self.assertTrue(consumer.done) def test_pull_producer(self): """ @@ -76,8 +79,8 @@ class 
MemoryConsumerTests(TestCase): consumer = MemoryConsumer() producer = Producer(consumer, [b"abc", b"def", b"ghi"]) consumer.registerProducer(producer, False) - self.assertEqual(consumer.chunks, [b"abc", b"def", b"ghi"]) - self.assertEqual(consumer.done, True) + self.assertThat(consumer.chunks, Equals([b"abc", b"def", b"ghi"])) + self.assertTrue(consumer.done) # download_to_data() is effectively tested by some of the filenode tests, e.g. diff --git a/src/allmydata/test/test_crawler.py b/src/allmydata/test/test_crawler.py index a9be90c43..bf08828bd 100644 --- a/src/allmydata/test/test_crawler.py +++ b/src/allmydata/test/test_crawler.py @@ -4,17 +4,6 @@ Tests for allmydata.storage.crawler. Ported to Python 3. """ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import -from __future__ import unicode_literals - -from future.utils import PY2, PY3 -if PY2: - # Don't use future bytes, since it breaks tests. No further works is - # needed, once we're only on Python 3 we'll be deleting this future imports - # anyway, and tests pass just fine on Python 3. 
- from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, dict, list, object, range, str, max, min # noqa: F401 import time import os.path @@ -27,7 +16,7 @@ from allmydata.util import fileutil, hashutil, pollmixin from allmydata.storage.server import StorageServer, si_b2a from allmydata.storage.crawler import ShareCrawler, TimeSliceExceeded -from allmydata.test.common_util import StallMixin, FakeCanary +from allmydata.test.common_util import StallMixin class BucketEnumeratingCrawler(ShareCrawler): cpu_slice = 500 # make sure it can complete in a single slice @@ -37,10 +26,9 @@ class BucketEnumeratingCrawler(ShareCrawler): self.all_buckets = [] self.finished_d = defer.Deferred() def process_bucket(self, cycle, prefix, prefixdir, storage_index_b32): - if PY3: - # Bucket _inputs_ are bytes, and that's what we will compare this - # to: - storage_index_b32 = storage_index_b32.encode("ascii") + # Bucket _inputs_ are bytes, and that's what we will compare this + # to: + storage_index_b32 = storage_index_b32.encode("ascii") self.all_buckets.append(storage_index_b32) def finished_cycle(self, cycle): eventually(self.finished_d.callback, None) @@ -55,10 +43,9 @@ class PacedCrawler(ShareCrawler): self.finished_d = defer.Deferred() self.yield_cb = None def process_bucket(self, cycle, prefix, prefixdir, storage_index_b32): - if PY3: - # Bucket _inputs_ are bytes, and that's what we will compare this - # to: - storage_index_b32 = storage_index_b32.encode("ascii") + # Bucket _inputs_ are bytes, and that's what we will compare this + # to: + storage_index_b32 = storage_index_b32.encode("ascii") self.all_buckets.append(storage_index_b32) self.countdown -= 1 if self.countdown == 0: @@ -124,12 +111,12 @@ class Basic(unittest.TestCase, StallMixin, pollmixin.PollMixin): def write(self, i, ss, serverid, tail=0): si = self.si(i) si = si[:-1] + bytes(bytearray((tail,))) - had,made = ss.remote_allocate_buckets(si, - self.rs(i, serverid), - 
self.cs(i, serverid), - set([0]), 99, FakeCanary()) - made[0].remote_write(0, b"data") - made[0].remote_close() + had,made = ss.allocate_buckets(si, + self.rs(i, serverid), + self.cs(i, serverid), + set([0]), 99) + made[0].write(0, b"data") + made[0].close() return si_b2a(si) def test_immediate(self): diff --git a/src/allmydata/test/test_crypto.py b/src/allmydata/test/test_crypto.py index 0aefa757f..dd5b53e5f 100644 --- a/src/allmydata/test/test_crypto.py +++ b/src/allmydata/test/test_crypto.py @@ -1,14 +1,3 @@ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - -from future.utils import native_bytes - import unittest from base64 import b64decode @@ -48,7 +37,7 @@ class TestRegression(unittest.TestCase): # priv_str = b64encode(priv.serialize()) # pub_str = b64encode(priv.get_verifying_key().serialize()) RSA_2048_PRIV_KEY = b64decode(f.read().strip()) - assert isinstance(RSA_2048_PRIV_KEY, native_bytes) + assert isinstance(RSA_2048_PRIV_KEY, bytes) with RESOURCE_DIR.child('pycryptopp-rsa-2048-sig.txt').open('r') as f: # Signature created using `RSA_2048_PRIV_KEY` via: @@ -60,6 +49,28 @@ class TestRegression(unittest.TestCase): # The public key corresponding to `RSA_2048_PRIV_KEY`. 
RSA_2048_PUB_KEY = b64decode(f.read().strip()) + with RESOURCE_DIR.child('pycryptopp-rsa-1024-priv.txt').open('r') as f: + # Created using `pycryptopp`: + # + # from base64 import b64encode + # from pycryptopp.publickey import rsa + # priv = rsa.generate(1024) + # priv_str = b64encode(priv.serialize()) + # pub_str = b64encode(priv.get_verifying_key().serialize()) + RSA_TINY_PRIV_KEY = b64decode(f.read().strip()) + assert isinstance(RSA_TINY_PRIV_KEY, bytes) + + with RESOURCE_DIR.child('pycryptopp-rsa-32768-priv.txt').open('r') as f: + # Created using `pycryptopp`: + # + # from base64 import b64encode + # from pycryptopp.publickey import rsa + # priv = rsa.generate(32768) + # priv_str = b64encode(priv.serialize()) + # pub_str = b64encode(priv.get_verifying_key().serialize()) + RSA_HUGE_PRIV_KEY = b64decode(f.read().strip()) + assert isinstance(RSA_HUGE_PRIV_KEY, bytes) + def test_old_start_up_test(self): """ This was the old startup test run at import time in `pycryptopp.cipher.aes`. @@ -232,6 +243,22 @@ class TestRegression(unittest.TestCase): priv_key, pub_key = rsa.create_signing_keypair_from_string(self.RSA_2048_PRIV_KEY) rsa.verify_signature(pub_key, self.RSA_2048_SIG, b'test') + def test_decode_tiny_rsa_keypair(self): + ''' + An unreasonably small RSA key is rejected ("unreasonably small" + means less that 2048 bits) + ''' + with self.assertRaises(ValueError): + rsa.create_signing_keypair_from_string(self.RSA_TINY_PRIV_KEY) + + def test_decode_huge_rsa_keypair(self): + ''' + An unreasonably _large_ RSA key is rejected ("unreasonably large" + means 32768 or more bits) + ''' + with self.assertRaises(ValueError): + rsa.create_signing_keypair_from_string(self.RSA_HUGE_PRIV_KEY) + def test_encrypt_data_not_bytes(self): ''' only bytes can be encrypted @@ -294,7 +321,7 @@ class TestEd25519(unittest.TestCase): private_key, public_key = ed25519.create_signing_keypair() private_key_str = ed25519.string_from_signing_key(private_key) - 
self.assertIsInstance(private_key_str, native_bytes) + self.assertIsInstance(private_key_str, bytes) private_key2, public_key2 = ed25519.signing_keypair_from_string(private_key_str) @@ -310,7 +337,7 @@ class TestEd25519(unittest.TestCase): # ditto, but for the verifying keys public_key_str = ed25519.string_from_verifying_key(public_key) - self.assertIsInstance(public_key_str, native_bytes) + self.assertIsInstance(public_key_str, bytes) public_key2 = ed25519.verifying_key_from_string(public_key_str) self.assertEqual( @@ -414,7 +441,7 @@ class TestRsa(unittest.TestCase): priv_key, pub_key = rsa.create_signing_keypair(2048) priv_key_str = rsa.der_string_from_signing_key(priv_key) - self.assertIsInstance(priv_key_str, native_bytes) + self.assertIsInstance(priv_key_str, bytes) priv_key2, pub_key2 = rsa.create_signing_keypair_from_string(priv_key_str) @@ -469,7 +496,7 @@ class TestUtil(unittest.TestCase): """ remove a simple prefix properly """ - self.assertEquals( + self.assertEqual( remove_prefix(b"foobar", b"foo"), b"bar" ) @@ -485,7 +512,7 @@ class TestUtil(unittest.TestCase): """ removing a zero-length prefix does nothing """ - self.assertEquals( + self.assertEqual( remove_prefix(b"foobar", b""), b"foobar", ) @@ -494,7 +521,7 @@ class TestUtil(unittest.TestCase): """ removing a prefix which is the whole string is empty """ - self.assertEquals( + self.assertEqual( remove_prefix(b"foobar", b"foobar"), b"", ) diff --git a/src/allmydata/test/test_deepcheck.py b/src/allmydata/test/test_deepcheck.py index 652e51ea5..7473363c7 100644 --- a/src/allmydata/test/test_deepcheck.py +++ b/src/allmydata/test/test_deepcheck.py @@ -1,25 +1,7 @@ """ Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals -# Python 2 compatibility -# Can't use `builtins.str` because something deep in Twisted callbacks ends up repr'ing -# a `future.types.newstr.newstr` as a *Python 3* byte string representation under -# *Python 2*: -# File "/home/rpatterson/src/work/sfu/tahoe-lafs/.tox/py27/lib/python2.7/site-packages/allmydata/util/netstring.py", line 43, in split_netstring -# assert data[position] == b","[0], position -# exceptions.AssertionError: 15 -# ... -# (Pdb) pp data -# '334:12:b\'mutable-good\',90:URI:SSK-RO:... -from past.builtins import unicode as str -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, max, min # noqa: F401 from six import ensure_text import os, json diff --git a/src/allmydata/test/test_deferredutil.py b/src/allmydata/test/test_deferredutil.py index 2a155089f..34358d0c8 100644 --- a/src/allmydata/test/test_deferredutil.py +++ b/src/allmydata/test/test_deferredutil.py @@ -1,23 +1,18 @@ """ Tests for allmydata.util.deferredutil. - -Ported to Python 3. 
""" -from __future__ import unicode_literals -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -from future.utils import PY2 -if PY2: - from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 +from __future__ import annotations from twisted.trial import unittest from twisted.internet import defer, reactor +from twisted.internet.defer import Deferred from twisted.python.failure import Failure +from hypothesis.strategies import integers +from hypothesis import given from allmydata.util import deferredutil +from allmydata.util.deferredutil import race, MultiFailure class DeferredUtilTests(unittest.TestCase, deferredutil.WaitForDelayedCallsMixin): @@ -129,3 +124,176 @@ class UntilTests(unittest.TestCase): self.assertEqual([1], counter) r1.callback(None) self.assertEqual([2], counter) + + +class AsyncToDeferred(unittest.TestCase): + """Tests for ``deferredutil.async_to_deferred.``""" + + def test_async_to_deferred_success(self): + """ + Normal results from a ``@async_to_deferred``-wrapped function get + turned into a ``Deferred`` with that value. + """ + @deferredutil.async_to_deferred + async def f(x, y): + return x + y + + result = f(1, y=2) + self.assertEqual(self.successResultOf(result), 3) + + def test_async_to_deferred_exception(self): + """ + Exceptions from a ``@async_to_deferred``-wrapped function get + turned into a ``Deferred`` with that value. + """ + @deferredutil.async_to_deferred + async def f(x, y): + return x/y + + result = f(1, 0) + self.assertIsInstance(self.failureResultOf(result).value, ZeroDivisionError) + + + +def _setupRaceState(numDeferreds: int) -> tuple[list[int], list[Deferred[object]]]: + """ + Create a list of Deferreds and a corresponding list of integers + tracking how many times each Deferred has been cancelled. Without + additional steps the Deferreds will never fire. 
+ """ + cancelledState = [0] * numDeferreds + + ds: list[Deferred[object]] = [] + for n in range(numDeferreds): + + def cancel(d: Deferred, n: int = n) -> None: + cancelledState[n] += 1 + + ds.append(Deferred(canceller=cancel)) + + return cancelledState, ds + + +class RaceTests(unittest.SynchronousTestCase): + """ + Tests for L{race}. + """ + + @given( + beforeWinner=integers(min_value=0, max_value=3), + afterWinner=integers(min_value=0, max_value=3), + ) + def test_success(self, beforeWinner: int, afterWinner: int) -> None: + """ + When one of the L{Deferred}s passed to L{race} fires successfully, + the L{Deferred} returned by L{race} fires with the index of that + L{Deferred} and its result and cancels the rest of the L{Deferred}s. + @param beforeWinner: A randomly selected number of Deferreds to + appear before the "winning" Deferred in the list passed in. + @param afterWinner: A randomly selected number of Deferreds to + appear after the "winning" Deferred in the list passed in. + """ + cancelledState, ds = _setupRaceState(beforeWinner + 1 + afterWinner) + + raceResult = race(ds) + expected = object() + ds[beforeWinner].callback(expected) + + # The result should be the index and result of the only Deferred that + # fired. + self.assertEqual( + self.successResultOf(raceResult), + (beforeWinner, expected), + ) + # All Deferreds except the winner should have been cancelled once. + expectedCancelledState = [1] * beforeWinner + [0] + [1] * afterWinner + self.assertEqual( + cancelledState, + expectedCancelledState, + ) + + @given( + beforeWinner=integers(min_value=0, max_value=3), + afterWinner=integers(min_value=0, max_value=3), + ) + def test_failure(self, beforeWinner: int, afterWinner: int) -> None: + """ + When all of the L{Deferred}s passed to L{race} fire with failures, + the L{Deferred} returned by L{race} fires with L{MultiFailure} wrapping + all of their failures. 
+ @param beforeWinner: A randomly selected number of Deferreds to + appear before the "winning" Deferred in the list passed in. + @param afterWinner: A randomly selected number of Deferreds to + appear after the "winning" Deferred in the list passed in. + """ + cancelledState, ds = _setupRaceState(beforeWinner + 1 + afterWinner) + + failure = Failure(Exception("The test demands failures.")) + raceResult = race(ds) + for d in ds: + d.errback(failure) + + actualFailure = self.failureResultOf(raceResult, MultiFailure) + self.assertEqual( + actualFailure.value.failures, + [failure] * len(ds), + ) + self.assertEqual( + cancelledState, + [0] * len(ds), + ) + + @given( + beforeWinner=integers(min_value=0, max_value=3), + afterWinner=integers(min_value=0, max_value=3), + ) + def test_resultAfterCancel(self, beforeWinner: int, afterWinner: int) -> None: + """ + If one of the Deferreds fires after it was cancelled its result + goes nowhere. In particular, it does not cause any errors to be + logged. + """ + # Ensure we have a Deferred to win and at least one other Deferred + # that can ignore cancellation. + ds: list[Deferred[None]] = [ + Deferred() for n in range(beforeWinner + 2 + afterWinner) + ] + + raceResult = race(ds) + ds[beforeWinner].callback(None) + ds[beforeWinner + 1].callback(None) + + self.successResultOf(raceResult) + self.assertEqual(len(self.flushLoggedErrors()), 0) + + def test_resultFromCancel(self) -> None: + """ + If one of the input Deferreds has a cancel function that fires it + with success, nothing bad happens. 
+ """ + winner: Deferred[object] = Deferred() + ds: list[Deferred[object]] = [ + winner, + Deferred(canceller=lambda d: d.callback(object())), + ] + expected = object() + raceResult = race(ds) + winner.callback(expected) + + self.assertEqual(self.successResultOf(raceResult), (0, expected)) + + @given( + numDeferreds=integers(min_value=1, max_value=3), + ) + def test_cancel(self, numDeferreds: int) -> None: + """ + If the result of L{race} is cancelled then all of the L{Deferred}s + passed in are cancelled. + """ + cancelledState, ds = _setupRaceState(numDeferreds) + + raceResult = race(ds) + raceResult.cancel() + + self.assertEqual(cancelledState, [1] * numDeferreds) + self.failureResultOf(raceResult, MultiFailure) diff --git a/src/allmydata/test/test_dictutil.py b/src/allmydata/test/test_dictutil.py index 7e26a6ed9..8ee119a4f 100644 --- a/src/allmydata/test/test_dictutil.py +++ b/src/allmydata/test/test_dictutil.py @@ -1,19 +1,7 @@ """ Tests for allmydata.util.dictutil. - -Ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2, PY3 -if PY2: - # dict omitted to match dictutil.py. 
- from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, list, object, range, str, max, min # noqa: F401 - -from unittest import skipIf +from __future__ import annotations from twisted.trial import unittest @@ -96,7 +84,6 @@ class DictUtil(unittest.TestCase): class TypedKeyDict(unittest.TestCase): """Tests for dictionaries that limit keys.""" - @skipIf(PY2, "Python 2 doesn't have issues mixing bytes and unicode.") def setUp(self): pass @@ -149,22 +136,16 @@ class TypedKeyDict(unittest.TestCase): self.assertEqual(d[u"456"], 300) -class TypedKeyDictPython2(unittest.TestCase): - """Tests for dictionaries that limit keys on Python 2.""" - - @skipIf(PY3, "Testing Python 2 behavior.") - def test_python2(self): +class FilterTests(unittest.TestCase): + """ + Tests for ``dictutil.filter``. + """ + def test_filter(self) -> None: """ - On Python2, BytesKeyDict and UnicodeKeyDict are unnecessary, because - dicts can mix both without problem so you don't get confusing behavior - if you get the type wrong. - - Eventually in a Python 3-only world mixing bytes and unicode will be - bad, thus the existence of these classes, but as we port there will be - situations where it's mixed on Python 2, which again is fine. + ``dictutil.filter`` returns a ``dict`` that contains the key/value + pairs for which the value is matched by the given predicate. """ - self.assertIs(dictutil.UnicodeKeyDict, dict) - self.assertIs(dictutil.BytesKeyDict, dict) - # Demonstration of how bytes and unicode can be mixed: - d = {u"abc": 1} - self.assertEqual(d[b"abc"], 1) + self.assertEqual( + {1: 2}, + dictutil.filter(lambda v: v == 2, {1: 2, 2: 3}), + ) diff --git a/src/allmydata/test/test_dirnode.py b/src/allmydata/test/test_dirnode.py index 67d331430..93122ba19 100644 --- a/src/allmydata/test/test_dirnode.py +++ b/src/allmydata/test/test_dirnode.py @@ -2,17 +2,6 @@ Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from past.builtins import long - -from future.utils import PY2 -if PY2: - # Skip list() since it results in spurious test failures - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, object, range, str, max, min # noqa: F401 import time import unicodedata @@ -20,8 +9,10 @@ from zope.interface import implementer from twisted.trial import unittest from twisted.internet import defer from twisted.internet.interfaces import IConsumer +from twisted.python.filepath import FilePath from allmydata import uri, dirnode from allmydata.client import _Client +from allmydata.crypto.rsa import create_signing_keypair from allmydata.immutable import upload from allmydata.immutable.literal import LiteralFileNode from allmydata.interfaces import IImmutableFileNode, IMutableFileNode, \ @@ -30,16 +21,25 @@ from allmydata.interfaces import IImmutableFileNode, IMutableFileNode, \ IDeepCheckResults, IDeepCheckAndRepairResults, \ MDMF_VERSION, SDMF_VERSION from allmydata.mutable.filenode import MutableFileNode -from allmydata.mutable.common import UncoordinatedWriteError +from allmydata.mutable.common import ( + UncoordinatedWriteError, + derive_mutable_keys, +) from allmydata.util import hashutil, base32 from allmydata.util.netstring import split_netstring from allmydata.monitor import Monitor from allmydata.test.common import make_chk_file_uri, make_mutable_file_uri, \ ErrorMixin +from allmydata.test.mutable.util import ( + FakeStorage, + make_nodemaker_with_peers, + make_peer, +) from allmydata.test.no_network import GridTestMixin from allmydata.unknown import UnknownNode, strip_prefix_for_ro from allmydata.nodemaker import NodeMaker from base64 import b32decode +from cryptography.hazmat.primitives.serialization import load_pem_private_key import 
allmydata.test.common_util as testutil from hypothesis import given @@ -1619,7 +1619,8 @@ class FakeMutableFile(object): # type: ignore # incomplete implementation return defer.succeed(None) class FakeNodeMaker(NodeMaker): - def create_mutable_file(self, contents=b"", keysize=None, version=None): + def create_mutable_file(self, contents=b"", keysize=None, version=None, keypair=None): + assert keypair is None, "FakeNodeMaker does not support externally supplied keypairs" return defer.succeed(FakeMutableFile(contents)) class FakeClient2(_Client): # type: ignore # tahoe-lafs/ticket/3573 @@ -1862,7 +1863,7 @@ class DeepStats(testutil.ReallyEqualMixin, unittest.TestCase): (101, 316, 216), (317, 1000, 684), (1001, 3162, 99), - (long(3162277660169), long(10000000000000), 1), + (3162277660169, 10000000000000, 1), ]) class UCWEingMutableFileNode(MutableFileNode): @@ -1988,3 +1989,75 @@ class Adder(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin): d.addCallback(_test_adder) return d + + +class DeterministicDirnode(testutil.ReallyEqualMixin, testutil.ShouldFailMixin, unittest.TestCase): + def setUp(self): + # Copied from allmydata.test.mutable.test_filenode + super(DeterministicDirnode, self).setUp() + self._storage = FakeStorage() + self._peers = list( + make_peer(self._storage, n) + for n + in range(10) + ) + self.nodemaker = make_nodemaker_with_peers(self._peers) + + async def test_create_with_random_keypair(self): + """ + Create a dirnode using a random RSA keypair. + + The writekey and fingerprint of the enclosed mutable filecap + should match those derived from the given keypair. 
+ """ + privkey, pubkey = create_signing_keypair(2048) + writekey, _, fingerprint = derive_mutable_keys((pubkey, privkey)) + + node = await self.nodemaker.create_new_mutable_directory( + keypair=(pubkey, privkey) + ) + self.failUnless(isinstance(node, dirnode.DirectoryNode)) + + dircap = uri.from_string(node.get_uri()) + self.failUnless(isinstance(dircap, uri.DirectoryURI)) + + filecap = dircap.get_filenode_cap() + self.failUnless(isinstance(filecap, uri.WriteableSSKFileURI)) + + self.failUnlessReallyEqual(filecap.writekey, writekey) + self.failUnlessReallyEqual(filecap.fingerprint, fingerprint) + + async def test_create_with_known_keypair(self): + """ + Create a dirnode using a known RSA keypair. + + The writekey and fingerprint of the enclosed mutable filecap + should match those derived from the given keypair. Because + these values are derived deterministically, given the same + keypair, the resulting filecap should also always be the same. + """ + # Generated with `openssl genrsa -out openssl-rsa-2048-2.txt 2048` + pempath = FilePath(__file__).sibling("data").child("openssl-rsa-2048-2.txt") + privkey = load_pem_private_key(pempath.getContent(), password=None) + pubkey = privkey.public_key() + writekey, _, fingerprint = derive_mutable_keys((pubkey, privkey)) + + node = await self.nodemaker.create_new_mutable_directory( + keypair=(pubkey, privkey) + ) + self.failUnless(isinstance(node, dirnode.DirectoryNode)) + + dircap = uri.from_string(node.get_uri()) + self.failUnless(isinstance(dircap, uri.DirectoryURI)) + + filecap = dircap.get_filenode_cap() + self.failUnless(isinstance(filecap, uri.WriteableSSKFileURI)) + + self.failUnlessReallyEqual(filecap.writekey, writekey) + self.failUnlessReallyEqual(filecap.fingerprint, fingerprint) + + self.failUnlessReallyEqual( + # Despite being named "to_string", this actually returns bytes.. 
+ dircap.to_string(), + b'URI:DIR2:n4opqgewgcn4mddu4oiippaxru:ukpe4z6xdlujdpguoabergyih3bj7iaafukdqzwthy2ytdd5bs2a' + ) diff --git a/src/allmydata/test/test_download.py b/src/allmydata/test/test_download.py index d61942839..709786f0e 100644 --- a/src/allmydata/test/test_download.py +++ b/src/allmydata/test/test_download.py @@ -1,20 +1,15 @@ """ Ported to Python 3. """ -from __future__ import print_function -from __future__ import absolute_import -from __future__ import division -from __future__ import unicode_literals -from future.utils import PY2, bchr -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 +from future.utils import bchr # system-level upload+download roundtrip test, but using shares created from # a previous run. This asserts that the current code is capable of decoding # shares from a previous version. -import six +from typing import Any + import os from twisted.trial import unittest from twisted.internet import defer, reactor @@ -34,9 +29,6 @@ from allmydata.immutable.downloader.fetcher import SegmentFetcher from allmydata.codec import CRSDecoder from foolscap.eventual import eventually, fireEventually, flushEventualQueue -if six.PY3: - long = int - plaintext = b"This is a moderate-sized file.\n" * 10 mutable_plaintext = b"This is a moderate-sized mutable file.\n" * 10 @@ -493,7 +485,7 @@ class DownloadTest(_Base, unittest.TestCase): d.addCallback(_done) return d - def test_simultaneous_onefails_onecancelled(self): + def test_simul_1fail_1cancel(self): # This exercises an mplayer behavior in ticket #1154. 
I believe that # mplayer made two simultaneous webapi GET requests: first one for an # index region at the end of the (mp3/video) file, then one for the @@ -951,12 +943,52 @@ class Corruption(_Base, unittest.TestCase): self.corrupt_shares_numbered(imm_uri, [2], _corruptor) def _corrupt_set(self, ign, imm_uri, which, newvalue): + # type: (Any, bytes, int, int) -> None + """ + Replace a single byte share file number 2 for the given capability with a + new byte. + + :param imm_uri: Corrupt share number 2 belonging to this capability. + :param which: The byte position to replace. + :param newvalue: The new byte value to set in the share. + """ log.msg("corrupt %d" % which) def _corruptor(s, debug=False): return s[:which] + bchr(newvalue) + s[which+1:] self.corrupt_shares_numbered(imm_uri, [2], _corruptor) def test_each_byte(self): + """ + Test share selection behavior of the downloader in the face of certain + kinds of data corruption. + + 1. upload a small share to the no-network grid + 2. read all of the resulting share files out of the no-network storage servers + 3. for each of + + a. each byte of the share file version field + b. each byte of the immutable share version field + c. each byte of the immutable share data offset field + d. the most significant byte of the block_shares offset field + e. one of the bytes of one of the merkle trees + f. one of the bytes of the share hashes list + + i. flip the least significant bit in all of the the share files + ii. perform the download/check/restore process + + 4. add 2 ** 24 to the share file version number + 5. perform the download/check/restore process + + 6. add 2 ** 24 to the share version number + 7. perform the download/check/restore process + + The download/check/restore process is: + + 1. attempt to download the data + 2. assert that the recovered plaintext is correct + 3. assert that only the "correct" share numbers were used to reconstruct the plaintext + 4. 
restore all of the share files to their pristine condition + """ # Setting catalog_detection=True performs an exhaustive test of the # Downloader's response to corruption in the lsb of each byte of the # 2070-byte share, with two goals: make sure we tolerate all forms of @@ -1068,9 +1100,17 @@ class Corruption(_Base, unittest.TestCase): d.addCallback(_download, imm_uri, i, expected) d.addCallback(lambda ign: self.restore_all_shares(self.shares)) d.addCallback(fireEventually) - corrupt_values = [(3, 2, "no-sh2"), - (15, 2, "need-4th"), # share looks v2 - ] + corrupt_values = [ + # Make the container version for share number 2 look + # unsupported. If you add support for immutable share file + # version number much past 16 million then you will have to + # update this test. Also maybe you have other problems. + (1, 255, "no-sh2"), + # Make the immutable share number 2 (not the container, the + # thing inside the container) look unsupported. Ditto the + # above about version numbers in the ballpark of 16 million. + (13, 255, "need-4th"), + ] for i,newvalue,expected in corrupt_values: d.addCallback(self._corrupt_set, imm_uri, i, newvalue) d.addCallback(_download, imm_uri, i, expected) @@ -1144,9 +1184,18 @@ class Corruption(_Base, unittest.TestCase): return d - def _corrupt_flip_all(self, ign, imm_uri, which): + def _corrupt_flip_all(self, ign: Any, imm_uri: bytes, which: int) -> None: + """ + Flip the least significant bit at a given byte position in all share files + for the given capability. 
+ """ def _corruptor(s, debug=False): - return s[:which] + bchr(ord(s[which:which+1])^0x01) + s[which+1:] + # type: (bytes, bool) -> bytes + before_corruption = s[:which] + after_corruption = s[which+1:] + original_byte = s[which:which+1] + corrupt_byte = bchr(ord(original_byte) ^ 0x01) + return b"".join([before_corruption, corrupt_byte, after_corruption]) self.corrupt_all_shares(imm_uri, _corruptor) class DownloadV2(_Base, unittest.TestCase): diff --git a/src/allmydata/test/test_eliotutil.py b/src/allmydata/test/test_eliotutil.py index 3f915ecd2..5b191cd92 100644 --- a/src/allmydata/test/test_eliotutil.py +++ b/src/allmydata/test/test_eliotutil.py @@ -1,20 +1,7 @@ """ Tests for ``allmydata.util.eliotutil``. - -Ported to Python 3. """ -from __future__ import ( - unicode_literals, - print_function, - absolute_import, - division, -) - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - from sys import stdout import logging @@ -27,13 +14,12 @@ from fixtures import ( ) from testtools import ( TestCase, -) -from testtools import ( TestResult, ) from testtools.matchers import ( Is, IsInstance, + Not, MatchesStructure, Equals, HasLength, @@ -48,7 +34,6 @@ from eliot import ( Message, MessageType, fields, - FileDestination, MemoryLogger, ) from eliot.twisted import DeferredContext @@ -69,7 +54,7 @@ from ..util.eliotutil import ( _parse_destination_description, _EliotLogging, ) -from ..util.jsonbytes import AnyBytesJSONEncoder +from ..util.deferredutil import async_to_deferred from .common import ( SyncTestCase, @@ -77,24 +62,105 @@ from .common import ( ) -class EliotLoggedTestTests(AsyncTestCase): +def passes(): + """ + Create a matcher that matches a ``TestCase`` that runs without failures or + errors. 
+ """ + def run(case): + result = TestResult() + case.run(result) + return result.wasSuccessful() + return AfterPreprocessing(run, Equals(True)) + + +class EliotLoggedTestTests(TestCase): + """ + Tests for the automatic log-related provided by ``AsyncTestCase``. + + This class uses ``testtools.TestCase`` because it is inconvenient to nest + ``AsyncTestCase`` inside ``AsyncTestCase`` (in particular, Eliot messages + emitted by the inner test case get observed by the outer test case and if + an inner case emits invalid messages they cause the outer test case to + fail). + """ + def test_fails(self): + """ + A test method of an ``AsyncTestCase`` subclass can fail. + """ + class UnderTest(AsyncTestCase): + def test_it(self): + self.fail("make sure it can fail") + + self.assertThat(UnderTest("test_it"), Not(passes())) + + def test_unserializable_fails(self): + """ + A test method of an ``AsyncTestCase`` subclass that logs an unserializable + value with Eliot fails. + """ + class world(object): + """ + an unserializable object + """ + + class UnderTest(AsyncTestCase): + def test_it(self): + Message.log(hello=world) + + self.assertThat(UnderTest("test_it"), Not(passes())) + + def test_logs_non_utf_8_byte(self): + """ + A test method of an ``AsyncTestCase`` subclass can log a message that + contains a non-UTF-8 byte string and return ``None`` and pass. + """ + class UnderTest(AsyncTestCase): + def test_it(self): + Message.log(hello=b"\xFF") + + self.assertThat(UnderTest("test_it"), passes()) + def test_returns_none(self): - Message.log(hello="world") + """ + A test method of an ``AsyncTestCase`` subclass can log a message and + return ``None`` and pass. 
+ """ + class UnderTest(AsyncTestCase): + def test_it(self): + Message.log(hello="world") + + self.assertThat(UnderTest("test_it"), passes()) def test_returns_fired_deferred(self): - Message.log(hello="world") - return succeed(None) + """ + A test method of an ``AsyncTestCase`` subclass can log a message and + return an already-fired ``Deferred`` and pass. + """ + class UnderTest(AsyncTestCase): + def test_it(self): + Message.log(hello="world") + return succeed(None) + + self.assertThat(UnderTest("test_it"), passes()) def test_returns_unfired_deferred(self): - Message.log(hello="world") - # @eliot_logged_test automatically gives us an action context but it's - # still our responsibility to maintain it across stack-busting - # operations. - d = DeferredContext(deferLater(reactor, 0.0, lambda: None)) - d.addCallback(lambda ignored: Message.log(goodbye="world")) - # We didn't start an action. We're not finishing an action. - return d.result + """ + A test method of an ``AsyncTestCase`` subclass can log a message and + return an unfired ``Deferred`` and pass when the ``Deferred`` fires. + """ + class UnderTest(AsyncTestCase): + def test_it(self): + Message.log(hello="world") + # @eliot_logged_test automatically gives us an action context + # but it's still our responsibility to maintain it across + # stack-busting operations. + d = DeferredContext(deferLater(reactor, 0.0, lambda: None)) + d.addCallback(lambda ignored: Message.log(goodbye="world")) + # We didn't start an action. We're not finishing an action. 
+ return d.result + self.assertThat(UnderTest("test_it"), passes()) class ParseDestinationDescriptionTests(SyncTestCase): @@ -108,8 +174,8 @@ class ParseDestinationDescriptionTests(SyncTestCase): """ reactor = object() self.assertThat( - _parse_destination_description("file:-")(reactor), - Equals(FileDestination(stdout, encoder=AnyBytesJSONEncoder)), + _parse_destination_description("file:-")(reactor).file, + Equals(stdout), ) @@ -136,13 +202,14 @@ class ParseDestinationDescriptionTests(SyncTestCase): ) -# Opt out of the great features of common.SyncTestCase because we're -# interacting with Eliot in a very obscure, particular, fragile way. :/ -class EliotLoggingTests(TestCase): +# We need AsyncTestCase because logging happens in a thread tied to the +# reactor. +class EliotLoggingTests(AsyncTestCase): """ Tests for ``_EliotLogging``. """ - def test_stdlib_event_relayed(self): + @async_to_deferred + async def test_stdlib_event_relayed(self): """ An event logged using the stdlib logging module is delivered to the Eliot destination. @@ -150,23 +217,16 @@ class EliotLoggingTests(TestCase): collected = [] service = _EliotLogging([collected.append]) service.startService() - self.addCleanup(service.stopService) - - # The first destination added to the global log destinations gets any - # buffered messages delivered to it. We don't care about those. - # Throw them on the floor. Sorry. - del collected[:] logging.critical("oh no") - self.assertThat( - collected, - AfterPreprocessing( - len, - Equals(1), - ), + await service.stopService() + + self.assertTrue( + "oh no" in str(collected[-1]), collected ) - def test_twisted_event_relayed(self): + @async_to_deferred + async def test_twisted_event_relayed(self): """ An event logged with a ``twisted.logger.Logger`` is delivered to the Eliot destination. 
@@ -174,15 +234,13 @@ class EliotLoggingTests(TestCase): collected = [] service = _EliotLogging([collected.append]) service.startService() - self.addCleanup(service.stopService) from twisted.logger import Logger Logger().critical("oh no") - self.assertThat( - collected, - AfterPreprocessing( - len, Equals(1), - ), + await service.stopService() + + self.assertTrue( + "oh no" in str(collected[-1]), collected ) def test_validation_failure(self): @@ -240,7 +298,6 @@ class EliotLoggingTests(TestCase): ) - class LogCallDeferredTests(TestCase): """ Tests for ``log_call_deferred``. diff --git a/src/allmydata/test/test_encode.py b/src/allmydata/test/test_encode.py index 028a988cb..ba11605ab 100644 --- a/src/allmydata/test/test_encode.py +++ b/src/allmydata/test/test_encode.py @@ -1,15 +1,8 @@ """ Ported to Python 3. """ -from __future__ import division -from __future__ import absolute_import -from __future__ import print_function -from __future__ import unicode_literals -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 -from past.builtins import chr as byteschr, long +from past.builtins import chr as byteschr from zope.interface import implementer from twisted.trial import unittest @@ -106,7 +99,7 @@ class FakeBucketReaderWriterProxy(object): def get_block_data(self, blocknum, blocksize, size): d = self._start() def _try(unused=None): - assert isinstance(blocknum, (int, long)) + assert isinstance(blocknum, int) if self.mode == "bad block": return flip_bit(self.blocks[blocknum]) return self.blocks[blocknum] diff --git a/src/allmydata/test/test_encodingutil.py b/src/allmydata/test/test_encodingutil.py index 062c64ba1..fef9e6d57 100644 --- a/src/allmydata/test/test_encodingutil.py +++ b/src/allmydata/test/test_encodingutil.py @@ -1,14 +1,3 @@ -from __future__ import print_function -from __future__ import absolute_import 
-from __future__ import division -from __future__ import unicode_literals - -from future.utils import PY2, PY3 -if PY2: - # We don't import str because omg way too ambiguous in this context. - from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, max, min # noqa: F401 - -from past.builtins import unicode lumiere_nfc = u"lumi\u00E8re" Artonwall_nfc = u"\u00C4rtonwall.mp3" @@ -53,13 +42,7 @@ if __name__ == "__main__": for fname in TEST_FILENAMES: open(os.path.join(tmpdir, fname), 'w').close() - # On Python 2, listing directories returns unicode under Windows or - # MacOS X if the input is unicode. On Python 3, it always returns - # Unicode. - if PY2 and sys.platform in ('win32', 'darwin'): - dirlist = os.listdir(unicode(tmpdir)) - else: - dirlist = os.listdir(tmpdir) + dirlist = os.listdir(tmpdir) print(" dirlist = %s" % repr(dirlist)) except: @@ -71,7 +54,6 @@ if __name__ == "__main__": import os, sys -from unittest import skipIf from twisted.trial import unittest @@ -83,7 +65,7 @@ from allmydata.test.common_util import ( from allmydata.util import encodingutil, fileutil from allmydata.util.encodingutil import unicode_to_url, \ unicode_to_output, quote_output, quote_path, quote_local_unicode_path, \ - quote_filepath, unicode_platform, listdir_unicode, FilenameEncodingError, \ + quote_filepath, unicode_platform, listdir_unicode, \ get_filesystem_encoding, to_bytes, from_utf8_or_none, _reload, \ to_filepath, extend_filepath, unicode_from_filepath, unicode_segments_from, \ unicode_to_argv @@ -91,47 +73,6 @@ from allmydata.util.encodingutil import unicode_to_url, \ class MockStdout(object): pass -# The following tests apply only to platforms that don't store filenames as -# Unicode entities on the filesystem. 
-class EncodingUtilNonUnicodePlatform(unittest.TestCase): - @skipIf(PY3, "Python 3 is always Unicode, regardless of OS.") - def setUp(self): - # Make sure everything goes back to the way it was at the end of the - # test. - self.addCleanup(_reload) - - # Mock sys.platform because unicode_platform() uses it. Cleanups run - # in reverse order so we do this second so it gets undone first. - self.patch(sys, "platform", "linux") - - def test_listdir_unicode(self): - # What happens if latin1-encoded filenames are encountered on an UTF-8 - # filesystem? - def call_os_listdir(path): - return [ - lumiere_nfc.encode('utf-8'), - lumiere_nfc.encode('latin1') - ] - self.patch(os, 'listdir', call_os_listdir) - - sys_filesystemencoding = 'utf-8' - def call_sys_getfilesystemencoding(): - return sys_filesystemencoding - self.patch(sys, 'getfilesystemencoding', call_sys_getfilesystemencoding) - - _reload() - self.failUnlessRaises(FilenameEncodingError, - listdir_unicode, - u'/dummy') - - # We're trying to list a directory whose name cannot be represented in - # the filesystem encoding. This should fail. 
- sys_filesystemencoding = 'ascii' - _reload() - self.failUnlessRaises(FilenameEncodingError, - listdir_unicode, - u'/' + lumiere_nfc) - class EncodingUtil(ReallyEqualMixin): def setUp(self): @@ -141,19 +82,6 @@ class EncodingUtil(ReallyEqualMixin): def test_unicode_to_url(self): self.failUnless(unicode_to_url(lumiere_nfc), b"lumi\xc3\xa8re") - @skipIf(PY3, "Python 3 is always Unicode, regardless of OS.") - def test_unicode_to_output_py2(self): - if 'argv' not in dir(self): - return - - mock_stdout = MockStdout() - mock_stdout.encoding = self.io_encoding - self.patch(sys, 'stdout', mock_stdout) - - _reload() - self.failUnlessReallyEqual(unicode_to_output(lumiere_nfc), self.argv) - - @skipIf(PY2, "Python 3 only.") def test_unicode_to_output_py3(self): self.failUnlessReallyEqual(unicode_to_output(lumiere_nfc), lumiere_nfc) @@ -163,28 +91,11 @@ class EncodingUtil(ReallyEqualMixin): converts to bytes using UTF-8 elsewhere. """ result = unicode_to_argv(lumiere_nfc) - if PY3 or self.platform == "win32": - expected_value = lumiere_nfc - else: - expected_value = lumiere_nfc.encode(self.io_encoding) + expected_value = lumiere_nfc self.assertIsInstance(result, type(expected_value)) self.assertEqual(result, expected_value) - @skipIf(PY3, "Python 3 only.") - def test_unicode_platform_py2(self): - matrix = { - 'linux2': False, - 'linux3': False, - 'openbsd4': False, - 'win32': True, - 'darwin': True, - } - - _reload() - self.failUnlessReallyEqual(unicode_platform(), matrix[self.platform]) - - @skipIf(PY2, "Python 3 isn't Python 2.") def test_unicode_platform_py3(self): _reload() self.failUnlessReallyEqual(unicode_platform(), True) @@ -201,13 +112,10 @@ class EncodingUtil(ReallyEqualMixin): % (self.filesystem_encoding,)) def call_os_listdir(path): - if PY2: - return self.dirlist - else: - # Python 3 always lists unicode filenames: - return [d.decode(self.filesystem_encoding) if isinstance(d, bytes) - else d - for d in self.dirlist] + # Python 3 always lists unicode filenames: + 
return [d.decode(self.filesystem_encoding) if isinstance(d, bytes) + else d + for d in self.dirlist] self.patch(os, 'listdir', call_os_listdir) @@ -238,10 +146,7 @@ class StdlibUnicode(unittest.TestCase): fn = lumiere_nfc + u'/' + lumiere_nfc + u'.txt' open(fn, 'wb').close() self.failUnless(os.path.exists(fn)) - if PY2: - getcwdu = os.getcwdu - else: - getcwdu = os.getcwd + getcwdu = os.getcwd self.failUnless(os.path.exists(os.path.join(getcwdu(), fn))) filenames = listdir_unicode(lumiere_nfc) @@ -271,7 +176,7 @@ class QuoteOutput(ReallyEqualMixin, unittest.TestCase): _reload() def _check(self, inp, out, enc, optional_quotes, quote_newlines): - if PY3 and isinstance(out, bytes): + if isinstance(out, bytes): out = out.decode(enc or encodingutil.io_encoding) out2 = out if optional_quotes: @@ -300,9 +205,7 @@ class QuoteOutput(ReallyEqualMixin, unittest.TestCase): def _test_quote_output_all(self, enc): def check(inp, out, optional_quotes=False, quote_newlines=None): - if PY3: - # Result is always Unicode on Python 3 - out = out.decode("ascii") + out = out.decode("ascii") self._check(inp, out, enc, optional_quotes, quote_newlines) # optional single quotes @@ -365,9 +268,6 @@ class QuoteOutput(ReallyEqualMixin, unittest.TestCase): def test_quote_output_utf8(self, enc='utf-8'): def check(inp, out, optional_quotes=False, quote_newlines=None): - if PY2: - # On Python 3 output is always Unicode: - out = out.encode('utf-8') self._check(inp, out, enc, optional_quotes, quote_newlines) self._test_quote_output_all(enc) @@ -391,9 +291,7 @@ def win32_other(win32, other): class QuotePaths(ReallyEqualMixin, unittest.TestCase): def assertPathsEqual(self, actual, expected): - if PY3: - # On Python 3, results should be unicode: - expected = expected.decode("ascii") + expected = expected.decode("ascii") self.failUnlessReallyEqual(actual, expected) def test_quote_path(self): @@ -445,8 +343,7 @@ class FilePaths(ReallyEqualMixin, unittest.TestCase): for fp in (nosep_fp, sep_fp): 
self.failUnlessReallyEqual(fp, FilePath(foo_u)) - if encodingutil.use_unicode_filepath: - self.failUnlessReallyEqual(fp.path, foo_u) + self.failUnlessReallyEqual(fp.path, foo_u) if sys.platform == "win32": long_u = u'\\\\?\\C:\\foo' @@ -462,8 +359,7 @@ class FilePaths(ReallyEqualMixin, unittest.TestCase): for foo_fp in (foo_bfp, foo_ufp): fp = extend_filepath(foo_fp, [u'bar', u'baz']) self.failUnlessReallyEqual(fp, FilePath(foo_bar_baz_u)) - if encodingutil.use_unicode_filepath: - self.failUnlessReallyEqual(fp.path, foo_bar_baz_u) + self.failUnlessReallyEqual(fp.path, foo_bar_baz_u) def test_unicode_from_filepath(self): foo_bfp = FilePath(win32_other(b'C:\\foo', b'/foo')) diff --git a/src/allmydata/test/test_filenode.py b/src/allmydata/test/test_filenode.py index a8f0e2431..311e6516c 100644 --- a/src/allmydata/test/test_filenode.py +++ b/src/allmydata/test/test_filenode.py @@ -1,14 +1,6 @@ """ Ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 from twisted.trial import unittest from allmydata import uri, client diff --git a/src/allmydata/test/test_grid_manager.py b/src/allmydata/test/test_grid_manager.py new file mode 100644 index 000000000..78280a168 --- /dev/null +++ b/src/allmydata/test/test_grid_manager.py @@ -0,0 +1,455 @@ +""" +Tests for the grid manager. 
+""" + +from datetime import ( + timedelta, +) + +from twisted.python.filepath import ( + FilePath, +) + +from hypothesis import given + +from allmydata.node import ( + config_from_string, +) +from allmydata.client import ( + _valid_config as client_valid_config, +) +from allmydata.crypto import ( + ed25519, +) +from allmydata.util import ( + jsonbytes as json, +) +from allmydata.grid_manager import ( + load_grid_manager, + save_grid_manager, + create_grid_manager, + parse_grid_manager_certificate, + create_grid_manager_verifier, + SignedCertificate, +) +from allmydata.test.strategies import ( + base32text, +) + +from .common import SyncTestCase + + +class GridManagerUtilities(SyncTestCase): + """ + Confirm operation of utility functions used by GridManager + """ + + def test_load_certificates(self): + """ + Grid Manager certificates are deserialized from config properly + """ + cert_path = self.mktemp() + fake_cert = { + "certificate": "{\"expires\":1601687822,\"public_key\":\"pub-v0-cbq6hcf3pxcz6ouoafrbktmkixkeuywpcpbcomzd3lqbkq4nmfga\",\"version\":1}", + "signature": "fvjd3uvvupf2v6tnvkwjd473u3m3inyqkwiclhp7balmchkmn3px5pei3qyfjnhymq4cjcwvbpqmcwwnwswdtrfkpnlaxuih2zbdmda" + } + with open(cert_path, "wb") as f: + f.write(json.dumps_bytes(fake_cert)) + config_data = ( + "[grid_managers]\n" + "fluffy = pub-v0-vqimc4s5eflwajttsofisp5st566dbq36xnpp4siz57ufdavpvlq\n" + "[grid_manager_certificates]\n" + "ding = {}\n".format(cert_path) + ) + config = config_from_string("/foo", "portnum", config_data, client_valid_config()) + self.assertEqual( + {"fluffy": "pub-v0-vqimc4s5eflwajttsofisp5st566dbq36xnpp4siz57ufdavpvlq"}, + config.enumerate_section("grid_managers") + ) + certs = config.get_grid_manager_certificates() + self.assertEqual([fake_cert], certs) + + def test_load_certificates_invalid_version(self): + """ + An error is reported loading invalid certificate version + """ + gm_path = FilePath(self.mktemp()) + gm_path.makedirs() + config = { + 
"grid_manager_config_version": 0, + "private_key": "priv-v0-ub7knkkmkptqbsax4tznymwzc4nk5lynskwjsiubmnhcpd7lvlqa", + "storage_servers": { + "radia": { + "public_key": "pub-v0-cbq6hcf3pxcz6ouoafrbktmkixkeuywpcpbcomzd3lqbkq4nmfga" + } + } + } + with gm_path.child("config.json").open("wb") as f: + f.write(json.dumps_bytes(config)) + + fake_cert = { + "certificate": "{\"expires\":1601687822,\"public_key\":\"pub-v0-cbq6hcf3pxcz6ouoafrbktmkixkeuywpcpbcomzd3lqbkq4nmfga\",\"version\":22}", + "signature": "fvjd3uvvupf2v6tnvkwjd473u3m3inyqkwiclhp7balmchkmn3px5pei3qyfjnhymq4cjcwvbpqmcwwnwswdtrfkpnlaxuih2zbdmda" + } + with gm_path.child("radia.cert.0").open("wb") as f: + f.write(json.dumps_bytes(fake_cert)) + + with self.assertRaises(ValueError) as ctx: + load_grid_manager(gm_path) + self.assertIn( + "22", + str(ctx.exception), + ) + + def test_load_certificates_unknown_key(self): + """ + An error is reported loading certificates with invalid keys in them + """ + cert_path = self.mktemp() + fake_cert = { + "certificate": "{\"expires\":1601687822,\"public_key\":\"pub-v0-cbq6hcf3pxcz6ouoafrbktmkixkeuywpcpbcomzd3lqbkq4nmfga\",\"version\":22}", + "signature": "fvjd3uvvupf2v6tnvkwjd473u3m3inyqkwiclhp7balmchkmn3px5pei3qyfjnhymq4cjcwvbpqmcwwnwswdtrfkpnlaxuih2zbdmda", + "something-else": "not valid in a v0 certificate" + } + with open(cert_path, "wb") as f: + f.write(json.dumps_bytes(fake_cert)) + config_data = ( + "[grid_manager_certificates]\n" + "ding = {}\n".format(cert_path) + ) + config = config_from_string("/foo", "portnum", config_data, client_valid_config()) + with self.assertRaises(ValueError) as ctx: + config.get_grid_manager_certificates() + + self.assertIn( + "Unknown key in Grid Manager certificate", + str(ctx.exception) + ) + + def test_load_certificates_missing(self): + """ + An error is reported for missing certificates + """ + cert_path = self.mktemp() + config_data = ( + "[grid_managers]\n" + "fluffy = pub-v0-vqimc4s5eflwajttsofisp5st566dbq36xnpp4siz57ufdavpvlq\n" + 
"[grid_manager_certificates]\n" + "ding = {}\n".format(cert_path) + ) + config = config_from_string("/foo", "portnum", config_data, client_valid_config()) + with self.assertRaises(ValueError) as ctx: + config.get_grid_manager_certificates() + # we don't reliably know how Windows or MacOS will represent + # the path in the exception, so we don't check for the *exact* + # message with full-path here.. + self.assertIn( + "Grid Manager certificate file", + str(ctx.exception) + ) + self.assertIn( + " doesn't exist", + str(ctx.exception) + ) + + +class GridManagerVerifier(SyncTestCase): + """ + Tests related to rejecting or accepting Grid Manager certificates. + """ + + def setUp(self): + self.gm = create_grid_manager() + return super(GridManagerVerifier, self).setUp() + + def test_sign_cert(self): + """ + For a storage server previously added to a grid manager, + _GridManager.sign returns a dict with "certificate" and + "signature" properties where the value of "signature" gives + the ed25519 signature (using the grid manager's private key of + the value) of "certificate". 
+ """ + priv, pub = ed25519.create_signing_keypair() + self.gm.add_storage_server("test", pub) + cert0 = self.gm.sign("test", timedelta(seconds=86400)) + cert1 = self.gm.sign("test", timedelta(seconds=3600)) + self.assertNotEqual(cert0, cert1) + + self.assertIsInstance(cert0, SignedCertificate) + gm_key = ed25519.verifying_key_from_string(self.gm.public_identity()) + self.assertEqual( + ed25519.verify_signature( + gm_key, + cert0.signature, + cert0.certificate, + ), + None + ) + + def test_sign_cert_wrong_name(self): + """ + Try to sign a storage-server that doesn't exist + """ + with self.assertRaises(KeyError): + self.gm.sign("doesn't exist", timedelta(seconds=86400)) + + def test_add_cert(self): + """ + Add a storage-server and serialize it + """ + priv, pub = ed25519.create_signing_keypair() + self.gm.add_storage_server("test", pub) + + data = self.gm.marshal() + self.assertEqual( + data["storage_servers"], + { + "test": { + "public_key": ed25519.string_from_verifying_key(pub), + } + } + ) + + def test_remove(self): + """ + Add then remove a storage-server + """ + priv, pub = ed25519.create_signing_keypair() + self.gm.add_storage_server("test", pub) + self.gm.remove_storage_server("test") + self.assertEqual(len(self.gm.storage_servers), 0) + + def test_serialize(self): + """ + Write and then read a Grid Manager config + """ + priv0, pub0 = ed25519.create_signing_keypair() + priv1, pub1 = ed25519.create_signing_keypair() + self.gm.add_storage_server("test0", pub0) + self.gm.add_storage_server("test1", pub1) + + tempdir = self.mktemp() + fp = FilePath(tempdir) + + save_grid_manager(fp, self.gm) + gm2 = load_grid_manager(fp) + self.assertEqual( + self.gm.public_identity(), + gm2.public_identity(), + ) + self.assertEqual( + len(self.gm.storage_servers), + len(gm2.storage_servers), + ) + for name, ss0 in list(self.gm.storage_servers.items()): + ss1 = gm2.storage_servers[name] + self.assertEqual(ss0.name, ss1.name) + self.assertEqual(ss0.public_key_string(), 
ss1.public_key_string()) + self.assertEqual(self.gm.marshal(), gm2.marshal()) + + def test_invalid_no_version(self): + """ + Invalid Grid Manager config with no version + """ + tempdir = self.mktemp() + fp = FilePath(tempdir) + bad_config = { + "private_key": "at least we have one", + } + fp.makedirs() + with fp.child("config.json").open("w") as f: + f.write(json.dumps_bytes(bad_config)) + + with self.assertRaises(ValueError) as ctx: + load_grid_manager(fp) + self.assertIn( + "unknown version", + str(ctx.exception), + ) + + def test_invalid_certificate_bad_version(self): + """ + Invalid Grid Manager config containing a certificate with an + illegal version + """ + tempdir = self.mktemp() + fp = FilePath(tempdir) + config = { + "grid_manager_config_version": 0, + "private_key": "priv-v0-ub7knkkmkptqbsax4tznymwzc4nk5lynskwjsiubmnhcpd7lvlqa", + "storage_servers": { + "alice": { + "public_key": "pub-v0-cbq6hcf3pxcz6ouoafrbktmkixkeuywpcpbcomzd3lqbkq4nmfga" + } + } + } + bad_cert = { + "certificate": "{\"expires\":1601687822,\"public_key\":\"pub-v0-cbq6hcf3pxcz6ouoafrbktmkixkeuywpcpbcomzd3lqbkq4nmfga\",\"version\":0}", + "signature": "fvjd3uvvupf2v6tnvkwjd473u3m3inyqkwiclhp7balmchkmn3px5pei3qyfjnhymq4cjcwvbpqmcwwnwswdtrfkpnlaxuih2zbdmda" + } + + fp.makedirs() + with fp.child("config.json").open("w") as f: + f.write(json.dumps_bytes(config)) + with fp.child("alice.cert.0").open("w") as f: + f.write(json.dumps_bytes(bad_cert)) + + with self.assertRaises(ValueError) as ctx: + load_grid_manager(fp) + + self.assertIn( + "Unknown certificate version", + str(ctx.exception), + ) + + def test_invalid_no_private_key(self): + """ + Invalid Grid Manager config with no private key + """ + tempdir = self.mktemp() + fp = FilePath(tempdir) + bad_config = { + "grid_manager_config_version": 0, + } + fp.makedirs() + with fp.child("config.json").open("w") as f: + f.write(json.dumps_bytes(bad_config)) + + with self.assertRaises(ValueError) as ctx: + load_grid_manager(fp) + self.assertIn( + 
"'private_key' required", + str(ctx.exception), + ) + + def test_invalid_bad_private_key(self): + """ + Invalid Grid Manager config with bad private-key + """ + tempdir = self.mktemp() + fp = FilePath(tempdir) + bad_config = { + "grid_manager_config_version": 0, + "private_key": "not actually encoded key", + } + fp.makedirs() + with fp.child("config.json").open("w") as f: + f.write(json.dumps_bytes(bad_config)) + + with self.assertRaises(ValueError) as ctx: + load_grid_manager(fp) + self.assertIn( + "Invalid Grid Manager private_key", + str(ctx.exception), + ) + + def test_invalid_storage_server(self): + """ + Invalid Grid Manager config with missing public-key for + storage-server + """ + tempdir = self.mktemp() + fp = FilePath(tempdir) + bad_config = { + "grid_manager_config_version": 0, + "private_key": "priv-v0-ub7knkkmkptqbsax4tznymwzc4nk5lynskwjsiubmnhcpd7lvlqa", + "storage_servers": { + "bad": {} + } + } + fp.makedirs() + with fp.child("config.json").open("w") as f: + f.write(json.dumps_bytes(bad_config)) + + with self.assertRaises(ValueError) as ctx: + load_grid_manager(fp) + self.assertIn( + "No 'public_key' for storage server", + str(ctx.exception), + ) + + def test_parse_cert(self): + """ + Parse an ostensibly valid storage certificate + """ + js = parse_grid_manager_certificate('{"certificate": "", "signature": ""}') + self.assertEqual( + set(js.keys()), + {"certificate", "signature"} + ) + # the signature isn't *valid*, but that's checked in a + # different function + + def test_parse_cert_not_dict(self): + """ + Certificate data not even a dict + """ + with self.assertRaises(ValueError) as ctx: + parse_grid_manager_certificate("[]") + self.assertIn( + "must be a dict", + str(ctx.exception), + ) + + def test_parse_cert_missing_signature(self): + """ + Missing the signature + """ + with self.assertRaises(ValueError) as ctx: + parse_grid_manager_certificate('{"certificate": ""}') + self.assertIn( + "must contain", + str(ctx.exception), + ) + + def 
test_validate_cert(self): + """ + Validate a correctly-signed certificate + """ + priv0, pub0 = ed25519.create_signing_keypair() + self.gm.add_storage_server("test0", pub0) + cert0 = self.gm.sign("test0", timedelta(seconds=86400)) + + verify = create_grid_manager_verifier( + [self.gm._public_key], + [cert0], + ed25519.string_from_verifying_key(pub0), + ) + + self.assertTrue(verify()) + + +class GridManagerInvalidVerifier(SyncTestCase): + """ + Invalid certificate rejection tests + """ + + def setUp(self): + self.gm = create_grid_manager() + self.priv0, self.pub0 = ed25519.create_signing_keypair() + self.gm.add_storage_server("test0", self.pub0) + self.cert0 = self.gm.sign("test0", timedelta(seconds=86400)) + return super(GridManagerInvalidVerifier, self).setUp() + + @given( + base32text(), + ) + def test_validate_cert_invalid(self, invalid_signature): + """ + An incorrect signature is rejected + """ + # make signature invalid + invalid_cert = SignedCertificate( + self.cert0.certificate, + invalid_signature.encode("ascii"), + ) + + verify = create_grid_manager_verifier( + [self.gm._public_key], + [invalid_cert], + ed25519.string_from_verifying_key(self.pub0), + bad_cert = lambda key, cert: None, + ) + + self.assertFalse(verify()) diff --git a/src/allmydata/test/test_happiness.py b/src/allmydata/test/test_happiness.py index 9ff36ef26..190a7c7d4 100644 --- a/src/allmydata/test/test_happiness.py +++ b/src/allmydata/test/test_happiness.py @@ -6,16 +6,6 @@ allmydata.util.happinessutil. Ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - # We omit dict, just in case newdict breaks things. 
- from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, list, object, range, str, max, min # noqa: F401 - from twisted.trial import unittest from hypothesis import given from hypothesis.strategies import text, sets diff --git a/src/allmydata/test/test_hashtree.py b/src/allmydata/test/test_hashtree.py index 5abe2095e..d9be96bd5 100644 --- a/src/allmydata/test/test_hashtree.py +++ b/src/allmydata/test/test_hashtree.py @@ -1,32 +1,22 @@ """ Tests for allmydata.hashtree. - -Ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals +from __future__ import annotations -from future.utils import PY2 -if PY2: - from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - - -from twisted.trial import unittest +from .common import SyncTestCase +from base64 import b32encode from allmydata.util.hashutil import tagged_hash from allmydata import hashtree - def make_tree(numleaves): leaves = [b"%d" % i for i in range(numleaves)] leaf_hashes = [tagged_hash(b"tag", leaf) for leaf in leaves] ht = hashtree.HashTree(leaf_hashes) return ht -class Complete(unittest.TestCase): +class Complete(SyncTestCase): def test_create(self): # try out various sizes, since we pad to a power of two ht = make_tree(6) @@ -40,6 +30,18 @@ class Complete(unittest.TestCase): self.failUnlessRaises(IndexError, ht.parent, 0) self.failUnlessRaises(IndexError, ht.needed_for, -1) + def test_well_known_tree(self): + self.assertEqual( + [b32encode(s).strip(b"=").lower() for s in make_tree(3)], + [b'vxuqudnucceja4pqkdqy5txapagxubm5moupzqywkbg2jrjkaola', + b'weycjri4jlcaunca2jyx2kr7sbtb7qdriog3f26g5jpc5awfeazq', + b'5ovy3g2wwjnxoqtja4licckxkbqjef4xsjtclk6gxnsl66kvow6a', + b'esd34nbzri75l3j2vwetpk3dvlvsxstkbaktomonrulpks3df3sq', + 
b'jkxbwa2tppyfax35o72tbjecxvaa4xphma6zbyfbkkku3ed2657a', + b'wfisavaqgab2raihe7dld2qjps4rtxyiubgfs5enziokey2msjwa', + b't3kza5vwx3tlowdemmgdyigp62ju57qduyfh7uulnfkc7mj2ncrq'], + ) + def test_needed_hashes(self): ht = make_tree(8) self.failUnlessEqual(ht.needed_hashes(0), set([8, 4, 2])) @@ -65,7 +67,7 @@ class Complete(unittest.TestCase): self.failUnless("\n 8:" in d) self.failUnless("\n 4:" in d) -class Incomplete(unittest.TestCase): +class Incomplete(SyncTestCase): def test_create(self): ht = hashtree.IncompleteHashTree(6) diff --git a/src/allmydata/test/test_hashutil.py b/src/allmydata/test/test_hashutil.py index 482e79c0b..9be8f05d6 100644 --- a/src/allmydata/test/test_hashutil.py +++ b/src/allmydata/test/test_hashutil.py @@ -3,14 +3,6 @@ Tests for allmydata.util.hashutil. Ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 from twisted.trial import unittest @@ -44,6 +36,34 @@ class HashUtilTests(unittest.TestCase): self.failUnlessEqual(len(h2), 16) self.failUnlessEqual(h1, h2) + def test_well_known_tagged_hash(self): + self.assertEqual( + b"yra322btzoqjp4ts2jon5dztgnilcdg6jgztgk7joi6qpjkitg2q", + base32.b2a(hashutil.tagged_hash(b"tag", b"hello world")), + ) + self.assertEqual( + b"kfbsfssrv2bvtp3regne6j7gpdjcdjwncewriyfdtt764o5oa7ta", + base32.b2a(hashutil.tagged_hash(b"different", b"hello world")), + ) + self.assertEqual( + b"z34pzkgo36chbjz2qykonlxthc4zdqqquapw4bcaoogzvmmcr3zq", + base32.b2a(hashutil.tagged_hash(b"different", b"goodbye world")), + ) + + def test_well_known_tagged_pair_hash(self): + self.assertEqual( + b"wmto44q3shtezwggku2fxztfkwibvznkfu6clatnvfog527sb6dq", + base32.b2a(hashutil.tagged_pair_hash(b"tag", b"hello", 
b"world")), + ) + self.assertEqual( + b"lzn27njx246jhijpendqrxlk4yb23nznbcrihommbymg5e7quh4a", + base32.b2a(hashutil.tagged_pair_hash(b"different", b"hello", b"world")), + ) + self.assertEqual( + b"qnehpoypxxdhjheqq7dayloghtu42yr55uylc776zt23ii73o3oq", + base32.b2a(hashutil.tagged_pair_hash(b"different", b"goodbye", b"world")), + ) + def test_chk(self): h1 = hashutil.convergence_hash(3, 10, 1000, b"data", b"secret") h2 = hashutil.convergence_hasher(3, 10, 1000, b"secret") diff --git a/src/allmydata/test/test_helper.py b/src/allmydata/test/test_helper.py index 3faffbe0d..b280f95df 100644 --- a/src/allmydata/test/test_helper.py +++ b/src/allmydata/test/test_helper.py @@ -1,14 +1,7 @@ """ Ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 +from __future__ import annotations import os from struct import ( @@ -17,13 +10,8 @@ from struct import ( from functools import ( partial, ) -import attr -try: - from typing import List - from allmydata.introducer.client import IntroducerClient -except ImportError: - pass +import attr from twisted.internet import defer from twisted.trial import unittest @@ -35,10 +23,12 @@ from eliot.twisted import ( inline_callbacks, ) +from allmydata.introducer.client import IntroducerClient from allmydata.crypto import aes from allmydata.storage.server import ( si_b2a, StorageServer, + FoolscapStorageServer, ) from allmydata.storage_client import StorageFarmBroker from allmydata.immutable.layout import ( @@ -131,7 +121,7 @@ class FakeCHKCheckerAndUEBFetcher(object): )) class FakeClient(service.MultiService): - introducer_clients = [] # type: List[IntroducerClient] + introducer_clients : list[IntroducerClient] = [] 
DEFAULT_ENCODING_PARAMETERS = {"k":25, "happy": 75, "n": 100, @@ -427,7 +417,7 @@ class CHKCheckerAndUEBFetcherTests(SyncTestCase): """ storage_index = b"a" * 16 serverid = b"b" * 20 - storage = StorageServer(self.mktemp(), serverid) + storage = FoolscapStorageServer(StorageServer(self.mktemp(), serverid)) rref_without_ueb = LocalWrapper(storage, fireNow) yield write_bad_share(rref_without_ueb, storage_index) server_without_ueb = NoNetworkServer(serverid, rref_without_ueb) @@ -451,7 +441,7 @@ class CHKCheckerAndUEBFetcherTests(SyncTestCase): """ storage_index = b"a" * 16 serverid = b"b" * 20 - storage = StorageServer(self.mktemp(), serverid) + storage = FoolscapStorageServer(StorageServer(self.mktemp(), serverid)) rref_with_ueb = LocalWrapper(storage, fireNow) ueb = { "needed_shares": 2, @@ -487,7 +477,7 @@ class CHKCheckerAndUEBFetcherTests(SyncTestCase): in [b"b", b"c"] ) storages = list( - StorageServer(self.mktemp(), serverid) + FoolscapStorageServer(StorageServer(self.mktemp(), serverid)) for serverid in serverids ) diff --git a/src/allmydata/test/test_humanreadable.py b/src/allmydata/test/test_humanreadable.py index 94de8f6be..277abc283 100644 --- a/src/allmydata/test/test_humanreadable.py +++ b/src/allmydata/test/test_humanreadable.py @@ -4,17 +4,6 @@ Tests for allmydata.util.humanreadable. This module has been ported to Python 3. 
""" -from __future__ import unicode_literals -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -from future.utils import PY2 -if PY2: - from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - -from past.builtins import long - from twisted.trial import unittest from allmydata.util import humanreadable @@ -31,10 +20,11 @@ class NoArgumentException(Exception): class HumanReadable(unittest.TestCase): def test_repr(self): hr = humanreadable.hr - self.failUnlessEqual(hr(foo), "") + # we match on regex so this test isn't fragile about line-numbers + self.assertRegex(hr(foo), r"") self.failUnlessEqual(hr(self.test_repr), ">") - self.failUnlessEqual(hr(long(1)), "1") + self.failUnlessEqual(hr(1), "1") self.assertIn(hr(10**40), ["100000000000000000...000000000000000000", "100000000000000000...0000000000000000000"]) diff --git a/src/allmydata/test/test_hung_server.py b/src/allmydata/test/test_hung_server.py index 490315500..a78f8614e 100644 --- a/src/allmydata/test/test_hung_server.py +++ b/src/allmydata/test/test_hung_server.py @@ -3,14 +3,6 @@ """ Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 import os, shutil from twisted.trial import unittest @@ -73,7 +65,7 @@ class HungServerDownloadTest(GridTestMixin, ShouldFailMixin, PollMixin, def _copy_share(self, share, to_server): (sharenum, sharefile) = share (id, ss) = to_server - shares_dir = os.path.join(ss.original.storedir, "shares") + shares_dir = os.path.join(ss.original._server.storedir, "shares") si = uri.from_string(self.uri).get_storage_index() si_dir = os.path.join(shares_dir, storage_index_to_dir(si)) if not os.path.exists(si_dir): @@ -82,7 +74,7 @@ class HungServerDownloadTest(GridTestMixin, ShouldFailMixin, PollMixin, shutil.copy(sharefile, new_sharefile) self.shares = self.find_uri_shares(self.uri) # Make sure that the storage server has the share. - self.failUnless((sharenum, ss.original.my_nodeid, new_sharefile) + self.failUnless((sharenum, ss.original._server.my_nodeid, new_sharefile) in self.shares) def _corrupt_share(self, share, corruptor_func): diff --git a/src/allmydata/test/test_i2p_provider.py b/src/allmydata/test/test_i2p_provider.py index 364a85c5b..f470e77af 100644 --- a/src/allmydata/test/test_i2p_provider.py +++ b/src/allmydata/test/test_i2p_provider.py @@ -1,21 +1,13 @@ """ Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 import os from twisted.trial import unittest from twisted.internet import defer, error from twisted.python.usage import UsageError -from six.moves import StringIO -import mock +from io import StringIO +from unittest import mock from ..util import i2p_provider from ..scripts import create_node, runner @@ -177,7 +169,7 @@ class CreateDest(unittest.TestCase): with mock.patch("allmydata.util.i2p_provider.clientFromString", return_value=ep) as cfs: d = i2p_provider.create_config(reactor, cli_config) - tahoe_config_i2p, i2p_port, i2p_location = self.successResultOf(d) + i2p_config = self.successResultOf(d) connect_to_i2p.assert_called_with(reactor, cli_config, txi2p) cfs.assert_called_with(reactor, "goodport") @@ -189,9 +181,9 @@ class CreateDest(unittest.TestCase): "dest.private_key_file": os.path.join("private", "i2p_dest.privkey"), } - self.assertEqual(tahoe_config_i2p, expected) - self.assertEqual(i2p_port, "listen:i2p") - self.assertEqual(i2p_location, "i2p:FOOBAR.b32.i2p:3457") + self.assertEqual(dict(i2p_config.node_config["i2p"]), expected) + self.assertEqual(i2p_config.tub_ports, ["listen:i2p"]) + self.assertEqual(i2p_config.tub_locations, ["i2p:FOOBAR.b32.i2p:3457"]) _None = object() class FakeConfig(dict): diff --git a/src/allmydata/test/test_immutable.py b/src/allmydata/test/test_immutable.py index 12f2012e0..39c31623d 100644 --- a/src/allmydata/test/test_immutable.py +++ b/src/allmydata/test/test_immutable.py @@ -1,14 +1,6 @@ """ This module has been ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 import random diff --git a/src/allmydata/test/test_introducer.py b/src/allmydata/test/test_introducer.py index 0475d3f6c..d37df48a9 100644 --- a/src/allmydata/test/test_introducer.py +++ b/src/allmydata/test/test_introducer.py @@ -1,14 +1,6 @@ """ Ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 from six import ensure_binary, ensure_text diff --git a/src/allmydata/test/test_iputil.py b/src/allmydata/test/test_iputil.py index 081c80ee3..26274830f 100644 --- a/src/allmydata/test/test_iputil.py +++ b/src/allmydata/test/test_iputil.py @@ -4,18 +4,13 @@ Tests for allmydata.util.iputil. Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2, native_str -if PY2: - from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 +from __future__ import annotations import os, socket import gc +from functools import wraps +from typing import TypeVar, Callable from testtools.matchers import ( MatchesAll, IsInstance, @@ -25,8 +20,6 @@ from testtools.matchers import ( from twisted.trial import unittest -from tenacity import retry, stop_after_attempt - from foolscap.api import Tub from allmydata.util import iputil, gcutil @@ -39,6 +32,43 @@ from .common import ( SyncTestCase, ) +T = TypeVar("T", contravariant=True) +U = TypeVar("U", covariant=True) + +def retry(stop: Callable[[], bool]) -> Callable[[Callable[[T], U]], Callable[[T], U]]: + """ + Call a function until the predicate says to stop or the function stops + raising an exception. + + :param stop: A callable to call after the decorated function raises an + exception. The decorated function will be called again if ``stop`` + returns ``False``. + + :return: A decorator function. + """ + def decorate(f: Callable[[T], U]) -> Callable[[T], U]: + @wraps(f) + def decorator(self: T) -> U: + while True: + try: + return f(self) + except Exception: + if stop(): + raise + return decorator + return decorate + +def stop_after_attempt(limit: int) -> Callable[[], bool]: + """ + Stop after ``limit`` calls. 
+ """ + counter = 0 + def check(): + nonlocal counter + counter += 1 + return counter < limit + return check + class ListenOnUsed(unittest.TestCase): """Tests for listenOnUnused.""" @@ -127,7 +157,7 @@ class GetLocalAddressesSyncTests(SyncTestCase): IsInstance(list), AllMatch( MatchesAll( - IsInstance(native_str), + IsInstance(str), MatchesPredicate( lambda addr: socket.inet_pton(socket.AF_INET, addr), "%r is not an IPv4 address.", diff --git a/src/allmydata/test/test_istorageserver.py b/src/allmydata/test/test_istorageserver.py index bd056ae13..ded9ac1ac 100644 --- a/src/allmydata/test/test_istorageserver.py +++ b/src/allmydata/test/test_istorageserver.py @@ -1,32 +1,30 @@ """ Tests for the ``IStorageServer`` interface. +Keep in mind that ``IStorageServer`` is actually the storage _client_ interface. + Note that for performance, in the future we might want the same node to be reused across tests, so each test should be careful to generate unique storage indexes. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals +from __future__ import annotations -from future.utils import PY2, bchr - -if PY2: - # fmt: off - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - # fmt: on +from future.utils import bchr from random import Random +from unittest import SkipTest from twisted.internet.defer import inlineCallbacks, returnValue - +from twisted.internet.task import Clock from foolscap.api import Referenceable, RemoteException +# A better name for this would be IStorageClient... from allmydata.interfaces import IStorageServer + from .common_system import SystemTestMixin from .common import AsyncTestCase +from allmydata.storage.server import StorageServer # not a IStorageServer!! 
# Use random generator with known seed, so results are reproducible if tests @@ -56,7 +54,7 @@ class IStorageServerSharedAPIsTestsMixin(object): """ Tests for ``IStorageServer``'s shared APIs. - ``self.storage_server`` is expected to provide ``IStorageServer``. + ``self.storage_client`` is expected to provide ``IStorageServer``. """ @inlineCallbacks @@ -65,7 +63,7 @@ class IStorageServerSharedAPIsTestsMixin(object): ``IStorageServer`` returns a dictionary where the key is an expected protocol version. """ - result = yield self.storage_server.get_version() + result = yield self.storage_client.get_version() self.assertIsInstance(result, dict) self.assertIn(b"http://allmydata.org/tahoe/protocols/storage/v1", result) @@ -74,11 +72,16 @@ class IStorageServerImmutableAPIsTestsMixin(object): """ Tests for ``IStorageServer``'s immutable APIs. - ``self.storage_server`` is expected to provide ``IStorageServer``. + ``self.storage_client`` is expected to provide ``IStorageServer``. ``self.disconnect()`` should disconnect and then reconnect, creating a new - ``self.storage_server``. Some implementations may wish to skip tests using + ``self.storage_client``. Some implementations may wish to skip tests using this; HTTP has no notion of disconnection. + + ``self.server`` is expected to be the corresponding + ``allmydata.storage.server.StorageServer`` instance. Time should be + instrumented, such that ``self.fake_time()`` and ``self.fake_sleep()`` + return and advance the server time, respectively. """ @inlineCallbacks @@ -87,7 +90,7 @@ class IStorageServerImmutableAPIsTestsMixin(object): allocate_buckets() with a new storage index returns the matching shares. 
""" - (already_got, allocated) = yield self.storage_server.allocate_buckets( + (already_got, allocated) = yield self.storage_client.allocate_buckets( new_storage_index(), renew_secret=new_secret(), cancel_secret=new_secret(), @@ -110,7 +113,7 @@ class IStorageServerImmutableAPIsTestsMixin(object): new_secret(), new_secret(), ) - (already_got, allocated) = yield self.storage_server.allocate_buckets( + (already_got, allocated) = yield self.storage_client.allocate_buckets( storage_index, renew_secret, cancel_secret, @@ -118,7 +121,7 @@ class IStorageServerImmutableAPIsTestsMixin(object): allocated_size=1024, canary=Referenceable(), ) - (already_got2, allocated2) = yield self.storage_server.allocate_buckets( + (already_got2, allocated2) = yield self.storage_client.allocate_buckets( storage_index, renew_secret, cancel_secret, @@ -146,7 +149,7 @@ class IStorageServerImmutableAPIsTestsMixin(object): new_secret(), new_secret(), ) - (_, allocated) = yield self.storage_server.allocate_buckets( + (_, allocated) = yield self.storage_client.allocate_buckets( storage_index, renew_secret, cancel_secret, @@ -155,14 +158,15 @@ class IStorageServerImmutableAPIsTestsMixin(object): canary=Referenceable(), ) - # Bucket 1 is fully written in one go. 
- yield allocated[0].callRemote("write", 0, b"1" * 1024) + # Bucket 1 get some data written (but not all, or HTTP implicitly + # finishes the upload) + yield allocated[0].callRemote("write", 0, b"1" * 1023) # Disconnect or abort, depending on the test: yield abort_or_disconnect(allocated[0]) # Write different data with no complaint: - (_, allocated) = yield self.storage_server.allocate_buckets( + (_, allocated) = yield self.storage_client.allocate_buckets( storage_index, renew_secret, cancel_secret, @@ -172,20 +176,6 @@ class IStorageServerImmutableAPIsTestsMixin(object): ) yield allocated[0].callRemote("write", 0, b"2" * 1024) - def test_disconnection(self): - """ - If we disconnect in the middle of writing to a bucket, all data is - wiped, and it's even possible to write different data to the bucket. - - (In the real world one shouldn't do that, but writing different data is - a good way to test that the original data really was wiped.) - - HTTP protocol should skip this test, since disconnection is meaningless - concept; this is more about testing implicit contract the Foolscap - implementation depends on doesn't change as we refactor things. - """ - return self.abort_or_disconnect_half_way(lambda _: self.disconnect()) - @inlineCallbacks def test_written_shares_are_allocated(self): """ @@ -198,7 +188,7 @@ class IStorageServerImmutableAPIsTestsMixin(object): new_secret(), new_secret(), ) - (_, allocated) = yield self.storage_server.allocate_buckets( + (_, allocated) = yield self.storage_client.allocate_buckets( storage_index, renew_secret, cancel_secret, @@ -219,7 +209,7 @@ class IStorageServerImmutableAPIsTestsMixin(object): # Bucket 0 has partial write. 
yield allocated[0].callRemote("write", 0, b"1" * 512) - (already_got, _) = yield self.storage_server.allocate_buckets( + (already_got, _) = yield self.storage_client.allocate_buckets( storage_index, renew_secret, cancel_secret, @@ -242,7 +232,7 @@ class IStorageServerImmutableAPIsTestsMixin(object): new_secret(), new_secret(), ) - (_, allocated) = yield self.storage_server.allocate_buckets( + (_, allocated) = yield self.storage_client.allocate_buckets( storage_index, renew_secret, cancel_secret, @@ -261,7 +251,7 @@ class IStorageServerImmutableAPIsTestsMixin(object): yield allocated[2].callRemote("write", 0, b"3" * 512) yield allocated[2].callRemote("close") - buckets = yield self.storage_server.get_buckets(storage_index) + buckets = yield self.storage_client.get_buckets(storage_index) self.assertEqual(set(buckets.keys()), {1, 2}) self.assertEqual( @@ -282,7 +272,7 @@ class IStorageServerImmutableAPIsTestsMixin(object): new_secret(), new_secret(), ) - (_, allocated) = yield self.storage_server.allocate_buckets( + (_, allocated) = yield self.storage_client.allocate_buckets( storage_index, renew_secret, cancel_secret, @@ -307,7 +297,7 @@ class IStorageServerImmutableAPIsTestsMixin(object): new_secret(), new_secret(), ) - (_, allocated) = yield self.storage_server.allocate_buckets( + (_, allocated) = yield self.storage_client.allocate_buckets( storage_index, renew_secret, cancel_secret, @@ -321,7 +311,7 @@ class IStorageServerImmutableAPIsTestsMixin(object): yield allocated[0].callRemote("write", 5, b"1" * 20) yield allocated[0].callRemote("close") - buckets = yield self.storage_server.get_buckets(storage_index) + buckets = yield self.storage_client.get_buckets(storage_index) self.assertEqual(set(buckets.keys()), {0}) self.assertEqual((yield buckets[0].callRemote("read", 0, 25)), b"1" * 25) @@ -346,7 +336,7 @@ class IStorageServerImmutableAPIsTestsMixin(object): ``IStorageServer.get_buckets()`` implementations. 
""" storage_index = new_storage_index() - (_, allocated) = yield self.storage_server.allocate_buckets( + (_, allocated) = yield self.storage_client.allocate_buckets( storage_index, renew_secret=new_secret(), cancel_secret=new_secret(), @@ -362,7 +352,7 @@ class IStorageServerImmutableAPIsTestsMixin(object): # Bucket 2 is partially written yield allocated[2].callRemote("write", 0, b"1" * 5) - buckets = yield self.storage_server.get_buckets(storage_index) + buckets = yield self.storage_client.get_buckets(storage_index) self.assertEqual(set(buckets.keys()), {1}) @inlineCallbacks @@ -375,7 +365,7 @@ class IStorageServerImmutableAPIsTestsMixin(object): length = 256 * 17 storage_index = new_storage_index() - (_, allocated) = yield self.storage_server.allocate_buckets( + (_, allocated) = yield self.storage_client.allocate_buckets( storage_index, renew_secret=new_secret(), cancel_secret=new_secret(), @@ -388,7 +378,7 @@ class IStorageServerImmutableAPIsTestsMixin(object): yield allocated[0].callRemote("write", 0, total_data) yield allocated[0].callRemote("close") - buckets = yield self.storage_server.get_buckets(storage_index) + buckets = yield self.storage_client.get_buckets(storage_index) bucket = buckets[0] for start, to_read in [ (0, 250), # fraction @@ -408,10 +398,12 @@ class IStorageServerImmutableAPIsTestsMixin(object): def create_share(self): """Create a share, return the storage index.""" storage_index = new_storage_index() - (_, allocated) = yield self.storage_server.allocate_buckets( + renew_secret = new_secret() + cancel_secret = new_secret() + (_, allocated) = yield self.storage_client.allocate_buckets( storage_index, - renew_secret=new_secret(), - cancel_secret=new_secret(), + renew_secret=renew_secret, + cancel_secret=cancel_secret, sharenums=set(range(1)), allocated_size=10, canary=Referenceable(), @@ -419,7 +411,7 @@ class IStorageServerImmutableAPIsTestsMixin(object): yield allocated[0].callRemote("write", 0, b"0123456789") yield 
allocated[0].callRemote("close") - returnValue(storage_index) + returnValue((storage_index, renew_secret, cancel_secret)) @inlineCallbacks def test_bucket_advise_corrupt_share(self): @@ -428,8 +420,8 @@ class IStorageServerImmutableAPIsTestsMixin(object): ``IStorageServer.get_buckets()`` does not result in error (other behavior is opaque at this level of abstraction). """ - storage_index = yield self.create_share() - buckets = yield self.storage_server.get_buckets(storage_index) + storage_index, _, _ = yield self.create_share() + buckets = yield self.storage_client.get_buckets(storage_index) yield buckets[0].callRemote("advise_corrupt_share", b"OH NO") @inlineCallbacks @@ -439,17 +431,98 @@ class IStorageServerImmutableAPIsTestsMixin(object): result in error (other behavior is opaque at this level of abstraction). """ - storage_index = yield self.create_share() - yield self.storage_server.advise_corrupt_share( + storage_index, _, _ = yield self.create_share() + yield self.storage_client.advise_corrupt_share( b"immutable", storage_index, 0, b"ono" ) + @inlineCallbacks + def test_advise_corrupt_share_unknown_share_number(self): + """ + Calling ``advise_corrupt_share()`` on an immutable share, with an + unknown share number, does not result in error. + """ + storage_index, _, _ = yield self.create_share() + yield self.storage_client.advise_corrupt_share( + b"immutable", storage_index, 999, b"ono" + ) + + @inlineCallbacks + def test_allocate_buckets_creates_lease(self): + """ + When buckets are created using ``allocate_buckets()``, a lease is + created once writing is done. + """ + storage_index, _, _ = yield self.create_share() + [lease] = self.server.get_leases(storage_index) + # Lease expires in 31 days. + self.assertTrue( + lease.get_expiration_time() - self.fake_time() > (31 * 24 * 60 * 60 - 10) + ) + + @inlineCallbacks + def test_add_lease_non_existent(self): + """ + If the storage index doesn't exist, adding the lease silently does nothing. 
+ """ + storage_index = new_storage_index() + self.assertEqual(list(self.server.get_leases(storage_index)), []) + + renew_secret = new_secret() + cancel_secret = new_secret() + + # Add a lease: + yield self.storage_client.add_lease(storage_index, renew_secret, cancel_secret) + self.assertEqual(list(self.server.get_leases(storage_index)), []) + + @inlineCallbacks + def test_add_lease_renewal(self): + """ + If the lease secret is reused, ``add_lease()`` extends the existing + lease. + """ + storage_index, renew_secret, cancel_secret = yield self.create_share() + [lease] = self.server.get_leases(storage_index) + initial_expiration_time = lease.get_expiration_time() + + # Time passes: + self.fake_sleep(178) + + # We renew the lease: + yield self.storage_client.add_lease(storage_index, renew_secret, cancel_secret) + [lease] = self.server.get_leases(storage_index) + new_expiration_time = lease.get_expiration_time() + self.assertEqual(new_expiration_time - initial_expiration_time, 178) + + @inlineCallbacks + def test_add_new_lease(self): + """ + If a new lease secret is used, ``add_lease()`` creates a new lease. + """ + storage_index, _, _ = yield self.create_share() + [lease] = self.server.get_leases(storage_index) + initial_expiration_time = lease.get_expiration_time() + + # Time passes: + self.fake_sleep(167) + + # We create a new lease: + renew_secret = new_secret() + cancel_secret = new_secret() + yield self.storage_client.add_lease(storage_index, renew_secret, cancel_secret) + [lease1, lease2] = self.server.get_leases(storage_index) + self.assertEqual(lease1.get_expiration_time(), initial_expiration_time) + self.assertEqual(lease2.get_expiration_time() - initial_expiration_time, 167) + class IStorageServerMutableAPIsTestsMixin(object): """ Tests for ``IStorageServer``'s mutable APIs. - ``self.storage_server`` is expected to provide ``IStorageServer``. + ``self.storage_client`` is expected to provide ``IStorageServer``. 
+ + ``self.server`` is expected to be the corresponding + ``allmydata.storage.server.StorageServer`` instance. ``STARAW`` is short for ``slot_testv_and_readv_and_writev``. """ @@ -460,7 +533,7 @@ class IStorageServerMutableAPIsTestsMixin(object): def staraw(self, *args, **kwargs): """Like ``slot_testv_and_readv_and_writev``, but less typing.""" - return self.storage_server.slot_testv_and_readv_and_writev(*args, **kwargs) + return self.storage_client.slot_testv_and_readv_and_writev(*args, **kwargs) @inlineCallbacks def test_STARAW_reads_after_write(self): @@ -756,7 +829,7 @@ class IStorageServerMutableAPIsTestsMixin(object): ) self.assertEqual(written, True) - reads = yield self.storage_server.slot_readv( + reads = yield self.storage_client.slot_readv( storage_index, shares=[0, 1], # Whole thing, partial, going beyond the edge, completely outside @@ -787,7 +860,7 @@ class IStorageServerMutableAPIsTestsMixin(object): ) self.assertEqual(written, True) - reads = yield self.storage_server.slot_readv( + reads = yield self.storage_client.slot_readv( storage_index, shares=[], readv=[(0, 7)], @@ -798,12 +871,25 @@ class IStorageServerMutableAPIsTestsMixin(object): ) @inlineCallbacks - def test_advise_corrupt_share(self): + def test_slot_readv_unknown_storage_index(self): """ - Calling ``advise_corrupt_share()`` on a mutable share does not - result in error (other behavior is opaque at this level of - abstraction). + With unknown storage index, ``IStorageServer.slot_readv()`` returns + empty dict. 
""" + storage_index = new_storage_index() + reads = yield self.storage_client.slot_readv( + storage_index, + shares=[], + readv=[(0, 7)], + ) + self.assertEqual( + reads, + {}, + ) + + @inlineCallbacks + def create_slot(self): + """Create a slot with sharenum 0.""" secrets = self.new_secrets() storage_index = new_storage_index() (written, _) = yield self.staraw( @@ -815,56 +901,259 @@ class IStorageServerMutableAPIsTestsMixin(object): r_vector=[], ) self.assertEqual(written, True) + returnValue((secrets, storage_index)) - yield self.storage_server.advise_corrupt_share( + @inlineCallbacks + def test_advise_corrupt_share(self): + """ + Calling ``advise_corrupt_share()`` on a mutable share does not + result in error (other behavior is opaque at this level of + abstraction). + """ + secrets, storage_index = yield self.create_slot() + + yield self.storage_client.advise_corrupt_share( b"mutable", storage_index, 0, b"ono" ) + @inlineCallbacks + def test_advise_corrupt_share_unknown_share_number(self): + """ + Calling ``advise_corrupt_share()`` on a mutable share with an unknown + share number does not result in error (other behavior is opaque at this + level of abstraction). + """ + secrets, storage_index = yield self.create_slot() -class _FoolscapMixin(SystemTestMixin): - """Run tests on Foolscap version of ``IStorageServer.""" + yield self.storage_client.advise_corrupt_share( + b"mutable", storage_index, 999, b"ono" + ) - def _get_native_server(self): - return next(iter(self.clients[0].storage_broker.get_known_servers())) + @inlineCallbacks + def test_STARAW_create_lease(self): + """ + When STARAW creates a new slot, it also creates a lease. + """ + _, storage_index = yield self.create_slot() + [lease] = self.server.get_slot_leases(storage_index) + # Lease expires in 31 days. 
+ self.assertTrue( + lease.get_expiration_time() - self.fake_time() > (31 * 24 * 60 * 60 - 10) + ) + + @inlineCallbacks + def test_STARAW_renews_lease(self): + """ + When STARAW is run on an existing slot with same renewal secret, it + renews the lease. + """ + secrets, storage_index = yield self.create_slot() + [lease] = self.server.get_slot_leases(storage_index) + initial_expire = lease.get_expiration_time() + + # Time passes... + self.fake_sleep(17) + + # We do another write: + (written, _) = yield self.staraw( + storage_index, + secrets, + tw_vectors={ + 0: ([], [(0, b"1234567")], 7), + }, + r_vector=[], + ) + self.assertEqual(written, True) + + # The lease has been renewed: + [lease] = self.server.get_slot_leases(storage_index) + self.assertEqual(lease.get_expiration_time() - initial_expire, 17) + + @inlineCallbacks + def test_STARAW_new_lease(self): + """ + When STARAW is run with a new renewal secret on an existing slot, it + adds a new lease. + """ + secrets, storage_index = yield self.create_slot() + [lease] = self.server.get_slot_leases(storage_index) + initial_expire = lease.get_expiration_time() + + # Time passes... + self.fake_sleep(19) + + # We do another write: + (written, _) = yield self.staraw( + storage_index, + (secrets[0], new_secret(), new_secret()), + tw_vectors={ + 0: ([], [(0, b"1234567")], 7), + }, + r_vector=[], + ) + self.assertEqual(written, True) + + # A new lease was added: + [lease1, lease2] = self.server.get_slot_leases(storage_index) + self.assertEqual(lease1.get_expiration_time(), initial_expire) + self.assertEqual(lease2.get_expiration_time() - initial_expire, 19) + + @inlineCallbacks + def test_add_lease_renewal(self): + """ + If the lease secret is reused, ``add_lease()`` extends the existing + lease. 
+ """ + secrets, storage_index = yield self.create_slot() + [lease] = self.server.get_slot_leases(storage_index) + initial_expiration_time = lease.get_expiration_time() + + # Time passes: + self.fake_sleep(178) + + # We renew the lease: + yield self.storage_client.add_lease(storage_index, secrets[1], secrets[2]) + [lease] = self.server.get_slot_leases(storage_index) + new_expiration_time = lease.get_expiration_time() + self.assertEqual(new_expiration_time - initial_expiration_time, 178) + + @inlineCallbacks + def test_add_new_lease(self): + """ + If a new lease secret is used, ``add_lease()`` creates a new lease. + """ + secrets, storage_index = yield self.create_slot() + [lease] = self.server.get_slot_leases(storage_index) + initial_expiration_time = lease.get_expiration_time() + + # Time passes: + self.fake_sleep(167) + + # We create a new lease: + renew_secret = new_secret() + cancel_secret = new_secret() + yield self.storage_client.add_lease(storage_index, renew_secret, cancel_secret) + [lease1, lease2] = self.server.get_slot_leases(storage_index) + self.assertEqual(lease1.get_expiration_time(), initial_expiration_time) + self.assertEqual(lease2.get_expiration_time() - initial_expiration_time, 167) + + +class _SharedMixin(SystemTestMixin): + """Base class for Foolscap and HTTP mixins.""" + + SKIP_TESTS : set[str] = set() + + def _get_istorage_server(self): + native_server = next(iter(self.clients[0].storage_broker.get_known_servers())) + client = native_server.get_storage_server() + self.assertTrue(IStorageServer.providedBy(client)) + return client @inlineCallbacks def setUp(self): + if self._testMethodName in self.SKIP_TESTS: + raise SkipTest( + "Test {} is still not supported".format(self._testMethodName) + ) + AsyncTestCase.setUp(self) + self.basedir = "test_istorageserver/" + self.id() yield SystemTestMixin.setUp(self) yield self.set_up_nodes(1) - self.storage_server = self._get_native_server().get_storage_server() - 
self.assertTrue(IStorageServer.providedBy(self.storage_server)) + self.server = None + for s in self.clients[0].services: + if isinstance(s, StorageServer): + self.server = s + break + assert self.server is not None, "Couldn't find StorageServer" + self._clock = Clock() + self._clock.advance(123456) + self.server._clock = self._clock + self.storage_client = self._get_istorage_server() + + def fake_time(self): + """Return the current fake, test-controlled, time.""" + return self._clock.seconds() + + def fake_sleep(self, seconds): + """Advance the fake time by the given number of seconds.""" + self._clock.advance(seconds) @inlineCallbacks def tearDown(self): AsyncTestCase.tearDown(self) yield SystemTestMixin.tearDown(self) + +class FoolscapSharedAPIsTests( + _SharedMixin, IStorageServerSharedAPIsTestsMixin, AsyncTestCase +): + """Foolscap-specific tests for shared ``IStorageServer`` APIs.""" + + FORCE_FOOLSCAP_FOR_STORAGE = True + + +class HTTPSharedAPIsTests( + _SharedMixin, IStorageServerSharedAPIsTestsMixin, AsyncTestCase +): + """HTTP-specific tests for shared ``IStorageServer`` APIs.""" + + FORCE_FOOLSCAP_FOR_STORAGE = False + + +class FoolscapImmutableAPIsTests( + _SharedMixin, IStorageServerImmutableAPIsTestsMixin, AsyncTestCase +): + """Foolscap-specific tests for immutable ``IStorageServer`` APIs.""" + + FORCE_FOOLSCAP_FOR_STORAGE = True + + def test_disconnection(self): + """ + If we disconnect in the middle of writing to a bucket, all data is + wiped, and it's even possible to write different data to the bucket. + + (In the real world one shouldn't do that, but writing different data is + a good way to test that the original data really was wiped.) + + HTTP protocol doesn't need this test, since disconnection is a + meaningless concept; this is more about testing the implicit contract + the Foolscap implementation depends on doesn't change as we refactor + things. 
+ """ + return self.abort_or_disconnect_half_way(lambda _: self.disconnect()) + @inlineCallbacks def disconnect(self): """ Disconnect and then reconnect with a new ``IStorageServer``. """ - current = self.storage_server + current = self.storage_client yield self.bounce_client(0) - self.storage_server = self._get_native_server().get_storage_server() - assert self.storage_server is not current + self.storage_client = self._get_istorage_server() + assert self.storage_client is not current -class FoolscapSharedAPIsTests( - _FoolscapMixin, IStorageServerSharedAPIsTestsMixin, AsyncTestCase +class HTTPImmutableAPIsTests( + _SharedMixin, IStorageServerImmutableAPIsTestsMixin, AsyncTestCase ): - """Foolscap-specific tests for shared ``IStorageServer`` APIs.""" + """HTTP-specific tests for immutable ``IStorageServer`` APIs.""" - -class FoolscapImmutableAPIsTests( - _FoolscapMixin, IStorageServerImmutableAPIsTestsMixin, AsyncTestCase -): - """Foolscap-specific tests for immutable ``IStorageServer`` APIs.""" + FORCE_FOOLSCAP_FOR_STORAGE = False class FoolscapMutableAPIsTests( - _FoolscapMixin, IStorageServerMutableAPIsTestsMixin, AsyncTestCase + _SharedMixin, IStorageServerMutableAPIsTestsMixin, AsyncTestCase ): - """Foolscap-specific tests for immutable ``IStorageServer`` APIs.""" + """Foolscap-specific tests for mutable ``IStorageServer`` APIs.""" + + FORCE_FOOLSCAP_FOR_STORAGE = True + + +class HTTPMutableAPIsTests( + _SharedMixin, IStorageServerMutableAPIsTestsMixin, AsyncTestCase +): + """HTTP-specific tests for mutable ``IStorageServer`` APIs.""" + + FORCE_FOOLSCAP_FOR_STORAGE = False diff --git a/src/allmydata/test/test_json_metadata.py b/src/allmydata/test/test_json_metadata.py index a0cb9c142..950a5847c 100644 --- a/src/allmydata/test/test_json_metadata.py +++ b/src/allmydata/test/test_json_metadata.py @@ -1,14 +1,6 @@ """ Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 from twisted.trial.unittest import TestCase diff --git a/src/allmydata/test/test_log.py b/src/allmydata/test/test_log.py index bf079aaeb..0d3361b36 100644 --- a/src/allmydata/test/test_log.py +++ b/src/allmydata/test/test_log.py @@ -4,15 +4,6 @@ Tests for allmydata.util.log. Ported to Python 3. """ -from __future__ import unicode_literals -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -from future.utils import PY2, native_str -if PY2: - from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - from twisted.trial import unittest from twisted.python.failure import Failure @@ -167,4 +158,4 @@ class Log(unittest.TestCase): obj.log(**{"my": "message"}) for message in self.messages: for k in message[-1].keys(): - self.assertIsInstance(k, native_str) + self.assertIsInstance(k, str) diff --git a/src/allmydata/test/test_monitor.py b/src/allmydata/test/test_monitor.py index 7010da73a..492597bd9 100644 --- a/src/allmydata/test/test_monitor.py +++ b/src/allmydata/test/test_monitor.py @@ -2,15 +2,6 @@ Tests for allmydata.monitor. 
""" -from __future__ import unicode_literals -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -from future.utils import PY2 -if PY2: - from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - from twisted.trial import unittest from allmydata.monitor import Monitor, OperationCancelledError diff --git a/src/allmydata/test/test_multi_introducers.py b/src/allmydata/test/test_multi_introducers.py index a385abe54..f5d0ff98c 100644 --- a/src/allmydata/test/test_multi_introducers.py +++ b/src/allmydata/test/test_multi_introducers.py @@ -1,14 +1,7 @@ """ Ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 from six import ensure_binary import os @@ -28,14 +21,14 @@ INTRODUCERS_CFG_FURLS_COMMENTED="""introducers: class MultiIntroTests(unittest.TestCase): - def setUp(self): + async def setUp(self): # setup tahoe.cfg and basedir/private/introducers # create a custom tahoe.cfg self.basedir = os.path.dirname(self.mktemp()) c = open(os.path.join(self.basedir, "tahoe.cfg"), "w") config = {'hide-ip':False, 'listen': 'tcp', 'port': None, 'location': None, 'hostname': 'example.net'} - write_node_config(c, config) + await write_node_config(c, config) c.write("[storage]\n") c.write("enabled = false\n") c.close() @@ -63,8 +56,7 @@ class MultiIntroTests(unittest.TestCase): # assertions self.failUnlessEqual(ic_count, len(connections["introducers"])) - @defer.inlineCallbacks - def test_read_introducer_furl_from_tahoecfg(self): + async def test_read_introducer_furl_from_tahoecfg(self): """ The 
deprecated [client]introducer.furl item is still read and respected. """ @@ -72,7 +64,7 @@ class MultiIntroTests(unittest.TestCase): c = open(os.path.join(self.basedir, "tahoe.cfg"), "w") config = {'hide-ip':False, 'listen': 'tcp', 'port': None, 'location': None, 'hostname': 'example.net'} - write_node_config(c, config) + await write_node_config(c, config) fake_furl = "furl1" c.write("[client]\n") c.write("introducer.furl = %s\n" % fake_furl) @@ -139,14 +131,14 @@ introducers: """ class NoDefault(unittest.TestCase): - def setUp(self): + async def setUp(self): # setup tahoe.cfg and basedir/private/introducers # create a custom tahoe.cfg self.basedir = os.path.dirname(self.mktemp()) c = open(os.path.join(self.basedir, "tahoe.cfg"), "w") config = {'hide-ip':False, 'listen': 'tcp', 'port': None, 'location': None, 'hostname': 'example.net'} - write_node_config(c, config) + await write_node_config(c, config) c.write("[storage]\n") c.write("enabled = false\n") c.close() diff --git a/src/allmydata/test/test_netstring.py b/src/allmydata/test/test_netstring.py index d5ff379cd..6f9a21ee2 100644 --- a/src/allmydata/test/test_netstring.py +++ b/src/allmydata/test/test_netstring.py @@ -3,14 +3,6 @@ Tests for allmydata.util.netstring. Ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 from twisted.trial import unittest diff --git a/src/allmydata/test/test_no_network.py b/src/allmydata/test/test_no_network.py index b1aa1350a..88eb27979 100644 --- a/src/allmydata/test/test_no_network.py +++ b/src/allmydata/test/test_no_network.py @@ -3,14 +3,6 @@ Test the NoNetworkGrid test harness. Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 from twisted.trial import unittest from twisted.application import service diff --git a/src/allmydata/test/test_node.py b/src/allmydata/test/test_node.py index cf5fa27f3..90da877fb 100644 --- a/src/allmydata/test/test_node.py +++ b/src/allmydata/test/test_node.py @@ -1,14 +1,4 @@ -""" -Ported to Python 3. -""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 +from __future__ import annotations import base64 import os @@ -31,6 +21,7 @@ from unittest import skipIf from twisted.python.filepath import ( FilePath, ) +from twisted.python.runtime import platform from twisted.trial import unittest from twisted.internet import defer @@ -69,6 +60,9 @@ import allmydata.test.common_util as testutil from .common import ( ConstantAddresses, + SameProcessStreamEndpointAssigner, + UseNode, + superuser, ) def port_numbers(): @@ -80,11 +74,10 @@ class LoggingMultiService(service.MultiService): # see https://tahoe-lafs.org/trac/tahoe-lafs/ticket/2946 -def testing_tub(config_data=''): +def testing_tub(reactor, config_data=''): """ Creates a 'main' Tub for testing purposes, from config data """ - from twisted.internet import reactor basedir = 'dummy_basedir' config = config_from_string(basedir, 'DEFAULT_PORTNUMFILE_BLANK', config_data) fileutil.make_dirs(os.path.join(basedir, 'private')) @@ -112,6 +105,9 @@ class 
TestCase(testutil.SignalMixin, unittest.TestCase): # try to bind the port. We'll use a low-numbered one that's likely to # conflict with another service to prove it. self._available_port = 22 + self.port_assigner = SameProcessStreamEndpointAssigner() + self.port_assigner.setUp() + self.addCleanup(self.port_assigner.tearDown) def _test_location( self, @@ -137,11 +133,23 @@ class TestCase(testutil.SignalMixin, unittest.TestCase): :param local_addresses: If not ``None`` then a list of addresses to supply to the system under test as local addresses. """ + from twisted.internet import reactor + basedir = self.mktemp() create_node_dir(basedir, "testing") + if tub_port is None: + # Always configure a usable tub.port address instead of relying on + # the automatic port assignment. The automatic port assignment is + # prone to collisions and spurious test failures. + _, tub_port = self.port_assigner.assign(reactor) + config_data = "[node]\n" - if tub_port: - config_data += "tub.port = {}\n".format(tub_port) + config_data += "tub.port = {}\n".format(tub_port) + + # If they wanted a certain location, go for it. This probably won't + # agree with the tub.port value we set but that only matters if + # anything tries to use this to establish a connection ... which + # nothing in this test suite will. 
if tub_location is not None: config_data += "tub.location = {}\n".format(tub_location) @@ -149,7 +157,7 @@ class TestCase(testutil.SignalMixin, unittest.TestCase): self.patch(iputil, 'get_local_addresses_sync', lambda: local_addresses) - tub = testing_tub(config_data) + tub = testing_tub(reactor, config_data) class Foo(object): pass @@ -245,6 +253,20 @@ class TestCase(testutil.SignalMixin, unittest.TestCase): with self.assertRaises(MissingConfigEntry): config.get_config("node", "log_gatherer.furl") + def test_missing_config_section(self): + """ + Enumerating a missing section returns empty dict + """ + basedir = self.mktemp() + fileutil.make_dirs(basedir) + with open(os.path.join(basedir, 'tahoe.cfg'), 'w'): + pass + config = read_config(basedir, "") + self.assertEquals( + config.enumerate_section("not-a-section"), + {} + ) + def test_config_required(self): """ Asking for missing (but required) configuration is an error @@ -303,10 +325,8 @@ class TestCase(testutil.SignalMixin, unittest.TestCase): default = [("hello", "world")] self.assertEqual(config.items("nosuch", default), default) - @skipIf( - "win32" in sys.platform.lower() or "cygwin" in sys.platform.lower(), - "We don't know how to set permissions on Windows.", - ) + @skipIf(platform.isWindows(), "We don't know how to set permissions on Windows.") + @skipIf(superuser, "cannot test as superuser with all permissions") def test_private_config_unreadable(self): """ Asking for inaccessible private config is an error @@ -321,10 +341,8 @@ class TestCase(testutil.SignalMixin, unittest.TestCase): with self.assertRaises(Exception): config.get_or_create_private_config("foo") - @skipIf( - "win32" in sys.platform.lower() or "cygwin" in sys.platform.lower(), - "We don't know how to set permissions on Windows.", - ) + @skipIf(platform.isWindows(), "We don't know how to set permissions on Windows.") + @skipIf(superuser, "cannot test as superuser with all permissions") def test_private_config_unreadable_preexisting(self): 
""" error if reading private config data fails @@ -381,6 +399,7 @@ class TestCase(testutil.SignalMixin, unittest.TestCase): self.assertEqual(len(counter), 1) # don't call unless necessary self.assertEqual(value, "newer") + @skipIf(superuser, "cannot test as superuser with all permissions") def test_write_config_unwritable_file(self): """ Existing behavior merely logs any errors upon writing @@ -431,7 +450,12 @@ class TestCase(testutil.SignalMixin, unittest.TestCase): @defer.inlineCallbacks def test_logdir_is_str(self): - basedir = "test_node/test_logdir_is_str" + from twisted.internet import reactor + + basedir = FilePath(self.mktemp()) + fixture = UseNode(None, None, basedir, "pb://introducer/furl", {}, reactor=reactor) + fixture.setUp() + self.addCleanup(fixture.cleanUp) ns = Namespace() ns.called = False @@ -440,8 +464,7 @@ class TestCase(testutil.SignalMixin, unittest.TestCase): self.failUnless(isinstance(logdir, str), logdir) self.patch(foolscap.logging.log, 'setLogDir', call_setLogDir) - create_node_dir(basedir, "nothing to see here") - yield client.create_client(basedir) + yield fixture.create_node() self.failUnless(ns.called) def test_set_config_unescaped_furl_hash(self): diff --git a/src/allmydata/test/test_observer.py b/src/allmydata/test/test_observer.py index 134876be3..6d26b2470 100644 --- a/src/allmydata/test/test_observer.py +++ b/src/allmydata/test/test_observer.py @@ -4,15 +4,6 @@ Tests for allmydata.util.observer. Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - from twisted.trial import unittest from twisted.internet import defer, reactor from allmydata.util import observer diff --git a/src/allmydata/test/test_openmetrics.py b/src/allmydata/test/test_openmetrics.py index 66cbc7dec..4987aed11 100644 --- a/src/allmydata/test/test_openmetrics.py +++ b/src/allmydata/test/test_openmetrics.py @@ -4,18 +4,6 @@ Tests for ``/statistics?t=openmetrics``. Ported to Python 3. """ -from __future__ import print_function -from __future__ import absolute_import -from __future__ import division -from __future__ import unicode_literals - -from future.utils import PY2 - -if PY2: - # fmt: off - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - # fmt: on - from prometheus_client.openmetrics import parser from treq.testing import RequestTraversalAgent diff --git a/src/allmydata/test/test_pipeline.py b/src/allmydata/test/test_pipeline.py deleted file mode 100644 index 31d952836..000000000 --- a/src/allmydata/test/test_pipeline.py +++ /dev/null @@ -1,198 +0,0 @@ -""" -Tests for allmydata.util.pipeline. - -Ported to Python 3. 
-""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - -import gc - -from twisted.internet import defer -from twisted.trial import unittest -from twisted.python import log -from twisted.python.failure import Failure - -from allmydata.util import pipeline - - -class Pipeline(unittest.TestCase): - def pause(self, *args, **kwargs): - d = defer.Deferred() - self.calls.append( (d, args, kwargs) ) - return d - - def failUnlessCallsAre(self, expected): - #print(self.calls) - #print(expected) - self.failUnlessEqual(len(self.calls), len(expected), self.calls) - for i,c in enumerate(self.calls): - self.failUnlessEqual(c[1:], expected[i], str(i)) - - def test_basic(self): - self.calls = [] - finished = [] - p = pipeline.Pipeline(100) - - d = p.flush() # fires immediately - d.addCallbacks(finished.append, log.err) - self.failUnlessEqual(len(finished), 1) - finished = [] - - d = p.add(10, self.pause, "one") - # the call should start right away, and our return Deferred should - # fire right away - d.addCallbacks(finished.append, log.err) - self.failUnlessEqual(len(finished), 1) - self.failUnlessEqual(finished[0], None) - self.failUnlessCallsAre([ ( ("one",) , {} ) ]) - self.failUnlessEqual(p.gauge, 10) - - # pipeline: [one] - - finished = [] - d = p.add(20, self.pause, "two", kw=2) - # pipeline: [one, two] - - # the call and the Deferred should fire right away - d.addCallbacks(finished.append, log.err) - self.failUnlessEqual(len(finished), 1) - self.failUnlessEqual(finished[0], None) - self.failUnlessCallsAre([ ( ("one",) , {} ), - ( ("two",) , {"kw": 2} ), - ]) - self.failUnlessEqual(p.gauge, 30) - - self.calls[0][0].callback("one-result") - # pipeline: [two] - 
self.failUnlessEqual(p.gauge, 20) - - finished = [] - d = p.add(90, self.pause, "three", "posarg1") - # pipeline: [two, three] - flushed = [] - fd = p.flush() - fd.addCallbacks(flushed.append, log.err) - self.failUnlessEqual(flushed, []) - - # the call will be made right away, but the return Deferred will not, - # because the pipeline is now full. - d.addCallbacks(finished.append, log.err) - self.failUnlessEqual(len(finished), 0) - self.failUnlessCallsAre([ ( ("one",) , {} ), - ( ("two",) , {"kw": 2} ), - ( ("three", "posarg1"), {} ), - ]) - self.failUnlessEqual(p.gauge, 110) - - self.failUnlessRaises(pipeline.SingleFileError, p.add, 10, self.pause) - - # retiring either call will unblock the pipeline, causing the #3 - # Deferred to fire - self.calls[2][0].callback("three-result") - # pipeline: [two] - - self.failUnlessEqual(len(finished), 1) - self.failUnlessEqual(finished[0], None) - self.failUnlessEqual(flushed, []) - - # retiring call#2 will finally allow the flush() Deferred to fire - self.calls[1][0].callback("two-result") - self.failUnlessEqual(len(flushed), 1) - - def test_errors(self): - self.calls = [] - p = pipeline.Pipeline(100) - - d1 = p.add(200, self.pause, "one") - d2 = p.flush() - - finished = [] - d1.addBoth(finished.append) - self.failUnlessEqual(finished, []) - - flushed = [] - d2.addBoth(flushed.append) - self.failUnlessEqual(flushed, []) - - self.calls[0][0].errback(ValueError("oops")) - - self.failUnlessEqual(len(finished), 1) - f = finished[0] - self.failUnless(isinstance(f, Failure)) - self.failUnless(f.check(pipeline.PipelineError)) - self.failUnlessIn("PipelineError", str(f.value)) - self.failUnlessIn("ValueError", str(f.value)) - r = repr(f.value) - self.failUnless("ValueError" in r, r) - f2 = f.value.error - self.failUnless(f2.check(ValueError)) - - self.failUnlessEqual(len(flushed), 1) - f = flushed[0] - self.failUnless(isinstance(f, Failure)) - self.failUnless(f.check(pipeline.PipelineError)) - f2 = f.value.error - 
self.failUnless(f2.check(ValueError)) - - # now that the pipeline is in the failed state, any new calls will - # fail immediately - - d3 = p.add(20, self.pause, "two") - - finished = [] - d3.addBoth(finished.append) - self.failUnlessEqual(len(finished), 1) - f = finished[0] - self.failUnless(isinstance(f, Failure)) - self.failUnless(f.check(pipeline.PipelineError)) - r = repr(f.value) - self.failUnless("ValueError" in r, r) - f2 = f.value.error - self.failUnless(f2.check(ValueError)) - - d4 = p.flush() - flushed = [] - d4.addBoth(flushed.append) - self.failUnlessEqual(len(flushed), 1) - f = flushed[0] - self.failUnless(isinstance(f, Failure)) - self.failUnless(f.check(pipeline.PipelineError)) - f2 = f.value.error - self.failUnless(f2.check(ValueError)) - - def test_errors2(self): - self.calls = [] - p = pipeline.Pipeline(100) - - d1 = p.add(10, self.pause, "one") - d2 = p.add(20, self.pause, "two") - d3 = p.add(30, self.pause, "three") - d4 = p.flush() - - # one call fails, then the second one succeeds: make sure - # ExpandableDeferredList tolerates the second one - - flushed = [] - d4.addBoth(flushed.append) - self.failUnlessEqual(flushed, []) - - self.calls[0][0].errback(ValueError("oops")) - self.failUnlessEqual(len(flushed), 1) - f = flushed[0] - self.failUnless(isinstance(f, Failure)) - self.failUnless(f.check(pipeline.PipelineError)) - f2 = f.value.error - self.failUnless(f2.check(ValueError)) - - self.calls[1][0].callback("two-result") - self.calls[2][0].errback(ValueError("three-error")) - - del d1,d2,d3,d4 - gc.collect() # for PyPy diff --git a/src/allmydata/test/test_protocol_switch.py b/src/allmydata/test/test_protocol_switch.py new file mode 100644 index 000000000..4906896dc --- /dev/null +++ b/src/allmydata/test/test_protocol_switch.py @@ -0,0 +1,43 @@ +""" +Unit tests for ``allmydata.protocol_switch``. 
+ +By its nature, most of the testing needs to be end-to-end; essentially any test +that uses real Foolscap (``test_system.py``, integration tests) ensures +Foolscap still works. ``test_istorageserver.py`` tests the HTTP support. +""" + +from foolscap.negotiate import Negotiation + +from .common import TestCase +from ..protocol_switch import _PretendToBeNegotiation + + +class UtilityTests(TestCase): + """Tests for utilities in the protocol switch code.""" + + def test_metaclass(self): + """ + A class that has the ``_PretendToBeNegotiation`` metaclass will support + ``isinstance()``'s normal semantics on its own instances, but will also + indicate that ``Negotiation`` instances are its instances. + """ + + class Parent(metaclass=_PretendToBeNegotiation): + pass + + class Child(Parent): + pass + + class Other: + pass + + p = Parent() + self.assertIsInstance(p, Parent) + self.assertIsInstance(Negotiation(), Parent) + self.assertNotIsInstance(Other(), Parent) + + c = Child() + self.assertIsInstance(c, Child) + self.assertIsInstance(c, Parent) + self.assertIsInstance(Negotiation(), Child) + self.assertNotIsInstance(Other(), Child) diff --git a/src/allmydata/test/test_python2_regressions.py b/src/allmydata/test/test_python2_regressions.py deleted file mode 100644 index c641d2dba..000000000 --- a/src/allmydata/test/test_python2_regressions.py +++ /dev/null @@ -1,81 +0,0 @@ -""" -Tests to check for Python2 regressions -""" - -from __future__ import unicode_literals -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - -from unittest import skipUnless -from inspect import isclass - -from twisted.python.modules import getModule - -from testtools import ( - TestCase, -) -from testtools.matchers import ( - 
Equals, -) - -BLACKLIST = { - "allmydata.scripts.types_", - "allmydata.test._win_subprocess", - "allmydata.windows.registry", - "allmydata.windows.fixups", -} - - -def is_new_style(cls): - """ - :return bool: ``True`` if and only if the given class is "new style". - """ - # All new-style classes are instances of type. By definition. - return isinstance(cls, type) - -def defined_here(cls, where): - """ - :return bool: ``True`` if and only if the given class was defined in a - module with the given name. - - :note: Classes can lie about where they are defined. Try not to do that. - """ - return cls.__module__ == where - - -class PythonTwoRegressions(TestCase): - """ - Regression tests for Python 2 behaviors related to Python 3 porting. - """ - @skipUnless(PY2, "No point in running on Python 3.") - def test_new_style_classes(self): - """ - All classes in Tahoe-LAFS are new-style. - """ - newstyle = set() - classic = set() - for mod in getModule("allmydata").walkModules(): - if mod.name in BLACKLIST: - continue - - # iterAttributes will only work on loaded modules. So, load it. - mod.load() - - for attr in mod.iterAttributes(): - value = attr.load() - if isclass(value) and defined_here(value, mod.name): - if is_new_style(value): - newstyle.add(value) - else: - classic.add(value) - - self.assertThat( - classic, - Equals(set()), - "Expected to find no classic classes.", - ) diff --git a/src/allmydata/test/test_repairer.py b/src/allmydata/test/test_repairer.py index 88696000c..cf1cf843b 100644 --- a/src/allmydata/test/test_repairer.py +++ b/src/allmydata/test/test_repairer.py @@ -2,14 +2,6 @@ """ Ported to Python 3. 
""" -from __future__ import print_function -from __future__ import absolute_import -from __future__ import division -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 from allmydata.test import common from allmydata.monitor import Monitor @@ -251,6 +243,12 @@ class Verifier(GridTestMixin, unittest.TestCase, RepairTestMixin): self.judge_invisible_corruption) def test_corrupt_ueb(self): + # Note that in some rare situations this might fail, specifically if + # the length of the UEB is corrupted to be a value that is bigger than + # the size but less than 2000, it might not get caught... But that's + # mostly because in that case it doesn't meaningfully corrupt it. See + # _get_uri_extension_the_old_way() in layout.py for where the 2000 + # number comes from. self.basedir = "repairer/Verifier/corrupt_ueb" return self._help_test_verify(common._corrupt_uri_extension, self.judge_invisible_corruption) @@ -717,7 +715,7 @@ class Repairer(GridTestMixin, unittest.TestCase, RepairTestMixin, ss = self.g.servers_by_number[0] # we want to delete the share corresponding to the server # we're making not-respond - share = next(ss._get_bucket_shares(self.c0_filenode.get_storage_index()))[0] + share = next(ss.get_shares(self.c0_filenode.get_storage_index()))[0] self.delete_shares_numbered(self.uri, [share]) return self.c0_filenode.check_and_repair(Monitor()) d.addCallback(_then) diff --git a/src/allmydata/test/test_runner.py b/src/allmydata/test/test_runner.py index 44c7e1bee..bc55d507d 100644 --- a/src/allmydata/test/test_runner.py +++ b/src/allmydata/test/test_runner.py @@ -2,28 +2,10 @@ Ported to Python 3 """ -from __future__ import ( - absolute_import, -) -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from 
future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - -from six import ensure_text - import os.path, re, sys from os import linesep import locale -import six - -from testtools import ( - skipUnless, -) from testtools.matchers import ( MatchesListwise, MatchesAny, @@ -42,20 +24,26 @@ from twisted.trial import unittest from twisted.internet import reactor from twisted.python import usage +from twisted.python.runtime import platform from twisted.internet.defer import ( inlineCallbacks, DeferredList, ) -from twisted.python.filepath import FilePath -from twisted.python.runtime import ( - platform, +from twisted.internet.testing import ( + MemoryReactorClock, ) +from twisted.python.filepath import FilePath from allmydata.util import fileutil, pollmixin from allmydata.util.encodingutil import unicode_to_argv +from allmydata.util.pid import ( + check_pid_process, + _pidfile_to_lockpath, + ProcessInTheWay, +) from allmydata.test import common_util import allmydata -from allmydata.scripts.runner import ( - parse_options, +from allmydata.scripts.tahoe_run import ( + on_stdin_close, ) from .common import ( @@ -97,27 +85,6 @@ srcfile = allmydata.__file__ rootdir = get_root_from_file(srcfile) -class ParseOptionsTests(SyncTestCase): - """ - Tests for ``parse_options``. - """ - @skipUnless(six.PY2, "Only Python 2 exceptions must stringify to bytes.") - def test_nonascii_unknown_subcommand_python2(self): - """ - When ``parse_options`` is called with an argv indicating a subcommand that - does not exist and which also contains non-ascii characters, the - exception it raises includes the subcommand encoded as UTF-8. 
- """ - tricky = u"\u00F6" - try: - parse_options([tricky]) - except usage.error as e: - self.assertEqual( - b"Unknown command: \\xf6", - b"{}".format(e), - ) - - class ParseOrExitTests(SyncTestCase): """ Tests for ``parse_or_exit``. @@ -158,18 +125,14 @@ def run_bintahoe(extra_argv, python_options=None): :return: A three-tuple of stdout (unicode), stderr (unicode), and the child process "returncode" (int). """ - executable = ensure_text(sys.executable) - argv = [executable] + argv = [sys.executable] if python_options is not None: argv.extend(python_options) argv.extend([u"-b", u"-m", u"allmydata.scripts.runner"]) argv.extend(extra_argv) argv = list(unicode_to_argv(arg) for arg in argv) p = Popen(argv, stdout=PIPE, stderr=PIPE) - if PY2: - encoding = "utf-8" - else: - encoding = locale.getpreferredencoding(False) + encoding = locale.getpreferredencoding(False) out = p.stdout.read().decode(encoding) err = p.stderr.read().decode(encoding) returncode = p.wait() @@ -183,10 +146,7 @@ class BinTahoe(common_util.SignalMixin, unittest.TestCase): """ tricky = u"\u00F6" out, err, returncode = run_bintahoe([tricky]) - if PY2: - expected = u"Unknown command: \\xf6" - else: - expected = u"Unknown command: \xf6" + expected = u"Unknown command: \xf6" self.assertEqual(returncode, 1) self.assertIn( expected, @@ -203,10 +163,10 @@ class BinTahoe(common_util.SignalMixin, unittest.TestCase): # but on Windows we parse the whole command line string ourselves so # we have to have our own implementation of skipping these options. - # -t is a harmless option that warns about tabs so we can add it + # -B is a harmless option that prevents writing bytecode so we can add it # without impacting other behavior noticably. 
- out, err, returncode = run_bintahoe([u"--version"], python_options=[u"-t"]) - self.assertEqual(returncode, 0) + out, err, returncode = run_bintahoe([u"--version"], python_options=[u"-B"]) + self.assertEqual(returncode, 0, f"Out:\n{out}\nErr:\n{err}") self.assertTrue(out.startswith(allmydata.__appname__ + '/')) def test_help_eliot_destinations(self): @@ -418,9 +378,7 @@ class RunNode(common_util.SignalMixin, unittest.TestCase, pollmixin.PollMixin): tahoe.active() - # We don't keep track of PIDs in files on Windows. - if not platform.isWindows(): - self.assertTrue(tahoe.twistd_pid_file.exists()) + self.assertTrue(tahoe.twistd_pid_file.exists()) self.assertTrue(tahoe.node_url_file.exists()) # rm this so we can detect when the second incarnation is ready @@ -493,9 +451,7 @@ class RunNode(common_util.SignalMixin, unittest.TestCase, pollmixin.PollMixin): # change on restart storage_furl = fileutil.read(tahoe.storage_furl_file.path) - # We don't keep track of PIDs in files on Windows. - if not platform.isWindows(): - self.assertTrue(tahoe.twistd_pid_file.exists()) + self.assertTrue(tahoe.twistd_pid_file.exists()) # rm this so we can detect when the second incarnation is ready tahoe.node_url_file.remove() @@ -513,22 +469,23 @@ class RunNode(common_util.SignalMixin, unittest.TestCase, pollmixin.PollMixin): fileutil.read(tahoe.storage_furl_file.path), ) - if not platform.isWindows(): - self.assertTrue( - tahoe.twistd_pid_file.exists(), - "PID file ({}) didn't exist when we expected it to. " - "These exist: {}".format( - tahoe.twistd_pid_file, - tahoe.twistd_pid_file.parent().listdir(), - ), - ) + self.assertTrue( + tahoe.twistd_pid_file.exists(), + "PID file ({}) didn't exist when we expected it to. 
" + "These exist: {}".format( + tahoe.twistd_pid_file, + tahoe.twistd_pid_file.parent().listdir(), + ), + ) yield tahoe.stop_and_wait() + # twistd.pid should be gone by now -- except on Windows, where + # killing a subprocess immediately exits with no chance for + # any shutdown code (that is, no Twisted shutdown hooks can + # run). if not platform.isWindows(): - # twistd.pid should be gone by now. self.assertFalse(tahoe.twistd_pid_file.exists()) - def _remove(self, res, file): fileutil.remove(file) return res @@ -610,8 +567,9 @@ class RunNode(common_util.SignalMixin, unittest.TestCase, pollmixin.PollMixin): ), ) + # It should not be running (but windows shutdown can't run + # code so the PID file still exists there). if not platform.isWindows(): - # It should not be running. self.assertFalse(tahoe.twistd_pid_file.exists()) # Wait for the operation to *complete*. If we got this far it's @@ -621,3 +579,100 @@ class RunNode(common_util.SignalMixin, unittest.TestCase, pollmixin.PollMixin): # What's left is a perfect indicator that the process has exited and # we won't get blamed for leaving the reactor dirty. yield client_running + + +def _simulate_windows_stdin_close(stdio): + """ + on Unix we can just close all the readers, correctly "simulating" + a stdin close .. of course, Windows has to be difficult + """ + stdio.writeConnectionLost() + stdio.readConnectionLost() + + +class OnStdinCloseTests(SyncTestCase): + """ + Tests for on_stdin_close + """ + + def test_close_called(self): + """ + our on-close method is called when stdin closes + """ + reactor = MemoryReactorClock() + called = [] + + def onclose(): + called.append(True) + transport = on_stdin_close(reactor, onclose) + self.assertEqual(called, []) + + if platform.isWindows(): + _simulate_windows_stdin_close(transport) + else: + for reader in reactor.getReaders(): + reader.loseConnection() + reactor.advance(1) # ProcessReader does a callLater(0, ..) 
+ + self.assertEqual(called, [True]) + + def test_exception_ignored(self): + """ + An exception from our on-close function is discarded. + """ + reactor = MemoryReactorClock() + called = [] + + def onclose(): + called.append(True) + raise RuntimeError("unexpected error") + transport = on_stdin_close(reactor, onclose) + self.assertEqual(called, []) + + if platform.isWindows(): + _simulate_windows_stdin_close(transport) + else: + for reader in reactor.getReaders(): + reader.loseConnection() + reactor.advance(1) # ProcessReader does a callLater(0, ..) + + self.assertEqual(called, [True]) + + +class PidFileLocking(SyncTestCase): + """ + Direct tests for allmydata.util.pid functions + """ + + def test_locking(self): + """ + Fail to create a pidfile if another process has the lock already. + """ + # this can't just be "our" process because the locking library + # allows the same process to acquire a lock multiple times. + pidfile = FilePath(self.mktemp()) + lockfile = _pidfile_to_lockpath(pidfile) + + with open("other_lock.py", "w") as f: + f.write( + "\n".join([ + "import filelock, time, sys", + "with filelock.FileLock(sys.argv[1], timeout=1):", + " sys.stdout.write('.\\n')", + " sys.stdout.flush()", + " time.sleep(10)", + ]) + ) + proc = Popen( + [sys.executable, "other_lock.py", lockfile.path], + stdout=PIPE, + stderr=PIPE, + ) + # make sure our subprocess has had time to acquire the lock + # for sure (from the "." it prints) + proc.stdout.read(2) + + # acquiring the same lock should fail; it is locked by the subprocess + with self.assertRaises(ProcessInTheWay): + check_pid_process(pidfile) + proc.terminate() diff --git a/src/allmydata/test/test_sftp.py b/src/allmydata/test/test_sftp.py index 2214e4e5b..a7de35320 100644 --- a/src/allmydata/test/test_sftp.py +++ b/src/allmydata/test/test_sftp.py @@ -1,14 +1,6 @@ """ Ported to Python 3. 
""" -from __future__ import print_function -from __future__ import absolute_import -from __future__ import division -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 import re, struct, traceback, time, calendar from stat import S_IFREG, S_IFDIR diff --git a/src/allmydata/test/test_spans.py b/src/allmydata/test/test_spans.py index 281f916c4..578075e8d 100644 --- a/src/allmydata/test/test_spans.py +++ b/src/allmydata/test/test_spans.py @@ -2,17 +2,6 @@ Tests for allmydata.util.spans. """ -from __future__ import print_function -from __future__ import absolute_import -from __future__ import division -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - -from past.builtins import long - import binascii import hashlib @@ -125,9 +114,6 @@ class ByteSpans(unittest.TestCase): s1 = Spans(3, 4) # 3,4,5,6 self._check1(s1) - s1 = Spans(long(3), long(4)) # 3,4,5,6 - self._check1(s1) - s2 = Spans(s1) self._check1(s2) @@ -455,9 +441,9 @@ class StringSpans(unittest.TestCase): self.failUnlessEqual(ds.get(2, 4), b"fear") ds = klass() - ds.add(long(2), b"four") - ds.add(long(3), b"ea") - self.failUnlessEqual(ds.get(long(2), long(4)), b"fear") + ds.add(2, b"four") + ds.add(3, b"ea") + self.failUnlessEqual(ds.get(2, 4), b"fear") def do_scan(self, klass): diff --git a/src/allmydata/test/test_statistics.py b/src/allmydata/test/test_statistics.py index 476f0a084..5a382e686 100644 --- a/src/allmydata/test/test_statistics.py +++ b/src/allmydata/test/test_statistics.py @@ -3,16 +3,8 @@ Tests for allmydata.util.statistics. Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals -from future.utils import PY2 -if PY2: - from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - -from six.moves import StringIO # native string StringIO +from io import StringIO from twisted.trial import unittest diff --git a/src/allmydata/test/test_stats.py b/src/allmydata/test/test_stats.py index e56f9d444..aba3a0e9c 100644 --- a/src/allmydata/test/test_stats.py +++ b/src/allmydata/test/test_stats.py @@ -1,14 +1,6 @@ """ Ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 from twisted.trial import unittest from twisted.application import service @@ -17,7 +9,7 @@ from allmydata.util import pollmixin import allmydata.test.common_util as testutil class FasterMonitor(CPUUsageMonitor): - POLL_INTERVAL = 0.1 + POLL_INTERVAL = 0.01 class CPUUsage(unittest.TestCase, pollmixin.PollMixin, testutil.StallMixin): @@ -36,9 +28,9 @@ class CPUUsage(unittest.TestCase, pollmixin.PollMixin, testutil.StallMixin): def _poller(): return bool(len(m.samples) == m.HISTORY_LENGTH+1) d = self.poll(_poller) - # pause one more second, to make sure that the history-trimming code - # is exercised - d.addCallback(self.stall, 1.0) + # pause a couple more intervals, to make sure that the history-trimming + # code is exercised + d.addCallback(self.stall, FasterMonitor.POLL_INTERVAL * 2) def _check(res): s = m.get_stats() self.failUnless("cpu_monitor.1min_avg" in s) diff --git 
a/src/allmydata/test/test_storage.py b/src/allmydata/test/test_storage.py index d18960a1e..2964206c7 100644 --- a/src/allmydata/test/test_storage.py +++ b/src/allmydata/test/test_storage.py @@ -3,44 +3,62 @@ Tests for allmydata.storage. Ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals -from future.utils import native_str, PY2, bytes_to_native_str, bchr -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 +from __future__ import annotations +from future.utils import bchr from six import ensure_str +from io import ( + BytesIO, +) import time import os.path import platform import stat import struct import shutil +from functools import partial from uuid import uuid4 +from testtools.matchers import ( + Equals, + NotEquals, + Contains, + HasLength, + IsInstance, +) + from twisted.trial import unittest from twisted.internet import defer from twisted.internet.task import Clock -from hypothesis import given, strategies +from hypothesis import given, strategies, example import itertools from allmydata import interfaces from allmydata.util import fileutil, hashutil, base32 -from allmydata.storage.server import StorageServer, DEFAULT_RENEWAL_TIME +from allmydata.storage.server import ( + StorageServer, DEFAULT_RENEWAL_TIME, FoolscapStorageServer, +) from allmydata.storage.shares import get_share_file from allmydata.storage.mutable import MutableShareFile -from allmydata.storage.immutable import BucketWriter, BucketReader, ShareFile +from allmydata.storage.mutable_schema import ( + ALL_SCHEMAS as ALL_MUTABLE_SCHEMAS, +) +from allmydata.storage.immutable import ( + BucketWriter, BucketReader, ShareFile, FoolscapBucketWriter, + FoolscapBucketReader, +) +from allmydata.storage.immutable_schema import ( + ALL_SCHEMAS as 
ALL_IMMUTABLE_SCHEMAS, +) from allmydata.storage.common import storage_index_to_dir, \ UnknownMutableContainerVersionError, UnknownImmutableContainerVersionError, \ si_b2a, si_a2b from allmydata.storage.lease import LeaseInfo from allmydata.immutable.layout import WriteBucketProxy, WriteBucketProxy_v2, \ - ReadBucketProxy + ReadBucketProxy, _WriteBuffer from allmydata.mutable.layout import MDMFSlotWriteProxy, MDMFSlotReadProxy, \ LayoutInvalid, MDMFSIGNABLEHEADER, \ SIGNED_PREFIX, MDMFHEADER, \ @@ -56,27 +74,42 @@ from allmydata.test.no_network import NoNetworkServer from allmydata.storage_client import ( _StorageServer, ) -from .common import LoggingServiceParent, ShouldFailMixin +from .common import ( + LoggingServiceParent, + ShouldFailMixin, + FakeDisk, + SyncTestCase, + AsyncTestCase, +) + from .common_util import FakeCanary +from .common_storage import ( + upload_immutable, + upload_mutable, +) +from .strategies import ( + offsets, + lengths, +) -class UtilTests(unittest.TestCase): +class UtilTests(SyncTestCase): """Tests for allmydata.storage.common and .shares.""" def test_encoding(self): """b2a/a2b are the same as base32.""" s = b"\xFF HELLO \xF3" result = si_b2a(s) - self.assertEqual(base32.b2a(s), result) - self.assertEqual(si_a2b(result), s) + self.assertThat(base32.b2a(s), Equals(result)) + self.assertThat(si_a2b(result), Equals(s)) def test_storage_index_to_dir(self): """storage_index_to_dir creates a native string path.""" s = b"\xFF HELLO \xF3" path = storage_index_to_dir(s) parts = os.path.split(path) - self.assertEqual(parts[0], parts[1][:2]) - self.assertIsInstance(path, native_str) + self.assertThat(parts[0], Equals(parts[1][:2])) + self.assertThat(path, IsInstance(str)) def test_get_share_file_mutable(self): """A mutable share is identified by get_share_file().""" @@ -84,16 +117,16 @@ class UtilTests(unittest.TestCase): msf = MutableShareFile(path) msf.create(b"12", b"abc") # arbitrary values loaded = get_share_file(path) - 
self.assertIsInstance(loaded, MutableShareFile) - self.assertEqual(loaded.home, path) + self.assertThat(loaded, IsInstance(MutableShareFile)) + self.assertThat(loaded.home, Equals(path)) def test_get_share_file_immutable(self): """An immutable share is identified by get_share_file().""" path = self.mktemp() _ = ShareFile(path, max_size=1000, create=True) loaded = get_share_file(path) - self.assertIsInstance(loaded, ShareFile) - self.assertEqual(loaded.home, path) + self.assertThat(loaded, IsInstance(ShareFile)) + self.assertThat(loaded.home, Equals(path)) class FakeStatsProvider(object): @@ -102,7 +135,8 @@ class FakeStatsProvider(object): def register_producer(self, producer): pass -class Bucket(unittest.TestCase): + +class Bucket(SyncTestCase): def make_workdir(self, name): basedir = os.path.join("storage", "Bucket", name) incoming = os.path.join(basedir, "tmp", "bucket") @@ -128,26 +162,26 @@ class Bucket(unittest.TestCase): def test_create(self): incoming, final = self.make_workdir("test_create") - bw = BucketWriter(self, incoming, final, 200, self.make_lease()) - bw.remote_write(0, b"a"*25) - bw.remote_write(25, b"b"*25) - bw.remote_write(50, b"c"*25) - bw.remote_write(75, b"d"*7) - bw.remote_close() + bw = BucketWriter(self, incoming, final, 200, self.make_lease(), Clock()) + bw.write(0, b"a"*25) + bw.write(25, b"b"*25) + bw.write(50, b"c"*25) + bw.write(75, b"d"*7) + bw.close() def test_readwrite(self): incoming, final = self.make_workdir("test_readwrite") - bw = BucketWriter(self, incoming, final, 200, self.make_lease()) - bw.remote_write(0, b"a"*25) - bw.remote_write(25, b"b"*25) - bw.remote_write(50, b"c"*7) # last block may be short - bw.remote_close() + bw = BucketWriter(self, incoming, final, 200, self.make_lease(), Clock()) + bw.write(0, b"a"*25) + bw.write(25, b"b"*25) + bw.write(50, b"c"*7) # last block may be short + bw.close() # now read from it br = BucketReader(self, bw.finalhome) - self.failUnlessEqual(br.remote_read(0, 25), b"a"*25) - 
self.failUnlessEqual(br.remote_read(25, 25), b"b"*25) - self.failUnlessEqual(br.remote_read(50, 7), b"c"*7) + self.assertThat(br.read(0, 25), Equals(b"a"*25)) + self.assertThat(br.read(25, 25), Equals(b"b"*25)) + self.assertThat(br.read(50, 7), Equals(b"c"*7)) def test_write_past_size_errors(self): """Writing beyond the size of the bucket throws an exception.""" @@ -155,9 +189,9 @@ class Bucket(unittest.TestCase): incoming, final = self.make_workdir( "test_write_past_size_errors-{}".format(i) ) - bw = BucketWriter(self, incoming, final, 200, self.make_lease()) + bw = BucketWriter(self, incoming, final, 200, self.make_lease(), Clock()) with self.assertRaises(DataTooLargeError): - bw.remote_write(offset, b"a" * length) + bw.write(offset, b"a" * length) @given( maybe_overlapping_offset=strategies.integers(min_value=0, max_value=98), @@ -174,29 +208,28 @@ class Bucket(unittest.TestCase): expected_data = b"".join(bchr(i) for i in range(100)) incoming, final = self.make_workdir("overlapping_writes_{}".format(uuid4())) bw = BucketWriter( - self, incoming, final, length, self.make_lease(), + self, incoming, final, length, self.make_lease(), Clock() ) # Three writes: 10-19, 30-39, 50-59. This allows for a bunch of holes. 
- bw.remote_write(10, expected_data[10:20]) - bw.remote_write(30, expected_data[30:40]) - bw.remote_write(50, expected_data[50:60]) + bw.write(10, expected_data[10:20]) + bw.write(30, expected_data[30:40]) + bw.write(50, expected_data[50:60]) # Then, an overlapping write but with matching data: - bw.remote_write( + bw.write( maybe_overlapping_offset, expected_data[ maybe_overlapping_offset:maybe_overlapping_offset + maybe_overlapping_length ] ) # Now fill in the holes: - bw.remote_write(0, expected_data[0:10]) - bw.remote_write(20, expected_data[20:30]) - bw.remote_write(40, expected_data[40:50]) - bw.remote_write(60, expected_data[60:]) - bw.remote_close() + bw.write(0, expected_data[0:10]) + bw.write(20, expected_data[20:30]) + bw.write(40, expected_data[40:50]) + bw.write(60, expected_data[60:]) + bw.close() br = BucketReader(self, bw.finalhome) - self.assertEqual(br.remote_read(0, length), expected_data) - + self.assertEqual(br.read(0, length), expected_data) @given( maybe_overlapping_offset=strategies.integers(min_value=0, max_value=98), @@ -212,24 +245,56 @@ class Bucket(unittest.TestCase): length = 100 incoming, final = self.make_workdir("overlapping_writes_{}".format(uuid4())) bw = BucketWriter( - self, incoming, final, length, self.make_lease(), + self, incoming, final, length, self.make_lease(), Clock() ) # Three writes: 10-19, 30-39, 50-59. This allows for a bunch of holes. - bw.remote_write(10, b"1" * 10) - bw.remote_write(30, b"1" * 10) - bw.remote_write(50, b"1" * 10) + bw.write(10, b"1" * 10) + bw.write(30, b"1" * 10) + bw.write(50, b"1" * 10) # Then, write something that might overlap with some of them, but # conflicts. Then fill in holes left by first three writes. Conflict is # inevitable. 
with self.assertRaises(ConflictingWriteError): - bw.remote_write( + bw.write( maybe_overlapping_offset, b'X' * min(maybe_overlapping_length, length - maybe_overlapping_offset), ) - bw.remote_write(0, b"1" * 10) - bw.remote_write(20, b"1" * 10) - bw.remote_write(40, b"1" * 10) - bw.remote_write(60, b"1" * 40) + bw.write(0, b"1" * 10) + bw.write(20, b"1" * 10) + bw.write(40, b"1" * 10) + bw.write(60, b"1" * 40) + + @given( + offsets=strategies.lists( + strategies.integers(min_value=0, max_value=99), + min_size=20, + max_size=20 + ), + ) + @example(offsets=[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 40, 70]) + def test_writes_return_when_finished( + self, offsets + ): + """ + The ``BucketWriter.write()`` return true if and only if the maximum + size has been reached via potentially overlapping writes. The + remaining ranges can be checked via ``BucketWriter.required_ranges()``. + """ + incoming, final = self.make_workdir("overlapping_writes_{}".format(uuid4())) + bw = BucketWriter( + self, incoming, final, 100, self.make_lease(), Clock() + ) + local_written = [0] * 100 + for offset in offsets: + length = min(30, 100 - offset) + data = b"1" * length + for i in range(offset, offset+length): + local_written[i] = 1 + finished = bw.write(offset, data) + self.assertEqual(finished, sum(local_written) == 100) + required_ranges = bw.required_ranges() + for i in range(0, 100): + self.assertEqual(local_written[i] == 1, required_ranges.get(i) is None) def test_read_past_end_of_share_data(self): # test vector for immutable files (hard-coded contents of an immutable share @@ -274,16 +339,77 @@ class Bucket(unittest.TestCase): # Now read from it. br = BucketReader(mockstorageserver, final) - self.failUnlessEqual(br.remote_read(0, len(share_data)), share_data) + self.assertThat(br.read(0, len(share_data)), Equals(share_data)) # Read past the end of share data to get the cancel secret. 
read_length = len(share_data) + len(ownernumber) + len(renewsecret) + len(cancelsecret) - result_of_read = br.remote_read(0, read_length) - self.failUnlessEqual(result_of_read, share_data) + result_of_read = br.read(0, read_length) + self.assertThat(result_of_read, Equals(share_data)) + + result_of_read = br.read(0, len(share_data)+1) + self.assertThat(result_of_read, Equals(share_data)) + + def _assert_timeout_only_after_30_minutes(self, clock, bw): + """ + The ``BucketWriter`` times out and is closed after 30 minutes, but not + sooner. + """ + self.assertFalse(bw.closed) + # 29 minutes pass. Everything is fine. + for i in range(29): + clock.advance(60) + self.assertFalse(bw.closed, "Bucket closed after only %d minutes" % (i + 1,)) + # After the 30th minute, the bucket is closed due to lack of writes. + clock.advance(60) + self.assertTrue(bw.closed) + + def test_bucket_expires_if_no_writes_for_30_minutes(self): + """ + If a ``BucketWriter`` receives no writes for 30 minutes, it is removed. + """ + incoming, final = self.make_workdir("test_bucket_expires") + clock = Clock() + bw = BucketWriter(self, incoming, final, 200, self.make_lease(), clock) + self._assert_timeout_only_after_30_minutes(clock, bw) + + def test_bucket_writes_delay_timeout(self): + """ + So long as the ``BucketWriter`` receives writes, the the removal + timeout is put off. + """ + incoming, final = self.make_workdir("test_bucket_writes_delay_timeout") + clock = Clock() + bw = BucketWriter(self, incoming, final, 200, self.make_lease(), clock) + # 29 minutes pass, getting close to the timeout... + clock.advance(29 * 60) + # .. but we receive a write! So that should delay the timeout again to + # another 30 minutes. + bw.write(0, b"hello") + self._assert_timeout_only_after_30_minutes(clock, bw) + + def test_bucket_closing_cancels_timeout(self): + """ + Closing cancels the ``BucketWriter`` timeout. 
+ """ + incoming, final = self.make_workdir("test_bucket_close_timeout") + clock = Clock() + bw = BucketWriter(self, incoming, final, 10, self.make_lease(), clock) + self.assertTrue(clock.getDelayedCalls()) + bw.close() + self.assertFalse(clock.getDelayedCalls()) + + def test_bucket_aborting_cancels_timeout(self): + """ + Closing cancels the ``BucketWriter`` timeout. + """ + incoming, final = self.make_workdir("test_bucket_abort_timeout") + clock = Clock() + bw = BucketWriter(self, incoming, final, 10, self.make_lease(), clock) + self.assertTrue(clock.getDelayedCalls()) + bw.abort() + self.assertFalse(clock.getDelayedCalls()) - result_of_read = br.remote_read(0, len(share_data)+1) - self.failUnlessEqual(result_of_read, share_data) class RemoteBucket(object): @@ -305,15 +431,15 @@ class RemoteBucket(object): return defer.maybeDeferred(_call) -class BucketProxy(unittest.TestCase): +class BucketProxy(AsyncTestCase): def make_bucket(self, name, size): basedir = os.path.join("storage", "BucketProxy", name) incoming = os.path.join(basedir, "tmp", "bucket") final = os.path.join(basedir, "bucket") fileutil.make_dirs(basedir) fileutil.make_dirs(os.path.join(basedir, "tmp")) - bw = BucketWriter(self, incoming, final, size, self.make_lease()) - rb = RemoteBucket(bw) + bw = BucketWriter(self, incoming, final, size, self.make_lease(), Clock()) + rb = RemoteBucket(FoolscapBucketWriter(bw)) return bw, rb, final def make_lease(self): @@ -338,8 +464,8 @@ class BucketProxy(unittest.TestCase): block_size=10, num_segments=5, num_share_hashes=3, - uri_extension_size_max=500) - self.failUnless(interfaces.IStorageBucketWriter.providedBy(bp), bp) + uri_extension_size=500) + self.assertTrue(interfaces.IStorageBucketWriter.providedBy(bp), bp) def _do_test_readwrite(self, name, header_size, wbp_class, rbp_class): # Let's pretend each share has 100 bytes of data, and that there are @@ -369,7 +495,7 @@ class BucketProxy(unittest.TestCase): block_size=25, num_segments=4, num_share_hashes=3, - 
uri_extension_size_max=len(uri_extension)) + uri_extension_size=len(uri_extension)) d = bp.put_header() d.addCallback(lambda res: bp.put_block(0, b"a"*25)) @@ -385,11 +511,11 @@ class BucketProxy(unittest.TestCase): # now read everything back def _start_reading(res): br = BucketReader(self, sharefname) - rb = RemoteBucket(br) + rb = RemoteBucket(FoolscapBucketReader(br)) server = NoNetworkServer(b"abc", None) rbp = rbp_class(rb, server, storage_index=b"") - self.failUnlessIn("to peer", repr(rbp)) - self.failUnless(interfaces.IStorageBucketReader.providedBy(rbp), rbp) + self.assertThat(repr(rbp), Contains("to peer")) + self.assertTrue(interfaces.IStorageBucketReader.providedBy(rbp), rbp) d1 = rbp.get_block_data(0, 25, 25) d1.addCallback(lambda res: self.failUnlessEqual(res, b"a"*25)) @@ -425,24 +551,26 @@ class BucketProxy(unittest.TestCase): return self._do_test_readwrite("test_readwrite_v2", 0x44, WriteBucketProxy_v2, ReadBucketProxy) -class Server(unittest.TestCase): +class Server(AsyncTestCase): def setUp(self): + super(Server, self).setUp() self.sparent = LoggingServiceParent() self.sparent.startService() self._lease_secret = itertools.count() - def tearDown(self): - return self.sparent.stopService() + self.addCleanup(self.sparent.stopService) def workdir(self, name): basedir = os.path.join("storage", "Server", name) return basedir - def create(self, name, reserved_space=0, klass=StorageServer, get_current_time=time.time): + def create(self, name, reserved_space=0, klass=StorageServer, clock=None): + if clock is None: + clock = Clock() workdir = self.workdir(name) ss = klass(workdir, b"\x00" * 20, reserved_space=reserved_space, stats_provider=FakeStatsProvider(), - get_current_time=get_current_time) + clock=clock) ss.setServiceParent(self.sparent) return ss @@ -451,31 +579,38 @@ class Server(unittest.TestCase): def test_declares_fixed_1528(self): ss = self.create("test_declares_fixed_1528") - ver = ss.remote_get_version() + ver = ss.get_version() sv1 = 
ver[b'http://allmydata.org/tahoe/protocols/storage/v1'] - self.failUnless(sv1.get(b'prevents-read-past-end-of-share-data'), sv1) + self.assertTrue(sv1.get(b'prevents-read-past-end-of-share-data'), sv1) def test_declares_maximum_share_sizes(self): ss = self.create("test_declares_maximum_share_sizes") - ver = ss.remote_get_version() + ver = ss.get_version() sv1 = ver[b'http://allmydata.org/tahoe/protocols/storage/v1'] - self.failUnlessIn(b'maximum-immutable-share-size', sv1) - self.failUnlessIn(b'maximum-mutable-share-size', sv1) + self.assertThat(sv1, Contains(b'maximum-immutable-share-size')) + self.assertThat(sv1, Contains(b'maximum-mutable-share-size')) def test_declares_available_space(self): ss = self.create("test_declares_available_space") - ver = ss.remote_get_version() + ver = ss.get_version() sv1 = ver[b'http://allmydata.org/tahoe/protocols/storage/v1'] - self.failUnlessIn(b'available-space', sv1) + self.assertThat(sv1, Contains(b'available-space')) - def allocate(self, ss, storage_index, sharenums, size, canary=None): + def allocate(self, ss, storage_index, sharenums, size, renew_leases=True): + """ + Call directly into the storage server's allocate_buckets implementation, + skipping the Foolscap layer. 
+ """ renew_secret = hashutil.my_renewal_secret_hash(b"%d" % next(self._lease_secret)) cancel_secret = hashutil.my_cancel_secret_hash(b"%d" % next(self._lease_secret)) - if not canary: - canary = FakeCanary() - return ss.remote_allocate_buckets(storage_index, - renew_secret, cancel_secret, - sharenums, size, canary) + if isinstance(ss, FoolscapStorageServer): + ss = ss._server + return ss.allocate_buckets( + storage_index, + renew_secret, cancel_secret, + sharenums, size, + renew_leases=renew_leases, + ) def test_large_share(self): syslow = platform.system().lower() @@ -489,17 +624,17 @@ class Server(unittest.TestCase): ss = self.create("test_large_share") already,writers = self.allocate(ss, b"allocate", [0], 2**32+2) - self.failUnlessEqual(already, set()) - self.failUnlessEqual(set(writers.keys()), set([0])) + self.assertThat(set(), Equals(already)) + self.assertThat(set([0]), Equals(set(writers.keys()))) shnum, bucket = list(writers.items())[0] # This test is going to hammer your filesystem if it doesn't make a sparse file for this. :-( - bucket.remote_write(2**32, b"ab") - bucket.remote_close() + bucket.write(2**32, b"ab") + bucket.close() - readers = ss.remote_get_buckets(b"allocate") + readers = ss.get_buckets(b"allocate") reader = readers[shnum] - self.failUnlessEqual(reader.remote_read(2**32, 2), b"ab") + self.assertThat(b"ab", Equals(reader.read(2**32, 2))) def test_dont_overfill_dirs(self): """ @@ -510,8 +645,8 @@ class Server(unittest.TestCase): ss = self.create("test_dont_overfill_dirs") already, writers = self.allocate(ss, b"storageindex", [0], 10) for i, wb in writers.items(): - wb.remote_write(0, b"%10d" % i) - wb.remote_close() + wb.write(0, b"%10d" % i) + wb.close() storedir = os.path.join(self.workdir("test_dont_overfill_dirs"), "shares") children_of_storedir = set(os.listdir(storedir)) @@ -520,26 +655,26 @@ class Server(unittest.TestCase): # chars the same as the first storageindex. 
already, writers = self.allocate(ss, b"storageindey", [0], 10) for i, wb in writers.items(): - wb.remote_write(0, b"%10d" % i) - wb.remote_close() + wb.write(0, b"%10d" % i) + wb.close() storedir = os.path.join(self.workdir("test_dont_overfill_dirs"), "shares") new_children_of_storedir = set(os.listdir(storedir)) - self.failUnlessEqual(children_of_storedir, new_children_of_storedir) + self.assertThat(new_children_of_storedir, Equals(children_of_storedir)) def test_remove_incoming(self): ss = self.create("test_remove_incoming") already, writers = self.allocate(ss, b"vid", list(range(3)), 10) for i,wb in writers.items(): - wb.remote_write(0, b"%10d" % i) - wb.remote_close() + wb.write(0, b"%10d" % i) + wb.close() incoming_share_dir = wb.incominghome incoming_bucket_dir = os.path.dirname(incoming_share_dir) incoming_prefix_dir = os.path.dirname(incoming_bucket_dir) incoming_dir = os.path.dirname(incoming_prefix_dir) - self.failIf(os.path.exists(incoming_bucket_dir), incoming_bucket_dir) - self.failIf(os.path.exists(incoming_prefix_dir), incoming_prefix_dir) - self.failUnless(os.path.exists(incoming_dir), incoming_dir) + self.assertFalse(os.path.exists(incoming_bucket_dir), incoming_bucket_dir) + self.assertFalse(os.path.exists(incoming_prefix_dir), incoming_prefix_dir) + self.assertTrue(os.path.exists(incoming_dir), incoming_dir) def test_abort(self): # remote_abort, when called on a writer, should make sure that @@ -547,72 +682,142 @@ class Server(unittest.TestCase): # server when accounting for space. ss = self.create("test_abort") already, writers = self.allocate(ss, b"allocate", [0, 1, 2], 150) - self.failIfEqual(ss.allocated_size(), 0) + self.assertThat(ss.allocated_size(), NotEquals(0)) # Now abort the writers. 
for writer in writers.values(): - writer.remote_abort() - self.failUnlessEqual(ss.allocated_size(), 0) + writer.abort() + self.assertThat(ss.allocated_size(), Equals(0)) + def test_immutable_length(self): + """ + ``get_immutable_share_length()`` returns the length of an immutable + share, as does ``BucketWriter.get_length()``. + """ + ss = self.create("test_immutable_length") + _, writers = self.allocate(ss, b"allocate", [22], 75) + bucket = writers[22] + bucket.write(0, b"X" * 75) + bucket.close() + self.assertThat(ss.get_immutable_share_length(b"allocate", 22), Equals(75)) + self.assertThat(ss.get_buckets(b"allocate")[22].get_length(), Equals(75)) def test_allocate(self): ss = self.create("test_allocate") - self.failUnlessEqual(ss.remote_get_buckets(b"allocate"), {}) + self.assertThat(ss.get_buckets(b"allocate"), Equals({})) already,writers = self.allocate(ss, b"allocate", [0,1,2], 75) - self.failUnlessEqual(already, set()) - self.failUnlessEqual(set(writers.keys()), set([0,1,2])) + self.assertThat(already, Equals(set())) + self.assertThat(set(writers.keys()), Equals(set([0,1,2]))) # while the buckets are open, they should not count as readable - self.failUnlessEqual(ss.remote_get_buckets(b"allocate"), {}) + self.assertThat(ss.get_buckets(b"allocate"), Equals({})) # close the buckets for i,wb in writers.items(): - wb.remote_write(0, b"%25d" % i) - wb.remote_close() + wb.write(0, b"%25d" % i) + wb.close() # aborting a bucket that was already closed is a no-op - wb.remote_abort() + wb.abort() # now they should be readable - b = ss.remote_get_buckets(b"allocate") - self.failUnlessEqual(set(b.keys()), set([0,1,2])) - self.failUnlessEqual(b[0].remote_read(0, 25), b"%25d" % 0) + b = ss.get_buckets(b"allocate") + self.assertThat(set(b.keys()), Equals(set([0,1,2]))) + self.assertThat(b[0].read(0, 25), Equals(b"%25d" % 0)) b_str = str(b[0]) - self.failUnlessIn("BucketReader", b_str) - self.failUnlessIn("mfwgy33dmf2g 0", b_str) + self.assertThat(b_str, 
Contains("BucketReader")) + self.assertThat(b_str, Contains("mfwgy33dmf2g 0")) # now if we ask about writing again, the server should offer those # three buckets as already present. It should offer them even if we # don't ask about those specific ones. already,writers = self.allocate(ss, b"allocate", [2,3,4], 75) - self.failUnlessEqual(already, set([0,1,2])) - self.failUnlessEqual(set(writers.keys()), set([3,4])) + self.assertThat(already, Equals(set([0,1,2]))) + self.assertThat(set(writers.keys()), Equals(set([3,4]))) # while those two buckets are open for writing, the server should # refuse to offer them to uploaders already2,writers2 = self.allocate(ss, b"allocate", [2,3,4,5], 75) - self.failUnlessEqual(already2, set([0,1,2])) - self.failUnlessEqual(set(writers2.keys()), set([5])) + self.assertThat(already2, Equals(set([0,1,2]))) + self.assertThat(set(writers2.keys()), Equals(set([5]))) # aborting the writes should remove the tempfiles for i,wb in writers2.items(): - wb.remote_abort() + wb.abort() already2,writers2 = self.allocate(ss, b"allocate", [2,3,4,5], 75) - self.failUnlessEqual(already2, set([0,1,2])) - self.failUnlessEqual(set(writers2.keys()), set([5])) + self.assertThat(already2, Equals(set([0,1,2]))) + self.assertThat(set(writers2.keys()), Equals(set([5]))) for i,wb in writers2.items(): - wb.remote_abort() + wb.abort() for i,wb in writers.items(): - wb.remote_abort() + wb.abort() + + def test_allocate_without_lease_renewal(self): + """ + ``StorageServer._allocate_buckets`` does not renew leases on existing + shares if ``renew_leases`` is ``False``. 
+ """ + first_lease = 456 + second_lease = 543 + storage_index = b"allocate" + + clock = Clock() + clock.advance(first_lease) + ss = self.create( + "test_allocate_without_lease_renewal", + clock=clock, + ) + + # Put a share on there + already, writers = self.allocate( + ss, storage_index, [0], 1, renew_leases=False, + ) + (writer,) = writers.values() + writer.write(0, b"x") + writer.close() + + # It should have a lease granted at the current time. + shares = dict(ss.get_shares(storage_index)) + self.assertEqual( + [first_lease], + list( + lease.get_grant_renew_time_time() + for lease + in ShareFile(shares[0]).get_leases() + ), + ) + + # Let some time pass so we can tell if the lease on share 0 is + # renewed. + clock.advance(second_lease) + + # Put another share on there. + already, writers = self.allocate( + ss, storage_index, [1], 1, renew_leases=False, + ) + (writer,) = writers.values() + writer.write(0, b"x") + writer.close() + + # The first share's lease expiration time is unchanged. 
+ shares = dict(ss.get_shares(storage_index)) + self.assertThat( + [first_lease], + Equals(list( + lease.get_grant_renew_time_time() + for lease + in ShareFile(shares[0]).get_leases() + )), + ) def test_bad_container_version(self): ss = self.create("test_bad_container_version") a,w = self.allocate(ss, b"si1", [0], 10) - w[0].remote_write(0, b"\xff"*10) - w[0].remote_close() + w[0].write(0, b"\xff"*10) + w[0].close() fn = os.path.join(ss.sharedir, storage_index_to_dir(b"si1"), "0") f = open(fn, "rb+") @@ -620,19 +825,30 @@ class Server(unittest.TestCase): f.write(struct.pack(">L", 0)) # this is invalid: minimum used is v1 f.close() - ss.remote_get_buckets(b"allocate") + ss.get_buckets(b"allocate") e = self.failUnlessRaises(UnknownImmutableContainerVersionError, - ss.remote_get_buckets, b"si1") - self.failUnlessIn(" had version 0 but we wanted 1", str(e)) + ss.get_buckets, b"si1") + self.assertThat(e.filename, Equals(fn)) + self.assertThat(e.version, Equals(0)) + self.assertThat(str(e), Contains("had unexpected version 0")) def test_disconnect(self): # simulate a disconnection - ss = self.create("test_disconnect") + ss = FoolscapStorageServer(self.create("test_disconnect")) + renew_secret = b"r" * 32 + cancel_secret = b"c" * 32 canary = FakeCanary() - already,writers = self.allocate(ss, b"disconnect", [0,1,2], 75, canary) - self.failUnlessEqual(already, set()) - self.failUnlessEqual(set(writers.keys()), set([0,1,2])) + already,writers = ss.remote_allocate_buckets( + b"disconnect", + renew_secret, + cancel_secret, + sharenums=[0,1,2], + allocated_size=75, + canary=canary, + ) + self.assertThat(already, Equals(set())) + self.assertThat(set(writers.keys()), Equals(set([0,1,2]))) for (f,args,kwargs) in list(canary.disconnectors.values()): f(*args, **kwargs) del already @@ -640,8 +856,74 @@ class Server(unittest.TestCase): # that ought to delete the incoming shares already,writers = self.allocate(ss, b"disconnect", [0,1,2], 75) - self.failUnlessEqual(already, set()) - 
self.failUnlessEqual(set(writers.keys()), set([0,1,2])) + self.assertThat(already, Equals(set())) + self.assertThat(set(writers.keys()), Equals(set([0,1,2]))) + + def test_reserved_space_immutable_lease(self): + """ + If there is not enough available space to store an additional lease on an + immutable share then ``remote_add_lease`` fails with ``NoSpace`` when + an attempt is made to use it to create a new lease. + """ + disk = FakeDisk(total=1024, used=0) + self.patch(fileutil, "get_disk_stats", disk.get_disk_stats) + + ss = self.create("test_reserved_space_immutable_lease") + + storage_index = b"x" * 16 + renew_secret = b"r" * 32 + cancel_secret = b"c" * 32 + shares = {0: b"y" * 500} + upload_immutable(ss, storage_index, renew_secret, cancel_secret, shares) + + # use up all the available space + disk.use(disk.available) + + # Different secrets to produce a different lease, not a renewal. + renew_secret = b"R" * 32 + cancel_secret = b"C" * 32 + with self.assertRaises(interfaces.NoSpace): + ss.add_lease(storage_index, renew_secret, cancel_secret) + + def test_reserved_space_mutable_lease(self): + """ + If there is not enough available space to store an additional lease on a + mutable share then ``remote_add_lease`` fails with ``NoSpace`` when an + attempt is made to use it to create a new lease. + """ + disk = FakeDisk(total=1024, used=0) + self.patch(fileutil, "get_disk_stats", disk.get_disk_stats) + + ss = self.create("test_reserved_space_mutable_lease") + + renew_secrets = iter( + "{}{}".format("r" * 31, i).encode("ascii") + for i + in range(5) + ) + + storage_index = b"x" * 16 + write_enabler = b"w" * 32 + cancel_secret = b"c" * 32 + secrets = (write_enabler, next(renew_secrets), cancel_secret) + shares = {0: b"y" * 500} + upload_mutable(ss, storage_index, secrets, shares) + + # use up all the available space + disk.use(disk.available) + + # The upload created one lease. There is room for three more leases + # in the share header. 
Even if we're out of disk space, on a boring + # enough filesystem we can write these. + for i in range(3): + ss.add_lease(storage_index, next(renew_secrets), cancel_secret) + + # Having used all of the space for leases in the header, we would have + # to allocate storage for the next lease. Since there is no space + # available, this must fail instead. + with self.assertRaises(interfaces.NoSpace): + ss.add_lease(storage_index, next(renew_secrets), cancel_secret) + def test_reserved_space(self): reserved = 10000 @@ -655,40 +937,49 @@ class Server(unittest.TestCase): } self.patch(fileutil, 'get_disk_stats', call_get_disk_stats) - ss = self.create("test_reserved_space", reserved_space=reserved) + ss = FoolscapStorageServer(self.create("test_reserved_space", reserved_space=reserved)) # 15k available, 10k reserved, leaves 5k for shares # a newly created and filled share incurs this much overhead, beyond # the size we request. OVERHEAD = 3*4 LEASE_SIZE = 4+32+32+4 + renew_secret = b"r" * 32 + cancel_secret = b"c" * 32 canary = FakeCanary() - already, writers = self.allocate(ss, b"vid1", [0,1,2], 1000, canary) - self.failUnlessEqual(len(writers), 3) + already, writers = ss.remote_allocate_buckets( + b"vid1", + renew_secret, + cancel_secret, + sharenums=[0,1,2], + allocated_size=1000, + canary=canary, + ) + self.assertThat(writers, HasLength(3)) # now the StorageServer should have 3000 bytes provisionally # allocated, allowing only 2000 more to be claimed - self.failUnlessEqual(len(ss._bucket_writers), 3) + self.assertThat(ss._server._bucket_writers, HasLength(3)) # allocating 1001-byte shares only leaves room for one canary2 = FakeCanary() already2, writers2 = self.allocate(ss, b"vid2", [0,1,2], 1001, canary2) - self.failUnlessEqual(len(writers2), 1) - self.failUnlessEqual(len(ss._bucket_writers), 4) + self.assertThat(writers2, HasLength(1)) + self.assertThat(ss._server._bucket_writers, HasLength(4)) # we abandon the first set, so their provisional allocation should be # 
returned canary.disconnected() - self.failUnlessEqual(len(ss._bucket_writers), 1) + self.assertThat(ss._server._bucket_writers, HasLength(1)) # now we have a provisional allocation of 1001 bytes # and we close the second set, so their provisional allocation should # become real, long-term allocation, and grows to include the # overhead. for bw in writers2.values(): - bw.remote_write(0, b"a"*25) - bw.remote_close() - self.failUnlessEqual(len(ss._bucket_writers), 0) + bw.write(0, b"a"*25) + bw.close() + self.assertThat(ss._server._bucket_writers, HasLength(0)) # this also changes the amount reported as available by call_get_disk_stats allocated = 1001 + OVERHEAD + LEASE_SIZE @@ -696,14 +987,21 @@ class Server(unittest.TestCase): # now there should be ALLOCATED=1001+12+72=1085 bytes allocated, and # 5000-1085=3915 free, therefore we can fit 39 100byte shares canary3 = FakeCanary() - already3, writers3 = self.allocate(ss, b"vid3", list(range(100)), 100, canary3) - self.failUnlessEqual(len(writers3), 39) - self.failUnlessEqual(len(ss._bucket_writers), 39) + already3, writers3 = ss.remote_allocate_buckets( + b"vid3", + renew_secret, + cancel_secret, + sharenums=list(range(100)), + allocated_size=100, + canary=canary3, + ) + self.assertThat(writers3, HasLength(39)) + self.assertThat(ss._server._bucket_writers, HasLength(39)) canary3.disconnected() - self.failUnlessEqual(len(ss._bucket_writers), 0) - ss.disownServiceParent() + self.assertThat(ss._server._bucket_writers, HasLength(0)) + ss._server.disownServiceParent() del ss def test_seek(self): @@ -721,9 +1019,9 @@ class Server(unittest.TestCase): f.write(b"100") f.close() filelen = os.stat(filename)[stat.ST_SIZE] - self.failUnlessEqual(filelen, 100+3) + self.assertThat(filelen, Equals(100+3)) f2 = open(filename, "rb") - self.failUnlessEqual(f2.read(5), b"start") + self.assertThat(f2.read(5), Equals(b"start")) def create_bucket_5_shares( self, ss, storage_index, expected_already=0, expected_writers=5 @@ -732,96 +1030,97 
@@ class Server(unittest.TestCase): Given a StorageServer, create a bucket with 5 shares and return renewal and cancellation secrets. """ - canary = FakeCanary() sharenums = list(range(5)) size = 100 # Creating a bucket also creates a lease: rs, cs = (hashutil.my_renewal_secret_hash(b"%d" % next(self._lease_secret)), hashutil.my_cancel_secret_hash(b"%d" % next(self._lease_secret))) - already, writers = ss.remote_allocate_buckets(storage_index, rs, cs, - sharenums, size, canary) - self.failUnlessEqual(len(already), expected_already) - self.failUnlessEqual(len(writers), expected_writers) + already, writers = ss.allocate_buckets(storage_index, rs, cs, + sharenums, size) + self.assertThat(already, HasLength(expected_already)) + self.assertThat(writers, HasLength(expected_writers)) for wb in writers.values(): - wb.remote_close() + wb.close() return rs, cs def test_leases(self): ss = self.create("test_leases") - canary = FakeCanary() sharenums = list(range(5)) size = 100 # Create a bucket: rs0, cs0 = self.create_bucket_5_shares(ss, b"si0") - leases = list(ss.get_leases(b"si0")) - self.failUnlessEqual(len(leases), 1) - self.failUnlessEqual(set([l.renew_secret for l in leases]), set([rs0])) + + # Upload of an immutable implies creation of a single lease with the + # supplied secrets. 
+ (lease,) = ss.get_leases(b"si0") + self.assertTrue(lease.is_renew_secret(rs0)) rs1, cs1 = self.create_bucket_5_shares(ss, b"si1") # take out a second lease on si1 rs2, cs2 = self.create_bucket_5_shares(ss, b"si1", 5, 0) - leases = list(ss.get_leases(b"si1")) - self.failUnlessEqual(len(leases), 2) - self.failUnlessEqual(set([l.renew_secret for l in leases]), set([rs1, rs2])) + (lease1, lease2) = ss.get_leases(b"si1") + self.assertTrue(lease1.is_renew_secret(rs1)) + self.assertTrue(lease2.is_renew_secret(rs2)) # and a third lease, using add-lease rs2a,cs2a = (hashutil.my_renewal_secret_hash(b"%d" % next(self._lease_secret)), hashutil.my_cancel_secret_hash(b"%d" % next(self._lease_secret))) - ss.remote_add_lease(b"si1", rs2a, cs2a) - leases = list(ss.get_leases(b"si1")) - self.failUnlessEqual(len(leases), 3) - self.failUnlessEqual(set([l.renew_secret for l in leases]), set([rs1, rs2, rs2a])) + ss.add_lease(b"si1", rs2a, cs2a) + (lease1, lease2, lease3) = ss.get_leases(b"si1") + self.assertTrue(lease1.is_renew_secret(rs1)) + self.assertTrue(lease2.is_renew_secret(rs2)) + self.assertTrue(lease3.is_renew_secret(rs2a)) # add-lease on a missing storage index is silently ignored - self.failUnlessEqual(ss.remote_add_lease(b"si18", b"", b""), None) + self.assertThat(ss.add_lease(b"si18", b"", b""), Equals(None)) # check that si0 is readable - readers = ss.remote_get_buckets(b"si0") - self.failUnlessEqual(len(readers), 5) + readers = ss.get_buckets(b"si0") + self.assertThat(readers, HasLength(5)) # renew the first lease. 
Only the proper renew_secret should work - ss.remote_renew_lease(b"si0", rs0) - self.failUnlessRaises(IndexError, ss.remote_renew_lease, b"si0", cs0) - self.failUnlessRaises(IndexError, ss.remote_renew_lease, b"si0", rs1) + ss.renew_lease(b"si0", rs0) + self.failUnlessRaises(IndexError, ss.renew_lease, b"si0", cs0) + self.failUnlessRaises(IndexError, ss.renew_lease, b"si0", rs1) # check that si0 is still readable - readers = ss.remote_get_buckets(b"si0") - self.failUnlessEqual(len(readers), 5) + readers = ss.get_buckets(b"si0") + self.assertThat(readers, HasLength(5)) # There is no such method as remote_cancel_lease for now -- see # ticket #1528. - self.failIf(hasattr(ss, 'remote_cancel_lease'), \ - "ss should not have a 'remote_cancel_lease' method/attribute") + self.assertFalse(hasattr(FoolscapStorageServer(ss), 'remote_cancel_lease'), \ + "ss should not have a 'remote_cancel_lease' method/attribute") # test overlapping uploads rs3,cs3 = (hashutil.my_renewal_secret_hash(b"%d" % next(self._lease_secret)), hashutil.my_cancel_secret_hash(b"%d" % next(self._lease_secret))) rs4,cs4 = (hashutil.my_renewal_secret_hash(b"%d" % next(self._lease_secret)), hashutil.my_cancel_secret_hash(b"%d" % next(self._lease_secret))) - already,writers = ss.remote_allocate_buckets(b"si3", rs3, cs3, - sharenums, size, canary) - self.failUnlessEqual(len(already), 0) - self.failUnlessEqual(len(writers), 5) - already2,writers2 = ss.remote_allocate_buckets(b"si3", rs4, cs4, - sharenums, size, canary) - self.failUnlessEqual(len(already2), 0) - self.failUnlessEqual(len(writers2), 0) + already,writers = ss.allocate_buckets(b"si3", rs3, cs3, + sharenums, size) + self.assertThat(already, HasLength(0)) + self.assertThat(writers, HasLength(5)) + already2,writers2 = ss.allocate_buckets(b"si3", rs4, cs4, + sharenums, size) + self.assertThat(already2, HasLength(0)) + self.assertThat(writers2, HasLength(0)) for wb in writers.values(): - wb.remote_close() + wb.close() leases = list(ss.get_leases(b"si3")) 
- self.failUnlessEqual(len(leases), 1) + self.assertThat(leases, HasLength(1)) - already3,writers3 = ss.remote_allocate_buckets(b"si3", rs4, cs4, - sharenums, size, canary) - self.failUnlessEqual(len(already3), 5) - self.failUnlessEqual(len(writers3), 0) + already3,writers3 = ss.allocate_buckets(b"si3", rs4, cs4, + sharenums, size) + self.assertThat(already3, HasLength(5)) + self.assertThat(writers3, HasLength(0)) leases = list(ss.get_leases(b"si3")) - self.failUnlessEqual(len(leases), 2) + self.assertThat(leases, HasLength(2)) def test_immutable_add_lease_renews(self): """ @@ -830,20 +1129,20 @@ class Server(unittest.TestCase): """ clock = Clock() clock.advance(123) - ss = self.create("test_immutable_add_lease_renews", get_current_time=clock.seconds) + ss = self.create("test_immutable_add_lease_renews", clock=clock) # Start out with single lease created with bucket: renewal_secret, cancel_secret = self.create_bucket_5_shares(ss, b"si0") [lease] = ss.get_leases(b"si0") - self.assertEqual(lease.expiration_time, 123 + DEFAULT_RENEWAL_TIME) + self.assertThat(lease.get_expiration_time(), Equals(123 + DEFAULT_RENEWAL_TIME)) # Time passes: clock.advance(123456) # Adding a lease with matching renewal secret just renews it: - ss.remote_add_lease(b"si0", renewal_secret, cancel_secret) + ss.add_lease(b"si0", renewal_secret, cancel_secret) [lease] = ss.get_leases(b"si0") - self.assertEqual(lease.expiration_time, 123 + 123456 + DEFAULT_RENEWAL_TIME) + self.assertThat(lease.get_expiration_time(), Equals(123 + 123456 + DEFAULT_RENEWAL_TIME)) def test_have_shares(self): """By default the StorageServer has no shares.""" @@ -857,15 +1156,15 @@ class Server(unittest.TestCase): ss.setServiceParent(self.sparent) already,writers = self.allocate(ss, b"vid", [0,1,2], 75) - self.failUnlessEqual(already, set()) - self.failUnlessEqual(writers, {}) + self.assertThat(already, Equals(set())) + self.assertThat(writers, Equals({})) stats = ss.get_stats() - 
self.failUnlessEqual(stats["storage_server.accepting_immutable_shares"], 0) + self.assertThat(stats["storage_server.accepting_immutable_shares"], Equals(0)) if "storage_server.disk_avail" in stats: # Some platforms may not have an API to get disk stats. # But if there are stats, readonly_storage means disk_avail=0 - self.failUnlessEqual(stats["storage_server.disk_avail"], 0) + self.assertThat(stats["storage_server.disk_avail"], Equals(0)) def test_discard(self): # discard is really only used for other tests, but we test it anyways @@ -874,17 +1173,38 @@ class Server(unittest.TestCase): ss.setServiceParent(self.sparent) already,writers = self.allocate(ss, b"vid", [0,1,2], 75) - self.failUnlessEqual(already, set()) - self.failUnlessEqual(set(writers.keys()), set([0,1,2])) + self.assertThat(already, Equals(set())) + self.assertThat(set(writers.keys()), Equals(set([0,1,2]))) for i,wb in writers.items(): - wb.remote_write(0, b"%25d" % i) - wb.remote_close() + wb.write(0, b"%25d" % i) + wb.close() # since we discard the data, the shares should be present but sparse. # Since we write with some seeks, the data we read back will be all # zeros. - b = ss.remote_get_buckets(b"vid") - self.failUnlessEqual(set(b.keys()), set([0,1,2])) - self.failUnlessEqual(b[0].remote_read(0, 25), b"\x00" * 25) + b = ss.get_buckets(b"vid") + self.assertThat(set(b.keys()), Equals(set([0,1,2]))) + self.assertThat(b[0].read(0, 25), Equals(b"\x00" * 25)) + + def test_reserved_space_advise_corruption(self): + """ + If there is no available space then ``remote_advise_corrupt_share`` does + not write a corruption report. 
+ """ + disk = FakeDisk(total=1024, used=1024) + self.patch(fileutil, "get_disk_stats", disk.get_disk_stats) + + workdir = self.workdir("test_reserved_space_advise_corruption") + ss = StorageServer(workdir, b"\x00" * 20, discard_storage=True) + ss.setServiceParent(self.sparent) + + upload_immutable(ss, b"si0", b"r" * 32, b"c" * 32, {0: b""}) + ss.advise_corrupt_share(b"immutable", b"si0", 0, + b"This share smells funny.\n") + + self.assertThat( + [], + Equals(os.listdir(ss.corruption_advisory_dir)), + ) def test_advise_corruption(self): workdir = self.workdir("test_advise_corruption") @@ -892,62 +1212,85 @@ class Server(unittest.TestCase): ss.setServiceParent(self.sparent) si0_s = base32.b2a(b"si0") - ss.remote_advise_corrupt_share(b"immutable", b"si0", 0, - b"This share smells funny.\n") + upload_immutable(ss, b"si0", b"r" * 32, b"c" * 32, {0: b""}) + ss.advise_corrupt_share(b"immutable", b"si0", 0, + b"This share smells funny.\n") reportdir = os.path.join(workdir, "corruption-advisories") reports = os.listdir(reportdir) - self.failUnlessEqual(len(reports), 1) + self.assertThat(reports, HasLength(1)) report_si0 = reports[0] - self.failUnlessIn(ensure_str(si0_s), report_si0) + self.assertThat(report_si0, Contains(ensure_str(si0_s))) f = open(os.path.join(reportdir, report_si0), "rb") report = f.read() f.close() - self.failUnlessIn(b"type: immutable", report) - self.failUnlessIn(b"storage_index: %s" % si0_s, report) - self.failUnlessIn(b"share_number: 0", report) - self.failUnlessIn(b"This share smells funny.", report) + self.assertThat(report, Contains(b"type: immutable")) + self.assertThat(report, Contains(b"storage_index: %s" % si0_s)) + self.assertThat(report, Contains(b"share_number: 0")) + self.assertThat(report, Contains(b"This share smells funny.")) # test the RIBucketWriter version too si1_s = base32.b2a(b"si1") already,writers = self.allocate(ss, b"si1", [1], 75) - self.failUnlessEqual(already, set()) - self.failUnlessEqual(set(writers.keys()), set([1])) - 
writers[1].remote_write(0, b"data") - writers[1].remote_close() + self.assertThat(already, Equals(set())) + self.assertThat(set(writers.keys()), Equals(set([1]))) + writers[1].write(0, b"data") + writers[1].close() - b = ss.remote_get_buckets(b"si1") - self.failUnlessEqual(set(b.keys()), set([1])) - b[1].remote_advise_corrupt_share(b"This share tastes like dust.\n") + b = ss.get_buckets(b"si1") + self.assertThat(set(b.keys()), Equals(set([1]))) + b[1].advise_corrupt_share(b"This share tastes like dust.\n") reports = os.listdir(reportdir) - self.failUnlessEqual(len(reports), 2) - report_si1 = [r for r in reports if bytes_to_native_str(si1_s) in r][0] + self.assertThat(reports, HasLength(2)) + report_si1 = [r for r in reports if si1_s.decode() in r][0] f = open(os.path.join(reportdir, report_si1), "rb") report = f.read() f.close() - self.failUnlessIn(b"type: immutable", report) - self.failUnlessIn(b"storage_index: %s" % si1_s, report) - self.failUnlessIn(b"share_number: 1", report) - self.failUnlessIn(b"This share tastes like dust.", report) + self.assertThat(report, Contains(b"type: immutable")) + self.assertThat(report, Contains(b"storage_index: %s" % si1_s)) + self.assertThat(report, Contains(b"share_number: 1")) + self.assertThat(report, Contains(b"This share tastes like dust.")) + + def test_advise_corruption_missing(self): + """ + If a corruption advisory is received for a share that is not present on + this server then it is not persisted. 
+ """ + workdir = self.workdir("test_advise_corruption_missing") + ss = StorageServer(workdir, b"\x00" * 20, discard_storage=True) + ss.setServiceParent(self.sparent) + + # Upload one share for this storage index + upload_immutable(ss, b"si0", b"r" * 32, b"c" * 32, {0: b""}) + + # And try to submit a corruption advisory about a different share + ss.advise_corrupt_share(b"immutable", b"si0", 1, + b"This share smells funny.\n") + + self.assertThat( + [], + Equals(os.listdir(ss.corruption_advisory_dir)), + ) - -class MutableServer(unittest.TestCase): +class MutableServer(SyncTestCase): def setUp(self): + super(MutableServer, self).setUp() self.sparent = LoggingServiceParent() self._lease_secret = itertools.count() - def tearDown(self): - return self.sparent.stopService() + self.addCleanup(self.sparent.stopService) def workdir(self, name): basedir = os.path.join("storage", "MutableServer", name) return basedir - def create(self, name, get_current_time=time.time): + def create(self, name, clock=None): workdir = self.workdir(name) + if clock is None: + clock = Clock() ss = StorageServer(workdir, b"\x00" * 20, - get_current_time=get_current_time) + clock=clock) ss.setServiceParent(self.sparent) return ss @@ -960,20 +1303,20 @@ class MutableServer(unittest.TestCase): def renew_secret(self, tag): if isinstance(tag, int): tag = b"%d" % (tag,) - assert isinstance(tag, bytes) + self.assertThat(tag, IsInstance(bytes)) return hashutil.tagged_hash(b"renew_blah", tag) def cancel_secret(self, tag): if isinstance(tag, int): tag = b"%d" % (tag,) - assert isinstance(tag, bytes) + self.assertThat(tag, IsInstance(bytes)) return hashutil.tagged_hash(b"cancel_blah", tag) def allocate(self, ss, storage_index, we_tag, lease_tag, sharenums, size): write_enabler = self.write_enabler(we_tag) renew_secret = self.renew_secret(lease_tag) cancel_secret = self.cancel_secret(lease_tag) - rstaraw = ss.remote_slot_testv_and_readv_and_writev + rstaraw = ss.slot_testv_and_readv_and_writev testandwritev 
= dict( [ (shnum, ([], [], None) ) for shnum in sharenums ] ) readv = [] @@ -982,9 +1325,67 @@ class MutableServer(unittest.TestCase): testandwritev, readv) (did_write, readv_data) = rc - self.failUnless(did_write) - self.failUnless(isinstance(readv_data, dict)) - self.failUnlessEqual(len(readv_data), 0) + self.assertTrue(did_write) + self.assertThat(readv_data, IsInstance(dict)) + self.assertThat(readv_data, HasLength(0)) + + def test_enumerate_mutable_shares(self): + """ + ``StorageServer.enumerate_mutable_shares()`` returns a set of share + numbers for the given storage index, or an empty set if it does not + exist at all. + """ + ss = self.create("test_enumerate_mutable_shares") + + # Initially, nothing exists: + empty = ss.enumerate_mutable_shares(b"si1") + + self.allocate(ss, b"si1", b"we1", b"le1", [0, 1, 4, 2], 12) + shares0_1_2_4 = ss.enumerate_mutable_shares(b"si1") + + # Remove share 2, by setting size to 0: + secrets = (self.write_enabler(b"we1"), + self.renew_secret(b"le1"), + self.cancel_secret(b"le1")) + ss.slot_testv_and_readv_and_writev(b"si1", secrets, {2: ([], [], 0)}, []) + shares0_1_4 = ss.enumerate_mutable_shares(b"si1") + self.assertThat( + (empty, shares0_1_2_4, shares0_1_4), + Equals((set(), {0, 1, 2, 4}, {0, 1, 4})) + ) + + def test_mutable_share_length(self): + """``get_mutable_share_length()`` returns the length of the share.""" + ss = self.create("test_mutable_share_length") + self.allocate(ss, b"si1", b"we1", b"le1", [16], 23) + ss.slot_testv_and_readv_and_writev( + b"si1", (self.write_enabler(b"we1"), + self.renew_secret(b"le1"), + self.cancel_secret(b"le1")), + {16: ([], [(0, b"x" * 23)], None)}, + [] + ) + self.assertThat(ss.get_mutable_share_length(b"si1", 16), Equals(23)) + + def test_mutable_share_length_unknown(self): + """ + ``get_mutable_share_length()`` raises a ``KeyError`` on unknown shares. 
+ """ + ss = self.create("test_mutable_share_length_unknown") + self.allocate(ss, b"si1", b"we1", b"le1", [16], 23) + ss.slot_testv_and_readv_and_writev( + b"si1", (self.write_enabler(b"we1"), + self.renew_secret(b"le1"), + self.cancel_secret(b"le1")), + {16: ([], [(0, b"x" * 23)], None)}, + [] + ) + with self.assertRaises(KeyError): + # Wrong share number. + ss.get_mutable_share_length(b"si1", 17) + with self.assertRaises(KeyError): + # Wrong storage index + ss.get_mutable_share_length(b"unknown", 16) def test_bad_magic(self): ss = self.create("test_bad_magic") @@ -994,18 +1395,20 @@ class MutableServer(unittest.TestCase): f.seek(0) f.write(b"BAD MAGIC") f.close() - read = ss.remote_slot_readv + read = ss.slot_readv e = self.failUnlessRaises(UnknownMutableContainerVersionError, read, b"si1", [0], [(0,10)]) - self.failUnlessIn(" had magic ", str(e)) - self.failUnlessIn(" but we wanted ", str(e)) + self.assertThat(e.filename, Equals(fn)) + self.assertTrue(e.version.startswith(b"BAD MAGIC")) + self.assertThat(str(e), Contains("had unexpected version")) + self.assertThat(str(e), Contains("BAD MAGIC")) def test_container_size(self): ss = self.create("test_container_size") self.allocate(ss, b"si1", b"we1", next(self._lease_secret), set([0,1,2]), 100) - read = ss.remote_slot_readv - rstaraw = ss.remote_slot_testv_and_readv_and_writev + read = ss.slot_readv + rstaraw = ss.slot_testv_and_readv_and_writev secrets = ( self.write_enabler(b"we1"), self.renew_secret(b"we1"), self.cancel_secret(b"we1") ) @@ -1013,7 +1416,7 @@ class MutableServer(unittest.TestCase): answer = rstaraw(b"si1", secrets, {0: ([], [(0,data)], len(data)+12)}, []) - self.failUnlessEqual(answer, (True, {0:[],1:[],2:[]}) ) + self.assertThat(answer, Equals((True, {0:[],1:[],2:[]}))) # Trying to make the container too large (by sending a write vector # whose offset is too high) will raise an exception. 
@@ -1026,10 +1429,10 @@ class MutableServer(unittest.TestCase): answer = rstaraw(b"si1", secrets, {0: ([], [(0,data)], None)}, []) - self.failUnlessEqual(answer, (True, {0:[],1:[],2:[]}) ) + self.assertThat(answer, Equals((True, {0:[],1:[],2:[]}))) read_answer = read(b"si1", [0], [(0,10)]) - self.failUnlessEqual(read_answer, {0: [data[:10]]}) + self.assertThat(read_answer, Equals({0: [data[:10]]})) # Sending a new_length shorter than the current length truncates the # data. @@ -1037,7 +1440,7 @@ class MutableServer(unittest.TestCase): {0: ([], [], 9)}, []) read_answer = read(b"si1", [0], [(0,10)]) - self.failUnlessEqual(read_answer, {0: [data[:9]]}) + self.assertThat(read_answer, Equals({0: [data[:9]]})) # Sending a new_length longer than the current length doesn't change # the data. @@ -1046,7 +1449,7 @@ class MutableServer(unittest.TestCase): []) assert answer == (True, {0:[],1:[],2:[]}) read_answer = read(b"si1", [0], [(0, 20)]) - self.failUnlessEqual(read_answer, {0: [data[:9]]}) + self.assertThat(read_answer, Equals({0: [data[:9]]})) # Sending a write vector whose start is after the end of the current # data doesn't reveal "whatever was there last time" (palimpsest), @@ -1068,7 +1471,7 @@ class MutableServer(unittest.TestCase): answer = rstaraw(b"si1", secrets, {0: ([], [], None)}, [(20, 1980)]) - self.failUnlessEqual(answer, (True, {0:[b''],1:[b''],2:[b'']})) + self.assertThat(answer, Equals((True, {0:[b''],1:[b''],2:[b'']}))) # Then the extend the file by writing a vector which starts out past # the end... @@ -1081,58 +1484,58 @@ class MutableServer(unittest.TestCase): answer = rstaraw(b"si1", secrets, {0: ([], [], None)}, [(20, 30)]) - self.failUnlessEqual(answer, (True, {0:[b'\x00'*30],1:[b''],2:[b'']})) + self.assertThat(answer, Equals((True, {0:[b'\x00'*30],1:[b''],2:[b'']}))) # Also see if the server explicitly declares that it supports this # feature. 
- ver = ss.remote_get_version() + ver = ss.get_version() storage_v1_ver = ver[b"http://allmydata.org/tahoe/protocols/storage/v1"] - self.failUnless(storage_v1_ver.get(b"fills-holes-with-zero-bytes")) + self.assertTrue(storage_v1_ver.get(b"fills-holes-with-zero-bytes")) # If the size is dropped to zero the share is deleted. answer = rstaraw(b"si1", secrets, {0: ([], [(0,data)], 0)}, []) - self.failUnlessEqual(answer, (True, {0:[],1:[],2:[]}) ) + self.assertThat(answer, Equals((True, {0:[],1:[],2:[]}))) read_answer = read(b"si1", [0], [(0,10)]) - self.failUnlessEqual(read_answer, {}) + self.assertThat(read_answer, Equals({})) def test_allocate(self): ss = self.create("test_allocate") self.allocate(ss, b"si1", b"we1", next(self._lease_secret), set([0,1,2]), 100) - read = ss.remote_slot_readv - self.failUnlessEqual(read(b"si1", [0], [(0, 10)]), - {0: [b""]}) - self.failUnlessEqual(read(b"si1", [], [(0, 10)]), - {0: [b""], 1: [b""], 2: [b""]}) - self.failUnlessEqual(read(b"si1", [0], [(100, 10)]), - {0: [b""]}) + read = ss.slot_readv + self.assertThat(read(b"si1", [0], [(0, 10)]), + Equals({0: [b""]})) + self.assertThat(read(b"si1", [], [(0, 10)]), + Equals({0: [b""], 1: [b""], 2: [b""]})) + self.assertThat(read(b"si1", [0], [(100, 10)]), + Equals({0: [b""]})) # try writing to one secrets = ( self.write_enabler(b"we1"), self.renew_secret(b"we1"), self.cancel_secret(b"we1") ) data = b"".join([ (b"%d" % i) * 10 for i in range(10) ]) - write = ss.remote_slot_testv_and_readv_and_writev + write = ss.slot_testv_and_readv_and_writev answer = write(b"si1", secrets, {0: ([], [(0,data)], None)}, []) - self.failUnlessEqual(answer, (True, {0:[],1:[],2:[]}) ) + self.assertThat(answer, Equals((True, {0:[],1:[],2:[]}))) - self.failUnlessEqual(read(b"si1", [0], [(0,20)]), - {0: [b"00000000001111111111"]}) - self.failUnlessEqual(read(b"si1", [0], [(95,10)]), - {0: [b"99999"]}) - #self.failUnlessEqual(s0.remote_get_length(), 100) + self.assertThat(read(b"si1", [0], [(0,20)]), + 
Equals({0: [b"00000000001111111111"]})) + self.assertThat(read(b"si1", [0], [(95,10)]), + Equals({0: [b"99999"]})) + #self.failUnlessEqual(s0.get_length(), 100) bad_secrets = (b"bad write enabler", secrets[1], secrets[2]) f = self.failUnlessRaises(BadWriteEnablerError, write, b"si1", bad_secrets, {}, []) - self.failUnlessIn("The write enabler was recorded by nodeid 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'.", str(f)) + self.assertThat(str(f), Contains("The write enabler was recorded by nodeid 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'.")) # this testv should fail answer = write(b"si1", secrets, @@ -1144,12 +1547,12 @@ class MutableServer(unittest.TestCase): }, [(0,12), (20,5)], ) - self.failUnlessEqual(answer, (False, + self.assertThat(answer, Equals((False, {0: [b"000000000011", b"22222"], 1: [b"", b""], 2: [b"", b""], - })) - self.failUnlessEqual(read(b"si1", [0], [(0,100)]), {0: [data]}) + }))) + self.assertThat(read(b"si1", [0], [(0,100)]), Equals({0: [data]})) def test_operators(self): # test operators, the data we're comparing is '11111' in all cases. 
@@ -1160,8 +1563,8 @@ class MutableServer(unittest.TestCase): self.renew_secret(b"we1"), self.cancel_secret(b"we1") ) data = b"".join([ (b"%d" % i) * 10 for i in range(10) ]) - write = ss.remote_slot_testv_and_readv_and_writev - read = ss.remote_slot_readv + write = ss.slot_testv_and_readv_and_writev + read = ss.slot_readv def reset(): write(b"si1", secrets, @@ -1176,8 +1579,8 @@ class MutableServer(unittest.TestCase): [(0, b"x"*100)], None, )}, [(10,5)]) - self.failUnlessEqual(answer, (False, {0: [b"11111"]})) - self.failUnlessEqual(read(b"si1", [0], [(0,100)]), {0: [data]}) + self.assertThat(answer, Equals((False, {0: [b"11111"]}))) + self.assertThat(read(b"si1", [0], [(0,100)]), Equals({0: [data]})) reset() answer = write(b"si1", secrets, {0: ([(10, 5, b"eq", b"11111"), @@ -1185,8 +1588,8 @@ class MutableServer(unittest.TestCase): [(0, b"y"*100)], None, )}, [(10,5)]) - self.failUnlessEqual(answer, (True, {0: [b"11111"]})) - self.failUnlessEqual(read(b"si1", [0], [(0,100)]), {0: [b"y"*100]}) + self.assertThat(answer, Equals((True, {0: [b"11111"]}))) + self.assertThat(read(b"si1", [0], [(0,100)]), Equals({0: [b"y"*100]})) reset() # finally, test some operators against empty shares @@ -1195,8 +1598,8 @@ class MutableServer(unittest.TestCase): [(0, b"x"*100)], None, )}, [(10,5)]) - self.failUnlessEqual(answer, (False, {0: [b"11111"]})) - self.failUnlessEqual(read(b"si1", [0], [(0,100)]), {0: [data]}) + self.assertThat(answer, Equals((False, {0: [b"11111"]}))) + self.assertThat(read(b"si1", [0], [(0,100)]), Equals({0: [data]})) reset() def test_readv(self): @@ -1205,41 +1608,41 @@ class MutableServer(unittest.TestCase): self.renew_secret(b"we1"), self.cancel_secret(b"we1") ) data = b"".join([ (b"%d" % i) * 10 for i in range(10) ]) - write = ss.remote_slot_testv_and_readv_and_writev - read = ss.remote_slot_readv + write = ss.slot_testv_and_readv_and_writev + read = ss.slot_readv data = [(b"%d" % i) * 100 for i in range(3)] rc = write(b"si1", secrets, {0: ([], 
[(0,data[0])], None), 1: ([], [(0,data[1])], None), 2: ([], [(0,data[2])], None), }, []) - self.failUnlessEqual(rc, (True, {})) + self.assertThat(rc, Equals((True, {}))) answer = read(b"si1", [], [(0, 10)]) - self.failUnlessEqual(answer, {0: [b"0"*10], + self.assertThat(answer, Equals({0: [b"0"*10], 1: [b"1"*10], - 2: [b"2"*10]}) + 2: [b"2"*10]})) def compare_leases_without_timestamps(self, leases_a, leases_b): - self.failUnlessEqual(len(leases_a), len(leases_b)) - for i in range(len(leases_a)): - a = leases_a[i] - b = leases_b[i] - self.failUnlessEqual(a.owner_num, b.owner_num) - self.failUnlessEqual(a.renew_secret, b.renew_secret) - self.failUnlessEqual(a.cancel_secret, b.cancel_secret) - self.failUnlessEqual(a.nodeid, b.nodeid) - - def compare_leases(self, leases_a, leases_b): - self.failUnlessEqual(len(leases_a), len(leases_b)) - for i in range(len(leases_a)): - a = leases_a[i] - b = leases_b[i] - self.failUnlessEqual(a.owner_num, b.owner_num) - self.failUnlessEqual(a.renew_secret, b.renew_secret) - self.failUnlessEqual(a.cancel_secret, b.cancel_secret) - self.failUnlessEqual(a.nodeid, b.nodeid) - self.failUnlessEqual(a.expiration_time, b.expiration_time) + """ + Assert that, except for expiration times, ``leases_a`` contains the same + lease information as ``leases_b``. + """ + for a, b in zip(leases_a, leases_b): + # The leases aren't always of the same type (though of course + # corresponding elements in the two lists should be of the same + # type as each other) so it's inconvenient to just reach in and + # normalize the expiration timestamp. We don't want to call + # `renew` on both objects to normalize the expiration timestamp in + # case `renew` is broken and gives us back equal outputs from + # non-equal inputs (expiration timestamp aside). It seems + # reasonably safe to use `renew` to make _one_ of the timestamps + # equal to the other though. 
+ self.assertThat( + a.renew(b.get_expiration_time()), + Equals(b), + ) + self.assertThat(len(leases_a), Equals(len(leases_b))) def test_leases(self): ss = self.create("test_leases") @@ -1248,10 +1651,10 @@ class MutableServer(unittest.TestCase): self.renew_secret(b"we1-%d" % n), self.cancel_secret(b"we1-%d" % n) ) data = b"".join([ (b"%d" % i) * 10 for i in range(10) ]) - write = ss.remote_slot_testv_and_readv_and_writev - read = ss.remote_slot_readv + write = ss.slot_testv_and_readv_and_writev + read = ss.slot_readv rc = write(b"si1", secrets(0), {0: ([], [(0,data)], None)}, []) - self.failUnlessEqual(rc, (True, {})) + self.assertThat(rc, Equals((True, {}))) # create a random non-numeric file in the bucket directory, to # exercise the code that's supposed to ignore those. @@ -1262,32 +1665,32 @@ class MutableServer(unittest.TestCase): f.close() s0 = MutableShareFile(os.path.join(bucket_dir, "0")) - self.failUnlessEqual(len(list(s0.get_leases())), 1) + self.assertThat(list(s0.get_leases()), HasLength(1)) # add-lease on a missing storage index is silently ignored - self.failUnlessEqual(ss.remote_add_lease(b"si18", b"", b""), None) + self.assertThat(ss.add_lease(b"si18", b"", b""), Equals(None)) # re-allocate the slots and use the same secrets, that should update # the lease write(b"si1", secrets(0), {0: ([], [(0,data)], None)}, []) - self.failUnlessEqual(len(list(s0.get_leases())), 1) + self.assertThat(list(s0.get_leases()), HasLength(1)) # renew it directly - ss.remote_renew_lease(b"si1", secrets(0)[1]) - self.failUnlessEqual(len(list(s0.get_leases())), 1) + ss.renew_lease(b"si1", secrets(0)[1]) + self.assertThat(list(s0.get_leases()), HasLength(1)) # now allocate them with a bunch of different secrets, to trigger the # extended lease code. Use add_lease for one of them. 
write(b"si1", secrets(1), {0: ([], [(0,data)], None)}, []) - self.failUnlessEqual(len(list(s0.get_leases())), 2) + self.assertThat(list(s0.get_leases()), HasLength(2)) secrets2 = secrets(2) - ss.remote_add_lease(b"si1", secrets2[1], secrets2[2]) - self.failUnlessEqual(len(list(s0.get_leases())), 3) + ss.add_lease(b"si1", secrets2[1], secrets2[2]) + self.assertThat(list(s0.get_leases()), HasLength(3)) write(b"si1", secrets(3), {0: ([], [(0,data)], None)}, []) write(b"si1", secrets(4), {0: ([], [(0,data)], None)}, []) write(b"si1", secrets(5), {0: ([], [(0,data)], None)}, []) - self.failUnlessEqual(len(list(s0.get_leases())), 6) + self.assertThat(list(s0.get_leases()), HasLength(6)) all_leases = list(s0.get_leases()) # and write enough data to expand the container, forcing the server @@ -1299,11 +1702,11 @@ class MutableServer(unittest.TestCase): # read back the leases, make sure they're still intact. self.compare_leases_without_timestamps(all_leases, list(s0.get_leases())) - ss.remote_renew_lease(b"si1", secrets(0)[1]) - ss.remote_renew_lease(b"si1", secrets(1)[1]) - ss.remote_renew_lease(b"si1", secrets(2)[1]) - ss.remote_renew_lease(b"si1", secrets(3)[1]) - ss.remote_renew_lease(b"si1", secrets(4)[1]) + ss.renew_lease(b"si1", secrets(0)[1]) + ss.renew_lease(b"si1", secrets(1)[1]) + ss.renew_lease(b"si1", secrets(2)[1]) + ss.renew_lease(b"si1", secrets(3)[1]) + ss.renew_lease(b"si1", secrets(4)[1]) self.compare_leases_without_timestamps(all_leases, list(s0.get_leases())) # get a new copy of the leases, with the current timestamps. 
Reading # data and failing to renew/cancel leases should leave the timestamps @@ -1314,18 +1717,18 @@ class MutableServer(unittest.TestCase): # examine the exception thus raised, make sure the old nodeid is # present, to provide for share migration e = self.failUnlessRaises(IndexError, - ss.remote_renew_lease, b"si1", + ss.renew_lease, b"si1", secrets(20)[1]) e_s = str(e) - self.failUnlessIn("Unable to renew non-existent lease", e_s) - self.failUnlessIn("I have leases accepted by nodeids:", e_s) - self.failUnlessIn("nodeids: 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' .", e_s) + self.assertThat(e_s, Contains("Unable to renew non-existent lease")) + self.assertThat(e_s, Contains("I have leases accepted by nodeids:")) + self.assertThat(e_s, Contains("nodeids: 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' .")) - self.compare_leases(all_leases, list(s0.get_leases())) + self.assertThat(all_leases, Equals(list(s0.get_leases()))) # reading shares should not modify the timestamp read(b"si1", [], [(0,200)]) - self.compare_leases(all_leases, list(s0.get_leases())) + self.assertThat(all_leases, Equals(list(s0.get_leases()))) write(b"si1", secrets(0), {0: ([], [(200, b"make me bigger")], None)}, []) @@ -1343,39 +1746,39 @@ class MutableServer(unittest.TestCase): clock = Clock() clock.advance(235) ss = self.create("test_mutable_add_lease_renews", - get_current_time=clock.seconds) + clock=clock) def secrets(n): return ( self.write_enabler(b"we1"), self.renew_secret(b"we1-%d" % n), self.cancel_secret(b"we1-%d" % n) ) data = b"".join([ (b"%d" % i) * 10 for i in range(10) ]) - write = ss.remote_slot_testv_and_readv_and_writev + write = ss.slot_testv_and_readv_and_writev write_enabler, renew_secret, cancel_secret = secrets(0) rc = write(b"si1", (write_enabler, renew_secret, cancel_secret), {0: ([], [(0,data)], None)}, []) - self.failUnlessEqual(rc, (True, {})) + self.assertThat(rc, Equals((True, {}))) bucket_dir = os.path.join(self.workdir("test_mutable_add_lease_renews"), "shares", 
storage_index_to_dir(b"si1")) s0 = MutableShareFile(os.path.join(bucket_dir, "0")) [lease] = s0.get_leases() - self.assertEqual(lease.expiration_time, 235 + DEFAULT_RENEWAL_TIME) + self.assertThat(lease.get_expiration_time(), Equals(235 + DEFAULT_RENEWAL_TIME)) # Time passes... clock.advance(835) # Adding a lease renews it: - ss.remote_add_lease(b"si1", renew_secret, cancel_secret) + ss.add_lease(b"si1", renew_secret, cancel_secret) [lease] = s0.get_leases() - self.assertEqual(lease.expiration_time, - 235 + 835 + DEFAULT_RENEWAL_TIME) + self.assertThat(lease.get_expiration_time(), + Equals(235 + 835 + DEFAULT_RENEWAL_TIME)) def test_remove(self): ss = self.create("test_remove") self.allocate(ss, b"si1", b"we1", next(self._lease_secret), set([0,1,2]), 100) - readv = ss.remote_slot_readv - writev = ss.remote_slot_testv_and_readv_and_writev + readv = ss.slot_readv + writev = ss.slot_testv_and_readv_and_writev secrets = ( self.write_enabler(b"we1"), self.renew_secret(b"we1"), self.cancel_secret(b"we1") ) @@ -1385,36 +1788,36 @@ class MutableServer(unittest.TestCase): []) # the answer should mention all the shares that existed before the # write - self.failUnlessEqual(answer, (True, {0:[],1:[],2:[]}) ) + self.assertThat(answer, Equals((True, {0:[],1:[],2:[]}))) # but a new read should show only sh1 and sh2 - self.failUnlessEqual(readv(b"si1", [], [(0,10)]), - {1: [b""], 2: [b""]}) + self.assertThat(readv(b"si1", [], [(0,10)]), + Equals({1: [b""], 2: [b""]})) # delete sh1 by setting its size to zero answer = writev(b"si1", secrets, {1: ([], [], 0)}, []) - self.failUnlessEqual(answer, (True, {1:[],2:[]}) ) - self.failUnlessEqual(readv(b"si1", [], [(0,10)]), - {2: [b""]}) + self.assertThat(answer, Equals((True, {1:[],2:[]}))) + self.assertThat(readv(b"si1", [], [(0,10)]), + Equals({2: [b""]})) # delete sh2 by setting its size to zero answer = writev(b"si1", secrets, {2: ([], [], 0)}, []) - self.failUnlessEqual(answer, (True, {2:[]}) ) - self.failUnlessEqual(readv(b"si1", 
[], [(0,10)]), - {}) + self.assertThat(answer, Equals((True, {2:[]}))) + self.assertThat(readv(b"si1", [], [(0,10)]), + Equals({})) # and the bucket directory should now be gone - si = base32.b2a(b"si1") + si = base32.b2a(b"si1").decode() # note: this is a detail of the storage server implementation, and # may change in the future - si = bytes_to_native_str(si) # filesystem paths are native strings + # filesystem paths are native strings prefix = si[:2] prefixdir = os.path.join(self.workdir("test_remove"), "shares", prefix) bucketdir = os.path.join(prefixdir, si) - self.failUnless(os.path.exists(prefixdir), prefixdir) - self.failIf(os.path.exists(bucketdir), bucketdir) + self.assertTrue(os.path.exists(prefixdir), prefixdir) + self.assertFalse(os.path.exists(bucketdir), bucketdir) def test_writev_without_renew_lease(self): """ @@ -1443,7 +1846,7 @@ class MutableServer(unittest.TestCase): renew_leases=False, ) leases = list(ss.get_slot_leases(storage_index)) - self.assertEqual([], leases) + self.assertThat([], Equals(leases)) def test_get_slot_leases_empty_slot(self): """ @@ -1451,9 +1854,9 @@ class MutableServer(unittest.TestCase): shares, it returns an empty iterable. """ ss = self.create("test_get_slot_leases_empty_slot") - self.assertEqual( + self.assertThat( list(ss.get_slot_leases(b"si1")), - [], + Equals([]), ) def test_remove_non_present(self): @@ -1479,7 +1882,7 @@ class MutableServer(unittest.TestCase): # We don't even need to create any shares to exercise this # functionality. Just go straight to sending a truncate-to-zero # write. 
- testv_is_good, read_data = ss.remote_slot_testv_and_readv_and_writev( + testv_is_good, read_data = ss.slot_testv_and_readv_and_writev( storage_index=storage_index, secrets=secrets, test_and_write_vectors={ @@ -1489,15 +1892,16 @@ class MutableServer(unittest.TestCase): ) self.assertTrue(testv_is_good) - self.assertEqual({}, read_data) + self.assertThat({}, Equals(read_data)) -class MDMFProxies(unittest.TestCase, ShouldFailMixin): +class MDMFProxies(AsyncTestCase, ShouldFailMixin): def setUp(self): + super(MDMFProxies, self).setUp() self.sparent = LoggingServiceParent() self._lease_secret = itertools.count() self.ss = self.create("MDMFProxies storage test server") - self.rref = RemoteBucket(self.ss) + self.rref = RemoteBucket(FoolscapStorageServer(self.ss)) self.storage_server = _StorageServer(lambda: self.rref) self.secrets = (self.write_enabler(b"we_secret"), self.renew_secret(b"renew_secret"), @@ -1524,6 +1928,7 @@ class MDMFProxies(unittest.TestCase, ShouldFailMixin): def tearDown(self): + super(MDMFProxies, self).tearDown() self.sparent.stopService() shutil.rmtree(self.workdir("MDMFProxies storage test server")) @@ -1664,7 +2069,7 @@ class MDMFProxies(unittest.TestCase, ShouldFailMixin): If tail_segment=True, then I will write a share that has a smaller tail segment than other segments. """ - write = self.ss.remote_slot_testv_and_readv_and_writev + write = self.ss.slot_testv_and_readv_and_writev data = self.build_test_mdmf_share(tail_segment, empty) # Finally, we write the whole thing to the storage server in one # pass. 
@@ -1673,7 +2078,7 @@ class MDMFProxies(unittest.TestCase, ShouldFailMixin): tws[0] = (testvs, [(0, data)], None) readv = [(0, 1)] results = write(storage_index, self.secrets, tws, readv) - self.failUnless(results[0]) + self.assertTrue(results[0]) def build_test_sdmf_share(self, empty=False): @@ -1732,14 +2137,14 @@ class MDMFProxies(unittest.TestCase, ShouldFailMixin): empty=False): # Some tests need SDMF shares to verify that we can still # read them. This method writes one, which resembles but is not - write = self.ss.remote_slot_testv_and_readv_and_writev + write = self.ss.slot_testv_and_readv_and_writev share = self.build_test_sdmf_share(empty) testvs = [(0, 1, b"eq", b"")] tws = {} tws[0] = (testvs, [(0, share)], None) readv = [] results = write(storage_index, self.secrets, tws, readv) - self.failUnless(results[0]) + self.assertTrue(results[0]) def test_read(self): @@ -1749,8 +2154,8 @@ class MDMFProxies(unittest.TestCase, ShouldFailMixin): d = defer.succeed(None) def _check_block_and_salt(block_and_salt): (block, salt) = block_and_salt - self.failUnlessEqual(block, self.block) - self.failUnlessEqual(salt, self.salt) + self.assertThat(block, Equals(self.block)) + self.assertThat(salt, Equals(self.salt)) for i in range(6): d.addCallback(lambda ignored, i=i: @@ -1760,57 +2165,57 @@ class MDMFProxies(unittest.TestCase, ShouldFailMixin): d.addCallback(lambda ignored: mr.get_encprivkey()) d.addCallback(lambda encprivkey: - self.failUnlessEqual(self.encprivkey, encprivkey)) + self.assertThat(self.encprivkey, Equals(encprivkey))) d.addCallback(lambda ignored: mr.get_blockhashes()) d.addCallback(lambda blockhashes: - self.failUnlessEqual(self.block_hash_tree, blockhashes)) + self.assertThat(self.block_hash_tree, Equals(blockhashes))) d.addCallback(lambda ignored: mr.get_sharehashes()) d.addCallback(lambda sharehashes: - self.failUnlessEqual(self.share_hash_chain, sharehashes)) + self.assertThat(self.share_hash_chain, Equals(sharehashes))) d.addCallback(lambda 
ignored: mr.get_signature()) d.addCallback(lambda signature: - self.failUnlessEqual(signature, self.signature)) + self.assertThat(signature, Equals(self.signature))) d.addCallback(lambda ignored: mr.get_verification_key()) d.addCallback(lambda verification_key: - self.failUnlessEqual(verification_key, self.verification_key)) + self.assertThat(verification_key, Equals(self.verification_key))) d.addCallback(lambda ignored: mr.get_seqnum()) d.addCallback(lambda seqnum: - self.failUnlessEqual(seqnum, 0)) + self.assertThat(seqnum, Equals(0))) d.addCallback(lambda ignored: mr.get_root_hash()) d.addCallback(lambda root_hash: - self.failUnlessEqual(self.root_hash, root_hash)) + self.assertThat(self.root_hash, Equals(root_hash))) d.addCallback(lambda ignored: mr.get_seqnum()) d.addCallback(lambda seqnum: - self.failUnlessEqual(0, seqnum)) + self.assertThat(seqnum, Equals(0))) d.addCallback(lambda ignored: mr.get_encoding_parameters()) def _check_encoding_parameters(args): (k, n, segsize, datalen) = args - self.failUnlessEqual(k, 3) - self.failUnlessEqual(n, 10) - self.failUnlessEqual(segsize, 6) - self.failUnlessEqual(datalen, 36) + self.assertThat(k, Equals(3)) + self.assertThat(n, Equals(10)) + self.assertThat(segsize, Equals(6)) + self.assertThat(datalen, Equals(36)) d.addCallback(_check_encoding_parameters) d.addCallback(lambda ignored: mr.get_checkstring()) d.addCallback(lambda checkstring: - self.failUnlessEqual(checkstring, checkstring)) + self.assertThat(checkstring, Equals(checkstring))) return d @@ -1820,8 +2225,8 @@ class MDMFProxies(unittest.TestCase, ShouldFailMixin): d = mr.get_block_and_salt(5) def _check_tail_segment(results): block, salt = results - self.failUnlessEqual(len(block), 1) - self.failUnlessEqual(block, b"a") + self.assertThat(block, HasLength(1)) + self.assertThat(block, Equals(b"a")) d.addCallback(_check_tail_segment) return d @@ -1843,10 +2248,10 @@ class MDMFProxies(unittest.TestCase, ShouldFailMixin): d = mr.get_encoding_parameters() def 
_check_encoding_parameters(args): (k, n, segment_size, datalen) = args - self.failUnlessEqual(k, 3) - self.failUnlessEqual(n, 10) - self.failUnlessEqual(segment_size, 6) - self.failUnlessEqual(datalen, 36) + self.assertThat(k, Equals(3)) + self.assertThat(n, Equals(10)) + self.assertThat(segment_size, Equals(6)) + self.assertThat(datalen, Equals(36)) d.addCallback(_check_encoding_parameters) return d @@ -1856,7 +2261,7 @@ class MDMFProxies(unittest.TestCase, ShouldFailMixin): mr = MDMFSlotReadProxy(self.storage_server, b"si1", 0) d = mr.get_seqnum() d.addCallback(lambda seqnum: - self.failUnlessEqual(seqnum, 0)) + self.assertThat(seqnum, Equals(0))) return d @@ -1865,7 +2270,7 @@ class MDMFProxies(unittest.TestCase, ShouldFailMixin): mr = MDMFSlotReadProxy(self.storage_server, b"si1", 0) d = mr.get_root_hash() d.addCallback(lambda root_hash: - self.failUnlessEqual(root_hash, self.root_hash)) + self.assertThat(root_hash, Equals(self.root_hash))) return d @@ -1874,7 +2279,7 @@ class MDMFProxies(unittest.TestCase, ShouldFailMixin): mr = MDMFSlotReadProxy(self.storage_server, b"si1", 0) d = mr.get_checkstring() d.addCallback(lambda checkstring: - self.failUnlessEqual(checkstring, self.checkstring)) + self.assertThat(checkstring, Equals(self.checkstring))) return d @@ -1896,22 +2301,22 @@ class MDMFProxies(unittest.TestCase, ShouldFailMixin): mw.put_verification_key(self.verification_key) d = mw.finish_publishing() def _then(results): - self.failUnless(len(results), 2) + self.assertThat(results, HasLength(2)) result, readv = results - self.failUnless(result) - self.failIf(readv) + self.assertTrue(result) + self.assertFalse(readv) self.old_checkstring = mw.get_checkstring() mw.set_checkstring(b"") d.addCallback(_then) d.addCallback(lambda ignored: mw.finish_publishing()) def _then_again(results): - self.failUnlessEqual(len(results), 2) + self.assertThat(results, HasLength(2)) result, readvs = results - self.failIf(result) - self.failUnlessIn(0, readvs) + 
self.assertFalse(result) + self.assertThat(readvs, Contains(0)) readv = readvs[0][0] - self.failUnlessEqual(readv, self.old_checkstring) + self.assertThat(readv, Equals(self.old_checkstring)) d.addCallback(_then_again) # The checkstring remains the same for the rest of the process. return d @@ -1972,11 +2377,11 @@ class MDMFProxies(unittest.TestCase, ShouldFailMixin): def _check_success(results): result, readvs = results - self.failUnless(result) + self.assertTrue(result) def _check_failure(results): result, readvs = results - self.failIf(result) + self.assertFalse(result) def _write_share(mw): for i in range(6): @@ -2020,14 +2425,14 @@ class MDMFProxies(unittest.TestCase, ShouldFailMixin): # any point during the process, it should fail to write when we # tell it to write. def _check_failure(results): - self.failUnlessEqual(len(results), 2) + self.assertThat(results, HasLength(2)) res, d = results - self.failIf(res) + self.assertFalse(res) def _check_success(results): - self.failUnlessEqual(len(results), 2) + self.assertThat(results, HasLength(2)) res, d = results - self.failUnless(results) + self.assertTrue(results) mw = self._make_new_mw(b"si1", 0) mw.set_checkstring(b"this is a lie") @@ -2064,7 +2469,7 @@ class MDMFProxies(unittest.TestCase, ShouldFailMixin): # blocks. mw = self._make_new_mw(b"si1", 0) # Test writing some blocks. 
- read = self.ss.remote_slot_readv + read = self.ss.slot_readv expected_private_key_offset = struct.calcsize(MDMFHEADER) expected_sharedata_offset = struct.calcsize(MDMFHEADER) + \ PRIVATE_KEY_SIZE + \ @@ -2084,100 +2489,100 @@ class MDMFProxies(unittest.TestCase, ShouldFailMixin): mw.put_verification_key(self.verification_key) d = mw.finish_publishing() def _check_publish(results): - self.failUnlessEqual(len(results), 2) + self.assertThat(results, HasLength(2)) result, ign = results - self.failUnless(result, "publish failed") + self.assertTrue(result, "publish failed") for i in range(6): - self.failUnlessEqual(read(b"si1", [0], [(expected_sharedata_offset + (i * written_block_size), written_block_size)]), - {0: [written_block]}) + self.assertThat(read(b"si1", [0], [(expected_sharedata_offset + (i * written_block_size), written_block_size)]), + Equals({0: [written_block]})) - self.failUnlessEqual(len(self.encprivkey), 7) - self.failUnlessEqual(read(b"si1", [0], [(expected_private_key_offset, 7)]), - {0: [self.encprivkey]}) + self.assertThat(self.encprivkey, HasLength(7)) + self.assertThat(read(b"si1", [0], [(expected_private_key_offset, 7)]), + Equals({0: [self.encprivkey]})) expected_block_hash_offset = expected_sharedata_offset + \ (6 * written_block_size) - self.failUnlessEqual(len(self.block_hash_tree_s), 32 * 6) - self.failUnlessEqual(read(b"si1", [0], [(expected_block_hash_offset, 32 * 6)]), - {0: [self.block_hash_tree_s]}) + self.assertThat(self.block_hash_tree_s, HasLength(32 * 6)) + self.assertThat(read(b"si1", [0], [(expected_block_hash_offset, 32 * 6)]), + Equals({0: [self.block_hash_tree_s]})) expected_share_hash_offset = expected_private_key_offset + len(self.encprivkey) - self.failUnlessEqual(read(b"si1", [0],[(expected_share_hash_offset, (32 + 2) * 6)]), - {0: [self.share_hash_chain_s]}) + self.assertThat(read(b"si1", [0],[(expected_share_hash_offset, (32 + 2) * 6)]), + Equals({0: [self.share_hash_chain_s]})) - self.failUnlessEqual(read(b"si1", [0], 
[(9, 32)]), - {0: [self.root_hash]}) + self.assertThat(read(b"si1", [0], [(9, 32)]), + Equals({0: [self.root_hash]})) expected_signature_offset = expected_share_hash_offset + \ len(self.share_hash_chain_s) - self.failUnlessEqual(len(self.signature), 9) - self.failUnlessEqual(read(b"si1", [0], [(expected_signature_offset, 9)]), - {0: [self.signature]}) + self.assertThat(self.signature, HasLength(9)) + self.assertThat(read(b"si1", [0], [(expected_signature_offset, 9)]), + Equals({0: [self.signature]})) expected_verification_key_offset = expected_signature_offset + len(self.signature) - self.failUnlessEqual(len(self.verification_key), 6) - self.failUnlessEqual(read(b"si1", [0], [(expected_verification_key_offset, 6)]), - {0: [self.verification_key]}) + self.assertThat(self.verification_key, HasLength(6)) + self.assertThat(read(b"si1", [0], [(expected_verification_key_offset, 6)]), + Equals({0: [self.verification_key]})) signable = mw.get_signable() verno, seq, roothash, k, n, segsize, datalen = \ struct.unpack(">BQ32sBBQQ", signable) - self.failUnlessEqual(verno, 1) - self.failUnlessEqual(seq, 0) - self.failUnlessEqual(roothash, self.root_hash) - self.failUnlessEqual(k, 3) - self.failUnlessEqual(n, 10) - self.failUnlessEqual(segsize, 6) - self.failUnlessEqual(datalen, 36) + self.assertThat(verno, Equals(1)) + self.assertThat(seq, Equals(0)) + self.assertThat(roothash, Equals(self.root_hash)) + self.assertThat(k, Equals(3)) + self.assertThat(n, Equals(10)) + self.assertThat(segsize, Equals(6)) + self.assertThat(datalen, Equals(36)) expected_eof_offset = expected_block_hash_offset + \ len(self.block_hash_tree_s) # Check the version number to make sure that it is correct. 
expected_version_number = struct.pack(">B", 1) - self.failUnlessEqual(read(b"si1", [0], [(0, 1)]), - {0: [expected_version_number]}) + self.assertThat(read(b"si1", [0], [(0, 1)]), + Equals({0: [expected_version_number]})) # Check the sequence number to make sure that it is correct expected_sequence_number = struct.pack(">Q", 0) - self.failUnlessEqual(read(b"si1", [0], [(1, 8)]), - {0: [expected_sequence_number]}) + self.assertThat(read(b"si1", [0], [(1, 8)]), + Equals({0: [expected_sequence_number]})) # Check that the encoding parameters (k, N, segement size, data # length) are what they should be. These are 3, 10, 6, 36 expected_k = struct.pack(">B", 3) - self.failUnlessEqual(read(b"si1", [0], [(41, 1)]), - {0: [expected_k]}) + self.assertThat(read(b"si1", [0], [(41, 1)]), + Equals({0: [expected_k]})) expected_n = struct.pack(">B", 10) - self.failUnlessEqual(read(b"si1", [0], [(42, 1)]), - {0: [expected_n]}) + self.assertThat(read(b"si1", [0], [(42, 1)]), + Equals({0: [expected_n]})) expected_segment_size = struct.pack(">Q", 6) - self.failUnlessEqual(read(b"si1", [0], [(43, 8)]), - {0: [expected_segment_size]}) + self.assertThat(read(b"si1", [0], [(43, 8)]), + Equals({0: [expected_segment_size]})) expected_data_length = struct.pack(">Q", 36) - self.failUnlessEqual(read(b"si1", [0], [(51, 8)]), - {0: [expected_data_length]}) + self.assertThat(read(b"si1", [0], [(51, 8)]), + Equals({0: [expected_data_length]})) expected_offset = struct.pack(">Q", expected_private_key_offset) - self.failUnlessEqual(read(b"si1", [0], [(59, 8)]), - {0: [expected_offset]}) + self.assertThat(read(b"si1", [0], [(59, 8)]), + Equals({0: [expected_offset]})) expected_offset = struct.pack(">Q", expected_share_hash_offset) - self.failUnlessEqual(read(b"si1", [0], [(67, 8)]), - {0: [expected_offset]}) + self.assertThat(read(b"si1", [0], [(67, 8)]), + Equals({0: [expected_offset]})) expected_offset = struct.pack(">Q", expected_signature_offset) - self.failUnlessEqual(read(b"si1", [0], [(75, 
8)]), - {0: [expected_offset]}) + self.assertThat(read(b"si1", [0], [(75, 8)]), + Equals({0: [expected_offset]})) expected_offset = struct.pack(">Q", expected_verification_key_offset) - self.failUnlessEqual(read(b"si1", [0], [(83, 8)]), - {0: [expected_offset]}) + self.assertThat(read(b"si1", [0], [(83, 8)]), + Equals({0: [expected_offset]})) expected_offset = struct.pack(">Q", expected_verification_key_offset + len(self.verification_key)) - self.failUnlessEqual(read(b"si1", [0], [(91, 8)]), - {0: [expected_offset]}) + self.assertThat(read(b"si1", [0], [(91, 8)]), + Equals({0: [expected_offset]})) expected_offset = struct.pack(">Q", expected_sharedata_offset) - self.failUnlessEqual(read(b"si1", [0], [(99, 8)]), - {0: [expected_offset]}) + self.assertThat(read(b"si1", [0], [(99, 8)]), + Equals({0: [expected_offset]})) expected_offset = struct.pack(">Q", expected_block_hash_offset) - self.failUnlessEqual(read(b"si1", [0], [(107, 8)]), - {0: [expected_offset]}) + self.assertThat(read(b"si1", [0], [(107, 8)]), + Equals({0: [expected_offset]})) expected_offset = struct.pack(">Q", expected_eof_offset) - self.failUnlessEqual(read(b"si1", [0], [(115, 8)]), - {0: [expected_offset]}) + self.assertThat(read(b"si1", [0], [(115, 8)]), + Equals({0: [expected_offset]})) d.addCallback(_check_publish) return d @@ -2392,8 +2797,8 @@ class MDMFProxies(unittest.TestCase, ShouldFailMixin): mr = MDMFSlotReadProxy(self.storage_server, b"si1", 0) def _check_block_and_salt(block_and_salt): (block, salt) = block_and_salt - self.failUnlessEqual(block, self.block) - self.failUnlessEqual(salt, self.salt) + self.assertThat(block, Equals(self.block)) + self.assertThat(salt, Equals(self.salt)) for i in range(6): d.addCallback(lambda ignored, i=i: @@ -2403,52 +2808,52 @@ class MDMFProxies(unittest.TestCase, ShouldFailMixin): d.addCallback(lambda ignored: mr.get_encprivkey()) d.addCallback(lambda encprivkey: - self.failUnlessEqual(self.encprivkey, encprivkey)) + self.assertThat(self.encprivkey, 
Equals(encprivkey))) d.addCallback(lambda ignored: mr.get_blockhashes()) d.addCallback(lambda blockhashes: - self.failUnlessEqual(self.block_hash_tree, blockhashes)) + self.assertThat(self.block_hash_tree, Equals(blockhashes))) d.addCallback(lambda ignored: mr.get_sharehashes()) d.addCallback(lambda sharehashes: - self.failUnlessEqual(self.share_hash_chain, sharehashes)) + self.assertThat(self.share_hash_chain, Equals(sharehashes))) d.addCallback(lambda ignored: mr.get_signature()) d.addCallback(lambda signature: - self.failUnlessEqual(signature, self.signature)) + self.assertThat(signature, Equals(self.signature))) d.addCallback(lambda ignored: mr.get_verification_key()) d.addCallback(lambda verification_key: - self.failUnlessEqual(verification_key, self.verification_key)) + self.assertThat(verification_key, Equals(self.verification_key))) d.addCallback(lambda ignored: mr.get_seqnum()) d.addCallback(lambda seqnum: - self.failUnlessEqual(seqnum, 0)) + self.assertThat(seqnum, Equals(0))) d.addCallback(lambda ignored: mr.get_root_hash()) d.addCallback(lambda root_hash: - self.failUnlessEqual(self.root_hash, root_hash)) + self.assertThat(self.root_hash, Equals(root_hash))) d.addCallback(lambda ignored: mr.get_encoding_parameters()) def _check_encoding_parameters(args): (k, n, segsize, datalen) = args - self.failUnlessEqual(k, 3) - self.failUnlessEqual(n, 10) - self.failUnlessEqual(segsize, 6) - self.failUnlessEqual(datalen, 36) + self.assertThat(k, Equals(3)) + self.assertThat(n, Equals(10)) + self.assertThat(segsize, Equals(6)) + self.assertThat(datalen, Equals(36)) d.addCallback(_check_encoding_parameters) d.addCallback(lambda ignored: mr.get_checkstring()) d.addCallback(lambda checkstring: - self.failUnlessEqual(checkstring, mw.get_checkstring())) + self.assertThat(checkstring, Equals(mw.get_checkstring()))) return d @@ -2460,7 +2865,7 @@ class MDMFProxies(unittest.TestCase, ShouldFailMixin): mr = MDMFSlotReadProxy(self.storage_server, b"si1", 0) d = mr.is_sdmf() 
d.addCallback(lambda issdmf: - self.failUnless(issdmf)) + self.assertTrue(issdmf)) return d @@ -2473,7 +2878,7 @@ class MDMFProxies(unittest.TestCase, ShouldFailMixin): d.addCallback(lambda ignored: mr.is_sdmf()) d.addCallback(lambda issdmf: - self.failUnless(issdmf)) + self.assertTrue(issdmf)) # What do we need to read? # - The sharedata @@ -2486,51 +2891,51 @@ class MDMFProxies(unittest.TestCase, ShouldFailMixin): # bytes in size. The share is composed entirely of the # letter a. self.block contains 2 as, so 6 * self.block is # what we are looking for. - self.failUnlessEqual(block, self.block * 6) - self.failUnlessEqual(salt, self.salt) + self.assertThat(block, Equals(self.block * 6)) + self.assertThat(salt, Equals(self.salt)) d.addCallback(_check_block_and_salt) # - The blockhashes d.addCallback(lambda ignored: mr.get_blockhashes()) d.addCallback(lambda blockhashes: - self.failUnlessEqual(self.block_hash_tree, - blockhashes, + self.assertThat(self.block_hash_tree, + Equals(blockhashes), blockhashes)) # - The sharehashes d.addCallback(lambda ignored: mr.get_sharehashes()) d.addCallback(lambda sharehashes: - self.failUnlessEqual(self.share_hash_chain, - sharehashes)) + self.assertThat(self.share_hash_chain, + Equals(sharehashes))) # - The keys d.addCallback(lambda ignored: mr.get_encprivkey()) d.addCallback(lambda encprivkey: - self.failUnlessEqual(encprivkey, self.encprivkey, encprivkey)) + self.assertThat(encprivkey, Equals(self.encprivkey), encprivkey)) d.addCallback(lambda ignored: mr.get_verification_key()) d.addCallback(lambda verification_key: - self.failUnlessEqual(verification_key, - self.verification_key, + self.assertThat(verification_key, + Equals(self.verification_key), verification_key)) # - The signature d.addCallback(lambda ignored: mr.get_signature()) d.addCallback(lambda signature: - self.failUnlessEqual(signature, self.signature, signature)) + self.assertThat(signature, Equals(self.signature), signature)) # - The sequence number 
d.addCallback(lambda ignored: mr.get_seqnum()) d.addCallback(lambda seqnum: - self.failUnlessEqual(seqnum, 0, seqnum)) + self.assertThat(seqnum, Equals(0), seqnum)) # - The root hash d.addCallback(lambda ignored: mr.get_root_hash()) d.addCallback(lambda root_hash: - self.failUnlessEqual(root_hash, self.root_hash, root_hash)) + self.assertThat(root_hash, Equals(self.root_hash), root_hash)) return d @@ -2544,7 +2949,7 @@ class MDMFProxies(unittest.TestCase, ShouldFailMixin): d.addCallback(lambda ignored: mr.is_sdmf()) d.addCallback(lambda issdmf: - self.failUnless(issdmf)) + self.assertTrue(issdmf)) d.addCallback(lambda ignored: self.shouldFail(LayoutInvalid, "test bad segment", None, @@ -2572,8 +2977,8 @@ class MDMFProxies(unittest.TestCase, ShouldFailMixin): d.addCallback(lambda mr: mr.get_verinfo()) def _check_verinfo(verinfo): - self.failUnless(verinfo) - self.failUnlessEqual(len(verinfo), 9) + self.assertTrue(verinfo) + self.assertThat(verinfo, HasLength(9)) (seqnum, root_hash, salt_hash, @@ -2583,12 +2988,12 @@ class MDMFProxies(unittest.TestCase, ShouldFailMixin): n, prefix, offsets) = verinfo - self.failUnlessEqual(seqnum, 0) - self.failUnlessEqual(root_hash, self.root_hash) - self.failUnlessEqual(segsize, 6) - self.failUnlessEqual(datalen, 36) - self.failUnlessEqual(k, 3) - self.failUnlessEqual(n, 10) + self.assertThat(seqnum, Equals(0)) + self.assertThat(root_hash, Equals(self.root_hash)) + self.assertThat(segsize, Equals(6)) + self.assertThat(datalen, Equals(36)) + self.assertThat(k, Equals(3)) + self.assertThat(n, Equals(10)) expected_prefix = struct.pack(MDMFSIGNABLEHEADER, 1, seqnum, @@ -2597,8 +3002,8 @@ class MDMFProxies(unittest.TestCase, ShouldFailMixin): n, segsize, datalen) - self.failUnlessEqual(expected_prefix, prefix) - self.failUnlessEqual(self.rref.read_count, 0) + self.assertThat(expected_prefix, Equals(prefix)) + self.assertThat(self.rref.read_count, Equals(0)) d.addCallback(_check_verinfo) # This is not enough data to read a block and a 
share, so the # wrapper should attempt to read this from the remote server. @@ -2607,9 +3012,9 @@ class MDMFProxies(unittest.TestCase, ShouldFailMixin): mr.get_block_and_salt(0)) def _check_block_and_salt(block_and_salt): (block, salt) = block_and_salt - self.failUnlessEqual(block, self.block) - self.failUnlessEqual(salt, self.salt) - self.failUnlessEqual(self.rref.read_count, 1) + self.assertThat(block, Equals(self.block)) + self.assertThat(salt, Equals(self.salt)) + self.assertThat(self.rref.read_count, Equals(1)) # This should be enough data to read one block. d.addCallback(_make_mr, 123 + PRIVATE_KEY_SIZE + SIGNATURE_SIZE + VERIFICATION_KEY_SIZE + SHARE_HASH_CHAIN_SIZE + 140) d.addCallback(lambda mr: @@ -2633,8 +3038,8 @@ class MDMFProxies(unittest.TestCase, ShouldFailMixin): d.addCallback(lambda mr: mr.get_verinfo()) def _check_verinfo(verinfo): - self.failUnless(verinfo) - self.failUnlessEqual(len(verinfo), 9) + self.assertTrue(verinfo) + self.assertThat(verinfo, HasLength(9)) (seqnum, root_hash, salt, @@ -2644,13 +3049,13 @@ class MDMFProxies(unittest.TestCase, ShouldFailMixin): n, prefix, offsets) = verinfo - self.failUnlessEqual(seqnum, 0) - self.failUnlessEqual(root_hash, self.root_hash) - self.failUnlessEqual(salt, self.salt) - self.failUnlessEqual(segsize, 36) - self.failUnlessEqual(datalen, 36) - self.failUnlessEqual(k, 3) - self.failUnlessEqual(n, 10) + self.assertThat(seqnum, Equals(0)) + self.assertThat(root_hash, Equals(self.root_hash)) + self.assertThat(salt, Equals(self.salt)) + self.assertThat(segsize, Equals(36)) + self.assertThat(datalen, Equals(36)) + self.assertThat(k, Equals(3)) + self.assertThat(n, Equals(10)) expected_prefix = struct.pack(SIGNED_PREFIX, 0, seqnum, @@ -2660,8 +3065,8 @@ class MDMFProxies(unittest.TestCase, ShouldFailMixin): n, segsize, datalen) - self.failUnlessEqual(expected_prefix, prefix) - self.failUnlessEqual(self.rref.read_count, 0) + self.assertThat(expected_prefix, Equals(prefix)) + 
self.assertThat(self.rref.read_count, Equals(0)) d.addCallback(_check_verinfo) # This shouldn't be enough to read any share data. d.addCallback(_make_mr, 123) @@ -2669,11 +3074,11 @@ class MDMFProxies(unittest.TestCase, ShouldFailMixin): mr.get_block_and_salt(0)) def _check_block_and_salt(block_and_salt): (block, salt) = block_and_salt - self.failUnlessEqual(block, self.block * 6) - self.failUnlessEqual(salt, self.salt) + self.assertThat(block, Equals(self.block * 6)) + self.assertThat(salt, Equals(self.salt)) # TODO: Fix the read routine so that it reads only the data # that it has cached if it can't read all of it. - self.failUnlessEqual(self.rref.read_count, 2) + self.assertThat(self.rref.read_count, Equals(2)) # This should be enough to read share data. d.addCallback(_make_mr, self.offsets['share_data']) @@ -2695,12 +3100,12 @@ class MDMFProxies(unittest.TestCase, ShouldFailMixin): d.addCallback(lambda ignored: mr.get_encoding_parameters()) def _check_encoding_parameters(params): - self.failUnlessEqual(len(params), 4) + self.assertThat(params, HasLength(4)) k, n, segsize, datalen = params - self.failUnlessEqual(k, 3) - self.failUnlessEqual(n, 10) - self.failUnlessEqual(segsize, 0) - self.failUnlessEqual(datalen, 0) + self.assertThat(k, Equals(3)) + self.assertThat(n, Equals(10)) + self.assertThat(segsize, Equals(0)) + self.assertThat(datalen, Equals(0)) d.addCallback(_check_encoding_parameters) # We should not be able to fetch a block, since there are no @@ -2721,12 +3126,12 @@ class MDMFProxies(unittest.TestCase, ShouldFailMixin): d.addCallback(lambda ignored: mr.get_encoding_parameters()) def _check_encoding_parameters(params): - self.failUnlessEqual(len(params), 4) + self.assertThat(params, HasLength(4)) k, n, segsize, datalen = params - self.failUnlessEqual(k, 3) - self.failUnlessEqual(n, 10) - self.failUnlessEqual(segsize, 0) - self.failUnlessEqual(datalen, 0) + self.assertThat(k, Equals(3)) + self.assertThat(n, Equals(10)) + self.assertThat(segsize, 
Equals(0)) + self.assertThat(datalen, Equals(0)) d.addCallback(_check_encoding_parameters) # It does not make sense to get a block in this format, so we @@ -2746,8 +3151,8 @@ class MDMFProxies(unittest.TestCase, ShouldFailMixin): d.addCallback(lambda ignored: mr.get_verinfo()) def _check_verinfo(verinfo): - self.failUnless(verinfo) - self.failUnlessEqual(len(verinfo), 9) + self.assertTrue(verinfo) + self.assertThat(verinfo, HasLength(9)) (seqnum, root_hash, salt, @@ -2757,13 +3162,13 @@ class MDMFProxies(unittest.TestCase, ShouldFailMixin): n, prefix, offsets) = verinfo - self.failUnlessEqual(seqnum, 0) - self.failUnlessEqual(root_hash, self.root_hash) - self.failUnlessEqual(salt, self.salt) - self.failUnlessEqual(segsize, 36) - self.failUnlessEqual(datalen, 36) - self.failUnlessEqual(k, 3) - self.failUnlessEqual(n, 10) + self.assertThat(seqnum, Equals(0)) + self.assertThat(root_hash, Equals(self.root_hash)) + self.assertThat(salt, Equals(self.salt)) + self.assertThat(segsize, Equals(36)) + self.assertThat(datalen, Equals(36)) + self.assertThat(k, Equals(3)) + self.assertThat(n, Equals(10)) expected_prefix = struct.pack(">BQ32s16s BBQQ", 0, seqnum, @@ -2773,8 +3178,8 @@ class MDMFProxies(unittest.TestCase, ShouldFailMixin): n, segsize, datalen) - self.failUnlessEqual(prefix, expected_prefix) - self.failUnlessEqual(offsets, self.offsets) + self.assertThat(prefix, Equals(expected_prefix)) + self.assertThat(offsets, Equals(self.offsets)) d.addCallback(_check_verinfo) return d @@ -2786,8 +3191,8 @@ class MDMFProxies(unittest.TestCase, ShouldFailMixin): d.addCallback(lambda ignored: mr.get_verinfo()) def _check_verinfo(verinfo): - self.failUnless(verinfo) - self.failUnlessEqual(len(verinfo), 9) + self.assertTrue(verinfo) + self.assertThat(verinfo, HasLength(9)) (seqnum, root_hash, IV, @@ -2797,13 +3202,13 @@ class MDMFProxies(unittest.TestCase, ShouldFailMixin): n, prefix, offsets) = verinfo - self.failUnlessEqual(seqnum, 0) - self.failUnlessEqual(root_hash, 
self.root_hash) - self.failIf(IV) - self.failUnlessEqual(segsize, 6) - self.failUnlessEqual(datalen, 36) - self.failUnlessEqual(k, 3) - self.failUnlessEqual(n, 10) + self.assertThat(seqnum, Equals(0)) + self.assertThat(root_hash, Equals(self.root_hash)) + self.assertFalse(IV) + self.assertThat(segsize, Equals(6)) + self.assertThat(datalen, Equals(36)) + self.assertThat(k, Equals(3)) + self.assertThat(n, Equals(10)) expected_prefix = struct.pack(">BQ32s BBQQ", 1, seqnum, @@ -2812,8 +3217,8 @@ class MDMFProxies(unittest.TestCase, ShouldFailMixin): n, segsize, datalen) - self.failUnlessEqual(prefix, expected_prefix) - self.failUnlessEqual(offsets, self.offsets) + self.assertThat(prefix, Equals(expected_prefix)) + self.assertThat(offsets, Equals(self.offsets)) d.addCallback(_check_verinfo) return d @@ -2849,15 +3254,15 @@ class MDMFProxies(unittest.TestCase, ShouldFailMixin): sdmfr.put_verification_key(self.verification_key) # Now check to make sure that nothing has been written yet. - self.failUnlessEqual(self.rref.write_count, 0) + self.assertThat(self.rref.write_count, Equals(0)) # Now finish publishing d = sdmfr.finish_publishing() def _then(ignored): - self.failUnlessEqual(self.rref.write_count, 1) - read = self.ss.remote_slot_readv - self.failUnlessEqual(read(b"si1", [0], [(0, len(data))]), - {0: [data]}) + self.assertThat(self.rref.write_count, Equals(1)) + read = self.ss.slot_readv + self.assertThat(read(b"si1", [0], [(0, len(data))]), + Equals({0: [data]})) d.addCallback(_then) return d @@ -2893,11 +3298,11 @@ class MDMFProxies(unittest.TestCase, ShouldFailMixin): sdmfw.put_verification_key(self.verification_key) # We shouldn't have a checkstring yet - self.failUnlessEqual(sdmfw.get_checkstring(), b"") + self.assertThat(sdmfw.get_checkstring(), Equals(b"")) d = sdmfw.finish_publishing() def _then(results): - self.failIf(results[0]) + self.assertFalse(results[0]) # this is the correct checkstring self._expected_checkstring = results[1][0][0] return 
self._expected_checkstring @@ -2907,27 +3312,27 @@ class MDMFProxies(unittest.TestCase, ShouldFailMixin): d.addCallback(lambda ignored: sdmfw.get_checkstring()) d.addCallback(lambda checkstring: - self.failUnlessEqual(checkstring, self._expected_checkstring)) + self.assertThat(checkstring, Equals(self._expected_checkstring))) d.addCallback(lambda ignored: sdmfw.finish_publishing()) def _then_again(results): - self.failUnless(results[0]) - read = self.ss.remote_slot_readv - self.failUnlessEqual(read(b"si1", [0], [(1, 8)]), - {0: [struct.pack(">Q", 1)]}) - self.failUnlessEqual(read(b"si1", [0], [(9, len(data) - 9)]), - {0: [data[9:]]}) + self.assertTrue(results[0]) + read = self.ss.slot_readv + self.assertThat(read(b"si1", [0], [(1, 8)]), + Equals({0: [struct.pack(">Q", 1)]})) + self.assertThat(read(b"si1", [0], [(9, len(data) - 9)]), + Equals({0: [data[9:]]})) d.addCallback(_then_again) return d -class Stats(unittest.TestCase): +class Stats(SyncTestCase): def setUp(self): + super(Stats, self).setUp() self.sparent = LoggingServiceParent() self._lease_secret = itertools.count() - def tearDown(self): - return self.sparent.stopService() + self.addCleanup(self.sparent.stopService) def workdir(self, name): basedir = os.path.join("storage", "Server", name) @@ -2953,89 +3358,430 @@ class Stats(unittest.TestCase): output = ss.get_latencies() - self.failUnlessEqual(sorted(output.keys()), - sorted(["allocate", "renew", "cancel", "write", "get"])) - self.failUnlessEqual(len(ss.latencies["allocate"]), 1000) - self.failUnless(abs(output["allocate"]["mean"] - 9500) < 1, output) - self.failUnless(abs(output["allocate"]["01_0_percentile"] - 9010) < 1, output) - self.failUnless(abs(output["allocate"]["10_0_percentile"] - 9100) < 1, output) - self.failUnless(abs(output["allocate"]["50_0_percentile"] - 9500) < 1, output) - self.failUnless(abs(output["allocate"]["90_0_percentile"] - 9900) < 1, output) - self.failUnless(abs(output["allocate"]["95_0_percentile"] - 9950) < 1, output) - 
self.failUnless(abs(output["allocate"]["99_0_percentile"] - 9990) < 1, output) - self.failUnless(abs(output["allocate"]["99_9_percentile"] - 9999) < 1, output) + self.assertThat(sorted(output.keys()), + Equals(sorted(["allocate", "renew", "cancel", "write", "get"]))) + self.assertThat(ss.latencies["allocate"], HasLength(1000)) + self.assertTrue(abs(output["allocate"]["mean"] - 9500) < 1, output) + self.assertTrue(abs(output["allocate"]["01_0_percentile"] - 9010) < 1, output) + self.assertTrue(abs(output["allocate"]["10_0_percentile"] - 9100) < 1, output) + self.assertTrue(abs(output["allocate"]["50_0_percentile"] - 9500) < 1, output) + self.assertTrue(abs(output["allocate"]["90_0_percentile"] - 9900) < 1, output) + self.assertTrue(abs(output["allocate"]["95_0_percentile"] - 9950) < 1, output) + self.assertTrue(abs(output["allocate"]["99_0_percentile"] - 9990) < 1, output) + self.assertTrue(abs(output["allocate"]["99_9_percentile"] - 9999) < 1, output) - self.failUnlessEqual(len(ss.latencies["renew"]), 1000) - self.failUnless(abs(output["renew"]["mean"] - 500) < 1, output) - self.failUnless(abs(output["renew"]["01_0_percentile"] - 10) < 1, output) - self.failUnless(abs(output["renew"]["10_0_percentile"] - 100) < 1, output) - self.failUnless(abs(output["renew"]["50_0_percentile"] - 500) < 1, output) - self.failUnless(abs(output["renew"]["90_0_percentile"] - 900) < 1, output) - self.failUnless(abs(output["renew"]["95_0_percentile"] - 950) < 1, output) - self.failUnless(abs(output["renew"]["99_0_percentile"] - 990) < 1, output) - self.failUnless(abs(output["renew"]["99_9_percentile"] - 999) < 1, output) + self.assertThat(ss.latencies["renew"], HasLength(1000)) + self.assertTrue(abs(output["renew"]["mean"] - 500) < 1, output) + self.assertTrue(abs(output["renew"]["01_0_percentile"] - 10) < 1, output) + self.assertTrue(abs(output["renew"]["10_0_percentile"] - 100) < 1, output) + self.assertTrue(abs(output["renew"]["50_0_percentile"] - 500) < 1, output) + 
self.assertTrue(abs(output["renew"]["90_0_percentile"] - 900) < 1, output) + self.assertTrue(abs(output["renew"]["95_0_percentile"] - 950) < 1, output) + self.assertTrue(abs(output["renew"]["99_0_percentile"] - 990) < 1, output) + self.assertTrue(abs(output["renew"]["99_9_percentile"] - 999) < 1, output) - self.failUnlessEqual(len(ss.latencies["write"]), 20) - self.failUnless(abs(output["write"]["mean"] - 9) < 1, output) - self.failUnless(output["write"]["01_0_percentile"] is None, output) - self.failUnless(abs(output["write"]["10_0_percentile"] - 2) < 1, output) - self.failUnless(abs(output["write"]["50_0_percentile"] - 10) < 1, output) - self.failUnless(abs(output["write"]["90_0_percentile"] - 18) < 1, output) - self.failUnless(abs(output["write"]["95_0_percentile"] - 19) < 1, output) - self.failUnless(output["write"]["99_0_percentile"] is None, output) - self.failUnless(output["write"]["99_9_percentile"] is None, output) + self.assertThat(ss.latencies["write"], HasLength(20)) + self.assertTrue(abs(output["write"]["mean"] - 9) < 1, output) + self.assertTrue(output["write"]["01_0_percentile"] is None, output) + self.assertTrue(abs(output["write"]["10_0_percentile"] - 2) < 1, output) + self.assertTrue(abs(output["write"]["50_0_percentile"] - 10) < 1, output) + self.assertTrue(abs(output["write"]["90_0_percentile"] - 18) < 1, output) + self.assertTrue(abs(output["write"]["95_0_percentile"] - 19) < 1, output) + self.assertTrue(output["write"]["99_0_percentile"] is None, output) + self.assertTrue(output["write"]["99_9_percentile"] is None, output) - self.failUnlessEqual(len(ss.latencies["cancel"]), 10) - self.failUnless(abs(output["cancel"]["mean"] - 9) < 1, output) - self.failUnless(output["cancel"]["01_0_percentile"] is None, output) - self.failUnless(abs(output["cancel"]["10_0_percentile"] - 2) < 1, output) - self.failUnless(abs(output["cancel"]["50_0_percentile"] - 10) < 1, output) - self.failUnless(abs(output["cancel"]["90_0_percentile"] - 18) < 1, output) - 
self.failUnless(output["cancel"]["95_0_percentile"] is None, output) - self.failUnless(output["cancel"]["99_0_percentile"] is None, output) - self.failUnless(output["cancel"]["99_9_percentile"] is None, output) + self.assertThat(ss.latencies["cancel"], HasLength(10)) + self.assertTrue(abs(output["cancel"]["mean"] - 9) < 1, output) + self.assertTrue(output["cancel"]["01_0_percentile"] is None, output) + self.assertTrue(abs(output["cancel"]["10_0_percentile"] - 2) < 1, output) + self.assertTrue(abs(output["cancel"]["50_0_percentile"] - 10) < 1, output) + self.assertTrue(abs(output["cancel"]["90_0_percentile"] - 18) < 1, output) + self.assertTrue(output["cancel"]["95_0_percentile"] is None, output) + self.assertTrue(output["cancel"]["99_0_percentile"] is None, output) + self.assertTrue(output["cancel"]["99_9_percentile"] is None, output) - self.failUnlessEqual(len(ss.latencies["get"]), 1) - self.failUnless(output["get"]["mean"] is None, output) - self.failUnless(output["get"]["01_0_percentile"] is None, output) - self.failUnless(output["get"]["10_0_percentile"] is None, output) - self.failUnless(output["get"]["50_0_percentile"] is None, output) - self.failUnless(output["get"]["90_0_percentile"] is None, output) - self.failUnless(output["get"]["95_0_percentile"] is None, output) - self.failUnless(output["get"]["99_0_percentile"] is None, output) - self.failUnless(output["get"]["99_9_percentile"] is None, output) + self.assertThat(ss.latencies["get"], HasLength(1)) + self.assertTrue(output["get"]["mean"] is None, output) + self.assertTrue(output["get"]["01_0_percentile"] is None, output) + self.assertTrue(output["get"]["10_0_percentile"] is None, output) + self.assertTrue(output["get"]["50_0_percentile"] is None, output) + self.assertTrue(output["get"]["90_0_percentile"] is None, output) + self.assertTrue(output["get"]["95_0_percentile"] is None, output) + self.assertTrue(output["get"]["99_0_percentile"] is None, output) + 
self.assertTrue(output["get"]["99_9_percentile"] is None, output) +immutable_schemas = strategies.sampled_from(list(ALL_IMMUTABLE_SCHEMAS)) -class ShareFileTests(unittest.TestCase): +class ShareFileTests(SyncTestCase): """Tests for allmydata.storage.immutable.ShareFile.""" - def get_sharefile(self): - sf = ShareFile(self.mktemp(), max_size=1000, create=True) + def get_sharefile(self, **kwargs): + sf = ShareFile(self.mktemp(), max_size=1000, create=True, **kwargs) sf.write_share_data(0, b"abc") sf.write_share_data(2, b"DEF") # Should be b'abDEF' now. return sf - def test_read_write(self): + @given(immutable_schemas) + def test_read_write(self, schema): """Basic writes can be read.""" - sf = self.get_sharefile() + sf = self.get_sharefile(schema=schema) self.assertEqual(sf.read_share_data(0, 3), b"abD") self.assertEqual(sf.read_share_data(1, 4), b"bDEF") - def test_reads_beyond_file_end(self): + @given(immutable_schemas) + def test_reads_beyond_file_end(self, schema): """Reads beyond the file size are truncated.""" - sf = self.get_sharefile() + sf = self.get_sharefile(schema=schema) self.assertEqual(sf.read_share_data(0, 10), b"abDEF") self.assertEqual(sf.read_share_data(5, 10), b"") - def test_too_large_write(self): + @given(immutable_schemas) + def test_too_large_write(self, schema): """Can't do write larger than file size.""" - sf = self.get_sharefile() + sf = self.get_sharefile(schema=schema) with self.assertRaises(DataTooLargeError): sf.write_share_data(0, b"x" * 3000) - def test_no_leases_cancelled(self): + @given(immutable_schemas) + def test_no_leases_cancelled(self, schema): """If no leases were cancelled, IndexError is raised.""" - sf = self.get_sharefile() + sf = self.get_sharefile(schema=schema) with self.assertRaises(IndexError): sf.cancel_lease(b"garbage") + + @given(immutable_schemas) + def test_long_lease_count_format(self, schema): + """ + ``ShareFile.__init__`` raises ``ValueError`` if the lease count format + given is longer than one character. 
+ """ + with self.assertRaises(ValueError): + self.get_sharefile(schema=schema, lease_count_format="BB") + + @given(immutable_schemas) + def test_large_lease_count_format(self, schema): + """ + ``ShareFile.__init__`` raises ``ValueError`` if the lease count format + encodes to a size larger than 8 bytes. + """ + with self.assertRaises(ValueError): + self.get_sharefile(schema=schema, lease_count_format="Q") + + @given(immutable_schemas) + def test_avoid_lease_overflow(self, schema): + """ + If the share file already has the maximum number of leases supported then + ``ShareFile.add_lease`` raises ``struct.error`` and makes no changes + to the share file contents. + """ + make_lease = partial( + LeaseInfo, + renew_secret=b"r" * 32, + cancel_secret=b"c" * 32, + expiration_time=2 ** 31, + ) + # Make it a little easier to reach the condition by limiting the + # number of leases to only 255. + sf = self.get_sharefile(schema=schema, lease_count_format="B") + + # Add the leases. + for i in range(2 ** 8 - 1): + lease = make_lease(owner_num=i) + sf.add_lease(lease) + + # Capture the state of the share file at this point so we can + # determine whether the next operation modifies it or not. + with open(sf.home, "rb") as f: + before_data = f.read() + + # It is not possible to add a 256th lease. + lease = make_lease(owner_num=256) + with self.assertRaises(struct.error): + sf.add_lease(lease) + + # Compare the share file state to what we captured earlier. Any + # change is a bug. + with open(sf.home, "rb") as f: + after_data = f.read() + + self.assertEqual(before_data, after_data) + + @given(immutable_schemas) + def test_renew_secret(self, schema): + """ + A lease loaded from an immutable share file at any schema version can have + its renew secret verified. 
+ """ + renew_secret = b"r" * 32 + cancel_secret = b"c" * 32 + expiration_time = 2 ** 31 + + sf = self.get_sharefile(schema=schema) + lease = LeaseInfo( + owner_num=0, + renew_secret=renew_secret, + cancel_secret=cancel_secret, + expiration_time=expiration_time, + ) + sf.add_lease(lease) + (loaded_lease,) = sf.get_leases() + self.assertTrue(loaded_lease.is_renew_secret(renew_secret)) + + @given(immutable_schemas) + def test_cancel_secret(self, schema): + """ + A lease loaded from an immutable share file at any schema version can have + its cancel secret verified. + """ + renew_secret = b"r" * 32 + cancel_secret = b"c" * 32 + expiration_time = 2 ** 31 + + sf = self.get_sharefile(schema=schema) + lease = LeaseInfo( + owner_num=0, + renew_secret=renew_secret, + cancel_secret=cancel_secret, + expiration_time=expiration_time, + ) + sf.add_lease(lease) + (loaded_lease,) = sf.get_leases() + self.assertTrue(loaded_lease.is_cancel_secret(cancel_secret)) + +mutable_schemas = strategies.sampled_from(list(ALL_MUTABLE_SCHEMAS)) + +class MutableShareFileTests(SyncTestCase): + """ + Tests for allmydata.storage.mutable.MutableShareFile. + """ + def get_sharefile(self, **kwargs): + return MutableShareFile(self.mktemp(), **kwargs) + + @given( + schema=mutable_schemas, + nodeid=strategies.just(b"x" * 20), + write_enabler=strategies.just(b"y" * 32), + datav=strategies.lists( + # Limit the max size of these so we don't write *crazy* amounts of + # data to disk. + strategies.tuples(offsets(), strategies.binary(max_size=2 ** 8)), + max_size=2 ** 8, + ), + new_length=offsets(), + ) + def test_readv_reads_share_data(self, schema, nodeid, write_enabler, datav, new_length): + """ + ``MutableShareFile.readv`` returns bytes from the share data portion + of the share file. 
+ """ + sf = self.get_sharefile(schema=schema) + sf.create(my_nodeid=nodeid, write_enabler=write_enabler) + sf.writev(datav=datav, new_length=new_length) + + # Apply all of the writes to a simple in-memory buffer so we can + # resolve the final state of the share data. In particular, this + # helps deal with overlapping writes which otherwise make it tricky to + # figure out what data to expect to be able to read back. + buf = BytesIO() + for (offset, data) in datav: + buf.seek(offset) + buf.write(data) + buf.truncate(new_length) + + # Using that buffer, determine the expected result of a readv for all + # of the data just written. + def read_from_buf(offset, length): + buf.seek(offset) + return buf.read(length) + expected_data = list( + read_from_buf(offset, len(data)) + for (offset, data) + in datav + ) + + # Perform a read that gives back all of the data written to the share + # file. + read_vectors = list((offset, len(data)) for (offset, data) in datav) + read_data = sf.readv(read_vectors) + + # Make sure the read reproduces the value we computed using our local + # buffer. + self.assertEqual(expected_data, read_data) + + @given( + schema=mutable_schemas, + nodeid=strategies.just(b"x" * 20), + write_enabler=strategies.just(b"y" * 32), + readv=strategies.lists(strategies.tuples(offsets(), lengths()), min_size=1), + random=strategies.randoms(), + ) + def test_readv_rejects_negative_length(self, schema, nodeid, write_enabler, readv, random): + """ + If a negative length is given to ``MutableShareFile.readv`` in a read + vector then ``AssertionError`` is raised. + """ + # Pick a read vector to break with a negative value + readv_index = random.randrange(len(readv)) + # Decide on whether we're breaking offset or length + offset_or_length = random.randrange(2) + + # A helper function that will take a valid offset and length and break + # one of them. 
+ def corrupt(break_length, offset, length): + if break_length: + # length must not be 0 or flipping the sign does nothing + # length must not be negative or flipping the sign *fixes* it + assert length > 0 + return (offset, -length) + else: + if offset > 0: + # We can break offset just by flipping the sign. + return (-offset, length) + else: + # Otherwise it has to be zero. If it was negative, what's + # going on? + assert offset == 0 + # Since we can't just flip the sign on 0 to break things, + # replace a 0 offset with a simple negative value. All + # other negative values will be tested by the `offset > 0` + # case above. + return (-1, length) + + # Break the read vector very slightly! + broken_readv = readv[:] + broken_readv[readv_index] = corrupt( + offset_or_length, + *broken_readv[readv_index] + ) + + sf = self.get_sharefile(schema=schema) + sf.create(my_nodeid=nodeid, write_enabler=write_enabler) + + # A read with a broken read vector is an error. + with self.assertRaises(AssertionError): + sf.readv(broken_readv) + + +class LeaseInfoTests(SyncTestCase): + """ + Tests for ``allmydata.storage.lease.LeaseInfo``. + """ + def test_is_renew_secret(self): + """ + ``LeaseInfo.is_renew_secret`` returns ``True`` if the value given is the + renew secret. + """ + renew_secret = b"r" * 32 + cancel_secret = b"c" * 32 + lease = LeaseInfo( + owner_num=1, + renew_secret=renew_secret, + cancel_secret=cancel_secret, + ) + self.assertTrue(lease.is_renew_secret(renew_secret)) + + def test_is_not_renew_secret(self): + """ + ``LeaseInfo.is_renew_secret`` returns ``False`` if the value given is not + the renew secret. + """ + renew_secret = b"r" * 32 + cancel_secret = b"c" * 32 + lease = LeaseInfo( + owner_num=1, + renew_secret=renew_secret, + cancel_secret=cancel_secret, + ) + self.assertFalse(lease.is_renew_secret(cancel_secret)) + + def test_is_cancel_secret(self): + """ + ``LeaseInfo.is_cancel_secret`` returns ``True`` if the value given is the + cancel secret. 
+ """ + renew_secret = b"r" * 32 + cancel_secret = b"c" * 32 + lease = LeaseInfo( + owner_num=1, + renew_secret=renew_secret, + cancel_secret=cancel_secret, + ) + self.assertTrue(lease.is_cancel_secret(cancel_secret)) + + def test_is_not_cancel_secret(self): + """ + ``LeaseInfo.is_cancel_secret`` returns ``False`` if the value given is not + the cancel secret. + """ + renew_secret = b"r" * 32 + cancel_secret = b"c" * 32 + lease = LeaseInfo( + owner_num=1, + renew_secret=renew_secret, + cancel_secret=cancel_secret, + ) + self.assertFalse(lease.is_cancel_secret(renew_secret)) + + @given( + strategies.tuples( + strategies.integers(min_value=0, max_value=2 ** 31 - 1), + strategies.binary(min_size=32, max_size=32), + strategies.binary(min_size=32, max_size=32), + strategies.integers(min_value=0, max_value=2 ** 31 - 1), + strategies.binary(min_size=20, max_size=20), + ), + ) + def test_immutable_size(self, initializer_args): + """ + ``LeaseInfo.immutable_size`` returns the length of the result of + ``LeaseInfo.to_immutable_data``. + + ``LeaseInfo.mutable_size`` returns the length of the result of + ``LeaseInfo.to_mutable_data``. + """ + info = LeaseInfo(*initializer_args) + self.expectThat( + info.to_immutable_data(), + HasLength(info.immutable_size()), + ) + self.expectThat( + info.to_mutable_data(), + HasLength(info.mutable_size()), + ) + + +class WriteBufferTests(SyncTestCase): + """Tests for ``_WriteBuffer``.""" + + @given( + small_writes=strategies.lists( + strategies.binary(min_size=1, max_size=20), + min_size=10, max_size=20), + batch_size=strategies.integers(min_value=5, max_value=10) + ) + def test_write_buffer(self, small_writes: list[bytes], batch_size: int): + """ + ``_WriteBuffer`` coalesces small writes into bigger writes based on + the batch size. 
+ """ + wb = _WriteBuffer(batch_size) + result = b"" + for data in small_writes: + should_flush = wb.queue_write(data) + if should_flush: + flushed_offset, flushed_data = wb.flush() + self.assertEqual(flushed_offset, len(result)) + # The flushed data is in batch sizes, or closest approximation + # given queued inputs: + self.assertTrue(batch_size <= len(flushed_data) < batch_size + len(data)) + result += flushed_data + + # Final flush: + remaining_length = wb.get_queued_bytes() + flushed_offset, flushed_data = wb.flush() + self.assertEqual(remaining_length, len(flushed_data)) + self.assertEqual(flushed_offset, len(result)) + result += flushed_data + + self.assertEqual(result, b"".join(small_writes)) diff --git a/src/allmydata/test/test_storage_client.py b/src/allmydata/test/test_storage_client.py index 1a84f35ec..1b2d31bb2 100644 --- a/src/allmydata/test/test_storage_client.py +++ b/src/allmydata/test/test_storage_client.py @@ -1,22 +1,16 @@ """ -Ported from Python 3. +Tests for allmydata.storage_client. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - -from six import ensure_text +from __future__ import annotations from json import ( loads, ) - import hashlib +from typing import Union, Any, Optional + +from hyperlink import DecodedURL from fixtures import ( TempDir, ) @@ -47,6 +41,7 @@ import attr from twisted.internet.interfaces import ( IStreamClientEndpoint, + IProtocolFactory, ) from twisted.application.service import ( Service, @@ -60,6 +55,7 @@ from twisted.internet.defer import ( from twisted.python.filepath import ( FilePath, ) +from twisted.internet.task import Clock from foolscap.api import ( Tub, @@ -68,6 +64,8 @@ from foolscap.ipb import ( IConnectionHintHandler, ) +from allmydata.util.deferredutil import MultiFailure + from .no_network import LocalWrapper from .common import ( EMPTY_CLIENT_CONFIG, @@ -91,13 +89,20 @@ from allmydata.util import base32, yamlutil from allmydata.storage_client import ( IFoolscapStorageServer, NativeStorageServer, + HTTPNativeStorageServer, StorageFarmBroker, + StorageClientConfig, + MissingPlugin, _FoolscapStorage, _NullStorage, + _pick_a_http_server, + ANONYMOUS_STORAGE_NURLS, ) from ..storage.server import ( StorageServer, ) +from ..client import config_from_string + from allmydata.interfaces import ( IConnectionStatus, IStorageServer, @@ -169,16 +174,21 @@ class UnrecognizedAnnouncement(unittest.TestCase): an announcement generated by a storage server plugin which is not loaded in the client. 
""" + plugin_name = u"tahoe-lafs-testing-v1" ann = { - u"name": u"tahoe-lafs-testing-v1", - u"any-parameter": 12345, + u"storage-options": [ + { + u"name": plugin_name, + u"any-parameter": 12345, + }, + ], } server_id = b"abc" def _tub_maker(self, overrides): return Service() - def native_storage_server(self): + def native_storage_server(self, config: Optional[StorageClientConfig] = None) -> NativeStorageServer: """ Make a ``NativeStorageServer`` out of an unrecognizable announcement. """ @@ -187,7 +197,8 @@ class UnrecognizedAnnouncement(unittest.TestCase): self.ann, self._tub_maker, {}, - EMPTY_CLIENT_CONFIG, + node_config=EMPTY_CLIENT_CONFIG, + config=config if config is not None else StorageClientConfig(), ) def test_no_exceptions(self): @@ -234,6 +245,18 @@ class UnrecognizedAnnouncement(unittest.TestCase): server.get_foolscap_write_enabler_seed() server.get_nickname() + def test_missing_plugin(self) -> None: + """ + An exception is produced if the plugin is missing + """ + with self.assertRaises(MissingPlugin): + self.native_storage_server( + StorageClientConfig( + storage_plugins={ + "missing-plugin-name": {} + } + ) + ) class PluginMatchedAnnouncement(SyncTestCase): @@ -475,7 +498,7 @@ class StoragePluginWebPresence(AsyncTestCase): # config validation policy). "tub.port": tubport_endpoint, "tub.location": tubport_location, - "web.port": ensure_text(webport_endpoint), + "web.port": str(webport_endpoint), }, storage_plugin=self.storage_plugin, basedir=self.basedir, @@ -582,7 +605,7 @@ class SpyHandler(object): ``Deferred`` that was returned from ``connect`` and the factory that was passed to ``connect``. 
""" - _connects = attr.ib(default=attr.Factory(list)) + _connects : list[tuple[Deferred[object], IProtocolFactory]]= attr.ib(default=attr.Factory(list)) def hint_to_endpoint(self, hint, reactor, update_status): return (SpyEndpoint(self._connects.append), hint) @@ -624,6 +647,43 @@ storage: self.assertIdentical(s2, s) self.assertEqual(s2.get_permutation_seed(), permseed) + def test_upgrade_from_foolscap_to_http(self): + """ + When an announcement is initially Foolscap but then switches to HTTP, + HTTP is used, assuming HTTP is enabled. + """ + tub_maker = lambda _: new_tub() + config = config_from_string( + "/dev/null", "", "[client]\nforce_foolscap = False\n" + ) + broker = StorageFarmBroker(True, tub_maker, config) + broker.startService() + self.addCleanup(broker.stopService) + key_s = b'v0-1234-1' + + ones = str(base32.b2a(b"1"), "utf-8") + initial_announcement = { + "service-name": "storage", + "anonymous-storage-FURL": f"pb://{ones}@nowhere/fake2", + "permutation-seed-base32": "bbbbbbbbbbbbbbbbbbbbbbbb", + } + broker._got_announcement(key_s, initial_announcement) + initial_service = broker.servers[key_s] + self.assertIsInstance(initial_service, NativeStorageServer) + self.assertTrue(initial_service.running) + self.assertIdentical(initial_service.parent, broker) + + http_announcement = initial_announcement.copy() + http_announcement[ANONYMOUS_STORAGE_NURLS] = {f"pb://{ones}@nowhere/fake2#v=1"} + broker._got_announcement(key_s, http_announcement) + self.assertFalse(initial_service.running) + self.assertEqual(initial_service.parent, None) + new_service = broker.servers[key_s] + self.assertIsInstance(new_service, HTTPNativeStorageServer) + self.assertTrue(new_service.running) + self.assertIdentical(new_service.parent, broker) + + def test_static_permutation_seed_pubkey(self): broker = make_broker() server_id = b"v0-4uazse3xb6uu5qpkb7tel2bm6bpea4jhuigdhqcuvvse7hugtsia" @@ -739,3 +799,101 @@ storage: yield done self.assertTrue(done.called) + + def 
test_should_we_use_http_default(self): + """Default is to use HTTP.""" + basedir = self.mktemp() + node_config = config_from_string(basedir, "", "") + announcement = {ANONYMOUS_STORAGE_NURLS: ["pb://..."]} + self.assertTrue( + StorageFarmBroker._should_we_use_http(node_config, announcement) + ) + # Lacking NURLs, we can't use HTTP: + self.assertFalse( + StorageFarmBroker._should_we_use_http(node_config, {}) + ) + + def test_should_we_use_http(self): + """ + If HTTP is allowed, it will only be used if the announcement includes + some NURLs. + """ + basedir = self.mktemp() + + no_nurls = {} + empty_nurls = {ANONYMOUS_STORAGE_NURLS: []} + has_nurls = {ANONYMOUS_STORAGE_NURLS: ["pb://.."]} + + for force_foolscap, announcement, expected_http_usage in [ + ("false", no_nurls, False), + ("false", empty_nurls, False), + ("false", has_nurls, True), + ("true", empty_nurls, False), + ("true", no_nurls, False), + ("true", has_nurls, False), + ]: + node_config = config_from_string( + basedir, "", f"[client]\nforce_foolscap = {force_foolscap}" + ) + self.assertEqual( + StorageFarmBroker._should_we_use_http(node_config, announcement), + expected_http_usage + ) + + +class PickHTTPServerTests(unittest.SynchronousTestCase): + """Tests for ``_pick_a_http_server``.""" + + def pick_result(self, url_to_results: dict[DecodedURL, tuple[float, Union[Exception, Any]]]) -> Deferred[DecodedURL]: + """ + Given mapping of URLs to (delay, result), return the URL of the + first selected server, or None. 
+ """ + clock = Clock() + + def request(reactor, url): + delay, value = url_to_results[url] + result = Deferred() + def add_result_value(): + if isinstance(value, Exception): + result.errback(value) + else: + result.callback(value) + reactor.callLater(delay, add_result_value) + return result + + d = _pick_a_http_server(clock, list(url_to_results.keys()), request) + for i in range(100): + clock.advance(0.1) + return d + + def test_first_successful_connect_is_picked(self): + """ + Given multiple good URLs, the first one that connects is chosen. + """ + earliest_url = DecodedURL.from_text("http://a") + latest_url = DecodedURL.from_text("http://b") + bad_url = DecodedURL.from_text("http://bad") + result = self.pick_result({ + latest_url: (2, None), + earliest_url: (1, None), + bad_url: (0.5, RuntimeError()), + }) + self.assertEqual(self.successResultOf(result), earliest_url) + + def test_failures_include_all_reasons(self): + """ + If all the requests fail, ``_pick_a_http_server`` raises a + ``allmydata.util.deferredutil.MultiFailure``. + """ + eventually_good_url = DecodedURL.from_text("http://good") + bad_url = DecodedURL.from_text("http://bad") + exception1 = RuntimeError() + exception2 = ZeroDivisionError() + result = self.pick_result({ + eventually_good_url: (1, exception1), + bad_url: (0.1, exception2), + }) + exc = self.failureResultOf(result).value + self.assertIsInstance(exc, MultiFailure) + self.assertEqual({f.value for f in exc.failures}, {exception2, exception1}) diff --git a/src/allmydata/test/test_storage_http.py b/src/allmydata/test/test_storage_http.py new file mode 100644 index 000000000..185cfa995 --- /dev/null +++ b/src/allmydata/test/test_storage_http.py @@ -0,0 +1,1837 @@ +""" +Tests for HTTP storage client + server. + +The tests here are synchronous and don't involve running a real reactor. This +works, but has some caveats when it comes to testing HTTP endpoints: + +* Some HTTP endpoints are synchronous, some are not. 
+* For synchronous endpoints, the result is immediately available on the + ``Deferred`` coming out of ``StubTreq``. +* For asynchronous endpoints, you need to use ``StubTreq.flush()`` and + iterate the fake in-memory clock/reactor to advance time . + +So for HTTP endpoints, you should use ``HttpTestFixture.result_of_with_flush()`` +which handles both, and patches and moves forward the global Twisted +``Cooperator`` since that is used to drive pull producers. This is, +sadly, an internal implementation detail of Twisted being leaked to tests... + +For definitely synchronous calls, you can just use ``result_of()``. +""" + +import time +from base64 import b64encode +from contextlib import contextmanager +from os import urandom +from typing import Union, Callable, Tuple, Iterable +from queue import Queue +from pycddl import ValidationError as CDDLValidationError +from hypothesis import assume, given, strategies as st, settings as hypothesis_settings +from fixtures import Fixture, TempDir, MonkeyPatch +from treq.testing import StubTreq +from klein import Klein +from hyperlink import DecodedURL +from collections_extended import RangeMap +from twisted.internet.task import Clock, Cooperator +from twisted.internet.interfaces import IReactorTime, IReactorFromThreads +from twisted.internet.defer import CancelledError, Deferred, ensureDeferred +from twisted.web import http +from twisted.web.http_headers import Headers +from werkzeug import routing +from werkzeug.exceptions import NotFound as WNotFound +from testtools.matchers import Equals +from zope.interface import implementer + +from ..util.cbor import dumps +from ..util.deferredutil import async_to_deferred +from ..util.cputhreadpool import disable_thread_pool_for_test +from .common import SyncTestCase +from ..storage.http_common import ( + get_content_type, + CBOR_MIME_TYPE, + response_is_not_html, +) +from ..storage.common import si_b2a +from ..storage.lease import LeaseInfo +from ..storage.server import StorageServer 
+from ..storage.http_server import ( + HTTPServer, + _extract_secrets, + Secrets, + ClientSecretsException, + _authorized_route, + StorageIndexConverter, + _add_error_handling, + read_encoded, + _SCHEMAS as SERVER_SCHEMAS, + BaseApp, +) +from ..storage.http_client import ( + StorageClient, + StorageClientFactory, + ClientException, + StorageClientImmutables, + ImmutableCreateResult, + UploadProgress, + StorageClientGeneral, + _encode_si, + StorageClientMutables, + TestWriteVectors, + WriteVector, + ReadVector, + ReadTestWriteResult, + TestVector, + limited_content, +) + + +class HTTPUtilities(SyncTestCase): + """Tests for HTTP common utilities.""" + + def test_get_content_type(self): + """``get_content_type()`` extracts the content-type from the header.""" + + def assert_header_values_result(values, expected_content_type): + headers = Headers() + if values: + headers.setRawHeaders("Content-Type", values) + content_type = get_content_type(headers) + self.assertEqual(content_type, expected_content_type) + + assert_header_values_result(["text/html"], "text/html") + assert_header_values_result([], None) + assert_header_values_result(["text/plain", "application/json"], "text/plain") + assert_header_values_result(["text/html;encoding=utf-8"], "text/html") + + +def _post_process(params): + secret_types, secrets = params + secrets = {t: s for (t, s) in zip(secret_types, secrets)} + headers = [ + "{} {}".format( + secret_type.value, str(b64encode(secrets[secret_type]), "ascii").strip() + ) + for secret_type in secret_types + ] + return secrets, headers + + +# Creates a tuple of ({Secret enum value: secret_bytes}, [http headers with secrets]). 
+SECRETS_STRATEGY = ( + st.sets(st.sampled_from(Secrets)) + .flatmap( + lambda secret_types: st.tuples( + st.just(secret_types), + st.lists( + st.binary(min_size=32, max_size=32), + min_size=len(secret_types), + max_size=len(secret_types), + ), + ) + ) + .map(_post_process) +) + + +class ExtractSecretsTests(SyncTestCase): + """ + Tests for ``_extract_secrets``. + """ + + @given(secrets_to_send=SECRETS_STRATEGY) + def test_extract_secrets(self, secrets_to_send): + """ + ``_extract_secrets()`` returns a dictionary with the extracted secrets + if the input secrets match the required secrets. + """ + secrets, headers = secrets_to_send + + # No secrets needed, none given: + self.assertEqual(_extract_secrets(headers, secrets.keys()), secrets) + + @given( + secrets_to_send=SECRETS_STRATEGY, + secrets_to_require=st.sets(st.sampled_from(Secrets)), + ) + def test_wrong_number_of_secrets(self, secrets_to_send, secrets_to_require): + """ + If the wrong number of secrets are passed to ``_extract_secrets``, a + ``ClientSecretsException`` is raised. + """ + secrets_to_send, headers = secrets_to_send + assume(secrets_to_send.keys() != secrets_to_require) + + with self.assertRaises(ClientSecretsException): + _extract_secrets(headers, secrets_to_require) + + def test_bad_secret_missing_value(self): + """ + Missing value in ``_extract_secrets`` result in + ``ClientSecretsException``. + """ + with self.assertRaises(ClientSecretsException): + _extract_secrets(["lease-renew-secret"], {Secrets.LEASE_RENEW}) + + def test_bad_secret_unknown_prefix(self): + """ + Missing value in ``_extract_secrets`` result in + ``ClientSecretsException``. + """ + with self.assertRaises(ClientSecretsException): + _extract_secrets(["FOO eA=="], set()) + + def test_bad_secret_not_base64(self): + """ + A non-base64 value in ``_extract_secrets`` result in + ``ClientSecretsException``. 
+ """ + with self.assertRaises(ClientSecretsException): + _extract_secrets(["lease-renew-secret x"], {Secrets.LEASE_RENEW}) + + def test_bad_secret_wrong_length_lease_renew(self): + """ + Lease renewal secrets must be 32-bytes long. + """ + with self.assertRaises(ClientSecretsException): + _extract_secrets(["lease-renew-secret eA=="], {Secrets.LEASE_RENEW}) + + def test_bad_secret_wrong_length_lease_cancel(self): + """ + Lease cancel secrets must be 32-bytes long. + """ + with self.assertRaises(ClientSecretsException): + _extract_secrets(["lease-cancel-secret eA=="], {Secrets.LEASE_RENEW}) + + +class RouteConverterTests(SyncTestCase): + """Tests for custom werkzeug path segment converters.""" + + adapter = routing.Map( + [ + routing.Rule( + "//", endpoint="si", methods=["GET"] + ) + ], + converters={"storage_index": StorageIndexConverter}, + ).bind("example.com", "/") + + @given(storage_index=st.binary(min_size=16, max_size=16)) + def test_good_storage_index_is_parsed(self, storage_index): + """ + A valid storage index is accepted and parsed back out by + StorageIndexConverter. 
+ """ + self.assertEqual( + self.adapter.match( + "/{}/".format(str(si_b2a(storage_index), "ascii")), method="GET" + ), + ("si", {"storage_index": storage_index}), + ) + + def test_long_storage_index_is_not_parsed(self): + """An overly long storage_index string is not parsed.""" + with self.assertRaises(WNotFound): + self.adapter.match("/{}/".format("a" * 27), method="GET") + + def test_short_storage_index_is_not_parsed(self): + """An overly short storage_index string is not parsed.""" + with self.assertRaises(WNotFound): + self.adapter.match("/{}/".format("a" * 25), method="GET") + + def test_bad_characters_storage_index_is_not_parsed(self): + """A storage_index string with bad characters is not parsed.""" + with self.assertRaises(WNotFound): + self.adapter.match("/{}_/".format("a" * 25), method="GET") + + def test_invalid_storage_index_is_not_parsed(self): + """An invalid storage_index string is not parsed.""" + with self.assertRaises(WNotFound): + self.adapter.match("/nomd2a65ylxjbqzsw7gcfh4ivr/", method="GET") + + +# TODO should be actual swissnum +SWISSNUM_FOR_TEST = b"abcd" + + +def gen_bytes(length: int) -> bytes: + """Generate bytes to the given length.""" + result = (b"0123456789abcdef" * ((length // 16) + 1))[:length] + assert len(result) == length + return result + + +class TestApp(BaseApp): + """HTTP API for testing purposes.""" + + clock: IReactorTime + _app = Klein() + _add_error_handling(_app) + _swissnum = SWISSNUM_FOR_TEST # Match what the test client is using + + @_authorized_route(_app, set(), "/noop", methods=["GET"]) + def noop(self, request, authorization): + return "noop" + + @_authorized_route(_app, {Secrets.UPLOAD}, "/upload_secret", methods=["GET"]) + def validate_upload_secret(self, request, authorization): + if authorization == {Secrets.UPLOAD: b"MAGIC"}: + return "GOOD SECRET" + else: + return "BAD: {}".format(authorization) + + @_authorized_route(_app, set(), "/storage/v1/version", methods=["GET"]) + def bad_version(self, request, 
authorization): + """Return version result that violates the expected schema.""" + request.setHeader("content-type", CBOR_MIME_TYPE) + return dumps({"garbage": 123}) + + @_authorized_route(_app, set(), "/bytes/", methods=["GET"]) + def generate_bytes(self, request, authorization, length): + """Return bytes to the given length using ``gen_bytes()``.""" + return gen_bytes(length) + + @_authorized_route(_app, set(), "/slowly_never_finish_result", methods=["GET"]) + def slowly_never_finish_result(self, request, authorization): + """ + Send data immediately, after 59 seconds, after another 59 seconds, and then + never again, without finishing the response. + """ + request.write(b"a") + self.clock.callLater(59, request.write, b"b") + self.clock.callLater(59 + 59, request.write, b"c") + return Deferred() + + @_authorized_route(_app, set(), "/die_unfinished", methods=["GET"]) + def die(self, request, authorization): + """ + Dies half-way. + """ + request.transport.loseConnection() + return Deferred() + + @_authorized_route(_app, set(), "/read_body", methods=["POST"]) + @async_to_deferred + async def read_body(self, request, authorization): + """ + Accept an advise_corrupt_share message, return the reason. + + I.e. exercise codepaths used for reading CBOR from the body. + """ + data = await read_encoded( + self.clock, request, SERVER_SCHEMAS["advise_corrupt_share"] + ) + return data["reason"] + + +def result_of(d): + """ + Synchronously extract the result of a Deferred. + """ + result = [] + error = [] + d.addCallbacks(result.append, error.append) + if result: + return result[0] + if error: + error[0].raiseException() + raise RuntimeError( + "We expected given Deferred to have result already, but it wasn't. " + + "This is probably a test design issue." + ) + + +class CustomHTTPServerTests(SyncTestCase): + """ + Tests that use a custom HTTP server. 
+ """ + + def setUp(self): + super(CustomHTTPServerTests, self).setUp() + disable_thread_pool_for_test(self) + StorageClientFactory.start_test_mode( + lambda pool: self.addCleanup(pool.closeCachedConnections) + ) + self.addCleanup(StorageClientFactory.stop_test_mode) + # Could be a fixture, but will only be used in this test class so not + # going to bother: + self._http_server = TestApp() + treq = StubTreq(self._http_server._app.resource()) + self.client = StorageClient( + DecodedURL.from_text("http://127.0.0.1"), + SWISSNUM_FOR_TEST, + treq=treq, + pool=None, + # We're using a Treq private API to get the reactor, alas, but only + # in a test, so not going to worry about it too much. This would be + # fixed if https://github.com/twisted/treq/issues/226 were ever + # fixed. + clock=treq._agent._memoryReactor, + analyze_response=response_is_not_html, + ) + self._http_server.clock = self.client._clock + + def test_bad_swissnum_from_client(self) -> None: + """ + If the swissnum is invalid, a BAD REQUEST response code is returned. + """ + headers = Headers() + # The value is not UTF-8. + headers.addRawHeader("Authorization", b"\x00\xFF\x00\xFF") + response = result_of( + self.client._treq.request( + "GET", + DecodedURL.from_text("http://127.0.0.1/noop"), + headers=headers, + ) + ) + self.assertEqual(response.code, 400) + + def test_bad_secret(self) -> None: + """ + If the secret is invalid (not base64), a BAD REQUEST + response code is returned. + """ + bad_secret = b"upload-secret []<>" + headers = Headers() + headers.addRawHeader( + "X-Tahoe-Authorization", + bad_secret, + ) + response = result_of( + self.client.request( + "GET", + DecodedURL.from_text("http://127.0.0.1/upload_secret"), + headers=headers, + ) + ) + self.assertEqual(response.code, 400) + + def test_authorization_enforcement(self): + """ + The requirement for secrets is enforced by the ``_authorized_route`` + decorator; if they are not given, a 400 response code is returned. 
+ + Note that this refers to ``X-Tahoe-Authorization``, not the + ``Authorization`` header used for the swissnum. + """ + # Without secret, get a 400 error. + response = result_of( + self.client.request( + "GET", + DecodedURL.from_text("http://127.0.0.1/upload_secret"), + ) + ) + self.assertEqual(response.code, 400) + + # With secret, we're good. + response = result_of( + self.client.request( + "GET", + DecodedURL.from_text("http://127.0.0.1/upload_secret"), + upload_secret=b"MAGIC", + ) + ) + self.assertEqual(response.code, 200) + self.assertEqual(result_of(response.content()), b"GOOD SECRET") + + def test_client_side_schema_validation(self): + """ + The client validates returned CBOR message against a schema. + """ + client = StorageClientGeneral(self.client) + with self.assertRaises(CDDLValidationError): + result_of(client.get_version()) + + @given(length=st.integers(min_value=1, max_value=1_000_000)) + # On Python 3.12 we're getting weird deadline issues in CI, so disabling + # for now. + @hypothesis_settings(deadline=None) + def test_limited_content_fits(self, length): + """ + ``http_client.limited_content()`` returns the body if it is less than + the max length. + """ + for at_least_length in (length, length + 1, length + 1000, length + 100_000): + response = result_of( + self.client.request( + "GET", + DecodedURL.from_text(f"http://127.0.0.1/bytes/{length}"), + ) + ) + + self.assertEqual( + result_of( + limited_content(response, self._http_server.clock, at_least_length) + ).read(), + gen_bytes(length), + ) + + @given(length=st.integers(min_value=10, max_value=1_000_000)) + def test_limited_content_does_not_fit(self, length): + """ + If the body is longer than than max length, + ``http_client.limited_content()`` fails with a ``ValueError``. 
+ """ + for too_short in (length - 1, 5): + response = result_of( + self.client.request( + "GET", + DecodedURL.from_text(f"http://127.0.0.1/bytes/{length}"), + ) + ) + + with self.assertRaises(ValueError): + result_of(limited_content(response, self._http_server.clock, too_short)) + + def test_limited_content_silence_causes_timeout(self): + """ + ``http_client.limited_content() times out if it receives no data for 60 + seconds. + """ + response = result_of( + self.client.request( + "GET", + DecodedURL.from_text("http://127.0.0.1/slowly_never_finish_result"), + ) + ) + + body_deferred = limited_content(response, self._http_server.clock, 4) + result = [] + error = [] + body_deferred.addCallbacks(result.append, error.append) + + for i in range(59 + 59 + 60): + self.assertEqual((result, error), ([], [])) + self._http_server.clock.advance(1) + # Push data between in-memory client and in-memory server: + self.client._treq._agent.flush() + + # After 59 (second write) + 59 (third write) + 60 seconds (quiescent + # timeout) the limited_content() response times out. + self.assertTrue(error) + with self.assertRaises(CancelledError): + error[0].raiseException() + + def test_limited_content_cancels_timeout_on_failed_response(self): + """ + If the response fails somehow, the timeout is still cancelled. + """ + response = result_of( + self.client.request( + "GET", + DecodedURL.from_text("http://127.0.0.1/die"), + ) + ) + + d = limited_content(response, self._http_server.clock, 4) + with self.assertRaises(ValueError): + result_of(d) + self.assertEqual(len(self._http_server.clock.getDelayedCalls()), 0) + + def test_request_with_no_content_type_same_as_cbor(self): + """ + If no ``Content-Type`` header is set when sending a body, it is assumed + to be CBOR. 
+ """ + response = result_of( + self.client.request( + "POST", + DecodedURL.from_text("http://127.0.0.1/read_body"), + data=dumps({"reason": "test"}), + ) + ) + self.assertEqual( + result_of(limited_content(response, self._http_server.clock, 100)).read(), + b"test", + ) + + def test_request_with_wrong_content(self): + """ + If a non-CBOR ``Content-Type`` header is set when sending a body, the + server complains appropriatly. + """ + headers = Headers() + headers.setRawHeaders("content-type", ["some/value"]) + response = result_of( + self.client.request( + "POST", + DecodedURL.from_text("http://127.0.0.1/read_body"), + data=dumps({"reason": "test"}), + headers=headers, + ) + ) + self.assertEqual(response.code, http.UNSUPPORTED_MEDIA_TYPE) + + +@implementer(IReactorFromThreads) +class Reactor(Clock): + """ + Fake reactor that supports time APIs and callFromThread. + + Advancing the clock also runs any callbacks scheduled via callFromThread. + """ + + def __init__(self): + Clock.__init__(self) + self._queue = Queue() + + def callFromThread(self, callable, *args, **kwargs): + self._queue.put((callable, args, kwargs)) + + def advance(self, *args, **kwargs): + Clock.advance(self, *args, **kwargs) + while not self._queue.empty(): + f, args, kwargs = self._queue.get() + f(*args, **kwargs) + + +class HttpTestFixture(Fixture): + """ + Setup HTTP tests' infrastructure, the storage server and corresponding + client. + """ + + def _setUp(self): + StorageClientFactory.start_test_mode( + lambda pool: self.addCleanup(pool.closeCachedConnections) + ) + self.addCleanup(StorageClientFactory.stop_test_mode) + self.clock = Reactor() + self.tempdir = self.useFixture(TempDir()) + # The global Cooperator used by Twisted (a) used by pull producers in + # twisted.web, (b) is driven by a real reactor. We want to push time + # forward ourselves since we rely on pull producers in the HTTP storage + # server. 
+ self.mock = self.useFixture( + MonkeyPatch( + "twisted.internet.task._theCooperator", + Cooperator(scheduler=lambda c: self.clock.callLater(0.000001, c)), + ) + ) + self.storage_server = StorageServer( + self.tempdir.path, b"\x00" * 20, clock=self.clock + ) + self.http_server = HTTPServer( + self.clock, self.storage_server, SWISSNUM_FOR_TEST + ) + self.treq = StubTreq(self.http_server.get_resource()) + self.client = StorageClient( + DecodedURL.from_text("http://127.0.0.1"), + SWISSNUM_FOR_TEST, + treq=self.treq, + pool=None, + clock=self.clock, + analyze_response=response_is_not_html, + ) + + def result_of_with_flush(self, d): + """ + Like ``result_of``, but supports fake reactor and ``treq`` testing + infrastructure necessary to support asynchronous HTTP server endpoints. + """ + d = ensureDeferred(d) + result = [] + error = [] + d.addCallbacks(result.append, error.append) + + # Check for synchronous HTTP endpoint handler: + if result: + return result[0] + if error: + error[0].raiseException() + + # OK, no result yet, probably async HTTP endpoint handler, so advance + # time, flush treq, and try again: + for i in range(10_000): + self.clock.advance(0.001) + self.treq.flush() + if result: + break + # By putting the sleep at the end, tests that are completely + # synchronous and don't use threads will have already broken out of + # the loop, and so will finish without any sleeps. This allows them + # to run as quickly as possible. + # + # However, some tests do talk to APIs that use a thread pool on the + # backend, so we need to allow actual time to pass for those. + time.sleep(0.001) + + if result: + return result[0] + if error: + error[0].raiseException() + + raise RuntimeError( + "We expected given Deferred to have result already, but it wasn't. " + + "This is probably a test design issue." 
+ ) + + +class StorageClientWithHeadersOverride(object): + """Wrap ``StorageClient`` and override sent headers.""" + + def __init__(self, storage_client, add_headers): + self.storage_client = storage_client + self.add_headers = add_headers + + def __getattr__(self, attr): + return getattr(self.storage_client, attr) + + def request(self, *args, headers=None, **kwargs): + if headers is None: + headers = Headers() + for key, value in self.add_headers.items(): + headers.setRawHeaders(key, [value]) + return self.storage_client.request(*args, headers=headers, **kwargs) + + +@contextmanager +def assert_fails_with_http_code(test_case: SyncTestCase, code: int): + """ + Context manager that asserts the code fails with the given HTTP response + code. + """ + with test_case.assertRaises(ClientException) as e: + try: + yield + finally: + pass + test_case.assertEqual(e.exception.code, code) + + +class GenericHTTPAPITests(SyncTestCase): + """ + Tests of HTTP client talking to the HTTP server, for generic HTTP API + endpoints and concerns. + """ + + def setUp(self): + super(GenericHTTPAPITests, self).setUp() + disable_thread_pool_for_test(self) + self.http = self.useFixture(HttpTestFixture()) + + def test_missing_authentication(self) -> None: + """ + If nothing is given in the ``Authorization`` header at all an + ``Unauthorized`` response is returned. + """ + client = StubTreq(self.http.http_server.get_resource()) + response = self.http.result_of_with_flush( + client.request( + "GET", + "http://127.0.0.1/storage/v1/version", + ), + ) + self.assertThat(response.code, Equals(http.UNAUTHORIZED)) + + def test_bad_authentication(self): + """ + If the wrong swissnum is used, an ``Unauthorized`` response code is + returned. 
+ """ + client = StorageClientGeneral( + StorageClient( + DecodedURL.from_text("http://127.0.0.1"), + b"something wrong", + treq=StubTreq(self.http.http_server.get_resource()), + pool=None, + clock=self.http.clock, + analyze_response=response_is_not_html, + ) + ) + with assert_fails_with_http_code(self, http.UNAUTHORIZED): + self.http.result_of_with_flush(client.get_version()) + + def test_unsupported_mime_type(self): + """ + The client can request mime types other than CBOR, and if they are + unsupported a NOT ACCEPTABLE (406) error will be returned. + """ + client = StorageClientGeneral( + StorageClientWithHeadersOverride(self.http.client, {"accept": "image/gif"}) + ) + with assert_fails_with_http_code(self, http.NOT_ACCEPTABLE): + self.http.result_of_with_flush(client.get_version()) + + def test_version(self): + """ + The client can return the version. + + We ignore available disk space and max immutable share size, since that + might change across calls. + """ + client = StorageClientGeneral(self.http.client) + version = self.http.result_of_with_flush(client.get_version()) + version[b"http://allmydata.org/tahoe/protocols/storage/v1"].pop( + b"available-space" + ) + version[b"http://allmydata.org/tahoe/protocols/storage/v1"].pop( + b"maximum-immutable-share-size" + ) + expected_version = self.http.storage_server.get_version() + expected_version[b"http://allmydata.org/tahoe/protocols/storage/v1"].pop( + b"available-space" + ) + expected_version[b"http://allmydata.org/tahoe/protocols/storage/v1"].pop( + b"maximum-immutable-share-size" + ) + self.assertEqual(version, expected_version) + + def test_server_side_schema_validation(self): + """ + Ensure that schema validation is happening: invalid CBOR should result + in bad request response code (error 400). + + We don't bother checking every single request, the API on the + server-side is designed to require a schema, so it validates + everywhere. 
But we check at least one to ensure we get correct + response code on bad input, so we know validation happened. + """ + upload_secret = urandom(32) + lease_secret = urandom(32) + storage_index = urandom(16) + url = self.http.client.relative_url( + "/storage/v1/immutable/" + _encode_si(storage_index) + ) + message = {"bad-message": "missing expected keys"} + + response = self.http.result_of_with_flush( + self.http.client.request( + "POST", + url, + lease_renew_secret=lease_secret, + lease_cancel_secret=lease_secret, + upload_secret=upload_secret, + message_to_serialize=message, + ) + ) + self.assertEqual(response.code, http.BAD_REQUEST) + + +class ImmutableHTTPAPITests(SyncTestCase): + """ + Tests for immutable upload/download APIs. + """ + + def setUp(self): + super(ImmutableHTTPAPITests, self).setUp() + disable_thread_pool_for_test(self) + self.http = self.useFixture(HttpTestFixture()) + self.imm_client = StorageClientImmutables(self.http.client) + self.general_client = StorageClientGeneral(self.http.client) + + def create_upload(self, share_numbers, length): + """ + Create a write bucket on server, return: + + (upload_secret, lease_secret, storage_index, result) + """ + upload_secret = urandom(32) + lease_secret = urandom(32) + storage_index = urandom(16) + created = self.http.result_of_with_flush( + self.imm_client.create( + storage_index, + share_numbers, + length, + upload_secret, + lease_secret, + lease_secret, + ) + ) + return (upload_secret, lease_secret, storage_index, created) + + def test_upload_can_be_downloaded(self): + """ + A single share can be uploaded in (possibly overlapping) chunks, and + then a random chunk can be downloaded, and it will match the original + file. + + We don't exercise the full variation of overlapping chunks because + that's already done in test_storage.py. 
+ """ + length = 100 + expected_data = bytes(range(100)) + + # Create a upload: + (upload_secret, _, storage_index, created) = self.create_upload({1}, 100) + self.assertEqual( + created, ImmutableCreateResult(already_have=set(), allocated={1}) + ) + + remaining = RangeMap() + remaining.set(True, 0, 100) + + # Three writes: 10-19, 30-39, 50-59. This allows for a bunch of holes. + def write(offset, length): + remaining.empty(offset, offset + length) + return self.imm_client.write_share_chunk( + storage_index, + 1, + upload_secret, + offset, + expected_data[offset : offset + length], + ) + + upload_progress = self.http.result_of_with_flush(write(10, 10)) + self.assertEqual( + upload_progress, UploadProgress(finished=False, required=remaining) + ) + upload_progress = self.http.result_of_with_flush(write(30, 10)) + self.assertEqual( + upload_progress, UploadProgress(finished=False, required=remaining) + ) + upload_progress = self.http.result_of_with_flush(write(50, 10)) + self.assertEqual( + upload_progress, UploadProgress(finished=False, required=remaining) + ) + + # Then, an overlapping write with matching data (15-35): + upload_progress = self.http.result_of_with_flush(write(15, 20)) + self.assertEqual( + upload_progress, UploadProgress(finished=False, required=remaining) + ) + + # Now fill in the holes: + upload_progress = self.http.result_of_with_flush(write(0, 10)) + self.assertEqual( + upload_progress, UploadProgress(finished=False, required=remaining) + ) + upload_progress = self.http.result_of_with_flush(write(40, 10)) + self.assertEqual( + upload_progress, UploadProgress(finished=False, required=remaining) + ) + upload_progress = self.http.result_of_with_flush(write(60, 40)) + self.assertEqual( + upload_progress, UploadProgress(finished=True, required=RangeMap()) + ) + + # We can now read: + for offset, length in [(0, 100), (10, 19), (99, 1), (49, 200)]: + downloaded = self.http.result_of_with_flush( + self.imm_client.read_share_chunk(storage_index, 1, offset, 
length) + ) + self.assertEqual(downloaded, expected_data[offset : offset + length]) + + def test_write_with_wrong_upload_key(self): + """ + A write with an upload key that is different than the original upload + key will fail. + """ + (upload_secret, _, storage_index, _) = self.create_upload({1}, 100) + with assert_fails_with_http_code(self, http.UNAUTHORIZED): + self.http.result_of_with_flush( + self.imm_client.write_share_chunk( + storage_index, + 1, + upload_secret + b"X", + 0, + b"123", + ) + ) + + def test_allocate_buckets_second_time_different_shares(self): + """ + If allocate buckets endpoint is called second time with different + upload key on potentially different shares, that creates the buckets on + those shares that are different. + """ + # Create a upload: + (upload_secret, lease_secret, storage_index, created) = self.create_upload( + {1, 2, 3}, 100 + ) + + # Write half of share 1 + self.http.result_of_with_flush( + self.imm_client.write_share_chunk( + storage_index, + 1, + upload_secret, + 0, + b"a" * 50, + ) + ) + + # Add same shares with a different upload key share 1 overlaps with + # existing shares, this call shouldn't overwrite the existing + # work-in-progress. + upload_secret2 = b"x" * 2 + created2 = self.http.result_of_with_flush( + self.imm_client.create( + storage_index, + {1, 4, 6}, + 100, + upload_secret2, + lease_secret, + lease_secret, + ) + ) + self.assertEqual(created2.allocated, {4, 6}) + + # Write second half of share 1 + self.assertTrue( + self.http.result_of_with_flush( + self.imm_client.write_share_chunk( + storage_index, + 1, + upload_secret, + 50, + b"b" * 50, + ) + ).finished + ) + + # The upload of share 1 succeeded, demonstrating that second create() + # call didn't overwrite work-in-progress. 
+ downloaded = self.http.result_of_with_flush( + self.imm_client.read_share_chunk(storage_index, 1, 0, 100) + ) + self.assertEqual(downloaded, b"a" * 50 + b"b" * 50) + + # We can successfully upload the shares created with the second upload secret. + self.assertTrue( + self.http.result_of_with_flush( + self.imm_client.write_share_chunk( + storage_index, + 4, + upload_secret2, + 0, + b"x" * 100, + ) + ).finished + ) + + def test_list_shares(self): + """ + Once a share is finished uploading, it's possible to list it. + """ + (upload_secret, _, storage_index, created) = self.create_upload({1, 2, 3}, 10) + + # Initially there are no shares: + self.assertEqual( + self.http.result_of_with_flush(self.imm_client.list_shares(storage_index)), + set(), + ) + + # Upload shares 1 and 3: + for share_number in [1, 3]: + progress = self.http.result_of_with_flush( + self.imm_client.write_share_chunk( + storage_index, + share_number, + upload_secret, + 0, + b"0123456789", + ) + ) + self.assertTrue(progress.finished) + + # Now shares 1 and 3 exist: + self.assertEqual( + self.http.result_of_with_flush(self.imm_client.list_shares(storage_index)), + {1, 3}, + ) + + def test_upload_bad_content_range(self): + """ + Malformed or invalid Content-Range headers to the immutable upload + endpoint result in a 416 error. 
+ """ + (upload_secret, _, storage_index, created) = self.create_upload({1}, 10) + + def check_invalid(bad_content_range_value): + client = StorageClientImmutables( + StorageClientWithHeadersOverride( + self.http.client, {"content-range": bad_content_range_value} + ) + ) + with assert_fails_with_http_code( + self, http.REQUESTED_RANGE_NOT_SATISFIABLE + ): + self.http.result_of_with_flush( + client.write_share_chunk( + storage_index, + 1, + upload_secret, + 0, + b"0123456789", + ) + ) + + check_invalid("not a valid content-range header at all") + check_invalid("bytes -1-9/10") + check_invalid("bytes 0--9/10") + check_invalid("teapots 0-9/10") + + def test_list_shares_unknown_storage_index(self): + """ + Listing unknown storage index's shares results in empty list of shares. + """ + storage_index = bytes(range(16)) + self.assertEqual( + self.http.result_of_with_flush(self.imm_client.list_shares(storage_index)), + set(), + ) + + def test_upload_non_existent_storage_index(self): + """ + Uploading to a non-existent storage index or share number results in + 404. + """ + (upload_secret, _, storage_index, _) = self.create_upload({1}, 10) + + def unknown_check(storage_index, share_number): + with assert_fails_with_http_code(self, http.NOT_FOUND): + self.http.result_of_with_flush( + self.imm_client.write_share_chunk( + storage_index, + share_number, + upload_secret, + 0, + b"0123456789", + ) + ) + + # Wrong share number: + unknown_check(storage_index, 7) + # Wrong storage index: + unknown_check(b"X" * 16, 7) + + def test_multiple_shares_uploaded_to_different_place(self): + """ + If a storage index has multiple shares, uploads to different shares are + stored separately and can be downloaded separately. 
+ """ + (upload_secret, _, storage_index, _) = self.create_upload({1, 2}, 10) + self.http.result_of_with_flush( + self.imm_client.write_share_chunk( + storage_index, + 1, + upload_secret, + 0, + b"1" * 10, + ) + ) + self.http.result_of_with_flush( + self.imm_client.write_share_chunk( + storage_index, + 2, + upload_secret, + 0, + b"2" * 10, + ) + ) + self.assertEqual( + self.http.result_of_with_flush( + self.imm_client.read_share_chunk(storage_index, 1, 0, 10) + ), + b"1" * 10, + ) + self.assertEqual( + self.http.result_of_with_flush( + self.imm_client.read_share_chunk(storage_index, 2, 0, 10) + ), + b"2" * 10, + ) + + def test_mismatching_upload_fails(self): + """ + If an uploaded chunk conflicts with an already uploaded chunk, a + CONFLICT error is returned. + """ + (upload_secret, _, storage_index, created) = self.create_upload({1}, 100) + + # Write: + self.http.result_of_with_flush( + self.imm_client.write_share_chunk( + storage_index, + 1, + upload_secret, + 0, + b"0" * 10, + ) + ) + + # Conflicting write: + with assert_fails_with_http_code(self, http.CONFLICT): + self.http.result_of_with_flush( + self.imm_client.write_share_chunk( + storage_index, + 1, + upload_secret, + 0, + b"0123456789", + ) + ) + + def test_timed_out_upload_allows_reupload(self): + """ + If an in-progress upload times out, it is cancelled altogether, + allowing a new upload to occur. + """ + self._test_abort_or_timed_out_upload_to_existing_storage_index( + lambda **kwargs: self.http.clock.advance(30 * 60 + 1) + ) + + def test_abort_upload_allows_reupload(self): + """ + If an in-progress upload is aborted, it is cancelled altogether, + allowing a new upload to occur. 
+ """ + + def abort(storage_index, share_number, upload_secret): + return self.http.result_of_with_flush( + self.imm_client.abort_upload(storage_index, share_number, upload_secret) + ) + + self._test_abort_or_timed_out_upload_to_existing_storage_index(abort) + + def _test_abort_or_timed_out_upload_to_existing_storage_index(self, cancel_upload): + """Start uploading to an existing storage index that then times out or aborts. + + Re-uploading should work. + """ + # Start an upload: + (upload_secret, _, storage_index, _) = self.create_upload({1}, 100) + self.http.result_of_with_flush( + self.imm_client.write_share_chunk( + storage_index, + 1, + upload_secret, + 0, + b"123", + ) + ) + + # Now, the upload is cancelled somehow: + cancel_upload( + storage_index=storage_index, upload_secret=upload_secret, share_number=1 + ) + + # Now we can create a new share with the same storage index without + # complaint: + upload_secret = urandom(32) + lease_secret = urandom(32) + created = self.http.result_of_with_flush( + self.imm_client.create( + storage_index, + {1}, + 100, + upload_secret, + lease_secret, + lease_secret, + ) + ) + self.assertEqual(created.allocated, {1}) + + # And write to it, too: + self.http.result_of_with_flush( + self.imm_client.write_share_chunk( + storage_index, + 1, + upload_secret, + 0, + b"ABC", + ) + ) + + def test_unknown_aborts(self): + """ + Aborting uploads with an unknown storage index or share number will + result 404 HTTP response code. + """ + (upload_secret, _, storage_index, _) = self.create_upload({1}, 100) + + for si, num in [(storage_index, 3), (b"x" * 16, 1)]: + with assert_fails_with_http_code(self, http.NOT_FOUND): + self.http.result_of_with_flush( + self.imm_client.abort_upload(si, num, upload_secret) + ) + + def test_unauthorized_abort(self): + """ + An abort with the wrong key will return an unauthorized error, and will + not abort the upload. 
+ """ + (upload_secret, _, storage_index, _) = self.create_upload({1}, 100) + + # Failed to abort becaues wrong upload secret: + with assert_fails_with_http_code(self, http.UNAUTHORIZED): + self.http.result_of_with_flush( + self.imm_client.abort_upload(storage_index, 1, upload_secret + b"X") + ) + + # We can still write to it: + self.http.result_of_with_flush( + self.imm_client.write_share_chunk( + storage_index, + 1, + upload_secret, + 0, + b"ABC", + ) + ) + + def test_too_late_abort(self): + """ + An abort of an already-fully-uploaded immutable will result in 405 + error and will not affect the immutable. + """ + uploaded_data = b"123" + (upload_secret, _, storage_index, _) = self.create_upload({0}, 3) + self.http.result_of_with_flush( + self.imm_client.write_share_chunk( + storage_index, + 0, + upload_secret, + 0, + uploaded_data, + ) + ) + + # Can't abort, we finished upload: + with assert_fails_with_http_code(self, http.NOT_ALLOWED): + self.http.result_of_with_flush( + self.imm_client.abort_upload(storage_index, 0, upload_secret) + ) + + # Abort didn't prevent reading: + self.assertEqual( + uploaded_data, + self.http.result_of_with_flush( + self.imm_client.read_share_chunk( + storage_index, + 0, + 0, + 3, + ) + ), + ) + + def test_lease_on_unknown_storage_index(self): + """ + An attempt to renew an unknown storage index will result in a HTTP 404. 
+ """ + storage_index = urandom(16) + secret = b"A" * 32 + with assert_fails_with_http_code(self, http.NOT_FOUND): + self.http.result_of_with_flush( + self.general_client.add_or_renew_lease(storage_index, secret, secret) + ) + + +class MutableHTTPAPIsTests(SyncTestCase): + """Tests for mutable APIs.""" + + def setUp(self): + super(MutableHTTPAPIsTests, self).setUp() + disable_thread_pool_for_test(self) + self.http = self.useFixture(HttpTestFixture()) + self.mut_client = StorageClientMutables(self.http.client) + + def create_upload(self, data=b"abcdef"): + """ + Utility that creates shares 0 and 1 with bodies + ``{data}-{share_number}``. + """ + write_secret = urandom(32) + lease_secret = urandom(32) + storage_index = urandom(16) + self.http.result_of_with_flush( + self.mut_client.read_test_write_chunks( + storage_index, + write_secret, + lease_secret, + lease_secret, + { + 0: TestWriteVectors( + write_vectors=[WriteVector(offset=0, data=data + b"-0")] + ), + 1: TestWriteVectors( + write_vectors=[ + WriteVector(offset=0, data=data), + WriteVector(offset=len(data), data=b"-1"), + ] + ), + }, + [], + ) + ) + return storage_index, write_secret, lease_secret + + def test_write_can_be_read_small_data(self): + """ + Small written data can be read using ``read_share_chunk``. + """ + self.write_can_be_read(b"abcdef") + + def test_write_can_be_read_large_data(self): + """ + Large written data (50MB) can be read using ``read_share_chunk``. + """ + self.write_can_be_read(b"abcdefghij" * 5 * 1024 * 1024) + + def write_can_be_read(self, data): + """ + Written data can be read using ``read_share_chunk``. 
+ """ + lease_secret = urandom(32) + storage_index = urandom(16) + self.http.result_of_with_flush( + self.mut_client.read_test_write_chunks( + storage_index, + urandom(32), + lease_secret, + lease_secret, + { + 0: TestWriteVectors( + write_vectors=[WriteVector(offset=0, data=data)] + ), + }, + [], + ) + ) + read_data = self.http.result_of_with_flush( + self.mut_client.read_share_chunk(storage_index, 0, 0, len(data)) + ) + self.assertEqual(read_data, data) + + def test_read_before_write(self): + """In combo read/test/write operation, reads happen before writes.""" + storage_index, write_secret, lease_secret = self.create_upload() + result = self.http.result_of_with_flush( + self.mut_client.read_test_write_chunks( + storage_index, + write_secret, + lease_secret, + lease_secret, + { + 0: TestWriteVectors( + write_vectors=[WriteVector(offset=1, data=b"XYZ")] + ), + }, + [ReadVector(0, 8)], + ) + ) + # Reads are from before the write: + self.assertEqual( + result, + ReadTestWriteResult( + success=True, reads={0: [b"abcdef-0"], 1: [b"abcdef-1"]} + ), + ) + # But the write did happen: + data0 = self.http.result_of_with_flush( + self.mut_client.read_share_chunk(storage_index, 0, 0, 8) + ) + data1 = self.http.result_of_with_flush( + self.mut_client.read_share_chunk(storage_index, 1, 0, 8) + ) + self.assertEqual((data0, data1), (b"aXYZef-0", b"abcdef-1")) + + def test_conditional_write(self): + """Uploads only happen if the test passes.""" + storage_index, write_secret, lease_secret = self.create_upload() + result_failed = self.http.result_of_with_flush( + self.mut_client.read_test_write_chunks( + storage_index, + write_secret, + lease_secret, + lease_secret, + { + 0: TestWriteVectors( + test_vectors=[TestVector(1, 4, b"FAIL")], + write_vectors=[WriteVector(offset=1, data=b"XYZ")], + ), + }, + [], + ) + ) + self.assertFalse(result_failed.success) + + # This time the test matches: + result = self.http.result_of_with_flush( + self.mut_client.read_test_write_chunks( + 
storage_index, + write_secret, + lease_secret, + lease_secret, + { + 0: TestWriteVectors( + test_vectors=[TestVector(1, 4, b"bcde")], + write_vectors=[WriteVector(offset=1, data=b"XYZ")], + ), + }, + [ReadVector(0, 8)], + ) + ) + self.assertTrue(result.success) + self.assertEqual( + self.http.result_of_with_flush( + self.mut_client.read_share_chunk(storage_index, 0, 0, 8) + ), + b"aXYZef-0", + ) + + def test_list_shares(self): + """``list_shares()`` returns the shares for a given storage index.""" + storage_index, _, _ = self.create_upload() + self.assertEqual( + self.http.result_of_with_flush(self.mut_client.list_shares(storage_index)), + {0, 1}, + ) + + def test_non_existent_list_shares(self): + """A non-existent storage index errors when shares are listed.""" + with self.assertRaises(ClientException) as exc: + self.http.result_of_with_flush(self.mut_client.list_shares(urandom(32))) + self.assertEqual(exc.exception.code, http.NOT_FOUND) + + def test_wrong_write_enabler(self): + """Writes with the wrong write enabler fail, and are not processed.""" + storage_index, write_secret, lease_secret = self.create_upload() + with self.assertRaises(ClientException) as exc: + self.http.result_of_with_flush( + self.mut_client.read_test_write_chunks( + storage_index, + urandom(32), + lease_secret, + lease_secret, + { + 0: TestWriteVectors( + write_vectors=[WriteVector(offset=1, data=b"XYZ")] + ), + }, + [ReadVector(0, 8)], + ) + ) + self.assertEqual(exc.exception.code, http.UNAUTHORIZED) + + # The write did not happen: + self.assertEqual( + self.http.result_of_with_flush( + self.mut_client.read_share_chunk(storage_index, 0, 0, 8) + ), + b"abcdef-0", + ) + + +class SharedImmutableMutableTestsMixin: + """ + Shared tests for mutables and immutables where the API is the same. 
+ """ + + KIND: str # either "mutable" or "immutable" + general_client: StorageClientGeneral + client: Union[StorageClientImmutables, StorageClientMutables] + clientFactory: Callable[ + [StorageClient], Union[StorageClientImmutables, StorageClientMutables] + ] + + def upload(self, share_number: int, data_length=26) -> Tuple[bytes, bytes, bytes]: + """ + Create a share, return (storage_index, uploaded_data, lease secret). + """ + raise NotImplementedError + + def get_leases(self, storage_index: bytes) -> Iterable[LeaseInfo]: + """Get leases for the storage index.""" + raise NotImplementedError() + + def test_advise_corrupt_share(self): + """ + Advising share was corrupted succeeds from HTTP client's perspective, + and calls appropriate method on server. + """ + corrupted = [] + self.http.storage_server.advise_corrupt_share = lambda *args: corrupted.append( + args + ) + + storage_index, _, _ = self.upload(13) + reason = "OHNO \u1235" + self.http.result_of_with_flush( + self.client.advise_corrupt_share(storage_index, 13, reason) + ) + + self.assertEqual( + corrupted, + [(self.KIND.encode("ascii"), storage_index, 13, reason.encode("utf-8"))], + ) + + def test_advise_corrupt_share_unknown(self): + """ + Advising an unknown share was corrupted results in 404. + """ + storage_index, _, _ = self.upload(13) + reason = "OHNO \u1235" + self.http.result_of_with_flush( + self.client.advise_corrupt_share(storage_index, 13, reason) + ) + + for si, share_number in [(storage_index, 11), (urandom(16), 13)]: + with assert_fails_with_http_code(self, http.NOT_FOUND): + self.http.result_of_with_flush( + self.client.advise_corrupt_share(si, share_number, reason) + ) + + def test_lease_renew_and_add(self): + """ + It's possible the renew the lease on an uploaded mutable/immutable, by + using the same renewal secret, or add a new lease by choosing a + different renewal secret. 
+ """ + # Create a storage index: + storage_index, _, lease_secret = self.upload(0) + + [lease] = self.get_leases(storage_index) + initial_expiration_time = lease.get_expiration_time() + + # Time passes: + self.http.clock.advance(167) + + # We renew the lease: + self.http.result_of_with_flush( + self.general_client.add_or_renew_lease( + storage_index, lease_secret, lease_secret + ) + ) + + # More time passes: + self.http.clock.advance(10) + + # We create a new lease: + lease_secret2 = urandom(32) + self.http.result_of_with_flush( + self.general_client.add_or_renew_lease( + storage_index, lease_secret2, lease_secret2 + ) + ) + + [lease1, lease2] = self.get_leases(storage_index) + self.assertEqual(lease1.get_expiration_time(), initial_expiration_time + 167) + self.assertEqual(lease2.get_expiration_time(), initial_expiration_time + 177) + + def test_read_of_wrong_storage_index_fails(self): + """ + Reading from unknown storage index results in 404. + """ + with assert_fails_with_http_code(self, http.NOT_FOUND): + self.http.result_of_with_flush( + self.client.read_share_chunk( + b"1" * 16, + 1, + 0, + 10, + ) + ) + + def test_read_of_wrong_share_number_fails(self): + """ + Reading from unknown storage index results in 404. + """ + storage_index, _, _ = self.upload(1) + with assert_fails_with_http_code(self, http.NOT_FOUND): + self.http.result_of_with_flush( + self.client.read_share_chunk( + storage_index, + 7, # different share number + 0, + 10, + ) + ) + + def test_read_with_negative_offset_fails(self): + """ + Malformed or unsupported Range headers result in 416 (requested range + not satisfiable) error. 
+ """ + storage_index, _, _ = self.upload(1) + + def check_bad_range(bad_range_value): + client = self.clientFactory( + StorageClientWithHeadersOverride( + self.http.client, {"range": bad_range_value} + ) + ) + + with assert_fails_with_http_code( + self, http.REQUESTED_RANGE_NOT_SATISFIABLE + ): + self.http.result_of_with_flush( + client.read_share_chunk( + storage_index, + 1, + 0, + 10, + ) + ) + + # Bad unit + check_bad_range("molluscs=0-9") + # Negative offsets + check_bad_range("bytes=-2-9") + check_bad_range("bytes=0--10") + # Negative offset no endpoint + check_bad_range("bytes=-300-") + check_bad_range("bytes=") + # Multiple ranges are currently unsupported, even if they're + # semantically valid under HTTP: + check_bad_range("bytes=0-5, 6-7") + # Ranges without an end are currently unsupported, even if they're + # semantically valid under HTTP. + check_bad_range("bytes=0-") + + def _read_with_no_range_test(self, data_length): + """ + A read with no range returns the whole mutable/immutable. + + Actual test is defined in subclasses, to fix complaints from Hypothesis + about the method having different executors. + """ + storage_index, uploaded_data, _ = self.upload(1, data_length) + response = self.http.result_of_with_flush( + self.http.client.request( + "GET", + self.http.client.relative_url( + "/storage/v1/{}/{}/1".format(self.KIND, _encode_si(storage_index)) + ), + ) + ) + self.assertEqual(response.code, http.OK) + self.assertEqual( + self.http.result_of_with_flush(response.content()), uploaded_data + ) + + def test_validate_content_range_response_to_read(self): + """ + The server responds to ranged reads with an appropriate Content-Range + header. 
+ """ + storage_index, _, _ = self.upload(1, 26) + + def check_range(requested_range, expected_response): + headers = Headers() + headers.setRawHeaders("range", [requested_range]) + response = self.http.result_of_with_flush( + self.http.client.request( + "GET", + self.http.client.relative_url( + "/storage/v1/{}/{}/1".format( + self.KIND, _encode_si(storage_index) + ) + ), + headers=headers, + ) + ) + self.assertEqual( + response.headers.getRawHeaders("content-range"), [expected_response] + ) + + check_range("bytes=0-10", "bytes 0-10/*") + check_range("bytes=3-17", "bytes 3-17/*") + # TODO re-enable in https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3907 + # Can't go beyond the end of the mutable/immutable! + # check_range("bytes=10-100", "bytes 10-25/*") + + +class ImmutableSharedTests(SharedImmutableMutableTestsMixin, SyncTestCase): + """Shared tests, running on immutables.""" + + KIND = "immutable" + clientFactory = StorageClientImmutables + + def setUp(self): + super(ImmutableSharedTests, self).setUp() + disable_thread_pool_for_test(self) + self.http = self.useFixture(HttpTestFixture()) + self.client = self.clientFactory(self.http.client) + self.general_client = StorageClientGeneral(self.http.client) + + def upload(self, share_number, data_length=26): + """ + Create a share, return (storage_index, uploaded_data, lease_secret). 
+ """ + uploaded_data = (b"abcdefghijklmnopqrstuvwxyz" * ((data_length // 26) + 1))[ + :data_length + ] + upload_secret = urandom(32) + lease_secret = urandom(32) + storage_index = urandom(16) + self.http.result_of_with_flush( + self.client.create( + storage_index, + {share_number}, + data_length, + upload_secret, + lease_secret, + lease_secret, + ) + ) + self.http.result_of_with_flush( + self.client.write_share_chunk( + storage_index, + share_number, + upload_secret, + 0, + uploaded_data, + ) + ) + return storage_index, uploaded_data, lease_secret + + def get_leases(self, storage_index): + return self.http.storage_server.get_leases(storage_index) + + @given(data_length=st.integers(min_value=1, max_value=300000)) + def test_read_with_no_range(self, data_length): + """ + A read with no range returns the whole immutable. + """ + return self._read_with_no_range_test(data_length) + + +class MutableSharedTests(SharedImmutableMutableTestsMixin, SyncTestCase): + """Shared tests, running on mutables.""" + + KIND = "mutable" + clientFactory = StorageClientMutables + + def setUp(self): + super(MutableSharedTests, self).setUp() + disable_thread_pool_for_test(self) + self.http = self.useFixture(HttpTestFixture()) + self.client = self.clientFactory(self.http.client) + self.general_client = StorageClientGeneral(self.http.client) + + def upload(self, share_number, data_length=26): + """ + Create a share, return (storage_index, uploaded_data, lease_secret). 
+ """ + data = (b"abcdefghijklmnopqrstuvwxyz" * ((data_length // 26) + 1))[:data_length] + write_secret = urandom(32) + lease_secret = urandom(32) + storage_index = urandom(16) + self.http.result_of_with_flush( + self.client.read_test_write_chunks( + storage_index, + write_secret, + lease_secret, + lease_secret, + { + share_number: TestWriteVectors( + write_vectors=[WriteVector(offset=0, data=data)] + ), + }, + [], + ) + ) + return storage_index, data, lease_secret + + def get_leases(self, storage_index): + return self.http.storage_server.get_slot_leases(storage_index) + + @given(data_length=st.integers(min_value=1, max_value=300000)) + def test_read_with_no_range(self, data_length): + """ + A read with no range returns the whole mutable. + """ + return self._read_with_no_range_test(data_length) diff --git a/src/allmydata/test/test_storage_https.py b/src/allmydata/test/test_storage_https.py new file mode 100644 index 000000000..5ff193c3d --- /dev/null +++ b/src/allmydata/test/test_storage_https.py @@ -0,0 +1,223 @@ +""" +Tests for the TLS part of the HTTP Storage Protocol. + +More broadly, these are tests for HTTPS usage as replacement for Foolscap's +server authentication logic, which may one day apply outside of HTTP Storage +Protocol. 
+""" + +from contextlib import asynccontextmanager +from base64 import b64decode + +from yaml import safe_load +from cryptography import x509 + +from twisted.internet.endpoints import serverFromString +from twisted.internet import reactor +from twisted.internet.defer import maybeDeferred +from twisted.web.server import Site +from twisted.web.static import Data +from twisted.web.client import Agent, HTTPConnectionPool, ResponseNeverReceived +from twisted.python.filepath import FilePath +from treq.client import HTTPClient + +from .common import SyncTestCase, AsyncTestCase, SameProcessStreamEndpointAssigner +from .certs import ( + generate_certificate, + generate_private_key, + private_key_to_file, + cert_to_file, +) +from ..storage.http_common import get_spki, get_spki_hash +from ..storage.http_client import _StorageClientHTTPSPolicy +from ..storage.http_server import _TLSEndpointWrapper +from ..util.deferredutil import async_to_deferred +from .common_system import spin_until_cleanup_done + +spki_test_vectors_path = FilePath(__file__).sibling("data").child("spki-hash-test-vectors.yaml") + + +class HTTPSNurlTests(SyncTestCase): + """Tests for HTTPS NURLs.""" + + def test_spki_hash(self): + """ + The output of ``get_spki_hash()`` matches the semantics of RFC + 7469. + + The test vector certificates were generated using the openssl command + line tool:: + + openssl req -x509 -newkey rsa:4096 -keyout key.pem -out cert.pem -days 365 + + The expected hash was generated using Appendix A instructions in the + RFC:: + + openssl x509 -noout -in certificate.pem -pubkey | \ + openssl asn1parse -noout -inform pem -out public.key + openssl dgst -sha256 -binary public.key | openssl enc -base64 + + The OpenSSL base64-encoded output was then adjusted into the URL-safe + base64 variation: `+` and `/` were replaced with `-` and `_` and the + trailing `=` padding was removed. 
+ + The expected SubjectPublicKeyInfo bytes were extracted from the + implementation of `get_spki_hash` after its result matched the + expected value generated by the command above. + """ + spki_cases = safe_load(spki_test_vectors_path.getContent())["vector"] + for n, case in enumerate(spki_cases): + certificate_text = case["certificate"].encode("ascii") + expected_spki = b64decode(case["expected-spki"]) + expected_hash = case["expected-hash"].encode("ascii") + + try: + certificate = x509.load_pem_x509_certificate(certificate_text) + except Exception as e: + self.fail(f"Loading case {n} certificate failed: {e}") + + self.assertEqual( + expected_spki, + get_spki(certificate), + f"case {n} spki data mismatch", + ) + self.assertEqual( + expected_hash, + get_spki_hash(certificate), + f"case {n} spki hash mismatch", + ) + + +class PinningHTTPSValidation(AsyncTestCase): + """ + Test client-side validation logic of HTTPS certificates that uses + Tahoe-LAFS's pinning-based scheme instead of the traditional certificate + authority scheme. + + https://cryptography.io/en/latest/x509/tutorial/#creating-a-self-signed-certificate + """ + + def setUp(self): + self._port_assigner = SameProcessStreamEndpointAssigner() + self._port_assigner.setUp() + self.addCleanup(self._port_assigner.tearDown) + return AsyncTestCase.setUp(self) + + def tearDown(self): + d = maybeDeferred(AsyncTestCase.tearDown, self) + return d.addCallback(lambda _: spin_until_cleanup_done()) + + @asynccontextmanager + async def listen(self, private_key_path: FilePath, cert_path: FilePath): + """ + Context manager that runs a HTTPS server with the given private key + and certificate. + + Returns a URL that will connect to the server. 
+ """ + location_hint, endpoint_string = self._port_assigner.assign(reactor) + underlying_endpoint = serverFromString(reactor, endpoint_string) + endpoint = _TLSEndpointWrapper.from_paths( + underlying_endpoint, private_key_path, cert_path + ) + root = Data(b"YOYODYNE", "text/plain") + root.isLeaf = True + listening_port = await endpoint.listen(Site(root)) + try: + yield f"https://127.0.0.1:{listening_port.getHost().port}/" # type: ignore[attr-defined] + finally: + result = listening_port.stopListening() + if result is not None: + await result + + def request(self, url: str, expected_certificate: x509.Certificate): + """ + Send a HTTPS request to the given URL, ensuring that the given + certificate is the one used via SPKI-hash-based pinning comparison. + """ + # No persistent connections, so we don't have dirty reactor at the end + # of the test. + treq_client = HTTPClient( + Agent( + reactor, + _StorageClientHTTPSPolicy( + expected_spki_hash=get_spki_hash(expected_certificate) + ), + pool=HTTPConnectionPool(reactor, persistent=False), + ) + ) + return treq_client.get(url) + + @async_to_deferred + async def test_success(self): + """ + If all conditions are met, a TLS client using the Tahoe-LAFS policy can + connect to the server. + """ + private_key = generate_private_key() + certificate = generate_certificate(private_key) + async with self.listen( + private_key_to_file(FilePath(self.mktemp()), private_key), + cert_to_file(FilePath(self.mktemp()), certificate), + ) as url: + response = await self.request(url, certificate) + self.assertEqual(await response.content(), b"YOYODYNE") + + @async_to_deferred + async def test_server_certificate_has_wrong_hash(self): + """ + If the server's certificate hash doesn't match the hash the client + expects, the request to the server fails. 
+ """ + private_key1 = generate_private_key() + certificate1 = generate_certificate(private_key1) + private_key2 = generate_private_key() + certificate2 = generate_certificate(private_key2) + + async with self.listen( + private_key_to_file(FilePath(self.mktemp()), private_key1), + cert_to_file(FilePath(self.mktemp()), certificate1), + ) as url: + with self.assertRaises(ResponseNeverReceived): + await self.request(url, certificate2) + + @async_to_deferred + async def test_server_certificate_expired(self): + """ + If the server's certificate has expired, the request to the server + succeeds if the hash matches the one the client expects; expiration has + no effect. + """ + private_key = generate_private_key() + certificate = generate_certificate(private_key, expires_days=-10) + + async with self.listen( + private_key_to_file(FilePath(self.mktemp()), private_key), + cert_to_file(FilePath(self.mktemp()), certificate), + ) as url: + response = await self.request(url, certificate) + self.assertEqual(await response.content(), b"YOYODYNE") + + @async_to_deferred + async def test_server_certificate_not_valid_yet(self): + """ + If the server's certificate is only valid starting in The Future, the + request to the server succeeds if the hash matches the one the client + expects; start time has no effect. + """ + private_key = generate_private_key() + certificate = generate_certificate( + private_key, expires_days=10, valid_in_days=5 + ) + + async with self.listen( + private_key_to_file(FilePath(self.mktemp()), private_key), + cert_to_file(FilePath(self.mktemp()), certificate), + ) as url: + response = await self.request(url, certificate) + self.assertEqual(await response.content(), b"YOYODYNE") + + # A potential attack to test is a private key that doesn't match the + # certificate... but OpenSSL (quite rightly) won't let you listen with that + # so I don't know how to test that! 
See + # https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3884 diff --git a/src/allmydata/test/test_storage_web.py b/src/allmydata/test/test_storage_web.py index b3f5fac98..71d26af54 100644 --- a/src/allmydata/test/test_storage_web.py +++ b/src/allmydata/test/test_storage_web.py @@ -4,41 +4,44 @@ Tests for twisted.storage that uses Web APIs. Partially ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - # Omitted list since it broke a test on Python 2. Shouldn't require further - # work, when we switch to Python 3 we'll be dropping this, anyway. - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, object, range, str, max, min # noqa: F401 - import time import os.path import re import json +from unittest import skipIf +from io import StringIO from twisted.trial import unittest - from twisted.internet import defer from twisted.application import service from twisted.web.template import flattenString +from twisted.python.filepath import FilePath +from twisted.python.runtime import platform from foolscap.api import fireEventually from allmydata.util import fileutil, hashutil, base32, pollmixin from allmydata.storage.common import storage_index_to_dir, \ UnknownMutableContainerVersionError, UnknownImmutableContainerVersionError from allmydata.storage.server import StorageServer -from allmydata.storage.crawler import BucketCountingCrawler -from allmydata.storage.expirer import LeaseCheckingCrawler +from allmydata.storage.crawler import ( + BucketCountingCrawler, + _LeaseStateSerializer, +) +from allmydata.storage.expirer import ( + LeaseCheckingCrawler, + _HistorySerializer, +) from allmydata.web.storage import ( StorageStatus, StorageStatusElement, remove_prefix ) -from .common_util import FakeCanary +from allmydata.scripts.admin import ( + 
migrate_crawler, +) +from allmydata.scripts.runner import ( + Options, +) from .common_web import ( render, @@ -147,7 +150,7 @@ class BucketCounter(unittest.TestCase, pollmixin.PollMixin): html = renderSynchronously(w) s = remove_tags(html) self.failUnlessIn(b"Total buckets: 0 (the number of", s) - self.failUnless(b"Next crawl in 59 minutes" in s or "Next crawl in 60 minutes" in s, s) + self.failUnless(b"Next crawl in 59 minutes" in s or b"Next crawl in 60 minutes" in s, s) d.addCallback(_check2) return d @@ -289,28 +292,27 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin): mutable_si_3, rs3, cs3, we3 = make_mutable(b"\x03" * 16) rs3a, cs3a = make_extra_lease(mutable_si_3, 1) sharenums = [0] - canary = FakeCanary() # note: 'tahoe debug dump-share' will not handle this file, since the # inner contents are not a valid CHK share data = b"\xff" * 1000 - a,w = ss.remote_allocate_buckets(immutable_si_0, rs0, cs0, sharenums, - 1000, canary) - w[0].remote_write(0, data) - w[0].remote_close() + a,w = ss.allocate_buckets(immutable_si_0, rs0, cs0, sharenums, + 1000) + w[0].write(0, data) + w[0].close() - a,w = ss.remote_allocate_buckets(immutable_si_1, rs1, cs1, sharenums, - 1000, canary) - w[0].remote_write(0, data) - w[0].remote_close() - ss.remote_add_lease(immutable_si_1, rs1a, cs1a) + a,w = ss.allocate_buckets(immutable_si_1, rs1, cs1, sharenums, + 1000) + w[0].write(0, data) + w[0].close() + ss.add_lease(immutable_si_1, rs1a, cs1a) - writev = ss.remote_slot_testv_and_readv_and_writev + writev = ss.slot_testv_and_readv_and_writev writev(mutable_si_2, (we2, rs2, cs2), {0: ([], [(0,data)], len(data))}, []) writev(mutable_si_3, (we3, rs3, cs3), {0: ([], [(0,data)], len(data))}, []) - ss.remote_add_lease(mutable_si_3, rs3a, cs3a) + ss.add_lease(mutable_si_3, rs3a, cs3a) self.sis = [immutable_si_0, immutable_si_1, mutable_si_2, mutable_si_3] self.renew_secrets = [rs0, rs1, rs1a, rs2, rs3, rs3a] @@ -376,7 +378,7 @@ class LeaseCrawler(unittest.TestCase, 
pollmixin.PollMixin): self.failUnlessEqual(type(lah), list) self.failUnlessEqual(len(lah), 1) self.failUnlessEqual(lah, [ (0.0, DAY, 1) ] ) - self.failUnlessEqual(so_far["leases-per-share-histogram"], {1: 1}) + self.failUnlessEqual(so_far["leases-per-share-histogram"], {"1": 1}) self.failUnlessEqual(so_far["corrupt-shares"], []) sr1 = so_far["space-recovered"] self.failUnlessEqual(sr1["examined-buckets"], 1) @@ -427,9 +429,9 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin): self.failIf("cycle-to-date" in s) self.failIf("estimated-remaining-cycle" in s) self.failIf("estimated-current-cycle" in s) - last = s["history"][0] + last = s["history"]["0"] self.failUnlessIn("cycle-start-finish-times", last) - self.failUnlessEqual(type(last["cycle-start-finish-times"]), tuple) + self.failUnlessEqual(type(last["cycle-start-finish-times"]), list) self.failUnlessEqual(last["expiration-enabled"], False) self.failUnlessIn("configured-expiration-mode", last) @@ -437,9 +439,9 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin): lah = last["lease-age-histogram"] self.failUnlessEqual(type(lah), list) self.failUnlessEqual(len(lah), 1) - self.failUnlessEqual(lah, [ (0.0, DAY, 6) ] ) + self.failUnlessEqual(lah, [ [0.0, DAY, 6] ] ) - self.failUnlessEqual(last["leases-per-share-histogram"], {1: 2, 2: 2}) + self.failUnlessEqual(last["leases-per-share-histogram"], {"1": 2, "2": 2}) self.failUnlessEqual(last["corrupt-shares"], []) rec = last["space-recovered"] @@ -485,17 +487,7 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin): return d def backdate_lease(self, sf, renew_secret, new_expire_time): - # ShareFile.renew_lease ignores attempts to back-date a lease (i.e. - # "renew" a lease with a new_expire_time that is older than what the - # current lease has), so we have to reach inside it. 
- for i,lease in enumerate(sf.get_leases()): - if lease.renew_secret == renew_secret: - lease.expiration_time = new_expire_time - f = open(sf.home, 'rb+') - sf._write_lease_record(f, i, lease) - f.close() - return - raise IndexError("unable to renew non-existent lease") + sf.renew_lease(renew_secret, new_expire_time, allow_backdate=True) def test_expire_age(self): basedir = "storage/LeaseCrawler/expire_age" @@ -597,12 +589,12 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin): self.failUnlessEqual(count_leases(mutable_si_3), 1) s = lc.get_state() - last = s["history"][0] + last = s["history"]["0"] self.failUnlessEqual(last["expiration-enabled"], True) self.failUnlessEqual(last["configured-expiration-mode"], - ("age", 2000, None, ("mutable", "immutable"))) - self.failUnlessEqual(last["leases-per-share-histogram"], {1: 2, 2: 2}) + ["age", 2000, None, ["mutable", "immutable"]]) + self.failUnlessEqual(last["leases-per-share-histogram"], {"1": 2, "2": 2}) rec = last["space-recovered"] self.failUnlessEqual(rec["examined-buckets"], 4) @@ -741,14 +733,14 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin): self.failUnlessEqual(count_leases(mutable_si_3), 1) s = lc.get_state() - last = s["history"][0] + last = s["history"]["0"] self.failUnlessEqual(last["expiration-enabled"], True) self.failUnlessEqual(last["configured-expiration-mode"], - ("cutoff-date", None, then, - ("mutable", "immutable"))) + ["cutoff-date", None, then, + ["mutable", "immutable"]]) self.failUnlessEqual(last["leases-per-share-histogram"], - {1: 2, 2: 2}) + {"1": 2, "2": 2}) rec = last["space-recovered"] self.failUnlessEqual(rec["examined-buckets"], 4) @@ -934,8 +926,8 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin): s = lc.get_state() h = s["history"] self.failUnlessEqual(len(h), 10) - self.failUnlessEqual(max(h.keys()), 15) - self.failUnlessEqual(min(h.keys()), 6) + self.failUnlessEqual(max(int(k) for k in h.keys()), 15) + self.failUnlessEqual(min(int(k) for k in 
h.keys()), 6) d.addCallback(_check) return d @@ -1024,7 +1016,7 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin): def _check(ignored): s = lc.get_state() - last = s["history"][0] + last = s["history"]["0"] rec = last["space-recovered"] self.failUnlessEqual(rec["configured-buckets"], 4) self.failUnlessEqual(rec["configured-shares"], 4) @@ -1120,7 +1112,7 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin): def _after_first_cycle(ignored): s = lc.get_state() - last = s["history"][0] + last = s["history"]["0"] rec = last["space-recovered"] self.failUnlessEqual(rec["examined-buckets"], 5) self.failUnlessEqual(rec["examined-shares"], 3) @@ -1149,6 +1141,390 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin): d.addBoth(_cleanup) return d + @skipIf(platform.isWindows(), "pickle test-data can't be loaded on windows") + def test_deserialize_pickle(self): + """ + The crawler can read existing state from the old pickle format + """ + # this file came from an "in the wild" tahoe version 1.16.0 + original_pickle = FilePath(__file__).parent().child("data").child("lease_checker.state.txt") + root = FilePath(self.mktemp()) + storage = root.child("storage") + storage.makedirs() + test_pickle = storage.child("lease_checker.state") + with test_pickle.open("wb") as local, original_pickle.open("rb") as remote: + local.write(remote.read()) + + # convert from pickle format to JSON + top = Options() + top.parseOptions([ + "admin", "migrate-crawler", + "--basedir", storage.parent().path, + ]) + options = top.subOptions + while hasattr(options, "subOptions"): + options = options.subOptions + options.stdout = StringIO() + migrate_crawler(options) + + # the (existing) state file should have been upgraded to JSON + self.assertFalse(test_pickle.exists()) + self.assertTrue(test_pickle.siblingExtension(".json").exists()) + serial = _LeaseStateSerializer(test_pickle.path) + + self.assertEqual( + serial.load(), + { + u'last-complete-prefix': None, + u'version': 1, 
+ u'current-cycle-start-time': 1635003106.611748, + u'last-cycle-finished': 312, + u'cycle-to-date': { + u'leases-per-share-histogram': { + u'1': 36793, + u'2': 1, + }, + u'space-recovered': { + u'examined-buckets-immutable': 17183, + u'configured-buckets-mutable': 0, + u'examined-shares-mutable': 1796, + u'original-shares-mutable': 1563, + u'configured-buckets-immutable': 0, + u'original-shares-immutable': 27926, + u'original-diskbytes-immutable': 431149056, + u'examined-shares-immutable': 34998, + u'original-buckets': 14661, + u'actual-shares-immutable': 0, + u'configured-shares': 0, + u'original-buckets-mutable': 899, + u'actual-diskbytes': 4096, + u'actual-shares-mutable': 0, + u'configured-buckets': 1, + u'examined-buckets-unknown': 14, + u'actual-sharebytes': 0, + u'original-shares': 29489, + u'actual-buckets-immutable': 0, + u'original-sharebytes': 312664812, + u'examined-sharebytes-immutable': 383801602, + u'actual-shares': 0, + u'actual-sharebytes-immutable': 0, + u'original-diskbytes': 441643008, + u'configured-diskbytes-mutable': 0, + u'configured-sharebytes-immutable': 0, + u'configured-shares-mutable': 0, + u'actual-diskbytes-immutable': 0, + u'configured-diskbytes-immutable': 0, + u'original-diskbytes-mutable': 10489856, + u'actual-sharebytes-mutable': 0, + u'configured-sharebytes': 0, + u'examined-shares': 36794, + u'actual-diskbytes-mutable': 0, + u'actual-buckets': 1, + u'original-buckets-immutable': 13761, + u'configured-sharebytes-mutable': 0, + u'examined-sharebytes': 390369660, + u'original-sharebytes-immutable': 308125753, + u'original-sharebytes-mutable': 4539059, + u'actual-buckets-mutable': 0, + u'examined-buckets-mutable': 1043, + u'configured-shares-immutable': 0, + u'examined-diskbytes': 476598272, + u'examined-diskbytes-mutable': 9154560, + u'examined-sharebytes-mutable': 6568058, + u'examined-buckets': 18241, + u'configured-diskbytes': 4096, + u'examined-diskbytes-immutable': 467443712}, + u'corrupt-shares': [ + 
[u'2dn6xnlnsqwtnapwxfdivpm3s4', 4], + [u'2dn6xnlnsqwtnapwxfdivpm3s4', 1], + [u'2rrzthwsrrxolevmwdvbdy3rqi', 4], + [u'2rrzthwsrrxolevmwdvbdy3rqi', 1], + [u'2skfngcto6h7eqmn4uo7ntk3ne', 4], + [u'2skfngcto6h7eqmn4uo7ntk3ne', 1], + [u'32d5swqpqx2mwix7xmqzvhdwje', 4], + [u'32d5swqpqx2mwix7xmqzvhdwje', 1], + [u'5mmayp66yflmpon3o6unsnbaca', 4], + [u'5mmayp66yflmpon3o6unsnbaca', 1], + [u'6ixhpvbtre7fnrl6pehlrlflc4', 4], + [u'6ixhpvbtre7fnrl6pehlrlflc4', 1], + [u'ewzhvswjsz4vp2bqkb6mi3bz2u', 4], + [u'ewzhvswjsz4vp2bqkb6mi3bz2u', 1], + [u'fu7pazf6ogavkqj6z4q5qqex3u', 4], + [u'fu7pazf6ogavkqj6z4q5qqex3u', 1], + [u'hbyjtqvpcimwxiyqbcbbdn2i4a', 4], + [u'hbyjtqvpcimwxiyqbcbbdn2i4a', 1], + [u'pmcjbdkbjdl26k3e6yja77femq', 4], + [u'pmcjbdkbjdl26k3e6yja77femq', 1], + [u'r6swof4v2uttbiiqwj5pi32cm4', 4], + [u'r6swof4v2uttbiiqwj5pi32cm4', 1], + [u't45v5akoktf53evc2fi6gwnv6y', 4], + [u't45v5akoktf53evc2fi6gwnv6y', 1], + [u'y6zb4faar3rdvn3e6pfg4wlotm', 4], + [u'y6zb4faar3rdvn3e6pfg4wlotm', 1], + [u'z3yghutvqoqbchjao4lndnrh3a', 4], + [u'z3yghutvqoqbchjao4lndnrh3a', 1], + ], + u'lease-age-histogram': { + "1641600,1728000": 78, + "12441600,12528000": 78, + "8640000,8726400": 32, + "1814400,1900800": 1860, + "2764800,2851200": 76, + "11491200,11577600": 20, + "10713600,10800000": 183, + "47865600,47952000": 7, + "3110400,3196800": 328, + "10627200,10713600": 43, + "45619200,45705600": 4, + "12873600,12960000": 5, + "7430400,7516800": 7228, + "1555200,1641600": 492, + "38880000,38966400": 3, + "12528000,12614400": 193, + "7344000,7430400": 12689, + "2678400,2764800": 278, + "2332800,2419200": 12, + "9244800,9331200": 73, + "12787200,12873600": 218, + "49075200,49161600": 19, + "10368000,10454400": 117, + "4665600,4752000": 256, + "7516800,7603200": 993, + "42336000,42422400": 33, + "10972800,11059200": 122, + "39052800,39139200": 51, + "12614400,12700800": 210, + "7603200,7689600": 2004, + "10540800,10627200": 16, + "950400,1036800": 4435, + "42076800,42163200": 4, + "8812800,8899200": 57, + 
"5788800,5875200": 954, + "36374400,36460800": 3, + "9331200,9417600": 12, + "30499200,30585600": 5, + "12700800,12787200": 25, + "2073600,2160000": 388, + "12960000,13046400": 8, + "11923200,12009600": 89, + "3369600,3456000": 79, + "3196800,3283200": 628, + "37497600,37584000": 11, + "33436800,33523200": 7, + "44928000,45014400": 2, + "37929600,38016000": 3, + "38966400,39052800": 61, + "3283200,3369600": 86, + "11750400,11836800": 7, + "3801600,3888000": 32, + "46310400,46396800": 1, + "4838400,4924800": 386, + "8208000,8294400": 38, + "37411200,37497600": 4, + "12009600,12096000": 329, + "10454400,10540800": 1239, + "40176000,40262400": 1, + "3715200,3801600": 104, + "44409600,44496000": 13, + "38361600,38448000": 5, + "12268800,12355200": 2, + "28771200,28857600": 6, + "41990400,42076800": 10, + "2592000,2678400": 40, + }, + }, + 'current-cycle': None, + 'last-complete-bucket': None, + } + ) + second_serial = _LeaseStateSerializer(serial._path.path) + self.assertEqual( + serial.load(), + second_serial.load(), + ) + + @skipIf(platform.isWindows(), "pickle test-data can't be loaded on windows") + def test_deserialize_history_pickle(self): + """ + The crawler can read existing history state from the old pickle + format + """ + # this file came from an "in the wild" tahoe version 1.16.0 + original_pickle = FilePath(__file__).parent().child("data").child("lease_checker.history.txt") + root = FilePath(self.mktemp()) + storage = root.child("storage") + storage.makedirs() + test_pickle = storage.child("lease_checker.history") + with test_pickle.open("wb") as local, original_pickle.open("rb") as remote: + local.write(remote.read()) + + # convert from pickle format to JSON + top = Options() + top.parseOptions([ + "admin", "migrate-crawler", + "--basedir", storage.parent().path, + ]) + options = top.subOptions + while hasattr(options, "subOptions"): + options = options.subOptions + options.stdout = StringIO() + migrate_crawler(options) + + serial = 
_HistorySerializer(test_pickle.path) + + self.maxDiff = None + self.assertEqual( + serial.load(), + { + "363": { + 'configured-expiration-mode': ['age', None, None, ['immutable', 'mutable']], + 'expiration-enabled': False, + 'leases-per-share-histogram': { + '1': 39774, + }, + 'lease-age-histogram': [ + [0, 86400, 3125], + [345600, 432000, 4175], + [950400, 1036800, 141], + [1036800, 1123200, 345], + [1123200, 1209600, 81], + [1296000, 1382400, 1832], + [1555200, 1641600, 390], + [1728000, 1814400, 12], + [2073600, 2160000, 84], + [2160000, 2246400, 228], + [2246400, 2332800, 75], + [2592000, 2678400, 644], + [2678400, 2764800, 273], + [2764800, 2851200, 94], + [2851200, 2937600, 97], + [3196800, 3283200, 143], + [3283200, 3369600, 48], + [4147200, 4233600, 374], + [4320000, 4406400, 534], + [5270400, 5356800, 1005], + [6739200, 6825600, 8704], + [6825600, 6912000, 3986], + [6912000, 6998400, 7592], + [6998400, 7084800, 2607], + [7689600, 7776000, 35], + [8035200, 8121600, 33], + [8294400, 8380800, 54], + [8640000, 8726400, 45], + [8726400, 8812800, 27], + [8812800, 8899200, 12], + [9763200, 9849600, 77], + [9849600, 9936000, 91], + [9936000, 10022400, 1210], + [10022400, 10108800, 45], + [10108800, 10195200, 186], + [10368000, 10454400, 113], + [10972800, 11059200, 21], + [11232000, 11318400, 5], + [11318400, 11404800, 19], + [11404800, 11491200, 238], + [11491200, 11577600, 159], + [11750400, 11836800, 1], + [11836800, 11923200, 32], + [11923200, 12009600, 192], + [12009600, 12096000, 222], + [12096000, 12182400, 18], + [12182400, 12268800, 224], + [12268800, 12355200, 9], + [12355200, 12441600, 9], + [12441600, 12528000, 10], + [12528000, 12614400, 6], + [12614400, 12700800, 6], + [12700800, 12787200, 18], + [12787200, 12873600, 6], + [12873600, 12960000, 62], + ], + 'cycle-start-finish-times': [1634446505.241972, 1634446666.055401], + 'space-recovered': { + 'examined-buckets-immutable': 17896, + 'configured-buckets-mutable': 0, + 'examined-shares-mutable': 
2473, + 'original-shares-mutable': 1185, + 'configured-buckets-immutable': 0, + 'original-shares-immutable': 27457, + 'original-diskbytes-immutable': 2810982400, + 'examined-shares-immutable': 37301, + 'original-buckets': 14047, + 'actual-shares-immutable': 0, + 'configured-shares': 0, + 'original-buckets-mutable': 691, + 'actual-diskbytes': 4096, + 'actual-shares-mutable': 0, + 'configured-buckets': 1, + 'examined-buckets-unknown': 14, + 'actual-sharebytes': 0, + 'original-shares': 28642, + 'actual-buckets-immutable': 0, + 'original-sharebytes': 2695552941, + 'examined-sharebytes-immutable': 2754798505, + 'actual-shares': 0, + 'actual-sharebytes-immutable': 0, + 'original-diskbytes': 2818981888, + 'configured-diskbytes-mutable': 0, + 'configured-sharebytes-immutable': 0, + 'configured-shares-mutable': 0, + 'actual-diskbytes-immutable': 0, + 'configured-diskbytes-immutable': 0, + 'original-diskbytes-mutable': 7995392, + 'actual-sharebytes-mutable': 0, + 'configured-sharebytes': 0, + 'examined-shares': 39774, + 'actual-diskbytes-mutable': 0, + 'actual-buckets': 1, + 'original-buckets-immutable': 13355, + 'configured-sharebytes-mutable': 0, + 'examined-sharebytes': 2763646972, + 'original-sharebytes-immutable': 2692076909, + 'original-sharebytes-mutable': 3476032, + 'actual-buckets-mutable': 0, + 'examined-buckets-mutable': 1286, + 'configured-shares-immutable': 0, + 'examined-diskbytes': 2854801408, + 'examined-diskbytes-mutable': 12161024, + 'examined-sharebytes-mutable': 8848467, + 'examined-buckets': 19197, + 'configured-diskbytes': 4096, + 'examined-diskbytes-immutable': 2842640384 + }, + 'corrupt-shares': [ + ['2dn6xnlnsqwtnapwxfdivpm3s4', 3], + ['2dn6xnlnsqwtnapwxfdivpm3s4', 0], + ['2rrzthwsrrxolevmwdvbdy3rqi', 3], + ['2rrzthwsrrxolevmwdvbdy3rqi', 0], + ['2skfngcto6h7eqmn4uo7ntk3ne', 3], + ['2skfngcto6h7eqmn4uo7ntk3ne', 0], + ['32d5swqpqx2mwix7xmqzvhdwje', 3], + ['32d5swqpqx2mwix7xmqzvhdwje', 0], + ['5mmayp66yflmpon3o6unsnbaca', 3], + 
['5mmayp66yflmpon3o6unsnbaca', 0], + ['6ixhpvbtre7fnrl6pehlrlflc4', 3], + ['6ixhpvbtre7fnrl6pehlrlflc4', 0], + ['ewzhvswjsz4vp2bqkb6mi3bz2u', 3], + ['ewzhvswjsz4vp2bqkb6mi3bz2u', 0], + ['fu7pazf6ogavkqj6z4q5qqex3u', 3], + ['fu7pazf6ogavkqj6z4q5qqex3u', 0], + ['hbyjtqvpcimwxiyqbcbbdn2i4a', 3], + ['hbyjtqvpcimwxiyqbcbbdn2i4a', 0], + ['pmcjbdkbjdl26k3e6yja77femq', 3], + ['pmcjbdkbjdl26k3e6yja77femq', 0], + ['r6swof4v2uttbiiqwj5pi32cm4', 3], + ['r6swof4v2uttbiiqwj5pi32cm4', 0], + ['t45v5akoktf53evc2fi6gwnv6y', 3], + ['t45v5akoktf53evc2fi6gwnv6y', 0], + ['y6zb4faar3rdvn3e6pfg4wlotm', 3], + ['y6zb4faar3rdvn3e6pfg4wlotm', 0], + ['z3yghutvqoqbchjao4lndnrh3a', 3], + ['z3yghutvqoqbchjao4lndnrh3a', 0], + ] + } + } + ) + class WebStatus(unittest.TestCase, pollmixin.PollMixin): diff --git a/src/allmydata/test/test_system.py b/src/allmydata/test/test_system.py index 3e1bdcdd4..b37d6923c 100644 --- a/src/allmydata/test/test_system.py +++ b/src/allmydata/test/test_system.py @@ -1,20 +1,13 @@ """ Ported to Python 3. """ -from __future__ import print_function -from __future__ import absolute_import -from __future__ import division -from __future__ import unicode_literals +from __future__ import annotations -from future.utils import PY2 -if PY2: - # Don't import bytes since it causes issues on (so far unported) modules on Python 2. 
- from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, dict, list, object, range, max, min, str # noqa: F401 - -from past.builtins import chr as byteschr, long -from six import ensure_text, ensure_str +from past.builtins import chr as byteschr +from six import ensure_text import os, re, sys, time, json +from typing import Optional from bs4 import BeautifulSoup @@ -23,6 +16,7 @@ from twisted.internet import defer from allmydata import uri from allmydata.storage.mutable import MutableShareFile +from allmydata.storage.immutable import ShareFile from allmydata.storage.server import si_a2b from allmydata.immutable import offloaded, upload from allmydata.immutable.literal import LiteralFileNode @@ -32,8 +26,9 @@ from allmydata.util import log, base32 from allmydata.util.encodingutil import quote_output, unicode_to_argv from allmydata.util.fileutil import abspath_expanduser_unicode from allmydata.util.consumer import MemoryConsumer, download_to_data +from allmydata.util.deferredutil import async_to_deferred from allmydata.interfaces import IDirectoryNode, IFileNode, \ - NoSuchChildError, NoSharesError + NoSuchChildError, NoSharesError, SDMF_VERSION, MDMF_VERSION from allmydata.monitor import Monitor from allmydata.mutable.common import NotWriteableError from allmydata.mutable import layout as mutable_layout @@ -54,10 +49,12 @@ from .common_util import run_cli_unicode class RunBinTahoeMixin(object): - def run_bintahoe(self, args, stdin=None, python_options=[], env=None): + def run_bintahoe(self, args, stdin=None, python_options:Optional[list[str]]=None, env=None): # test_runner.run_bintahoe has better unicode support but doesn't # support env yet and is also synchronous. If we could get rid of # this in favor of that, though, it would probably be an improvement. 
+ if python_options is None: + python_options = [] command = sys.executable argv = python_options + ["-b", "-m", "allmydata.scripts.runner"] + args @@ -116,11 +113,17 @@ class CountingDataUploadable(upload.Data): class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase): - + """Foolscap integration-y tests.""" + FORCE_FOOLSCAP_FOR_STORAGE = True timeout = 180 + @property + def basedir(self): + return "system/SystemTest/{}-foolscap-{}".format( + self.id().split(".")[-1], self.FORCE_FOOLSCAP_FOR_STORAGE + ) + def test_connections(self): - self.basedir = "system/SystemTest/test_connections" d = self.set_up_nodes() self.extra_node = None d.addCallback(lambda res: self.add_extra_node(self.numclients)) @@ -148,11 +151,9 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase): del test_connections def test_upload_and_download_random_key(self): - self.basedir = "system/SystemTest/test_upload_and_download_random_key" return self._test_upload_and_download(convergence=None) def test_upload_and_download_convergent(self): - self.basedir = "system/SystemTest/test_upload_and_download_convergent" return self._test_upload_and_download(convergence=b"some convergence string") def _test_upload_and_download(self, convergence): @@ -394,7 +395,7 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase): # this is really bytes received rather than sent, but it's # convenient and basically measures the same thing bytes_sent = results.get_ciphertext_fetched() - self.failUnless(isinstance(bytes_sent, (int, long)), bytes_sent) + self.failUnless(isinstance(bytes_sent, int), bytes_sent) # We currently don't support resumption of upload if the data is # encrypted with a random key. 
(Because that would require us @@ -472,9 +473,10 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase): def _corrupt_mutable_share(self, filename, which): msf = MutableShareFile(filename) - datav = msf.readv([ (0, 1000000) ]) + # Read more than share length: + datav = msf.readv([ (0, 10_000_000) ]) final_share = datav[0] - assert len(final_share) < 1000000 # ought to be truncated + assert len(final_share) < 10_000_000 # ought to be truncated pieces = mutable_layout.unpack_share(final_share) (seqnum, root_hash, IV, k, N, segsize, datalen, verification_key, signature, share_hash_chain, block_hash_tree, @@ -514,13 +516,20 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase): msf.writev( [(0, final_share)], None) - def test_mutable(self): - self.basedir = "system/SystemTest/test_mutable" + def test_mutable_sdmf(self): + """SDMF mutables can be uploaded, downloaded, and many other things.""" + return self._test_mutable(SDMF_VERSION) + + def test_mutable_mdmf(self): + """MDMF mutables can be uploaded, downloaded, and many other things.""" + return self._test_mutable(MDMF_VERSION) + + def _test_mutable(self, mutable_version): DATA = b"initial contents go here." 
# 25 bytes % 3 != 0 DATA_uploadable = MutableData(DATA) NEWDATA = b"new contents yay" NEWDATA_uploadable = MutableData(NEWDATA) - NEWERDATA = b"this is getting old" + NEWERDATA = b"this is getting old" * 1_000_000 NEWERDATA_uploadable = MutableData(NEWERDATA) d = self.set_up_nodes() @@ -528,7 +537,7 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase): def _create_mutable(res): c = self.clients[0] log.msg("starting create_mutable_file") - d1 = c.create_mutable_file(DATA_uploadable) + d1 = c.create_mutable_file(DATA_uploadable, mutable_version) def _done(res): log.msg("DONE: %s" % (res,)) self._mutable_node_1 = res @@ -550,27 +559,33 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase): filename) self.failUnlessEqual(rc, 0) try: + share_type = 'SDMF' if mutable_version == SDMF_VERSION else 'MDMF' self.failUnless("Mutable slot found:\n" in output) - self.failUnless("share_type: SDMF\n" in output) + self.assertIn(f"share_type: {share_type}\n", output) peerid = idlib.nodeid_b2a(self.clients[client_num].nodeid) self.failUnless(" WE for nodeid: %s\n" % peerid in output) self.failUnless(" num_extra_leases: 0\n" in output) self.failUnless(" secrets are for nodeid: %s\n" % peerid in output) - self.failUnless(" SDMF contents:\n" in output) + self.failUnless(f" {share_type} contents:\n" in output) self.failUnless(" seqnum: 1\n" in output) self.failUnless(" required_shares: 3\n" in output) self.failUnless(" total_shares: 10\n" in output) - self.failUnless(" segsize: 27\n" in output, (output, filename)) + if mutable_version == SDMF_VERSION: + self.failUnless(" segsize: 27\n" in output, (output, filename)) self.failUnless(" datalen: 25\n" in output) # the exact share_hash_chain nodes depends upon the sharenum, # and is more of a hassle to compute than I want to deal with # now self.failUnless(" share_hash_chain: " in output) self.failUnless(" block_hash_tree: 1 nodes\n" in output) - expected = (" verify-cap: URI:SSK-Verifier:%s:" % - 
str(base32.b2a(storage_index), "ascii")) - self.failUnless(expected in output) + if mutable_version == SDMF_VERSION: + expected = (" verify-cap: URI:SSK-Verifier:%s:" % + str(base32.b2a(storage_index), "ascii")) + else: + expected = (" verify-cap: URI:MDMF-Verifier:%s" % + str(base32.b2a(storage_index), "ascii")) + self.assertIn(expected, output) except unittest.FailTest: print() print("dump-share output was:") @@ -638,7 +653,25 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase): self.failUnlessEqual(res, NEWERDATA) d.addCallback(_check_download_5) - def _corrupt_shares(res): + # The previous checks upload a complete replacement. This uses a + # different API that is supposed to do a partial write at an offset. + @async_to_deferred + async def _check_write_at_offset(newnode): + log.msg("writing at offset") + start = b"abcdef" + expected = b"abXYef" + uri = self._mutable_node_1.get_uri() + newnode = self.clients[0].create_node_from_uri(uri) + await newnode.overwrite(MutableData(start)) + version = await newnode.get_mutable_version() + await version.update(MutableData(b"XY"), 2) + result = await newnode.download_best_version() + self.assertEqual(result, expected) + # Revert to previous version + await newnode.overwrite(MutableData(NEWERDATA)) + d.addCallback(_check_write_at_offset) + + def _corrupt_shares(_res): # run around and flip bits in all but k of the shares, to test # the hash checks shares = self._find_all_shares(self.basedir) @@ -690,7 +723,10 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase): # when we retrieve this, we should get three signature # failures (where we've mangled seqnum, R, and segsize). 
The # pubkey mangling - d.addCallback(_corrupt_shares) + + if mutable_version == SDMF_VERSION: + # TODO Corrupting shares in test_systm doesn't work for MDMF right now + d.addCallback(_corrupt_shares) d.addCallback(lambda res: self._newnode3.download_best_version()) d.addCallback(_check_download_5) @@ -698,7 +734,7 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase): def _check_empty_file(res): # make sure we can create empty files, this usually screws up the # segsize math - d1 = self.clients[2].create_mutable_file(MutableData(b"")) + d1 = self.clients[2].create_mutable_file(MutableData(b""), mutable_version) d1.addCallback(lambda newnode: newnode.download_best_version()) d1.addCallback(lambda res: self.failUnlessEqual(b"", res)) return d1 @@ -745,9 +781,8 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase): # plaintext_hash check. def test_filesystem(self): - self.basedir = "system/SystemTest/test_filesystem" self.data = LARGE_DATA - d = self.set_up_nodes() + d = self.set_up_nodes(2) def _new_happy_semantics(ign): for c in self.clients: c.encoding_params['happy'] = 1 @@ -780,7 +815,6 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase): d.addCallback(self._check_publish_private) d.addCallback(self.log, "did _check_publish_private") d.addCallback(self._test_web) - d.addCallback(self._test_control) d.addCallback(self._test_cli) # P now has four top-level children: # P/personal/sekrit data @@ -1049,7 +1083,9 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase): headers["content-type"] = "multipart/form-data; boundary=%s" % str(sepbase, "ascii") return self.POST2(urlpath, body, headers, use_helper) - def POST2(self, urlpath, body=b"", headers={}, use_helper=False): + def POST2(self, urlpath, body=b"", headers=None, use_helper=False): + if headers is None: + headers = {} if use_helper: url = self.helper_webish_url + urlpath else: @@ -1291,9 +1327,9 @@ class 
SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase): # are sharefiles here filename = os.path.join(dirpath, filenames[0]) # peek at the magic to see if it is a chk share - magic = open(filename, "rb").read(4) - if magic == b'\x00\x00\x00\x01': - break + with open(filename, "rb") as f: + if ShareFile.is_valid_header(f.read(32)): + break else: self.fail("unable to find any uri_extension files in %r" % self.basedir) @@ -1343,25 +1379,6 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase): if line.startswith("CHK %s " % storage_index_s)] self.failUnlessEqual(len(matching), 10) - def _test_control(self, res): - # exercise the remote-control-the-client foolscap interfaces in - # allmydata.control (mostly used for performance tests) - c0 = self.clients[0] - control_furl_file = c0.config.get_private_path("control.furl") - control_furl = ensure_str(open(control_furl_file, "r").read().strip()) - # it doesn't really matter which Tub we use to connect to the client, - # so let's just use our IntroducerNode's - d = self.introducer.tub.getReference(control_furl) - d.addCallback(self._test_control2, control_furl_file) - return d - def _test_control2(self, rref, filename): - d = defer.succeed(None) - d.addCallback(lambda res: rref.callRemote("speed_test", 1, 200, False)) - if sys.platform in ("linux2", "linux3"): - d.addCallback(lambda res: rref.callRemote("get_memory_usage")) - d.addCallback(lambda res: rref.callRemote("measure_peer_response_time")) - return d - def _test_cli(self, res): # run various CLI commands (in a thread, since they use blocking # network calls) @@ -1389,7 +1406,7 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase): rc,out,err = yield run_cli(verb, *args, nodeargs=nodeargs, **kwargs) defer.returnValue((out,err)) - def _check_ls(out_and_err, expected_children, unexpected_children=[]): + def _check_ls(out_and_err, expected_children, unexpected_children=()): (out, err) = out_and_err 
self.failUnlessEqual(err, "") for s in expected_children: @@ -1729,10 +1746,13 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase): return d + # In CI this test can be very slow, so give it a longer timeout: + test_filesystem.timeout = 360 # type: ignore[attr-defined] + + def test_filesystem_with_cli_in_subprocess(self): # We do this in a separate test so that test_filesystem doesn't skip if we can't run bin/tahoe. - self.basedir = "system/SystemTest/test_filesystem_with_cli_in_subprocess" d = self.set_up_nodes() def _new_happy_semantics(ign): for c in self.clients: @@ -1813,9 +1833,21 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase): class Connections(SystemTestMixin, unittest.TestCase): + FORCE_FOOLSCAP_FOR_STORAGE = True def test_rref(self): - self.basedir = "system/Connections/rref" + # The way the listening port is created is via + # SameProcessStreamEndpointAssigner (allmydata.test.common), which then + # makes an endpoint string parsed by AdoptedServerPort. The latter does + # dup(fd), which results in the filedescriptor staying alive _until the + # test ends_. That means that when we disown the service, we still have + # the listening port there on the OS level! Just the resulting + # connections aren't handled. So this test relies on aggressive + # timeouts in the HTTP client and presumably some equivalent in + # Foolscap, since connection refused does _not_ happen. 
+ self.basedir = "system/Connections/rref-foolscap-{}".format( + self.FORCE_FOOLSCAP_FOR_STORAGE + ) d = self.set_up_nodes(2) def _start(ign): self.c0 = self.clients[0] @@ -1831,9 +1863,13 @@ class Connections(SystemTestMixin, unittest.TestCase): # now shut down the server d.addCallback(lambda ign: self.clients[1].disownServiceParent()) + + # kill any persistent http connections that might continue to work + d.addCallback(lambda ign: self.close_idle_http_connections()) + # and wait for the client to notice def _poll(): - return len(self.c0.storage_broker.get_connected_servers()) < 2 + return len(self.c0.storage_broker.get_connected_servers()) == 1 d.addCallback(lambda ign: self.poll(_poll)) def _down(ign): @@ -1843,3 +1879,16 @@ class Connections(SystemTestMixin, unittest.TestCase): self.assertEqual(storage_server, self.s1_storage_server) d.addCallback(_down) return d + + +class HTTPSystemTest(SystemTest): + """HTTP storage protocol variant of the system tests.""" + + FORCE_FOOLSCAP_FOR_STORAGE = False + + + +class HTTPConnections(Connections): + """HTTP storage protocol variant of the connections tests.""" + FORCE_FOOLSCAP_FOR_STORAGE = False + diff --git a/src/allmydata/test/test_testing.py b/src/allmydata/test/test_testing.py index 527b235bd..07bebb7a1 100644 --- a/src/allmydata/test/test_testing.py +++ b/src/allmydata/test/test_testing.py @@ -9,18 +9,7 @@ """ Tests for the allmydata.testing helpers - -Ported to Python 3. 
- """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 from twisted.internet.defer import ( inlineCallbacks, @@ -46,22 +35,25 @@ from hypothesis.strategies import ( binary, ) -from testtools import ( - TestCase, +from .common import ( + SyncTestCase, ) + from testtools.matchers import ( Always, Equals, IsInstance, MatchesStructure, AfterPreprocessing, + Contains, ) from testtools.twistedsupport import ( succeeded, ) +from twisted.web.http import GONE -class FakeWebTest(TestCase): +class FakeWebTest(SyncTestCase): """ Test the WebUI verified-fakes infrastucture """ @@ -143,7 +135,8 @@ class FakeWebTest(TestCase): def test_download_missing(self): """ - Error if we download a capability that doesn't exist + The response to a request to download a capability that doesn't exist + is 410 (GONE). """ http_client = create_tahoe_treq_client() @@ -156,7 +149,11 @@ class FakeWebTest(TestCase): resp, succeeded( MatchesStructure( - code=Equals(500) + code=Equals(GONE), + content=AfterPreprocessing( + lambda m: m(), + succeeded(Contains(b"No data for")), + ), ) ) ) diff --git a/src/allmydata/test/test_time_format.py b/src/allmydata/test/test_time_format.py index f83a6a53c..f3b9a8990 100644 --- a/src/allmydata/test/test_time_format.py +++ b/src/allmydata/test/test_time_format.py @@ -1,16 +1,6 @@ """ Tests for allmydata.util.time_format. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - -from past.builtins import long import time @@ -111,7 +101,7 @@ class TimeFormat(unittest.TestCase, TimezoneMixin): def test_parse_date(self): p = time_format.parse_date self.failUnlessEqual(p("2010-02-21"), 1266710400) - self.failUnless(isinstance(p("2009-03-18"), (int, long)), p("2009-03-18")) + self.failUnless(isinstance(p("2009-03-18"), int), p("2009-03-18")) self.failUnlessEqual(p("2009-03-18"), 1237334400) def test_format_time(self): diff --git a/src/allmydata/test/test_tor_provider.py b/src/allmydata/test/test_tor_provider.py index 86d54803a..e31a8586b 100644 --- a/src/allmydata/test/test_tor_provider.py +++ b/src/allmydata/test/test_tor_provider.py @@ -1,21 +1,12 @@ """ Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 import os from twisted.trial import unittest from twisted.internet import defer, error -from six.moves import StringIO -from six import ensure_str -import mock +from io import StringIO +from unittest import mock from ..util import tor_provider from ..scripts import create_node, runner from foolscap.eventual import flushEventualQueue @@ -94,16 +85,15 @@ class LaunchTor(unittest.TestCase): reactor = object() private_dir = "private" txtorcon = mock.Mock() - tpp = mock.Mock - tpp.tor_protocol = mock.Mock() - txtorcon.launch_tor = mock.Mock(return_value=tpp) + tor = mock.Mock + txtorcon.launch = mock.Mock(return_value=tor) with mock.patch("allmydata.util.tor_provider.allocate_tcp_port", return_value=999999): d = tor_provider._launch_tor(reactor, tor_executable, private_dir, txtorcon) - tor_control_endpoint, tor_control_proto = self.successResultOf(d) - self.assertIs(tor_control_proto, tpp.tor_protocol) + tor_control_endpoint, tor_result = self.successResultOf(d) + self.assertIs(tor_result, tor) def test_launch(self): return self._do_test_launch(None) @@ -161,6 +151,12 @@ class ConnectToTor(unittest.TestCase): return self._do_test_connect(None, False) +class FakeTor: + """Pretends to be a ``txtorcon.Tor`` instance.""" + def __init__(self): + self.protocol = object() + + class CreateOnion(unittest.TestCase): def test_no_txtorcon(self): with mock.patch("allmydata.util.tor_provider._import_txtorcon", @@ -171,6 +167,7 @@ class CreateOnion(unittest.TestCase): self.assertEqual(str(f.value), "Cannot create onion without txtorcon. 
" "Please 'pip install tahoe-lafs[tor]' to fix this.") + def _do_test_launch(self, executable): basedir = self.mktemp() os.mkdir(basedir) @@ -181,13 +178,13 @@ class CreateOnion(unittest.TestCase): if executable: args.append("--tor-executable=%s" % executable) cli_config = make_cli_config(basedir, *args) - protocol = object() + tor_instance = FakeTor() launch_tor = mock.Mock(return_value=defer.succeed(("control_endpoint", - protocol))) + tor_instance))) txtorcon = mock.Mock() ehs = mock.Mock() # This appears to be a native string in the real txtorcon object... - ehs.private_key = ensure_str("privkey") + ehs.private_key = "privkey" ehs.hostname = "ONION.onion" txtorcon.EphemeralHiddenService = mock.Mock(return_value=ehs) ehs.add_to_tor = mock.Mock(return_value=defer.succeed(None)) @@ -199,13 +196,13 @@ class CreateOnion(unittest.TestCase): with mock.patch("allmydata.util.tor_provider.allocate_tcp_port", return_value=999999): d = tor_provider.create_config(reactor, cli_config) - tahoe_config_tor, tor_port, tor_location = self.successResultOf(d) + tor_config = self.successResultOf(d) launch_tor.assert_called_with(reactor, executable, os.path.abspath(private_dir), txtorcon) txtorcon.EphemeralHiddenService.assert_called_with("3457 127.0.0.1:999999") - ehs.add_to_tor.assert_called_with(protocol) - ehs.remove_from_tor.assert_called_with(protocol) + ehs.add_to_tor.assert_called_with(tor_instance.protocol) + ehs.remove_from_tor.assert_called_with(tor_instance.protocol) expected = {"launch": "true", "onion": "true", @@ -216,10 +213,10 @@ class CreateOnion(unittest.TestCase): } if executable: expected["tor.executable"] = executable - self.assertEqual(tahoe_config_tor, expected) - self.assertEqual(tor_port, "tcp:999999:interface=127.0.0.1") - self.assertEqual(tor_location, "tor:ONION.onion:3457") - fn = os.path.join(basedir, tahoe_config_tor["onion.private_key_file"]) + self.assertEqual(dict(tor_config.node_config["tor"]), expected) + self.assertEqual(tor_config.tub_ports, 
["tcp:999999:interface=127.0.0.1"]) + self.assertEqual(tor_config.tub_locations, ["tor:ONION.onion:3457"]) + fn = os.path.join(basedir, dict(tor_config.node_config["tor"])["onion.private_key_file"]) with open(fn, "rb") as f: privkey = f.read() self.assertEqual(privkey, b"privkey") @@ -253,7 +250,7 @@ class CreateOnion(unittest.TestCase): with mock.patch("allmydata.util.tor_provider.allocate_tcp_port", return_value=999999): d = tor_provider.create_config(reactor, cli_config) - tahoe_config_tor, tor_port, tor_location = self.successResultOf(d) + tor_config = self.successResultOf(d) connect_to_tor.assert_called_with(reactor, cli_config, txtorcon) txtorcon.EphemeralHiddenService.assert_called_with("3457 127.0.0.1:999999") @@ -267,10 +264,10 @@ class CreateOnion(unittest.TestCase): "onion.private_key_file": os.path.join("private", "tor_onion.privkey"), } - self.assertEqual(tahoe_config_tor, expected) - self.assertEqual(tor_port, "tcp:999999:interface=127.0.0.1") - self.assertEqual(tor_location, "tor:ONION.onion:3457") - fn = os.path.join(basedir, tahoe_config_tor["onion.private_key_file"]) + self.assertEqual(dict(tor_config.node_config["tor"]), expected) + self.assertEqual(tor_config.tub_ports, ["tcp:999999:interface=127.0.0.1"]) + self.assertEqual(tor_config.tub_locations, ["tor:ONION.onion:3457"]) + fn = os.path.join(basedir, dict(tor_config.node_config["tor"])["onion.private_key_file"]) with open(fn, "rb") as f: privkey = f.read() self.assertEqual(privkey, b"privkey") @@ -587,13 +584,14 @@ class Provider_Service(unittest.TestCase): txtorcon = mock.Mock() with mock_txtorcon(txtorcon): p = tor_provider.create(reactor, cfg) + tor_instance = FakeTor() tor_state = mock.Mock() - tor_state.protocol = object() + tor_state.protocol = tor_instance.protocol ehs = mock.Mock() ehs.add_to_tor = mock.Mock(return_value=defer.succeed(None)) ehs.remove_from_tor = mock.Mock(return_value=defer.succeed(None)) txtorcon.EphemeralHiddenService = mock.Mock(return_value=ehs) - launch_tor = 
mock.Mock(return_value=defer.succeed((None,tor_state.protocol))) + launch_tor = mock.Mock(return_value=defer.succeed((None,tor_instance))) with mock.patch("allmydata.util.tor_provider._launch_tor", launch_tor): d = p.startService() @@ -628,9 +626,8 @@ class Provider_Service(unittest.TestCase): txtorcon = mock.Mock() with mock_txtorcon(txtorcon): p = tor_provider.create(reactor, cfg) - tor_state = mock.Mock() - tor_state.protocol = object() - txtorcon.build_tor_connection = mock.Mock(return_value=tor_state) + tor_instance = FakeTor() + txtorcon.connect = mock.Mock(return_value=tor_instance) ehs = mock.Mock() ehs.add_to_tor = mock.Mock(return_value=defer.succeed(None)) ehs.remove_from_tor = mock.Mock(return_value=defer.succeed(None)) @@ -642,12 +639,12 @@ class Provider_Service(unittest.TestCase): yield flushEventualQueue() self.successResultOf(d) self.assertIs(p._onion_ehs, ehs) - self.assertIs(p._onion_tor_control_proto, tor_state.protocol) + self.assertIs(p._onion_tor_control_proto, tor_instance.protocol) cfs.assert_called_with(reactor, "ep_desc") - txtorcon.build_tor_connection.assert_called_with(tcep) + txtorcon.connect.assert_called_with(reactor, tcep) txtorcon.EphemeralHiddenService.assert_called_with("456 127.0.0.1:123", b"private key") - ehs.add_to_tor.assert_called_with(tor_state.protocol) + ehs.add_to_tor.assert_called_with(tor_instance.protocol) yield p.stopService() - ehs.remove_from_tor.assert_called_with(tor_state.protocol) + ehs.remove_from_tor.assert_called_with(tor_instance.protocol) diff --git a/src/allmydata/test/test_upload.py b/src/allmydata/test/test_upload.py index 8d5435e88..ecb76ec27 100644 --- a/src/allmydata/test/test_upload.py +++ b/src/allmydata/test/test_upload.py @@ -3,14 +3,6 @@ """ Ported to Python 3. 
""" -from __future__ import unicode_literals -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 import os, shutil from io import BytesIO @@ -983,7 +975,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin, num_segments = encoder.get_param("num_segments") d = selector.get_shareholders(broker, sh, storage_index, share_size, block_size, num_segments, - 10, 3, 4) + 10, 3, 4, encoder.get_uri_extension_size()) def _have_shareholders(upload_trackers_and_already_servers): (upload_trackers, already_servers) = upload_trackers_and_already_servers assert servers_to_break <= len(upload_trackers) diff --git a/src/allmydata/test/test_uri.py b/src/allmydata/test/test_uri.py index 748a0f6ef..ae4bf2002 100644 --- a/src/allmydata/test/test_uri.py +++ b/src/allmydata/test/test_uri.py @@ -4,15 +4,6 @@ Tests for allmydata.uri. Ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, dict, hex, input, next, oct, open, pow, round, super, bytes, list, object, range, str, max, min # noqa: F401 - import os from twisted.trial import unittest from allmydata import uri diff --git a/src/allmydata/test/test_util.py b/src/allmydata/test/test_util.py index a03845ed6..07a2bfb59 100644 --- a/src/allmydata/test/test_util.py +++ b/src/allmydata/test/test_util.py @@ -2,19 +2,10 @@ Ported to Python3. 
""" -from __future__ import print_function -from __future__ import absolute_import -from __future__ import division -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - # open is not here because we want to use native strings on Py2 - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 -import six import os, time, sys import yaml import json +from threading import current_thread from twisted.trial import unittest from foolscap.api import Violation, RemoteException @@ -26,11 +17,11 @@ from allmydata.util import pollmixin from allmydata.util import yamlutil from allmydata.util import rrefutil from allmydata.util.fileutil import EncryptedTemporaryFile +from allmydata.util.cputhreadpool import defer_to_thread, disable_thread_pool_for_test from allmydata.test.common_util import ReallyEqualMixin from .no_network import fireNow, LocalWrapper -if six.PY3: - long = int +long = int class IDLib(unittest.TestCase): @@ -191,8 +182,6 @@ class FileUtil(ReallyEqualMixin, unittest.TestCase): self.failUnlessRaises(AssertionError, fileutil.abspath_expanduser_unicode, b"bytestring") saved_cwd = os.path.normpath(os.getcwd()) - if PY2: - saved_cwd = saved_cwd.decode("utf8") abspath_cwd = fileutil.abspath_expanduser_unicode(u".") abspath_cwd_notlong = fileutil.abspath_expanduser_unicode(u".", long_path=False) self.failUnless(isinstance(saved_cwd, str), saved_cwd) @@ -486,7 +475,7 @@ class YAML(unittest.TestCase): Unicode and (ASCII) native strings get roundtripped to Unicode strings. 
""" data = yaml.safe_dump( - [six.ensure_str("str"), u"unicode", u"\u1234nicode"] + ["str", "unicode", "\u1234nicode"] ) back = yamlutil.safe_load(data) self.assertIsInstance(back[0], str) @@ -558,6 +547,12 @@ class JSONBytes(unittest.TestCase): expected ) + def test_dumps_bytes_unicode_separators(self): + """Unicode separators don't prevent the result from being bytes.""" + result = jsonbytes.dumps_bytes([1, 2], separators=(u',', u':')) + self.assertIsInstance(result, bytes) + self.assertEqual(result, b"[1,2]") + class FakeGetVersion(object): @@ -593,3 +588,36 @@ class RrefUtilTests(unittest.TestCase): ) self.assertEqual(result.version, "Default") self.assertIdentical(result, rref) + + +class CPUThreadPool(unittest.TestCase): + """Tests for cputhreadpool.""" + + async def test_runs_in_thread(self): + """The given function runs in a thread.""" + def f(*args, **kwargs): + return current_thread(), args, kwargs + + this_thread = current_thread().ident + thread, args, kwargs = await defer_to_thread(f, 1, 3, key=4, value=5) + + # The task ran in a different thread: + self.assertNotEqual(thread.ident, this_thread) + self.assertEqual(args, (1, 3)) + self.assertEqual(kwargs, {"key": 4, "value": 5}) + + async def test_when_disabled_runs_in_same_thread(self): + """ + If the CPU thread pool is disabled, the given function runs in the + current thread. 
+ """ + disable_thread_pool_for_test(self) + def f(*args, **kwargs): + return current_thread().ident, args, kwargs + + this_thread = current_thread().ident + thread, args, kwargs = await defer_to_thread(f, 1, 3, key=4, value=5) + + self.assertEqual(thread, this_thread) + self.assertEqual(args, (1, 3)) + self.assertEqual(kwargs, {"key": 4, "value": 5}) diff --git a/src/allmydata/test/test_windows.py b/src/allmydata/test/test_windows.py deleted file mode 100644 index bae56bfed..000000000 --- a/src/allmydata/test/test_windows.py +++ /dev/null @@ -1,227 +0,0 @@ -# -*- coding: utf-8 -*- -# Tahoe-LAFS -- secure, distributed storage grid -# -# Copyright © 2020 The Tahoe-LAFS Software Foundation -# -# This file is part of Tahoe-LAFS. -# -# See the docs/about.rst file for licensing information. - -""" -Tests for the ``allmydata.windows``. -""" - -from __future__ import division -from __future__ import absolute_import -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - -from sys import ( - executable, -) -from json import ( - load, -) -from textwrap import ( - dedent, -) -from twisted.python.filepath import ( - FilePath, -) -from twisted.python.runtime import ( - platform, -) - -from testtools import ( - skipUnless, -) - -from testtools.matchers import ( - MatchesAll, - AllMatch, - IsInstance, - Equals, -) - -from hypothesis import ( - HealthCheck, - settings, - given, - note, -) - -from hypothesis.strategies import ( - lists, - text, - characters, -) - -from .common import ( - PIPE, - Popen, - SyncTestCase, -) - -slow_settings = settings( - suppress_health_check=[HealthCheck.too_slow], - deadline=None, - - # Reduce the number of examples required to consider the test a success. - # The default is 100. 
Launching a process is expensive so we'll try to do - # it as few times as we can get away with. To maintain good coverage, - # we'll try to pass as much data to each process as we can so we're still - # covering a good portion of the space. - max_examples=10, -) - -@skipUnless(platform.isWindows(), "get_argv is Windows-only") -@skipUnless(PY2, "Not used on Python 3.") -class GetArgvTests(SyncTestCase): - """ - Tests for ``get_argv``. - """ - def test_get_argv_return_type(self): - """ - ``get_argv`` returns a list of unicode strings - """ - # Hide the ``allmydata.windows.fixups.get_argv`` import here so it - # doesn't cause failures on non-Windows platforms. - from ..windows.fixups import ( - get_argv, - ) - argv = get_argv() - - # We don't know what this process's command line was so we just make - # structural assertions here. - self.assertThat( - argv, - MatchesAll( - IsInstance(list), - AllMatch(IsInstance(str)), - ), - ) - - # This test runs a child process. This is unavoidably slow and variable. - # Disable the two time-based Hypothesis health checks. - @slow_settings - @given( - lists( - text( - alphabet=characters( - blacklist_categories=('Cs',), - # Windows CommandLine is a null-terminated string, - # analogous to POSIX exec* arguments. So exclude nul from - # our generated arguments. - blacklist_characters=('\x00',), - ), - min_size=10, - max_size=20, - ), - min_size=10, - max_size=20, - ), - ) - def test_argv_values(self, argv): - """ - ``get_argv`` returns a list representing the result of tokenizing the - "command line" argument string provided to Windows processes. - """ - working_path = FilePath(self.mktemp()) - working_path.makedirs() - save_argv_path = working_path.child("script.py") - saved_argv_path = working_path.child("data.json") - with open(save_argv_path.path, "wt") as f: - # A simple program to save argv to a file. 
Using the file saves - # us having to figure out how to reliably get non-ASCII back over - # stdio which may pose an independent set of challenges. At least - # file I/O is relatively simple and well-understood. - f.write(dedent( - """ - from allmydata.windows.fixups import ( - get_argv, - ) - import json - with open({!r}, "wt") as f: - f.write(json.dumps(get_argv())) - """.format(saved_argv_path.path)), - ) - argv = [executable.decode("utf-8"), save_argv_path.path] + argv - p = Popen(argv, stdin=PIPE, stdout=PIPE, stderr=PIPE) - p.stdin.close() - stdout = p.stdout.read() - stderr = p.stderr.read() - returncode = p.wait() - - note("stdout: {!r}".format(stdout)) - note("stderr: {!r}".format(stderr)) - - self.assertThat( - returncode, - Equals(0), - ) - with open(saved_argv_path.path, "rt") as f: - saved_argv = load(f) - - self.assertThat( - saved_argv, - Equals(argv), - ) - - -@skipUnless(platform.isWindows(), "intended for Windows-only codepaths") -@skipUnless(PY2, "Not used on Python 3.") -class UnicodeOutputTests(SyncTestCase): - """ - Tests for writing unicode to stdout and stderr. - """ - @slow_settings - @given(characters(), characters()) - def test_write_non_ascii(self, stdout_char, stderr_char): - """ - Non-ASCII unicode characters can be written to stdout and stderr with - automatic UTF-8 encoding. - """ - working_path = FilePath(self.mktemp()) - working_path.makedirs() - script = working_path.child("script.py") - script.setContent(dedent( - """ - from future.utils import PY2 - if PY2: - from future.builtins import chr - - from allmydata.windows.fixups import initialize - initialize() - - # XXX A shortcoming of the monkey-patch approach is that you'd - # better not import stdout or stderr before you call initialize. 
- from sys import argv, stdout, stderr - - stdout.write(chr(int(argv[1]))) - stdout.close() - stderr.write(chr(int(argv[2]))) - stderr.close() - """ - )) - p = Popen([ - executable, - script.path, - str(ord(stdout_char)), - str(ord(stderr_char)), - ], stdout=PIPE, stderr=PIPE) - stdout = p.stdout.read().decode("utf-8").replace("\r\n", "\n") - stderr = p.stderr.read().decode("utf-8").replace("\r\n", "\n") - returncode = p.wait() - - self.assertThat( - (stdout, stderr, returncode), - Equals(( - stdout_char, - stderr_char, - 0, - )), - ) diff --git a/src/allmydata/test/web/common.py b/src/allmydata/test/web/common.py index 43a13a902..f92548810 100644 --- a/src/allmydata/test/web/common.py +++ b/src/allmydata/test/web/common.py @@ -1,14 +1,6 @@ """ Ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 import re @@ -23,7 +15,7 @@ def assert_soup_has_favicon(testcase, soup): ``BeautifulSoup`` object ``soup`` contains the tahoe favicon link. """ links = soup.find_all(u'link', rel=u'shortcut icon') - testcase.assert_( + testcase.assertTrue( any(t[u'href'] == u'/icon.png' for t in links), soup) @@ -92,6 +84,6 @@ def assert_soup_has_text(testcase, soup, text): ``BeautifulSoup`` object ``soup`` contains the passed in ``text`` anywhere as a text node. """ - testcase.assert_( + testcase.assertTrue( soup.find_all(string=re.compile(re.escape(text))), soup) diff --git a/src/allmydata/test/web/matchers.py b/src/allmydata/test/web/matchers.py index f764da79d..669e7ddf4 100644 --- a/src/allmydata/test/web/matchers.py +++ b/src/allmydata/test/web/matchers.py @@ -1,14 +1,6 @@ """ Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 import attr diff --git a/src/allmydata/test/web/test_common.py b/src/allmydata/test/web/test_common.py index 84ab5cab2..34c9a17a3 100644 --- a/src/allmydata/test/web/test_common.py +++ b/src/allmydata/test/web/test_common.py @@ -3,14 +3,6 @@ Tests for ``allmydata.web.common``. Ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 import gc diff --git a/src/allmydata/test/web/test_grid.py b/src/allmydata/test/web/test_grid.py index edcf32268..c782733f9 100644 --- a/src/allmydata/test/web/test_grid.py +++ b/src/allmydata/test/web/test_grid.py @@ -1,24 +1,15 @@ """ Ported to Python 3. 
""" -from __future__ import print_function -from __future__ import absolute_import -from __future__ import division -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 import os.path, re from urllib.parse import quote as url_quote import json -from six.moves import StringIO +from io import StringIO from bs4 import BeautifulSoup from twisted.web import resource -from twisted.trial import unittest from allmydata import uri, dirnode from allmydata.util import base32 from allmydata.util.encodingutil import to_bytes @@ -43,6 +34,21 @@ from .common import ( unknown_rwcap, ) +from ..common import ( + AsyncTestCase, +) + +from testtools.matchers import ( + Equals, + Contains, + Not, + HasLength, + EndsWith, +) + +from testtools.twistedsupport import flush_logged_errors + + DIR_HTML_TAG = '' class CompletelyUnhandledError(Exception): @@ -53,7 +59,7 @@ class ErrorBoom(resource.Resource, object): def render(self, req): raise CompletelyUnhandledError("whoops") -class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMixin, unittest.TestCase): +class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMixin, AsyncTestCase): def CHECK(self, ign, which, args, clientnum=0): fileurl = self.fileurls[which] @@ -117,37 +123,37 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi d.addCallback(self.CHECK, "good", "t=check") def _got_html_good(res): - self.failUnlessIn("Healthy", res) - self.failIfIn("Not Healthy", res) + self.assertThat(res, Contains("Healthy")) + self.assertThat(res, Not(Contains("Not Healthy", ))) soup = BeautifulSoup(res, 'html5lib') assert_soup_has_favicon(self, soup) d.addCallback(_got_html_good) d.addCallback(self.CHECK, "good", "t=check&return_to=somewhere") def _got_html_good_return_to(res): - 
self.failUnlessIn("Healthy", res) - self.failIfIn("Not Healthy", res) - self.failUnlessIn('Return to file', res) + self.assertThat(res, Contains("Healthy")) + self.assertThat(res, Not(Contains("Not Healthy"))) + self.assertThat(res, Contains('Return to file')) d.addCallback(_got_html_good_return_to) d.addCallback(self.CHECK, "good", "t=check&output=json") def _got_json_good(res): r = json.loads(res) self.failUnlessEqual(r["summary"], "Healthy") self.failUnless(r["results"]["healthy"]) - self.failIfIn("needs-rebalancing", r["results"]) + self.assertThat(r["results"], Not(Contains("needs-rebalancing",))) self.failUnless(r["results"]["recoverable"]) d.addCallback(_got_json_good) d.addCallback(self.CHECK, "small", "t=check") def _got_html_small(res): - self.failUnlessIn("Literal files are always healthy", res) - self.failIfIn("Not Healthy", res) + self.assertThat(res, Contains("Literal files are always healthy")) + self.assertThat(res, Not(Contains("Not Healthy"))) d.addCallback(_got_html_small) d.addCallback(self.CHECK, "small", "t=check&return_to=somewhere") def _got_html_small_return_to(res): - self.failUnlessIn("Literal files are always healthy", res) - self.failIfIn("Not Healthy", res) - self.failUnlessIn('Return to file', res) + self.assertThat(res, Contains("Literal files are always healthy")) + self.assertThat(res, Not(Contains("Not Healthy"))) + self.assertThat(res, Contains('Return to file')) d.addCallback(_got_html_small_return_to) d.addCallback(self.CHECK, "small", "t=check&output=json") def _got_json_small(res): @@ -158,8 +164,8 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi d.addCallback(self.CHECK, "smalldir", "t=check") def _got_html_smalldir(res): - self.failUnlessIn("Literal files are always healthy", res) - self.failIfIn("Not Healthy", res) + self.assertThat(res, Contains("Literal files are always healthy")) + self.assertThat(res, Not(Contains("Not Healthy"))) d.addCallback(_got_html_smalldir) 
d.addCallback(self.CHECK, "smalldir", "t=check&output=json") def _got_json_smalldir(res): @@ -170,43 +176,43 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi d.addCallback(self.CHECK, "sick", "t=check") def _got_html_sick(res): - self.failUnlessIn("Not Healthy", res) + self.assertThat(res, Contains("Not Healthy")) d.addCallback(_got_html_sick) d.addCallback(self.CHECK, "sick", "t=check&output=json") def _got_json_sick(res): r = json.loads(res) self.failUnlessEqual(r["summary"], "Not Healthy: 9 shares (enc 3-of-10)") - self.failIf(r["results"]["healthy"]) + self.assertThat(r["results"]["healthy"], Equals(False)) self.failUnless(r["results"]["recoverable"]) - self.failIfIn("needs-rebalancing", r["results"]) + self.assertThat(r["results"], Not(Contains("needs-rebalancing"))) d.addCallback(_got_json_sick) d.addCallback(self.CHECK, "dead", "t=check") def _got_html_dead(res): - self.failUnlessIn("Not Healthy", res) + self.assertThat(res, Contains("Not Healthy")) d.addCallback(_got_html_dead) d.addCallback(self.CHECK, "dead", "t=check&output=json") def _got_json_dead(res): r = json.loads(res) self.failUnlessEqual(r["summary"], "Not Healthy: 1 shares (enc 3-of-10)") - self.failIf(r["results"]["healthy"]) - self.failIf(r["results"]["recoverable"]) - self.failIfIn("needs-rebalancing", r["results"]) + self.assertThat(r["results"]["healthy"], Equals(False)) + self.assertThat(r["results"]["recoverable"], Equals(False)) + self.assertThat(r["results"], Not(Contains("needs-rebalancing"))) d.addCallback(_got_json_dead) d.addCallback(self.CHECK, "corrupt", "t=check&verify=true") def _got_html_corrupt(res): - self.failUnlessIn("Not Healthy! : Unhealthy", res) + self.assertThat(res, Contains("Not Healthy! 
: Unhealthy")) d.addCallback(_got_html_corrupt) d.addCallback(self.CHECK, "corrupt", "t=check&verify=true&output=json") def _got_json_corrupt(res): r = json.loads(res) - self.failUnlessIn("Unhealthy: 9 shares (enc 3-of-10)", r["summary"]) - self.failIf(r["results"]["healthy"]) + self.assertThat(r["summary"], Contains("Unhealthy: 9 shares (enc 3-of-10)")) + self.assertThat(r["results"]["healthy"], Equals(False)) self.failUnless(r["results"]["recoverable"]) - self.failIfIn("needs-rebalancing", r["results"]) + self.assertThat(r["results"], Not(Contains("needs-rebalancing"))) self.failUnlessReallyEqual(r["results"]["count-happiness"], 9) self.failUnlessReallyEqual(r["results"]["count-shares-good"], 9) self.failUnlessReallyEqual(r["results"]["count-corrupt-shares"], 1) @@ -261,9 +267,9 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi d.addCallback(self.CHECK, "good", "t=check&repair=true") def _got_html_good(res): - self.failUnlessIn("Healthy", res) - self.failIfIn("Not Healthy", res) - self.failUnlessIn("No repair necessary", res) + self.assertThat(res, Contains("Healthy")) + self.assertThat(res, Not(Contains("Not Healthy"))) + self.assertThat(res, Contains("No repair necessary", )) soup = BeautifulSoup(res, 'html5lib') assert_soup_has_favicon(self, soup) @@ -271,9 +277,9 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi d.addCallback(self.CHECK, "sick", "t=check&repair=true") def _got_html_sick(res): - self.failUnlessIn("Healthy : healthy", res) - self.failIfIn("Not Healthy", res) - self.failUnlessIn("Repair successful", res) + self.assertThat(res, Contains("Healthy : healthy")) + self.assertThat(res, Not(Contains("Not Healthy"))) + self.assertThat(res, Contains("Repair successful")) d.addCallback(_got_html_sick) # repair of a dead file will fail, of course, but it isn't yet @@ -290,9 +296,9 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi d.addCallback(self.CHECK, 
"corrupt", "t=check&verify=true&repair=true") def _got_html_corrupt(res): - self.failUnlessIn("Healthy : Healthy", res) - self.failIfIn("Not Healthy", res) - self.failUnlessIn("Repair successful", res) + self.assertThat(res, Contains("Healthy : Healthy")) + self.assertThat(res, Not(Contains("Not Healthy"))) + self.assertThat(res, Contains("Repair successful")) d.addCallback(_got_html_corrupt) d.addErrback(self.explain_web_error) @@ -392,31 +398,31 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi if expect_rw_uri: self.failUnlessReallyEqual(to_bytes(f[1]["rw_uri"]), unknown_rwcap, data) else: - self.failIfIn("rw_uri", f[1]) + self.assertThat(f[1], Not(Contains("rw_uri"))) if immutable: self.failUnlessReallyEqual(to_bytes(f[1]["ro_uri"]), unknown_immcap, data) else: self.failUnlessReallyEqual(to_bytes(f[1]["ro_uri"]), unknown_rocap, data) - self.failUnlessIn("metadata", f[1]) + self.assertThat(f[1], Contains("metadata")) d.addCallback(_check_directory_json, expect_rw_uri=not immutable) def _check_info(res, expect_rw_uri, expect_ro_uri): if expect_rw_uri: - self.failUnlessIn(unknown_rwcap, res) + self.assertThat(res, Contains(unknown_rwcap)) if expect_ro_uri: if immutable: - self.failUnlessIn(unknown_immcap, res) + self.assertThat(res, Contains(unknown_immcap)) else: - self.failUnlessIn(unknown_rocap, res) + self.assertThat(res, Contains(unknown_rocap)) else: - self.failIfIn(unknown_rocap, res) + self.assertThat(res, Not(Contains(unknown_rocap))) res = str(res, "utf-8") - self.failUnlessIn("Object Type: unknown", res) - self.failIfIn("Raw data as", res) - self.failIfIn("Directory writecap", res) - self.failIfIn("Checker Operations", res) - self.failIfIn("Mutable File Operations", res) - self.failIfIn("Directory Operations", res) + self.assertThat(res, Contains("Object Type: unknown")) + self.assertThat(res, Not(Contains("Raw data as"))) + self.assertThat(res, Not(Contains("Directory writecap"))) + self.assertThat(res, 
Not(Contains("Checker Operations"))) + self.assertThat(res, Not(Contains("Mutable File Operations"))) + self.assertThat(res, Not(Contains("Directory Operations"))) # FIXME: these should have expect_rw_uri=not immutable; I don't know # why they fail. Possibly related to ticket #922. @@ -432,7 +438,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi if expect_rw_uri: self.failUnlessReallyEqual(to_bytes(data[1]["rw_uri"]), unknown_rwcap, data) else: - self.failIfIn("rw_uri", data[1]) + self.assertThat(data[1], Not(Contains("rw_uri"))) if immutable: self.failUnlessReallyEqual(to_bytes(data[1]["ro_uri"]), unknown_immcap, data) @@ -442,10 +448,10 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi self.failUnlessReallyEqual(data[1]["mutable"], True) else: self.failUnlessReallyEqual(to_bytes(data[1]["ro_uri"]), unknown_rocap, data) - self.failIfIn("mutable", data[1]) + self.assertThat(data[1], Not(Contains("mutable"))) # TODO: check metadata contents - self.failUnlessIn("metadata", data[1]) + self.assertThat(data[1], Contains("metadata")) d.addCallback(lambda ign: self.GET("%s/%s?t=json" % (self.rooturl, str(name)))) d.addCallback(_check_json, expect_rw_uri=not immutable) @@ -519,14 +525,14 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi def _created(dn): self.failUnless(isinstance(dn, dirnode.DirectoryNode)) - self.failIf(dn.is_mutable()) + self.assertThat(dn.is_mutable(), Equals(False)) self.failUnless(dn.is_readonly()) # This checks that if we somehow ended up calling dn._decrypt_rwcapdata, it would fail. 
- self.failIf(hasattr(dn._node, 'get_writekey')) + self.assertThat(hasattr(dn._node, 'get_writekey'), Equals(False)) rep = str(dn) - self.failUnlessIn("RO-IMM", rep) + self.assertThat(rep, Contains("RO-IMM")) cap = dn.get_cap() - self.failUnlessIn(b"CHK", cap.to_string()) + self.assertThat(cap.to_string(), Contains(b"CHK")) self.cap = cap self.rootnode = dn self.rooturl = "uri/" + url_quote(dn.get_uri()) @@ -546,7 +552,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi (name_utf8, ro_uri, rwcapdata, metadata_s), subpos = split_netstring(entry, 4) name = name_utf8.decode("utf-8") self.failUnlessEqual(rwcapdata, b"") - self.failUnlessIn(name, kids) + self.assertThat(kids, Contains(name)) (expected_child, ign) = kids[name] self.failUnlessReallyEqual(ro_uri, expected_child.get_readonly_uri()) numkids += 1 @@ -572,27 +578,27 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi d.addCallback(lambda ign: self.GET(self.rooturl)) def _check_html(res): soup = BeautifulSoup(res, 'html5lib') - self.failIfIn(b"URI:SSK", res) + self.assertThat(res, Not(Contains(b"URI:SSK"))) found = False for td in soup.find_all(u"td"): if td.text != u"FILE": continue a = td.findNextSibling()(u"a")[0] - self.assertIn(url_quote(lonely_uri), a[u"href"]) - self.assertEqual(u"lonely", a.text) - self.assertEqual(a[u"rel"], [u"noreferrer"]) - self.assertEqual(u"{}".format(len("one")), td.findNextSibling().findNextSibling().text) + self.assertThat(a[u"href"], Contains(url_quote(lonely_uri))) + self.assertThat(a.text, Equals(u"lonely")) + self.assertThat(a[u"rel"], Equals([u"noreferrer"])) + self.assertThat(td.findNextSibling().findNextSibling().text, Equals(u"{}".format(len("one")))) found = True break - self.assertTrue(found) + self.assertThat(found, Equals(True)) infos = list( a[u"href"] for a in soup.find_all(u"a") if a.text == u"More Info" ) - self.assertEqual(1, len(infos)) - self.assertTrue(infos[0].endswith(url_quote(lonely_uri) + 
"?t=info")) + self.assertThat(infos, HasLength(1)) + self.assertThat(infos[0], EndsWith(url_quote(lonely_uri) + "?t=info")) d.addCallback(_check_html) # ... and in JSON. @@ -604,7 +610,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi self.failUnlessReallyEqual(sorted(listed_children.keys()), [u"lonely"]) ll_type, ll_data = listed_children[u"lonely"] self.failUnlessEqual(ll_type, "filenode") - self.failIfIn("rw_uri", ll_data) + self.assertThat(ll_data, Not(Contains("rw_uri"))) self.failUnlessReallyEqual(to_bytes(ll_data["ro_uri"]), lonely_uri) d.addCallback(_check_json) return d @@ -744,8 +750,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi error_line = lines[first_error] error_msg = lines[first_error+1:] error_msg_s = "\n".join(error_msg) + "\n" - self.failUnlessIn("ERROR: UnrecoverableFileError(no recoverable versions)", - error_line) + self.assertThat(error_line, Contains("ERROR: UnrecoverableFileError(no recoverable versions)")) self.failUnless(len(error_msg) > 2, error_msg_s) # some traceback units = [json.loads(line) for line in lines[:first_error]] self.failUnlessReallyEqual(len(units), 6) # includes subdir @@ -765,8 +770,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi error_line = lines[first_error] error_msg = lines[first_error+1:] error_msg_s = "\n".join(error_msg) + "\n" - self.failUnlessIn("ERROR: UnrecoverableFileError(no recoverable versions)", - error_line) + self.assertThat(error_line, Contains("ERROR: UnrecoverableFileError(no recoverable versions)")) self.failUnless(len(error_msg) > 2, error_msg_s) # some traceback units = [json.loads(line) for line in lines[:first_error]] self.failUnlessReallyEqual(len(units), 6) # includes subdir @@ -936,8 +940,8 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi d.addCallback(self.CHECK, "one", "t=check") # no add-lease def _got_html_good(res): - 
self.failUnlessIn("Healthy", res) - self.failIfIn("Not Healthy", res) + self.assertThat(res, Contains("Healthy")) + self.assertThat(res, Not(Contains("Not Healthy"))) d.addCallback(_got_html_good) d.addCallback(self._count_leases, "one") @@ -1111,7 +1115,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi self.GET, self.fileurls["0shares"])) def _check_zero_shares(body): body = str(body, "utf-8") - self.failIfIn("", body) + self.assertThat(body, Not(Contains(""))) body = " ".join(body.strip().split()) exp = ("NoSharesError: no shares could be found. " "Zero shares usually indicates a corrupt URI, or that " @@ -1129,7 +1133,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi self.GET, self.fileurls["1share"])) def _check_one_share(body): body = str(body, "utf-8") - self.failIfIn("", body) + self.assertThat(body, Not(Contains(""))) body = " ".join(body.strip().split()) msgbase = ("NotEnoughSharesError: This indicates that some " "servers were unavailable, or that shares have been " @@ -1154,17 +1158,16 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi self.GET, self.fileurls["imaginary"])) def _missing_child(body): body = str(body, "utf-8") - self.failUnlessIn("No such child: imaginary", body) + self.assertThat(body, Contains("No such child: imaginary")) d.addCallback(_missing_child) d.addCallback(lambda ignored: self.GET_unicode(self.fileurls["dir-0share"])) def _check_0shares_dir_html(body): - self.failUnlessIn(DIR_HTML_TAG, body) + self.assertThat(body, Contains(DIR_HTML_TAG)) # we should see the regular page, but without the child table or # the dirops forms body = " ".join(body.strip().split()) - self.failUnlessIn('href="?t=info">More info on this directory', - body) + self.assertThat(body, Contains('href="?t=info">More info on this directory')) exp = ("UnrecoverableFileError: the directory (or mutable file) " "could not be retrieved, because there were 
insufficient " "good shares. This might indicate that no servers were " @@ -1172,8 +1175,8 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi "was corrupt, or that shares have been lost due to server " "departure, hard drive failure, or disk corruption. You " "should perform a filecheck on this object to learn more.") - self.failUnlessIn(exp, body) - self.failUnlessIn("No upload forms: directory is unreadable", body) + self.assertThat(body, Contains(exp)) + self.assertThat(body, Contains("No upload forms: directory is unreadable")) d.addCallback(_check_0shares_dir_html) d.addCallback(lambda ignored: self.GET_unicode(self.fileurls["dir-1share"])) @@ -1182,10 +1185,9 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi # and some-shares like we did for immutable files (since there # are different sorts of advice to offer in each case). For now, # they present the same way. - self.failUnlessIn(DIR_HTML_TAG, body) + self.assertThat(body, Contains(DIR_HTML_TAG)) body = " ".join(body.strip().split()) - self.failUnlessIn('href="?t=info">More info on this directory', - body) + self.assertThat(body, Contains('href="?t=info">More info on this directory')) exp = ("UnrecoverableFileError: the directory (or mutable file) " "could not be retrieved, because there were insufficient " "good shares. This might indicate that no servers were " @@ -1193,8 +1195,8 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi "was corrupt, or that shares have been lost due to server " "departure, hard drive failure, or disk corruption. 
You " "should perform a filecheck on this object to learn more.") - self.failUnlessIn(exp, body) - self.failUnlessIn("No upload forms: directory is unreadable", body) + self.assertThat(body, Contains(exp)) + self.assertThat(body, Contains("No upload forms: directory is unreadable")) d.addCallback(_check_1shares_dir_html) d.addCallback(lambda ignored: @@ -1204,7 +1206,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi self.fileurls["dir-0share-json"])) def _check_unrecoverable_file(body): body = str(body, "utf-8") - self.failIfIn("", body) + self.assertThat(body, Not(Contains(""))) body = " ".join(body.strip().split()) exp = ("UnrecoverableFileError: the directory (or mutable file) " "could not be retrieved, because there were insufficient " @@ -1213,7 +1215,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi "was corrupt, or that shares have been lost due to server " "departure, hard drive failure, or disk corruption. You " "should perform a filecheck on this object to learn more.") - self.failUnlessIn(exp, body) + self.assertThat(body, Contains(exp)) d.addCallback(_check_unrecoverable_file) d.addCallback(lambda ignored: @@ -1245,7 +1247,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi headers={"accept": "*/*"})) def _internal_error_html1(body): body = str(body, "utf-8") - self.failUnlessIn("", "expected HTML, not '%s'" % body) + self.assertThat("expected HTML, not '%s'" % body, Contains("")) d.addCallback(_internal_error_html1) d.addCallback(lambda ignored: @@ -1255,8 +1257,9 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi headers={"accept": "text/plain"})) def _internal_error_text2(body): body = str(body, "utf-8") - self.failIfIn("", body) + self.assertThat(body, Not(Contains(""))) self.failUnless(body.startswith("Traceback "), body) + d.addCallback(_internal_error_text2) CLI_accepts = "text/plain, application/octet-stream" 
@@ -1267,7 +1270,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi headers={"accept": CLI_accepts})) def _internal_error_text3(body): body = str(body, "utf-8") - self.failIfIn("", body) + self.assertThat(body, Not(Contains(""))) self.failUnless(body.startswith("Traceback "), body) d.addCallback(_internal_error_text3) @@ -1276,12 +1279,12 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi 500, "Internal Server Error", None, self.GET, "ERRORBOOM")) def _internal_error_html4(body): - self.failUnlessIn(b"", body) + self.assertThat(body, Contains(b"")) d.addCallback(_internal_error_html4) def _flush_errors(res): # Trial: please ignore the CompletelyUnhandledError in the logs - self.flushLoggedErrors(CompletelyUnhandledError) + flush_logged_errors(CompletelyUnhandledError) return res d.addBoth(_flush_errors) @@ -1312,8 +1315,8 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi d.addCallback(_stash_dir) d.addCallback(lambda ign: self.GET_unicode(self.dir_url, followRedirect=True)) def _check_dir_html(body): - self.failUnlessIn(DIR_HTML_TAG, body) - self.failUnlessIn("blacklisted.txt", body) + self.assertThat(body, Contains(DIR_HTML_TAG)) + self.assertThat(body, Contains("blacklisted.txt")) d.addCallback(_check_dir_html) d.addCallback(lambda ign: self.GET(self.url)) d.addCallback(lambda body: self.failUnlessEqual(DATA, body)) @@ -1336,8 +1339,8 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi # We should still be able to list the parent directory, in HTML... d.addCallback(lambda ign: self.GET_unicode(self.dir_url, followRedirect=True)) def _check_dir_html2(body): - self.failUnlessIn(DIR_HTML_TAG, body) - self.failUnlessIn("blacklisted.txt", body) + self.assertThat(body, Contains(DIR_HTML_TAG)) + self.assertThat(body, Contains("blacklisted.txt")) d.addCallback(_check_dir_html2) # ... and in JSON (used by CLI). 
@@ -1347,8 +1350,8 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi self.failUnless(isinstance(data, list), data) self.failUnlessEqual(data[0], "dirnode") self.failUnless(isinstance(data[1], dict), data) - self.failUnlessIn("children", data[1]) - self.failUnlessIn("blacklisted.txt", data[1]["children"]) + self.assertThat(data[1], Contains("children")) + self.assertThat(data[1]["children"], Contains("blacklisted.txt")) childdata = data[1]["children"]["blacklisted.txt"] self.failUnless(isinstance(childdata, list), data) self.failUnlessEqual(childdata[0], "filenode") @@ -1387,7 +1390,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi self.child_url = b"uri/"+dn.get_readonly_uri()+b"/child" d.addCallback(_get_dircap) d.addCallback(lambda ign: self.GET(self.dir_url_base, followRedirect=True)) - d.addCallback(lambda body: self.failUnlessIn(DIR_HTML_TAG, str(body, "utf-8"))) + d.addCallback(lambda body: self.assertThat(str(body, "utf-8"), Contains(DIR_HTML_TAG))) d.addCallback(lambda ign: self.GET(self.dir_url_json1)) d.addCallback(lambda res: json.loads(res)) # just check it decodes d.addCallback(lambda ign: self.GET(self.dir_url_json2)) diff --git a/src/allmydata/test/web/test_introducer.py b/src/allmydata/test/web/test_introducer.py index ba0a5beb9..6741c1a2d 100644 --- a/src/allmydata/test/web/test_introducer.py +++ b/src/allmydata/test/web/test_introducer.py @@ -1,14 +1,6 @@ """ Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 import json from os.path import join @@ -83,12 +75,18 @@ def create_introducer_webish(reactor, port_assigner, basedir): with the node and its webish service. """ node.create_node_dir(basedir, "testing") - _, port_endpoint = port_assigner.assign(reactor) + main_tub_location, main_tub_endpoint = port_assigner.assign(reactor) + _, web_port_endpoint = port_assigner.assign(reactor) with open(join(basedir, "tahoe.cfg"), "w") as f: f.write( "[node]\n" - "tub.location = 127.0.0.1:1\n" + - "web.port = {}\n".format(port_endpoint) + "tub.port = {main_tub_endpoint}\n" + "tub.location = {main_tub_location}\n" + "web.port = {web_port_endpoint}\n".format( + main_tub_endpoint=main_tub_endpoint, + main_tub_location=main_tub_location, + web_port_endpoint=web_port_endpoint, + ) ) intro_node = yield create_introducer(basedir) @@ -211,7 +209,7 @@ class IntroducerRootTests(SyncTestCase): main_tub = Tub() main_tub.listenOn(b"tcp:0") main_tub.setLocation(b"tcp:127.0.0.1:1") - introducer_node = _IntroducerNode(config, main_tub, None, None, None) + introducer_node = _IntroducerNode(config, main_tub, None, None) introducer_service = introducer_node.getServiceNamed("introducer") for n in range(2): diff --git a/src/allmydata/test/web/test_logs.py b/src/allmydata/test/web/test_logs.py index 89ec7ba42..34ecccff6 100644 --- a/src/allmydata/test/web/test_logs.py +++ b/src/allmydata/test/web/test_logs.py @@ -4,23 +4,10 @@ Tests for ``allmydata.web.logs``. Ported to Python 3. 
""" -from __future__ import ( - print_function, - unicode_literals, - absolute_import, - division, -) - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - import json -from twisted.trial import unittest from twisted.internet.defer import inlineCallbacks -from eliot import log_call from autobahn.twisted.testing import create_memory_agent, MemoryReactorClockResolver, create_pumper @@ -48,6 +35,7 @@ from .matchers import ( from ..common import ( SyncTestCase, + AsyncTestCase, ) from ...web.logs import ( @@ -55,6 +43,8 @@ from ...web.logs import ( TokenAuthenticatedWebSocketServerProtocol, ) +from eliot import log_call + class StreamingEliotLogsTests(SyncTestCase): """ Tests for the log streaming resources created by ``create_log_resources``. @@ -75,18 +65,20 @@ class StreamingEliotLogsTests(SyncTestCase): ) -class TestStreamingLogs(unittest.TestCase): +class TestStreamingLogs(AsyncTestCase): """ Test websocket streaming of logs """ def setUp(self): + super(TestStreamingLogs, self).setUp() self.reactor = MemoryReactorClockResolver() self.pumper = create_pumper() self.agent = create_memory_agent(self.reactor, self.pumper, TokenAuthenticatedWebSocketServerProtocol) return self.pumper.start() def tearDown(self): + super(TestStreamingLogs, self).tearDown() return self.pumper.stop() @inlineCallbacks @@ -114,10 +106,10 @@ class TestStreamingLogs(unittest.TestCase): proto.transport.loseConnection() yield proto.is_closed - self.assertEqual(len(messages), 2) - self.assertEqual(messages[0]["action_type"], "test:cli:some-exciting-action") - self.assertEqual(messages[0]["arguments"], - ["hello", "good-\\xff-day", 123, {"a": 35}, [None]]) - self.assertEqual(messages[1]["action_type"], "test:cli:some-exciting-action") - self.assertEqual("started", messages[0]["action_status"]) - self.assertEqual("succeeded", 
messages[1]["action_status"]) + self.assertThat(len(messages), Equals(3), messages) + self.assertThat(messages[0]["action_type"], Equals("test:cli:some-exciting-action")) + self.assertThat(messages[0]["arguments"], + Equals(["hello", "good-\\xff-day", 123, {"a": 35}, [None]])) + self.assertThat(messages[1]["action_type"], Equals("test:cli:some-exciting-action")) + self.assertThat("started", Equals(messages[0]["action_status"])) + self.assertThat("succeeded", Equals(messages[1]["action_status"])) diff --git a/src/allmydata/test/web/test_private.py b/src/allmydata/test/web/test_private.py index b426b4d93..110e31ff1 100644 --- a/src/allmydata/test/web/test_private.py +++ b/src/allmydata/test/web/test_private.py @@ -4,17 +4,6 @@ Tests for ``allmydata.web.private``. Ported to Python 3. """ -from __future__ import ( - print_function, - unicode_literals, - absolute_import, - division, -) - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - from testtools.matchers import ( Equals, ) diff --git a/src/allmydata/test/web/test_root.py b/src/allmydata/test/web/test_root.py index ca3cc695d..f3b877b2d 100644 --- a/src/allmydata/test/web/test_root.py +++ b/src/allmydata/test/web/test_root.py @@ -1,16 +1,9 @@ """ Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 import time +import json from urllib.parse import ( quote, @@ -20,17 +13,27 @@ from bs4 import ( BeautifulSoup, ) -from twisted.trial import unittest from twisted.web.template import Tag from twisted.web.test.requesthelper import DummyRequest from twisted.application import service +from testtools.twistedsupport import succeeded +from twisted.internet.defer import ( + inlineCallbacks, + succeed, +) from ...storage_client import ( NativeStorageServer, StorageFarmBroker, ) -from ...web.root import RootElement +from ...web.root import ( + RootElement, + Root, +) from ...util.connection_status import ConnectionStatus +from ...crypto.ed25519 import ( + create_signing_keypair, +) from allmydata.web.root import URIHandler from allmydata.client import _Client @@ -44,7 +47,18 @@ from ..common import ( EMPTY_CLIENT_CONFIG, ) -class RenderSlashUri(unittest.TestCase): +from ..common import ( + SyncTestCase, + AsyncTestCase, +) + +from testtools.matchers import ( + Equals, + Contains, + AfterPreprocessing, +) + +class RenderSlashUri(SyncTestCase): """ Ensure that URIs starting with /uri?uri= only accept valid capabilities @@ -53,7 +67,9 @@ class RenderSlashUri(unittest.TestCase): def setUp(self): self.client = object() self.res = URIHandler(self.client) + super(RenderSlashUri, self).setUp() + @inlineCallbacks def test_valid_query_redirect(self): """ A syntactically valid capability given in the ``uri`` query argument @@ -64,9 +80,7 @@ class RenderSlashUri(unittest.TestCase): b"mukesarwdjxiyqsjinbfiiro6q7kgmmekocxfjcngh23oxwyxtzq:2:5:5874882" ) query_args = {b"uri": [cap]} - response_body = self.successResultOf( - 
render(self.res, query_args), - ) + response_body = yield render(self.res, query_args) soup = BeautifulSoup(response_body, 'html5lib') tag = assert_soup_has_tag_with_attributes( self, @@ -74,9 +88,9 @@ class RenderSlashUri(unittest.TestCase): u"meta", {u"http-equiv": "refresh"}, ) - self.assertIn( - quote(cap, safe=""), + self.assertThat( tag.attrs.get(u"content"), + Contains(quote(cap, safe="")), ) def test_invalid(self): @@ -84,16 +98,14 @@ class RenderSlashUri(unittest.TestCase): A syntactically invalid capbility results in an error. """ query_args = {b"uri": [b"not a capability"]} - response_body = self.successResultOf( - render(self.res, query_args), - ) - self.assertEqual( + response_body = render(self.res, query_args) + self.assertThat( response_body, - b"Invalid capability", + succeeded(AfterPreprocessing(bytes, Equals(b"Invalid capability"))), ) -class RenderServiceRow(unittest.TestCase): +class RenderServiceRow(SyncTestCase): def test_missing(self): """ minimally-defined static servers just need anonymous-storage-FURL @@ -127,5 +139,96 @@ class RenderServiceRow(unittest.TestCase): # Coerce `items` to list and pick the first item from it. 
item = list(items)[0] - self.assertEqual(item.slotData.get("version"), "") - self.assertEqual(item.slotData.get("nickname"), "") + self.assertThat(item.slotData.get("version"), Equals("")) + self.assertThat(item.slotData.get("nickname"), Equals("")) + + +class RenderRoot(AsyncTestCase): + + @inlineCallbacks + def test_root_json(self): + """ + The 'welcome' / root page renders properly with ?t=json when some + servers show None for available_space while others show a + valid int + + See also https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3852 + """ + ann = { + "anonymous-storage-FURL": "pb://w2hqnbaa25yw4qgcvghl5psa3srpfgw3@tcp:127.0.0.1:51309/vucto2z4fxment3vfxbqecblbf6zyp6x", + "permutation-seed-base32": "w2hqnbaa25yw4qgcvghl5psa3srpfgw3", + } + srv0 = NativeStorageServer(b"server_id0", ann, None, {}, EMPTY_CLIENT_CONFIG) + srv0.get_connection_status = lambda: ConnectionStatus(False, "summary0", {}, 0, 0) + + srv1 = NativeStorageServer(b"server_id1", ann, None, {}, EMPTY_CLIENT_CONFIG) + srv1.get_connection_status = lambda: ConnectionStatus(False, "summary1", {}, 0, 0) + # arrange for this server to have some valid available space + srv1.get_available_space = lambda: 12345 + + class FakeClient(_Client): + history = [] + stats_provider = object() + nickname = "" + nodeid = b"asdf" + _node_public_key = create_signing_keypair()[1] + introducer_clients = [] + helper = None + + def __init__(self): + service.MultiService.__init__(self) + self.storage_broker = StorageFarmBroker( + permute_peers=True, + tub_maker=None, + node_config=EMPTY_CLIENT_CONFIG, + ) + self.storage_broker.test_add_server(b"test-srv0", srv0) + self.storage_broker.test_add_server(b"test-srv1", srv1) + + root = Root(FakeClient(), now_fn=time.time) + + lines = [] + + req = DummyRequest(b"") + req.fields = {} + req.args = { + b"t": [b"json"], + } + + # for some reason, DummyRequest is already finished when we + # try to add a notifyFinish handler, so override that + # behavior. 
+ + def nop(): + return succeed(None) + req.notifyFinish = nop + req.write = lines.append + + yield root.render(req) + + raw_js = b"".join(lines).decode("utf8") + js = json.loads(raw_js) + servers = js["servers"] + self.assertEquals(len(servers), 2) + self.assertIn( + { + "connection_status": "summary0", + "nodeid": "server_id0", + "last_received_data": 0, + "version": None, + "available_space": None, + "nickname": "" + }, + servers + ) + self.assertIn( + { + "connection_status": "summary1", + "nodeid": "server_id1", + "last_received_data": 0, + "version": None, + "available_space": 12345, + "nickname": "" + }, + servers + ) diff --git a/src/allmydata/test/web/test_status.py b/src/allmydata/test/web/test_status.py index 414925446..81c9568e5 100644 --- a/src/allmydata/test/web/test_status.py +++ b/src/allmydata/test/web/test_status.py @@ -3,14 +3,6 @@ Tests for ```allmydata.web.status```. Ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 from bs4 import BeautifulSoup from twisted.web.template import flattenString diff --git a/src/allmydata/test/web/test_util.py b/src/allmydata/test/web/test_util.py index c536dc9f1..c21a66e98 100644 --- a/src/allmydata/test/web/test_util.py +++ b/src/allmydata/test/web/test_util.py @@ -1,14 +1,6 @@ """ Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 from twisted.trial import unittest from allmydata.web import status, common diff --git a/src/allmydata/test/web/test_web.py b/src/allmydata/test/web/test_web.py index 1c9d6b65c..42be0f50d 100644 --- a/src/allmydata/test/web/test_web.py +++ b/src/allmydata/test/web/test_web.py @@ -1,19 +1,14 @@ """ -Ported to Python 3. +Tests for a bunch of web-related APIs. """ -from __future__ import print_function -from __future__ import absolute_import -from __future__ import division -from __future__ import unicode_literals +from __future__ import annotations -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 from six import ensure_binary import os.path, re, time import treq from urllib.parse import quote as urlquote, unquote as urlunquote +from base64 import urlsafe_b64encode from bs4 import BeautifulSoup @@ -38,6 +33,7 @@ from allmydata.util import fileutil, base32, hashutil, jsonbytes as json from allmydata.util.consumer import download_to_data from allmydata.util.encodingutil import to_bytes from ...util.connection_status import ConnectionStatus +from ...crypto.rsa import PublicKey, PrivateKey, create_signing_keypair, der_string_from_signing_key from ..common import ( EMPTY_CLIENT_CONFIG, FakeCHKFileNode, @@ -65,6 +61,7 @@ from allmydata.interfaces import ( MustBeReadonlyError, ) from allmydata.mutable import servermap, publish, retrieve +from allmydata.mutable.common import derive_mutable_keys from .. 
import common_util as testutil from ..common_util import TimezoneMixin from ..common_web import ( @@ -93,6 +90,7 @@ class FakeNodeMaker(NodeMaker): 'happy': 7, 'max_segment_size':128*1024 # 1024=KiB } + all_contents: dict[bytes, object] def _create_lit(self, cap): return FakeCHKFileNode(cap, self.all_contents) def _create_immutable(self, cap): @@ -100,11 +98,19 @@ class FakeNodeMaker(NodeMaker): def _create_mutable(self, cap): return FakeMutableFileNode(None, None, self.encoding_params, None, - self.all_contents).init_from_cap(cap) - def create_mutable_file(self, contents=b"", keysize=None, - version=SDMF_VERSION): + self.all_contents, None).init_from_cap(cap) + def create_mutable_file(self, + contents=None, + version=None, + keypair: tuple[PublicKey, PrivateKey] | None=None, + ): + if contents is None: + contents = b"" + if version is None: + version = SDMF_VERSION + n = FakeMutableFileNode(None, None, self.encoding_params, None, - self.all_contents) + self.all_contents, keypair) return n.create(contents, version=version) class FakeUploader(service.Service): @@ -335,7 +341,7 @@ class WebMixin(TimezoneMixin): self.ws = webish.WebishServer( self.s, "0", - tempdir=tempdir.path, + webish.anonymous_tempfile_factory(tempdir.path), staticdir=self.staticdir, clock=self.clock, now_fn=lambda:self.fakeTime, @@ -559,7 +565,9 @@ class WebMixin(TimezoneMixin): returnValue(data) @inlineCallbacks - def HEAD(self, urlpath, return_response=False, headers={}): + def HEAD(self, urlpath, return_response=False, headers=None): + if headers is None: + headers = {} url = self.webish_url + urlpath response = yield treq.request("head", url, persistent=False, headers=headers) @@ -567,7 +575,9 @@ class WebMixin(TimezoneMixin): raise Error(response.code, response="") returnValue( ("", response.code, response.headers) ) - def PUT(self, urlpath, data, headers={}): + def PUT(self, urlpath, data, headers=None): + if headers is None: + headers = {} url = self.webish_url + urlpath return 
do_http("put", url, data=data, headers=headers) @@ -612,7 +622,9 @@ class WebMixin(TimezoneMixin): body, headers = self.build_form(**fields) return self.POST2(urlpath, body, headers) - def POST2(self, urlpath, body="", headers={}, followRedirect=False): + def POST2(self, urlpath, body="", headers=None, followRedirect=False): + if headers is None: + headers = {} url = self.webish_url + urlpath if isinstance(body, str): body = body.encode("utf-8") @@ -820,29 +832,37 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi """ d = self.GET("/?t=json") def _check(res): + """ + Check that the results are correct. + We can't depend on the order of servers in the output + """ decoded = json.loads(res) - expected = { - u'introducers': { - u'statuses': [], + self.assertEqual(decoded['introducers'], {u'statuses': []}) + actual_servers = decoded[u"servers"] + self.assertEquals(len(actual_servers), 2) + self.assertIn( + { + u"nodeid": u'other_nodeid', + u'available_space': 123456, + u'connection_status': u'summary', + u'last_received_data': 30, + u'nickname': u'other_nickname \u263b', + u'version': u'1.0', }, - u'servers': sorted([ - {u"nodeid": u'other_nodeid', - u'available_space': 123456, - u'connection_status': u'summary', - u'last_received_data': 30, - u'nickname': u'other_nickname \u263b', - u'version': u'1.0', - }, - {u"nodeid": u'disconnected_nodeid', - u'available_space': 123456, - u'connection_status': u'summary', - u'last_received_data': 35, - u'nickname': u'disconnected_nickname \u263b', - u'version': u'1.0', - }, - ], key=lambda o: sorted(o.items())), - } - self.assertEqual(expected, decoded) + actual_servers + ) + self.assertIn( + { + u"nodeid": u'disconnected_nodeid', + u'available_space': 123456, + u'connection_status': u'summary', + u'last_received_data': 35, + u'nickname': u'disconnected_nickname \u263b', + u'version': u'1.0', + }, + actual_servers + ) + d.addCallback(_check) return d @@ -2860,6 +2880,41 @@ class Web(WebMixin, 
WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi "Unknown format: foo", method="post", data=body, headers=headers) + async def test_POST_upload_keypair(self) -> None: + """ + A *POST* creating a new mutable object may include a *private-key* + query argument giving a urlsafe-base64-encoded RSA private key to use + as the "signature key". The given signature key is used, rather than + a new one being generated. + """ + format = "sdmf" + priv, pub = create_signing_keypair(2048) + encoded_privkey = urlsafe_b64encode(der_string_from_signing_key(priv)).decode("ascii") + filename = "predetermined-sdmf" + expected_content = self.NEWFILE_CONTENTS * 100 + actual_cap = uri.from_string(await self.POST( + self.public_url + + f"/foo?t=upload&format={format}&private-key={encoded_privkey}", + file=(filename, expected_content), + )) + # Ideally we would inspect the private ("signature") and public + # ("verification") keys but they are not made easily accessible here + # (ostensibly because we have a FakeMutableFileNode instead of a real + # one). + # + # So, instead, re-compute the writekey and fingerprint and compare + # those against the capability string. + expected_writekey, _, expected_fingerprint = derive_mutable_keys((pub, priv)) + self.assertEqual( + (expected_writekey, expected_fingerprint), + (actual_cap.writekey, actual_cap.fingerprint), + ) + + # And the capability we got can be used to download the data we + # uploaded. + downloaded_content = await self.GET(f"/uri/{actual_cap.to_string().decode('ascii')}") + self.assertEqual(expected_content, downloaded_content) + def test_POST_upload_format(self): def _check_upload(ign, format, uri_prefix, fn=None): filename = format + ".txt" diff --git a/src/allmydata/test/web/test_webish.py b/src/allmydata/test/web/test_webish.py index 12a04a6eb..523dfc878 100644 --- a/src/allmydata/test/web/test_webish.py +++ b/src/allmydata/test/web/test_webish.py @@ -1,23 +1,11 @@ """ Tests for ``allmydata.webish``. 
- -Ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 +import tempfile from uuid import ( uuid4, ) -from errno import ( - EACCES, -) from io import ( BytesIO, ) @@ -39,9 +27,6 @@ from testtools.matchers import ( HasLength, ) -from twisted.python.runtime import ( - platform, -) from twisted.python.filepath import ( FilePath, ) @@ -59,6 +44,7 @@ from ..common import ( from ...webish import ( TahoeLAFSRequest, TahoeLAFSSite, + anonymous_tempfile_factory, ) @@ -90,10 +76,11 @@ class TahoeLAFSRequestTests(SyncTestCase): """ self._fields_test(b"GET", {}, b"", Equals(None)) - def test_form_fields(self): + def test_form_fields_if_filename_set(self): """ When a ``POST`` request is received, form fields are parsed into - ``TahoeLAFSRequest.fields``. + ``TahoeLAFSRequest.fields`` and the body is bytes (presuming ``filename`` + is set). """ form_data, boundary = multipart_formdata([ [param(u"name", u"foo"), @@ -121,6 +108,49 @@ class TahoeLAFSRequestTests(SyncTestCase): ), ) + def test_form_fields_if_name_is_file(self): + """ + When a ``POST`` request is received, form fields are parsed into + ``TahoeLAFSRequest.fields`` and the body is bytes when ``name`` + is set to ``"file"``. 
+ """ + form_data, boundary = multipart_formdata([ + [param(u"name", u"foo"), + body(u"bar"), + ], + [param(u"name", u"file"), + body(u"some file contents"), + ], + ]) + self._fields_test( + b"POST", + {b"content-type": b"multipart/form-data; boundary=" + bytes(boundary, 'ascii')}, + form_data.encode("ascii"), + AfterPreprocessing( + lambda fs: { + k: fs.getvalue(k) + for k + in fs.keys() + }, + Equals({ + "foo": "bar", + "file": b"some file contents", + }), + ), + ) + + def test_form_fields_require_correct_mime_type(self): + """ + The body of a ``POST`` is not parsed into fields if its mime type is + not ``multipart/form-data``. + + Reproducer for https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3854 + """ + data = u'{"lalala": "lolo"}' + data = data.encode("utf-8") + self._fields_test(b"POST", {"content-type": "application/json"}, + data, Equals(None)) + class TahoeLAFSSiteTests(SyncTestCase): """ @@ -139,8 +169,14 @@ class TahoeLAFSSiteTests(SyncTestCase): :return: ``None`` if the logging looks good. """ logPath = self.mktemp() + tempdir = self.mktemp() + FilePath(tempdir).makedirs() - site = TahoeLAFSSite(self.mktemp(), Resource(), logPath=logPath) + site = TahoeLAFSSite( + anonymous_tempfile_factory(tempdir), + Resource(), + logPath=logPath, + ) site.startFactory() channel = DummyChannel() @@ -158,6 +194,16 @@ class TahoeLAFSSiteTests(SyncTestCase): ), ) + def test_private_key_censoring(self): + """ + The log event for a request including a **private-key** query + argument has the private key value censored. + """ + self._test_censoring( + b"/uri?uri=URI:CHK:aaa:bbb&private-key=AAAAaaaabbbb==", + b"/uri?uri=[CENSORED]&private-key=[CENSORED]", + ) + def test_uri_censoring(self): """ The log event for a request for **/uri/** has the capability value @@ -203,11 +249,17 @@ class TahoeLAFSSiteTests(SyncTestCase): Create and return a new ``TahoeLAFSRequest`` hooked up to a ``TahoeLAFSSite``. - :param bytes tempdir: The temporary directory to give to the site. 
+ :param FilePath tempdir: The temporary directory to configure the site + to write large temporary request bodies to. The temporary files + will be named for ease of testing. :return TahoeLAFSRequest: The new request instance. """ - site = TahoeLAFSSite(tempdir.path, Resource(), logPath=self.mktemp()) + site = TahoeLAFSSite( + lambda: tempfile.NamedTemporaryFile(dir=tempdir.path), + Resource(), + logPath=self.mktemp(), + ) site.startFactory() channel = DummyChannel() @@ -221,6 +273,7 @@ class TahoeLAFSSiteTests(SyncTestCase): A request body smaller than 1 MiB is kept in memory. """ tempdir = FilePath(self.mktemp()) + tempdir.makedirs() request = self._create_request(tempdir) request.gotLength(request_body_size) self.assertThat( @@ -230,57 +283,21 @@ class TahoeLAFSSiteTests(SyncTestCase): def _large_request_test(self, request_body_size): """ - Assert that when a request with a body of of the given size is received - its content is written to the directory the ``TahoeLAFSSite`` is - configured with. + Assert that when a request with a body of the given size is + received its content is written a temporary file created by the given + tempfile factory. """ tempdir = FilePath(self.mktemp()) tempdir.makedirs() request = self._create_request(tempdir) - - # So. Bad news. The temporary file for the uploaded content is - # unnamed (and this isn't even necessarily a bad thing since it is how - # you get automatic on-process-exit cleanup behavior on POSIX). It's - # not visible by inspecting the filesystem. It has no name we can - # discover. Then how do we verify it is written to the right place? - # The question itself is meaningless if we try to be too precise. It - # *has* no filesystem location. However, it is still stored *on* some - # filesystem. We still want to make sure it is on the filesystem we - # specified because otherwise it might be on a filesystem that's too - # small or undesirable in some other way. 
- # - # I don't know of any way to ask a file descriptor which filesystem - # it's on, either, though. It might be the case that the [f]statvfs() - # result could be compared somehow to infer the filesystem but - # ... it's not clear what the failure modes might be there, across - # different filesystems and runtime environments. - # - # Another approach is to make the temp directory unwriteable and - # observe the failure when an attempt is made to create a file there. - # This is hardly a lovely solution but at least it's kind of simple. - # - # It would be nice if it worked consistently cross-platform but on - # Windows os.chmod is more or less broken. - if platform.isWindows(): - request.gotLength(request_body_size) - self.assertThat( - tempdir.children(), - HasLength(1), - ) - else: - tempdir.chmod(0o550) - with self.assertRaises(OSError) as ctx: - request.gotLength(request_body_size) - raise Exception( - "OSError not raised, instead tempdir.children() = {}".format( - tempdir.children(), - ), - ) - - self.assertThat( - ctx.exception.errno, - Equals(EACCES), - ) + request.gotLength(request_body_size) + # We can see the temporary file in the temporary directory we + # specified because _create_request makes a request that uses named + # temporary files instead of the usual anonymous temporary files. + self.assertThat( + tempdir.children(), + HasLength(1), + ) def test_unknown_request_size(self): """ diff --git a/src/allmydata/testing/web.py b/src/allmydata/testing/web.py index bb858b555..f7c8a4e1e 100644 --- a/src/allmydata/testing/web.py +++ b/src/allmydata/testing/web.py @@ -6,22 +6,15 @@ # This file is part of Tahoe-LAFS. # # See the docs/about.rst file for licensing information. -"""Test-helpers for clients that use the WebUI. - -Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 +Test-helpers for clients that use the WebUI. +""" +from __future__ import annotations import hashlib +from typing import Iterable import attr @@ -54,6 +47,7 @@ import allmydata.uri from allmydata.util import ( base32, ) +from ..util.dictutil import BytesKeyDict __all__ = ( @@ -147,8 +141,8 @@ class _FakeTahoeUriHandler(Resource, object): isLeaf = True - data = attr.ib(default=attr.Factory(dict)) - capability_generators = attr.ib(default=attr.Factory(dict)) + data: BytesKeyDict = attr.ib(default=attr.Factory(BytesKeyDict)) + capability_generators: dict[bytes,Iterable[bytes]] = attr.ib(default=attr.Factory(dict)) def _generate_capability(self, kind): """ @@ -209,7 +203,7 @@ class _FakeTahoeUriHandler(Resource, object): capability = None for arg, value in uri.query: if arg == u"uri": - capability = value + capability = value.encode("utf-8") # it's legal to use the form "/uri/" if capability is None and request.postpath and request.postpath[0]: capability = request.postpath[0] @@ -221,10 +215,9 @@ class _FakeTahoeUriHandler(Resource, object): # the user gave us a capability; if our Grid doesn't have any # data for it, that's an error. 
- capability = capability.encode('ascii') if capability not in self.data: - request.setResponseCode(http.BAD_REQUEST) - return u"No data for '{}'".format(capability.decode('ascii')) + request.setResponseCode(http.GONE) + return u"No data for '{}'".format(capability.decode('ascii')).encode("utf-8") return self.data[capability] @@ -284,6 +277,15 @@ class _SynchronousProducer(object): consumer.write(self.body) return succeed(None) + def stopProducing(self): + pass + + def pauseProducing(self): + pass + + def resumeProducing(self): + pass + def create_tahoe_treq_client(root=None): """ diff --git a/src/allmydata/unknown.py b/src/allmydata/unknown.py index 060696293..2a81437f6 100644 --- a/src/allmydata/unknown.py +++ b/src/allmydata/unknown.py @@ -1,13 +1,5 @@ """Ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 from zope.interface import implementer from twisted.internet import defer diff --git a/src/allmydata/uri.py b/src/allmydata/uri.py index 5641771d3..34f245ac7 100644 --- a/src/allmydata/uri.py +++ b/src/allmydata/uri.py @@ -6,26 +6,9 @@ Ported to Python 3. Methods ending in to_string() are actually to_bytes(), possibly should be fixed in follow-up port. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - # Don't import bytes or str, to prevent future's newbytes leaking and - # breaking code that only expects normal bytes. 
- from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, dict, list, object, range, max, min # noqa: F401 - from past.builtins import unicode as str - -from past.builtins import unicode, long import re - -try: - from typing import Type -except ImportError: - pass +from typing import Type from zope.interface import implementer from twisted.python.components import registerAdapter @@ -106,7 +89,7 @@ class CHKFileURI(_BaseURI): def to_string(self): assert isinstance(self.needed_shares, int) assert isinstance(self.total_shares, int) - assert isinstance(self.size, (int,long)) + assert isinstance(self.size, int) return (b'URI:CHK:%s:%s:%d:%d:%d' % (base32.b2a(self.key), @@ -162,7 +145,7 @@ class CHKFileVerifierURI(_BaseURI): def to_string(self): assert isinstance(self.needed_shares, int) assert isinstance(self.total_shares, int) - assert isinstance(self.size, (int,long)) + assert isinstance(self.size, int) return (b'URI:CHK-Verifier:%s:%s:%d:%d:%d' % (si_b2a(self.storage_index), @@ -707,7 +690,7 @@ class DirectoryURIVerifier(_DirectoryBaseURI): BASE_STRING=b'URI:DIR2-Verifier:' BASE_STRING_RE=re.compile(b'^'+BASE_STRING) - INNER_URI_CLASS=SSKVerifierURI # type: Type[IVerifierURI] + INNER_URI_CLASS : Type[IVerifierURI] = SSKVerifierURI def __init__(self, filenode_uri=None): if filenode_uri: @@ -757,7 +740,7 @@ ALLEGED_IMMUTABLE_PREFIX = b'imm.' 
def from_string(u, deep_immutable=False, name=u""): """Create URI from either unicode or byte string.""" - if isinstance(u, unicode): + if isinstance(u, str): u = u.encode("utf-8") if not isinstance(u, bytes): raise TypeError("URI must be unicode string or bytes: %r" % (u,)) @@ -859,7 +842,7 @@ def is_uri(s): return False def is_literal_file_uri(s): - if isinstance(s, unicode): + if isinstance(s, str): s = s.encode("utf-8") if not isinstance(s, bytes): return False @@ -868,7 +851,7 @@ def is_literal_file_uri(s): s.startswith(ALLEGED_IMMUTABLE_PREFIX + b'URI:LIT:')) def has_uri_prefix(s): - if isinstance(s, unicode): + if isinstance(s, str): s = s.encode("utf-8") if not isinstance(s, bytes): return False @@ -910,9 +893,9 @@ def pack_extension(data): pieces = [] for k in sorted(data.keys()): value = data[k] - if isinstance(value, (int, long)): + if isinstance(value, int): value = b"%d" % value - if isinstance(k, unicode): + if isinstance(k, str): k = k.encode("utf-8") assert isinstance(value, bytes), k assert re.match(br'^[a-zA-Z_\-]+$', k) diff --git a/src/allmydata/util/abbreviate.py b/src/allmydata/util/abbreviate.py index f895c3727..80abf7b05 100644 --- a/src/allmydata/util/abbreviate.py +++ b/src/allmydata/util/abbreviate.py @@ -3,14 +3,6 @@ Convert timestamps to abbreviated English text. Ported to Python 3. """ -from __future__ import division -from __future__ import absolute_import -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 import re from datetime import timedelta diff --git a/src/allmydata/util/assertutil.py b/src/allmydata/util/assertutil.py index ed4b8599f..776ed7ef7 100644 --- a/src/allmydata/util/assertutil.py +++ b/src/allmydata/util/assertutil.py @@ -7,16 +7,6 @@ have tests. Ported to Python 3. 
""" -from __future__ import division -from __future__ import absolute_import -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - - # The API importers expect: from pyutil.assertutil import _assert, precondition, postcondition diff --git a/src/allmydata/util/attrs_provides.py b/src/allmydata/util/attrs_provides.py new file mode 100644 index 000000000..4282c3d38 --- /dev/null +++ b/src/allmydata/util/attrs_provides.py @@ -0,0 +1,50 @@ +""" +Utilities related to attrs + +Handling for zope.interface is deprecated in attrs so we copy the +relevant support method here since we depend on zope.interface anyway +""" + +from attr._make import attrs, attrib + + +@attrs(repr=False, slots=True, hash=True) +class _ProvidesValidator: + interface = attrib() + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if not self.interface.providedBy(value): + raise TypeError( + "'{name}' must provide {interface!r} which {value!r} " + "doesn't.".format( + name=attr.name, interface=self.interface, value=value + ), + attr, + self.interface, + value, + ) + + def __repr__(self): + return "".format( + interface=self.interface + ) + + +def provides(interface): + """ + A validator that raises a `TypeError` if the initializer is called + with an object that does not provide the requested *interface* (checks are + performed using ``interface.providedBy(value)`` (see `zope.interface + `_). + + :param interface: The interface to check for. + :type interface: ``zope.interface.Interface`` + + :raises TypeError: With a human readable error message, the attribute + (of type `attrs.Attribute`), the expected interface, and the + value it got. 
+ """ + return _ProvidesValidator(interface) diff --git a/src/allmydata/util/base32.py b/src/allmydata/util/base32.py index ab65beeac..19a3bbe26 100644 --- a/src/allmydata/util/base32.py +++ b/src/allmydata/util/base32.py @@ -3,30 +3,11 @@ Base32 encoding. Ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - -if PY2: - def backwardscompat_bytes(b): - """ - Replace Future bytes with native Python 2 bytes, so % works - consistently until other modules are ported. - """ - return getattr(b, "__native__", lambda: b)() - import string - maketrans = string.maketrans -else: - def backwardscompat_bytes(b): - return b - maketrans = bytes.maketrans - from typing import Optional +def backwardscompat_bytes(b): + return b +maketrans = bytes.maketrans +from typing import Optional import base64 from allmydata.util.assertutil import precondition @@ -34,7 +15,7 @@ from allmydata.util.assertutil import precondition rfc3548_alphabet = b"abcdefghijklmnopqrstuvwxyz234567" # RFC3548 standard used by Gnutella, Content-Addressable Web, THEX, Bitzi, Web-Calculus... 
chars = rfc3548_alphabet -vals = backwardscompat_bytes(bytes(range(32))) +vals = bytes(range(32)) c2vtranstable = maketrans(chars, vals) v2ctranstable = maketrans(vals, chars) identitytranstable = maketrans(b'', b'') @@ -61,16 +42,16 @@ def get_trailing_chars_without_lsbs(N): d = {} return b''.join(_get_trailing_chars_without_lsbs(N, d=d)) -BASE32CHAR = backwardscompat_bytes(b'['+get_trailing_chars_without_lsbs(0)+b']') -BASE32CHAR_4bits = backwardscompat_bytes(b'['+get_trailing_chars_without_lsbs(1)+b']') -BASE32CHAR_3bits = backwardscompat_bytes(b'['+get_trailing_chars_without_lsbs(2)+b']') -BASE32CHAR_2bits = backwardscompat_bytes(b'['+get_trailing_chars_without_lsbs(3)+b']') -BASE32CHAR_1bits = backwardscompat_bytes(b'['+get_trailing_chars_without_lsbs(4)+b']') -BASE32STR_1byte = backwardscompat_bytes(BASE32CHAR+BASE32CHAR_3bits) -BASE32STR_2bytes = backwardscompat_bytes(BASE32CHAR+b'{3}'+BASE32CHAR_1bits) -BASE32STR_3bytes = backwardscompat_bytes(BASE32CHAR+b'{4}'+BASE32CHAR_4bits) -BASE32STR_4bytes = backwardscompat_bytes(BASE32CHAR+b'{6}'+BASE32CHAR_2bits) -BASE32STR_anybytes = backwardscompat_bytes(bytes(b'((?:%s{8})*') % (BASE32CHAR,) + bytes(b"(?:|%s|%s|%s|%s))") % (BASE32STR_1byte, BASE32STR_2bytes, BASE32STR_3bytes, BASE32STR_4bytes)) +BASE32CHAR = b'['+get_trailing_chars_without_lsbs(0)+b']' +BASE32CHAR_4bits = b'['+get_trailing_chars_without_lsbs(1)+b']' +BASE32CHAR_3bits = b'['+get_trailing_chars_without_lsbs(2)+b']' +BASE32CHAR_2bits = b'['+get_trailing_chars_without_lsbs(3)+b']' +BASE32CHAR_1bits = b'['+get_trailing_chars_without_lsbs(4)+b']' +BASE32STR_1byte = BASE32CHAR+BASE32CHAR_3bits +BASE32STR_2bytes = BASE32CHAR+b'{3}'+BASE32CHAR_1bits +BASE32STR_3bytes = BASE32CHAR+b'{4}'+BASE32CHAR_4bits +BASE32STR_4bytes = BASE32CHAR+b'{6}'+BASE32CHAR_2bits +BASE32STR_anybytes = bytes(b'((?:%s{8})*') % (BASE32CHAR,) + bytes(b"(?:|%s|%s|%s|%s))") % (BASE32STR_1byte, BASE32STR_2bytes, BASE32STR_3bytes, BASE32STR_4bytes) def b2a(os): # type: (bytes) -> bytes 
""" @@ -80,7 +61,7 @@ def b2a(os): # type: (bytes) -> bytes """ return base64.b32encode(os).rstrip(b"=").lower() -def b2a_or_none(os): # type: (Optional[bytes]) -> Optional[bytes] +def b2a_or_none(os: Optional[bytes]) -> Optional[bytes]: if os is not None: return b2a(os) return None @@ -100,8 +81,6 @@ NUM_OS_TO_NUM_QS=(0, 2, 4, 5, 7,) NUM_QS_TO_NUM_OS=(0, 1, 1, 2, 2, 3, 3, 4) NUM_QS_LEGIT=(1, 0, 1, 0, 1, 1, 0, 1,) NUM_QS_TO_NUM_BITS=tuple([_x*8 for _x in NUM_QS_TO_NUM_OS]) -if PY2: - del _x # A fast way to determine whether a given string *could* be base-32 encoded data, assuming that the # original data had 8K bits for a positive integer K. @@ -135,8 +114,6 @@ def a2b(cs): # type: (bytes) -> bytes """ @param cs the base-32 encoded data (as bytes) """ - # Workaround Future newbytes issues by converting to real bytes on Python 2: - cs = backwardscompat_bytes(cs) precondition(could_be_base32_encoded(cs), "cs is required to be possibly base32 encoded data.", cs=cs) precondition(isinstance(cs, bytes), cs) @@ -144,9 +121,8 @@ def a2b(cs): # type: (bytes) -> bytes # Add padding back, to make Python's base64 module happy: while (len(cs) * 5) % 8 != 0: cs += b"=" - # Let newbytes come through and still work on Python 2, where the base64 - # module gets confused by them. - return base64.b32decode(backwardscompat_bytes(cs)) + + return base64.b32decode(cs) __all__ = ["b2a", "a2b", "b2a_or_none", "BASE32CHAR_3bits", "BASE32CHAR_1bits", "BASE32CHAR", "BASE32STR_anybytes", "could_be_base32_encoded"] diff --git a/src/allmydata/util/base62.py b/src/allmydata/util/base62.py index 964baff34..3602ef0ef 100644 --- a/src/allmydata/util/base62.py +++ b/src/allmydata/util/base62.py @@ -3,22 +3,9 @@ Base62 encoding. Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals -from future.utils import PY2 -if PY2: - from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - -if PY2: - import string - maketrans = string.maketrans - translate = string.translate -else: - maketrans = bytes.maketrans - translate = bytes.translate +maketrans = bytes.maketrans +translate = bytes.translate from past.builtins import chr as byteschr diff --git a/src/allmydata/util/cbor.py b/src/allmydata/util/cbor.py new file mode 100644 index 000000000..a4b33ecec --- /dev/null +++ b/src/allmydata/util/cbor.py @@ -0,0 +1,19 @@ +""" +Unified entry point for CBOR encoding and decoding. + +Makes it less likely to use ``cbor2.loads()`` by mistake, which we want to avoid. +""" + +# We don't want to use the C extension for loading, at least for now, but using +# it for dumping should be fine. +from cbor2 import dumps, dump + +def load(*args, **kwargs): + """ + Don't use this! Here just in case someone uses it by mistake. + """ + raise RuntimeError("Use pycddl for decoding CBOR") + +loads = load + +__all__ = ["dumps", "loads", "dump", "load"] diff --git a/src/allmydata/util/configutil.py b/src/allmydata/util/configutil.py index ea64e1704..bdb872132 100644 --- a/src/allmydata/util/configutil.py +++ b/src/allmydata/util/configutil.py @@ -5,17 +5,7 @@ Configuration is returned as Unicode strings. Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals -from future.utils import PY2 -if PY2: - from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - -# On Python 2 we use the backport package; that means we always get unicode -# out. from configparser import ConfigParser import attr @@ -66,13 +56,15 @@ def write_config(tahoe_cfg, config): """ Write a configuration to a file. - :param FilePath tahoe_cfg: The path to which to write the config. + :param FilePath tahoe_cfg: The path to which to write the + config. The directories are created if they do not already exist. :param ConfigParser config: The configuration to write. :return: ``None`` """ tmp = tahoe_cfg.temporarySibling() + tahoe_cfg.parent().makedirs(ignoreExistingDirectory=True) # FilePath.open can only open files in binary mode which does not work # with ConfigParser.write. with open(tmp.path, "wt") as fp: @@ -80,7 +72,10 @@ def write_config(tahoe_cfg, config): # Windows doesn't have atomic overwrite semantics for moveTo. Thus we end # up slightly less than atomic. if platform.isWindows(): - tahoe_cfg.remove() + try: + tahoe_cfg.remove() + except FileNotFoundError: + pass tmp.moveTo(tahoe_cfg) def validate_config(fname, cfg, valid_config): @@ -162,7 +157,7 @@ class ValidConfiguration(object): def is_valid_item(self, section_name, item_name): """ - :return: True if the given section name, ite name pair is valid, False + :return: True if the given section name, item_name pair is valid, False otherwise. """ return ( diff --git a/src/allmydata/util/connection_status.py b/src/allmydata/util/connection_status.py index 0e8595e81..0ccdcd672 100644 --- a/src/allmydata/util/connection_status.py +++ b/src/allmydata/util/connection_status.py @@ -1,21 +1,13 @@ """ Parse connection status from Foolscap. 
- -Ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 +from __future__ import annotations import time from zope.interface import implementer from ..interfaces import IConnectionStatus +from foolscap.reconnector import Reconnector @implementer(IConnectionStatus) class ConnectionStatus(object): @@ -41,7 +33,7 @@ class ConnectionStatus(object): last_received_time=None, ) -def _hint_statuses(which, handlers, statuses): +def _hint_statuses(which, handlers, statuses) -> dict[str, str]: non_connected_statuses = {} for hint in which: handler = handlers.get(hint) @@ -50,7 +42,7 @@ def _hint_statuses(which, handlers, statuses): non_connected_statuses["%s%s" % (hint, handler_dsc)] = dsc return non_connected_statuses -def from_foolscap_reconnector(rc, last_received): +def from_foolscap_reconnector(rc: Reconnector, last_received: int, time=time.time) -> ConnectionStatus: ri = rc.getReconnectionInfo() # See foolscap/reconnector.py, ReconnectionInfo, for details about possible # states. The returned result is a native string, it seems, so convert to @@ -80,7 +72,7 @@ def from_foolscap_reconnector(rc, last_received): # ci describes the current in-progress attempt summary = "Trying to connect" elif state == "waiting": - now = time.time() + now = time() elapsed = now - ri.lastAttempt delay = ri.nextAttempt - now summary = "Reconnecting in %d seconds (last attempt %ds ago)" % \ diff --git a/src/allmydata/util/consumer.py b/src/allmydata/util/consumer.py index 3de82974d..c899fc25e 100644 --- a/src/allmydata/util/consumer.py +++ b/src/allmydata/util/consumer.py @@ -4,14 +4,6 @@ a filenode's read() method. See download_to_data() for an example of its use. 
Ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 from zope.interface import implementer from twisted.internet.interfaces import IConsumer diff --git a/src/allmydata/util/cputhreadpool.py b/src/allmydata/util/cputhreadpool.py new file mode 100644 index 000000000..3835701fa --- /dev/null +++ b/src/allmydata/util/cputhreadpool.py @@ -0,0 +1,88 @@ +""" +A global thread pool for CPU-intensive tasks. + +Motivation: + +* Certain tasks are blocking on CPU, and so should be run in a thread. +* The Twisted thread pool is used for operations that don't necessarily block + on CPU, like DNS lookups. CPU processing should not block DNS lookups! +* The number of threads should be fixed, and tied to the number of available + CPUs. + +As a first pass, this uses ``os.cpu_count()`` to determine the max number of +threads. This may create too many threads, as it doesn't cover things like +scheduler affinity or cgroups, but that's not the end of the world. +""" + +import os +from typing import TypeVar, Callable, cast +from functools import partial +import threading +from typing_extensions import ParamSpec +from unittest import TestCase + +from twisted.python.threadpool import ThreadPool +from twisted.internet.threads import deferToThreadPool +from twisted.internet import reactor +from twisted.internet.interfaces import IReactorFromThreads + +_CPU_THREAD_POOL = ThreadPool(minthreads=0, maxthreads=os.cpu_count() or 1, name="TahoeCPU") +if hasattr(threading, "_register_atexit"): + # This is a private API present in Python 3.8 or later, specifically + # designed for thread pool shutdown. 
Since it's private, it might go away + # at any point, so if it doesn't exist we still have a solution. + threading._register_atexit(_CPU_THREAD_POOL.stop) # type: ignore +else: + # Daemon threads allow shutdown to happen without any explicit stopping of + # threads. There are some bugs in old Python versions related to daemon + # threads (fixed in subsequent CPython patch releases), but Python's own + # thread pools use daemon threads in those versions so we're no worse off. + _CPU_THREAD_POOL.threadFactory = partial( # type: ignore + _CPU_THREAD_POOL.threadFactory, daemon=True + ) +_CPU_THREAD_POOL.start() + + +P = ParamSpec("P") +R = TypeVar("R") + +# Is running in a thread pool disabled? Should only be true in synchronous unit +# tests. +_DISABLED = False + + +async def defer_to_thread(f: Callable[P, R], *args: P.args, **kwargs: P.kwargs) -> R: + """ + Run the function in a thread, return the result. + + However, if ``disable_thread_pool_for_test()`` was called the function will + be called synchronously inside the current thread. + + To reduce chances of synchronous tests being misleading as a result, this + is an async function on presumption that will encourage immediate ``await``ing. + """ + if _DISABLED: + return f(*args, **kwargs) + + # deferToThreadPool has no type annotations... + result = await deferToThreadPool(cast(IReactorFromThreads, reactor), _CPU_THREAD_POOL, f, *args, **kwargs) + return result + + +def disable_thread_pool_for_test(test: TestCase) -> None: + """ + For the duration of the test, calls to ``defer_to_thread()`` will actually + run synchronously, which is useful for synchronous unit tests. 
+ """ + global _DISABLED + + def restore(): + global _DISABLED + _DISABLED = False + + test.addCleanup(restore) + + _DISABLED = True + + +__all__ = ["defer_to_thread", "disable_thread_pool_for_test"] diff --git a/src/allmydata/util/dbutil.py b/src/allmydata/util/dbutil.py index 916382972..b27b58ab5 100644 --- a/src/allmydata/util/dbutil.py +++ b/src/allmydata/util/dbutil.py @@ -6,15 +6,6 @@ Test coverage currently provided by test_backupdb.py. Ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - import os, sys import sqlite3 @@ -25,7 +16,7 @@ class DBError(Exception): def get_db(dbfile, stderr=sys.stderr, - create_version=(None, None), updaters={}, just_create=False, dbname="db", + create_version=(None, None), updaters=None, just_create=False, dbname="db", ): """Open or create the given db file. The parent directory must exist. create_version=(SCHEMA, VERNUM), and SCHEMA must have a 'version' table. @@ -33,6 +24,8 @@ def get_db(dbfile, stderr=sys.stderr, to get from ver=1 to ver=2. Returns a (sqlite3,db) tuple, or raises DBError. """ + if updaters is None: + updaters = {} must_create = not os.path.exists(dbfile) try: db = sqlite3.connect(dbfile) diff --git a/src/allmydata/util/deferredutil.py b/src/allmydata/util/deferredutil.py index ed2a11ee4..9e8d7bad4 100644 --- a/src/allmydata/util/deferredutil.py +++ b/src/allmydata/util/deferredutil.py @@ -1,33 +1,29 @@ """ Utilities for working with Twisted Deferreds. - -Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 +from __future__ import annotations import time +from functools import wraps -try: - from typing import ( - Callable, - Any, - ) -except ImportError: - pass +from typing import ( + Callable, + Any, + Sequence, + TypeVar, + Optional, + Coroutine, + Generator +) +from typing_extensions import ParamSpec from foolscap.api import eventually from eliot.twisted import ( inline_callbacks, ) from twisted.internet import defer, reactor, error +from twisted.internet.defer import Deferred from twisted.python.failure import Failure from allmydata.util import log @@ -215,10 +211,9 @@ class WaitForDelayedCallsMixin(PollMixin): @inline_callbacks def until( - action, # type: Callable[[], defer.Deferred[Any]] - condition, # type: Callable[[], bool] -): - # type: (...) -> defer.Deferred[None] + action: Callable[[], defer.Deferred[Any]], + condition: Callable[[], bool], +) -> Generator[Any, None, None]: """ Run a Deferred-returning function until a condition is true. @@ -231,3 +226,113 @@ def until( yield action() if condition(): break + + +P = ParamSpec("P") +R = TypeVar("R") + + +def async_to_deferred(f: Callable[P, Coroutine[defer.Deferred[R], None, R]]) -> Callable[P, Deferred[R]]: + """ + Wrap an async function to return a Deferred instead. + + Maybe solution to https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3886 + """ + + @wraps(f) + def not_async(*args: P.args, **kwargs: P.kwargs) -> Deferred[R]: + return defer.Deferred.fromCoroutine(f(*args, **kwargs)) + + return not_async + + +class MultiFailure(Exception): + """ + More than one failure occurred. 
+ """ + + def __init__(self, failures: Sequence[Failure]) -> None: + super(MultiFailure, self).__init__() + self.failures = failures + + +_T = TypeVar("_T") + +# Eventually this should be in Twisted upstream: +# https://github.com/twisted/twisted/pull/11818 +def race(ds: Sequence[Deferred[_T]]) -> Deferred[tuple[int, _T]]: + """ + Select the first available result from the sequence of Deferreds and + cancel the rest. + @return: A cancellable L{Deferred} that fires with the index and output of + the element of C{ds} to have a success result first, or that fires + with L{MultiFailure} holding a list of their failures if they all + fail. + """ + # Keep track of the Deferred for the action which completed first. When + # it completes, all of the other Deferreds will get cancelled but this one + # shouldn't be. Even though it "completed" it isn't really done - the + # caller will still be using it for something. If we cancelled it, + # cancellation could propagate down to them. + winner: Optional[Deferred] = None + + # The cancellation function for the Deferred this function returns. + def cancel(result: Deferred) -> None: + # If it is cancelled then we cancel all of the Deferreds for the + # individual actions because there is no longer the possibility of + # delivering any of their results anywhere. We don't have to fire + # `result` because the Deferred will do that for us. + for d in to_cancel: + d.cancel() + + # The Deferred that this function will return. It will fire with the + # index and output of the action that completes first, or None if all of + # the actions fail. If it is cancelled, all of the actions will be + # cancelled. + final_result: Deferred[tuple[int, _T]] = Deferred(canceller=cancel) + + # A callback for an individual action. 
+ def succeeded(this_output: _T, this_index: int) -> None: + # If it is the first action to succeed then it becomes the "winner", + # its index/output become the externally visible result, and the rest + # of the action Deferreds get cancelled. If it is not the first + # action to succeed (because some action did not support + # cancellation), just ignore the result. It is uncommon for this + # callback to be entered twice. The only way it can happen is if one + # of the input Deferreds has a cancellation function that fires the + # Deferred with a success result. + nonlocal winner + if winner is None: + # This is the first success. Act on it. + winner = to_cancel[this_index] + + # Cancel the rest. + for d in to_cancel: + if d is not winner: + d.cancel() + + # Fire our Deferred + final_result.callback((this_index, this_output)) + + # Keep track of how many actions have failed. If they all fail we need to + # deliver failure notification on our externally visible result. + failure_state = [] + + def failed(failure: Failure, this_index: int) -> None: + failure_state.append((this_index, failure)) + if len(failure_state) == len(to_cancel): + # Every operation failed. + failure_state.sort() + failures = [f for (ignored, f) in failure_state] + final_result.errback(MultiFailure(failures)) + + # Copy the sequence of Deferreds so we know it doesn't get mutated out + # from under us. + to_cancel = list(ds) + for index, d in enumerate(ds): + # Propagate the position of this action as well as the argument to f + # to the success callback so we can cancel the right Deferreds and + # propagate the result outwards. + d.addCallbacks(succeeded, failed, callbackArgs=(index,), errbackArgs=(index,)) + + return final_result diff --git a/src/allmydata/util/dictutil.py b/src/allmydata/util/dictutil.py index 5971d26f6..58820993f 100644 --- a/src/allmydata/util/dictutil.py +++ b/src/allmydata/util/dictutil.py @@ -1,21 +1,23 @@ """ Tools to mess with dicts. - -Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals -from future.utils import PY2 -if PY2: - # IMPORTANT: We deliberately don't import dict. The issue is that we're - # subclassing dict, so we'd end up exposing Python 3 dict APIs to lots of - # code that doesn't support it. - from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, list, object, range, str, max, min # noqa: F401 -from six import ensure_str +from __future__ import annotations +from typing import Callable, TypeVar +K = TypeVar("K") +V = TypeVar("V") + +def filter(pred: Callable[[V], bool], orig: dict[K, V]) -> dict[K, V]: + """ + Filter out key/value pairs whose value fails to match a predicate. + """ + return { + k: v + for (k, v) + in orig.items() + if pred(v) + } class DictOfSets(dict): def add(self, key, value): @@ -104,7 +106,7 @@ def _make_enforcing_override(K, method_name): raise TypeError("{} must be of type {}".format( repr(key), self.KEY_TYPE)) return getattr(dict, method_name)(self, key, *args, **kwargs) - f.__name__ = ensure_str(method_name) + f.__name__ = method_name setattr(K, method_name, f) for _method_name in ["__setitem__", "__getitem__", "setdefault", "get", @@ -113,18 +115,13 @@ for _method_name in ["__setitem__", "__getitem__", "setdefault", "get", del _method_name -if PY2: - # No need for enforcement, can use either bytes or unicode as keys and it's - # fine. 
- BytesKeyDict = UnicodeKeyDict = dict -else: - class BytesKeyDict(_TypedKeyDict): - """Keys should be bytes.""" +class BytesKeyDict(_TypedKeyDict): + """Keys should be bytes.""" - KEY_TYPE = bytes + KEY_TYPE = bytes - class UnicodeKeyDict(_TypedKeyDict): - """Keys should be unicode strings.""" +class UnicodeKeyDict(_TypedKeyDict): + """Keys should be unicode strings.""" - KEY_TYPE = str + KEY_TYPE = str diff --git a/src/allmydata/util/eliotutil.py b/src/allmydata/util/eliotutil.py index 4e48fbb9f..6a43a7b74 100644 --- a/src/allmydata/util/eliotutil.py +++ b/src/allmydata/util/eliotutil.py @@ -3,36 +3,22 @@ Tools aimed at the interaction between Tahoe-LAFS implementation and Eliot. Ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from __future__ import ( - unicode_literals, - print_function, - absolute_import, - division, -) __all__ = [ + "MemoryLogger", "inline_callbacks", "eliot_logging_service", "opt_eliot_destination", "opt_help_eliot_destinations", "validateInstanceOf", "validateSetMembership", + "capture_logging", ] -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 -from six import ensure_text - from sys import ( stdout, ) -from functools import wraps, partial +from functools import wraps from logging import ( INFO, Handler, @@ -40,25 +26,26 @@ from logging import ( ) from json import loads +from six import ensure_text from zope.interface import ( implementer, ) import attr -from attr.validators import ( - optional, - provides, -) - +from attr.validators import optional +from twisted.internet import reactor from eliot import ( ILogger, Message, FileDestination, - add_destinations, - remove_destination, write_traceback, start_action, ) +from eliot.testing import ( + 
MemoryLogger, + capture_logging, +) + from eliot._validation import ( ValidationError, ) @@ -66,8 +53,7 @@ from eliot.twisted import ( DeferredContext, inline_callbacks, ) -from eliot.testing import capture_logging as eliot_capture_logging - +from eliot.logwriter import ThreadedWriter from twisted.python.usage import ( UsageError, ) @@ -85,8 +71,9 @@ from twisted.logger import ( from twisted.internet.defer import ( maybeDeferred, ) -from twisted.application.service import Service +from twisted.application.service import MultiService +from .attrs_provides import provides from .jsonbytes import AnyBytesJSONEncoder @@ -154,7 +141,7 @@ def opt_help_eliot_destinations(self): raise SystemExit(0) -class _EliotLogging(Service): +class _EliotLogging(MultiService): """ A service which adds stdout as an Eliot destination while it is running. """ @@ -163,23 +150,22 @@ class _EliotLogging(Service): :param list destinations: The Eliot destinations which will is added by this service. """ - self.destinations = destinations - + MultiService.__init__(self) + for destination in destinations: + service = ThreadedWriter(destination, reactor) + service.setServiceParent(self) def startService(self): self.stdlib_cleanup = _stdlib_logging_to_eliot_configuration(getLogger()) self.twisted_observer = _TwistedLoggerToEliotObserver() globalLogPublisher.addObserver(self.twisted_observer) - add_destinations(*self.destinations) - return Service.startService(self) + return MultiService.startService(self) def stopService(self): - for dest in self.destinations: - remove_destination(dest) globalLogPublisher.removeObserver(self.twisted_observer) self.stdlib_cleanup() - return Service.stopService(self) + return MultiService.stopService(self) @implementer(ILogObserver) @@ -306,7 +292,7 @@ class _DestinationParser(object): rotateLength=rotate_length, maxRotatedFiles=max_rotated_files, ) - return lambda reactor: FileDestination(get_file(), AnyBytesJSONEncoder) + return lambda reactor: 
FileDestination(get_file(), encoder=AnyBytesJSONEncoder) _parse_destination_description = _DestinationParser().parse @@ -327,10 +313,3 @@ def log_call_deferred(action_type): return DeferredContext(d).addActionFinish() return logged_f return decorate_log_call_deferred - -# On Python 3, encoding bytes to JSON doesn't work, so we have a custom JSON -# encoder we want to use when validating messages. -if PY2: - capture_logging = eliot_capture_logging -else: - capture_logging = partial(eliot_capture_logging, encoder_=AnyBytesJSONEncoder) diff --git a/src/allmydata/util/encodingutil.py b/src/allmydata/util/encodingutil.py index f32710688..4f0910102 100644 --- a/src/allmydata/util/encodingutil.py +++ b/src/allmydata/util/encodingutil.py @@ -7,18 +7,7 @@ Ported to Python 3. Once Python 2 support is dropped, most of this module will obsolete, since Unicode is the default everywhere in Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals -from future.utils import PY2, PY3, native_str -from future.builtins import str as future_str -if PY2: - # We omit str() because that seems too tricky to get right. 
- from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, max, min # noqa: F401 - -from past.builtins import unicode from six import ensure_str import sys, os, re @@ -63,25 +52,11 @@ def check_encoding(encoding): io_encoding = "utf-8" filesystem_encoding = None -is_unicode_platform = False -use_unicode_filepath = False def _reload(): - global filesystem_encoding, is_unicode_platform, use_unicode_filepath - + global filesystem_encoding filesystem_encoding = canonical_encoding(sys.getfilesystemencoding()) check_encoding(filesystem_encoding) - is_unicode_platform = PY3 or sys.platform in ["win32", "darwin"] - - # Despite the Unicode-mode FilePath support added to Twisted in - # , we can't yet use - # Unicode-mode FilePaths with INotify on non-Windows platforms due to - # . Supposedly 7928 is fixed, - # though... and Tahoe-LAFS doesn't use inotify anymore! - # - # In the interest of not breaking anything, this logic is unchanged for - # Python 2, but on Python 3 the paths are always unicode, like it or not. - use_unicode_filepath = PY3 or sys.platform == "win32" _reload() @@ -104,13 +79,13 @@ def argv_to_unicode(s): This is the inverse of ``unicode_to_argv``. """ - if isinstance(s, unicode): + if isinstance(s, str): return s precondition(isinstance(s, bytes), s) try: - return unicode(s, io_encoding) + return str(s, io_encoding) except UnicodeDecodeError: raise usage.UsageError("Argument %s cannot be decoded as %s." % (quote_output(s), io_encoding)) @@ -134,10 +109,8 @@ def unicode_to_argv(s): On Python 2 on POSIX, this encodes using UTF-8. On Python 3 and on Windows, this returns the input unmodified. 
""" - precondition(isinstance(s, unicode), s) - if PY3: - warnings.warn("This will be unnecessary once Python 2 is dropped.", - DeprecationWarning) + precondition(isinstance(s, str), s) + warnings.warn("This is unnecessary.", DeprecationWarning) if sys.platform == "win32": return s return ensure_str(s) @@ -145,7 +118,7 @@ def unicode_to_argv(s): # According to unicode_to_argv above, the expected type for # cli args depends on the platform, so capture that expectation. -argv_type = (future_str, native_str) if sys.platform == "win32" else native_str +argv_type = (str,) """ The expected type for args to a subprocess """ @@ -190,25 +163,9 @@ def unicode_to_output(s): On Python 3 just returns the unicode string unchanged, since encoding is the responsibility of stdout/stderr, they expect Unicode by default. """ - precondition(isinstance(s, unicode), s) - if PY3: - warnings.warn("This will be unnecessary once Python 2 is dropped.", - DeprecationWarning) - return s - - try: - out = s.encode(io_encoding) - except (UnicodeEncodeError, UnicodeDecodeError): - raise UnicodeEncodeError(native_str(io_encoding), s, 0, 0, - native_str("A string could not be encoded as %s for output to the terminal:\n%r" % - (io_encoding, repr(s)))) - - if PRINTABLE_8BIT.search(out) is None: - raise UnicodeEncodeError(native_str(io_encoding), s, 0, 0, - native_str("A string encoded as %s for output to the terminal contained unsafe bytes:\n%r" % - (io_encoding, repr(s)))) - return out - + precondition(isinstance(s, str), s) + warnings.warn("This is unnecessary.", DeprecationWarning) + return s def _unicode_escape(m, quote_newlines): u = m.group(0) @@ -254,7 +211,7 @@ def quote_output_u(*args, **kwargs): Like ``quote_output`` but always return ``unicode``. 
""" result = quote_output(*args, **kwargs) - if isinstance(result, unicode): + if isinstance(result, str): return result # Since we're quoting, the assumption is this will be read by a human, and # therefore printed, so stdout's encoding is the plausible one. io_encoding @@ -279,7 +236,7 @@ def quote_output(s, quotemarks=True, quote_newlines=None, encoding=None): On Python 3, returns Unicode strings. """ - precondition(isinstance(s, (bytes, unicode)), s) + precondition(isinstance(s, (bytes, str)), s) # Since we're quoting, the assumption is this will be read by a human, and # therefore printed, so stdout's encoding is the plausible one. io_encoding # is now always utf-8. @@ -310,17 +267,7 @@ def quote_output(s, quotemarks=True, quote_newlines=None, encoding=None): return b'"%s"' % (escaped.encode(encoding, 'backslashreplace'),) result = _encode(s) - if PY3: - # On Python 3 half of what this function does is unnecessary, since - # sys.stdout typically expects Unicode. To ensure no encode errors, one - # can do: - # - # sys.stdout.reconfigure(encoding=sys.stdout.encoding, errors="backslashreplace") - # - # Although the problem is that doesn't work in Python 3.6, only 3.7 or - # later... For now not thinking about it, just returning unicode since - # that is the right thing to do on Python 3. 
- result = result.decode(encoding) + result = result.decode(encoding) return result @@ -328,7 +275,7 @@ def quote_path(path, quotemarks=True): return quote_output(b"/".join(map(to_bytes, path)), quotemarks=quotemarks, quote_newlines=True) def quote_local_unicode_path(path, quotemarks=True): - precondition(isinstance(path, unicode), path) + precondition(isinstance(path, str), path) if sys.platform == "win32" and path.startswith(u"\\\\?\\"): path = path[4 :] @@ -348,20 +295,13 @@ def extend_filepath(fp, segments): for segment in segments: fp = fp.child(segment) - if isinstance(fp.path, unicode) and not use_unicode_filepath: - return FilePath(fp.path.encode(filesystem_encoding)) - else: - return fp + return fp def to_filepath(path): - precondition(isinstance(path, unicode if use_unicode_filepath else (bytes, unicode)), - path=path) - - if isinstance(path, unicode) and not use_unicode_filepath: - path = path.encode(filesystem_encoding) + precondition(isinstance(path, str), path=path) if sys.platform == "win32": - _assert(isinstance(path, unicode), path=path) + _assert(isinstance(path, str), path=path) if path.startswith(u"\\\\?\\") and len(path) > 4: # FilePath normally strips trailing path separators, but not in this case. path = path.rstrip(u"\\") @@ -369,7 +309,7 @@ def to_filepath(path): return FilePath(path) def _decode(s): - precondition(isinstance(s, (bytes, unicode)), s=s) + precondition(isinstance(s, (bytes, str)), s=s) if isinstance(s, bytes): return s.decode(filesystem_encoding) @@ -390,7 +330,7 @@ def unicode_platform(): """ Does the current platform handle Unicode filenames natively? """ - return is_unicode_platform + return True class FilenameEncodingError(Exception): """ @@ -399,39 +339,13 @@ class FilenameEncodingError(Exception): """ pass -def listdir_unicode_fallback(path): - """ - This function emulates a fallback Unicode API similar to one available - under Windows or MacOS X. - - If badly encoded filenames are encountered, an exception is raised. 
- """ - precondition(isinstance(path, unicode), path) - - try: - byte_path = path.encode(filesystem_encoding) - except (UnicodeEncodeError, UnicodeDecodeError): - raise FilenameEncodingError(path) - - try: - return [unicode(fn, filesystem_encoding) for fn in os.listdir(byte_path)] - except UnicodeDecodeError as e: - raise FilenameEncodingError(e.object) - def listdir_unicode(path): """ Wrapper around listdir() which provides safe access to the convenient Unicode API even under platforms that don't provide one natively. """ - precondition(isinstance(path, unicode), path) - - # On Windows and MacOS X, the Unicode API is used - # On other platforms (ie. Unix systems), the byte-level API is used - - if is_unicode_platform: - return os.listdir(path) - else: - return listdir_unicode_fallback(path) + precondition(isinstance(path, str), path) + return os.listdir(path) def listdir_filepath(fp): return listdir_unicode(unicode_from_filepath(fp)) diff --git a/src/allmydata/util/fileutil.py b/src/allmydata/util/fileutil.py index e40e06180..0a4eebaba 100644 --- a/src/allmydata/util/fileutil.py +++ b/src/allmydata/util/fileutil.py @@ -4,16 +4,6 @@ Ported to Python3. Futz with files like a pro. 
""" -from __future__ import print_function -from __future__ import absolute_import -from __future__ import division -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - # open is not here because we want to use native strings on Py2 - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - import sys, os, stat, tempfile, time, binascii import six from collections import namedtuple @@ -346,8 +336,6 @@ def abspath_expanduser_unicode(path, base=None, long_path=True): if not os.path.isabs(path): if base is None: cwd = os.getcwd() - if PY2: - cwd = cwd.decode('utf8') path = os.path.join(cwd, path) else: path = os.path.join(base, path) diff --git a/src/allmydata/util/gcutil.py b/src/allmydata/util/gcutil.py index 33f1f64f5..2302ae6b7 100644 --- a/src/allmydata/util/gcutil.py +++ b/src/allmydata/util/gcutil.py @@ -10,14 +10,6 @@ Helpers for managing garbage collection. Ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 __all__ = [ "fileDescriptorResource", diff --git a/src/allmydata/util/happinessutil.py b/src/allmydata/util/happinessutil.py index 9f2617a5e..19b602826 100644 --- a/src/allmydata/util/happinessutil.py +++ b/src/allmydata/util/happinessutil.py @@ -4,15 +4,6 @@ reporting it in messages. Ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - # We omit dict, just in case newdict breaks things. 
- from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, list, object, range, str, max, min # noqa: F401 from copy import deepcopy from allmydata.immutable.happiness_upload import residual_network diff --git a/src/allmydata/util/hashutil.py b/src/allmydata/util/hashutil.py index 8525dd95e..7217a2d93 100644 --- a/src/allmydata/util/hashutil.py +++ b/src/allmydata/util/hashutil.py @@ -3,17 +3,6 @@ Hashing utilities. Ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - # Don't import bytes to prevent leaking future's bytes. - from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, dict, list, object, range, str, max, min, bytes as future_bytes # noqa: F401 -else: - future_bytes = bytes from past.builtins import chr as byteschr @@ -250,7 +239,7 @@ def bucket_cancel_secret_hash(file_cancel_secret, peerid): def _xor(a, b): - return b"".join([byteschr(c ^ b) for c in future_bytes(a)]) + return b"".join([byteschr(c ^ b) for c in bytes(a)]) def hmac(tag, data): diff --git a/src/allmydata/util/humanreadable.py b/src/allmydata/util/humanreadable.py index 60ac57083..356edb659 100644 --- a/src/allmydata/util/humanreadable.py +++ b/src/allmydata/util/humanreadable.py @@ -4,15 +4,6 @@ Utilities for turning objects into human-readable strings. This module has been ported to Python 3. 
""" -from __future__ import division -from __future__ import absolute_import -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - import os from reprlib import Repr diff --git a/src/allmydata/util/i2p_provider.py b/src/allmydata/util/i2p_provider.py index 071245adf..c480cd2f1 100644 --- a/src/allmydata/util/i2p_provider.py +++ b/src/allmydata/util/i2p_provider.py @@ -1,14 +1,9 @@ # -*- coding: utf-8 -*- -""" -Ported to Python 3. -""" -from __future__ import absolute_import, print_function, with_statement -from __future__ import division -from __future__ import unicode_literals -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 +from __future__ import annotations + +from typing import Any +from typing_extensions import Literal import os @@ -20,12 +15,15 @@ from twisted.internet.defer import inlineCallbacks, returnValue from twisted.internet.endpoints import clientFromString from twisted.internet.error import ConnectionRefusedError, ConnectError from twisted.application import service +from twisted.python.usage import Options +from ..listeners import ListenerConfig from ..interfaces import ( IAddressFamily, ) +from ..node import _Config -def create(reactor, config): +def create(reactor: Any, config: _Config) -> IAddressFamily: """ Create a new Provider service (this is an IService so must be hooked up to a parent or otherwise started). @@ -55,6 +53,21 @@ def _import_txi2p(): except ImportError: # pragma: no cover return None +def is_available() -> bool: + """ + Can this type of listener actually be used in this runtime + environment? 
+ + If its dependencies are missing then it cannot be. + """ + return not (_import_i2p() is None or _import_txi2p() is None) + +def can_hide_ip() -> Literal[True]: + """ + Can the transport supported by this type of listener conceal the + node's public internet address from peers? + """ + return True def _try_to_connect(reactor, endpoint_desc, stdout, txi2p): # yields True or None @@ -97,29 +110,35 @@ def _connect_to_i2p(reactor, cli_config, txi2p): else: raise ValueError("unable to reach any default I2P SAM port") -@inlineCallbacks -def create_config(reactor, cli_config): +async def create_config(reactor: Any, cli_config: Options) -> ListenerConfig: + """ + For a given set of command-line options, construct an I2P listener. + + This includes allocating a new I2P address. + """ txi2p = _import_txi2p() if not txi2p: raise ValueError("Cannot create I2P Destination without txi2p. " "Please 'pip install tahoe-lafs[i2p]' to fix this.") - tahoe_config_i2p = {} # written into tahoe.cfg:[i2p] + tahoe_config_i2p = [] # written into tahoe.cfg:[i2p] private_dir = os.path.abspath(os.path.join(cli_config["basedir"], "private")) - stdout = cli_config.stdout + # XXX We shouldn't carry stdout around by jamming it into the Options + # value. See https://tahoe-lafs.org/trac/tahoe-lafs/ticket/4048 + stdout = cli_config.stdout # type: ignore[attr-defined] if cli_config["i2p-launch"]: raise NotImplementedError("--i2p-launch is under development.") else: print("connecting to I2P (to allocate .i2p address)..", file=stdout) - sam_port = yield _connect_to_i2p(reactor, cli_config, txi2p) + sam_port = await _connect_to_i2p(reactor, cli_config, txi2p) print("I2P connection established", file=stdout) - tahoe_config_i2p["sam.port"] = sam_port + tahoe_config_i2p.append(("sam.port", sam_port)) external_port = 3457 # TODO: pick this randomly? there's no contention. 
privkeyfile = os.path.join(private_dir, "i2p_dest.privkey") sam_endpoint = clientFromString(reactor, sam_port) print("allocating .i2p address...", file=stdout) - dest = yield txi2p.generateDestination(reactor, privkeyfile, 'SAM', sam_endpoint) + dest = await txi2p.generateDestination(reactor, privkeyfile, 'SAM', sam_endpoint) print(".i2p address allocated", file=stdout) i2p_port = "listen:i2p" # means "see [i2p]", calls Provider.get_listener() i2p_location = "i2p:%s:%d" % (dest.host, external_port) @@ -132,10 +151,11 @@ def create_config(reactor, cli_config): # * "private_key_file" points to the on-disk copy of the private key # material (although we always write it to the same place) - tahoe_config_i2p["dest"] = "true" - tahoe_config_i2p["dest.port"] = str(external_port) - tahoe_config_i2p["dest.private_key_file"] = os.path.join("private", - "i2p_dest.privkey") + tahoe_config_i2p.extend([ + ("dest", "true"), + ("dest.port", str(external_port)), + ("dest.private_key_file", os.path.join("private", "i2p_dest.privkey")), + ]) # tahoe_config_i2p: this is a dictionary of keys/values to add to the # "[i2p]" section of tahoe.cfg, which tells the new node how to launch @@ -149,7 +169,7 @@ def create_config(reactor, cli_config): # at both create-node and startup time. The data directory is not # recorded in tahoe.cfg - returnValue((tahoe_config_i2p, i2p_port, i2p_location)) + return ListenerConfig([i2p_port], [i2p_location], {"i2p": tahoe_config_i2p}) @implementer(IAddressFamily) diff --git a/src/allmydata/util/idlib.py b/src/allmydata/util/idlib.py index eafcbc388..26dd72445 100644 --- a/src/allmydata/util/idlib.py +++ b/src/allmydata/util/idlib.py @@ -1,14 +1,6 @@ """ Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 from six import ensure_text from foolscap import base32 diff --git a/src/allmydata/util/iputil.py b/src/allmydata/util/iputil.py index fd3e88c7f..0666c37d4 100644 --- a/src/allmydata/util/iputil.py +++ b/src/allmydata/util/iputil.py @@ -1,17 +1,8 @@ """ Utilities for getting IP addresses. - -Ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2, native_str -if PY2: - from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 +from typing import Callable import os, socket @@ -39,6 +30,7 @@ from .gcutil import ( fcntl = requireModule("fcntl") +allocate_tcp_port: Callable[[], int] from foolscap.util import allocate_tcp_port # re-exported try: @@ -110,7 +102,7 @@ def get_local_addresses_sync(): on the local system. 
""" return list( - native_str(address[native_str("addr")]) + str(address["addr"]) for iface_name in interfaces() for address @@ -167,7 +159,7 @@ def _foolscapEndpointForPortNumber(portnum): # approach is error prone for the reasons described on # https://tahoe-lafs.org/trac/tahoe-lafs/ticket/2787 portnum = allocate_tcp_port() - return (portnum, native_str("tcp:%d" % (portnum,))) + return (portnum, "tcp:%d" % portnum) @implementer(IStreamServerEndpoint) @@ -216,7 +208,7 @@ def listenOnUnused(tub, portnum=None): """ portnum, endpoint = _foolscapEndpointForPortNumber(portnum) tub.listenOn(endpoint) - tub.setLocation(native_str("localhost:%d" % (portnum,))) + tub.setLocation("localhost:%d" % portnum) return portnum diff --git a/src/allmydata/util/jsonbytes.py b/src/allmydata/util/jsonbytes.py index 08e0cb68e..7415b4f02 100644 --- a/src/allmydata/util/jsonbytes.py +++ b/src/allmydata/util/jsonbytes.py @@ -4,28 +4,7 @@ A JSON encoder than can serialize bytes. Ported to Python 3. """ -from __future__ import unicode_literals -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -from future.utils import PY2, PY3 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - import json -import codecs - -if PY2: - def backslashreplace_py2(ex): - """ - On Python 2 'backslashreplace' error handler doesn't work, so write our - own. - """ - return ''.join('\\x{:02x}'.format(ord(c)) - for c in ex.object[ex.start:ex.end]), ex.end - - codecs.register_error("backslashreplace_tahoe_py2", backslashreplace_py2) def bytes_to_unicode(any_bytes, obj): @@ -35,8 +14,6 @@ def bytes_to_unicode(any_bytes, obj): :param obj: Object to de-byte-ify. 
""" errors = "backslashreplace" if any_bytes else "strict" - if PY2 and errors == "backslashreplace": - errors = "backslashreplace_tahoe_py2" def doit(obj): """Convert any bytes objects to unicode, recursively.""" @@ -61,6 +38,9 @@ class UTF8BytesJSONEncoder(json.JSONEncoder): """ A JSON encoder than can also encode UTF-8 encoded strings. """ + def default(self, o): + return bytes_to_unicode(False, o) + def encode(self, o, **kwargs): return json.JSONEncoder.encode( self, bytes_to_unicode(False, o), **kwargs) @@ -77,6 +57,9 @@ class AnyBytesJSONEncoder(json.JSONEncoder): Bytes are decoded to strings using UTF-8, if that fails to decode then the bytes are quoted. """ + def default(self, o): + return bytes_to_unicode(True, o) + def encode(self, o, **kwargs): return json.JSONEncoder.encode( self, bytes_to_unicode(True, o), **kwargs) @@ -108,14 +91,12 @@ def dumps_bytes(obj, *args, **kwargs): UTF-8 encoded Unicode strings. If True, non-UTF-8 bytes are quoted for human consumption. """ - result = dumps(obj, *args, **kwargs) - if PY3: - result = result.encode("utf-8") - return result + return dumps(obj, *args, **kwargs).encode("utf-8") # To make this module drop-in compatible with json module: loads = json.loads +load = json.load -__all__ = ["dumps", "loads"] +__all__ = ["dumps", "loads", "load"] diff --git a/src/allmydata/util/log.py b/src/allmydata/util/log.py index b442d30bb..65df01dfc 100644 --- a/src/allmydata/util/log.py +++ b/src/allmydata/util/log.py @@ -3,14 +3,7 @@ Logging utilities. Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals -from future.utils import PY2 -if PY2: - from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 from six import ensure_str from pyutil import nummedobj @@ -18,14 +11,10 @@ from pyutil import nummedobj from foolscap.logging import log from twisted.python import log as tw_log -if PY2: - def bytes_to_unicode(ign, obj): - return obj -else: - # We want to convert bytes keys to Unicode, otherwise JSON serialization - # inside foolscap will fail (for details see - # https://github.com/warner/foolscap/issues/88) - from .jsonbytes import bytes_to_unicode +# We want to convert bytes keys to Unicode, otherwise JSON serialization +# inside foolscap will fail (for details see +# https://github.com/warner/foolscap/issues/88) +from .jsonbytes import bytes_to_unicode NOISY = log.NOISY # 10 diff --git a/src/allmydata/util/mathutil.py b/src/allmydata/util/mathutil.py index 42863c30e..2aeb11b9e 100644 --- a/src/allmydata/util/mathutil.py +++ b/src/allmydata/util/mathutil.py @@ -6,16 +6,6 @@ Backwards compatibility for direct imports. Ported to Python 3. 
""" -from __future__ import division -from __future__ import absolute_import -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - - # The API importers expect: from pyutil.mathutil import div_ceil, next_multiple, pad_size, is_power_of_k, next_power_of_k, ave, log_ceil, log_floor diff --git a/src/allmydata/util/netstring.py b/src/allmydata/util/netstring.py index 423b8665b..ee7849b5f 100644 --- a/src/allmydata/util/netstring.py +++ b/src/allmydata/util/netstring.py @@ -3,16 +3,6 @@ Netstring encoding and decoding. Ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - -from past.builtins import long try: from typing import Optional, Tuple, List # noqa: F401 @@ -35,7 +25,7 @@ def split_netstring(data, numstrings, data does not exactly equal 'required_trailer'.""" assert isinstance(data, bytes) assert required_trailer is None or isinstance(required_trailer, bytes) - assert isinstance(position, (int, long)), (repr(position), type(position)) + assert isinstance(position, int), (repr(position), type(position)) elements = [] assert numstrings >= 0 while position < len(data): diff --git a/src/allmydata/util/observer.py b/src/allmydata/util/observer.py index 4a39fe014..2fa514a02 100644 --- a/src/allmydata/util/observer.py +++ b/src/allmydata/util/observer.py @@ -4,15 +4,6 @@ Observer for Twisted code. Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - import weakref from twisted.internet import defer from foolscap.api import eventually diff --git a/src/allmydata/util/pid.py b/src/allmydata/util/pid.py new file mode 100644 index 000000000..f12c201d1 --- /dev/null +++ b/src/allmydata/util/pid.py @@ -0,0 +1,120 @@ +import psutil + +# the docs are a little misleading, but this is either WindowsFileLock +# or UnixFileLock depending upon the platform we're currently on +from filelock import FileLock, Timeout + + +class ProcessInTheWay(Exception): + """ + our pidfile points at a running process + """ + + +class InvalidPidFile(Exception): + """ + our pidfile isn't well-formed + """ + + +class CannotRemovePidFile(Exception): + """ + something went wrong removing the pidfile + """ + + +def _pidfile_to_lockpath(pidfile): + """ + internal helper. + :returns FilePath: a path to use for file-locking the given pidfile + """ + return pidfile.sibling("{}.lock".format(pidfile.basename())) + + +def parse_pidfile(pidfile): + """ + :param FilePath pidfile: + :returns tuple: 2-tuple of pid, creation-time as int, float + :raises InvalidPidFile: on error + """ + with pidfile.open("r") as f: + content = f.read().decode("utf8").strip() + try: + pid, starttime = content.split() + pid = int(pid) + starttime = float(starttime) + except ValueError: + raise InvalidPidFile( + "found invalid PID file in {}".format( + pidfile + ) + ) + return pid, starttime + + +def check_pid_process(pidfile): + """ + If another instance appears to be running already, raise an + exception. Otherwise, write our PID + start time to the pidfile + and arrange to delete it upon exit. 
+ + :param FilePath pidfile: the file to read/write our PID from. + + :raises ProcessInTheWay: if a running process exists at our PID + """ + lock_path = _pidfile_to_lockpath(pidfile) + + try: + # a short timeout is fine, this lock should only be active + # while someone is reading or deleting the pidfile .. and + # facilitates testing the locking itself. + with FileLock(lock_path.path, timeout=2): + # check if we have another instance running already + if pidfile.exists(): + pid, starttime = parse_pidfile(pidfile) + try: + # if any other process is running at that PID, let the + # user decide if this is another legitimate + # instance. Automated programs may use the start-time to + # help decide this (if the PID is merely recycled, the + # start-time won't match). + psutil.Process(pid) + raise ProcessInTheWay( + "A process is already running as PID {}".format(pid) + ) + except psutil.NoSuchProcess: + print( + "'{pidpath}' refers to {pid} that isn't running".format( + pidpath=pidfile.path, + pid=pid, + ) + ) + # nothing is running at that PID so it must be a stale file + pidfile.remove() + + # write our PID + start-time to the pid-file + proc = psutil.Process() + with pidfile.open("w") as f: + f.write("{} {}\n".format(proc.pid, proc.create_time()).encode("utf8")) + except Timeout: + raise ProcessInTheWay( + "Another process is still locking {}".format(pidfile.path) + ) + + +def cleanup_pidfile(pidfile): + """ + Remove the pidfile specified (respecting locks). If anything at + all goes wrong, `CannotRemovePidFile` is raised. 
+ """ + lock_path = _pidfile_to_lockpath(pidfile) + with FileLock(lock_path.path): + try: + pidfile.remove() + except Exception as e: + raise CannotRemovePidFile( + "Couldn't remove '{pidfile}': {err}.".format( + pidfile=pidfile.path, + err=e, + ) + ) diff --git a/src/allmydata/util/pipeline.py b/src/allmydata/util/pipeline.py deleted file mode 100644 index 31f5d5d49..000000000 --- a/src/allmydata/util/pipeline.py +++ /dev/null @@ -1,149 +0,0 @@ -""" -A pipeline of Deferreds. - -Ported to Python 3. -""" - -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - -from twisted.internet import defer -from twisted.python.failure import Failure -from twisted.python import log -from allmydata.util.assertutil import precondition - - -class PipelineError(Exception): - """One of the pipelined messages returned an error. The received Failure - object is stored in my .error attribute.""" - def __init__(self, error): - self.error = error - - def __repr__(self): - return "<PipelineError error=(%r)>" % (self.error,) - def __str__(self): - return "<PipelineError error=(%s)>" % (self.error,) - -class SingleFileError(Exception): - """You are not permitted to add a job to a full pipeline.""" - - -class ExpandableDeferredList(defer.Deferred, object): - # like DeferredList(fireOnOneErrback=True) with a built-in - # gatherResults(), but you can add new Deferreds until you close it. This - # gives you a chance to add don't-complain-about-unhandled-error errbacks - # immediately after attachment, regardless of whether you actually end up - # wanting the list or not. 
- def __init__(self): - defer.Deferred.__init__(self) - self.resultsReceived = 0 - self.resultList = [] - self.failure = None - self.closed = False - - def addDeferred(self, d): - precondition(not self.closed, "don't call addDeferred() on a closed ExpandableDeferredList") - index = len(self.resultList) - self.resultList.append(None) - d.addCallbacks(self._cbDeferred, self._ebDeferred, - callbackArgs=(index,)) - return d - - def close(self): - self.closed = True - self.checkForFinished() - - def checkForFinished(self): - if not self.closed: - return - if self.called: - return - if self.failure: - self.errback(self.failure) - elif self.resultsReceived == len(self.resultList): - self.callback(self.resultList) - - def _cbDeferred(self, res, index): - self.resultList[index] = res - self.resultsReceived += 1 - self.checkForFinished() - return res - - def _ebDeferred(self, f): - self.failure = f - self.checkForFinished() - return f - - -class Pipeline(object): - """I manage a size-limited pipeline of Deferred operations, usually - callRemote() messages.""" - - def __init__(self, capacity): - self.capacity = capacity # how full we can be - self.gauge = 0 # how full we are - self.failure = None - self.waiting = [] # callers of add() who are blocked - self.unflushed = ExpandableDeferredList() - - def add(self, _size, _func, *args, **kwargs): - # We promise that all the Deferreds we return will fire in the order - # they were returned. To make it easier to keep this promise, we - # prohibit multiple outstanding calls to add() . 
- if self.waiting: - raise SingleFileError - if self.failure: - return defer.fail(self.failure) - self.gauge += _size - fd = defer.maybeDeferred(_func, *args, **kwargs) - fd.addBoth(self._call_finished, _size) - self.unflushed.addDeferred(fd) - fd.addErrback(self._eat_pipeline_errors) - fd.addErrback(log.err, "_eat_pipeline_errors didn't eat it") - if self.gauge < self.capacity: - return defer.succeed(None) - d = defer.Deferred() - self.waiting.append(d) - return d - - def flush(self): - if self.failure: - return defer.fail(self.failure) - d, self.unflushed = self.unflushed, ExpandableDeferredList() - d.close() - d.addErrback(self._flushed_error) - return d - - def _flushed_error(self, f): - precondition(self.failure) # should have been set by _call_finished - return self.failure - - def _call_finished(self, res, size): - self.gauge -= size - if isinstance(res, Failure): - res = Failure(PipelineError(res)) - if not self.failure: - self.failure = res - if self.failure: - while self.waiting: - d = self.waiting.pop(0) - d.errback(self.failure) - else: - while self.waiting and (self.gauge < self.capacity): - d = self.waiting.pop(0) - d.callback(None) - # the d.callback() might trigger a new call to add(), which - # will raise our gauge and might cause the pipeline to be - # filled. So the while() loop gets a chance to tell the - # caller to stop. - return res - - def _eat_pipeline_errors(self, f): - f.trap(PipelineError) - return None diff --git a/src/allmydata/util/pollmixin.py b/src/allmydata/util/pollmixin.py index 582bafe86..b23277565 100644 --- a/src/allmydata/util/pollmixin.py +++ b/src/allmydata/util/pollmixin.py @@ -4,22 +4,10 @@ Polling utility that returns Deferred. Ported to Python 3. 
""" -from __future__ import print_function -from __future__ import absolute_import -from __future__ import division -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 +from __future__ import annotations import time -try: - from typing import List -except ImportError: - pass - from twisted.internet import task class TimeoutError(Exception): @@ -29,7 +17,7 @@ class PollComplete(Exception): pass class PollMixin(object): - _poll_should_ignore_these_errors = [] # type: List[Exception] + _poll_should_ignore_these_errors : list[Exception] = [] def poll(self, check_f, pollinterval=0.01, timeout=1000): # Return a Deferred, then call check_f periodically until it returns diff --git a/src/allmydata/util/rrefutil.py b/src/allmydata/util/rrefutil.py index f39890ff1..15622435d 100644 --- a/src/allmydata/util/rrefutil.py +++ b/src/allmydata/util/rrefutil.py @@ -1,14 +1,6 @@ """ Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 from foolscap.api import Violation, RemoteException diff --git a/src/allmydata/util/spans.py b/src/allmydata/util/spans.py index 81e14c0fb..e5b265aaa 100644 --- a/src/allmydata/util/spans.py +++ b/src/allmydata/util/spans.py @@ -1,11 +1,3 @@ -from __future__ import print_function -from __future__ import absolute_import -from __future__ import division -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 class Spans(object): diff --git a/src/allmydata/util/statistics.py b/src/allmydata/util/statistics.py index a517751d6..9881dc13f 100644 --- a/src/allmydata/util/statistics.py +++ b/src/allmydata/util/statistics.py @@ -11,15 +11,6 @@ Ported to Python 3. # Transitive Grace Period Public License, version 1 or later. -from __future__ import unicode_literals -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -from future.utils import PY2 -if PY2: - from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - from allmydata.util.mathutil import round_sigfigs import math from functools import reduce diff --git a/src/allmydata/util/time_format.py b/src/allmydata/util/time_format.py index ff267485e..fb4d735ab 100644 --- a/src/allmydata/util/time_format.py +++ b/src/allmydata/util/time_format.py @@ -4,30 +4,30 @@ Time formatting utilities. 
ISO-8601: http://www.cl.cam.ac.uk/~mgk25/iso-time.html """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 -from future.utils import native_str import calendar, datetime, re, time +from typing import Optional + def format_time(t): return time.strftime("%Y-%m-%d %H:%M:%S", t) -def iso_utc_date(now=None, t=time.time): +def iso_utc_date( + now: Optional[float] = None, + t=time.time +) -> str: if now is None: now = t() return datetime.datetime.utcfromtimestamp(now).isoformat()[:10] -def iso_utc(now=None, sep='_', t=time.time): +def iso_utc( + now: Optional[float] = None, + sep: str = '_', + t=time.time +) -> str: if now is None: now = t() - sep = native_str(sep) # Python 2 doesn't allow unicode input to isoformat + sep = str(sep) # should already be a str return datetime.datetime.utcfromtimestamp(now).isoformat(sep) def iso_utc_time_to_seconds(isotime, _conversion_re=re.compile(r"(?P<year>\d{4})-(?P<month>\d{2})-(?P<day>\d{2})[T_ ](?P<hour>\d{2}):(?P<minute>\d{2}):(?P<second>\d{2})(?P<subsecond>\.\d+)?")): diff --git a/src/allmydata/util/tor_provider.py b/src/allmydata/util/tor_provider.py index 4ca19c01c..c40e65f42 100644 --- a/src/allmydata/util/tor_provider.py +++ b/src/allmydata/util/tor_provider.py @@ -1,15 +1,8 @@ # -*- coding: utf-8 -*- -""" -Ported to Python 3. 
-""" -from __future__ import absolute_import, print_function, with_statement -from __future__ import division -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 +from __future__ import annotations +from typing import Any +from typing_extensions import Literal import os from zope.interface import ( @@ -20,12 +13,15 @@ from twisted.internet.defer import inlineCallbacks, returnValue from twisted.internet.endpoints import clientFromString, TCP4ServerEndpoint from twisted.internet.error import ConnectionRefusedError, ConnectError from twisted.application import service +from twisted.python.usage import Options from .observer import OneShotObserverList from .iputil import allocate_tcp_port from ..interfaces import ( IAddressFamily, ) +from ..listeners import ListenerConfig + def _import_tor(): try: @@ -41,7 +37,13 @@ def _import_txtorcon(): except ImportError: # pragma: no cover return None -def create(reactor, config, import_tor=None, import_txtorcon=None): +def can_hide_ip() -> Literal[True]: + return True + +def is_available() -> bool: + return not (_import_tor() is None or _import_txtorcon() is None) + +def create(reactor, config, import_tor=None, import_txtorcon=None) -> _Provider: """ Create a new _Provider service (this is an IService so must be hooked up to a parent or otherwise started). @@ -98,33 +100,31 @@ def _try_to_connect(reactor, endpoint_desc, stdout, txtorcon): @inlineCallbacks def _launch_tor(reactor, tor_executable, private_dir, txtorcon): + """ + Launches Tor, returns a corresponding ``(control endpoint string, + txtorcon.Tor instance)`` tuple. + """ # TODO: handle default tor-executable # TODO: it might be a good idea to find exactly which Tor we used, # and record it's absolute path into tahoe.cfg . 
This would protect # us against one Tor being on $PATH at create-node time, but then a # different Tor being present at node startup. OTOH, maybe we don't # need to worry about it. - tor_config = txtorcon.TorConfig() - tor_config.DataDirectory = data_directory(private_dir) # unix-domain control socket - tor_config.ControlPort = "unix:" + os.path.join(private_dir, "tor.control") - tor_control_endpoint_desc = tor_config.ControlPort + tor_control_endpoint_desc = "unix:" + os.path.join(private_dir, "tor.control") - tor_config.SOCKSPort = allocate_tcp_port() - - tpp = yield txtorcon.launch_tor( - tor_config, reactor, + tor = yield txtorcon.launch( + reactor, + control_port=tor_control_endpoint_desc, + data_directory=data_directory(private_dir), tor_binary=tor_executable, + socks_port=allocate_tcp_port(), # can be useful when debugging; mirror Tor's output to ours # stdout=sys.stdout, # stderr=sys.stderr, ) - # now tor is launched and ready to be spoken to - # as a side effect, we've got an ITorControlProtocol ready to go - tor_control_proto = tpp.tor_protocol - # How/when to shut down the new process? for normal usage, the child # tor will exit when it notices its parent (us) quit. Unit tests will # mock out txtorcon.launch_tor(), so there will never be a real Tor @@ -134,7 +134,8 @@ def _launch_tor(reactor, tor_executable, private_dir, txtorcon): # (because it's a TorProcessProtocol) which returns a Deferred # that fires when Tor has actually exited. 
- returnValue((tor_control_endpoint_desc, tor_control_proto)) + returnValue((tor_control_endpoint_desc, tor)) + @inlineCallbacks def _connect_to_tor(reactor, cli_config, txtorcon): @@ -154,30 +155,32 @@ def _connect_to_tor(reactor, cli_config, txtorcon): else: raise ValueError("unable to reach any default Tor control port") -@inlineCallbacks -def create_config(reactor, cli_config): +async def create_config(reactor: Any, cli_config: Options) -> ListenerConfig: txtorcon = _import_txtorcon() if not txtorcon: raise ValueError("Cannot create onion without txtorcon. " "Please 'pip install tahoe-lafs[tor]' to fix this.") - tahoe_config_tor = {} # written into tahoe.cfg:[tor] + tahoe_config_tor = [] # written into tahoe.cfg:[tor] private_dir = os.path.abspath(os.path.join(cli_config["basedir"], "private")) - stdout = cli_config.stdout + # XXX We shouldn't carry stdout around by jamming it into the Options + # value. See https://tahoe-lafs.org/trac/tahoe-lafs/ticket/4048 + stdout = cli_config.stdout # type: ignore[attr-defined] if cli_config["tor-launch"]: - tahoe_config_tor["launch"] = "true" + tahoe_config_tor.append(("launch", "true")) tor_executable = cli_config["tor-executable"] if tor_executable: - tahoe_config_tor["tor.executable"] = tor_executable + tahoe_config_tor.append(("tor.executable", tor_executable)) print("launching Tor (to allocate .onion address)..", file=stdout) - (_, tor_control_proto) = yield _launch_tor( + (_, tor) = await _launch_tor( reactor, tor_executable, private_dir, txtorcon) + tor_control_proto = tor.protocol print("Tor launched", file=stdout) else: print("connecting to Tor (to allocate .onion address)..", file=stdout) - (port, tor_control_proto) = yield _connect_to_tor( + (port, tor_control_proto) = await _connect_to_tor( reactor, cli_config, txtorcon) print("Tor connection established", file=stdout) - tahoe_config_tor["control.port"] = port + tahoe_config_tor.append(("control.port", port)) external_port = 3457 # TODO: pick this randomly? 
there's no contention. @@ -186,12 +189,12 @@ def create_config(reactor, cli_config): "%d 127.0.0.1:%d" % (external_port, local_port) ) print("allocating .onion address (takes ~40s)..", file=stdout) - yield ehs.add_to_tor(tor_control_proto) + await ehs.add_to_tor(tor_control_proto) print(".onion address allocated", file=stdout) tor_port = "tcp:%d:interface=127.0.0.1" % local_port tor_location = "tor:%s:%d" % (ehs.hostname, external_port) privkey = ehs.private_key - yield ehs.remove_from_tor(tor_control_proto) + await ehs.remove_from_tor(tor_control_proto) # in addition to the "how to launch/connect-to tor" keys above, we also # record information about the onion service into tahoe.cfg. @@ -203,12 +206,12 @@ def create_config(reactor, cli_config): # * "private_key_file" points to the on-disk copy of the private key # material (although we always write it to the same place) - tahoe_config_tor["onion"] = "true" - tahoe_config_tor["onion.local_port"] = str(local_port) - tahoe_config_tor["onion.external_port"] = str(external_port) - assert privkey - tahoe_config_tor["onion.private_key_file"] = os.path.join("private", - "tor_onion.privkey") + tahoe_config_tor.extend([ + ("onion", "true"), + ("onion.local_port", str(local_port)), + ("onion.external_port", str(external_port)), + ("onion.private_key_file", os.path.join("private", "tor_onion.privkey")), + ]) privkeyfile = os.path.join(private_dir, "tor_onion.privkey") with open(privkeyfile, "wb") as f: if isinstance(privkey, str): @@ -227,7 +230,11 @@ def create_config(reactor, cli_config): # at both create-node and startup time. 
The data directory is not # recorded in tahoe.cfg - returnValue((tahoe_config_tor, tor_port, tor_location)) + return ListenerConfig( + [tor_port], + [tor_location], + {"tor": tahoe_config_tor}, + ) @implementer(IAddressFamily) @@ -294,7 +301,7 @@ class _Provider(service.MultiService): returnValue(tor_control_endpoint) def _get_launched_tor(self, reactor): - # this fires with a tuple of (control_endpoint, tor_protocol) + # this fires with a tuple of (control_endpoint, txtorcon.Tor instance) if not self._tor_launched: self._tor_launched = OneShotObserverList() private_dir = self._config.get_config_path("private") @@ -325,17 +332,20 @@ class _Provider(service.MultiService): require("external_port") require("private_key_file") - @inlineCallbacks - def _start_onion(self, reactor): + def get_tor_instance(self, reactor: object): + """Return a ``Deferred`` that fires with a ``txtorcon.Tor`` instance.""" # launch tor, if necessary if self._get_tor_config("launch", False, boolean=True): - (_, tor_control_proto) = yield self._get_launched_tor(reactor) + return self._get_launched_tor(reactor).addCallback(lambda t: t[1]) else: controlport = self._get_tor_config("control.port", None) tcep = clientFromString(reactor, controlport) - tor_state = yield self._txtorcon.build_tor_connection(tcep) - tor_control_proto = tor_state.protocol + return self._txtorcon.connect(reactor, tcep) + @inlineCallbacks + def _start_onion(self, reactor): + tor_instance = yield self.get_tor_instance(reactor) + tor_control_proto = tor_instance.protocol local_port = int(self._get_tor_config("onion.local_port")) external_port = int(self._get_tor_config("onion.external_port")) diff --git a/src/allmydata/util/yamlutil.py b/src/allmydata/util/yamlutil.py index fd9fc73e2..512d5a2a9 100644 --- a/src/allmydata/util/yamlutil.py +++ b/src/allmydata/util/yamlutil.py @@ -1,40 +1,9 @@ """ Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 import yaml - -if PY2: - # On Python 2 the way pyyaml deals with Unicode strings is inconsistent. - # - # >>> yaml.safe_load(yaml.safe_dump(u"hello")) - # 'hello' - # >>> yaml.safe_load(yaml.safe_dump(u"hello\u1234")) - # u'hello\u1234' - # - # In other words, Unicode strings get roundtripped to byte strings, but - # only sometimes. - # - # In order to ensure unicode stays unicode, we add a configuration saying - # that the YAML String Language-Independent Type ("a sequence of zero or - # more Unicode characters") should be the underlying Unicode string object, - # rather than converting to bytes when possible. - # - # Reference: https://yaml.org/type/str.html - def construct_unicode(loader, node): - return node.value - yaml.SafeLoader.add_constructor("tag:yaml.org,2002:str", - construct_unicode) - def safe_load(f): return yaml.safe_load(f) diff --git a/src/allmydata/web/check_results.py b/src/allmydata/web/check_results.py index 6c8810f2b..1ec835658 100644 --- a/src/allmydata/web/check_results.py +++ b/src/allmydata/web/check_results.py @@ -1,14 +1,6 @@ """ Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 import time diff --git a/src/allmydata/web/common.py b/src/allmydata/web/common.py index bf89044a3..cf6eaecff 100644 --- a/src/allmydata/web/common.py +++ b/src/allmydata/web/common.py @@ -1,26 +1,23 @@ """ Ported to Python 3. """ -from __future__ import division -from __future__ import absolute_import -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, max, min # noqa: F401 - from past.builtins import unicode as str # prevent leaking newbytes/newstr into code that can't handle it +from __future__ import annotations from six import ensure_str - -try: - from typing import Optional, Union, Tuple, Any -except ImportError: - pass +import sys +if sys.version_info[:2] >= (3, 9): + from importlib.resources import files as resource_files, as_file +else: + from importlib_resources import files as resource_files, as_file +from contextlib import ExitStack +import weakref +from typing import Optional, Union, TypeVar, overload +from typing_extensions import Literal import time import json from functools import wraps +from base64 import urlsafe_b64decode from hyperlink import ( DecodedURL, @@ -38,6 +35,7 @@ from twisted.web import ( http, resource, template, + static, ) from twisted.web.iweb import ( IRequest, @@ -94,6 +92,7 @@ from allmydata.util.encodingutil import ( to_bytes, ) from allmydata.util import abbreviate +from allmydata.crypto.rsa import PrivateKey, PublicKey, create_signing_keypair_from_string 
class WebError(Exception): @@ -126,7 +125,7 @@ def boolean_of_arg(arg): # type: (bytes) -> bool return arg.lower() in (b"true", b"t", b"1", b"on") -def parse_replace_arg(replace): # type: (bytes) -> Union[bool,_OnlyFiles] +def parse_replace_arg(replace: bytes) -> Union[bool,_OnlyFiles]: assert isinstance(replace, bytes) if replace.lower() == b"only-files": return ONLY_FILES @@ -713,8 +712,15 @@ def url_for_string(req, url_string): ) return url +T = TypeVar("T") -def get_arg(req, argname, default=None, multiple=False): # type: (IRequest, Union[bytes,str], Any, bool) -> Union[bytes,Tuple[bytes],Any] +@overload +def get_arg(req: IRequest, argname: str | bytes, default: Optional[T] = None, *, multiple: Literal[False] = False) -> T | bytes: ... + +@overload +def get_arg(req: IRequest, argname: str | bytes, default: Optional[T] = None, *, multiple: Literal[True]) -> T | tuple[bytes, ...]: ... + +def get_arg(req: IRequest, argname: str | bytes, default: Optional[T] = None, *, multiple: bool = False) -> None | T | bytes | tuple[bytes, ...]: """Extract an argument from either the query args (req.args) or the form body fields (req.fields). If multiple=False, this returns a single value (or the default, which defaults to None), and the query args take @@ -725,15 +731,21 @@ def get_arg(req, argname, default=None, multiple=False): # type: (IRequest, Uni :return: Either bytes or tuple of bytes. 
""" + # Need to import here to prevent circular import: + from ..webish import TahoeLAFSRequest + if isinstance(argname, str): - argname = argname.encode("utf-8") - if isinstance(default, str): - default = default.encode("utf-8") - results = [] - if argname in req.args: - results.extend(req.args[argname]) - argname_unicode = str(argname, "utf-8") - if req.fields and argname_unicode in req.fields: + argname_bytes = argname.encode("utf-8") + else: + argname_bytes = argname + + results : list[bytes] = [] + if req.args is not None and argname_bytes in req.args: + results.extend(req.args[argname_bytes]) + argname_unicode = str(argname_bytes, "utf-8") + if isinstance(req, TahoeLAFSRequest) and req.fields and argname_unicode in req.fields: + # In all but one or two unit tests, the request will be a + # TahoeLAFSRequest. value = req.fields[argname_unicode].value if isinstance(value, str): value = value.encode("utf-8") @@ -742,6 +754,9 @@ def get_arg(req, argname, default=None, multiple=False): # type: (IRequest, Uni return tuple(results) if results: return results[0] + + if isinstance(default, str): + return default.encode("utf-8") return default @@ -833,3 +848,32 @@ def abbreviate_time(data): if s >= 0.001: return u"%.1fms" % (1000*s) return u"%.0fus" % (1000000*s) + +def get_keypair(request: IRequest) -> tuple[PublicKey, PrivateKey] | None: + """ + Load a keypair from a urlsafe-base64-encoded RSA private key in the + **private-key** argument of the given request, if there is one. + """ + privkey_der = get_arg(request, "private-key", default=None, multiple=False) + if privkey_der is None: + return None + privkey, pubkey = create_signing_keypair_from_string(urlsafe_b64decode(privkey_der)) + return pubkey, privkey + + +def add_static_children(root: IResource): + """ + Add static files from C{allmydata.web} to the given resource. 
+ + Package resources may be on the filesystem, or they may be in a zip + or something, so we need to do a bit more work to serve them as + static files. + """ + temporary_file_manager = ExitStack() + static_dir = resource_files("allmydata.web") / "static" + for child in static_dir.iterdir(): + child_path = child.name.encode("utf-8") + root.putChild(child_path, static.File( + str(temporary_file_manager.enter_context(as_file(child))) + )) + weakref.finalize(root, temporary_file_manager.close) diff --git a/src/allmydata/web/directory.py b/src/allmydata/web/directory.py index 240fdd90c..001caf22f 100644 --- a/src/allmydata/web/directory.py +++ b/src/allmydata/web/directory.py @@ -1,16 +1,6 @@ """ Ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, max, min # noqa: F401 - # Don't use Future's str so that we don't get leaks into bad byte formatting - from past.builtins import unicode as str from urllib.parse import quote as url_quote from datetime import timedelta diff --git a/src/allmydata/web/filenode.py b/src/allmydata/web/filenode.py index dd793888e..680ca3331 100644 --- a/src/allmydata/web/filenode.py +++ b/src/allmydata/web/filenode.py @@ -1,23 +1,12 @@ """ Ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, max, min # noqa: F401 - # Use native unicode() as str() to prevent leaking futurebytes in ways that - # break string formattin. 
- from past.builtins import unicode as str -from past.builtins import long +from __future__ import annotations from twisted.web import http, static from twisted.internet import defer from twisted.web.resource import ( - Resource, # note: Resource is an old-style class + Resource, ErrorPage, ) @@ -34,6 +23,7 @@ from allmydata.blacklist import ( ) from allmydata.web.common import ( + get_keypair, boolean_of_arg, exception_to_child, get_arg, @@ -56,7 +46,6 @@ from allmydata.web.check_results import ( from allmydata.web.info import MoreInfo from allmydata.util import jsonbytes as json - class ReplaceMeMixin(object): def replace_me_with_a_child(self, req, client, replace): # a new file is being uploaded in our place. @@ -64,7 +53,8 @@ class ReplaceMeMixin(object): mutable_type = get_mutable_type(file_format) if mutable_type is not None: data = MutableFileHandle(req.content) - d = client.create_mutable_file(data, version=mutable_type) + keypair = get_keypair(req) + d = client.create_mutable_file(data, version=mutable_type, unique_keypair=keypair) def _uploaded(newnode): d2 = self.parentnode.set_node(self.name, newnode, overwrite=replace) @@ -106,7 +96,8 @@ class ReplaceMeMixin(object): if file_format in ("SDMF", "MDMF"): mutable_type = get_mutable_type(file_format) uploadable = MutableFileHandle(contents.file) - d = client.create_mutable_file(uploadable, version=mutable_type) + keypair = get_keypair(req) + d = client.create_mutable_file(uploadable, version=mutable_type, unique_keypair=keypair) def _uploaded(newnode): d2 = self.parentnode.set_node(self.name, newnode, overwrite=replace) @@ -395,7 +386,7 @@ class FileDownloader(Resource, object): # list of (first,last) inclusive range tuples. 
filesize = self.filenode.get_size() - assert isinstance(filesize, (int,long)), filesize + assert isinstance(filesize, int), filesize try: # byte-ranges-specifier @@ -408,19 +399,19 @@ class FileDownloader(Resource, object): if first == '': # suffix-byte-range-spec - first = filesize - long(last) + first = filesize - int(last) last = filesize - 1 else: # byte-range-spec # first-byte-pos - first = long(first) + first = int(first) # last-byte-pos if last == '': last = filesize - 1 else: - last = long(last) + last = int(last) if last < first: raise ValueError @@ -456,7 +447,7 @@ class FileDownloader(Resource, object): b'attachment; filename="%s"' % self.filename) filesize = self.filenode.get_size() - assert isinstance(filesize, (int,long)), filesize + assert isinstance(filesize, int), filesize first, size = 0, None contentsize = filesize req.setHeader("accept-ranges", "bytes") diff --git a/src/allmydata/web/info.py b/src/allmydata/web/info.py index 2d45f9994..e10e59061 100644 --- a/src/allmydata/web/info.py +++ b/src/allmydata/web/info.py @@ -1,14 +1,6 @@ """ Ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 import os from urllib.parse import quote as urlquote diff --git a/src/allmydata/web/introweb.py b/src/allmydata/web/introweb.py index 621a15a5c..7cb74a1c1 100644 --- a/src/allmydata/web/introweb.py +++ b/src/allmydata/web/introweb.py @@ -1,26 +1,16 @@ """ Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - -import time, os -from pkg_resources import resource_filename +import time from twisted.web.template import Element, XMLFile, renderElement, renderer from twisted.python.filepath import FilePath -from twisted.web import static import allmydata from allmydata.util import idlib, jsonbytes as json from allmydata.web.common import ( render_time, MultiFormatResource, SlotsSequenceElement, + add_static_children, ) @@ -38,9 +28,7 @@ class IntroducerRoot(MultiFormatResource): self.introducer_service = introducer_node.getServiceNamed("introducer") # necessary as a root Resource self.putChild(b"", self) - static_dir = resource_filename("allmydata.web", "static") - for filen in os.listdir(static_dir): - self.putChild(filen.encode("utf-8"), static.File(os.path.join(static_dir, filen))) + add_static_children(self) def _create_element(self): """ diff --git a/src/allmydata/web/logs.py b/src/allmydata/web/logs.py index a79440eb9..7c4d285f5 100644 --- a/src/allmydata/web/logs.py +++ b/src/allmydata/web/logs.py @@ -1,12 +1,6 @@ """ Ported to Python 3. """ -from __future__ import ( - print_function, - unicode_literals, - absolute_import, - division, -) from autobahn.twisted.resource import WebSocketResource from autobahn.twisted.websocket import ( diff --git a/src/allmydata/web/operations.py b/src/allmydata/web/operations.py index aedf33f37..0b71cc404 100644 --- a/src/allmydata/web/operations.py +++ b/src/allmydata/web/operations.py @@ -1,14 +1,6 @@ """ Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 import time from hyperlink import ( @@ -43,8 +35,9 @@ DAY = 24*HOUR class OphandleTable(resource.Resource, service.Service): """Renders /operations/%d.""" - - name = "operations" + # The type in Twisted for services is wrong in 22.10... + # https://github.com/twisted/twisted/issues/10135 + name = "operations" # type: ignore[assignment] UNCOLLECTED_HANDLE_LIFETIME = 4*DAY COLLECTED_HANDLE_LIFETIME = 1*DAY diff --git a/src/allmydata/web/private.py b/src/allmydata/web/private.py index 4f59be33a..4410399b8 100644 --- a/src/allmydata/web/private.py +++ b/src/allmydata/web/private.py @@ -1,14 +1,6 @@ """ Ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 import attr diff --git a/src/allmydata/web/root.py b/src/allmydata/web/root.py index 1debc1d10..090f706f5 100644 --- a/src/allmydata/web/root.py +++ b/src/allmydata/web/root.py @@ -1,25 +1,13 @@ """ Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2, PY3 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 - -import os import time from urllib.parse import quote as urlquote from hyperlink import DecodedURL, URL -from pkg_resources import resource_filename from twisted.web import ( http, resource, - static, ) from twisted.web.util import redirectTo, Redirect from twisted.python.filepath import FilePath @@ -54,6 +42,7 @@ from allmydata.web.common import ( render_time_delta, render_time, render_time_attr, + add_static_children, ) from allmydata.web.private import ( create_private_tree, @@ -251,15 +240,10 @@ class Root(MultiFormatResource): self.putChild(b"named", FileHandler(client)) self.putChild(b"status", status.Status(client.get_history())) self.putChild(b"statistics", status.Statistics(client.stats_provider)) - static_dir = resource_filename("allmydata.web", "static") - for filen in os.listdir(static_dir): - child_path = filen - if PY3: - child_path = filen.encode("utf-8") - self.putChild(child_path, static.File(os.path.join(static_dir, filen))) - self.putChild(b"report_incident", IncidentReporter()) + add_static_children(self) + @exception_to_child def getChild(self, path, request): if not path: @@ -297,14 +281,12 @@ class Root(MultiFormatResource): } return json.dumps(result, indent=1) + "\n" - def _describe_known_servers(self, broker): - return sorted(list( + return list( self._describe_server(server) for server in broker.get_known_servers() - ), key=lambda o: sorted(o.items())) - + ) def _describe_server(self, server): status = server.get_connection_status() diff --git a/src/allmydata/web/status.py b/src/allmydata/web/status.py index 65647f491..1737a4d1b 100644 --- a/src/allmydata/web/status.py 
+++ b/src/allmydata/web/status.py @@ -2,16 +2,6 @@ Ported to Python 3. """ -from __future__ import division -from __future__ import absolute_import -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 -from past.builtins import long - import itertools import hashlib import re @@ -550,7 +540,7 @@ class DownloadStatusElement(Element): length = r_ev["length"] bytes_returned = r_ev["bytes_returned"] decrypt_time = "" - if bytes: + if bytes_returned: decrypt_time = self._rate_and_time(bytes_returned, r_ev["decrypt_time"]) speed, rtt = "","" if r_ev["finish_time"] is not None: @@ -1401,7 +1391,7 @@ class StatusElement(Element): size = op.get_size() if size is None: size = "(unknown)" - elif isinstance(size, (int, long, float)): + elif isinstance(size, (int, float)): size = abbreviate_size(size) result["total_size"] = size @@ -1616,30 +1606,30 @@ class StatisticsElement(Element): @renderer def uploads(self, req, tag): files = self._stats["counters"].get("uploader.files_uploaded", 0) - bytes = self._stats["counters"].get("uploader.bytes_uploaded", 0) + bytes_uploaded = self._stats["counters"].get("uploader.bytes_uploaded", 0) return tag(("%s files / %s bytes (%s)" % - (files, bytes, abbreviate_size(bytes)))) + (files, bytes_uploaded, abbreviate_size(bytes_uploaded)))) @renderer def downloads(self, req, tag): files = self._stats["counters"].get("downloader.files_downloaded", 0) - bytes = self._stats["counters"].get("downloader.bytes_downloaded", 0) + bytes_uploaded = self._stats["counters"].get("downloader.bytes_downloaded", 0) return tag("%s files / %s bytes (%s)" % - (files, bytes, abbreviate_size(bytes))) + (files, bytes_uploaded, abbreviate_size(bytes_uploaded))) @renderer def publishes(self, req, tag): files = 
self._stats["counters"].get("mutable.files_published", 0) - bytes = self._stats["counters"].get("mutable.bytes_published", 0) - return tag("%s files / %s bytes (%s)" % (files, bytes, - abbreviate_size(bytes))) + bytes_uploaded = self._stats["counters"].get("mutable.bytes_published", 0) + return tag("%s files / %s bytes (%s)" % (files, bytes_uploaded, + abbreviate_size(bytes_uploaded))) @renderer def retrieves(self, req, tag): files = self._stats["counters"].get("mutable.files_retrieved", 0) - bytes = self._stats["counters"].get("mutable.bytes_retrieved", 0) - return tag("%s files / %s bytes (%s)" % (files, bytes, - abbreviate_size(bytes))) + bytes_uploaded = self._stats["counters"].get("mutable.bytes_retrieved", 0) + return tag("%s files / %s bytes (%s)" % (files, bytes_uploaded, + abbreviate_size(bytes_uploaded))) @renderer def raw(self, req, tag): diff --git a/src/allmydata/web/storage.py b/src/allmydata/web/storage.py index f2f021a15..aeefcf62a 100644 --- a/src/allmydata/web/storage.py +++ b/src/allmydata/web/storage.py @@ -1,14 +1,6 @@ """ Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 import time from twisted.python.filepath import FilePath @@ -256,8 +248,8 @@ class StorageStatusElement(Element): if so_far["corrupt-shares"]: add("Corrupt shares:", - T.ul( (T.li( ["SI %s shnum %d" % corrupt_share - for corrupt_share in so_far["corrupt-shares"] ] + T.ul( (T.li( ["SI %s shnum %d" % (si, shnum) + for si, shnum in so_far["corrupt-shares"] ] )))) return tag("Current cycle:", p) @@ -267,7 +259,8 @@ class StorageStatusElement(Element): h = lc.get_state()["history"] if not h: return "" - last = h[max(h.keys())] + biggest = str(max(int(k) for k in h.keys())) + last = h[biggest] start, end = last["cycle-start-finish-times"] tag("Last complete cycle (which took %s and finished %s ago)" @@ -290,8 +283,8 @@ class StorageStatusElement(Element): if last["corrupt-shares"]: add("Corrupt shares:", - T.ul( (T.li( ["SI %s shnum %d" % corrupt_share - for corrupt_share in last["corrupt-shares"] ] + T.ul( (T.li( ["SI %s shnum %d" % (si, shnum) + for si, shnum in last["corrupt-shares"] ] )))) return tag(p) diff --git a/src/allmydata/web/storage_plugins.py b/src/allmydata/web/storage_plugins.py index 41bed9d81..ad448ccdd 100644 --- a/src/allmydata/web/storage_plugins.py +++ b/src/allmydata/web/storage_plugins.py @@ -4,14 +4,6 @@ of all enabled storage client plugins. Ported to Python 3. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 from twisted.web.resource import ( Resource, diff --git a/src/allmydata/web/unlinked.py b/src/allmydata/web/unlinked.py index 425622496..26c41c7be 100644 --- a/src/allmydata/web/unlinked.py +++ b/src/allmydata/web/unlinked.py @@ -1,14 +1,7 @@ """ Ported to Python 3. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 +from __future__ import annotations from urllib.parse import quote as urlquote @@ -25,6 +18,7 @@ from twisted.web.template import ( from allmydata.immutable.upload import FileHandle from allmydata.mutable.publish import MutableFileHandle from allmydata.web.common import ( + get_keypair, get_arg, boolean_of_arg, convert_children_json, @@ -48,7 +42,8 @@ def PUTUnlinkedSSK(req, client, version): # SDMF: files are small, and we can only upload data req.content.seek(0) data = MutableFileHandle(req.content) - d = client.create_mutable_file(data, version=version) + keypair = get_keypair(req) + d = client.create_mutable_file(data, version=version, unique_keypair=keypair) d.addCallback(lambda n: n.get_uri()) return d @@ -165,7 +160,7 @@ def POSTUnlinkedCreateDirectory(req, client): mt = None if file_format: mt = get_mutable_type(file_format) - d = client.create_dirnode(version=mt) + d = client.create_dirnode(version=mt, unique_keypair=get_keypair(req)) redirect = get_arg(req, 
"redirect_to_result", "false") if boolean_of_arg(redirect): def _then_redir(res): @@ -183,7 +178,7 @@ def POSTUnlinkedCreateDirectoryWithChildren(req, client): req.content.seek(0) kids_json = req.content.read() kids = convert_children_json(client.nodemaker, kids_json) - d = client.create_dirnode(initial_children=kids) + d = client.create_dirnode(initial_children=kids, unique_keypair=get_keypair(req)) redirect = get_arg(req, "redirect_to_result", "false") if boolean_of_arg(redirect): def _then_redir(res): diff --git a/src/allmydata/webish.py b/src/allmydata/webish.py index 9b63a220c..ec5ad64c0 100644 --- a/src/allmydata/webish.py +++ b/src/allmydata/webish.py @@ -1,18 +1,12 @@ """ -Ported to Python 3. +General web server-related utilities. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from future.utils import PY2 -if PY2: - from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 +from __future__ import annotations from six import ensure_str - +from typing import IO, Callable, Optional import re, time, tempfile +from urllib.parse import parse_qsl, urlencode from cgi import ( FieldStorage, @@ -45,40 +39,37 @@ from .web.storage_plugins import ( ) -if PY2: - FileUploadFieldStorage = FieldStorage -else: - class FileUploadFieldStorage(FieldStorage): - """ - Do terrible things to ensure files are still bytes. +class FileUploadFieldStorage(FieldStorage): + """ + Do terrible things to ensure files are still bytes. - On Python 2, uploaded files were always bytes. On Python 3, there's a - heuristic: if the filename is set on a field, it's assumed to be a file - upload and therefore bytes. If no filename is set, it's Unicode. + On Python 2, uploaded files were always bytes. 
On Python 3, there's a + heuristic: if the filename is set on a field, it's assumed to be a file + upload and therefore bytes. If no filename is set, it's Unicode. - Unfortunately, we always want it to be bytes, and Tahoe-LAFS also - enables setting the filename not via the MIME filename, but via a - separate field called "name". + Unfortunately, we always want it to be bytes, and Tahoe-LAFS also + enables setting the filename not via the MIME filename, but via a + separate field called "name". - Thus we need to do this ridiculous workaround. Mypy doesn't like it - either, thus the ``# type: ignore`` below. + Thus we need to do this ridiculous workaround. Mypy doesn't like it + either, thus the ``# type: ignore`` below. - Source for idea: - https://mail.python.org/pipermail/python-dev/2017-February/147402.html - """ - @property # type: ignore - def filename(self): - if self.name == "file" and not self._mime_filename: - # We use the file field to upload files, see directory.py's - # _POST_upload. Lack of _mime_filename means we need to trick - # FieldStorage into thinking there is a filename so it'll - # return bytes. - return "unknown-filename" - return self._mime_filename + Source for idea: + https://mail.python.org/pipermail/python-dev/2017-February/147402.html + """ + @property # type: ignore + def filename(self): + if self.name == "file" and not self._mime_filename: + # We use the file field to upload files, see directory.py's + # _POST_upload. Lack of _mime_filename means we need to trick + # FieldStorage into thinking there is a filename so it'll + # return bytes. 
+ return "unknown-filename" + return self._mime_filename - @filename.setter - def filename(self, value): - self._mime_filename = value + @filename.setter + def filename(self, value): + self._mime_filename = value class TahoeLAFSRequest(Request, object): @@ -114,7 +105,8 @@ class TahoeLAFSRequest(Request, object): self.path, argstring = x self.args = parse_qs(argstring, 1) - if self.method == b'POST': + content_type = (self.requestHeaders.getRawHeaders("content-type") or [""])[0] + if self.method == b'POST' and content_type.split(";")[0] in ("multipart/form-data", "application/x-www-form-urlencoded"): # We use FieldStorage here because it performs better than # cgi.parse_multipart(self.content, pdict) which is what # twisted.web.http.Request uses. @@ -179,12 +171,7 @@ def _logFormatter(logDateTime, request): queryargs = b"" else: path, queryargs = x - # there is a form handler which redirects POST /uri?uri=FOO into - # GET /uri/FOO so folks can paste in non-HTTP-prefixed uris. Make - # sure we censor these too. - if queryargs.startswith(b"uri="): - queryargs = b"uri=[CENSORED]" - queryargs = b"?" + queryargs + queryargs = b"?" + censor(queryargs) if path.startswith(b"/uri/"): path = b"/uri/[CENSORED]" elif path.startswith(b"/file/"): @@ -206,34 +193,74 @@ def _logFormatter(logDateTime, request): ) +def censor(queryargs: bytes) -> bytes: + """ + Replace potentially sensitive values in query arguments with a + constant string. + """ + args = parse_qsl(queryargs.decode("ascii"), keep_blank_values=True, encoding="utf8") + result = [] + for k, v in args: + if k == "uri": + # there is a form handler which redirects POST /uri?uri=FOO into + # GET /uri/FOO so folks can paste in non-HTTP-prefixed uris. Make + # sure we censor these. + v = "[CENSORED]" + elif k == "private-key": + # Likewise, sometimes a private key is supplied with mutable + # creation. + v = "[CENSORED]" + + result.append((k, v)) + + # Customize safe to try to leave our markers intact. 
+ return urlencode(result, safe="[]").encode("ascii") + + +def anonymous_tempfile_factory(tempdir: bytes) -> Callable[[], IO[bytes]]: + """ + Create a no-argument callable for creating a new temporary file in the + given directory. + + :param tempdir: The directory in which temporary files with be created. + + :return: The callable. + """ + return lambda: tempfile.TemporaryFile(dir=tempdir) + + class TahoeLAFSSite(Site, object): """ The HTTP protocol factory used by Tahoe-LAFS. Among the behaviors provided: - * A configurable temporary directory where large request bodies can be - written so they don't stay in memory. + * A configurable temporary file factory for large request bodies to avoid + keeping them in memory. * A log formatter that writes some access logs but omits capability strings to help keep them secret. """ requestFactory = TahoeLAFSRequest - def __init__(self, tempdir, *args, **kwargs): + def __init__(self, make_tempfile: Callable[[], IO[bytes]], *args, **kwargs): Site.__init__(self, *args, logFormatter=_logFormatter, **kwargs) - self._tempdir = tempdir + assert callable(make_tempfile) + with make_tempfile(): + pass + self._make_tempfile = make_tempfile - def getContentFile(self, length): + def getContentFile(self, length: Optional[int]) -> IO[bytes]: if length is None or length >= 1024 * 1024: - return tempfile.TemporaryFile(dir=self._tempdir) + return self._make_tempfile() return BytesIO() - class WebishServer(service.MultiService): - name = "webish" + # The type in Twisted for services is wrong in 22.10... 
+ # https://github.com/twisted/twisted/issues/10135 + name = "webish" # type: ignore[assignment] - def __init__(self, client, webport, tempdir, nodeurl_path=None, staticdir=None, + def __init__(self, client, webport, make_tempfile, nodeurl_path=None, staticdir=None, clock=None, now_fn=time.time): service.MultiService.__init__(self) # the 'data' argument to all render() methods default to the Client @@ -243,7 +270,7 @@ class WebishServer(service.MultiService): # time in a deterministic manner. self.root = root.Root(client, clock, now_fn) - self.buildServer(webport, tempdir, nodeurl_path, staticdir) + self.buildServer(webport, make_tempfile, nodeurl_path, staticdir) # If set, clock is a twisted.internet.task.Clock that the tests # use to test ophandle expiration. @@ -253,9 +280,9 @@ class WebishServer(service.MultiService): self.root.putChild(b"storage-plugins", StoragePlugins(client)) - def buildServer(self, webport, tempdir, nodeurl_path, staticdir): + def buildServer(self, webport, make_tempfile, nodeurl_path, staticdir): self.webport = webport - self.site = TahoeLAFSSite(tempdir, self.root) + self.site = TahoeLAFSSite(make_tempfile, self.root) self.staticdir = staticdir # so tests can check if staticdir: self.root.putChild(b"static", static.File(staticdir)) @@ -333,4 +360,4 @@ class IntroducerWebishServer(WebishServer): def __init__(self, introducer, webport, nodeurl_path=None, staticdir=None): service.MultiService.__init__(self) self.root = introweb.IntroducerRoot(introducer) - self.buildServer(webport, tempfile.tempdir, nodeurl_path, staticdir) + self.buildServer(webport, tempfile.TemporaryFile, nodeurl_path, staticdir) diff --git a/src/allmydata/windows/fixups.py b/src/allmydata/windows/fixups.py index 53eb14d53..1b204ccf4 100644 --- a/src/allmydata/windows/fixups.py +++ b/src/allmydata/windows/fixups.py @@ -1,8 +1,3 @@ -from __future__ import print_function - -from future.utils import PY3 -from past.builtins import unicode - # This code isn't loadable or 
sensible except on Windows. Importers all know # this and are careful. Normally I would just let an import error from ctypes # explain any mistakes but Mypy also needs some help here. This assert @@ -16,104 +11,19 @@ from past.builtins import unicode import sys assert sys.platform == "win32" -import codecs -from functools import partial - -from ctypes import WINFUNCTYPE, windll, POINTER, c_int, WinError, byref, get_last_error -from ctypes.wintypes import BOOL, HANDLE, DWORD, LPWSTR, LPCWSTR, LPVOID - # from win32api import ( - STD_OUTPUT_HANDLE, - STD_ERROR_HANDLE, SetErrorMode, - - # - # HANDLE WINAPI GetStdHandle(DWORD nStdHandle); - # returns INVALID_HANDLE_VALUE, NULL, or a valid handle - GetStdHandle, ) from win32con import ( SEM_FAILCRITICALERRORS, SEM_NOOPENFILEERRORBOX, ) -from win32file import ( - INVALID_HANDLE_VALUE, - FILE_TYPE_CHAR, - - # - # DWORD WINAPI GetFileType(DWORD hFile); - GetFileType, -) - -from allmydata.util import ( - log, -) - # Keep track of whether `initialize` has run so we don't do any of the # initialization more than once. _done = False -# -# pywin32 for Python 2.7 does not bind any of these *W variants so we do it -# ourselves. 
-# - -# -# BOOL WINAPI WriteConsoleW(HANDLE hOutput, LPWSTR lpBuffer, DWORD nChars, -# LPDWORD lpCharsWritten, LPVOID lpReserved); -WriteConsoleW = WINFUNCTYPE( - BOOL, HANDLE, LPWSTR, DWORD, POINTER(DWORD), LPVOID, - use_last_error=True -)(("WriteConsoleW", windll.kernel32)) - -# -GetCommandLineW = WINFUNCTYPE( - LPWSTR, - use_last_error=True -)(("GetCommandLineW", windll.kernel32)) - -# -CommandLineToArgvW = WINFUNCTYPE( - POINTER(LPWSTR), LPCWSTR, POINTER(c_int), - use_last_error=True -)(("CommandLineToArgvW", windll.shell32)) - -# -# BOOL WINAPI GetConsoleMode(HANDLE hConsole, LPDWORD lpMode); -GetConsoleMode = WINFUNCTYPE( - BOOL, HANDLE, POINTER(DWORD), - use_last_error=True -)(("GetConsoleMode", windll.kernel32)) - - -STDOUT_FILENO = 1 -STDERR_FILENO = 2 - -def get_argv(): - """ - :return [unicode]: The argument list this process was invoked with, as - unicode. - - Python 2 does not do a good job exposing this information in - ``sys.argv`` on Windows so this code re-retrieves the underlying - information using Windows API calls and massages it into the right - shape. - """ - command_line = GetCommandLineW() - argc = c_int(0) - argv_unicode = CommandLineToArgvW(command_line, byref(argc)) - if argv_unicode is None: - raise WinError(get_last_error()) - - # Convert it to a normal Python list - return list( - argv_unicode[i] - for i - in range(argc.value) - ) - def initialize(): global _done @@ -123,188 +33,3 @@ def initialize(): _done = True SetErrorMode(SEM_FAILCRITICALERRORS | SEM_NOOPENFILEERRORBOX) - - if PY3: - # The rest of this appears to be Python 2-specific - return - - original_stderr = sys.stderr - - # If any exception occurs in this code, we'll probably try to print it on stderr, - # which makes for frustrating debugging if stderr is directed to our wrapper. - # So be paranoid about catching errors and reporting them to original_stderr, - # so that we can at least see them. 
- def _complain(output_file, message): - print(isinstance(message, str) and message or repr(message), file=output_file) - log.msg(message, level=log.WEIRD) - - _complain = partial(_complain, original_stderr) - - # Work around . - codecs.register(lambda name: name == 'cp65001' and codecs.lookup('utf-8') or None) - - # Make Unicode console output work independently of the current code page. - # This also fixes . - # Credit to Michael Kaplan - # and TZOmegaTZIOY - # . - try: - old_stdout_fileno = None - old_stderr_fileno = None - if hasattr(sys.stdout, 'fileno'): - old_stdout_fileno = sys.stdout.fileno() - if hasattr(sys.stderr, 'fileno'): - old_stderr_fileno = sys.stderr.fileno() - - real_stdout = (old_stdout_fileno == STDOUT_FILENO) - real_stderr = (old_stderr_fileno == STDERR_FILENO) - - if real_stdout: - hStdout = GetStdHandle(STD_OUTPUT_HANDLE) - if not a_console(hStdout): - real_stdout = False - - if real_stderr: - hStderr = GetStdHandle(STD_ERROR_HANDLE) - if not a_console(hStderr): - real_stderr = False - - if real_stdout: - sys.stdout = UnicodeOutput(hStdout, None, STDOUT_FILENO, '', _complain) - else: - sys.stdout = UnicodeOutput(None, sys.stdout, old_stdout_fileno, '', _complain) - - if real_stderr: - sys.stderr = UnicodeOutput(hStderr, None, STDERR_FILENO, '', _complain) - else: - sys.stderr = UnicodeOutput(None, sys.stderr, old_stderr_fileno, '', _complain) - except Exception as e: - _complain("exception %r while fixing up sys.stdout and sys.stderr" % (e,)) - - argv = list(arg.encode("utf-8") for arg in get_argv()) - - # Take only the suffix with the same number of arguments as sys.argv. - # This accounts for anything that can cause initial arguments to be stripped, - # for example, the Python interpreter or any options passed to it, or runner - # scripts such as 'coverage run'. It works even if there are no such arguments, - # as in the case of a frozen executable created by bb-freeze or similar. - # - # Also, modify sys.argv in place. 
If any code has already taken a - # reference to the original argument list object then this ensures that - # code sees the new values. This reliance on mutation of shared state is, - # of course, awful. Why does this function even modify sys.argv? Why not - # have a function that *returns* the properly initialized argv as a new - # list? I don't know. - # - # At least Python 3 gets sys.argv correct so before very much longer we - # should be able to fix this bad design by deleting it. - sys.argv[:] = argv[-len(sys.argv):] - - -def a_console(handle): - """ - :return: ``True`` if ``handle`` refers to a console, ``False`` otherwise. - """ - if handle == INVALID_HANDLE_VALUE: - return False - return ( - # It's a character file (eg a printer or a console) - GetFileType(handle) == FILE_TYPE_CHAR and - # Checking the console mode doesn't fail (thus it's a console) - GetConsoleMode(handle, byref(DWORD())) != 0 - ) - - -class UnicodeOutput(object): - """ - ``UnicodeOutput`` is a file-like object that encodes unicode to UTF-8 and - writes it to another file or writes unicode natively to the Windows - console. - """ - def __init__(self, hConsole, stream, fileno, name, _complain): - """ - :param hConsole: ``None`` or a handle on the console to which to write - unicode. Mutually exclusive with ``stream``. - - :param stream: ``None`` or a file-like object to which to write bytes. - - :param fileno: A result to hand back from method of the same name. - - :param name: A human-friendly identifier for this output object. - - :param _complain: A one-argument callable which accepts bytes to be - written when there's a problem. Care should be taken to not make - this do a write on this object. 
- """ - self._hConsole = hConsole - self._stream = stream - self._fileno = fileno - self.closed = False - self.softspace = False - self.mode = 'w' - self.encoding = 'utf-8' - self.name = name - - self._complain = _complain - - from allmydata.util.encodingutil import canonical_encoding - from allmydata.util import log - if hasattr(stream, 'encoding') and canonical_encoding(stream.encoding) != 'utf-8': - log.msg("%s: %r had encoding %r, but we're going to write UTF-8 to it" % - (name, stream, stream.encoding), level=log.CURIOUS) - self.flush() - - def isatty(self): - return False - def close(self): - # don't really close the handle, that would only cause problems - self.closed = True - def fileno(self): - return self._fileno - def flush(self): - if self._hConsole is None: - try: - self._stream.flush() - except Exception as e: - self._complain("%s.flush: %r from %r" % (self.name, e, self._stream)) - raise - - def write(self, text): - try: - if self._hConsole is None: - # There is no Windows console available. That means we are - # responsible for encoding the unicode to a byte string to - # write it to a Python file object. - if isinstance(text, unicode): - text = text.encode('utf-8') - self._stream.write(text) - else: - # There is a Windows console available. That means Windows is - # responsible for dealing with the unicode itself. - if not isinstance(text, unicode): - text = str(text).decode('utf-8') - remaining = len(text) - while remaining > 0: - n = DWORD(0) - # There is a shorter-than-documented limitation on the - # length of the string passed to WriteConsoleW (see - # #1232). 
- retval = WriteConsoleW(self._hConsole, text, min(remaining, 10000), byref(n), None) - if retval == 0: - raise IOError("WriteConsoleW failed with WinError: %s" % (WinError(get_last_error()),)) - if n.value == 0: - raise IOError("WriteConsoleW returned %r, n.value = 0" % (retval,)) - remaining -= n.value - if remaining == 0: break - text = text[n.value:] - except Exception as e: - self._complain("%s.write: %r" % (self.name, e)) - raise - - def writelines(self, lines): - try: - for line in lines: - self.write(line) - except Exception as e: - self._complain("%s.writelines: %r" % (self.name, e)) - raise diff --git a/tox.ini b/tox.ini index 610570be5..5748928fe 100644 --- a/tox.ini +++ b/tox.ini @@ -7,63 +7,56 @@ # the tox-gh-actions package. [gh-actions] python = - 2.7: py27-coverage,codechecks - 3.6: py36-coverage - 3.7: py37-coverage,typechecks,codechecks3 3.8: py38-coverage 3.9: py39-coverage - pypy-3.7: pypy3 + 3.10: py310-coverage + 3.11: py311-coverage + 3.12: py312-coverage + pypy-3.8: pypy38 + pypy-3.9: pypy39 [pytest] twisted = 1 [tox] -envlist = typechecks,codechecks,codechecks3,py{27,36,37,38,39}-{coverage},pypy27,pypy3,integration,integration3 -minversion = 2.4 +envlist = typechecks,codechecks,py{38,39,310,311,312}-{coverage},pypy27,pypy38,pypy39,integration +minversion = 4 [testenv] -passenv = TAHOE_LAFS_* PIP_* SUBUNITREPORTER_* USERPROFILE HOMEDRIVE HOMEPATH -# Get "certifi" to avoid bug #2913. Basically if a `setup_requires=...` causes -# a package to be installed (with setuptools) then it'll fail on certain -# platforms (travis's OX-X 10.12, Slackware 14.2) because PyPI's TLS -# requirements (TLS >= 1.2) are incompatible with the old TLS clients -# available to those systems. Installing it ahead of time (with pip) avoids -# this problem. 
-deps = - # Pin all of these versions for the same reason you ever want to pin - # anything: to prevent new releases with regressions from introducing - # spurious failures into CI runs for whatever development work is - # happening at the time. The versions selected here are just the current - # versions at the time. Bumping them to keep up with future releases is - # fine as long as those releases are known to actually work. - # - # For now these are versions that support Python 2. - pip==20.3.4 - setuptools==44.1.1 - wheel==0.36.2 - subunitreporter==19.3.2 - # As an exception, we don't pin certifi because it contains CA - # certificates which necessarily change over time. Pinning this is - # guaranteed to cause things to break eventually as old certificates - # expire and as new ones are used in the wild that aren't present in - # whatever version we pin. Hopefully there won't be functionality - # regressions in new releases of this package that cause us the kind of - # suffering we're trying to avoid with the above pins. - certifi - # VCS hooks support - py36,!coverage: pre-commit - -# We add usedevelop=False because testing against a true installation gives -# more useful results. +# Install code the real way, for maximum realism. usedevelop = False -# We use extras=test to get things like "mock" that are required for our unit -# tests. -extras = test + +passenv = TAHOE_LAFS_*,PIP_*,SUBUNITREPORTER_*,USERPROFILE,HOMEDRIVE,HOMEPATH,COLUMNS +deps = + # We pull in certify *here* to avoid bug #2913. Basically if a + # `setup_requires=...` causes a package to be installed (with setuptools) + # then it'll fail on certain platforms (travis's OX-X 10.12, Slackware + # 14.2) because PyPI's TLS requirements (TLS >= 1.2) are incompatible with + # the old TLS clients available to those systems. Installing it ahead of + # time (with pip) avoids this problem. 
+ # + # We don't pin an exact version of it because it contains CA certificates + # which necessarily change over time. Pinning this is guaranteed to cause + # things to break eventually as old certificates expire and as new ones + # are used in the wild that aren't present in whatever version we pin. + # Hopefully there won't be functionality regressions in new releases of + # this package that cause us the kind of suffering we're trying to avoid + # with the above pins. + certifi + +extras = + # Get general testing environment dependencies so we can run the tests + # how we like. + testenv + + # And get all of the test suite's actual direct Python dependencies. + test setenv = # Define TEST_SUITE in the environment as an aid to constructing the # correct test command below. TEST_SUITE = allmydata + COLUMNS = 80 commands = # As an aid to debugging, dump all of the Python packages and their @@ -87,63 +80,39 @@ commands = coverage: python -b -m coverage run -m twisted.trial {env:TAHOE_LAFS_TRIAL_ARGS:--rterrors --reporter=timing} {posargs:{env:TEST_SUITE}} coverage: coverage combine coverage: coverage xml - coverage: coverage report [testenv:integration] -setenv = - COVERAGE_PROCESS_START=.coveragerc -commands = - # NOTE: 'run with "py.test --keep-tempdir -s -v integration/" to debug failures' - py.test --timeout=1800 --coverage -v {posargs:integration} - coverage combine - coverage report - - -[testenv:integration3] +usedevelop = False basepython = python3 +platform = mylinux: linux + mymacos: darwin + mywindows: win32 setenv = COVERAGE_PROCESS_START=.coveragerc commands = - python --version # NOTE: 'run with "py.test --keep-tempdir -s -v integration/" to debug failures' - py.test --timeout=1800 --coverage -v {posargs:integration} + py.test --timeout=1800 --coverage -s -v {posargs:integration} coverage combine - coverage report -# Once 2.7 is dropped, this can be removed. 
It just does flake8 with Python 2 -# since that can give different results than flake8 on Python 3. [testenv:codechecks] -basepython = python2.7 -setenv = - # If no positional arguments are given, try to run the checks on the - # entire codebase, including various pieces of supporting code. - DEFAULT_FILES=src integration static misc setup.py -commands = - flake8 {posargs:{env:DEFAULT_FILES}} - - -[testenv:codechecks3] basepython = python3 +skip_install = true deps = - # Newer versions of PyLint have buggy configuration - # (https://github.com/PyCQA/pylint/issues/4574), so stick to old version - # for now. - pylint < 2.5 + # Pin a specific version so we get consistent outcomes; update this + # occasionally: + ruff == 0.1.6 + towncrier # On macOS, git inside of towncrier needs $HOME. passenv = HOME setenv = # If no positional arguments are given, try to run the checks on the # entire codebase, including various pieces of supporting code. - DEFAULT_FILES=src integration static misc setup.py + DEFAULT_FILES=src integration benchmarks static misc setup.py commands = - flake8 {posargs:{env:DEFAULT_FILES}} + ruff check {posargs:{env:DEFAULT_FILES}} python misc/coding_tools/check-umids.py {posargs:{env:DEFAULT_FILES}} python misc/coding_tools/check-debugging.py {posargs:{env:DEFAULT_FILES}} - python misc/coding_tools/find-trailing-spaces.py -r {posargs:{env:DEFAULT_FILES}} - # PyLint has other useful checks, might want to enable them: - # http://pylint.pycqa.org/en/latest/technical_reference/features.html - pylint --disable=all --enable=cell-var-from-loop {posargs:{env:DEFAULT_FILES}} # If towncrier.check fails, you forgot to add a towncrier news # fragment explaining the change in this branch. 
Create one at @@ -154,40 +123,43 @@ commands = [testenv:typechecks] basepython = python3 -skip_install = True deps = - mypy + mypy==1.8.0 mypy-zope types-mock types-six types-PyYAML - types-pkg_resources - git+https://github.com/warner/foolscap - # Twisted 21.2.0 introduces some type hints which we are not yet - # compatible with. - # https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3622 - twisted<21.2.0 -commands = mypy src + types-setuptools + types-pyOpenSSL + foolscap + # Upgrade when new releases come out: + Twisted==23.10.0 +commands = + # Different versions of Python have a different standard library, and we + # want to be compatible with all the variations. For speed's sake we only do + # the earliest and latest versions. + mypy --python-version=3.8 src + mypy --python-version=3.12 src [testenv:draftnews] -passenv = TAHOE_LAFS_* PIP_* SUBUNITREPORTER_* USERPROFILE HOMEDRIVE HOMEPATH +passenv = TAHOE_LAFS_*,PIP_*,SUBUNITREPORTER_*,USERPROFILE,HOMEDRIVE,HOMEPATH,COLUMNS deps = # see comment in [testenv] about "certifi" certifi - towncrier==21.3.0 + towncrier==23.11.0 commands = python -m towncrier --draft --config towncrier.toml [testenv:news] # On macOS, git invoked from Tox needs $HOME. 
-passenv = TAHOE_LAFS_* PIP_* SUBUNITREPORTER_* USERPROFILE HOMEDRIVE HOMEPATH HOME +passenv = TAHOE_LAFS_*,PIP_*,SUBUNITREPORTER_*,USERPROFILE,HOMEDRIVE,HOMEPATH,COLUMNS whitelist_externals = git deps = # see comment in [testenv] about "certifi" certifi - towncrier==21.3.0 + towncrier==23.11.0 commands = python -m towncrier --yes --config towncrier.toml # commit the changes @@ -206,17 +178,6 @@ commands = flogtool --version python misc/build_helpers/run-deprecations.py --package allmydata --warnings={env:TAHOE_LAFS_WARNINGS_LOG:_trial_temp/deprecation-warnings.log} trial {env:TAHOE_LAFS_TRIAL_ARGS:--rterrors} {posargs:allmydata} -[testenv:checkmemory] -commands = - rm -rf _test_memory - python src/allmydata/test/check_memory.py upload - python src/allmydata/test/check_memory.py upload-self - python src/allmydata/test/check_memory.py upload-POST - python src/allmydata/test/check_memory.py download - python src/allmydata/test/check_memory.py download-GET - python src/allmydata/test/check_memory.py download-GET-slow - python src/allmydata/test/check_memory.py receive - # Use 'tox -e docs' to check formatting and cross-references in docs .rst # files. The published docs are built by code run over at readthedocs.org, # which does not use this target (but does something similar). @@ -228,29 +189,19 @@ commands = # your web browser. [testenv:docs] -# we pin docutils because of https://sourceforge.net/p/docutils/bugs/301/ -# which asserts when it reads links to .svg files (e.g. about.rst) deps = - sphinx - docutils==0.12 - recommonmark - sphinx_rtd_theme + -r docs/requirements.txt # normal install is not needed for docs, and slows things down skip_install = True commands = sphinx-build -W -b html -d {toxinidir}/docs/_build/doctrees {toxinidir}/docs {toxinidir}/docs/_build/html [testenv:pyinstaller] -# We override this to pass --no-use-pep517 because pyinstaller (3.4, at least) -# is broken when this feature is enabled. 
-install_command = python -m pip install --no-use-pep517 {opts} {packages} extras = deps = {[testenv]deps} packaging - # PyInstaller 4.0 drops Python 2 support. When we finish porting to - # Python 3 we can reconsider this constraint. - pyinstaller < 4.0 + pyinstaller pefile ; platform_system == "Windows" # Setting PYTHONHASHSEED to a known value assists with reproducible builds. # See https://pyinstaller.readthedocs.io/en/stable/advanced-topics.html#creating-a-reproducible-build @@ -260,7 +211,8 @@ commands= pyinstaller -y --clean pyinstaller.spec [testenv:tarballs] +basepython = python3 deps = commands = python setup.py update_version - python setup.py sdist --formats=bztar,gztar,zip bdist_wheel + python setup.py sdist --formats=gztar bdist_wheel --universal diff --git a/ws_client.py b/ws_client.py index 15d717d42..b444a878a 100644 --- a/ws_client.py +++ b/ws_client.py @@ -1,4 +1,3 @@ -from __future__ import print_function import sys import json