Merge remote-tracking branch 'origin/master' into 3956-mutable-uploads

Itamar Turner-Trauring 2023-01-09 17:05:31 -05:00
commit 7c506057b5
23 changed files with 377 additions and 500 deletions


@@ -15,16 +15,11 @@ workflows:
ci: ci:
jobs: jobs:
# Start with jobs testing various platforms. # Start with jobs testing various platforms.
- "debian-10":
{}
- "debian-11": - "debian-11":
{} {}
- "ubuntu-20-04": - "ubuntu-20-04":
{} {}
- "ubuntu-18-04":
requires:
- "ubuntu-20-04"
# Equivalent to RHEL 8; CentOS 8 is dead. # Equivalent to RHEL 8; CentOS 8 is dead.
- "oraclelinux-8": - "oraclelinux-8":
@@ -85,11 +80,7 @@ workflows:
# Contexts are managed in the CircleCI web interface: # Contexts are managed in the CircleCI web interface:
# #
# https://app.circleci.com/settings/organization/github/tahoe-lafs/contexts # https://app.circleci.com/settings/organization/github/tahoe-lafs/contexts
- "build-image-debian-10": &DOCKERHUB_CONTEXT - "build-image-debian-11": &DOCKERHUB_CONTEXT
context: "dockerhub-auth"
- "build-image-debian-11":
<<: *DOCKERHUB_CONTEXT
- "build-image-ubuntu-18-04":
<<: *DOCKERHUB_CONTEXT <<: *DOCKERHUB_CONTEXT
- "build-image-ubuntu-20-04": - "build-image-ubuntu-20-04":
<<: *DOCKERHUB_CONTEXT <<: *DOCKERHUB_CONTEXT
@@ -167,12 +158,7 @@ jobs:
command: | command: |
dist/Tahoe-LAFS/tahoe --version dist/Tahoe-LAFS/tahoe --version
debian-10: &DEBIAN debian-11: &DEBIAN
docker:
- <<: *DOCKERHUB_AUTH
image: "tahoelafsci/debian:10-py3.7"
user: "nobody"
environment: &UTF_8_ENVIRONMENT environment: &UTF_8_ENVIRONMENT
# In general, the test suite is not allowed to fail while the job # In general, the test suite is not allowed to fail while the job
# succeeds. But you can set this to "yes" if you want it to be # succeeds. But you can set this to "yes" if you want it to be
@@ -184,7 +170,7 @@ jobs:
# filenames and argv). # filenames and argv).
LANG: "en_US.UTF-8" LANG: "en_US.UTF-8"
# Select a tox environment to run for this job. # Select a tox environment to run for this job.
TAHOE_LAFS_TOX_ENVIRONMENT: "py37" TAHOE_LAFS_TOX_ENVIRONMENT: "py39"
# Additional arguments to pass to tox. # Additional arguments to pass to tox.
TAHOE_LAFS_TOX_ARGS: "" TAHOE_LAFS_TOX_ARGS: ""
# The path in which test artifacts will be placed. # The path in which test artifacts will be placed.
@@ -252,15 +238,11 @@ jobs:
/tmp/venv/bin/codecov /tmp/venv/bin/codecov
fi fi
debian-11:
<<: *DEBIAN
docker: docker:
- <<: *DOCKERHUB_AUTH - <<: *DOCKERHUB_AUTH
image: "tahoelafsci/debian:11-py3.9" image: "tahoelafsci/debian:11-py3.9"
user: "nobody" user: "nobody"
environment:
<<: *UTF_8_ENVIRONMENT
TAHOE_LAFS_TOX_ENVIRONMENT: "py39"
# Restore later using PyPy3.8 # Restore later using PyPy3.8
# pypy27-buster: # pypy27-buster:
@@ -312,22 +294,6 @@ jobs:
- run: *SETUP_VIRTUALENV - run: *SETUP_VIRTUALENV
- run: *RUN_TESTS - run: *RUN_TESTS
ubuntu-18-04: &UBUNTU_18_04
<<: *DEBIAN
docker:
- <<: *DOCKERHUB_AUTH
image: "tahoelafsci/ubuntu:18.04-py3.7"
user: "nobody"
environment:
<<: *UTF_8_ENVIRONMENT
# The default trial args include --rterrors which is incompatible with
# this reporter on Python 3. So drop that and just specify the
# reporter.
TAHOE_LAFS_TRIAL_ARGS: "--reporter=subunitv2-file"
TAHOE_LAFS_TOX_ENVIRONMENT: "py37"
ubuntu-20-04: ubuntu-20-04:
<<: *DEBIAN <<: *DEBIAN
docker: docker:
@@ -380,7 +346,7 @@ jobs:
docker: docker:
# Run in a highly Nix-capable environment. # Run in a highly Nix-capable environment.
- <<: *DOCKERHUB_AUTH - <<: *DOCKERHUB_AUTH
image: "nixos/nix:2.3.16" image: "nixos/nix:2.10.3"
environment: environment:
# CACHIX_AUTH_TOKEN is manually set in the CircleCI web UI and # CACHIX_AUTH_TOKEN is manually set in the CircleCI web UI and
@@ -390,27 +356,21 @@ jobs:
steps: steps:
- "run": - "run":
# The nixos/nix image does not include ssh. Install it so the # Get cachix for Nix-friendly caching.
# `checkout` step will succeed. We also want cachix for
# Nix-friendly caching.
name: "Install Basic Dependencies" name: "Install Basic Dependencies"
command: | command: |
NIXPKGS="https://github.com/nixos/nixpkgs/archive/nixos-<<parameters.nixpkgs>>.tar.gz"
nix-env \ nix-env \
--file https://github.com/nixos/nixpkgs/archive/nixos-<<parameters.nixpkgs>>.tar.gz \ --file $NIXPKGS \
--install \ --install \
-A openssh cachix bash -A cachix bash
# Activate it for "binary substitution". This sets up
# configuration that lets Nix download something from the cache
# instead of building it locally, if possible.
cachix use "${CACHIX_NAME}"
- "checkout" - "checkout"
- run:
name: "Cachix setup"
# Record the store paths that exist before we did much. There's no
# reason to cache these, they're either in the image or have to be
# retrieved before we can use cachix to restore from cache.
command: |
cachix use "${CACHIX_NAME}"
nix path-info --all > /tmp/store-path-pre-build
- "run": - "run":
# The Nix package doesn't know how to do this part, unfortunately. # The Nix package doesn't know how to do this part, unfortunately.
name: "Generate version" name: "Generate version"
@@ -432,55 +392,26 @@ jobs:
# build a couple simple little dependencies that don't take # build a couple simple little dependencies that don't take
# advantage of multiple cores and we get a little speedup by doing # advantage of multiple cores and we get a little speedup by doing
# them in parallel. # them in parallel.
nix-build --cores 3 --max-jobs 2 --argstr pkgsVersion "nixpkgs-<<parameters.nixpkgs>>" source .circleci/lib.sh
cache_if_able nix-build \
--cores 3 \
--max-jobs 2 \
--argstr pkgsVersion "nixpkgs-<<parameters.nixpkgs>>"
- "run": - "run":
name: "Test" name: "Test"
command: | command: |
# Let it go somewhat wild for the test suite itself # Let it go somewhat wild for the test suite itself
nix-build --cores 8 --argstr pkgsVersion "nixpkgs-<<parameters.nixpkgs>>" tests.nix source .circleci/lib.sh
cache_if_able nix-build \
- run: --cores 8 \
# Send any new store objects to cachix. --argstr pkgsVersion "nixpkgs-<<parameters.nixpkgs>>" \
name: "Push to Cachix" tests.nix
when: "always"
command: |
# Cribbed from
# https://circleci.com/blog/managing-secrets-when-you-have-pull-requests-from-outside-contributors/
if [ -n "$CIRCLE_PR_NUMBER" ]; then
# I'm sure you're thinking "CIRCLE_PR_NUMBER must just be the
# number of the PR being built". Sorry, dear reader, you have
# guessed poorly. It is also conditionally set based on whether
# this is a PR from a fork or not.
#
# https://circleci.com/docs/2.0/env-vars/#built-in-environment-variables
echo "Skipping Cachix push for forked PR."
else
# If this *isn't* a build from a fork then we have the Cachix
# write key in our environment and we can push any new objects
# to Cachix.
#
# To decide what to push, we inspect the list of store objects
# that existed before and after we did most of our work. Any
# that are new after the work are probably useful to have
# around, so push them to the cache. We exclude all derivation
# objects (.drv files) because they're cheap to reconstruct and
# by the time you know their cache key you've already done all
# the work anyway.
#
# This shell expression for finding the objects and pushing them
# was from the Cachix docs:
#
# https://docs.cachix.org/continuous-integration-setup/circleci.html
#
# but they seem to have removed it now.
bash -c "comm -13 <(sort /tmp/store-path-pre-build | grep -v '\.drv$') <(nix path-info --all | grep -v '\.drv$' | sort) | cachix push $CACHIX_NAME"
fi
typechecks: typechecks:
docker: docker:
- <<: *DOCKERHUB_AUTH - <<: *DOCKERHUB_AUTH
image: "tahoelafsci/ubuntu:18.04-py3.7" image: "tahoelafsci/ubuntu:20.04-py3.9"
steps: steps:
- "checkout" - "checkout"
@@ -492,7 +423,7 @@ jobs:
docs: docs:
docker: docker:
- <<: *DOCKERHUB_AUTH - <<: *DOCKERHUB_AUTH
image: "tahoelafsci/ubuntu:18.04-py3.7" image: "tahoelafsci/ubuntu:20.04-py3.9"
steps: steps:
- "checkout" - "checkout"
@@ -543,15 +474,6 @@ jobs:
docker push tahoelafsci/${DISTRO}:${TAG}-py${PYTHON_VERSION} docker push tahoelafsci/${DISTRO}:${TAG}-py${PYTHON_VERSION}
build-image-debian-10:
<<: *BUILD_IMAGE
environment:
DISTRO: "debian"
TAG: "10"
PYTHON_VERSION: "3.7"
build-image-debian-11: build-image-debian-11:
<<: *BUILD_IMAGE <<: *BUILD_IMAGE
@@ -560,14 +482,6 @@ jobs:
TAG: "11" TAG: "11"
PYTHON_VERSION: "3.9" PYTHON_VERSION: "3.9"
build-image-ubuntu-18-04:
<<: *BUILD_IMAGE
environment:
DISTRO: "ubuntu"
TAG: "18.04"
PYTHON_VERSION: "3.7"
build-image-ubuntu-20-04: build-image-ubuntu-20-04:
<<: *BUILD_IMAGE <<: *BUILD_IMAGE

.circleci/lib.sh (new file, 26 lines)

@@ -0,0 +1,26 @@
# Run a command, enabling cache writes to cachix if possible. The command is
# accepted as a variable number of positional arguments (like argv).
function cache_if_able() {
# The `cachix watch-exec ...` command does our cache population. When it sees
# something added to the store (I guess) it pushes it to the named cache.
#
# We can only *push* to it if we have a CACHIX_AUTH_TOKEN, though.
# In-repo jobs will get this from CircleCI configuration but jobs from
# forks may not.
echo "Building PR from user/org: ${CIRCLE_PROJECT_USERNAME}"
if [ -v CACHIX_AUTH_TOKEN ]; then
echo "Cachix credentials present; will attempt to write to cache."
cachix watch-exec "${CACHIX_NAME}" -- "$@"
else
# If we're building from a forked repository then we're allowed to
# not have the credentials (but it's also fine if the owner of the
# fork supplied their own).
if [ "${CIRCLE_PROJECT_USERNAME}" == "tahoe-lafs" ]; then
echo "Required credentials (CACHIX_AUTH_TOKEN) are missing."
return 1
else
echo "Cachix credentials missing; will not attempt cache writes."
"$@"
fi
fi
}
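A minimal usage sketch of this helper, assuming CACHIX_NAME is supplied by the CircleCI environment as in the config above; the wrapped command runs identically whether or not cache writes are enabled (arguments taken from the job definitions):

    source .circleci/lib.sh
    # CACHIX_NAME must already be set in the environment.
    cache_if_able nix-build --cores 3 --max-jobs 2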


@@ -48,21 +48,20 @@ jobs:
- windows-latest - windows-latest
- ubuntu-latest - ubuntu-latest
python-version: python-version:
- "3.7"
- "3.8" - "3.8"
- "3.9" - "3.9"
- "3.10" - "3.10"
include: include:
# On macOS don't bother with 3.7-3.8, just to get faster builds. # On macOS don't bother with 3.8, just to get faster builds.
- os: macos-latest - os: macos-latest
python-version: "3.9" python-version: "3.9"
- os: macos-latest - os: macos-latest
python-version: "3.10" python-version: "3.10"
# We only support PyPy on Linux at the moment. # We only support PyPy on Linux at the moment.
- os: ubuntu-latest
python-version: "pypy-3.7"
- os: ubuntu-latest - os: ubuntu-latest
python-version: "pypy-3.8" python-version: "pypy-3.8"
- os: ubuntu-latest
python-version: "pypy-3.9"
steps: steps:
# See https://github.com/actions/checkout. A fetch-depth of 0 # See https://github.com/actions/checkout. A fetch-depth of 0
@@ -153,19 +152,18 @@ jobs:
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
os: include:
- windows-latest - os: macos-latest
python-version: "3.9"
force-foolscap: false
- os: windows-latest
python-version: "3.9"
force-foolscap: false
# 22.04 has some issue with Tor at the moment: # 22.04 has some issue with Tor at the moment:
# https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3943 # https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3943
- ubuntu-20.04 - os: ubuntu-20.04
python-version: python-version: "3.9"
- 3.7 force-foolscap: false
- 3.9
include:
# On macOS don't bother with 3.7, just to get faster builds.
- os: macos-latest
python-version: 3.9
steps: steps:
- name: Install Tor [Ubuntu] - name: Install Tor [Ubuntu]
@@ -206,12 +204,24 @@ jobs:
run: python misc/build_helpers/show-tool-versions.py run: python misc/build_helpers/show-tool-versions.py
- name: Run "Python 3 integration tests" - name: Run "Python 3 integration tests"
if: "${{ !matrix.force-foolscap }}"
env: env:
# On macOS this is necessary to ensure unix socket paths for tor # On macOS this is necessary to ensure unix socket paths for tor
# aren't too long. On Windows tox won't pass it through so it has no # aren't too long. On Windows tox won't pass it through so it has no
# effect. On Linux it doesn't make a difference one way or another. # effect. On Linux it doesn't make a difference one way or another.
TMPDIR: "/tmp" TMPDIR: "/tmp"
run: tox -e integration run: |
tox -e integration
- name: Run "Python 3 integration tests (force Foolscap)"
if: "${{ matrix.force-foolscap }}"
env:
# On macOS this is necessary to ensure unix socket paths for tor
# aren't too long. On Windows tox won't pass it through so it has no
# effect. On Linux it doesn't make a difference one way or another.
TMPDIR: "/tmp"
run: |
tox -e integration -- --force-foolscap integration/
- name: Upload eliot.log in case of failure - name: Upload eliot.log in case of failure
uses: actions/upload-artifact@v3 uses: actions/upload-artifact@v3


@@ -56,7 +56,7 @@ Once ``tahoe --version`` works, see `How to Run Tahoe-LAFS <docs/running.rst>`__
🐍 Python 2 🐍 Python 2
----------- -----------
Python 3.7 or later is now required. Python 3.8 or later is required.
If you are still using Python 2.7, use Tahoe-LAFS version 1.17.1. If you are still using Python 2.7, use Tahoe-LAFS version 1.17.1.


@@ -29,7 +29,7 @@ in
, pypiData ? sources.pypi-deps-db # the pypi package database snapshot to use , pypiData ? sources.pypi-deps-db # the pypi package database snapshot to use
# for dependency resolution # for dependency resolution
, pythonVersion ? "python37" # a string choosing the python derivation from , pythonVersion ? "python39" # a string choosing the python derivation from
# nixpkgs to target # nixpkgs to target
, extras ? [ "tor" "i2p" ] # a list of strings identifying tahoe-lafs extras, , extras ? [ "tor" "i2p" ] # a list of strings identifying tahoe-lafs extras,


@@ -1,15 +1,6 @@
""" """
Ported to Python 3. Ported to Python 3.
""" """
from __future__ import unicode_literals
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from future.utils import PY2
if PY2:
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
import sys import sys
import shutil import shutil
from time import sleep from time import sleep
@@ -66,6 +57,13 @@ def pytest_addoption(parser):
"--coverage", action="store_true", dest="coverage", "--coverage", action="store_true", dest="coverage",
help="Collect coverage statistics", help="Collect coverage statistics",
) )
parser.addoption(
"--force-foolscap", action="store_true", default=False,
dest="force_foolscap",
help=("If set, force Foolscap only for the storage protocol. " +
"Otherwise HTTP will be used.")
)
@pytest.fixture(autouse=True, scope='session') @pytest.fixture(autouse=True, scope='session')
def eliot_logging(): def eliot_logging():


@@ -1,14 +1,6 @@
""" """
Ported to Python 3. Ported to Python 3.
""" """
from __future__ import unicode_literals
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from future.utils import PY2
if PY2:
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
import sys import sys
import time import time
@@ -300,6 +292,14 @@ def _create_node(reactor, request, temp_dir, introducer_furl, flog_gatherer, nam
u'log_gatherer.furl', u'log_gatherer.furl',
flog_gatherer, flog_gatherer,
) )
force_foolscap = request.config.getoption("force_foolscap")
assert force_foolscap in (True, False)
set_config(
config,
'storage',
'force_foolscap',
str(force_foolscap),
)
write_config(FilePath(config_path), config) write_config(FilePath(config_path), config)
created_d.addCallback(created) created_d.addCallback(created)
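For reference, a sketch of the [storage] section this set_config call writes into the node's tahoe.cfg when the new --force-foolscap flag is passed (other storage options omitted; the value is str(True)/str(False)):

    [storage]
    force_foolscap = True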

New newsfragment files:
newsfragments/3870.minor (new empty file)
newsfragments/3914.minor (new empty file)
newsfragments/3937.minor (new empty file)
newsfragments/3942.minor (new file, 1 line)
newsfragments/3947.minor (new empty file)
newsfragments/3953.minor (new empty file)
newsfragments/3954.minor (new empty file)


@@ -0,0 +1 @@
Python 3.7 is no longer supported, and Debian 10 and Ubuntu 18.04 are no longer tested.


@@ -96,7 +96,9 @@ install_requires = [
# an sftp extra in Tahoe-LAFS, there is no point in having one. # an sftp extra in Tahoe-LAFS, there is no point in having one.
# * Twisted 19.10 introduces Site.getContentFile which we use to get # * Twisted 19.10 introduces Site.getContentFile which we use to get
# temporary upload files placed into a per-node temporary directory. # temporary upload files placed into a per-node temporary directory.
"Twisted[tls,conch] >= 19.10.0", # * Twisted 22.8.0 added support for coroutine-returning functions in many
# places (mainly via `maybeDeferred`)
"Twisted[tls,conch] >= 22.8.0",
"PyYAML >= 3.11", "PyYAML >= 3.11",
@@ -225,7 +227,7 @@ def run_command(args, cwd=None):
use_shell = sys.platform == "win32" use_shell = sys.platform == "win32"
try: try:
p = subprocess.Popen(args, stdout=subprocess.PIPE, cwd=cwd, shell=use_shell) p = subprocess.Popen(args, stdout=subprocess.PIPE, cwd=cwd, shell=use_shell)
except EnvironmentError as e: # if this gives a SyntaxError, note that Tahoe-LAFS requires Python 3.7+ except EnvironmentError as e: # if this gives a SyntaxError, note that Tahoe-LAFS requires Python 3.8+
print("Warning: unable to run %r." % (" ".join(args),)) print("Warning: unable to run %r." % (" ".join(args),))
print(e) print(e)
return None return None
@ -376,8 +378,8 @@ setup(name="tahoe-lafs", # also set in __init__.py
package_dir = {'':'src'}, package_dir = {'':'src'},
packages=find_packages('src') + ['allmydata.test.plugins'], packages=find_packages('src') + ['allmydata.test.plugins'],
classifiers=trove_classifiers, classifiers=trove_classifiers,
# We support Python 3.7 or later. 3.11 is not supported yet. # We support Python 3.8 or later. 3.11 is not supported yet.
python_requires=">=3.7, <3.11", python_requires=">=3.8, <3.11",
install_requires=install_requires, install_requires=install_requires,
extras_require={ extras_require={
# Duplicate the Twisted pywin32 dependency here. See # Duplicate the Twisted pywin32 dependency here. See
@ -390,9 +392,6 @@ setup(name="tahoe-lafs", # also set in __init__.py
], ],
"test": [ "test": [
"flake8", "flake8",
# On Python 3.7, importlib_metadata v5 breaks flake8.
# https://github.com/python/importlib_metadata/issues/407
"importlib_metadata<5; python_version < '3.8'",
# Pin a specific pyflakes so we don't have different folks # Pin a specific pyflakes so we don't have different folks
# disagreeing on what is or is not a lint issue. We can bump # disagreeing on what is or is not a lint issue. We can bump
# this version from time to time, but we will do it # this version from time to time, but we will do it


@@ -12,14 +12,9 @@ on any of their methods.
Ported to Python 3. Ported to Python 3.
""" """
from __future__ import absolute_import from __future__ import annotations
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from future.utils import PY2 from functools import partial
if PY2:
from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
from cryptography.exceptions import InvalidSignature from cryptography.exceptions import InvalidSignature
from cryptography.hazmat.backends import default_backend from cryptography.hazmat.backends import default_backend
@@ -72,20 +67,39 @@ def create_signing_keypair_from_string(private_key_der):
:returns: 2-tuple of (private_key, public_key) :returns: 2-tuple of (private_key, public_key)
""" """
priv_key = load_der_private_key( load = partial(
load_der_private_key,
private_key_der, private_key_der,
password=None, password=None,
backend=default_backend(), backend=default_backend(),
) )
if not isinstance(priv_key, rsa.RSAPrivateKey):
try:
# Load it once without the potentially expensive OpenSSL validation
# checks. These have superlinear complexity. We *will* run them just
# below - but first we'll apply our own constant-time checks.
unsafe_priv_key = load(unsafe_skip_rsa_key_validation=True)
except TypeError:
# cryptography<39 does not support this parameter, so just load the
# key with validation...
unsafe_priv_key = load()
# But avoid *reloading* it since that will run the expensive
# validation *again*.
load = lambda: unsafe_priv_key
if not isinstance(unsafe_priv_key, rsa.RSAPrivateKey):
raise ValueError( raise ValueError(
"Private Key did not decode to an RSA key" "Private Key did not decode to an RSA key"
) )
if priv_key.key_size != 2048: if unsafe_priv_key.key_size != 2048:
raise ValueError( raise ValueError(
"Private Key must be 2048 bits" "Private Key must be 2048 bits"
) )
return priv_key, priv_key.public_key()
# Now re-load it with OpenSSL's validation applied.
safe_priv_key = load()
return safe_priv_key, safe_priv_key.public_key()
def der_string_from_signing_key(private_key): def der_string_from_signing_key(private_key):
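A hedged round-trip sketch of the function above, assuming the module's existing create_signing_keypair and der_string_from_signing_key helpers:

    from allmydata.crypto.rsa import (
        create_signing_keypair,
        create_signing_keypair_from_string,
        der_string_from_signing_key,
    )

    priv, pub = create_signing_keypair(2048)
    der = der_string_from_signing_key(priv)
    # Runs the cheap constant-time checks first, then OpenSSL validation.
    priv2, pub2 = create_signing_keypair_from_string(der)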


@@ -323,6 +323,7 @@ class StorageClient(object):
swissnum = nurl.path[0].encode("ascii") swissnum = nurl.path[0].encode("ascii")
certificate_hash = nurl.user.encode("ascii") certificate_hash = nurl.user.encode("ascii")
pool = HTTPConnectionPool(reactor) pool = HTTPConnectionPool(reactor)
pool.maxPersistentPerHost = 20
if cls.TEST_MODE_REGISTER_HTTP_POOL is not None: if cls.TEST_MODE_REGISTER_HTTP_POOL is not None:
cls.TEST_MODE_REGISTER_HTTP_POOL(pool) cls.TEST_MODE_REGISTER_HTTP_POOL(pool)
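For context, a sketch of the pool setting being changed here, assuming only Twisted's HTTPConnectionPool API (its per-host default is 2 persistent connections):

    from twisted.internet import reactor
    from twisted.web.client import HTTPConnectionPool

    pool = HTTPConnectionPool(reactor)
    # Allow many concurrent requests to a single storage server instead
    # of Twisted's default of 2 persistent connections per host.
    pool.maxPersistentPerHost = 20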


@@ -117,7 +117,7 @@ def _authorization_decorator(required_secrets):
@wraps(f) @wraps(f)
def route(self, request, *args, **kwargs): def route(self, request, *args, **kwargs):
if not timing_safe_compare( if not timing_safe_compare(
request.requestHeaders.getRawHeaders("Authorization", [None])[0].encode( request.requestHeaders.getRawHeaders("Authorization", [""])[0].encode(
"utf-8" "utf-8"
), ),
swissnum_auth_header(self._swissnum), swissnum_auth_header(self._swissnum),
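The defaulting change above matters when the header is absent entirely: getRawHeaders returns the default list, and the old [None] default made .encode() raise AttributeError instead of producing a clean 401. A sketch, assuming only Twisted's Headers API:

    from twisted.web.http_headers import Headers

    headers = Headers()  # a request with no Authorization header at all
    old = headers.getRawHeaders("Authorization", [None])[0]  # None
    # old.encode("utf-8") would raise AttributeError here.
    new = headers.getRawHeaders("Authorization", [""])[0]    # ""
    assert new.encode("utf-8") == b""  # compares (and fails) cleanly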


@@ -1,19 +1,12 @@
""" """
Ported to Python 3. Tests related to the way ``allmydata.mutable`` handles different versions
of data for an object.
""" """
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from future.utils import PY2
if PY2:
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
from io import StringIO
import os import os
from six.moves import cStringIO as StringIO from typing import Optional
from twisted.internet import defer
from ..common import AsyncTestCase from ..common import AsyncTestCase
from testtools.matchers import ( from testtools.matchers import (
Equals, Equals,
@@ -47,49 +40,38 @@ class Version(GridTestMixin, AsyncTestCase, testutil.ShouldFailMixin, \
self.small_data = b"test data" * 10 # 90 B; SDMF self.small_data = b"test data" * 10 # 90 B; SDMF
def do_upload_mdmf(self, data=None): async def do_upload_mdmf(self, data: Optional[bytes] = None) -> MutableFileNode:
if data is None: if data is None:
data = self.data data = self.data
d = self.nm.create_mutable_file(MutableData(data), n = await self.nm.create_mutable_file(MutableData(data),
version=MDMF_VERSION) version=MDMF_VERSION)
def _then(n):
self.assertThat(n, IsInstance(MutableFileNode)) self.assertThat(n, IsInstance(MutableFileNode))
self.assertThat(n._protocol_version, Equals(MDMF_VERSION)) self.assertThat(n._protocol_version, Equals(MDMF_VERSION))
self.mdmf_node = n self.mdmf_node = n
return n return n
d.addCallback(_then)
return d
def do_upload_sdmf(self, data=None): async def do_upload_sdmf(self, data: Optional[bytes] = None) -> MutableFileNode:
if data is None: if data is None:
data = self.small_data data = self.small_data
d = self.nm.create_mutable_file(MutableData(data)) n = await self.nm.create_mutable_file(MutableData(data))
def _then(n):
self.assertThat(n, IsInstance(MutableFileNode)) self.assertThat(n, IsInstance(MutableFileNode))
self.assertThat(n._protocol_version, Equals(SDMF_VERSION)) self.assertThat(n._protocol_version, Equals(SDMF_VERSION))
self.sdmf_node = n self.sdmf_node = n
return n return n
d.addCallback(_then)
return d
def do_upload_empty_sdmf(self): async def do_upload_empty_sdmf(self) -> MutableFileNode:
d = self.nm.create_mutable_file(MutableData(b"")) n = await self.nm.create_mutable_file(MutableData(b""))
def _then(n):
self.assertThat(n, IsInstance(MutableFileNode)) self.assertThat(n, IsInstance(MutableFileNode))
self.sdmf_zero_length_node = n self.sdmf_zero_length_node = n
self.assertThat(n._protocol_version, Equals(SDMF_VERSION)) self.assertThat(n._protocol_version, Equals(SDMF_VERSION))
return n return n
d.addCallback(_then)
return d
def do_upload(self): async def do_upload(self) -> MutableFileNode:
d = self.do_upload_mdmf() await self.do_upload_mdmf()
d.addCallback(lambda ign: self.do_upload_sdmf()) return await self.do_upload_sdmf()
return d
def test_debug(self): async def test_debug(self) -> None:
d = self.do_upload_mdmf() n = await self.do_upload_mdmf()
def _debug(n):
fso = debug.FindSharesOptions() fso = debug.FindSharesOptions()
storage_index = base32.b2a(n.get_storage_index()) storage_index = base32.b2a(n.get_storage_index())
fso.si_s = str(storage_index, "utf-8") # command-line options are unicode on Python 3 fso.si_s = str(storage_index, "utf-8") # command-line options are unicode on Python 3
@@ -139,165 +121,123 @@ class Version(GridTestMixin, AsyncTestCase, testutil.ShouldFailMixin, \
# encryption salts and is not constant. fields[5] is the # encryption salts and is not constant. fields[5] is the
# remaining time on the longest lease, which is timing dependent. # remaining time on the longest lease, which is timing dependent.
# The rest of the line is the quoted pathname to the share. # The rest of the line is the quoted pathname to the share.
d.addCallback(_debug)
return d
def test_get_sequence_number(self): async def test_get_sequence_number(self) -> None:
d = self.do_upload() await self.do_upload()
d.addCallback(lambda ign: self.mdmf_node.get_best_readable_version()) bv = await self.mdmf_node.get_best_readable_version()
d.addCallback(lambda bv: self.assertThat(bv.get_sequence_number(), Equals(1))
self.assertThat(bv.get_sequence_number(), Equals(1))) bv = await self.sdmf_node.get_best_readable_version()
d.addCallback(lambda ignored: self.assertThat(bv.get_sequence_number(), Equals(1))
self.sdmf_node.get_best_readable_version())
d.addCallback(lambda bv:
self.assertThat(bv.get_sequence_number(), Equals(1)))
# Now update. The sequence number in both cases should be 1 in # Now update. The sequence number in both cases should be 1 in
# both cases. # both cases.
def _do_update(ignored):
new_data = MutableData(b"foo bar baz" * 100000) new_data = MutableData(b"foo bar baz" * 100000)
new_small_data = MutableData(b"foo bar baz" * 10) new_small_data = MutableData(b"foo bar baz" * 10)
d1 = self.mdmf_node.overwrite(new_data) d1 = self.mdmf_node.overwrite(new_data)
d2 = self.sdmf_node.overwrite(new_small_data) d2 = self.sdmf_node.overwrite(new_small_data)
dl = gatherResults([d1, d2]) await gatherResults([d1, d2])
return dl bv = await self.mdmf_node.get_best_readable_version()
d.addCallback(_do_update) self.assertThat(bv.get_sequence_number(), Equals(2))
d.addCallback(lambda ignored: bv = await self.sdmf_node.get_best_readable_version()
self.mdmf_node.get_best_readable_version()) self.assertThat(bv.get_sequence_number(), Equals(2))
d.addCallback(lambda bv:
self.assertThat(bv.get_sequence_number(), Equals(2)))
d.addCallback(lambda ignored:
self.sdmf_node.get_best_readable_version())
d.addCallback(lambda bv:
self.assertThat(bv.get_sequence_number(), Equals(2)))
return d
async def test_cap_after_upload(self) -> None:
def test_cap_after_upload(self):
# If we create a new mutable file and upload things to it, and # If we create a new mutable file and upload things to it, and
# it's an MDMF file, we should get an MDMF cap back from that # it's an MDMF file, we should get an MDMF cap back from that
# file and should be able to use that. # file and should be able to use that.
# That's essentially what MDMF node is, so just check that. # That's essentially what MDMF node is, so just check that.
d = self.do_upload_mdmf() await self.do_upload_mdmf()
def _then(ign):
mdmf_uri = self.mdmf_node.get_uri() mdmf_uri = self.mdmf_node.get_uri()
cap = uri.from_string(mdmf_uri) cap = uri.from_string(mdmf_uri)
self.assertTrue(isinstance(cap, uri.WriteableMDMFFileURI)) self.assertTrue(isinstance(cap, uri.WriteableMDMFFileURI))
readonly_mdmf_uri = self.mdmf_node.get_readonly_uri() readonly_mdmf_uri = self.mdmf_node.get_readonly_uri()
cap = uri.from_string(readonly_mdmf_uri) cap = uri.from_string(readonly_mdmf_uri)
self.assertTrue(isinstance(cap, uri.ReadonlyMDMFFileURI)) self.assertTrue(isinstance(cap, uri.ReadonlyMDMFFileURI))
d.addCallback(_then)
return d
def test_mutable_version(self): async def test_mutable_version(self) -> None:
# assert that getting parameters from the IMutableVersion object # assert that getting parameters from the IMutableVersion object
# gives us the same data as getting them from the filenode itself # gives us the same data as getting them from the filenode itself
d = self.do_upload() await self.do_upload()
d.addCallback(lambda ign: self.mdmf_node.get_best_mutable_version()) bv = await self.mdmf_node.get_best_mutable_version()
def _check_mdmf(bv):
n = self.mdmf_node n = self.mdmf_node
self.assertThat(bv.get_writekey(), Equals(n.get_writekey())) self.assertThat(bv.get_writekey(), Equals(n.get_writekey()))
self.assertThat(bv.get_storage_index(), Equals(n.get_storage_index())) self.assertThat(bv.get_storage_index(), Equals(n.get_storage_index()))
self.assertFalse(bv.is_readonly()) self.assertFalse(bv.is_readonly())
d.addCallback(_check_mdmf)
d.addCallback(lambda ign: self.sdmf_node.get_best_mutable_version()) bv = await self.sdmf_node.get_best_mutable_version()
def _check_sdmf(bv):
n = self.sdmf_node n = self.sdmf_node
self.assertThat(bv.get_writekey(), Equals(n.get_writekey())) self.assertThat(bv.get_writekey(), Equals(n.get_writekey()))
self.assertThat(bv.get_storage_index(), Equals(n.get_storage_index())) self.assertThat(bv.get_storage_index(), Equals(n.get_storage_index()))
self.assertFalse(bv.is_readonly()) self.assertFalse(bv.is_readonly())
d.addCallback(_check_sdmf)
return d
def test_get_readonly_version(self): async def test_get_readonly_version(self) -> None:
d = self.do_upload() await self.do_upload()
d.addCallback(lambda ign: self.mdmf_node.get_best_readable_version()) bv = await self.mdmf_node.get_best_readable_version()
d.addCallback(lambda bv: self.assertTrue(bv.is_readonly())) self.assertTrue(bv.is_readonly())
# Attempting to get a mutable version of a mutable file from a # Attempting to get a mutable version of a mutable file from a
# filenode initialized with a readcap should return a readonly # filenode initialized with a readcap should return a readonly
# version of that same node. # version of that same node.
d.addCallback(lambda ign: self.mdmf_node.get_readonly()) ro = self.mdmf_node.get_readonly()
d.addCallback(lambda ro: ro.get_best_mutable_version()) v = await ro.get_best_mutable_version()
d.addCallback(lambda v: self.assertTrue(v.is_readonly())) self.assertTrue(v.is_readonly())
d.addCallback(lambda ign: self.sdmf_node.get_best_readable_version()) bv = await self.sdmf_node.get_best_readable_version()
d.addCallback(lambda bv: self.assertTrue(bv.is_readonly())) self.assertTrue(bv.is_readonly())
d.addCallback(lambda ign: self.sdmf_node.get_readonly()) ro = self.sdmf_node.get_readonly()
d.addCallback(lambda ro: ro.get_best_mutable_version()) v = await ro.get_best_mutable_version()
d.addCallback(lambda v: self.assertTrue(v.is_readonly())) self.assertTrue(v.is_readonly())
return d
def test_toplevel_overwrite(self): async def test_toplevel_overwrite(self) -> None:
new_data = MutableData(b"foo bar baz" * 100000) new_data = MutableData(b"foo bar baz" * 100000)
new_small_data = MutableData(b"foo bar baz" * 10) new_small_data = MutableData(b"foo bar baz" * 10)
d = self.do_upload() await self.do_upload()
d.addCallback(lambda ign: self.mdmf_node.overwrite(new_data)) await self.mdmf_node.overwrite(new_data)
d.addCallback(lambda ignored: data = await self.mdmf_node.download_best_version()
self.mdmf_node.download_best_version()) self.assertThat(data, Equals(b"foo bar baz" * 100000))
d.addCallback(lambda data: await self.sdmf_node.overwrite(new_small_data)
self.assertThat(data, Equals(b"foo bar baz" * 100000))) data = await self.sdmf_node.download_best_version()
d.addCallback(lambda ignored: self.assertThat(data, Equals(b"foo bar baz" * 10))
self.sdmf_node.overwrite(new_small_data))
d.addCallback(lambda ignored:
self.sdmf_node.download_best_version())
d.addCallback(lambda data:
self.assertThat(data, Equals(b"foo bar baz" * 10)))
return d
def test_toplevel_modify(self): async def test_toplevel_modify(self) -> None:
d = self.do_upload() await self.do_upload()
def modifier(old_contents, servermap, first_time): def modifier(old_contents, servermap, first_time):
return old_contents + b"modified" return old_contents + b"modified"
d.addCallback(lambda ign: self.mdmf_node.modify(modifier)) await self.mdmf_node.modify(modifier)
d.addCallback(lambda ignored: data = await self.mdmf_node.download_best_version()
self.mdmf_node.download_best_version()) self.assertThat(data, Contains(b"modified"))
d.addCallback(lambda data: await self.sdmf_node.modify(modifier)
self.assertThat(data, Contains(b"modified"))) data = await self.sdmf_node.download_best_version()
d.addCallback(lambda ignored: self.assertThat(data, Contains(b"modified"))
self.sdmf_node.modify(modifier))
d.addCallback(lambda ignored:
self.sdmf_node.download_best_version())
d.addCallback(lambda data:
self.assertThat(data, Contains(b"modified")))
return d
def test_version_modify(self): async def test_version_modify(self) -> None:
# TODO: When we can publish multiple versions, alter this test # TODO: When we can publish multiple versions, alter this test
# to modify a version other than the best usable version, then # to modify a version other than the best usable version, then
# test to see that the best recoverable version is that. # test to see that the best recoverable version is that.
d = self.do_upload() await self.do_upload()
def modifier(old_contents, servermap, first_time): def modifier(old_contents, servermap, first_time):
return old_contents + b"modified" return old_contents + b"modified"
d.addCallback(lambda ign: self.mdmf_node.modify(modifier)) await self.mdmf_node.modify(modifier)
d.addCallback(lambda ignored: data = await self.mdmf_node.download_best_version()
self.mdmf_node.download_best_version()) self.assertThat(data, Contains(b"modified"))
d.addCallback(lambda data: await self.sdmf_node.modify(modifier)
self.assertThat(data, Contains(b"modified"))) data = await self.sdmf_node.download_best_version()
d.addCallback(lambda ignored: self.assertThat(data, Contains(b"modified"))
self.sdmf_node.modify(modifier))
d.addCallback(lambda ignored:
self.sdmf_node.download_best_version())
d.addCallback(lambda data:
self.assertThat(data, Contains(b"modified")))
return d
def test_download_version(self): async def test_download_version(self) -> None:
d = self.publish_multiple() await self.publish_multiple()
# We want to have two recoverable versions on the grid. # We want to have two recoverable versions on the grid.
d.addCallback(lambda res:
self._set_versions({0:0,2:0,4:0,6:0,8:0, self._set_versions({0:0,2:0,4:0,6:0,8:0,
1:1,3:1,5:1,7:1,9:1})) 1:1,3:1,5:1,7:1,9:1})
# Now try to download each version. We should get the plaintext # Now try to download each version. We should get the plaintext
# associated with that version. # associated with that version.
d.addCallback(lambda ignored: smap = await self._fn.get_servermap(mode=MODE_READ)
self._fn.get_servermap(mode=MODE_READ))
def _got_servermap(smap):
versions = smap.recoverable_versions() versions = smap.recoverable_versions()
assert len(versions) == 2 assert len(versions) == 2
@@ -310,80 +250,58 @@ class Version(GridTestMixin, AsyncTestCase, testutil.ShouldFailMixin, \
self.version1_index = self.version1_seqnum - 1 self.version1_index = self.version1_seqnum - 1
self.version2_index = self.version2_seqnum - 1 self.version2_index = self.version2_seqnum - 1
d.addCallback(_got_servermap) results = await self._fn.download_version(self.servermap, self.version1)
d.addCallback(lambda ignored:
self._fn.download_version(self.servermap, self.version1))
d.addCallback(lambda results:
self.assertThat(self.CONTENTS[self.version1_index], self.assertThat(self.CONTENTS[self.version1_index],
Equals(results))) Equals(results))
d.addCallback(lambda ignored: results = await self._fn.download_version(self.servermap, self.version2)
self._fn.download_version(self.servermap, self.version2))
d.addCallback(lambda results:
self.assertThat(self.CONTENTS[self.version2_index], self.assertThat(self.CONTENTS[self.version2_index],
Equals(results))) Equals(results))
return d
def test_download_nonexistent_version(self): async def test_download_nonexistent_version(self) -> None:
d = self.do_upload_mdmf() await self.do_upload_mdmf()
d.addCallback(lambda ign: self.mdmf_node.get_servermap(mode=MODE_WRITE)) servermap = await self.mdmf_node.get_servermap(mode=MODE_WRITE)
def _set_servermap(servermap): await self.shouldFail(UnrecoverableFileError, "nonexistent version",
self.servermap = servermap
d.addCallback(_set_servermap)
d.addCallback(lambda ignored:
self.shouldFail(UnrecoverableFileError, "nonexistent version",
None, None,
self.mdmf_node.download_version, self.servermap, self.mdmf_node.download_version, servermap,
"not a version")) "not a version")
return d
def _test_partial_read(self, node, expected, modes, step): async def _test_partial_read(self, node, expected, modes, step) -> None:
d = node.get_best_readable_version() version = await node.get_best_readable_version()
for (name, offset, length) in modes: for (name, offset, length) in modes:
d.addCallback(self._do_partial_read, name, expected, offset, length) await self._do_partial_read(version, name, expected, offset, length)
# then read the whole thing, but only a few bytes at a time, and see # then read the whole thing, but only a few bytes at a time, and see
# that the results are what we expect. # that the results are what we expect.
def _read_data(version):
c = consumer.MemoryConsumer() c = consumer.MemoryConsumer()
d2 = defer.succeed(None)
for i in range(0, len(expected), step): for i in range(0, len(expected), step):
d2.addCallback(lambda ignored, i=i: version.read(c, i, step)) await version.read(c, i, step)
d2.addCallback(lambda ignored: self.assertThat(expected, Equals(b"".join(c.chunks)))
self.assertThat(expected, Equals(b"".join(c.chunks))))
return d2
d.addCallback(_read_data)
return d
def _do_partial_read(self, version, name, expected, offset, length): async def _do_partial_read(self, version, name, expected, offset, length) -> None:
c = consumer.MemoryConsumer() c = consumer.MemoryConsumer()
d = version.read(c, offset, length) await version.read(c, offset, length)
if length is None: if length is None:
expected_range = expected[offset:] expected_range = expected[offset:]
else: else:
expected_range = expected[offset:offset+length] expected_range = expected[offset:offset+length]
d.addCallback(lambda ignored: b"".join(c.chunks)) results = b"".join(c.chunks)
def _check(results):
if results != expected_range: if results != expected_range:
print("read([%d]+%s) got %d bytes, not %d" % \ print("read([%d]+%s) got %d bytes, not %d" % \
(offset, length, len(results), len(expected_range))) (offset, length, len(results), len(expected_range)))
print("got: %s ... %s" % (results[:20], results[-20:])) print("got: %r ... %r" % (results[:20], results[-20:]))
print("exp: %s ... %s" % (expected_range[:20], expected_range[-20:])) print("exp: %r ... %r" % (expected_range[:20], expected_range[-20:]))
self.fail("results[%s] != expected_range" % name) self.fail("results[%s] != expected_range" % name)
return version # daisy-chained to next call
d.addCallback(_check)
return d
def test_partial_read_mdmf_0(self): async def test_partial_read_mdmf_0(self) -> None:
data = b"" data = b""
d = self.do_upload_mdmf(data=data) result = await self.do_upload_mdmf(data=data)
modes = [("all1", 0,0), modes = [("all1", 0,0),
("all2", 0,None), ("all2", 0,None),
] ]
d.addCallback(self._test_partial_read, data, modes, 1) await self._test_partial_read(result, data, modes, 1)
return d
def test_partial_read_mdmf_large(self): async def test_partial_read_mdmf_large(self) -> None:
segment_boundary = mathutil.next_multiple(128 * 1024, 3) segment_boundary = mathutil.next_multiple(128 * 1024, 3)
modes = [("start_on_segment_boundary", segment_boundary, 50), modes = [("start_on_segment_boundary", segment_boundary, 50),
("ending_one_byte_after_segment_boundary", segment_boundary-50, 51), ("ending_one_byte_after_segment_boundary", segment_boundary-50, 51),
@@ -393,20 +311,18 @@ class Version(GridTestMixin, AsyncTestCase, testutil.ShouldFailMixin, \
("complete_file1", 0, len(self.data)), ("complete_file1", 0, len(self.data)),
("complete_file2", 0, None), ("complete_file2", 0, None),
] ]
d = self.do_upload_mdmf() result = await self.do_upload_mdmf()
d.addCallback(self._test_partial_read, self.data, modes, 10000) await self._test_partial_read(result, self.data, modes, 10000)
return d
def test_partial_read_sdmf_0(self): async def test_partial_read_sdmf_0(self) -> None:
data = b"" data = b""
modes = [("all1", 0,0), modes = [("all1", 0,0),
("all2", 0,None), ("all2", 0,None),
] ]
d = self.do_upload_sdmf(data=data) result = await self.do_upload_sdmf(data=data)
d.addCallback(self._test_partial_read, data, modes, 1) await self._test_partial_read(result, data, modes, 1)
return d
def test_partial_read_sdmf_2(self): async def test_partial_read_sdmf_2(self) -> None:
data = b"hi" data = b"hi"
modes = [("one_byte", 0, 1), modes = [("one_byte", 0, 1),
("last_byte", 1, 1), ("last_byte", 1, 1),
@@ -414,11 +330,10 @@ class Version(GridTestMixin, AsyncTestCase, testutil.ShouldFailMixin, \
("complete_file", 0, 2), ("complete_file", 0, 2),
("complete_file2", 0, None), ("complete_file2", 0, None),
] ]
d = self.do_upload_sdmf(data=data) result = await self.do_upload_sdmf(data=data)
d.addCallback(self._test_partial_read, data, modes, 1) await self._test_partial_read(result, data, modes, 1)
return d
def test_partial_read_sdmf_90(self): async def test_partial_read_sdmf_90(self) -> None:
modes = [("start_at_middle", 50, 40), modes = [("start_at_middle", 50, 40),
("start_at_middle2", 50, None), ("start_at_middle2", 50, None),
("zero_length_at_start", 0, 0), ("zero_length_at_start", 0, 0),
@@ -427,11 +342,10 @@ class Version(GridTestMixin, AsyncTestCase, testutil.ShouldFailMixin, \
("complete_file1", 0, None), ("complete_file1", 0, None),
("complete_file2", 0, 90), ("complete_file2", 0, 90),
] ]
d = self.do_upload_sdmf() result = await self.do_upload_sdmf()
d.addCallback(self._test_partial_read, self.small_data, modes, 10) await self._test_partial_read(result, self.small_data, modes, 10)
return d
def test_partial_read_sdmf_100(self): async def test_partial_read_sdmf_100(self) -> None:
data = b"test data "*10 data = b"test data "*10
modes = [("start_at_middle", 50, 50), modes = [("start_at_middle", 50, 50),
("start_at_middle2", 50, None), ("start_at_middle2", 50, None),
@@ -440,42 +354,30 @@ class Version(GridTestMixin, AsyncTestCase, testutil.ShouldFailMixin, \
("complete_file1", 0, 100), ("complete_file1", 0, 100),
("complete_file2", 0, None), ("complete_file2", 0, None),
] ]
d = self.do_upload_sdmf(data=data) result = await self.do_upload_sdmf(data=data)
d.addCallback(self._test_partial_read, data, modes, 10) await self._test_partial_read(result, data, modes, 10)
return d
async def _test_read_and_download(self, node, expected) -> None:
def _test_read_and_download(self, node, expected): version = await node.get_best_readable_version()
d = node.get_best_readable_version()
def _read_data(version):
c = consumer.MemoryConsumer() c = consumer.MemoryConsumer()
await version.read(c)
self.assertThat(expected, Equals(b"".join(c.chunks)))
c2 = consumer.MemoryConsumer() c2 = consumer.MemoryConsumer()
d2 = defer.succeed(None) await version.read(c2, offset=0, size=len(expected))
d2.addCallback(lambda ignored: version.read(c)) self.assertThat(expected, Equals(b"".join(c2.chunks)))
d2.addCallback(lambda ignored:
self.assertThat(expected, Equals(b"".join(c.chunks))))
d2.addCallback(lambda ignored: version.read(c2, offset=0, data = await node.download_best_version()
size=len(expected))) self.assertThat(expected, Equals(data))
d2.addCallback(lambda ignored:
self.assertThat(expected, Equals(b"".join(c2.chunks))))
return d2
d.addCallback(_read_data)
d.addCallback(lambda ignored: node.download_best_version())
d.addCallback(lambda data: self.assertThat(expected, Equals(data)))
return d
def test_read_and_download_mdmf(self): async def test_read_and_download_mdmf(self) -> None:
d = self.do_upload_mdmf() result = await self.do_upload_mdmf()
d.addCallback(self._test_read_and_download, self.data) await self._test_read_and_download(result, self.data)
return d
def test_read_and_download_sdmf(self): async def test_read_and_download_sdmf(self) -> None:
d = self.do_upload_sdmf() result = await self.do_upload_sdmf()
d.addCallback(self._test_read_and_download, self.small_data) await self._test_read_and_download(result, self.small_data)
return d
def test_read_and_download_sdmf_zero_length(self): async def test_read_and_download_sdmf_zero_length(self) -> None:
d = self.do_upload_empty_sdmf() result = await self.do_upload_empty_sdmf()
d.addCallback(self._test_read_and_download, b"") await self._test_read_and_download(result, b"")
return d


@@ -37,6 +37,7 @@ from twisted.web import http
from twisted.web.http_headers import Headers from twisted.web.http_headers import Headers
from werkzeug import routing from werkzeug import routing
from werkzeug.exceptions import NotFound as WNotFound from werkzeug.exceptions import NotFound as WNotFound
from testtools.matchers import Equals
from .common import SyncTestCase from .common import SyncTestCase
from ..storage.http_common import get_content_type, CBOR_MIME_TYPE from ..storage.http_common import get_content_type, CBOR_MIME_TYPE
@@ -555,6 +556,20 @@ class GenericHTTPAPITests(SyncTestCase):
super(GenericHTTPAPITests, self).setUp() super(GenericHTTPAPITests, self).setUp()
self.http = self.useFixture(HttpTestFixture()) self.http = self.useFixture(HttpTestFixture())
def test_missing_authentication(self) -> None:
"""
If nothing is given in the ``Authorization`` header at all an
``Unauthorized`` response is returned.
"""
client = StubTreq(self.http.http_server.get_resource())
response = self.http.result_of_with_flush(
client.request(
"GET",
"http://127.0.0.1/storage/v1/version",
),
)
self.assertThat(response.code, Equals(http.UNAUTHORIZED))
def test_bad_authentication(self): def test_bad_authentication(self):
""" """
If the wrong swissnum is used, an ``Unauthorized`` response code is If the wrong swissnum is used, an ``Unauthorized`` response code is


@@ -5,7 +5,7 @@ in
{ pkgsVersion ? "nixpkgs-21.11" { pkgsVersion ? "nixpkgs-21.11"
, pkgs ? import sources.${pkgsVersion} { } , pkgs ? import sources.${pkgsVersion} { }
, pypiData ? sources.pypi-deps-db , pypiData ? sources.pypi-deps-db
, pythonVersion ? "python37" , pythonVersion ? "python39"
, mach-nix ? import sources.mach-nix { , mach-nix ? import sources.mach-nix {
inherit pkgs pypiData; inherit pkgs pypiData;
python = pythonVersion; python = pythonVersion;
@@ -21,7 +21,7 @@ let
inherit pkgs; inherit pkgs;
lib = pkgs.lib; lib = pkgs.lib;
}; };
tests_require = (mach-lib.extract "python37" ./. "extras_require" ).extras_require.test; tests_require = (mach-lib.extract "python39" ./. "extras_require" ).extras_require.test;
# Get the Tahoe-LAFS package itself. This does not include test # Get the Tahoe-LAFS package itself. This does not include test
# requirements and we don't ask for test requirements so that we can just # requirements and we don't ask for test requirements so that we can just


@@ -7,11 +7,9 @@
# the tox-gh-actions package. # the tox-gh-actions package.
[gh-actions] [gh-actions]
python = python =
3.7: py37-coverage,typechecks,codechecks
3.8: py38-coverage 3.8: py38-coverage
3.9: py39-coverage 3.9: py39-coverage
3.10: py310-coverage 3.10: py310-coverage
pypy-3.7: pypy37
pypy-3.8: pypy38 pypy-3.8: pypy38
pypy-3.9: pypy39 pypy-3.9: pypy39
@@ -19,7 +17,7 @@ python =
twisted = 1 twisted = 1
[tox] [tox]
envlist = typechecks,codechecks,py{37,38,39,310}-{coverage},pypy27,pypy37,pypy38,pypy39,integration envlist = typechecks,codechecks,py{38,39,310}-{coverage},pypy27,pypy38,pypy39,integration
minversion = 2.4 minversion = 2.4
[testenv] [testenv]
@@ -49,8 +47,6 @@ deps =
# regressions in new releases of this package that cause us the kind of # regressions in new releases of this package that cause us the kind of
# suffering we're trying to avoid with the above pins. # suffering we're trying to avoid with the above pins.
certifi certifi
# VCS hooks support
py37,!coverage: pre-commit
# We add usedevelop=False because testing against a true installation gives # We add usedevelop=False because testing against a true installation gives
# more useful results. # more useful results.