Merge branch '3855-immutable-http-apis-part-1' into 3868-istorageserver-tests-http

Itamar Turner-Trauring 2022-01-31 11:36:28 -05:00
commit ef56435578
43 changed files with 1523 additions and 786 deletions


@ -39,11 +39,13 @@ workflows:
- "centos-8":
{}
- "nixos-19-09":
{}
- "nixos":
name: "NixOS 21.05"
nixpkgs: "21.05"
- "nixos-21-05":
{}
- "nixos":
name: "NixOS 21.11"
nixpkgs: "21.11"
# Test against PyPy 2.7
- "pypy27-buster":
@ -441,20 +443,58 @@ jobs:
image: "tahoelafsci/fedora:29-py"
user: "nobody"
nixos-19-09: &NIXOS
nixos:
parameters:
nixpkgs:
description: >-
Reference the name of a niv-managed nixpkgs source (see `niv show`
and nix/sources.json)
type: "string"
docker:
# Run in a highly Nix-capable environment.
- <<: *DOCKERHUB_AUTH
image: "nixorg/nix:circleci"
image: "nixos/nix:2.3.16"
environment:
NIX_PATH: "nixpkgs=https://github.com/NixOS/nixpkgs-channels/archive/nixos-19.09-small.tar.gz"
SOURCE: "nix/"
# CACHIX_AUTH_TOKEN is manually set in the CircleCI web UI and
# allows us to push to CACHIX_NAME. We only need this set for
# `cachix use` in this step.
CACHIX_NAME: "tahoe-lafs-opensource"
steps:
- "checkout"
- "run":
name: "Build and Test"
# The nixos/nix image does not include ssh. Install it so the
# `checkout` step will succeed. We also want cachix for
# Nix-friendly caching.
name: "Install Basic Dependencies"
command: |
nix-env \
--file https://github.com/nixos/nixpkgs/archive/nixos-<<parameters.nixpkgs>>.tar.gz \
--install \
-A openssh cachix bash
- "checkout"
- run:
name: "Cachix setup"
# Record the store paths that exist before we do much. There's no
# reason to cache these, they're either in the image or have to be
# retrieved before we can use cachix to restore from cache.
command: |
cachix use "${CACHIX_NAME}"
nix path-info --all > /tmp/store-path-pre-build
- "run":
# The Nix package doesn't know how to do this part, unfortunately.
name: "Generate version"
command: |
nix-shell \
-p 'python3.withPackages (ps: [ ps.setuptools ])' \
--run 'python setup.py update_version'
- "run":
name: "Build"
command: |
# CircleCI build environment looks like it has a zillion and a
# half cores. Don't let Nix autodetect this high core count
@ -466,17 +506,50 @@ jobs:
# build a couple simple little dependencies that don't take
# advantage of multiple cores and we get a little speedup by doing
# them in parallel.
nix-build --cores 3 --max-jobs 2 "$SOURCE"
nix-build --cores 3 --max-jobs 2 --argstr pkgsVersion "nixpkgs-<<parameters.nixpkgs>>"
nixos-21-05:
<<: *NIXOS
- "run":
name: "Test"
command: |
# Let it go somewhat wild for the test suite itself
nix-build --cores 8 --argstr pkgsVersion "nixpkgs-<<parameters.nixpkgs>>" tests.nix
environment:
# Note this doesn't look more similar to the 19.09 NIX_PATH URL because
# there was some internal shuffling by the NixOS project about how they
# publish stable revisions.
NIX_PATH: "nixpkgs=https://github.com/NixOS/nixpkgs/archive/d32b07e6df276d78e3640eb43882b80c9b2b3459.tar.gz"
SOURCE: "nix/py3.nix"
- run:
# Send any new store objects to cachix.
name: "Push to Cachix"
when: "always"
command: |
# Cribbed from
# https://circleci.com/blog/managing-secrets-when-you-have-pull-requests-from-outside-contributors/
if [ -n "$CIRCLE_PR_NUMBER" ]; then
# I'm sure you're thinking "CIRCLE_PR_NUMBER must just be the
# number of the PR being built". Sorry, dear reader, you have
# guessed poorly. It is also conditionally set based on whether
# this is a PR from a fork or not.
#
# https://circleci.com/docs/2.0/env-vars/#built-in-environment-variables
echo "Skipping Cachix push for forked PR."
else
# If this *isn't* a build from a fork then we have the Cachix
# write key in our environment and we can push any new objects
# to Cachix.
#
# To decide what to push, we inspect the list of store objects
# that existed before and after we did most of our work. Any
# that are new after the work are probably useful to have
# around, so push them to the cache. We exclude all derivation
# objects (.drv files) because they're cheap to reconstruct and
# by the time you know their cache key you've already done all
# the work anyway.
#
# This shell expression for finding the objects and pushing them
# was from the Cachix docs:
#
# https://docs.cachix.org/continuous-integration-setup/circleci.html
#
# but they seem to have removed it now.
bash -c "comm -13 <(sort /tmp/store-path-pre-build | grep -v '\.drv$') <(nix path-info --all | grep -v '\.drv$' | sort) | cachix push $CACHIX_NAME"
fi
typechecks:
docker:

default.nix (new file, 116 lines)

@ -0,0 +1,116 @@
let
# sources.nix contains information about which versions of some of our
# dependencies we should use. since we use it to pin nixpkgs and the PyPI
# package database, roughly all the rest of our dependencies are *also*
# pinned - indirectly.
#
# sources.nix is managed using a tool called `niv`. as an example, to
# update to the most recent version of nixpkgs from the 21.11 maintenance
# release, in the top-level tahoe-lafs checkout directory you run:
#
# niv update nixpkgs-21.11
#
# or, to update the PyPI package database -- which is necessary to make any
# newly released packages visible -- you likewise run:
#
# niv update pypi-deps-db
#
# niv also supports choosing a specific revision, following a different
# branch, etc. find complete documentation for the tool at
# https://github.com/nmattia/niv
sources = import nix/sources.nix;
in
{
pkgsVersion ? "nixpkgs-21.11" # a string which chooses a nixpkgs from the
# niv-managed sources data
, pkgs ? import sources.${pkgsVersion} { } # nixpkgs itself
, pypiData ? sources.pypi-deps-db # the pypi package database snapshot to use
# for dependency resolution
, pythonVersion ? "python37" # a string choosing the python derivation from
# nixpkgs to target
, extras ? [ "tor" "i2p" ] # a list of strings identifying tahoe-lafs extras,
# the dependencies of which the resulting package
# will also depend on. Include all of the runtime
# extras by default because the incremental cost of
# including them is a lot smaller than the cost of
# re-building the whole thing to add them.
, mach-nix ? import sources.mach-nix { # the mach-nix package to use to build
# the tahoe-lafs package
inherit pkgs pypiData;
python = pythonVersion;
}
}:
# The project name, version, and most other metadata are automatically
# extracted from the source. Some requirements are not properly extracted
# and those cases are handled below. The version can only be extracted if
# `setup.py update_version` has been run (this is not at all ideal but it
# seems difficult to fix) - so for now just be sure to run that first.
mach-nix.buildPythonPackage rec {
# Define the location of the Tahoe-LAFS source to be packaged. Clean up
# as many of the non-source files (e.g. the `.git` directory, `~` backup
# files, nix's own `result` symlink, etc) as possible to avoid needing to
# re-build when files that make no difference to the package have changed.
src = pkgs.lib.cleanSource ./.;
# Select whichever package extras were requested.
inherit extras;
# Define some extra requirements that mach-nix does not automatically detect
# from inspection of the source. We typically don't need to put version
# constraints on any of these requirements. The pypi-deps-db we're
# operating with makes dependency resolution deterministic so as long as it
# works once it will always work. It could be that in the future we update
# pypi-deps-db and an incompatibility arises - in which case it would make
# sense to apply some version constraints here.
requirementsExtra = ''
# mach-nix does not yet support pyproject.toml which means it misses any
# build-time requirements of our dependencies which are declared in such a
# file. Tell it about them here.
setuptools_rust
# mach-nix does not yet parse environment markers (e.g. "python > '3.0'")
# correctly. It misses all of our requirements which have an environment marker.
# Duplicate them here.
foolscap
eliot
pyrsistent
collections-extended
'';
# Specify where mach-nix should find packages for our Python dependencies.
# There are some reasonable defaults so we only need to specify certain
# packages where the default configuration runs into some issue.
providers = {
# Through zfec 1.5.5 the wheel has an incorrect runtime dependency
# declared on argparse, not available for recent versions of Python 3.
# Force mach-nix to use the sdist instead. This allows us to apply a
# patch that removes the offending declaration.
zfec = "sdist";
};
# Define certain overrides to the way Python dependencies are built.
_ = {
# Apply the argparse declaration fix to zfec sdist.
zfec.patches = with pkgs; [
(fetchpatch {
name = "fix-argparse.patch";
url = "https://github.com/tahoe-lafs/zfec/commit/c3e736a72cccf44b8e1fb7d6c276400204c6bc1e.patch";
sha256 = "1md9i2fx1ya7mgcj9j01z58hs3q9pj4ch5is5b5kq4v86cf6x33x";
})
];
# Remove a click-default-group patch for a test suite problem which no
# longer applies because the project apparently no longer has a test suite
# in its source distribution.
click-default-group.patches = [];
};
passthru.meta.mach-nix = {
inherit providers _;
};
}


@ -382,6 +382,11 @@ the server will respond with ``400 BAD REQUEST``.
If authorization using the secret fails, then a ``401 UNAUTHORIZED`` response should be sent.
Encoding
~~~~~~~~
* ``storage_index`` should be base32 encoded (RFC3548) in URLs.
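As a sketch, this is the transformation applied to a storage index before it appears in a URL (Tahoe's ``si_b2a`` helper implements it; this standalone version, using only the standard library, is illustrative)::

    from base64 import b32encode

    def encode_storage_index(si):  # type: (bytes) -> str
        # RFC 3548 base32, lowercased and unpadded, suitable for use as
        # a URL path segment.
        return b32encode(si).rstrip(b"=").lower().decode("ascii")

    # A 16-byte storage index becomes a 26-character path segment, e.g.
    # encode_storage_index(b"\x00" * 16) == "a" * 26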
General
~~~~~~~
@ -483,6 +488,14 @@ For example::
The upload secret is an opaque _byte_ string.
Handling repeat calls:
* If the same API call is repeated with the same upload secret, the response is the same and no change is made to server state.
This is necessary to ensure retries work in the face of lost responses from the server.
* If the API call is made with a different upload secret, this implies a new client, perhaps because the old client died.
In order to prevent storage servers from being able to mess with each other, this API call will fail, because the secret doesn't match.
The use case of restarting upload from scratch if the client dies can be implemented by having the client persist the upload secret.
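The idempotency rules above reduce to a single secret comparison on the server; a minimal sketch (``timing_safe_compare`` is the constant-time helper the server code in this commit already uses; the remaining names are illustrative)::

    def handle_allocate(uploads, storage_index, upload_secret, allocate):
        # Illustrative dispatch for a (possibly repeated) allocate call.
        if storage_index in uploads:
            in_progress = uploads[storage_index]
            if timing_safe_compare(in_progress.upload_secret, upload_secret):
                # Same secret: a retry, e.g. after a lost response.
                # Return the same response; change no server state.
                return in_progress.response
            # Different secret: refuse, so that uploaders cannot
            # interfere with each other's in-progress uploads.
            raise ClientSecretsException("upload secret does not match")
        uploads[storage_index] = allocate(upload_secret)
        return uploads[storage_index].response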
Discussion
``````````
@ -627,7 +640,7 @@ For example::
Read a contiguous sequence of bytes from one share in one bucket.
The response body is the raw share data (i.e., ``application/octet-stream``).
The ``Range`` header may be used to request exactly one ``bytes`` range.
The ``Range`` header may be used to request exactly one ``bytes`` range, in which case the response code will be 206 (partial content).
Interpretation and response behavior is as specified in RFC 7233 § 4.1.
Multiple ranges in a single request are *not* supported.
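The client in this commit builds such requests with werkzeug's ``Range`` helper; for instance (offsets are illustrative)::

    from werkzeug.datastructures import Range

    offset, length = 100, 50
    # werkzeug treats the end as exclusive and renders the inclusive
    # HTTP form, so this produces "bytes=100-149".
    header = Range("bytes", [(offset, offset + length)]).to_header()
    # A 206 response then carries, per RFC 7233, a Content-Range header
    # such as "bytes 100-149/<total>".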

newsfragments/3788.minor (new empty file)

newsfragments/3855.minor (new empty file)

newsfragments/3867.minor (new empty file)


@ -1,34 +0,0 @@
{ lib, buildPythonPackage, fetchPypi, isPy3k,
six, txaio, twisted, zope_interface, cffi, futures,
mock, pytest, cryptography, pynacl
}:
buildPythonPackage rec {
pname = "autobahn";
version = "19.8.1";
src = fetchPypi {
inherit pname version;
sha256 = "294e7381dd54e73834354832604ae85567caf391c39363fed0ea2bfa86aa4304";
};
propagatedBuildInputs = [ six txaio twisted zope_interface cffi cryptography pynacl ] ++
(lib.optionals (!isPy3k) [ futures ]);
checkInputs = [ mock pytest ];
checkPhase = ''
runHook preCheck
USE_TWISTED=true py.test $out
runHook postCheck
'';
# Tests do not seem to be compatible yet with pytest 5.1
# https://github.com/crossbario/autobahn-python/issues/1235
doCheck = false;
meta = with lib; {
description = "WebSocket and WAMP in Python for Twisted and asyncio.";
homepage = "https://crossbar.io/autobahn";
license = licenses.mit;
maintainers = with maintainers; [ nand0p ];
};
}


@ -1,20 +0,0 @@
{ lib, buildPythonPackage, fetchPypi, setuptools_scm }:
buildPythonPackage rec {
pname = "cbor2";
version = "5.2.0";
src = fetchPypi {
sha256 = "1gwlgjl70vlv35cgkcw3cg7b5qsmws36hs4mmh0l9msgagjs4fm3";
inherit pname version;
};
doCheck = false;
propagatedBuildInputs = [ setuptools_scm ];
meta = with lib; {
homepage = https://github.com/agronholm/cbor2;
description = "CBOR encoder/decoder";
license = licenses.mit;
};
}


@ -1,19 +0,0 @@
{ lib, buildPythonPackage, fetchPypi }:
buildPythonPackage rec {
pname = "collections-extended";
version = "1.0.3";
src = fetchPypi {
inherit pname version;
sha256 = "0lb69x23asd68n0dgw6lzxfclavrp2764xsnh45jm97njdplznkw";
};
# Tests aren't in the tarball, for 1.0.3 at least.
doCheck = false;
meta = with lib; {
homepage = https://github.com/mlenzen/collections-extended;
description = "Extra Python Collections - bags (multisets), setlists (unique list / indexed set), RangeMap and IndexedDict";
license = licenses.asl20;
};
}


@ -1,7 +0,0 @@
# This is the main entrypoint for the Tahoe-LAFS derivation.
{ pkgs ? import <nixpkgs> { } }:
# Add our Python packages to nixpkgs to simplify the expression for the
# Tahoe-LAFS derivation.
let pkgs' = pkgs.extend (import ./overlays.nix);
# Evaluate the expression for our Tahoe-LAFS derivation.
in pkgs'.python2.pkgs.callPackage ./tahoe-lafs.nix { }


@ -1,31 +0,0 @@
{ lib, buildPythonPackage, fetchPypi, zope_interface, pyrsistent, boltons
, hypothesis, testtools, pytest }:
buildPythonPackage rec {
pname = "eliot";
version = "1.7.0";
src = fetchPypi {
inherit pname version;
sha256 = "0ylyycf717s5qsrx8b9n6m38vyj2k8328lfhn8y6r31824991wv8";
};
postPatch = ''
substituteInPlace setup.py \
--replace "boltons >= 19.0.1" boltons
'';
# A seemingly random subset of the test suite fails intermittently. After
# Tahoe-LAFS is ported to Python 3 we can update to a newer Eliot and, if
# the test suite continues to fail, maybe it will be more likely that we can
# have upstream fix it for us.
doCheck = false;
checkInputs = [ testtools pytest hypothesis ];
propagatedBuildInputs = [ zope_interface pyrsistent boltons ];
meta = with lib; {
homepage = https://github.com/itamarst/eliot/;
description = "Logging library that tells you why it happened";
license = licenses.asl20;
};
}


@ -1,35 +0,0 @@
{ lib
, buildPythonPackage
, fetchPypi
}:
buildPythonPackage rec {
pname = "future";
version = "0.18.2";
src = fetchPypi {
inherit pname version;
sha256 = "sha256:0zakvfj87gy6mn1nba06sdha63rn4njm7bhh0wzyrxhcny8avgmi";
};
doCheck = false;
meta = {
description = "Clean single-source support for Python 3 and 2";
longDescription = ''
python-future is the missing compatibility layer between Python 2 and
Python 3. It allows you to use a single, clean Python 3.x-compatible
codebase to support both Python 2 and Python 3 with minimal overhead.
It provides future and past packages with backports and forward ports
of features from Python 3 and 2. It also comes with futurize and
pasteurize, customized 2to3-based scripts that help you to convert
either Py2 or Py3 code easily to support both Python 2 and 3 in a
single clean Py3-style codebase, module by module.
'';
homepage = https://python-future.org;
downloadPage = https://github.com/PythonCharmers/python-future/releases;
license = with lib.licenses; [ mit ];
maintainers = with lib.maintainers; [ prikhi ];
};
}


@ -1,36 +0,0 @@
self: super: {
python27 = super.python27.override {
packageOverrides = python-self: python-super: {
# eliot is not part of nixpkgs at all at this time.
eliot = python-self.pythonPackages.callPackage ./eliot.nix { };
# NixOS autobahn package has trollius as a dependency, although
# it is optional. Trollius is unmaintained and fails on CI.
autobahn = python-super.pythonPackages.callPackage ./autobahn.nix { };
# Porting to Python 3 is greatly aided by the future package. A
# slightly newer version than appears in nixos 19.09 is helpful.
future = python-super.pythonPackages.callPackage ./future.nix { };
# Need version of pyutil that supports Python 3. The version in 19.09
# is too old.
pyutil = python-super.pythonPackages.callPackage ./pyutil.nix { };
# Need a newer version of Twisted, too.
twisted = python-super.pythonPackages.callPackage ./twisted.nix { };
# collections-extended is not part of nixpkgs at this time.
collections-extended = python-super.pythonPackages.callPackage ./collections-extended.nix { };
# cbor2 is not part of nixpkgs at this time.
cbor2 = python-super.pythonPackages.callPackage ./cbor2.nix { };
};
};
python39 = super.python39.override {
packageOverrides = python-self: python-super: {
# collections-extended is not part of nixpkgs at this time.
collections-extended = python-super.pythonPackages.callPackage ./collections-extended.nix { };
};
};
}


@ -1,7 +0,0 @@
# This is the main entrypoint for the Tahoe-LAFS derivation.
{ pkgs ? import <nixpkgs> { } }:
# Add our Python packages to nixpkgs to simplify the expression for the
# Tahoe-LAFS derivation.
let pkgs' = pkgs.extend (import ./overlays.nix);
# Evaluate the expression for our Tahoe-LAFS derivation.
in pkgs'.python39.pkgs.callPackage ./tahoe-lafs.nix { }


@ -1,48 +0,0 @@
{ stdenv
, buildPythonPackage
, fetchPypi
, setuptoolsDarcs
, setuptoolsTrial
, simplejson
, twisted
, isPyPy
}:
buildPythonPackage rec {
pname = "pyutil";
version = "3.3.0";
src = fetchPypi {
inherit pname version;
sha256 = "8c4d4bf668c559186389bb9bce99e4b1b871c09ba252a756ccaacd2b8f401848";
};
buildInputs = [ setuptoolsDarcs setuptoolsTrial ] ++ (if doCheck then [ simplejson ] else []);
propagatedBuildInputs = [ twisted ];
# Tests fail because they try to write new code into the twisted
# package, apparently some kind of plugin.
doCheck = false;
prePatch = stdenv.lib.optionalString isPyPy ''
grep -rl 'utf-8-with-signature-unix' ./ | xargs sed -i -e "s|utf-8-with-signature-unix|utf-8|g"
'';
meta = with stdenv.lib; {
description = "Pyutil, a collection of mature utilities for Python programmers";
longDescription = ''
These are a few data structures, classes and functions which
we've needed over many years of Python programming and which
seem to be of general use to other Python programmers. Many of
the modules that have existed in pyutil over the years have
subsequently been obsoleted by new features added to the
Python language or its standard library, thus showing that
we're not alone in wanting tools like these.
'';
homepage = "http://allmydata.org/trac/pyutil";
license = licenses.gpl2Plus;
};
}

nix/sources.json (new file, 62 lines)

@ -0,0 +1,62 @@
{
"mach-nix": {
"branch": "master",
"description": "Create highly reproducible python environments",
"homepage": "",
"owner": "davhau",
"repo": "mach-nix",
"rev": "bdc97ba6b2ecd045a467b008cff4ae337b6a7a6b",
"sha256": "12b3jc0g0ak6s93g3ifvdpwxbyqx276k1kl66bpwz8a67qjbcbwf",
"type": "tarball",
"url": "https://github.com/davhau/mach-nix/archive/bdc97ba6b2ecd045a467b008cff4ae337b6a7a6b.tar.gz",
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
},
"niv": {
"branch": "master",
"description": "Easy dependency management for Nix projects",
"homepage": "https://github.com/nmattia/niv",
"owner": "nmattia",
"repo": "niv",
"rev": "5830a4dd348d77e39a0f3c4c762ff2663b602d4c",
"sha256": "1d3lsrqvci4qz2hwjrcnd8h5vfkg8aypq3sjd4g3izbc8frwz5sm",
"type": "tarball",
"url": "https://github.com/nmattia/niv/archive/5830a4dd348d77e39a0f3c4c762ff2663b602d4c.tar.gz",
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
},
"nixpkgs-21.05": {
"branch": "nixos-21.05",
"description": "Nix Packages collection",
"homepage": "",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "0fd9ee1aa36ce865ad273f4f07fdc093adeb5c00",
"sha256": "1mr2qgv5r2nmf6s3gqpcjj76zpsca6r61grzmqngwm0xlh958smx",
"type": "tarball",
"url": "https://github.com/NixOS/nixpkgs/archive/0fd9ee1aa36ce865ad273f4f07fdc093adeb5c00.tar.gz",
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
},
"nixpkgs-21.11": {
"branch": "nixos-21.11",
"description": "Nix Packages collection",
"homepage": "",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "6c4b9f1a2fd761e2d384ef86cff0d208ca27fdca",
"sha256": "1yl5gj0mzczhl1j8sl8iqpwa1jzsgr12fdszw9rq13cdig2a2r5f",
"type": "tarball",
"url": "https://github.com/nixos/nixpkgs/archive/6c4b9f1a2fd761e2d384ef86cff0d208ca27fdca.tar.gz",
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
},
"pypi-deps-db": {
"branch": "master",
"description": "Probably the most complete python dependency database",
"homepage": "",
"owner": "DavHau",
"repo": "pypi-deps-db",
"rev": "0f6de8bf1f186c275af862ec9667abb95aae8542",
"sha256": "1ygw9pywyl4p25hx761d1sbwl3qjhm630fa36gdf6b649im4mx8y",
"type": "tarball",
"url": "https://github.com/DavHau/pypi-deps-db/archive/0f6de8bf1f186c275af862ec9667abb95aae8542.tar.gz",
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
}
}

nix/sources.nix (new file, 174 lines)

@ -0,0 +1,174 @@
# This file has been generated by Niv.
let
#
# The fetchers. fetch_<type> fetches specs of type <type>.
#
fetch_file = pkgs: name: spec:
let
name' = sanitizeName name + "-src";
in
if spec.builtin or true then
builtins_fetchurl { inherit (spec) url sha256; name = name'; }
else
pkgs.fetchurl { inherit (spec) url sha256; name = name'; };
fetch_tarball = pkgs: name: spec:
let
name' = sanitizeName name + "-src";
in
if spec.builtin or true then
builtins_fetchTarball { name = name'; inherit (spec) url sha256; }
else
pkgs.fetchzip { name = name'; inherit (spec) url sha256; };
fetch_git = name: spec:
let
ref =
if spec ? ref then spec.ref else
if spec ? branch then "refs/heads/${spec.branch}" else
if spec ? tag then "refs/tags/${spec.tag}" else
abort "In git source '${name}': Please specify `ref`, `tag` or `branch`!";
in
builtins.fetchGit { url = spec.repo; inherit (spec) rev; inherit ref; };
fetch_local = spec: spec.path;
fetch_builtin-tarball = name: throw
''[${name}] The niv type "builtin-tarball" is deprecated. You should instead use `builtin = true`.
$ niv modify ${name} -a type=tarball -a builtin=true'';
fetch_builtin-url = name: throw
''[${name}] The niv type "builtin-url" will soon be deprecated. You should instead use `builtin = true`.
$ niv modify ${name} -a type=file -a builtin=true'';
#
# Various helpers
#
# https://github.com/NixOS/nixpkgs/pull/83241/files#diff-c6f540a4f3bfa4b0e8b6bafd4cd54e8bR695
sanitizeName = name:
(
concatMapStrings (s: if builtins.isList s then "-" else s)
(
builtins.split "[^[:alnum:]+._?=-]+"
((x: builtins.elemAt (builtins.match "\\.*(.*)" x) 0) name)
)
);
# The set of packages used when specs are fetched using non-builtins.
mkPkgs = sources: system:
let
sourcesNixpkgs =
import (builtins_fetchTarball { inherit (sources.nixpkgs) url sha256; }) { inherit system; };
hasNixpkgsPath = builtins.any (x: x.prefix == "nixpkgs") builtins.nixPath;
hasThisAsNixpkgsPath = <nixpkgs> == ./.;
in
if builtins.hasAttr "nixpkgs" sources
then sourcesNixpkgs
else if hasNixpkgsPath && ! hasThisAsNixpkgsPath then
import <nixpkgs> {}
else
abort
''
Please specify either <nixpkgs> (through -I or NIX_PATH=nixpkgs=...) or
add a package called "nixpkgs" to your sources.json.
'';
# The actual fetching function.
fetch = pkgs: name: spec:
if ! builtins.hasAttr "type" spec then
abort "ERROR: niv spec ${name} does not have a 'type' attribute"
else if spec.type == "file" then fetch_file pkgs name spec
else if spec.type == "tarball" then fetch_tarball pkgs name spec
else if spec.type == "git" then fetch_git name spec
else if spec.type == "local" then fetch_local spec
else if spec.type == "builtin-tarball" then fetch_builtin-tarball name
else if spec.type == "builtin-url" then fetch_builtin-url name
else
abort "ERROR: niv spec ${name} has unknown type ${builtins.toJSON spec.type}";
# If the environment variable NIV_OVERRIDE_${name} is set, then use
# the path directly as opposed to the fetched source.
replace = name: drv:
let
saneName = stringAsChars (c: if isNull (builtins.match "[a-zA-Z0-9]" c) then "_" else c) name;
ersatz = builtins.getEnv "NIV_OVERRIDE_${saneName}";
in
if ersatz == "" then drv else
# this turns the string into an actual Nix path (for both absolute and
# relative paths)
if builtins.substring 0 1 ersatz == "/" then /. + ersatz else /. + builtins.getEnv "PWD" + "/${ersatz}";
# Ports of functions for older nix versions
# a Nix version of mapAttrs if the built-in doesn't exist
mapAttrs = builtins.mapAttrs or (
f: set: with builtins;
listToAttrs (map (attr: { name = attr; value = f attr set.${attr}; }) (attrNames set))
);
# https://github.com/NixOS/nixpkgs/blob/0258808f5744ca980b9a1f24fe0b1e6f0fecee9c/lib/lists.nix#L295
range = first: last: if first > last then [] else builtins.genList (n: first + n) (last - first + 1);
# https://github.com/NixOS/nixpkgs/blob/0258808f5744ca980b9a1f24fe0b1e6f0fecee9c/lib/strings.nix#L257
stringToCharacters = s: map (p: builtins.substring p 1 s) (range 0 (builtins.stringLength s - 1));
# https://github.com/NixOS/nixpkgs/blob/0258808f5744ca980b9a1f24fe0b1e6f0fecee9c/lib/strings.nix#L269
stringAsChars = f: s: concatStrings (map f (stringToCharacters s));
concatMapStrings = f: list: concatStrings (map f list);
concatStrings = builtins.concatStringsSep "";
# https://github.com/NixOS/nixpkgs/blob/8a9f58a375c401b96da862d969f66429def1d118/lib/attrsets.nix#L331
optionalAttrs = cond: as: if cond then as else {};
# fetchTarball version that is compatible between all the versions of Nix
builtins_fetchTarball = { url, name ? null, sha256 }@attrs:
let
inherit (builtins) lessThan nixVersion fetchTarball;
in
if lessThan nixVersion "1.12" then
fetchTarball ({ inherit url; } // (optionalAttrs (!isNull name) { inherit name; }))
else
fetchTarball attrs;
# fetchurl version that is compatible between all the versions of Nix
builtins_fetchurl = { url, name ? null, sha256 }@attrs:
let
inherit (builtins) lessThan nixVersion fetchurl;
in
if lessThan nixVersion "1.12" then
fetchurl ({ inherit url; } // (optionalAttrs (!isNull name) { inherit name; }))
else
fetchurl attrs;
# Create the final "sources" from the config
mkSources = config:
mapAttrs (
name: spec:
if builtins.hasAttr "outPath" spec
then abort
"The values in sources.json should not have an 'outPath' attribute"
else
spec // { outPath = replace name (fetch config.pkgs name spec); }
) config.sources;
# The "config" used by the fetchers
mkConfig =
{ sourcesFile ? if builtins.pathExists ./sources.json then ./sources.json else null
, sources ? if isNull sourcesFile then {} else builtins.fromJSON (builtins.readFile sourcesFile)
, system ? builtins.currentSystem
, pkgs ? mkPkgs sources system
}: rec {
# The sources, i.e. the attribute set of spec name to spec
inherit sources;
# The "pkgs" (evaluated nixpkgs) to use for e.g. non-builtin fetchers
inherit pkgs;
};
in
mkSources (mkConfig {}) // { __functor = _: settings: mkSources (mkConfig settings); }


@ -1,126 +0,0 @@
{ fetchFromGitHub, lib
, git, python
, twisted, foolscap, zfec
, setuptools, setuptoolsTrial, pyasn1, zope_interface
, service-identity, pyyaml, magic-wormhole, treq, appdirs
, beautifulsoup4, eliot, autobahn, cryptography, netifaces
, html5lib, pyutil, distro, configparser, klein, cbor2
}:
python.pkgs.buildPythonPackage rec {
# Most of the time this is not exactly the release version (eg 1.17.1).
# Give it a `post` component to make it look newer than the release version
# and we'll bump this up at the time of each release.
#
# It's difficult to read the version from Git the way the Python code does
# for two reasons. First, doing so involves populating the Nix expression
# with values from the source. Nix calls this "import from derivation" or
# "IFD" (<https://nixos.wiki/wiki/Import_From_Derivation>). This is
# discouraged in most cases - including this one, I think. Second, the
# Python code reads the contents of `.git` to determine its version. `.git`
# is not a reproducible artifact (in the sense of "reproducible builds") so
# it is excluded from the source tree by default. When it is included, the
# package tends to be frequently spuriously rebuilt.
version = "1.17.1.post1";
name = "tahoe-lafs-${version}";
src = lib.cleanSourceWith {
src = ../.;
filter = name: type:
let
basename = baseNameOf name;
split = lib.splitString ".";
join = builtins.concatStringsSep ".";
ext = join (builtins.tail (split basename));
# Build up a bunch of knowledge about what kind of file this is.
isTox = type == "directory" && basename == ".tox";
isTrialTemp = type == "directory" && basename == "_trial_temp";
isVersion = basename == "_version.py";
isBytecode = ext == "pyc" || ext == "pyo";
isBackup = lib.hasSuffix "~" basename;
isTemporary = lib.hasPrefix "#" basename && lib.hasSuffix "#" basename;
isSymlink = type == "symlink";
isGit = type == "directory" && basename == ".git";
in
# Exclude all these things
! (isTox
|| isTrialTemp
|| isVersion
|| isBytecode
|| isBackup
|| isTemporary
|| isSymlink
|| isGit
);
};
postPatch = ''
# Chroots don't have /etc/hosts and /etc/resolv.conf, so work around
# that.
for i in $(find src/allmydata/test -type f)
do
sed -i "$i" -e"s/localhost/127.0.0.1/g"
done
# Some tests are flaky or fail to skip when dependencies are missing.
# This list is over-zealous because it's more work to disable individual
# tests within a module.
# Many of these tests don't properly skip when i2p or tor dependencies are
# not supplied (and we are not supplying them).
rm src/allmydata/test/test_i2p_provider.py
rm src/allmydata/test/test_connections.py
rm src/allmydata/test/cli/test_create.py
# Generate _version.py ourselves since we can't rely on the Python code
# extracting the information from the .git directory we excluded.
cat > src/allmydata/_version.py <<EOF
# This _version.py is generated from metadata by nix/tahoe-lafs.nix.
__pkgname__ = "tahoe-lafs"
real_version = "${version}"
full_version = "${version}"
branch = "master"
verstr = "${version}"
__version__ = verstr
EOF
'';
nativeBuildInputs = [
git
];
propagatedBuildInputs = with python.pkgs; [
twisted foolscap zfec appdirs
setuptoolsTrial pyasn1 zope_interface
service-identity pyyaml magic-wormhole
eliot autobahn cryptography netifaces setuptools
future pyutil distro configparser collections-extended
klein cbor2 treq
];
checkInputs = with python.pkgs; [
hypothesis
testtools
fixtures
beautifulsoup4
html5lib
tenacity
prometheus_client
];
checkPhase = ''
if ! $out/bin/tahoe --version | grep --fixed-strings "${version}"; then
echo "Package version:"
$out/bin/tahoe --version
echo "Did not contain expected:"
echo "${version}"
exit 1
else
echo "Version string contained expected value \"${version}.\""
fi
${python}/bin/python -m twisted.trial -j $NIX_BUILD_CORES allmydata
'';
}


@ -1,63 +0,0 @@
{ stdenv
, buildPythonPackage
, fetchPypi
, python
, zope_interface
, incremental
, automat
, constantly
, hyperlink
, pyhamcrest
, attrs
, pyopenssl
, service-identity
, setuptools
, idna
, bcrypt
}:
buildPythonPackage rec {
pname = "Twisted";
version = "19.10.0";
src = fetchPypi {
inherit pname version;
extension = "tar.bz2";
sha256 = "7394ba7f272ae722a74f3d969dcf599bc4ef093bc392038748a490f1724a515d";
};
propagatedBuildInputs = [ zope_interface incremental automat constantly hyperlink pyhamcrest attrs setuptools bcrypt ];
passthru.extras.tls = [ pyopenssl service-identity idna ];
# Patch t.p._inotify to point to libc. Without this,
# twisted.python.runtime.platform.supportsINotify() == False
patchPhase = stdenv.lib.optionalString stdenv.isLinux ''
substituteInPlace src/twisted/python/_inotify.py --replace \
"ctypes.util.find_library('c')" "'${stdenv.glibc.out}/lib/libc.so.6'"
'';
# Generate Twisted's plug-in cache. Twisted users must do it as well. See
# http://twistedmatrix.com/documents/current/core/howto/plugin.html#auto3
# and http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=477103 for
# details.
postFixup = ''
$out/bin/twistd --help > /dev/null
'';
checkPhase = ''
${python.interpreter} -m unittest discover -s twisted/test
'';
# Tests require network
doCheck = false;
meta = with stdenv.lib; {
homepage = https://twistedmatrix.com/;
description = "Twisted, an event-driven networking engine written in Python";
longDescription = ''
Twisted is an event-driven networking engine written in Python
and licensed under the MIT license.
'';
license = licenses.mit;
maintainers = [ ];
};
}


@ -138,11 +138,15 @@ install_requires = [
# Backported configparser for Python 2:
"configparser ; python_version < '3.0'",
# For the RangeMap datastructure.
"collections-extended",
# For the RangeMap datastructure. Need 2.0.2 at least for bugfixes. Python
# 2 doesn't actually need this, since HTTP storage protocol isn't supported
# there, so we just pick whatever version is available so that the code imports.
"collections-extended >= 2.0.2 ; python_version > '3.0'",
"collections-extended ; python_version < '3.0'",
# HTTP server and client
"klein",
"werkzeug",
"treq",
"cbor2"
]


@ -13,23 +13,39 @@ if PY2:
# fmt: off
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
# fmt: on
from collections import defaultdict
Optional = Set = defaultdict(
lambda: None
) # some garbage to just make this module import
else:
# typing module not available in Python 2, and we only do type checking in
# Python 3 anyway.
from typing import Union
from typing import Union, Set, Optional
from treq.testing import StubTreq
from base64 import b64encode
import attr
# TODO Make sure to import the pure Python version?
from cbor2 import loads
from cbor2 import loads, dumps
from collections_extended import RangeMap
from werkzeug.datastructures import Range, ContentRange
from twisted.web.http_headers import Headers
from twisted.internet.defer import inlineCallbacks, returnValue, fail
from twisted.web import http
from twisted.internet.defer import inlineCallbacks, returnValue, fail, Deferred
from hyperlink import DecodedURL
import treq
from .http_common import swissnum_auth_header, Secrets
from .common import si_b2a
def _encode_si(si): # type: (bytes) -> str
"""Encode the storage index into Unicode string."""
return str(si_b2a(si), "ascii")
class ClientException(Exception):
"""An unexpected error."""
@ -42,9 +58,12 @@ def _decode_cbor(response):
return fail(ClientException(response.code, response.phrase))
def swissnum_auth_header(swissnum): # type: (bytes) -> bytes
"""Return value for ``Authentication`` header."""
return b"Tahoe-LAFS " + b64encode(swissnum).strip()
@attr.s
class ImmutableCreateResult(object):
"""Result of creating a storage index for an immutable."""
already_have = attr.ib(type=Set[int])
allocated = attr.ib(type=Set[int])
class StorageClient(object):
@ -59,25 +78,45 @@ class StorageClient(object):
self._swissnum = swissnum
self._treq = treq
def _get_headers(self): # type: () -> Headers
def _url(self, path):
"""Get a URL relative to the base URL."""
return self._base_url.click(path)
def _get_headers(self, headers): # type: (Optional[Headers]) -> Headers
"""Return the basic headers to be used by default."""
headers = Headers()
if headers is None:
headers = Headers()
headers.addRawHeader(
"Authorization",
swissnum_auth_header(self._swissnum),
)
return headers
def _request(self, method, url, secrets, **kwargs):
def _request(
self,
method,
url,
lease_renew_secret=None,
lease_cancel_secret=None,
upload_secret=None,
headers=None,
**kwargs
):
"""
Like ``treq.request()``, but additional argument of secrets mapping
``http_server.Secret`` to the bytes value of the secret.
Like ``treq.request()``, but with optional secrets that get translated
into corresponding HTTP headers.
"""
headers = self._get_headers()
for key, value in secrets.items():
headers = self._get_headers(headers)
for secret, value in [
(Secrets.LEASE_RENEW, lease_renew_secret),
(Secrets.LEASE_CANCEL, lease_cancel_secret),
(Secrets.UPLOAD, upload_secret),
]:
if value is None:
continue
headers.addRawHeader(
"X-Tahoe-Authorization",
b"%s %s" % (key.value.encode("ascii"), b64encode(value).strip())
b"%s %s" % (secret.value.encode("ascii"), b64encode(value).strip()),
)
return self._treq.request(method, url, headers=headers, **kwargs)
@ -86,7 +125,160 @@ class StorageClient(object):
"""
Return the version metadata for the server.
"""
url = self._base_url.click("/v1/version")
response = yield self._request("GET", url, {})
url = self._url("/v1/version")
response = yield self._request("GET", url)
decoded_response = yield _decode_cbor(response)
returnValue(decoded_response)
@attr.s
class UploadProgress(object):
"""
Progress of immutable upload, per the server.
"""
# True when upload has finished.
finished = attr.ib(type=bool)
# Remaining ranges to upload.
required = attr.ib(type=RangeMap)
class StorageClientImmutables(object):
"""
APIs for interacting with immutables.
"""
def __init__(self, client): # type: (StorageClient) -> None
self._client = client
@inlineCallbacks
def create(
self,
storage_index,
share_numbers,
allocated_size,
upload_secret,
lease_renew_secret,
lease_cancel_secret,
): # type: (bytes, Set[int], int, bytes, bytes, bytes) -> Deferred[ImmutableCreateResult]
"""
Create a new storage index for an immutable.
TODO https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3857 retry
internally on failure, to ensure the operation fully succeeds. If a
sufficient number of failures occur, the result may fire with an
error, but there's no expectation that user code needs to have a
recovery codepath; it will most likely just report an error to the
user.
Result fires when creating the storage index succeeds; if creating the
storage index fails, the result will fire with an exception.
"""
url = self._client._url("/v1/immutable/" + _encode_si(storage_index))
message = dumps(
{"share-numbers": share_numbers, "allocated-size": allocated_size}
)
response = yield self._client._request(
"POST",
url,
lease_renew_secret=lease_renew_secret,
lease_cancel_secret=lease_cancel_secret,
upload_secret=upload_secret,
data=message,
headers=Headers({"content-type": ["application/cbor"]}),
)
decoded_response = yield _decode_cbor(response)
returnValue(
ImmutableCreateResult(
already_have=decoded_response["already-have"],
allocated=decoded_response["allocated"],
)
)
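# For reference, the CBOR message built above round-trips like this
# (values are illustrative; cbor2 encodes Python sets with a CBOR tag
# and decodes them back to sets):
#
#     from cbor2 import dumps, loads
#     message = dumps({"share-numbers": {1, 7}, "allocated-size": 65536})
#     assert loads(message) == {"share-numbers": {1, 7}, "allocated-size": 65536}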
@inlineCallbacks
def write_share_chunk(
self, storage_index, share_number, upload_secret, offset, data
): # type: (bytes, int, bytes, int, bytes) -> Deferred[UploadProgress]
"""
Upload a chunk of data for a specific share.
TODO https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3857 The
implementation should retry failed uploads transparently a number of
times, so that if a failure percolates up, the caller can assume the
failure isn't a short-term blip.
Result fires when the upload succeeds, with a boolean indicating
whether the _complete_ share (i.e. all chunks, not just this one) has
been uploaded.
"""
url = self._client._url(
"/v1/immutable/{}/{}".format(_encode_si(storage_index), share_number)
)
response = yield self._client._request(
"PATCH",
url,
upload_secret=upload_secret,
data=data,
headers=Headers(
{
"content-range": [
ContentRange("bytes", offset, offset+len(data)).to_header()
]
}
),
)
if response.code == http.OK:
# Upload is still unfinished.
finished = False
elif response.code == http.CREATED:
# Upload is done!
finished = True
else:
raise ClientException(
response.code,
)
body = loads((yield response.content()))
remaining = RangeMap()
for chunk in body["required"]:
remaining.set(True, chunk["begin"], chunk["end"])
returnValue(UploadProgress(finished=finished, required=remaining))
@inlineCallbacks
def read_share_chunk(
self, storage_index, share_number, offset, length
): # type: (bytes, int, int, int) -> Deferred[bytes]
"""
Download a chunk of data from a share.
TODO https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3857 Failed
downloads should be transparently retried and redownloaded by the
implementation a few times so that if a failure percolates up, the
caller can assume the failure isn't a short-term blip.
NOTE: the underlying HTTP protocol is much more flexible than this API,
so a future refactor may expand this in order to simplify the calling
code and perhaps download data more efficiently. But then again maybe
the HTTP protocol will be simplified, see
https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3777
"""
url = self._client._url(
"/v1/immutable/{}/{}".format(_encode_si(storage_index), share_number)
)
response = yield self._client._request(
"GET",
url,
headers=Headers(
{
"range": [
Range("bytes", [(offset, offset + length)]).to_header()
]
}
),
)
if response.code == http.PARTIAL_CONTENT:
body = yield response.content()
returnValue(body)
else:
raise ClientException(
response.code,
)


@ -0,0 +1,25 @@
"""
Common HTTP infrastructure for the storage server.
"""
from future.utils import PY2
if PY2:
# fmt: off
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
# fmt: on
from enum import Enum
from base64 import b64encode
def swissnum_auth_header(swissnum): # type: (bytes) -> bytes
"""Return value for ``Authentication`` header."""
return b"Tahoe-LAFS " + b64encode(swissnum).strip()
class Secrets(Enum):
"""Different kinds of secrets the client may send."""
LEASE_RENEW = "lease-renew-secret"
LEASE_CANCEL = "lease-cancel-secret"
UPLOAD = "upload-secret"


@ -17,28 +17,23 @@ else:
from typing import Dict, List, Set
from functools import wraps
from enum import Enum
from base64 import b64decode
from klein import Klein
from twisted.web import http
import attr
from werkzeug.http import parse_range_header, parse_content_range_header
# TODO Make sure to use pure Python versions?
from cbor2 import dumps
from cbor2 import dumps, loads
from .server import StorageServer
from .http_client import swissnum_auth_header
from .http_common import swissnum_auth_header, Secrets
from .common import si_a2b
from .immutable import BucketWriter
from ..util.hashutil import timing_safe_compare
class Secrets(Enum):
"""Different kinds of secrets the client may send."""
LEASE_RENEW = "lease-renew-secret"
LEASE_CANCEL = "lease-cancel-secret"
UPLOAD = "upload-secret"
class ClientSecretsException(Exception):
"""The client did not send the appropriate secrets."""
@ -117,6 +112,7 @@ def _authorized_route(app, required_secrets, *route_args, **route_kwargs):
def decorator(f):
@app.route(*route_args, **route_kwargs)
@_authorization_decorator(required_secrets)
@wraps(f)
def handle_route(*args, **kwargs):
return f(*args, **kwargs)
@ -125,6 +121,19 @@ def _authorized_route(app, required_secrets, *route_args, **route_kwargs):
return decorator
@attr.s
class StorageIndexUploads(object):
"""
In-progress upload to storage index.
"""
# Map share number to BucketWriter
shares = attr.ib() # type: Dict[int,BucketWriter]
# The upload key.
upload_secret = attr.ib() # type: bytes
class HTTPServer(object):
"""
An HTTP interface to the storage server.
@ -137,6 +146,8 @@ class HTTPServer(object):
): # type: (StorageServer, bytes) -> None
self._storage_server = storage_server
self._swissnum = swissnum
# Maps storage index to StorageIndexUploads:
self._uploads = {} # type: Dict[bytes,StorageIndexUploads]
def get_resource(self):
"""Return twisted.web ``Resource`` for this object."""
@ -144,6 +155,8 @@ class HTTPServer(object):
def _cbor(self, request, data):
"""Return CBOR-encoded data."""
# TODO Might want to optionally send JSON someday, based on Accept
# headers, see https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3861
request.setHeader("Content-Type", "application/cbor")
# TODO if data is big, maybe want to use a temporary file eventually...
return dumps(data)
@ -154,3 +167,123 @@ class HTTPServer(object):
def version(self, request, authorization):
"""Return version information."""
return self._cbor(request, self._storage_server.get_version())
##### Immutable APIs #####
@_authorized_route(
_app,
{Secrets.LEASE_RENEW, Secrets.LEASE_CANCEL, Secrets.UPLOAD},
"/v1/immutable/<string:storage_index>",
methods=["POST"],
)
def allocate_buckets(self, request, authorization, storage_index):
"""Allocate buckets."""
storage_index = si_a2b(storage_index.encode("ascii"))
info = loads(request.content.read())
upload_secret = authorization[Secrets.UPLOAD]
if storage_index in self._uploads:
# Pre-existing upload.
in_progress = self._uploads[storage_index]
if timing_safe_compare(in_progress.upload_secret, upload_secret):
# Same session.
# TODO add BucketWriters only for new shares that don't already have buckets; see the HTTP spec for details.
# The backend code may already implement this logic.
pass
else:
# TODO Fail, since the secret doesn't match.
pass
else:
# New upload.
already_got, sharenum_to_bucket = self._storage_server.allocate_buckets(
storage_index,
renew_secret=authorization[Secrets.LEASE_RENEW],
cancel_secret=authorization[Secrets.LEASE_CANCEL],
sharenums=info["share-numbers"],
allocated_size=info["allocated-size"],
)
self._uploads[storage_index] = StorageIndexUploads(
shares=sharenum_to_bucket, upload_secret=authorization[Secrets.UPLOAD]
)
return self._cbor(
request,
{
"already-have": set(already_got),
"allocated": set(sharenum_to_bucket),
},
)
@_authorized_route(
_app,
{Secrets.UPLOAD},
"/v1/immutable/<string:storage_index>/<int:share_number>",
methods=["PATCH"],
)
def write_share_data(self, request, authorization, storage_index, share_number):
"""Write data to an in-progress immutable upload."""
storage_index = si_a2b(storage_index.encode("ascii"))
content_range = parse_content_range_header(request.getHeader("content-range"))
# TODO in https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3860
# 1. Malformed header should result in error 416
# 2. Non-bytes unit should result in error 416
# 3. Missing header means full upload in one request
# 4. Impossible range should result in error 416
offset = content_range.start
# TODO basic checks on validity of start, offset, and content-range in general. also of share_number.
# TODO basic check that body isn't infinite. require content-length? or maybe we should require content-range (it's optional now)? if so, needs to be reflected in protocol spec.
data = request.content.read()
try:
bucket = self._uploads[storage_index].shares[share_number]
except (KeyError, IndexError):
# TODO return 404
raise
finished = bucket.write(offset, data)
# TODO if raises ConflictingWriteError, return HTTP CONFLICT code.
if finished:
bucket.close()
request.setResponseCode(http.CREATED)
else:
request.setResponseCode(http.OK)
required = []
for start, end, _ in bucket.required_ranges().ranges():
required.append({"begin": start, "end": end})
return self._cbor(request, {"required": required})
@_authorized_route(
_app,
set(),
"/v1/immutable/<string:storage_index>/<int:share_number>",
methods=["GET"],
)
def read_share_chunk(self, request, authorization, storage_index, share_number):
"""Read a chunk for an already uploaded immutable."""
# TODO in https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3860
# 1. basic checks on validity on storage index, share number
# 2. missing range header should have response code 200 and return whole thing
# 3. malformed range header should result in error? or return everything?
# 4. non-bytes range results in error
# 5. ranges make sense semantically (positive, etc.)
# 6. multiple ranges fails with error
# 7. missing end of range means "to the end of share"
storage_index = si_a2b(storage_index.encode("ascii"))
range_header = parse_range_header(request.getHeader("range"))
offset, end = range_header.ranges[0]
assert end is not None # TODO support this case
# TODO if not found, 404
bucket = self._storage_server.get_buckets(storage_index)[share_number]
data = bucket.read(offset, end - offset)
request.setResponseCode(http.PARTIAL_CONTENT)
# TODO set content-range on response. We need to expand the
# BucketReader interface to return the share's length.
#
# request.setHeader(
# "content-range", range_header.make_content_range(share_length).to_header()
# )
return data
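For reference, the werkzeug helpers these handlers rely on behave as follows (values are illustrative):

    from werkzeug.http import parse_range_header, parse_content_range_header

    rng = parse_range_header("bytes=0-9")
    # rng.ranges == [(0, 10)]: werkzeug converts the inclusive HTTP form
    # into an end-exclusive pair, hence offset=0, end=10 above.

    crng = parse_content_range_header("bytes 10-19/100")
    # crng.start == 10, crng.stop == 20, crng.length == 100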

View File

@ -372,16 +372,29 @@ class BucketWriter(object):
self._clock = clock
self._timeout = clock.callLater(30 * 60, self._abort_due_to_timeout)
def required_ranges(self): # type: () -> RangeMap
"""
Return which ranges still need to be written.
"""
result = RangeMap()
result.set(True, 0, self._max_size)
for start, end, _ in self._already_written.ranges():
result.delete(start, end)
return result
def allocated_size(self):
return self._max_size
def write(self, offset, data):
def write(self, offset, data): # type: (int, bytes) -> bool
"""
Write data at given offset, return whether the upload is complete.
"""
# Delay the timeout, since we received data:
self._timeout.reset(30 * 60)
start = self._clock.seconds()
precondition(not self.closed)
if self.throw_out_all_data:
return
return False
# Make sure we're not conflicting with existing data:
end = offset + len(data)
@ -399,6 +412,12 @@ class BucketWriter(object):
self.ss.add_latency("write", self._clock.seconds() - start)
self.ss.count("write")
# Return whether the whole thing has been written. See
# https://github.com/mlenzen/collections-extended/issues/169 and
# https://github.com/mlenzen/collections-extended/issues/172 for why
# it's done this way.
return sum([mr.stop - mr.start for mr in self._already_written.ranges()]) == self._max_size
def close(self):
precondition(not self.closed)
self._timeout.cancel()
@ -485,7 +504,7 @@ class FoolscapBucketWriter(Referenceable): # type: ignore # warner/foolscap#78
self._bucket_writer = bucket_writer
def remote_write(self, offset, data):
return self._bucket_writer.write(offset, data)
self._bucket_writer.write(offset, data)
def remote_close(self):
return self._bucket_writer.close()
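A quick illustration of the RangeMap bookkeeping performed by ``required_ranges`` and ``write`` above, using collections-extended with a toy 10-byte share (values are illustrative):

    from collections_extended import RangeMap

    max_size = 10
    written = RangeMap()
    written.set(True, 0, 4)   # first chunk covers bytes [0, 4)
    written.set(True, 6, 10)  # a later chunk covers bytes [6, 10)

    # required_ranges(): start from the full extent, delete what was written.
    required = RangeMap()
    required.set(True, 0, max_size)
    for start, stop, _ in written.ranges():
        required.delete(start, stop)
    # required now maps only [4, 6) -- the gap still to be uploaded.

    # The completeness check from write(): total bytes written vs. max size.
    done = sum(mr.stop - mr.start for mr in written.ranges()) == max_size  # False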


@ -353,6 +353,9 @@ class StorageServer(service.MultiService):
max_space_per_bucket, lease_info,
clock=self._clock)
if self.no_storage:
# Really this should be done by having a separate class for
# this situation; see
# https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3862
bw.throw_out_all_data = True
bucketwriters[shnum] = bw
self._bucket_writers[incominghome] = bw


@ -10,14 +10,15 @@ from future.utils import PY2
if PY2:
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
from twisted.trial import unittest
from ..common import AsyncTestCase
from foolscap.api import flushEventualQueue
from allmydata.monitor import Monitor
from allmydata.mutable.common import CorruptShareError
from .util import PublishMixin, corrupt, CheckerMixin
class Checker(unittest.TestCase, CheckerMixin, PublishMixin):
class Checker(AsyncTestCase, CheckerMixin, PublishMixin):
def setUp(self):
super(Checker, self).setUp()
return self.publish_one()


@ -10,11 +10,14 @@ from future.utils import PY2
if PY2:
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
from twisted.trial import unittest
from ..common import SyncTestCase
from allmydata.mutable.publish import MutableData
from testtools.matchers import Equals, HasLength
class DataHandle(unittest.TestCase):
class DataHandle(SyncTestCase):
def setUp(self):
super(DataHandle, self).setUp()
self.test_data = b"Test Data" * 50000
self.uploadable = MutableData(self.test_data)
@ -26,13 +29,13 @@ class DataHandle(unittest.TestCase):
data = b"".join(data)
start = i
end = i + chunk_size
self.failUnlessEqual(data, self.test_data[start:end])
self.assertThat(data, Equals(self.test_data[start:end]))
def test_datahandle_get_size(self):
actual_size = len(self.test_data)
size = self.uploadable.get_size()
self.failUnlessEqual(size, actual_size)
self.assertThat(size, Equals(actual_size))
def test_datahandle_get_size_out_of_order(self):
@ -40,14 +43,14 @@ class DataHandle(unittest.TestCase):
# disturbing the location of the seek pointer.
chunk_size = 100
data = self.uploadable.read(chunk_size)
self.failUnlessEqual(b"".join(data), self.test_data[:chunk_size])
self.assertThat(b"".join(data), Equals(self.test_data[:chunk_size]))
# Now get the size.
size = self.uploadable.get_size()
self.failUnlessEqual(size, len(self.test_data))
self.assertThat(self.test_data, HasLength(size))
# Now get more data. We should be right where we left off.
more_data = self.uploadable.read(chunk_size)
start = chunk_size
end = chunk_size * 2
self.failUnlessEqual(b"".join(more_data), self.test_data[start:end])
self.assertThat(b"".join(more_data), Equals(self.test_data[start:end]))


@ -10,11 +10,12 @@ from future.utils import PY2
if PY2:
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
from twisted.trial import unittest
from ..common import AsyncTestCase
from .util import FakeStorage, make_nodemaker
class DifferentEncoding(unittest.TestCase):
class DifferentEncoding(AsyncTestCase):
def setUp(self):
super(DifferentEncoding, self).setUp()
self._storage = s = FakeStorage()
self.nodemaker = make_nodemaker(s)


@ -11,12 +11,14 @@ from future.utils import PY2
if PY2:
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
from twisted.trial import unittest
from ..common import SyncTestCase
from allmydata.mutable.common import NeedMoreDataError, UncoordinatedWriteError
class Exceptions(unittest.TestCase):
class Exceptions(SyncTestCase):
def test_repr(self):
nmde = NeedMoreDataError(100, 50, 100)
self.failUnless("NeedMoreDataError" in repr(nmde), repr(nmde))
self.assertTrue("NeedMoreDataError" in repr(nmde), msg=repr(nmde))
self.assertTrue("NeedMoreDataError" in repr(nmde), msg=repr(nmde))
ucwe = UncoordinatedWriteError()
self.failUnless("UncoordinatedWriteError" in repr(ucwe), repr(ucwe))
self.assertTrue("UncoordinatedWriteError" in repr(ucwe), msg=repr(ucwe))


@ -12,11 +12,13 @@ if PY2:
import os
from io import BytesIO
from twisted.trial import unittest
from ..common import SyncTestCase
from allmydata.mutable.publish import MutableFileHandle
class FileHandle(unittest.TestCase):
class FileHandle(SyncTestCase):
def setUp(self):
super(FileHandle, self).setUp()
self.test_data = b"Test Data" * 50000
self.sio = BytesIO(self.test_data)
self.uploadable = MutableFileHandle(self.sio)


@ -12,7 +12,14 @@ if PY2:
from six.moves import cStringIO as StringIO
from twisted.internet import defer, reactor
from twisted.trial import unittest
from ..common import AsyncBrokenTestCase
from testtools.matchers import (
Equals,
Contains,
HasLength,
Is,
IsInstance,
)
from allmydata import uri, client
from allmydata.util.consumer import MemoryConsumer
from allmydata.interfaces import SDMF_VERSION, MDMF_VERSION, DownloadStopped
@ -29,12 +36,13 @@ from .util import (
make_peer,
)
class Filenode(unittest.TestCase, testutil.ShouldFailMixin):
class Filenode(AsyncBrokenTestCase, testutil.ShouldFailMixin):
# this used to be in Publish, but we removed the limit. Some of
# these tests test whether the new code correctly allows files
# larger than the limit.
OLD_MAX_SEGMENT_SIZE = 3500000
def setUp(self):
super(Filenode, self).setUp()
self._storage = FakeStorage()
self._peers = list(
make_peer(self._storage, n)
@ -48,12 +56,12 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin):
def test_create(self):
d = self.nodemaker.create_mutable_file()
def _created(n):
self.failUnless(isinstance(n, MutableFileNode))
self.failUnlessEqual(n.get_storage_index(), n._storage_index)
self.assertThat(n, IsInstance(MutableFileNode))
self.assertThat(n.get_storage_index(), Equals(n._storage_index))
sb = self.nodemaker.storage_broker
peer0 = sorted(sb.get_all_serverids())[0]
shnums = self._storage._peers[peer0].keys()
self.failUnlessEqual(len(shnums), 1)
self.assertThat(shnums, HasLength(1))
d.addCallback(_created)
return d
@ -61,12 +69,12 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin):
def test_create_mdmf(self):
d = self.nodemaker.create_mutable_file(version=MDMF_VERSION)
def _created(n):
self.failUnless(isinstance(n, MutableFileNode))
self.failUnlessEqual(n.get_storage_index(), n._storage_index)
self.assertThat(n, IsInstance(MutableFileNode))
self.assertThat(n.get_storage_index(), Equals(n._storage_index))
sb = self.nodemaker.storage_broker
peer0 = sorted(sb.get_all_serverids())[0]
shnums = self._storage._peers[peer0].keys()
self.failUnlessEqual(len(shnums), 1)
self.assertThat(shnums, HasLength(1))
d.addCallback(_created)
return d
@ -80,7 +88,7 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin):
d.addCallback(lambda ignored, v=v:
self.nodemaker.create_mutable_file(version=v))
def _created(n):
self.failUnless(isinstance(n, MutableFileNode))
self.assertThat(n, IsInstance(MutableFileNode))
self._node = n
return n
d.addCallback(_created)
@ -89,19 +97,19 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin):
d.addCallback(lambda ignored:
self._node.download_best_version())
d.addCallback(lambda contents:
self.failUnlessEqual(contents, b"Contents" * 50000))
self.assertThat(contents, Equals(b"Contents" * 50000)))
return d
def test_max_shares(self):
self.nodemaker.default_encoding_parameters['n'] = 255
d = self.nodemaker.create_mutable_file(version=SDMF_VERSION)
def _created(n):
self.failUnless(isinstance(n, MutableFileNode))
self.failUnlessEqual(n.get_storage_index(), n._storage_index)
self.assertThat(n, IsInstance(MutableFileNode))
self.assertThat(n.get_storage_index(), Equals(n._storage_index))
sb = self.nodemaker.storage_broker
num_shares = sum([len(self._storage._peers[x].keys()) for x \
in sb.get_all_serverids()])
self.failUnlessEqual(num_shares, 255)
self.assertThat(num_shares, Equals(255))
self._node = n
return n
d.addCallback(_created)
@ -113,7 +121,7 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin):
self._node.download_best_version())
# ...and check to make sure everything went okay.
d.addCallback(lambda contents:
self.failUnlessEqual(b"contents" * 50000, contents))
self.assertThat(b"contents" * 50000, Equals(contents)))
return d
def test_max_shares_mdmf(self):
@ -121,12 +129,12 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin):
self.nodemaker.default_encoding_parameters['n'] = 255
d = self.nodemaker.create_mutable_file(version=MDMF_VERSION)
def _created(n):
self.failUnless(isinstance(n, MutableFileNode))
self.failUnlessEqual(n.get_storage_index(), n._storage_index)
self.assertThat(n, IsInstance(MutableFileNode))
self.assertThat(n.get_storage_index(), Equals(n._storage_index))
sb = self.nodemaker.storage_broker
num_shares = sum([len(self._storage._peers[x].keys()) for x \
in sb.get_all_serverids()])
self.failUnlessEqual(num_shares, 255)
self.assertThat(num_shares, Equals(255))
self._node = n
return n
d.addCallback(_created)
@ -135,20 +143,20 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin):
d.addCallback(lambda ignored:
self._node.download_best_version())
d.addCallback(lambda contents:
self.failUnlessEqual(contents, b"contents" * 50000))
self.assertThat(contents, Equals(b"contents" * 50000)))
return d
def test_mdmf_filenode_cap(self):
# Test that an MDMF filenode, once created, returns an MDMF URI.
d = self.nodemaker.create_mutable_file(version=MDMF_VERSION)
def _created(n):
self.failUnless(isinstance(n, MutableFileNode))
self.assertThat(n, IsInstance(MutableFileNode))
cap = n.get_cap()
self.failUnless(isinstance(cap, uri.WriteableMDMFFileURI))
self.assertThat(cap, IsInstance(uri.WriteableMDMFFileURI))
rcap = n.get_readcap()
self.failUnless(isinstance(rcap, uri.ReadonlyMDMFFileURI))
self.assertThat(rcap, IsInstance(uri.ReadonlyMDMFFileURI))
vcap = n.get_verify_cap()
self.failUnless(isinstance(vcap, uri.MDMFVerifierURI))
self.assertThat(vcap, IsInstance(uri.MDMFVerifierURI))
d.addCallback(_created)
return d
@ -158,13 +166,13 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin):
# filenode given an MDMF cap.
d = self.nodemaker.create_mutable_file(version=MDMF_VERSION)
def _created(n):
self.failUnless(isinstance(n, MutableFileNode))
self.assertThat(n, IsInstance(MutableFileNode))
s = n.get_uri()
self.failUnless(s.startswith(b"URI:MDMF"))
self.assertTrue(s.startswith(b"URI:MDMF"))
n2 = self.nodemaker.create_from_cap(s)
self.failUnless(isinstance(n2, MutableFileNode))
self.failUnlessEqual(n.get_storage_index(), n2.get_storage_index())
self.failUnlessEqual(n.get_uri(), n2.get_uri())
self.assertThat(n2, IsInstance(MutableFileNode))
self.assertThat(n.get_storage_index(), Equals(n2.get_storage_index()))
self.assertThat(n.get_uri(), Equals(n2.get_uri()))
d.addCallback(_created)
return d
@ -172,13 +180,13 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin):
def test_create_from_mdmf_readcap(self):
d = self.nodemaker.create_mutable_file(version=MDMF_VERSION)
def _created(n):
self.failUnless(isinstance(n, MutableFileNode))
self.assertThat(n, IsInstance(MutableFileNode))
s = n.get_readonly_uri()
n2 = self.nodemaker.create_from_cap(s)
self.failUnless(isinstance(n2, MutableFileNode))
self.assertThat(n2, IsInstance(MutableFileNode))
# Check that it's a readonly node
self.failUnless(n2.is_readonly())
self.assertTrue(n2.is_readonly())
d.addCallback(_created)
return d
@ -191,10 +199,10 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin):
d = self.nodemaker.create_mutable_file(version=MDMF_VERSION)
def _created(n):
self.uri = n.get_uri()
self.failUnlessEqual(n._protocol_version, MDMF_VERSION)
self.assertThat(n._protocol_version, Equals(MDMF_VERSION))
n2 = self.nodemaker.create_from_cap(self.uri)
self.failUnlessEqual(n2._protocol_version, MDMF_VERSION)
self.assertThat(n2._protocol_version, Equals(MDMF_VERSION))
d.addCallback(_created)
return d
@ -203,14 +211,14 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin):
n = MutableFileNode(None, None, {"k": 3, "n": 10}, None)
calls = []
def _callback(*args, **kwargs):
self.failUnlessEqual(args, (4,) )
self.failUnlessEqual(kwargs, {"foo": 5})
self.assertThat(args, Equals((4,)))
self.assertThat(kwargs, Equals({"foo": 5}))
calls.append(1)
return 6
d = n._do_serialized(_callback, 4, foo=5)
def _check_callback(res):
self.failUnlessEqual(res, 6)
self.failUnlessEqual(calls, [1])
self.assertThat(res, Equals(6))
self.assertThat(calls, Equals([1]))
d.addCallback(_check_callback)
def _errback():
@ -227,26 +235,26 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin):
d.addCallback(lambda res: n.get_servermap(MODE_READ))
d.addCallback(lambda smap: smap.dump(StringIO()))
d.addCallback(lambda sio:
self.failUnless("3-of-10" in sio.getvalue()))
self.assertTrue("3-of-10" in sio.getvalue()))
d.addCallback(lambda res: n.overwrite(MutableData(b"contents 1")))
d.addCallback(lambda res: self.failUnlessIdentical(res, None))
d.addCallback(lambda res: self.assertThat(res, Is(None)))
d.addCallback(lambda res: n.download_best_version())
d.addCallback(lambda res: self.failUnlessEqual(res, b"contents 1"))
d.addCallback(lambda res: self.assertThat(res, Equals(b"contents 1")))
d.addCallback(lambda res: n.get_size_of_best_version())
d.addCallback(lambda size:
self.failUnlessEqual(size, len(b"contents 1")))
self.assertThat(size, Equals(len(b"contents 1"))))
d.addCallback(lambda res: n.overwrite(MutableData(b"contents 2")))
d.addCallback(lambda res: n.download_best_version())
d.addCallback(lambda res: self.failUnlessEqual(res, b"contents 2"))
d.addCallback(lambda res: self.assertThat(res, Equals(b"contents 2")))
d.addCallback(lambda res: n.get_servermap(MODE_WRITE))
d.addCallback(lambda smap: n.upload(MutableData(b"contents 3"), smap))
d.addCallback(lambda res: n.download_best_version())
d.addCallback(lambda res: self.failUnlessEqual(res, b"contents 3"))
d.addCallback(lambda res: self.assertThat(res, Equals(b"contents 3")))
d.addCallback(lambda res: n.get_servermap(MODE_ANYTHING))
d.addCallback(lambda smap:
n.download_version(smap,
smap.best_recoverable_version()))
d.addCallback(lambda res: self.failUnlessEqual(res, b"contents 3"))
d.addCallback(lambda res: self.assertThat(res, Equals(b"contents 3")))
# test a file that is large enough to overcome the
# mapupdate-to-retrieve data caching (i.e. make the shares larger
# than the default readsize, which is 2000 bytes). A 15kB file
@ -254,7 +262,7 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin):
d.addCallback(lambda res: n.overwrite(MutableData(b"large size file" * 1000)))
d.addCallback(lambda res: n.download_best_version())
d.addCallback(lambda res:
self.failUnlessEqual(res, b"large size file" * 1000))
self.assertThat(res, Equals(b"large size file" * 1000)))
return d
d.addCallback(_created)
return d
@ -268,7 +276,7 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin):
n.get_servermap(MODE_READ))
def _then(servermap):
dumped = servermap.dump(StringIO())
self.failUnlessIn("3-of-10", dumped.getvalue())
self.assertThat(dumped.getvalue(), Contains("3-of-10"))
d.addCallback(_then)
# Now overwrite the contents with some new contents. We want
# to make them big enough to force the file to be uploaded
@ -280,7 +288,7 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin):
d.addCallback(lambda ignored:
n.download_best_version())
d.addCallback(lambda data:
self.failUnlessEqual(data, big_contents))
self.assertThat(data, Equals(big_contents)))
# Overwrite the contents again with some new contents. As
# before, they need to be big enough to force multiple
# segments, so that we make the downloader deal with
@ -292,7 +300,7 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin):
d.addCallback(lambda ignored:
n.download_best_version())
d.addCallback(lambda data:
self.failUnlessEqual(data, bigger_contents))
self.assertThat(data, Equals(bigger_contents)))
return d
d.addCallback(_created)
return d
@ -323,7 +331,7 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin):
# Now we'll retrieve it into a pausing consumer.
c = PausingConsumer()
d = version.read(c)
d.addCallback(lambda ign: self.failUnlessEqual(c.size, len(data)))
d.addCallback(lambda ign: self.assertThat(c.size, Equals(len(data))))
c2 = PausingAndStoppingConsumer()
d.addCallback(lambda ign:
@ -360,14 +368,14 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin):
self.uri = node.get_uri()
# also confirm that the cap has no extension fields
pieces = self.uri.split(b":")
self.failUnlessEqual(len(pieces), 4)
self.assertThat(pieces, HasLength(4))
return node.overwrite(MutableData(b"contents1" * 100000))
def _then(ignored):
node = self.nodemaker.create_from_cap(self.uri)
return node.download_best_version()
def _downloaded(data):
self.failUnlessEqual(data, b"contents1" * 100000)
self.assertThat(data, Equals(b"contents1" * 100000))
d.addCallback(_created)
d.addCallback(_then)
d.addCallback(_downloaded)
@ -397,11 +405,11 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin):
d = self.nodemaker.create_mutable_file(upload1)
def _created(n):
d = n.download_best_version()
d.addCallback(lambda res: self.failUnlessEqual(res, b"contents 1"))
d.addCallback(lambda res: self.assertThat(res, Equals(b"contents 1")))
upload2 = MutableData(b"contents 2")
d.addCallback(lambda res: n.overwrite(upload2))
d.addCallback(lambda res: n.download_best_version())
d.addCallback(lambda res: self.failUnlessEqual(res, b"contents 2"))
d.addCallback(lambda res: self.assertThat(res, Equals(b"contents 2")))
return d
d.addCallback(_created)
return d
@ -415,15 +423,15 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin):
def _created(n):
d = n.download_best_version()
d.addCallback(lambda data:
self.failUnlessEqual(data, initial_contents))
self.assertThat(data, Equals(initial_contents)))
uploadable2 = MutableData(initial_contents + b"foobarbaz")
d.addCallback(lambda ignored:
n.overwrite(uploadable2))
d.addCallback(lambda ignored:
n.download_best_version())
d.addCallback(lambda data:
self.failUnlessEqual(data, initial_contents +
b"foobarbaz"))
self.assertThat(data, Equals(initial_contents +
b"foobarbaz")))
return d
d.addCallback(_created)
return d
@ -431,33 +439,33 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin):
def test_create_with_initial_contents_function(self):
data = b"initial contents"
def _make_contents(n):
self.failUnless(isinstance(n, MutableFileNode))
self.assertThat(n, IsInstance(MutableFileNode))
key = n.get_writekey()
self.failUnless(isinstance(key, bytes), key)
self.failUnlessEqual(len(key), 16) # AES key size
self.assertTrue(isinstance(key, bytes), key)
self.assertThat(key, HasLength(16)) # AES key size
return MutableData(data)
d = self.nodemaker.create_mutable_file(_make_contents)
def _created(n):
return n.download_best_version()
d.addCallback(_created)
d.addCallback(lambda data2: self.failUnlessEqual(data2, data))
d.addCallback(lambda data2: self.assertThat(data2, Equals(data)))
return d
def test_create_mdmf_with_initial_contents_function(self):
data = b"initial contents" * 100000
def _make_contents(n):
self.failUnless(isinstance(n, MutableFileNode))
self.assertThat(n, IsInstance(MutableFileNode))
key = n.get_writekey()
self.failUnless(isinstance(key, bytes), key)
self.failUnlessEqual(len(key), 16)
self.assertTrue(isinstance(key, bytes), key)
self.assertThat(key, HasLength(16))
return MutableData(data)
d = self.nodemaker.create_mutable_file(_make_contents,
version=MDMF_VERSION)
d.addCallback(lambda n:
n.download_best_version())
d.addCallback(lambda data2:
self.failUnlessEqual(data2, data))
self.assertThat(data2, Equals(data)))
return d
@ -476,7 +484,7 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin):
d = n.get_servermap(MODE_READ)
d.addCallback(lambda servermap: servermap.best_recoverable_version())
d.addCallback(lambda verinfo:
self.failUnlessEqual(verinfo[0], expected_seqnum, which))
self.assertThat(verinfo[0], Equals(expected_seqnum), which))
return d
def test_modify(self):
@ -513,36 +521,36 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin):
def _created(n):
d = n.modify(_modifier)
d.addCallback(lambda res: n.download_best_version())
d.addCallback(lambda res: self.failUnlessEqual(res, b"line1line2"))
d.addCallback(lambda res: self.assertThat(res, Equals(b"line1line2")))
d.addCallback(lambda res: self.failUnlessCurrentSeqnumIs(n, 2, "m"))
d.addCallback(lambda res: n.modify(_non_modifier))
d.addCallback(lambda res: n.download_best_version())
d.addCallback(lambda res: self.failUnlessEqual(res, b"line1line2"))
d.addCallback(lambda res: self.assertThat(res, Equals(b"line1line2")))
d.addCallback(lambda res: self.failUnlessCurrentSeqnumIs(n, 2, "non"))
d.addCallback(lambda res: n.modify(_none_modifier))
d.addCallback(lambda res: n.download_best_version())
d.addCallback(lambda res: self.failUnlessEqual(res, b"line1line2"))
d.addCallback(lambda res: self.assertThat(res, Equals(b"line1line2")))
d.addCallback(lambda res: self.failUnlessCurrentSeqnumIs(n, 2, "none"))
d.addCallback(lambda res:
self.shouldFail(ValueError, "error_modifier", None,
n.modify, _error_modifier))
d.addCallback(lambda res: n.download_best_version())
d.addCallback(lambda res: self.failUnlessEqual(res, b"line1line2"))
d.addCallback(lambda res: self.assertThat(res, Equals(b"line1line2")))
d.addCallback(lambda res: self.failUnlessCurrentSeqnumIs(n, 2, "err"))
d.addCallback(lambda res: n.download_best_version())
d.addCallback(lambda res: self.failUnlessEqual(res, b"line1line2"))
d.addCallback(lambda res: self.assertThat(res, Equals(b"line1line2")))
d.addCallback(lambda res: self.failUnlessCurrentSeqnumIs(n, 2, "big"))
d.addCallback(lambda res: n.modify(_ucw_error_modifier))
d.addCallback(lambda res: self.failUnlessEqual(len(calls), 2))
d.addCallback(lambda res: self.assertThat(calls, HasLength(2)))
d.addCallback(lambda res: n.download_best_version())
d.addCallback(lambda res: self.failUnlessEqual(res,
b"line1line2line3"))
d.addCallback(lambda res: self.assertThat(res,
Equals(b"line1line2line3")))
d.addCallback(lambda res: self.failUnlessCurrentSeqnumIs(n, 3, "ucw"))
def _reset_ucw_error_modifier(res):
@ -557,10 +565,10 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin):
# will only be one larger than the previous test, not two (i.e. 4
# instead of 5).
d.addCallback(lambda res: n.modify(_ucw_error_non_modifier))
d.addCallback(lambda res: self.failUnlessEqual(len(calls), 2))
d.addCallback(lambda res: self.assertThat(calls, HasLength(2)))
d.addCallback(lambda res: n.download_best_version())
d.addCallback(lambda res: self.failUnlessEqual(res,
b"line1line2line3"))
d.addCallback(lambda res: self.assertThat(res,
Equals(b"line1line2line3")))
d.addCallback(lambda res: self.failUnlessCurrentSeqnumIs(n, 4, "ucw"))
d.addCallback(lambda res: n.modify(_toobig_modifier))
return d
@ -596,7 +604,7 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin):
def _created(n):
d = n.modify(_modifier)
d.addCallback(lambda res: n.download_best_version())
d.addCallback(lambda res: self.failUnlessEqual(res, b"line1line2"))
d.addCallback(lambda res: self.assertThat(res, Equals(b"line1line2")))
d.addCallback(lambda res: self.failUnlessCurrentSeqnumIs(n, 2, "m"))
d.addCallback(lambda res:
@ -605,7 +613,7 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin):
n.modify, _ucw_error_modifier,
_backoff_stopper))
d.addCallback(lambda res: n.download_best_version())
d.addCallback(lambda res: self.failUnlessEqual(res, b"line1line2"))
d.addCallback(lambda res: self.assertThat(res, Equals(b"line1line2")))
d.addCallback(lambda res: self.failUnlessCurrentSeqnumIs(n, 2, "stop"))
def _reset_ucw_error_modifier(res):
@ -615,8 +623,8 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin):
d.addCallback(lambda res: n.modify(_ucw_error_modifier,
_backoff_pauser))
d.addCallback(lambda res: n.download_best_version())
d.addCallback(lambda res: self.failUnlessEqual(res,
b"line1line2line3"))
d.addCallback(lambda res: self.assertThat(res,
Equals(b"line1line2line3")))
d.addCallback(lambda res: self.failUnlessCurrentSeqnumIs(n, 3, "pause"))
d.addCallback(lambda res:
@ -625,8 +633,8 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin):
n.modify, _always_ucw_error_modifier,
giveuper.delay))
d.addCallback(lambda res: n.download_best_version())
d.addCallback(lambda res: self.failUnlessEqual(res,
b"line1line2line3"))
d.addCallback(lambda res: self.assertThat(res,
Equals(b"line1line2line3")))
d.addCallback(lambda res: self.failUnlessCurrentSeqnumIs(n, 3, "giveup"))
return d
@ -641,23 +649,23 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin):
d.addCallback(lambda res: n.get_servermap(MODE_READ))
d.addCallback(lambda smap: smap.dump(StringIO()))
d.addCallback(lambda sio:
self.failUnless("3-of-10" in sio.getvalue()))
self.assertTrue("3-of-10" in sio.getvalue()))
d.addCallback(lambda res: n.overwrite(MutableData(b"contents 1")))
d.addCallback(lambda res: self.failUnlessIdentical(res, None))
d.addCallback(lambda res: self.assertThat(res, Is(None)))
d.addCallback(lambda res: n.download_best_version())
d.addCallback(lambda res: self.failUnlessEqual(res, b"contents 1"))
d.addCallback(lambda res: self.assertThat(res, Equals(b"contents 1")))
d.addCallback(lambda res: n.overwrite(MutableData(b"contents 2")))
d.addCallback(lambda res: n.download_best_version())
d.addCallback(lambda res: self.failUnlessEqual(res, b"contents 2"))
d.addCallback(lambda res: self.assertThat(res, Equals(b"contents 2")))
d.addCallback(lambda res: n.get_servermap(MODE_WRITE))
d.addCallback(lambda smap: n.upload(MutableData(b"contents 3"), smap))
d.addCallback(lambda res: n.download_best_version())
d.addCallback(lambda res: self.failUnlessEqual(res, b"contents 3"))
d.addCallback(lambda res: self.assertThat(res, Equals(b"contents 3")))
d.addCallback(lambda res: n.get_servermap(MODE_ANYTHING))
d.addCallback(lambda smap:
n.download_version(smap,
smap.best_recoverable_version()))
d.addCallback(lambda res: self.failUnlessEqual(res, b"contents 3"))
d.addCallback(lambda res: self.assertThat(res, Equals(b"contents 3")))
return d
d.addCallback(_created)
return d
@ -673,14 +681,14 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin):
return n.get_servermap(MODE_READ)
d.addCallback(_created)
d.addCallback(lambda ignored:
self.failUnlessEqual(self.n.get_size(), 0))
self.assertThat(self.n.get_size(), Equals(0)))
d.addCallback(lambda ignored:
self.n.overwrite(MutableData(b"foobarbaz")))
d.addCallback(lambda ignored:
self.failUnlessEqual(self.n.get_size(), 9))
self.assertThat(self.n.get_size(), Equals(9)))
d.addCallback(lambda ignored:
self.nodemaker.create_mutable_file(MutableData(b"foobarbaz")))
d.addCallback(_created)
d.addCallback(lambda ignored:
self.failUnlessEqual(self.n.get_size(), 9))
self.assertThat(self.n.get_size(), Equals(9)))
return d
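The Filenode tests above all share one shape: each step of the scenario returns a Deferred, the assertions run inside addCallback callbacks, and the test returns the chain so the runner (now the AsyncBrokenTestCase base) waits for it. A self-contained toy of that shape — FakeNode is purely illustrative and stands in for MutableFileNode, whose real overwrite() takes a MutableData wrapper:

from twisted.internet import defer

class FakeNode(object):
    """Illustrative stand-in for MutableFileNode, not the real API."""
    def __init__(self):
        self._contents = b""

    def overwrite(self, data):
        self._contents = data
        return defer.succeed(None)

    def download_best_version(self):
        return defer.succeed(self._contents)

def roundtrip():
    n = FakeNode()
    d = n.overwrite(b"contents 1")
    d.addCallback(lambda ign: n.download_best_version())
    def _check(res):
        # An exception raised in a callback travels down the errback
        # chain, so whichever test returned this Deferred fails.
        assert res == b"contents 1", res
        return res
    d.addCallback(_check)
    return d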

View File

@ -11,14 +11,15 @@ if PY2:
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
import os, base64
from twisted.trial import unittest
from ..common import AsyncTestCase
from testtools.matchers import HasLength
from allmydata import uri
from allmydata.storage.common import storage_index_to_dir
from allmydata.util import fileutil
from .. import common_util as testutil
from ..no_network import GridTestMixin
class Interoperability(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin):
class Interoperability(GridTestMixin, AsyncTestCase, testutil.ShouldFailMixin):
sdmf_old_shares = {}
sdmf_old_shares[0] = b"VGFob2UgbXV0YWJsZSBjb250YWluZXIgdjEKdQlEA47ESLbTdKdpLJXCpBxd5OH239tl5hvAiz1dvGdE5rIOpf8cbfxbPcwNF+Y5dM92uBVbmV6KAAAAAAAAB/wAAAAAAAAJ0AAAAAFOWSw7jSx7WXzaMpdleJYXwYsRCV82jNA5oex9m2YhXSnb2POh+vvC1LE1NAfRc9GOb2zQG84Xdsx1Jub2brEeKkyt0sRIttN0p2kslcKkHF3k4fbf22XmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABamJprL6ecrsOoFKdrXUmWveLq8nzEGDOjFnyK9detI3noX3uyK2MwSnFdAfyN0tuAwoAAAAAAAAAFQAAAAAAAAAVAAABjwAAAo8AAAMXAAADNwAAAAAAAAM+AAAAAAAAB/wwggEgMA0GCSqGSIb3DQEBAQUAA4IBDQAwggEIAoIBAQC1IkainlJF12IBXBQdpRK1zXB7a26vuEYqRmQM09YjC6sQjCs0F2ICk8n9m/2Kw4l16eIEboB2Au9pODCE+u/dEAakEFh4qidTMn61rbGUbsLK8xzuWNW22ezzz9/nPia0HDrulXt51/FYtfnnAuD1RJGXJv/8tDllE9FL/18TzlH4WuB6Fp8FTgv7QdbZAfWJHDGFIpVCJr1XxOCsSZNFJIqGwZnD2lsChiWw5OJDbKd8otqN1hIbfHyMyfMOJ/BzRzvZXaUt4Dv5nf93EmQDWClxShRwpuX/NkZ5B2K9OFonFTbOCexm/MjMAdCBqebKKaiHFkiknUCn9eJQpZ5bAgERgV50VKj+AVTDfgTpqfO2vfo4wrufi6ZBb8QV7hllhUFBjYogQ9C96dnS7skv0s+cqFuUjwMILr5/rsbEmEMGvl0T0ytyAbtlXuowEFVj/YORNknM4yjY72YUtEPTlMpk0Cis7aIgTvu5qWMPER26PMApZuRqiwRsGIkaJIvOVOTHHjFYe3/YzdMkc7OZtqRMfQLtwVl2/zKQQV8b/a9vaT6q3mRLRd4P3esaAFe/+7sR/t+9tmB+a8kxtKM6kmaVQJMbXJZ4aoHGfeLX0m35Rcvu2Bmph7QfSDjk/eaE3q55zYSoGWShmlhlw4Kwg84sMuhmcVhLvo0LovR8bKmbdgACtTh7+7gs/l5w1lOkgbF6w7rkXLNslK7L2KYF4SPFLUcABOOLy8EETxh7h7/z9d62EiPu9CNpRrCOLxUhn+JUS+DuAAhgcAb/adrQFrhlrRNoRpvjDuxmFebA4F0qCyqWssm61AAQ/EX4eC/1+hGOQ/h4EiKUkqxdsfzdcPlDvd11SGWZ0VHsUclZChTzuBAU2zLTXm+cG8IFhO50ly6Ey/DB44NtMKVaVzO0nU8DE0Wua7Lx6Bnad5n91qmHAnwSEJE5YIhQM634omd6cq9Wk4seJCUIn+ucoknrpxp0IR9QMxpKSMRHRUg2K8ZegnY3YqFunRZKCfsq9ufQEKgjZN12AFqi551KPBdn4/3V5HK6xTv0P4robSsE/BvuIfByvRf/W7ZrDx+CFC4EEcsBOACOZCrkhhqd5TkYKbe9RA+vs56+9N5qZGurkxcoKviiyEncxvTuShD65DK/6x6kMDMgQv/EdZDI3x9GtHTnRBYXwDGnPJ19w+q2zC3e2XarbxTGYQIPEC5mYx0gAA0sbjf018NGfwBhl6SB54iGsa8uLvR3jHv6OSRJgwxL6j7P0Ts4Hv2EtO12P0Lv21pwi3JC1O/WviSrKCvrQD5lMHL9Uym3hwFi2zu0mqwZvxOAbGy7kfOPXkLYKOHTZLthzKj3PsdjeceWBfYIvPGKYcd6wDr36d1aXSYS4IWeApTS2AQ2lu0DUcgSefAvsA8NkgOklvJY1cjTMSg6j6cxQo48Bvl8RAWGLbr4h2S/8KwDGxwLsSv0Gop/gnFc3GzCsmL0EkEyHHWkCA8YRXCghfW80KLDV495ff7yF5oiwK56GniqowZ3RG9Jxp5MXoJQgsLV1VMQFMAmsY69yz8eoxRH3wl9L0dMyndLulhWWzNwPMQ2I0yAWdzA/pksVmwTJTFenB3MHCiWc5rEwJ3yofe6NZZnZQrYyL9r1TNnVwfTwRUiykPiLSk4x9Mi6DX7RamDAxc8u3gDVfjPsTOTagBOEGUWlGAL54KE/E6sgCQ5DEAt12chk8AxbjBFLPgV+/idrzS0lZHOL+IVBI9D0i3Bq1yZcSIqcjZB0M3IbxbPm4gLAYOWEiTUN2ecsEHHg9nt6rhgffVoqSbCCFPbpC0xf7WOC3+BQORIZECOCC7cUAciXq3xn+GuxpFE40RWRJeKAK7bBQ21X89ABIXlQFkFddZ9kRvlZ2Pnl0oeF+2pjnZu0Yc2czNfZEQF2P7BKIdLrgMgxG89snxAY8qAYTCKyQw6xTG87wkjDcpy1wzsZLP3WsOuO7cAm7b27xU0jRKq8Cw4d1hDoyRG+RdS53F8RFJzVMaNNYgxU2tfRwUvXpTRXiOheeRVvh25+YGVnjakUXjx/dSDnOw4ETHGHD+7styDkeSfc3BdSZxswzc6OehgMI+xsCxeeRym15QUm9hxvg8X7Bfz/0WulgFwgzrm11TVynZYOmvyHpiZKoqQyQyKahIrfhwuchCr7lMsZ4a+umIkNkKxCLZnI+T7jd+eGFMgKItjz3kTTxRl3IhaJG3LbPmwRUJynMxQKdMi4Uf0qy0U7+i8hIJ9m50QXc+3tw2bwDSbx22XYJ9Wf14gxx5G5SPTb1JVCbhe4fxNt91xIxCow2zk62tzbYfRe6dfmDmgYHkv2PIEtMJZK8iKLDjFfu2ZUxsKT2A5g1q17og6o9MeXeuFS3mzJXJYFQZd+3UzlFR9qwkFkby9mg5y4XSeMvRLOHPt/H/r5SpEqBE6a9MadZYt61FBV152CUEzd43ihXtrAa0XH9HdsiySBcWI1SpM3mv9rRP0DiLjMUzHw/K1D8TE2f07zW4t/9kvE11tFj/NpICixQAAAAA="
sdmf_old_shares[1] = b"VGFob2UgbXV0YWJsZSBjb250YWluZXIgdjEKdQlEA47ESLbTdKdpLJXCpBxd5OH239tl5hvAiz1dvGdE5rIOpf8cbfxbPcwNF+Y5dM92uBVbmV6KAAAAAAAAB/wAAAAAAAAJ0AAAAAFOWSw7jSx7WXzaMpdleJYXwYsRCV82jNA5oex9m2YhXSnb2POh+vvC1LE1NAfRc9GOb2zQG84Xdsx1Jub2brEeKkyt0sRIttN0p2kslcKkHF3k4fbf22XmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABamJprL6ecrsOoFKdrXUmWveLq8nzEGDOjFnyK9detI3noX3uyK2MwSnFdAfyN0tuAwoAAAAAAAAAFQAAAAAAAAAVAAABjwAAAo8AAAMXAAADNwAAAAAAAAM+AAAAAAAAB/wwggEgMA0GCSqGSIb3DQEBAQUAA4IBDQAwggEIAoIBAQC1IkainlJF12IBXBQdpRK1zXB7a26vuEYqRmQM09YjC6sQjCs0F2ICk8n9m/2Kw4l16eIEboB2Au9pODCE+u/dEAakEFh4qidTMn61rbGUbsLK8xzuWNW22ezzz9/nPia0HDrulXt51/FYtfnnAuD1RJGXJv/8tDllE9FL/18TzlH4WuB6Fp8FTgv7QdbZAfWJHDGFIpVCJr1XxOCsSZNFJIqGwZnD2lsChiWw5OJDbKd8otqN1hIbfHyMyfMOJ/BzRzvZXaUt4Dv5nf93EmQDWClxShRwpuX/NkZ5B2K9OFonFTbOCexm/MjMAdCBqebKKaiHFkiknUCn9eJQpZ5bAgERgV50VKj+AVTDfgTpqfO2vfo4wrufi6ZBb8QV7hllhUFBjYogQ9C96dnS7skv0s+cqFuUjwMILr5/rsbEmEMGvl0T0ytyAbtlXuowEFVj/YORNknM4yjY72YUtEPTlMpk0Cis7aIgTvu5qWMPER26PMApZuRqiwRsGIkaJIvOVOTHHjFYe3/YzdMkc7OZtqRMfQLtwVl2/zKQQV8b/a9vaT6q3mRLRd4P3esaAFe/+7sR/t+9tmB+a8kxtKM6kmaVQJMbXJZ4aoHGfeLX0m35Rcvu2Bmph7QfSDjk/eaE3q55zYSoGWShmlhlw4Kwg84sMuhmcVhLvo0LovR8bKmbdgACtTh7+7gs/l5w1lOkgbF6w7rkXLNslK7L2KYF4SPFLUcABOOLy8EETxh7h7/z9d62EiPu9CNpRrCOLxUhn+JUS+DuAAhgcAb/adrQFrhlrRNoRpvjDuxmFebA4F0qCyqWssm61AAP7FHJWQoU87gQFNsy015vnBvCBYTudJcuhMvwweODbTD8Rfh4L/X6EY5D+HgSIpSSrF2x/N1w+UO93XVIZZnRUeePDXEwhqYDE0Wua7Lx6Bnad5n91qmHAnwSEJE5YIhQM634omd6cq9Wk4seJCUIn+ucoknrpxp0IR9QMxpKSMRHRUg2K8ZegnY3YqFunRZKCfsq9ufQEKgjZN12AFqi551KPBdn4/3V5HK6xTv0P4robSsE/BvuIfByvRf/W7ZrDx+CFC4EEcsBOACOZCrkhhqd5TkYKbe9RA+vs56+9N5qZGurkxcoKviiyEncxvTuShD65DK/6x6kMDMgQv/EdZDI3x9GtHTnRBYXwDGnPJ19w+q2zC3e2XarbxTGYQIPEC5mYx0gAA0sbjf018NGfwBhl6SB54iGsa8uLvR3jHv6OSRJgwxL6j7P0Ts4Hv2EtO12P0Lv21pwi3JC1O/WviSrKCvrQD5lMHL9Uym3hwFi2zu0mqwZvxOAbGy7kfOPXkLYKOHTZLthzKj3PsdjeceWBfYIvPGKYcd6wDr36d1aXSYS4IWeApTS2AQ2lu0DUcgSefAvsA8NkgOklvJY1cjTMSg6j6cxQo48Bvl8RAWGLbr4h2S/8KwDGxwLsSv0Gop/gnFc3GzCsmL0EkEyHHWkCA8YRXCghfW80KLDV495ff7yF5oiwK56GniqowZ3RG9Jxp5MXoJQgsLV1VMQFMAmsY69yz8eoxRH3wl9L0dMyndLulhWWzNwPMQ2I0yAWdzA/pksVmwTJTFenB3MHCiWc5rEwJ3yofe6NZZnZQrYyL9r1TNnVwfTwRUiykPiLSk4x9Mi6DX7RamDAxc8u3gDVfjPsTOTagBOEGUWlGAL54KE/E6sgCQ5DEAt12chk8AxbjBFLPgV+/idrzS0lZHOL+IVBI9D0i3Bq1yZcSIqcjZB0M3IbxbPm4gLAYOWEiTUN2ecsEHHg9nt6rhgffVoqSbCCFPbpC0xf7WOC3+BQORIZECOCC7cUAciXq3xn+GuxpFE40RWRJeKAK7bBQ21X89ABIXlQFkFddZ9kRvlZ2Pnl0oeF+2pjnZu0Yc2czNfZEQF2P7BKIdLrgMgxG89snxAY8qAYTCKyQw6xTG87wkjDcpy1wzsZLP3WsOuO7cAm7b27xU0jRKq8Cw4d1hDoyRG+RdS53F8RFJzVMaNNYgxU2tfRwUvXpTRXiOheeRVvh25+YGVnjakUXjx/dSDnOw4ETHGHD+7styDkeSfc3BdSZxswzc6OehgMI+xsCxeeRym15QUm9hxvg8X7Bfz/0WulgFwgzrm11TVynZYOmvyHpiZKoqQyQyKahIrfhwuchCr7lMsZ4a+umIkNkKxCLZnI+T7jd+eGFMgKItjz3kTTxRl3IhaJG3LbPmwRUJynMxQKdMi4Uf0qy0U7+i8hIJ9m50QXc+3tw2bwDSbx22XYJ9Wf14gxx5G5SPTb1JVCbhe4fxNt91xIxCow2zk62tzbYfRe6dfmDmgYHkv2PIEtMJZK8iKLDjFfu2ZUxsKT2A5g1q17og6o9MeXeuFS3mzJXJYFQZd+3UzlFR9qwkFkby9mg5y4XSeMvRLOHPt/H/r5SpEqBE6a9MadZYt61FBV152CUEzd43ihXtrAa0XH9HdsiySBcWI1SpM3mv9rRP0DiLjMUzHw/K1D8TE2f07zW4t/9kvE11tFj/NpICixQAAAAA="
@ -53,7 +54,7 @@ class Interoperability(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixi
sharedata)
# ...and verify that the shares are there.
shares = self.find_uri_shares(self.sdmf_old_cap)
assert len(shares) == 10
self.assertThat(shares, HasLength(10))
def test_new_downloader_can_read_old_shares(self):
self.basedir = "mutable/Interoperability/new_downloader_can_read_old_shares"
@ -62,5 +63,5 @@ class Interoperability(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixi
nm = self.g.clients[0].nodemaker
n = nm.create_from_cap(self.sdmf_old_cap)
d = n.download_best_version()
d.addCallback(self.failUnlessEqual, self.sdmf_old_contents)
d.addCallback(self.assertEqual, self.sdmf_old_contents)
return d

View File

@ -10,7 +10,8 @@ from future.utils import PY2
if PY2:
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
from twisted.trial import unittest
from ..common import AsyncTestCase
from testtools.matchers import Equals
from allmydata.interfaces import SDMF_VERSION
from allmydata.monitor import Monitor
from foolscap.logging import log
@ -20,8 +21,9 @@ from allmydata.mutable.servermap import ServerMap, ServermapUpdater
from ..common_util import DevNullDictionary
from .util import FakeStorage, make_nodemaker
class MultipleEncodings(unittest.TestCase):
class MultipleEncodings(AsyncTestCase):
def setUp(self):
super(MultipleEncodings, self).setUp()
self.CONTENTS = b"New contents go here"
self.uploadable = MutableData(self.CONTENTS)
self._storage = FakeStorage()
@ -159,6 +161,6 @@ class MultipleEncodings(unittest.TestCase):
d.addCallback(lambda res: fn3.download_best_version())
def _retrieved(new_contents):
# the current specified behavior is "first version recoverable"
self.failUnlessEqual(new_contents, contents1)
self.assertThat(new_contents, Equals(contents1))
d.addCallback(_retrieved)
return d

View File

@ -10,15 +10,17 @@ from future.utils import PY2
if PY2:
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
from twisted.trial import unittest
from ..common import AsyncTestCase
from testtools.matchers import Equals, HasLength
from allmydata.monitor import Monitor
from allmydata.mutable.common import MODE_CHECK, MODE_READ
from .util import PublishMixin, CheckerMixin
class MultipleVersions(unittest.TestCase, PublishMixin, CheckerMixin):
class MultipleVersions(AsyncTestCase, PublishMixin, CheckerMixin):
def setUp(self):
super(MultipleVersions, self).setUp()
return self.publish_multiple()
def test_multiple_versions(self):
@ -26,7 +28,7 @@ class MultipleVersions(unittest.TestCase, PublishMixin, CheckerMixin):
# should get the latest one
self._set_versions(dict([(i,2) for i in (0,2,4,6,8)]))
d = self._fn.download_best_version()
d.addCallback(lambda res: self.failUnlessEqual(res, self.CONTENTS[4]))
d.addCallback(lambda res: self.assertThat(res, Equals(self.CONTENTS[4])))
# and the checker should report problems
d.addCallback(lambda res: self._fn.check(Monitor()))
d.addCallback(self.check_bad, "test_multiple_versions")
@ -35,23 +37,23 @@ class MultipleVersions(unittest.TestCase, PublishMixin, CheckerMixin):
d.addCallback(lambda res:
self._set_versions(dict([(i,2) for i in range(10)])))
d.addCallback(lambda res: self._fn.download_best_version())
d.addCallback(lambda res: self.failUnlessEqual(res, self.CONTENTS[2]))
d.addCallback(lambda res: self.assertThat(res, Equals(self.CONTENTS[2])))
# if exactly one share is at version 3, we should still get v2
d.addCallback(lambda res:
self._set_versions({0:3}))
d.addCallback(lambda res: self._fn.download_best_version())
d.addCallback(lambda res: self.failUnlessEqual(res, self.CONTENTS[2]))
d.addCallback(lambda res: self.assertThat(res, Equals(self.CONTENTS[2])))
# but the servermap should see the unrecoverable version. This
# depends upon the single newer share being queried early.
d.addCallback(lambda res: self._fn.get_servermap(MODE_READ))
def _check_smap(smap):
self.failUnlessEqual(len(smap.unrecoverable_versions()), 1)
self.assertThat(smap.unrecoverable_versions(), HasLength(1))
newer = smap.unrecoverable_newer_versions()
self.failUnlessEqual(len(newer), 1)
self.assertThat(newer, HasLength(1))
verinfo, health = list(newer.items())[0]
self.failUnlessEqual(verinfo[0], 4)
self.failUnlessEqual(health, (1,3))
self.failIf(smap.needs_merge())
self.assertThat(verinfo[0], Equals(4))
self.assertThat(health, Equals((1,3)))
self.assertThat(smap.needs_merge(), Equals(False))
d.addCallback(_check_smap)
# if we have a mix of two parallel versions (s4a and s4b), we could
# recover either
@ -60,13 +62,13 @@ class MultipleVersions(unittest.TestCase, PublishMixin, CheckerMixin):
1:4,3:4,5:4,7:4,9:4}))
d.addCallback(lambda res: self._fn.get_servermap(MODE_READ))
def _check_smap_mixed(smap):
self.failUnlessEqual(len(smap.unrecoverable_versions()), 0)
self.assertThat(smap.unrecoverable_versions(), HasLength(0))
newer = smap.unrecoverable_newer_versions()
self.failUnlessEqual(len(newer), 0)
self.failUnless(smap.needs_merge())
self.assertThat(newer, HasLength(0))
self.assertTrue(smap.needs_merge())
d.addCallback(_check_smap_mixed)
d.addCallback(lambda res: self._fn.download_best_version())
d.addCallback(lambda res: self.failUnless(res == self.CONTENTS[3] or
res == self.CONTENTS[4]))
d.addCallback(lambda res: self.assertTrue(res == self.CONTENTS[3] or
res == self.CONTENTS[4]))
return d
@ -86,12 +88,12 @@ class MultipleVersions(unittest.TestCase, PublishMixin, CheckerMixin):
d = self._fn.modify(_modify)
d.addCallback(lambda res: self._fn.download_best_version())
expected = self.CONTENTS[2] + b" modified"
d.addCallback(lambda res: self.failUnlessEqual(res, expected))
d.addCallback(lambda res: self.assertThat(res, Equals(expected)))
# and the servermap should indicate that the outlier was replaced too
d.addCallback(lambda res: self._fn.get_servermap(MODE_CHECK))
def _check_smap(smap):
self.failUnlessEqual(smap.highest_seqnum(), 5)
self.failUnlessEqual(len(smap.unrecoverable_versions()), 0)
self.failUnlessEqual(len(smap.recoverable_versions()), 1)
self.assertThat(smap.highest_seqnum(), Equals(5))
self.assertThat(smap.unrecoverable_versions(), HasLength(0))
self.assertThat(smap.recoverable_versions(), HasLength(1))
d.addCallback(_check_smap)
return d

View File

@ -11,7 +11,8 @@ if PY2:
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
import os, base64
from twisted.trial import unittest
from ..common import AsyncTestCase
from testtools.matchers import HasLength
from twisted.internet import defer
from foolscap.logging import log
from allmydata import uri
@ -61,7 +62,7 @@ class FirstServerGetsDeleted(object):
return (True, {})
return retval
class Problems(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin):
class Problems(GridTestMixin, AsyncTestCase, testutil.ShouldFailMixin):
def do_publish_surprise(self, version):
self.basedir = "mutable/Problems/test_publish_surprise_%s" % version
self.set_up_grid()
@ -198,8 +199,8 @@ class Problems(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin):
def _overwritten_again(smap):
# Make sure that all shares were updated by making sure that
# there aren't any other versions in the sharemap.
self.failUnlessEqual(len(smap.recoverable_versions()), 1)
self.failUnlessEqual(len(smap.unrecoverable_versions()), 0)
self.assertThat(smap.recoverable_versions(), HasLength(1))
self.assertThat(smap.unrecoverable_versions(), HasLength(0))
d.addCallback(_overwritten_again)
return d
@ -240,7 +241,7 @@ class Problems(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin):
# that ought to work
def _got_node(n):
d = n.download_best_version()
d.addCallback(lambda res: self.failUnlessEqual(res, b"contents 1"))
d.addCallback(lambda res: self.assertEqual(res, b"contents 1"))
# now break the second peer
def _break_peer1(res):
self.g.break_server(self.server1.get_serverid())
@ -248,7 +249,7 @@ class Problems(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin):
d.addCallback(lambda res: n.overwrite(MutableData(b"contents 2")))
# that ought to work too
d.addCallback(lambda res: n.download_best_version())
d.addCallback(lambda res: self.failUnlessEqual(res, b"contents 2"))
d.addCallback(lambda res: self.assertEqual(res, b"contents 2"))
def _explain_error(f):
print(f)
if f.check(NotEnoughServersError):
@ -280,7 +281,7 @@ class Problems(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin):
d = nm.create_mutable_file(MutableData(b"contents 1"))
def _created(n):
d = n.download_best_version()
d.addCallback(lambda res: self.failUnlessEqual(res, b"contents 1"))
d.addCallback(lambda res: self.assertEqual(res, b"contents 1"))
# now break one of the remaining servers
def _break_second_server(res):
self.g.break_server(peerids[1])
@ -288,7 +289,7 @@ class Problems(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin):
d.addCallback(lambda res: n.overwrite(MutableData(b"contents 2")))
# that ought to work too
d.addCallback(lambda res: n.download_best_version())
d.addCallback(lambda res: self.failUnlessEqual(res, b"contents 2"))
d.addCallback(lambda res: self.assertEqual(res, b"contents 2"))
return d
d.addCallback(_created)
return d
@ -419,7 +420,7 @@ class Problems(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin):
return self._node.download_version(servermap, ver)
d.addCallback(_then)
d.addCallback(lambda data:
self.failUnlessEqual(data, CONTENTS))
self.assertEqual(data, CONTENTS))
return d
def test_1654(self):
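The Problems tests shown here repeatedly stage one scenario: confirm a read, break a server between operations with GridTestMixin's break_server(), then confirm that writes and reads still succeed while enough servers remain. A sketch of that shape, where test_case, grid, and node are hypothetical stand-ins for the fixtures and overwrite() elides the MutableData wrapping the real tests use:

def overwrite_survives_one_failure(test_case, grid, node, victim_serverid):
    # Sanity-check the starting contents.
    d = node.download_best_version()
    d.addCallback(lambda res: test_case.assertEqual(res, b"contents 1"))
    # Break one server between the read and the write...
    d.addCallback(lambda ign: grid.break_server(victim_serverid))
    # ...and the write and follow-up read should still succeed, since
    # the encoding parameters tolerate a missing server.
    d.addCallback(lambda ign: node.overwrite(b"contents 2"))
    d.addCallback(lambda ign: node.download_best_version())
    d.addCallback(lambda res: test_case.assertEqual(res, b"contents 2"))
    return d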

View File

@ -10,7 +10,8 @@ from future.utils import PY2
if PY2:
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
from twisted.trial import unittest
from ..common import AsyncTestCase
from testtools.matchers import Equals, HasLength
from allmydata.interfaces import IRepairResults, ICheckAndRepairResults
from allmydata.monitor import Monitor
from allmydata.mutable.common import MODE_CHECK
@ -19,7 +20,7 @@ from allmydata.mutable.repairer import MustForceRepairError
from ..common import ShouldFailMixin
from .util import PublishMixin
class Repair(unittest.TestCase, PublishMixin, ShouldFailMixin):
class Repair(AsyncTestCase, PublishMixin, ShouldFailMixin):
def get_shares(self, s):
all_shares = {} # maps (peerid, shnum) to share data
@ -40,8 +41,8 @@ class Repair(unittest.TestCase, PublishMixin, ShouldFailMixin):
d.addCallback(lambda res: self._fn.check(Monitor()))
d.addCallback(lambda check_results: self._fn.repair(check_results))
def _check_results(rres):
self.failUnless(IRepairResults.providedBy(rres))
self.failUnless(rres.get_successful())
self.assertThat(IRepairResults.providedBy(rres), Equals(True))
self.assertThat(rres.get_successful(), Equals(True))
# TODO: examine results
self.copy_shares()
@ -50,11 +51,11 @@ class Repair(unittest.TestCase, PublishMixin, ShouldFailMixin):
new_shares = self.old_shares[1]
# TODO: this really shouldn't change anything. When we implement
# a "minimal-bandwidth" repairer, change this test to assert:
#self.failUnlessEqual(new_shares, initial_shares)
#self.assertThat(new_shares, Equals(initial_shares))
# all shares should be in the same place as before
self.failUnlessEqual(set(initial_shares.keys()),
set(new_shares.keys()))
self.assertThat(set(initial_shares.keys()),
Equals(set(new_shares.keys())))
# but they should all be at a newer seqnum. The IV will be
# different, so the roothash will be too.
for key in initial_shares:
@ -70,19 +71,19 @@ class Repair(unittest.TestCase, PublishMixin, ShouldFailMixin):
IV1,
k1, N1, segsize1, datalen1,
o1) = unpack_header(new_shares[key])
self.failUnlessEqual(version0, version1)
self.failUnlessEqual(seqnum0+1, seqnum1)
self.failUnlessEqual(k0, k1)
self.failUnlessEqual(N0, N1)
self.failUnlessEqual(segsize0, segsize1)
self.failUnlessEqual(datalen0, datalen1)
self.assertThat(version0, Equals(version1))
self.assertThat(seqnum0+1, Equals(seqnum1))
self.assertThat(k0, Equals(k1))
self.assertThat(N0, Equals(N1))
self.assertThat(segsize0, Equals(segsize1))
self.assertThat(datalen0, Equals(datalen1))
d.addCallback(_check_results)
return d
def failIfSharesChanged(self, ignored=None):
old_shares = self.old_shares[-2]
current_shares = self.old_shares[-1]
self.failUnlessEqual(old_shares, current_shares)
self.assertThat(old_shares, Equals(current_shares))
def _test_whether_repairable(self, publisher, nshares, expected_result):
@ -96,12 +97,12 @@ class Repair(unittest.TestCase, PublishMixin, ShouldFailMixin):
d.addCallback(_delete_some_shares)
d.addCallback(lambda ign: self._fn.check(Monitor()))
def _check(cr):
self.failIf(cr.is_healthy())
self.failUnlessEqual(cr.is_recoverable(), expected_result)
self.assertThat(cr.is_healthy(), Equals(False))
self.assertThat(cr.is_recoverable(), Equals(expected_result))
return cr
d.addCallback(_check)
d.addCallback(lambda check_results: self._fn.repair(check_results))
d.addCallback(lambda crr: self.failUnlessEqual(crr.get_successful(), expected_result))
d.addCallback(lambda crr: self.assertThat(crr.get_successful(), Equals(expected_result)))
return d
def test_unrepairable_0shares(self):
@ -136,7 +137,7 @@ class Repair(unittest.TestCase, PublishMixin, ShouldFailMixin):
del shares[peerid][shnum]
d.addCallback(_delete_some_shares)
d.addCallback(lambda ign: self._fn.check_and_repair(Monitor()))
d.addCallback(lambda crr: self.failUnlessEqual(crr.get_repair_successful(), expected_result))
d.addCallback(lambda crr: self.assertThat(crr.get_repair_successful(), Equals(expected_result)))
return d
def test_unrepairable_0shares_checkandrepair(self):
@ -181,13 +182,13 @@ class Repair(unittest.TestCase, PublishMixin, ShouldFailMixin):
self._fn.repair(check_results, force=True))
# this should give us 10 shares of the highest roothash
def _check_repair_results(rres):
self.failUnless(rres.get_successful())
self.assertThat(rres.get_successful(), Equals(True))
pass # TODO
d.addCallback(_check_repair_results)
d.addCallback(lambda res: self._fn.get_servermap(MODE_CHECK))
def _check_smap(smap):
self.failUnlessEqual(len(smap.recoverable_versions()), 1)
self.failIf(smap.unrecoverable_versions())
self.assertThat(smap.recoverable_versions(), HasLength(1))
self.assertThat(smap.unrecoverable_versions(), HasLength(0))
# now, which should have won?
roothash_s4a = self.get_roothash_for(3)
roothash_s4b = self.get_roothash_for(4)
@ -196,9 +197,9 @@ class Repair(unittest.TestCase, PublishMixin, ShouldFailMixin):
else:
expected_contents = self.CONTENTS[3]
new_versionid = smap.best_recoverable_version()
self.failUnlessEqual(new_versionid[0], 5) # seqnum 5
self.assertThat(new_versionid[0], Equals(5)) # seqnum 5
d2 = self._fn.download_version(smap, new_versionid)
d2.addCallback(self.failUnlessEqual, expected_contents)
d2.addCallback(self.assertEqual, expected_contents)
return d2
d.addCallback(_check_smap)
return d
@ -216,19 +217,19 @@ class Repair(unittest.TestCase, PublishMixin, ShouldFailMixin):
d.addCallback(lambda check_results: self._fn.repair(check_results))
# this should give us 10 shares of v3
def _check_repair_results(rres):
self.failUnless(rres.get_successful())
self.assertThat(rres.get_successful(), Equals(True))
pass # TODO
d.addCallback(_check_repair_results)
d.addCallback(lambda res: self._fn.get_servermap(MODE_CHECK))
def _check_smap(smap):
self.failUnlessEqual(len(smap.recoverable_versions()), 1)
self.failIf(smap.unrecoverable_versions())
self.assertThat(smap.recoverable_versions(), HasLength(1))
self.assertThat(smap.unrecoverable_versions(), HasLength(0))
# now, which should have won?
expected_contents = self.CONTENTS[3]
new_versionid = smap.best_recoverable_version()
self.failUnlessEqual(new_versionid[0], 5) # seqnum 5
self.assertThat(new_versionid[0], Equals(5)) # seqnum 5
d2 = self._fn.download_version(smap, new_versionid)
d2.addCallback(self.failUnlessEqual, expected_contents)
d2.addCallback(self.assertEqual, expected_contents)
return d2
d.addCallback(_check_smap)
return d
@ -256,12 +257,12 @@ class Repair(unittest.TestCase, PublishMixin, ShouldFailMixin):
d.addCallback(_get_readcap)
d.addCallback(lambda res: self._fn3.check_and_repair(Monitor()))
def _check_results(crr):
self.failUnless(ICheckAndRepairResults.providedBy(crr))
self.assertThat(ICheckAndRepairResults.providedBy(crr), Equals(True))
# we should detect the unhealthy, but skip over mutable-readcap
# repairs until #625 is fixed
self.failIf(crr.get_pre_repair_results().is_healthy())
self.failIf(crr.get_repair_attempted())
self.failIf(crr.get_post_repair_results().is_healthy())
self.assertThat(crr.get_pre_repair_results().is_healthy(), Equals(False))
self.assertThat(crr.get_repair_attempted(), Equals(False))
self.assertThat(crr.get_post_repair_results().is_healthy(), Equals(False))
d.addCallback(_check_results)
return d
@ -281,6 +282,6 @@ class Repair(unittest.TestCase, PublishMixin, ShouldFailMixin):
d.addCallback(lambda ign: self._fn2.check(Monitor()))
d.addCallback(lambda check_results: self._fn2.repair(check_results))
def _check(crr):
self.failUnlessEqual(crr.get_successful(), True)
self.assertThat(crr.get_successful(), Equals(True))
d.addCallback(_check)
return d
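Distilled, the invariant the repair test above checks is: shares keep their placement and encoding parameters, the sequence number is bumped by one, and (because the IV is regenerated) the root hash changes. A sketch of that check, assuming unpack_header lives in allmydata.mutable.layout — the diff only shows the name already in scope:

from allmydata.mutable.layout import unpack_header  # module path assumed

def check_repair_reencoded(test_case, old_shares, new_shares):
    # Shares should stay on the same (peerid, shnum) slots...
    test_case.assertEqual(set(old_shares), set(new_shares))
    for key in old_shares:
        (ver0, seq0, root0, iv0, k0, n0, segsize0, datalen0, o0) = \
            unpack_header(old_shares[key])
        (ver1, seq1, root1, iv1, k1, n1, segsize1, datalen1, o1) = \
            unpack_header(new_shares[key])
        # ...with the same format version and encoding parameters,
        test_case.assertEqual((ver0, k0, n0, segsize0, datalen0),
                              (ver1, k1, n1, segsize1, datalen1))
        # but a seqnum bumped by exactly one publish.
        test_case.assertEqual(seq0 + 1, seq1)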

View File

@ -11,7 +11,8 @@ if PY2:
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
from six.moves import cStringIO as StringIO
from twisted.trial import unittest
from ..common import AsyncTestCase
from testtools.matchers import Equals, HasLength, Contains
from twisted.internet import defer
from allmydata.util import base32, consumer
@ -23,8 +24,9 @@ from allmydata.mutable.retrieve import Retrieve
from .util import PublishMixin, make_storagebroker, corrupt
from .. import common_util as testutil
class Roundtrip(unittest.TestCase, testutil.ShouldFailMixin, PublishMixin):
class Roundtrip(AsyncTestCase, testutil.ShouldFailMixin, PublishMixin):
def setUp(self):
super(Roundtrip, self).setUp()
return self.publish_one()
def make_servermap(self, mode=MODE_READ, oldmap=None, sb=None):
@ -73,11 +75,11 @@ class Roundtrip(unittest.TestCase, testutil.ShouldFailMixin, PublishMixin):
def _do_retrieve(servermap):
self._smap = servermap
#self.dump_servermap(servermap)
self.failUnlessEqual(len(servermap.recoverable_versions()), 1)
self.assertThat(servermap.recoverable_versions(), HasLength(1))
return self.do_download(servermap)
d.addCallback(_do_retrieve)
def _retrieved(new_contents):
self.failUnlessEqual(new_contents, self.CONTENTS)
self.assertThat(new_contents, Equals(self.CONTENTS))
d.addCallback(_retrieved)
# we should be able to re-use the same servermap, both with and
# without updating it.
@ -132,10 +134,10 @@ class Roundtrip(unittest.TestCase, testutil.ShouldFailMixin, PublishMixin):
# back empty
d = self.make_servermap(sb=sb2)
def _check_servermap(servermap):
self.failUnlessEqual(servermap.best_recoverable_version(), None)
self.failIf(servermap.recoverable_versions())
self.failIf(servermap.unrecoverable_versions())
self.failIf(servermap.all_servers())
self.assertThat(servermap.best_recoverable_version(), Equals(None))
self.assertFalse(servermap.recoverable_versions())
self.assertFalse(servermap.unrecoverable_versions())
self.assertFalse(servermap.all_servers())
d.addCallback(_check_servermap)
return d
@ -154,7 +156,7 @@ class Roundtrip(unittest.TestCase, testutil.ShouldFailMixin, PublishMixin):
self._fn._storage_broker = self._storage_broker
return self._fn.download_best_version()
def _retrieved(new_contents):
self.failUnlessEqual(new_contents, self.CONTENTS)
self.assertThat(new_contents, Equals(self.CONTENTS))
d.addCallback(_restore)
d.addCallback(_retrieved)
return d
@ -178,13 +180,13 @@ class Roundtrip(unittest.TestCase, testutil.ShouldFailMixin, PublishMixin):
# should be noted in the servermap's list of problems.
if substring:
allproblems = [str(f) for f in servermap.get_problems()]
self.failUnlessIn(substring, "".join(allproblems))
self.assertThat("".join(allproblems), Contains(substring))
return servermap
if should_succeed:
d1 = self._fn.download_version(servermap, ver,
fetch_privkey)
d1.addCallback(lambda new_contents:
self.failUnlessEqual(new_contents, self.CONTENTS))
self.assertThat(new_contents, Equals(self.CONTENTS)))
else:
d1 = self.shouldFail(NotEnoughSharesError,
"_corrupt_all(offset=%s)" % (offset,),
@ -207,7 +209,7 @@ class Roundtrip(unittest.TestCase, testutil.ShouldFailMixin, PublishMixin):
# and the dump should mention the problems
s = StringIO()
dump = servermap.dump(s).getvalue()
self.failUnless("30 PROBLEMS" in dump, dump)
self.assertTrue("30 PROBLEMS" in dump, msg=dump)
d.addCallback(_check_servermap)
return d
@ -299,8 +301,8 @@ class Roundtrip(unittest.TestCase, testutil.ShouldFailMixin, PublishMixin):
# in NotEnoughSharesError, since each share will look invalid
def _check(res):
f = res[0]
self.failUnless(f.check(NotEnoughSharesError))
self.failUnless("uncoordinated write" in str(f))
self.assertTrue(f.check(NotEnoughSharesError))
self.assertThat(str(f), Contains("uncoordinated write"))
return self._test_corrupt_all(1, "ran out of servers",
corrupt_early=False,
failure_checker=_check)
@ -309,7 +311,7 @@ class Roundtrip(unittest.TestCase, testutil.ShouldFailMixin, PublishMixin):
def test_corrupt_all_block_late(self):
def _check(res):
f = res[0]
self.failUnless(f.check(NotEnoughSharesError))
self.assertTrue(f.check(NotEnoughSharesError))
return self._test_corrupt_all("share_data", "block hash tree failure",
corrupt_early=False,
failure_checker=_check)
@ -330,9 +332,9 @@ class Roundtrip(unittest.TestCase, testutil.ShouldFailMixin, PublishMixin):
shnums_to_corrupt=list(range(0, N-k)))
d.addCallback(lambda res: self.make_servermap())
def _do_retrieve(servermap):
self.failUnless(servermap.get_problems())
self.failUnless("pubkey doesn't match fingerprint"
in str(servermap.get_problems()[0]))
self.assertTrue(servermap.get_problems())
self.assertThat(str(servermap.get_problems()[0]),
Contains("pubkey doesn't match fingerprint"))
ver = servermap.best_recoverable_version()
r = Retrieve(self._fn, self._storage_broker, servermap, ver)
c = consumer.MemoryConsumer()
@ -340,7 +342,7 @@ class Roundtrip(unittest.TestCase, testutil.ShouldFailMixin, PublishMixin):
d.addCallback(_do_retrieve)
d.addCallback(lambda mc: b"".join(mc.chunks))
d.addCallback(lambda new_contents:
self.failUnlessEqual(new_contents, self.CONTENTS))
self.assertThat(new_contents, Equals(self.CONTENTS)))
return d
@ -355,11 +357,11 @@ class Roundtrip(unittest.TestCase, testutil.ShouldFailMixin, PublishMixin):
self.make_servermap())
def _do_retrieve(servermap):
ver = servermap.best_recoverable_version()
self.failUnless(ver)
self.assertTrue(ver)
return self._fn.download_best_version()
d.addCallback(_do_retrieve)
d.addCallback(lambda new_contents:
self.failUnlessEqual(new_contents, self.CONTENTS))
self.assertThat(new_contents, Equals(self.CONTENTS)))
return d

View File

@ -11,7 +11,8 @@ from future.utils import PY2
if PY2:
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
from twisted.trial import unittest
from ..common import AsyncTestCase
from testtools.matchers import Equals, NotEquals, HasLength
from twisted.internet import defer
from allmydata.monitor import Monitor
from allmydata.mutable.common import \
@ -20,8 +21,9 @@ from allmydata.mutable.publish import MutableData
from allmydata.mutable.servermap import ServerMap, ServermapUpdater
from .util import PublishMixin
class Servermap(unittest.TestCase, PublishMixin):
class Servermap(AsyncTestCase, PublishMixin):
def setUp(self):
super(Servermap, self).setUp()
return self.publish_one()
def make_servermap(self, mode=MODE_CHECK, fn=None, sb=None,
@ -42,17 +44,17 @@ class Servermap(unittest.TestCase, PublishMixin):
return d
def failUnlessOneRecoverable(self, sm, num_shares):
self.failUnlessEqual(len(sm.recoverable_versions()), 1)
self.failUnlessEqual(len(sm.unrecoverable_versions()), 0)
self.assertThat(sm.recoverable_versions(), HasLength(1))
self.assertThat(sm.unrecoverable_versions(), HasLength(0))
best = sm.best_recoverable_version()
self.failIfEqual(best, None)
self.failUnlessEqual(sm.recoverable_versions(), set([best]))
self.failUnlessEqual(len(sm.shares_available()), 1)
self.failUnlessEqual(sm.shares_available()[best], (num_shares, 3, 10))
self.assertThat(best, NotEquals(None))
self.assertThat(sm.recoverable_versions(), Equals(set([best])))
self.assertThat(sm.shares_available(), HasLength(1))
self.assertThat(sm.shares_available()[best], Equals((num_shares, 3, 10)))
shnum, servers = list(sm.make_sharemap().items())[0]
server = list(servers)[0]
self.failUnlessEqual(sm.version_on_server(server, shnum), best)
self.failUnlessEqual(sm.version_on_server(server, 666), None)
self.assertThat(sm.version_on_server(server, shnum), Equals(best))
self.assertThat(sm.version_on_server(server, 666), Equals(None))
return sm
def test_basic(self):
@ -117,7 +119,7 @@ class Servermap(unittest.TestCase, PublishMixin):
v = sm.best_recoverable_version()
vm = sm.make_versionmap()
shares = list(vm[v])
self.failUnlessEqual(len(shares), 6)
self.assertThat(shares, HasLength(6))
self._corrupted = set()
# mark the first 5 shares as corrupt, then update the servermap.
# The map should not have the marked shares it in any more, and
@ -135,18 +137,17 @@ class Servermap(unittest.TestCase, PublishMixin):
shares = list(vm[v])
for (server, shnum) in self._corrupted:
server_shares = sm.debug_shares_on_server(server)
self.failIf(shnum in server_shares,
"%d was in %s" % (shnum, server_shares))
self.failUnlessEqual(len(shares), 5)
self.assertFalse(shnum in server_shares, "%d was in %s" % (shnum, server_shares))
self.assertThat(shares, HasLength(5))
d.addCallback(_check_map)
return d
def failUnlessNoneRecoverable(self, sm):
self.failUnlessEqual(len(sm.recoverable_versions()), 0)
self.failUnlessEqual(len(sm.unrecoverable_versions()), 0)
self.assertThat(sm.recoverable_versions(), HasLength(0))
self.assertThat(sm.unrecoverable_versions(), HasLength(0))
best = sm.best_recoverable_version()
self.failUnlessEqual(best, None)
self.failUnlessEqual(len(sm.shares_available()), 0)
self.assertThat(best, Equals(None))
self.assertThat(sm.shares_available(), HasLength(0))
def test_no_shares(self):
self._storage._peers = {} # delete all shares
@ -168,12 +169,12 @@ class Servermap(unittest.TestCase, PublishMixin):
return d
def failUnlessNotQuiteEnough(self, sm):
self.failUnlessEqual(len(sm.recoverable_versions()), 0)
self.failUnlessEqual(len(sm.unrecoverable_versions()), 1)
self.assertThat(sm.recoverable_versions(), HasLength(0))
self.assertThat(sm.unrecoverable_versions(), HasLength(1))
best = sm.best_recoverable_version()
self.failUnlessEqual(best, None)
self.failUnlessEqual(len(sm.shares_available()), 1)
self.failUnlessEqual(list(sm.shares_available().values())[0], (2,3,10) )
self.assertThat(best, Equals(None))
self.assertThat(sm.shares_available(), HasLength(1))
self.assertThat(list(sm.shares_available().values())[0], Equals((2,3,10)))
return sm
def test_not_quite_enough_shares(self):
@ -193,7 +194,7 @@ class Servermap(unittest.TestCase, PublishMixin):
d.addCallback(lambda res: ms(mode=MODE_CHECK))
d.addCallback(lambda sm: self.failUnlessNotQuiteEnough(sm))
d.addCallback(lambda sm:
self.failUnlessEqual(len(sm.make_sharemap()), 2))
self.assertThat(sm.make_sharemap(), HasLength(2)))
d.addCallback(lambda res: ms(mode=MODE_ANYTHING))
d.addCallback(lambda sm: self.failUnlessNotQuiteEnough(sm))
d.addCallback(lambda res: ms(mode=MODE_WRITE))
@ -216,7 +217,7 @@ class Servermap(unittest.TestCase, PublishMixin):
# Calling make_servermap also updates the servermap in the mode
# that we specify, so we just need to see what it says.
def _check_servermap(sm):
self.failUnlessEqual(len(sm.recoverable_versions()), 1)
self.assertThat(sm.recoverable_versions(), HasLength(1))
d.addCallback(_check_servermap)
return d
@ -229,10 +230,10 @@ class Servermap(unittest.TestCase, PublishMixin):
self.make_servermap(mode=MODE_WRITE, update_range=(1, 2)))
def _check_servermap(sm):
# 10 shares
self.failUnlessEqual(len(sm.update_data), 10)
self.assertThat(sm.update_data, HasLength(10))
# one version
for data in sm.update_data.values():
self.failUnlessEqual(len(data), 1)
self.assertThat(data, HasLength(1))
d.addCallback(_check_servermap)
return d
@ -244,5 +245,5 @@ class Servermap(unittest.TestCase, PublishMixin):
d.addCallback(lambda ignored:
self.make_servermap(mode=MODE_CHECK))
d.addCallback(lambda servermap:
self.failUnlessEqual(len(servermap.recoverable_versions()), 1))
self.assertThat(servermap.recoverable_versions(), HasLength(1)))
return d

View File

@ -11,7 +11,12 @@ if PY2:
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
import re
from twisted.trial import unittest
from ..common import AsyncTestCase
from testtools.matchers import (
Equals,
IsInstance,
GreaterThan,
)
from twisted.internet import defer
from allmydata.interfaces import MDMF_VERSION
from allmydata.mutable.filenode import MutableFileNode
@ -25,7 +30,7 @@ from .. import common_util as testutil
# this up.
SEGSIZE = 128*1024
class Update(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin):
class Update(GridTestMixin, AsyncTestCase, testutil.ShouldFailMixin):
def setUp(self):
GridTestMixin.setUp(self)
self.basedir = self.mktemp()
@ -35,14 +40,14 @@ class Update(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin):
# self.data should be at least three segments long.
td = b"testdata "
self.data = td*(int(3*SEGSIZE//len(td))+10) # currently about 400kB
assert len(self.data) > 3*SEGSIZE
self.assertThat(len(self.data), GreaterThan(3*SEGSIZE))
self.small_data = b"test data" * 10 # 90 B; SDMF
def do_upload_sdmf(self):
d = self.nm.create_mutable_file(MutableData(self.small_data))
def _then(n):
assert isinstance(n, MutableFileNode)
self.assertThat(n, IsInstance(MutableFileNode))
self.sdmf_node = n
d.addCallback(_then)
return d
@ -51,7 +56,7 @@ class Update(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin):
d = self.nm.create_mutable_file(MutableData(self.data),
version=MDMF_VERSION)
def _then(n):
assert isinstance(n, MutableFileNode)
self.assertThat(n, IsInstance(MutableFileNode))
self.mdmf_node = n
d.addCallback(_then)
return d
@ -185,7 +190,7 @@ class Update(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin):
len(self.data)))
d.addCallback(lambda ign: self.mdmf_node.download_best_version())
d.addCallback(lambda results:
self.failUnlessEqual(results, new_data))
self.assertThat(results, Equals(new_data)))
return d
d0.addCallback(_run)
return d0
@ -201,7 +206,7 @@ class Update(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin):
len(self.small_data)))
d.addCallback(lambda ign: self.sdmf_node.download_best_version())
d.addCallback(lambda results:
self.failUnlessEqual(results, new_data))
self.assertThat(results, Equals(new_data)))
return d
d0.addCallback(_run)
return d0
@ -221,7 +226,7 @@ class Update(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin):
replace_offset))
d.addCallback(lambda ign: self.mdmf_node.download_best_version())
d.addCallback(lambda results:
self.failUnlessEqual(results, new_data))
self.assertThat(results, Equals(new_data)))
return d
d0.addCallback(_run)
return d0
@ -242,7 +247,7 @@ class Update(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin):
replace_offset))
d.addCallback(lambda ignored: self.mdmf_node.download_best_version())
d.addCallback(lambda results:
self.failUnlessEqual(results, new_data))
self.assertThat(results, Equals(new_data)))
return d
d0.addCallback(_run)
return d0

View File

@ -14,7 +14,13 @@ import os
from six.moves import cStringIO as StringIO
from twisted.internet import defer
from twisted.trial import unittest
from ..common import AsyncTestCase
from testtools.matchers import (
Equals,
IsInstance,
HasLength,
Contains,
)
from allmydata import uri
from allmydata.interfaces import SDMF_VERSION, MDMF_VERSION
@ -29,7 +35,7 @@ from ..no_network import GridTestMixin
from .util import PublishMixin
from .. import common_util as testutil
class Version(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin, \
class Version(GridTestMixin, AsyncTestCase, testutil.ShouldFailMixin, \
PublishMixin):
def setUp(self):
GridTestMixin.setUp(self)
@ -47,8 +53,8 @@ class Version(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin, \
d = self.nm.create_mutable_file(MutableData(data),
version=MDMF_VERSION)
def _then(n):
assert isinstance(n, MutableFileNode)
assert n._protocol_version == MDMF_VERSION
self.assertThat(n, IsInstance(MutableFileNode))
self.assertThat(n._protocol_version, Equals(MDMF_VERSION))
self.mdmf_node = n
return n
d.addCallback(_then)
@ -59,8 +65,8 @@ class Version(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin, \
data = self.small_data
d = self.nm.create_mutable_file(MutableData(data))
def _then(n):
assert isinstance(n, MutableFileNode)
assert n._protocol_version == SDMF_VERSION
self.assertThat(n, IsInstance(MutableFileNode))
self.assertThat(n._protocol_version, Equals(SDMF_VERSION))
self.sdmf_node = n
return n
d.addCallback(_then)
@ -69,9 +75,9 @@ class Version(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin, \
def do_upload_empty_sdmf(self):
d = self.nm.create_mutable_file(MutableData(b""))
def _then(n):
assert isinstance(n, MutableFileNode)
self.assertThat(n, IsInstance(MutableFileNode))
self.sdmf_zero_length_node = n
assert n._protocol_version == SDMF_VERSION
self.assertThat(n._protocol_version, Equals(SDMF_VERSION))
return n
d.addCallback(_then)
return d
@ -95,7 +101,7 @@ class Version(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin, \
debug.find_shares(fso)
sharefiles = fso.stdout.getvalue().splitlines()
expected = self.nm.default_encoding_parameters["n"]
self.failUnlessEqual(len(sharefiles), expected)
self.assertThat(sharefiles, HasLength(expected))
do = debug.DumpOptions()
do["filename"] = sharefiles[0]
@ -103,17 +109,17 @@ class Version(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin, \
debug.dump_share(do)
output = do.stdout.getvalue()
lines = set(output.splitlines())
self.failUnless("Mutable slot found:" in lines, output)
self.failUnless(" share_type: MDMF" in lines, output)
self.failUnless(" num_extra_leases: 0" in lines, output)
self.failUnless(" MDMF contents:" in lines, output)
self.failUnless(" seqnum: 1" in lines, output)
self.failUnless(" required_shares: 3" in lines, output)
self.failUnless(" total_shares: 10" in lines, output)
self.failUnless(" segsize: 131073" in lines, output)
self.failUnless(" datalen: %d" % len(self.data) in lines, output)
self.assertTrue("Mutable slot found:" in lines, output)
self.assertTrue(" share_type: MDMF" in lines, output)
self.assertTrue(" num_extra_leases: 0" in lines, output)
self.assertTrue(" MDMF contents:" in lines, output)
self.assertTrue(" seqnum: 1" in lines, output)
self.assertTrue(" required_shares: 3" in lines, output)
self.assertTrue(" total_shares: 10" in lines, output)
self.assertTrue(" segsize: 131073" in lines, output)
self.assertTrue(" datalen: %d" % len(self.data) in lines, output)
vcap = str(n.get_verify_cap().to_string(), "utf-8")
self.failUnless(" verify-cap: %s" % vcap in lines, output)
self.assertTrue(" verify-cap: %s" % vcap in lines, output)
cso = debug.CatalogSharesOptions()
cso.nodedirs = fso.nodedirs
cso.stdout = StringIO()
@ -122,13 +128,13 @@ class Version(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin, \
shares = cso.stdout.getvalue().splitlines()
oneshare = shares[0] # all shares should be MDMF
self.failIf(oneshare.startswith("UNKNOWN"), oneshare)
self.failUnless(oneshare.startswith("MDMF"), oneshare)
self.assertTrue(oneshare.startswith("MDMF"), oneshare)
fields = oneshare.split()
self.failUnlessEqual(fields[0], "MDMF")
self.failUnlessEqual(fields[1].encode("ascii"), storage_index)
self.failUnlessEqual(fields[2], "3/10")
self.failUnlessEqual(fields[3], "%d" % len(self.data))
self.failUnless(fields[4].startswith("#1:"), fields[3])
self.assertThat(fields[0], Equals("MDMF"))
self.assertThat(fields[1].encode("ascii"), Equals(storage_index))
self.assertThat(fields[2], Equals("3/10"))
self.assertThat(fields[3], Equals("%d" % len(self.data)))
self.assertTrue(fields[4].startswith("#1:"), fields[4])
# the rest of fields[4] is the roothash, which depends upon
# encryption salts and is not constant. fields[5] is the
# remaining time on the longest lease, which is timing dependent.
@ -140,11 +146,11 @@ class Version(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin, \
d = self.do_upload()
d.addCallback(lambda ign: self.mdmf_node.get_best_readable_version())
d.addCallback(lambda bv:
self.failUnlessEqual(bv.get_sequence_number(), 1))
self.assertThat(bv.get_sequence_number(), Equals(1)))
d.addCallback(lambda ignored:
self.sdmf_node.get_best_readable_version())
d.addCallback(lambda bv:
self.failUnlessEqual(bv.get_sequence_number(), 1))
self.assertThat(bv.get_sequence_number(), Equals(1)))
# Now update. The sequence number should then be 2 in both
# cases.
def _do_update(ignored):
@ -158,11 +164,11 @@ class Version(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin, \
d.addCallback(lambda ignored:
self.mdmf_node.get_best_readable_version())
d.addCallback(lambda bv:
self.failUnlessEqual(bv.get_sequence_number(), 2))
self.assertThat(bv.get_sequence_number(), Equals(2)))
d.addCallback(lambda ignored:
self.sdmf_node.get_best_readable_version())
d.addCallback(lambda bv:
self.failUnlessEqual(bv.get_sequence_number(), 2))
self.assertThat(bv.get_sequence_number(), Equals(2)))
return d
@ -175,10 +181,10 @@ class Version(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin, \
def _then(ign):
mdmf_uri = self.mdmf_node.get_uri()
cap = uri.from_string(mdmf_uri)
self.failUnless(isinstance(cap, uri.WriteableMDMFFileURI))
self.assertTrue(isinstance(cap, uri.WriteableMDMFFileURI))
readonly_mdmf_uri = self.mdmf_node.get_readonly_uri()
cap = uri.from_string(readonly_mdmf_uri)
self.failUnless(isinstance(cap, uri.ReadonlyMDMFFileURI))
self.assertTrue(isinstance(cap, uri.ReadonlyMDMFFileURI))
d.addCallback(_then)
return d
@ -189,16 +195,16 @@ class Version(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin, \
d.addCallback(lambda ign: self.mdmf_node.get_best_mutable_version())
def _check_mdmf(bv):
n = self.mdmf_node
self.failUnlessEqual(bv.get_writekey(), n.get_writekey())
self.failUnlessEqual(bv.get_storage_index(), n.get_storage_index())
self.failIf(bv.is_readonly())
self.assertThat(bv.get_writekey(), Equals(n.get_writekey()))
self.assertThat(bv.get_storage_index(), Equals(n.get_storage_index()))
self.assertFalse(bv.is_readonly())
d.addCallback(_check_mdmf)
d.addCallback(lambda ign: self.sdmf_node.get_best_mutable_version())
def _check_sdmf(bv):
n = self.sdmf_node
self.failUnlessEqual(bv.get_writekey(), n.get_writekey())
self.failUnlessEqual(bv.get_storage_index(), n.get_storage_index())
self.failIf(bv.is_readonly())
self.assertThat(bv.get_writekey(), Equals(n.get_writekey()))
self.assertThat(bv.get_storage_index(), Equals(n.get_storage_index()))
self.assertFalse(bv.is_readonly())
d.addCallback(_check_sdmf)
return d
@ -206,21 +212,21 @@ class Version(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin, \
def test_get_readonly_version(self):
d = self.do_upload()
d.addCallback(lambda ign: self.mdmf_node.get_best_readable_version())
d.addCallback(lambda bv: self.failUnless(bv.is_readonly()))
d.addCallback(lambda bv: self.assertTrue(bv.is_readonly()))
# Attempting to get a mutable version of a mutable file from a
# filenode initialized with a readcap should return a readonly
# version of that same node.
d.addCallback(lambda ign: self.mdmf_node.get_readonly())
d.addCallback(lambda ro: ro.get_best_mutable_version())
d.addCallback(lambda v: self.failUnless(v.is_readonly()))
d.addCallback(lambda v: self.assertTrue(v.is_readonly()))
d.addCallback(lambda ign: self.sdmf_node.get_best_readable_version())
d.addCallback(lambda bv: self.failUnless(bv.is_readonly()))
d.addCallback(lambda bv: self.assertTrue(bv.is_readonly()))
d.addCallback(lambda ign: self.sdmf_node.get_readonly())
d.addCallback(lambda ro: ro.get_best_mutable_version())
d.addCallback(lambda v: self.failUnless(v.is_readonly()))
d.addCallback(lambda v: self.assertTrue(v.is_readonly()))
return d
@ -232,13 +238,13 @@ class Version(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin, \
d.addCallback(lambda ignored:
self.mdmf_node.download_best_version())
d.addCallback(lambda data:
self.failUnlessEqual(data, b"foo bar baz" * 100000))
self.assertThat(data, Equals(b"foo bar baz" * 100000)))
d.addCallback(lambda ignored:
self.sdmf_node.overwrite(new_small_data))
d.addCallback(lambda ignored:
self.sdmf_node.download_best_version())
d.addCallback(lambda data:
self.failUnlessEqual(data, b"foo bar baz" * 10))
self.assertThat(data, Equals(b"foo bar baz" * 10)))
return d
@ -250,13 +256,13 @@ class Version(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin, \
d.addCallback(lambda ignored:
self.mdmf_node.download_best_version())
d.addCallback(lambda data:
self.failUnlessIn(b"modified", data))
self.assertThat(data, Contains(b"modified")))
d.addCallback(lambda ignored:
self.sdmf_node.modify(modifier))
d.addCallback(lambda ignored:
self.sdmf_node.download_best_version())
d.addCallback(lambda data:
self.failUnlessIn(b"modified", data))
self.assertThat(data, Contains(b"modified")))
return d
@ -271,13 +277,13 @@ class Version(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin, \
d.addCallback(lambda ignored:
self.mdmf_node.download_best_version())
d.addCallback(lambda data:
self.failUnlessIn(b"modified", data))
self.assertThat(data, Contains(b"modified")))
d.addCallback(lambda ignored:
self.sdmf_node.modify(modifier))
d.addCallback(lambda ignored:
self.sdmf_node.download_best_version())
d.addCallback(lambda data:
self.failUnlessIn(b"modified", data))
self.assertThat(data, Contains(b"modified")))
return d
@ -308,13 +314,13 @@ class Version(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin, \
d.addCallback(lambda ignored:
self._fn.download_version(self.servermap, self.version1))
d.addCallback(lambda results:
self.failUnlessEqual(self.CONTENTS[self.version1_index],
results))
self.assertThat(self.CONTENTS[self.version1_index],
Equals(results)))
d.addCallback(lambda ignored:
self._fn.download_version(self.servermap, self.version2))
d.addCallback(lambda results:
self.failUnlessEqual(self.CONTENTS[self.version2_index],
results))
self.assertThat(self.CONTENTS[self.version2_index],
Equals(results)))
return d
@ -344,7 +350,7 @@ class Version(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin, \
for i in range(0, len(expected), step):
d2.addCallback(lambda ignored, i=i: version.read(c, i, step))
d2.addCallback(lambda ignored:
self.failUnlessEqual(expected, b"".join(c.chunks)))
self.assertThat(expected, Equals(b"".join(c.chunks))))
return d2
d.addCallback(_read_data)
return d
@ -447,16 +453,16 @@ class Version(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin, \
d2 = defer.succeed(None)
d2.addCallback(lambda ignored: version.read(c))
d2.addCallback(lambda ignored:
self.failUnlessEqual(expected, b"".join(c.chunks)))
self.assertThat(expected, Equals(b"".join(c.chunks))))
d2.addCallback(lambda ignored: version.read(c2, offset=0,
size=len(expected)))
d2.addCallback(lambda ignored:
self.failUnlessEqual(expected, b"".join(c2.chunks)))
self.assertThat(expected, Equals(b"".join(c2.chunks))))
return d2
d.addCallback(_read_data)
d.addCallback(lambda ignored: node.download_best_version())
d.addCallback(lambda data: self.failUnlessEqual(expected, data))
d.addCallback(lambda data: self.assertThat(expected, Equals(data)))
return d
def test_read_and_download_mdmf(self):

View File

@ -34,7 +34,7 @@ from twisted.trial import unittest
from twisted.internet import defer
from twisted.internet.task import Clock
from hypothesis import given, strategies
from hypothesis import given, strategies, example
import itertools
from allmydata import interfaces
@ -230,7 +230,6 @@ class Bucket(unittest.TestCase):
br = BucketReader(self, bw.finalhome)
self.assertEqual(br.read(0, length), expected_data)
@given(
maybe_overlapping_offset=strategies.integers(min_value=0, max_value=98),
maybe_overlapping_length=strategies.integers(min_value=1, max_value=100),
@ -264,6 +263,38 @@ class Bucket(unittest.TestCase):
bw.write(40, b"1" * 10)
bw.write(60, b"1" * 40)
@given(
offsets=strategies.lists(
strategies.integers(min_value=0, max_value=99),
min_size=20,
max_size=20
),
)
@example(offsets=[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 40, 70])
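# The up-to-30-byte writes in this example cover bytes 0-29, 10-39, 40-69
# and 70-99, i.e. the whole 100-byte share, so it is guaranteed to drive
# the writer to the finished state.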
def test_writes_return_when_finished(
self, offsets
):
"""
The ``BucketWriter.write()`` method returns true if and only if the maximum
size has been reached via potentially overlapping writes. The
remaining ranges can be checked via ``BucketWriter.required_ranges()``.
"""
incoming, final = self.make_workdir("overlapping_writes_{}".format(uuid4()))
bw = BucketWriter(
self, incoming, final, 100, self.make_lease(), Clock()
)
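# Local mirror of which of the 100 bytes have been written, so we can
# compare our view against what the server reports as still required.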
local_written = [0] * 100
for offset in offsets:
length = min(30, 100 - offset)
data = b"1" * length
for i in range(offset, offset+length):
local_written[i] = 1
finished = bw.write(offset, data)
self.assertEqual(finished, sum(local_written) == 100)
required_ranges = bw.required_ranges()
for i in range(0, 100):
self.assertEqual(local_written[i] == 1, required_ranges.get(i) is None)
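# An illustrative sketch of the write()/required_ranges() contract checked
# above, assuming a fresh 100-byte BucketWriter ``bw`` (names as in this
# test; not executed here):
#
#     finished = bw.write(0, b"\x00" * 60)             # False: 60-99 missing
#     assert bw.required_ranges().get(75) is not None  # byte 75 still needed
#     finished = bw.write(60, b"\x00" * 40)            # True: share complete
#     assert bw.required_ranges().get(75) is None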
def test_read_past_end_of_share_data(self):
# test vector for immutable files (hard-coded contents of an immutable share
# file):

View File

@ -15,16 +15,16 @@ if PY2:
# fmt: on
from base64 import b64encode
from twisted.internet.defer import inlineCallbacks
from os import urandom
from hypothesis import assume, given, strategies as st
from fixtures import Fixture, TempDir
from treq.testing import StubTreq
from klein import Klein
from hyperlink import DecodedURL
from collections_extended import RangeMap
from .common import AsyncTestCase, SyncTestCase
from .common import SyncTestCase
from ..storage.server import StorageServer
from ..storage.http_server import (
HTTPServer,
@ -33,7 +33,13 @@ from ..storage.http_server import (
ClientSecretsException,
_authorized_route,
)
from ..storage.http_client import StorageClient, ClientException
from ..storage.http_client import (
StorageClient,
ClientException,
StorageClientImmutables,
ImmutableCreateResult,
UploadProgress,
)
def _post_process(params):
@ -140,6 +146,7 @@ class ExtractSecretsTests(SyncTestCase):
_extract_secrets(["lease-cancel-secret eA=="], {Secrets.LEASE_RENEW})
# TODO should be actual swissnum
SWISSNUM_FOR_TEST = b"abcd"
@ -157,7 +164,24 @@ class TestApp(object):
return "BAD: {}".format(authorization)
class RoutingTests(AsyncTestCase):
def result_of(d):
"""
Synchronously extract the result of a Deferred.
"""
result = []
error = []
d.addCallbacks(result.append, error.append)
if result:
return result[0]
if error:
error[0].raiseException()
raise RuntimeError(
"We expected given Deferred to have result already, but it wasn't. "
+ "This is probably a test design issue."
)
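# A minimal usage sketch (illustrative only, not executed as part of the
# suite): result_of() turns an already-fired Deferred into a plain value,
# so tests can stay synchronous.
#
#     from twisted.internet.defer import succeed
#     assert result_of(succeed(42)) == 42
#
# A Deferred that has not fired yet triggers the RuntimeError above instead.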
class RoutingTests(SyncTestCase):
"""
Tests for the HTTP routing infrastructure.
"""
@ -175,24 +199,28 @@ class RoutingTests(AsyncTestCase):
treq=StubTreq(self._http_server._app.resource()),
)
@inlineCallbacks
def test_authorization_enforcement(self):
"""
The requirement for secrets is enforced; if they are not given, a 400
response code is returned.
"""
# Without secret, get a 400 error.
response = yield self.client._request(
"GET", "http://127.0.0.1/upload_secret", {}
response = result_of(
self.client._request(
"GET",
"http://127.0.0.1/upload_secret",
)
)
self.assertEqual(response.code, 400)
# With secret, we're good.
response = yield self.client._request(
"GET", "http://127.0.0.1/upload_secret", {Secrets.UPLOAD: b"MAGIC"}
response = result_of(
self.client._request(
"GET", "http://127.0.0.1/upload_secret", upload_secret=b"MAGIC"
)
)
self.assertEqual(response.code, 200)
self.assertEqual((yield response.content()), b"GOOD SECRET")
self.assertEqual(result_of(response.content()), b"GOOD SECRET")
class HttpTestFixture(Fixture):
@ -204,7 +232,6 @@ class HttpTestFixture(Fixture):
def _setUp(self):
self.tempdir = self.useFixture(TempDir())
self.storage_server = StorageServer(self.tempdir.path, b"\x00" * 20)
# TODO what should the swissnum _actually_ be?
self.http_server = HTTPServer(self.storage_server, SWISSNUM_FOR_TEST)
self.client = StorageClient(
DecodedURL.from_text("http://127.0.0.1"),
@ -213,7 +240,7 @@ class HttpTestFixture(Fixture):
)
class GenericHTTPAPITests(AsyncTestCase):
class GenericHTTPAPITests(SyncTestCase):
"""
Tests of HTTP client talking to the HTTP server, for generic HTTP API
endpoints and concerns.
@ -225,7 +252,6 @@ class GenericHTTPAPITests(AsyncTestCase):
super(GenericHTTPAPITests, self).setUp()
self.http = self.useFixture(HttpTestFixture())
@inlineCallbacks
def test_bad_authentication(self):
"""
If the wrong swissnum is used, an ``Unauthorized`` response code is
@ -237,10 +263,9 @@ class GenericHTTPAPITests(AsyncTestCase):
treq=StubTreq(self.http.http_server.get_resource()),
)
with self.assertRaises(ClientException) as e:
yield client.get_version()
result_of(client.get_version())
self.assertEqual(e.exception.args[0], 401)
@inlineCallbacks
def test_version(self):
"""
The client can return the version.
@ -248,7 +273,7 @@ class GenericHTTPAPITests(AsyncTestCase):
We ignore available disk space and max immutable share size, since those
might change across calls.
"""
version = yield self.http.client.get_version()
version = result_of(self.http.client.get_version())
version[b"http://allmydata.org/tahoe/protocols/storage/v1"].pop(
b"available-space"
)
@ -263,3 +288,171 @@ class GenericHTTPAPITests(AsyncTestCase):
b"maximum-immutable-share-size"
)
self.assertEqual(version, expected_version)
class ImmutableHTTPAPITests(SyncTestCase):
"""
Tests for immutable upload/download APIs.
"""
def setUp(self):
if PY2:
self.skipTest("Not going to bother supporting Python 2")
super(ImmutableHTTPAPITests, self).setUp()
self.http = self.useFixture(HttpTestFixture())
def test_upload_can_be_downloaded(self):
"""
A single share can be uploaded in (possibly overlapping) chunks, and
then arbitrary chunks can be downloaded, and they will match the
original file.
We don't exercise the full variation of overlapping chunks because
that's already done in test_storage.py.
"""
length = 100
expected_data = b"".join(bytes([i]) for i in range(100))
im_client = StorageClientImmutables(self.http.client)
# Create an upload:
upload_secret = urandom(32)
lease_secret = urandom(32)
storage_index = b"".join(bytes([i]) for i in range(16))
created = result_of(
im_client.create(
storage_index, {1}, 100, upload_secret, lease_secret, lease_secret
)
)
self.assertEqual(
created, ImmutableCreateResult(already_have=set(), allocated={1})
)
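# ``remaining`` tracks the byte ranges the server still needs; initially
# the whole 100-byte share is outstanding.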
remaining = RangeMap()
remaining.set(True, 0, 100)
# Three writes: 10-19, 30-39, 50-59. This allows for a bunch of holes.
def write(offset, length):
remaining.empty(offset, offset + length)
return im_client.write_share_chunk(
storage_index,
1,
upload_secret,
offset,
expected_data[offset : offset + length],
)
upload_progress = result_of(write(10, 10))
self.assertEqual(
upload_progress, UploadProgress(finished=False, required=remaining)
)
upload_progress = result_of(write(30, 10))
self.assertEqual(
upload_progress, UploadProgress(finished=False, required=remaining)
)
upload_progress = result_of(write(50, 10))
self.assertEqual(
upload_progress, UploadProgress(finished=False, required=remaining)
)
# Then, an overlapping write with matching data (15-35):
upload_progress = result_of(write(15, 20))
self.assertEqual(
upload_progress, UploadProgress(finished=False, required=remaining)
)
# Now fill in the holes:
upload_progress = result_of(write(0, 10))
self.assertEqual(
upload_progress, UploadProgress(finished=False, required=remaining)
)
upload_progress = result_of(write(40, 10))
self.assertEqual(
upload_progress, UploadProgress(finished=False, required=remaining)
)
upload_progress = result_of(write(60, 40))
self.assertEqual(
upload_progress, UploadProgress(finished=True, required=RangeMap())
)
# We can now read:
for offset, length in [(0, 100), (10, 19), (99, 1), (49, 200)]:
downloaded = result_of(
im_client.read_share_chunk(storage_index, 1, offset, length)
)
self.assertEqual(downloaded, expected_data[offset : offset + length])
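# The client-side protocol exercised above, distilled (illustrative; the
# parameter names here are descriptive, not the actual signatures):
#
#     created = result_of(im_client.create(
#         storage_index, {share_number}, size, upload_secret,
#         lease_renew_secret, lease_cancel_secret))
#     progress = result_of(im_client.write_share_chunk(
#         storage_index, share_number, upload_secret, offset, data))
#     # ...repeat until progress.finished is True, then:
#     chunk = result_of(im_client.read_share_chunk(
#         storage_index, share_number, offset, length))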
def test_multiple_shares_uploaded_to_different_place(self):
"""
If a storage index has multiple shares, uploads to different shares are
stored separately and can be downloaded separately.
TBD in https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3860
"""
def test_bucket_allocated_with_new_shares(self):
"""
If some shares already exist, allocating shares indicates only the new
ones were created.
TBD in https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3860
"""
def test_bucket_allocation_new_upload_secret(self):
"""
If a bucket was allocated with one upload secret, and a different upload
secret is used to allocate the bucket again, the second allocation fails.
TBD in https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3860
"""
def test_upload_with_wrong_upload_secret_fails(self):
"""
Uploading with a key that doesn't match the one used to allocate the
bucket will fail.
TBD in https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3860
"""
def test_upload_offset_cannot_be_negative(self):
"""
A negative upload offset will be rejected.
TBD in https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3860
"""
def test_mismatching_upload_fails(self):
"""
If an uploaded chunk conflicts with an already uploaded chunk, a
CONFLICT error is returned.
TBD in https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3860
"""
def test_read_of_wrong_storage_index_fails(self):
"""
Reading from an unknown storage index results in 404.
TBD in https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3860
"""
def test_read_of_wrong_share_number_fails(self):
"""
Reading from an unknown share number results in 404.
TBD in https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3860
"""
def test_read_with_negative_offset_fails(self):
"""
The offset for reads cannot be negative.
TBD in https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3860
"""
def test_read_with_negative_length_fails(self):
"""
The length for reads cannot be negative.
TBD in https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3860
"""

87
tests.nix Normal file
View File

@ -0,0 +1,87 @@
let
sources = import nix/sources.nix;
in
# See default.nix for documentation about parameters.
{ pkgsVersion ? "nixpkgs-21.11"
, pkgs ? import sources.${pkgsVersion} { }
, pypiData ? sources.pypi-deps-db
, pythonVersion ? "python37"
, mach-nix ? import sources.mach-nix {
inherit pkgs pypiData;
python = pythonVersion;
}
}@args:
let
# We would like to know the test requirements but mach-nix does not directly
# expose this information to us. However, it is perfectly capable of
# determining it if we ask right... This is probably not meant to be a
# public mach-nix API but we pinned mach-nix so we can deal with mach-nix
# upgrade breakage in our own time.
mach-lib = import "${sources.mach-nix}/mach_nix/nix/lib.nix" {
inherit pkgs;
lib = pkgs.lib;
};
tests_require = (mach-lib.extract "python37" ./. "extras_require" ).extras_require.test;
# Get the Tahoe-LAFS package itself. This does not include test
# requirements and we don't ask for test requirements so that we can just
# re-use the normal package if it is already built.
tahoe-lafs = import ./. args;
# If we want to get tahoe-lafs into a Python environment with a bunch of
# *other* Python modules and let them interact in the usual way then we have
# to ask mach-nix for tahoe-lafs and those other Python modules in the same
# way - i.e., using `requirements`. The other tempting mechanism,
# `packagesExtra`, inserts an extra layer of Python environment and prevents
# normal interaction between Python modules (as well as usually producing
# file collisions in the packages that are both runtime and test
# dependencies). To get the tahoe-lafs we just built into the environment,
# put it into nixpkgs using an overlay and tell mach-nix to get tahoe-lafs
# from nixpkgs.
overridesPre = [(self: super: { inherit tahoe-lafs; })];
providers = tahoe-lafs.meta.mach-nix.providers // { tahoe-lafs = "nixpkgs"; };
# Make the Python environment in which we can run the tests.
python-env = mach-nix.mkPython {
# Get the packaging fixes we already know we need from putting together
# the runtime package.
inherit (tahoe-lafs.meta.mach-nix) _;
# Share the runtime package's provider configuration - combined with our
# own that causes the right tahoe-lafs to be picked up.
inherit providers overridesPre;
requirements = ''
# Here we pull in the Tahoe-LAFS package itself.
tahoe-lafs
# Unfortunately mach-nix misses all of the Python dependencies of
# tahoe-lafs that are satisfied from nixpkgs. Drag them in here. This gives a
# bit of a pyrrhic flavor to the whole endeavor but maybe mach-nix will
# fix this soon.
#
# https://github.com/DavHau/mach-nix/issues/123
# https://github.com/DavHau/mach-nix/pull/386
${tahoe-lafs.requirements}
# And then all of the test-only dependencies.
${builtins.concatStringsSep "\n" tests_require}
# txi2p-tahoe is another dependency with an environment marker that
# mach-nix doesn't automatically pick up.
txi2p-tahoe
'';
};
in
# Make a derivation that runs the unit test suite.
pkgs.runCommand "tahoe-lafs-tests" { } ''
${python-env}/bin/python -m twisted.trial -j $NIX_BUILD_CORES allmydata
# It's not cool to put the whole _trial_temp into $out because it has weird
# files in it we don't want in the store. Plus, even all of the less weird
# files are mostly just trash that's not meaningful if the test suite passes
# (which is the only way we get $out anyway).
#
# The build log itself is typically available from `nix-store --read-log` so
# we don't need to record that either.
echo "passed" >$out
''
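# Usage sketch (an assumption about local workflow, not part of this file's
# contract): from the repository root, build and run the suite with e.g.
#
#   nix-build tests.nix --argstr pkgsVersion nixpkgs-21.11
#
# (with the default pkgsVersion above, the --argstr may be omitted).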