Mirror of https://github.com/tahoe-lafs/tahoe-lafs.git (synced 2025-06-20 08:13:49 +00:00)

Merge branch 'master' into 2916.grid-manager-integration-tests.2

.circleci/config.yml
@@ -89,20 +89,12 @@ workflows:
       - "nixos":
           name: "<<matrix.pythonVersion>>"
-          nixpkgs: "22.11"
+          nixpkgs: "nixpkgs-unstable"
           matrix:
             parameters:
               pythonVersion:
-                - "python38"
                 - "python39"
                 - "python310"
-
-      - "nixos":
-          name: "<<matrix.pythonVersion>>"
-          nixpkgs: "unstable"
-          matrix:
-            parameters:
-              pythonVersion:
                 - "python311"
 
       # Eventually, test against PyPy 3.8
@@ -385,8 +377,8 @@ jobs:
     parameters:
       nixpkgs:
         description: >-
-          Reference the name of a niv-managed nixpkgs source (see `niv show`
-          and nix/sources.json)
+          Reference the name of a flake-managed nixpkgs input (see `nix flake
+          metadata` and flake.nix)
         type: "string"
       pythonVersion:
         description: >-
@@ -403,14 +395,17 @@ jobs:
       - "run":
           name: "Unit Test"
           command: |
-            # The dependencies are all built so we can allow more
-            # parallelism here.
             source .circleci/lib.sh
-            cache_if_able nix-build \
-              --cores 8 \
-              --argstr pkgsVersion "nixpkgs-<<parameters.nixpkgs>>" \
-              --argstr pythonVersion "<<parameters.pythonVersion>>" \
-              nix/tests.nix
+
+            # Translate the nixpkgs selection into a flake reference we
+            # can use to override the default nixpkgs input.
+            NIXPKGS=$(nixpkgs_flake_reference <<parameters.nixpkgs>>)
+
+            cache_if_able nix run \
+              --override-input nixpkgs "$NIXPKGS" \
+              .#<<parameters.pythonVersion>>-unittest -- \
+              --jobs $UNITTEST_CORES \
+              allmydata
 
   typechecks:
     docker:
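
For concreteness, with the parameters nixpkgs: "nixpkgs-23_05" and
pythonVersion: "python310" (values chosen here purely for illustration),
CircleCI expands that templated command to roughly:

    source .circleci/lib.sh
    NIXPKGS=$(nixpkgs_flake_reference nixpkgs-23_05)
    cache_if_able nix run \
      --override-input nixpkgs "$NIXPKGS" \
      .#python310-unittest -- \
      --jobs $UNITTEST_CORES \
      allmydata

nixpkgs_flake_reference and UNITTEST_CORES both come from the .circleci/lib.sh
changes later in this diff.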
@@ -536,20 +531,23 @@ executors:
     docker:
       # Run in a highly Nix-capable environment.
       - <<: *DOCKERHUB_AUTH
-        image: "nixos/nix:2.10.3"
+        image: "nixos/nix:2.16.1"
     environment:
       # CACHIX_AUTH_TOKEN is manually set in the CircleCI web UI and allows us
       # to push to CACHIX_NAME. CACHIX_NAME tells cachix which cache to push
       # to.
       CACHIX_NAME: "tahoe-lafs-opensource"
+      # Let us use features marked "experimental". For example, most/all of
+      # the `nix <subcommand>` forms.
+      NIX_CONFIG: "experimental-features = nix-command flakes"
 
 commands:
   nix-build:
     parameters:
       nixpkgs:
         description: >-
-          Reference the name of a niv-managed nixpkgs source (see `niv show`
-          and nix/sources.json)
+          Reference the name of a flake-managed nixpkgs input (see `nix flake
+          metadata` and flake.nix)
         type: "string"
       pythonVersion:
         description: >-
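
The NIX_CONFIG setting enables the flake-style `nix <subcommand>` forms for
every step run in this executor. Without it, each invocation would need the
equivalent ad hoc flag, something like:

    nix --extra-experimental-features 'nix-command flakes' build .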
@@ -565,13 +563,15 @@ commands:
           # Get cachix for Nix-friendly caching.
           name: "Install Basic Dependencies"
           command: |
-            NIXPKGS="https://github.com/nixos/nixpkgs/archive/nixos-<<parameters.nixpkgs>>.tar.gz"
-            nix-env \
-              --file $NIXPKGS \
-              --install \
-              -A cachix bash
-            # Activate it for "binary substitution". This sets up
-            # configuration tht lets Nix download something from the cache
+            # Get some build environment dependencies and let them float on a
+            # certain release branch. These aren't involved in the actual
+            # package build (only in CI environment setup) so the fact that
+            # they float shouldn't hurt reproducibility.
+            NIXPKGS="nixpkgs/nixos-23.05"
+            nix profile install $NIXPKGS#cachix $NIXPKGS#bash $NIXPKGS#jp
+
+            # Activate our cachix cache for "binary substitution". This sets
+            # up configuration tht lets Nix download something from the cache
             # instead of building it locally, if possible.
             cachix use "${CACHIX_NAME}"
 
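
The "nixpkgs/nixos-23.05" string is a registry-style flake reference rather
than a tarball URL, so the same install also works by hand outside CI, e.g.
(shown for illustration):

    nix profile install nixpkgs/nixos-23.05#cachix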
@@ -585,32 +585,16 @@ commands:
             -p 'python3.withPackages (ps: [ ps.setuptools ])' \
             --run 'python setup.py update_version'
 
-      - "run":
-          name: "Build Dependencies"
-          command: |
-            # CircleCI build environment looks like it has a zillion and a
-            # half cores. Don't let Nix autodetect this high core count
-            # because it blows up memory usage and fails the test run. Pick a
-            # number of cores that suits the build environment we're paying
-            # for (the free one!).
-            source .circleci/lib.sh
-            # nix-shell will build all of the dependencies of the target but
-            # not the target itself.
-            cache_if_able nix-shell \
-              --run "" \
-              --cores 3 \
-              --argstr pkgsVersion "nixpkgs-<<parameters.nixpkgs>>" \
-              --argstr pythonVersion "<<parameters.pythonVersion>>" \
-              ./default.nix
-
       - "run":
           name: "Build Package"
           command: |
             source .circleci/lib.sh
-            cache_if_able nix-build \
-              --cores 4 \
-              --argstr pkgsVersion "nixpkgs-<<parameters.nixpkgs>>" \
-              --argstr pythonVersion "<<parameters.pythonVersion>>" \
-              ./default.nix
+            NIXPKGS=$(nixpkgs_flake_reference <<parameters.nixpkgs>>)
+            cache_if_able nix build \
+              --verbose \
+              --print-build-logs \
+              --cores "$DEPENDENCY_CORES" \
+              --override-input nixpkgs "$NIXPKGS" \
+              .#<<parameters.pythonVersion>>-tahoe-lafs
 
       - steps: "<<parameters.buildSteps>>"
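
As with the unit test job, substituting a concrete parameter value (say
pythonVersion: "python310" and nixpkgs: "nixpkgs-23_05", assumed here for
illustration) shows what actually runs:

    source .circleci/lib.sh
    NIXPKGS=$(nixpkgs_flake_reference nixpkgs-23_05)
    cache_if_able nix build \
      --verbose \
      --print-build-logs \
      --cores "$DEPENDENCY_CORES" \
      --override-input nixpkgs "$NIXPKGS" \
      .#python310-tahoe-lafs

where python310-tahoe-lafs is one of the per-Python package outputs defined in
flake.nix below.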

.circleci/lib.sh
@@ -1,3 +1,13 @@
+# CircleCI build environment looks like it has a zillion and a half cores.
+# Don't let Nix autodetect this high core count because it blows up memory
+# usage and fails the test run. Pick a number of cores that suits the build
+# environment we're paying for (the free one!).
+DEPENDENCY_CORES=3
+
+# Once dependencies are built, we can allow some more concurrency for our own
+# test suite.
+UNITTEST_CORES=8
+
 # Run a command, enabling cache writes to cachix if possible. The command is
 # accepted as a variable number of positional arguments (like argv).
 function cache_if_able() {
@@ -117,3 +127,22 @@ function describe_build() {
         echo "Cache not writeable."
     fi
 }
+
+# Inspect the flake input metadata for an input of a given name and return the
+# revision at which that input is pinned. If the input does not exist then
+# return garbage (probably "null").
+read_input_revision() {
+    input_name=$1
+    shift
+
+    nix flake metadata --json | jp --unquoted 'locks.nodes."'"$input_name"'".locked.rev'
+}
+
+# Return a flake reference that refers to a certain revision of nixpkgs. The
+# certain revision is the revision to which the specified input is pinned.
+nixpkgs_flake_reference() {
+    input_name=$1
+    shift
+
+    echo "github:NixOS/nixpkgs?rev=$(read_input_revision $input_name)"
+}
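
Run from a checkout, these helpers compose as in the following sketch (the
revision shown is the one pinned for the nixpkgs-23_05 input in flake.lock
below):

    $ source .circleci/lib.sh
    $ read_input_revision nixpkgs-23_05
    fa793b06f56896b7d1909e4b69977c7bf842b2f0
    $ nixpkgs_flake_reference nixpkgs-23_05
    github:NixOS/nixpkgs?rev=fa793b06f56896b7d1909e4b69977c7bf842b2f0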

default.nix
@@ -1,49 +1,13 @@
-let
-  # sources.nix contains information about which versions of some of our
-  # dependencies we should use. since we use it to pin nixpkgs, all the rest
-  # of our dependencies are *also* pinned - indirectly.
-  #
-  # sources.nix is managed using a tool called `niv`. as an example, to
-  # update to the most recent version of nixpkgs from the 21.11 maintenance
-  # release, in the top-level tahoe-lafs checkout directory you run:
-  #
-  #   niv update nixpkgs-21.11
-  #
-  # niv also supports chosing a specific revision, following a different
-  # branch, etc. find complete documentation for the tool at
-  # https://github.com/nmattia/niv
-  sources = import nix/sources.nix;
-in
-{
-  pkgsVersion ? "nixpkgs-22.11" # a string which chooses a nixpkgs from the
-                                # niv-managed sources data
-
-, pkgs ? import sources.${pkgsVersion} { } # nixpkgs itself
-
-, pythonVersion ? "python310" # a string choosing the python derivation from
-                              # nixpkgs to target
-
-, extrasNames ? [ "tor" "i2p" ] # a list of strings identifying tahoe-lafs extras,
-                                # the dependencies of which the resulting
-                                # package will also depend on. Include all of the
-                                # runtime extras by default because the incremental
-                                # cost of including them is a lot smaller than the
-                                # cost of re-building the whole thing to add them.
-
-}:
-with (pkgs.${pythonVersion}.override {
-  packageOverrides = import ./nix/python-overrides.nix;
-}).pkgs;
-callPackage ./nix/tahoe-lafs.nix {
-  # Select whichever package extras were requested.
-  inherit extrasNames;
-
-  # Define the location of the Tahoe-LAFS source to be packaged (the same
-  # directory as contains this file). Clean up as many of the non-source
-  # files (eg the `.git` directory, `~` backup files, nix's own `result`
-  # symlink, etc) as possible to avoid needing to re-build when files that
-  # make no difference to the package have changed.
-  tahoe-lafs-src = pkgs.lib.cleanSource ./.;
-
-  doCheck = false;
-}
+# This is the flake-compat glue code. It loads the flake and gives us its
+# outputs. This gives us backwards compatibility with pre-flake consumers.
+# All of the real action is in flake.nix.
+(import
+  (
+    let lock = builtins.fromJSON (builtins.readFile ./flake.lock); in
+    fetchTarball {
+      url = "https://github.com/edolstra/flake-compat/archive/${lock.nodes.flake-compat.locked.rev}.tar.gz";
+      sha256 = lock.nodes.flake-compat.locked.narHash;
+    }
+  )
+  { src = ./.; }
+).defaultNix.default
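
The practical effect is that pre-flake commands continue to work unchanged;
for example (a sketch, assuming a flake-capable Nix is installed):

    nix-build    # builds the flake's default package via the flake-compat shim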

docs/architecture.rst
@@ -57,6 +57,20 @@ The key-value store is implemented by a grid of Tahoe-LAFS storage servers --
 user-space processes. Tahoe-LAFS storage clients communicate with the storage
 servers over TCP.
 
+There are two supported protocols:
+
+* Foolscap, the only supported protocol in release before v1.19.
+* HTTPS, new in v1.19.
+
+By default HTTPS is disabled (this will change in
+https://tahoe-lafs.org/trac/tahoe-lafs/ticket/4041). When HTTPS is enabled on
+the server, the server transparently listens for both Foolscap and HTTPS on the
+same port. Clients can use either; by default they will only use Foolscap, but
+when configured appropriately they will use HTTPS when possible (this will
+change in https://tahoe-lafs.org/trac/tahoe-lafs/ticket/4041). At this time the
+only limitations of HTTPS is that I2P is not supported, so any usage of I2P only
+uses Foolscap.
+
 Storage servers hold data in the form of "shares". Shares are encoded pieces
 of files. There are a configurable number of shares for each file, 10 by
 default. Normally, each share is stored on a separate server, but in some

docs/configuration.rst
@@ -679,6 +679,14 @@ Client Configuration
     location to prefer their local servers so that they can maintain access to
     all of their uploads without using the internet.
 
+``force_foolscap = (boolean, optional)``
+
+    If this is ``True``, the client will only connect to storage servers via
+    Foolscap, regardless of whether they support HTTPS. If this is ``False``,
+    the client will prefer HTTPS when it is available on the server. The default
+    value is ``True`` (this will change in
+    https://tahoe-lafs.org/trac/tahoe-lafs/ticket/4041).
+
 In addition,
 see :doc:`accepting-donations` for a convention for donating to storage server operators.
 
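
For illustration, a client that should prefer HTTPS whenever a server offers
it would carry a stanza like this in its ``tahoe.cfg``::

    [client]
    force_foolscap = False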
@@ -796,6 +804,14 @@ Storage Server Configuration
     (i.e. ``BASEDIR/storage``), but it can be placed elsewhere. Relative paths
     will be interpreted relative to the node's base directory.
 
+``force_foolscap = (boolean, optional)``
+
+    If this is ``True``, the node will expose the storage server via Foolscap
+    only, with no support for HTTPS. If this is ``False``, the server will
+    support both Foolscap and HTTPS on the same port. The default value is
+    ``True`` (this will change in
+    https://tahoe-lafs.org/trac/tahoe-lafs/ticket/4041).
+
 In addition,
 see :doc:`accepting-donations` for a convention encouraging donations to storage server operators.
 
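
Correspondingly, a storage server opting in to HTTPS alongside Foolscap would
use::

    [storage]
    force_foolscap = False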

docs/proposed/http-storage-node-protocol.rst
@@ -278,8 +278,8 @@ This NURL will be announced alongside their existing Foolscap-based server's fURL.
 Such an announcement will resemble this::
 
   {
-    "anonymous-storage-FURL": "pb://...",       # The old key
-    "gbs-anonymous-storage-url": "pb://...#v=1" # The new key
+    "anonymous-storage-FURL": "pb://...",          # The old entry
+    "anonymous-storage-NURLs": ["pb://...#v=1"]    # The new, additional entry
   }
 
 The transition process will proceed in three stages:

@@ -320,12 +320,7 @@ The follow sequence of events is likely:
 
 Ideally,
 the client would not rely on an update from the introducer to give it the GBS NURL for the updated storage server.
-Therefore,
-when an updated client connects to a storage server using Foolscap,
-it should request the server's version information.
-If this information indicates that GBS is supported then the client should cache this GBS information.
-On subsequent connection attempts,
-it should make use of this GBS information.
+In practice, we have decided not to implement this functionality.
 
 Server Details
 --------------

flake.lock (generated, new file)
@@ -0,0 +1,115 @@
+{
+  "nodes": {
+    "flake-compat": {
+      "flake": false,
+      "locked": {
+        "lastModified": 1673956053,
+        "narHash": "sha256-4gtG9iQuiKITOjNQQeQIpoIB6b16fm+504Ch3sNKLd8=",
+        "owner": "edolstra",
+        "repo": "flake-compat",
+        "rev": "35bb57c0c8d8b62bbfd284272c928ceb64ddbde9",
+        "type": "github"
+      },
+      "original": {
+        "owner": "edolstra",
+        "repo": "flake-compat",
+        "type": "github"
+      }
+    },
+    "flake-utils": {
+      "inputs": {
+        "systems": "systems"
+      },
+      "locked": {
+        "lastModified": 1687709756,
+        "narHash": "sha256-Y5wKlQSkgEK2weWdOu4J3riRd+kV/VCgHsqLNTTWQ/0=",
+        "owner": "numtide",
+        "repo": "flake-utils",
+        "rev": "dbabf0ca0c0c4bce6ea5eaf65af5cb694d2082c7",
+        "type": "github"
+      },
+      "original": {
+        "owner": "numtide",
+        "repo": "flake-utils",
+        "type": "github"
+      }
+    },
+    "nixpkgs-22_11": {
+      "locked": {
+        "lastModified": 1688392541,
+        "narHash": "sha256-lHrKvEkCPTUO+7tPfjIcb7Trk6k31rz18vkyqmkeJfY=",
+        "owner": "NixOS",
+        "repo": "nixpkgs",
+        "rev": "ea4c80b39be4c09702b0cb3b42eab59e2ba4f24b",
+        "type": "github"
+      },
+      "original": {
+        "owner": "NixOS",
+        "ref": "nixos-22.11",
+        "repo": "nixpkgs",
+        "type": "github"
+      }
+    },
+    "nixpkgs-23_05": {
+      "locked": {
+        "lastModified": 1689885880,
+        "narHash": "sha256-2ikAcvHKkKh8J/eUrwMA+wy1poscC+oL1RkN1V3RmT8=",
+        "owner": "NixOS",
+        "repo": "nixpkgs",
+        "rev": "fa793b06f56896b7d1909e4b69977c7bf842b2f0",
+        "type": "github"
+      },
+      "original": {
+        "owner": "NixOS",
+        "ref": "nixos-23.05",
+        "repo": "nixpkgs",
+        "type": "github"
+      }
+    },
+    "nixpkgs-unstable": {
+      "locked": {
+        "lastModified": 1689791806,
+        "narHash": "sha256-QpXjfiyBFwa7MV/J6nM5FoBreks9O7j9cAZxV22MR8A=",
+        "owner": "NixOS",
+        "repo": "nixpkgs",
+        "rev": "439ba0789ff84dddea64eb2d47a4a0d4887dbb1f",
+        "type": "github"
+      },
+      "original": {
+        "owner": "NixOS",
+        "ref": "pull/244135/head",
+        "repo": "nixpkgs",
+        "type": "github"
+      }
+    },
+    "root": {
+      "inputs": {
+        "flake-compat": "flake-compat",
+        "flake-utils": "flake-utils",
+        "nixpkgs": [
+          "nixpkgs-unstable"
+        ],
+        "nixpkgs-22_11": "nixpkgs-22_11",
+        "nixpkgs-23_05": "nixpkgs-23_05",
+        "nixpkgs-unstable": "nixpkgs-unstable"
+      }
+    },
+    "systems": {
+      "locked": {
+        "lastModified": 1681028828,
+        "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
+        "owner": "nix-systems",
+        "repo": "default",
+        "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
+        "type": "github"
+      },
+      "original": {
+        "owner": "nix-systems",
+        "repo": "default",
+        "type": "github"
+      }
+    }
+  },
+  "root": "root",
+  "version": 7
+}

flake.nix (new file)
@@ -0,0 +1,240 @@
+{
+  description = "Tahoe-LAFS, free and open decentralized data store";
+
+  nixConfig = {
+    # Supply configuration for the build cache updated by our CI system. This
+    # should allow most users to avoid having to build a large number of
+    # packages (otherwise necessary due to our Python package overrides).
+    substituters = ["https://tahoe-lafs-opensource.cachix.org"];
+    trusted-public-keys = ["tahoe-lafs-opensource.cachix.org-1:eIKCHOPJYceJ2gb74l6e0mayuSdXqiavxYeAio0LFGo="];
+  };
+
+  inputs = {
+    # A couple possible nixpkgs pins. Ideally these could be selected easily
+    # from the command line but there seems to be no syntax/support for that.
+    # However, these at least cause certain revisions to be pinned in our lock
+    # file where you *can* dig them out - and the CI configuration does.
+    #
+    # These are really just examples for the time being since neither of these
+    # releases contains a package set that is completely compatible with our
+    # requirements. We could decide in the future that supporting multiple
+    # releases of NixOS at a time is worthwhile and then pins like these will
+    # help us test each of those releases.
+    "nixpkgs-22_11" = {
+      url = github:NixOS/nixpkgs?ref=nixos-22.11;
+    };
+    "nixpkgs-23_05" = {
+      url = github:NixOS/nixpkgs?ref=nixos-23.05;
+    };
+
+    # We depend on a very new python-cryptography which is not yet available
+    # from any release branch of nixpkgs. However, it is contained in a PR
+    # currently up for review. Point our nixpkgs at that for now.
+    "nixpkgs-unstable" = {
+      url = github:NixOS/nixpkgs?ref=pull/244135/head;
+    };
+
+    # Point the default nixpkgs at one of those. This avoids having getting a
+    # _third_ package set involved and gives a way to provide what should be a
+    # working experience by default (that is, if nixpkgs doesn't get
+    # overridden).
+    nixpkgs.follows = "nixpkgs-unstable";
+
+    # Also get flake-utils for simplified multi-system definitions.
+    flake-utils = {
+      url = github:numtide/flake-utils;
+    };
+
+    # And get a helper that lets us easily continue to provide a default.nix.
+    flake-compat = {
+      url = "github:edolstra/flake-compat";
+      flake = false;
+    };
+  };
+
+  outputs = { self, nixpkgs, flake-utils, ... }:
+    {
+      # Expose an overlay which adds our version of Tahoe-LAFS to the Python
+      # package sets we specify, as well as all of the correct versions of its
+      # dependencies.
+      #
+      # We will also use this to define some other outputs since it gives us
+      # the most succinct way to get a working Tahoe-LAFS package.
+      overlays.default = import ./nix/overlay.nix;
+
+    } // (flake-utils.lib.eachDefaultSystem (system: let
+
+      # The package set for this system architecture.
+      pkgs = import nixpkgs {
+        inherit system;
+        # And include our Tahoe-LAFS package in that package set.
+        overlays = [ self.overlays.default ];
+      };
+
+      # pythonVersions :: [string]
+      #
+      # The version strings for the Python runtimes we'll work with.
+      pythonVersions =
+        let
+          # Match attribute names that look like a Python derivation - CPython
+          # or PyPy. We take care to avoid things like "python-foo" and
+          # "python3Full-unittest" though. We only want things like "pypy38"
+          # or "python311".
+          nameMatches = name: null != builtins.match "(python|pypy)3[[:digit:]]{0,2}" name;
+
+          # Sometimes an old version is left in the package set as an error
+          # saying something like "we remove this". Make sure we whatever we
+          # found by name evaluates without error, too.
+          notError = drv: (builtins.tryEval drv).success;
+        in
+          # Discover all of the Python runtime derivations by inspecting names
+          # and filtering out derivations with errors.
+          builtins.attrNames (
+            pkgs.lib.attrsets.filterAttrs
+              (name: drv: nameMatches name && notError drv)
+              pkgs
+          );
+
+      # defaultPyVersion :: string
+      #
+      # An element of pythonVersions which we'll use for the default package.
+      defaultPyVersion = "python3";
+
+      # pythons :: [derivation]
+      #
+      # Retrieve the actual Python package for each configured version. We
+      # already applied our overlay to pkgs so our packages will already be
+      # available.
+      pythons = builtins.map (pyVer: pkgs.${pyVer}) pythonVersions;
+
+      # packageName :: string -> string
+      #
+      # Construct the Tahoe-LAFS package name for the given Python runtime.
+      packageName = pyVersion: "${pyVersion}-tahoe-lafs";
+
+      # string -> string
+      #
+      # Construct the unit test application name for the given Python runtime.
+      unitTestName = pyVersion: "${pyVersion}-unittest";
+
+      # (string -> a) -> (string -> b) -> string -> attrset a b
+      #
+      # Make a singleton attribute set from the result of two functions.
+      singletonOf = f: g: x: { ${f x} = g x; };
+
+      # [attrset] -> attrset
+      #
+      # Merge a list of attrset into a single attrset with overlap preferring
+      # rightmost values.
+      mergeAttrs = pkgs.lib.foldr pkgs.lib.mergeAttrs {};
+
+      # makeRuntimeEnv :: string -> derivation
+      #
+      # Create a derivation that includes a Python runtime, Tahoe-LAFS, and
+      # all of its dependencies.
+      makeRuntimeEnv = singletonOf packageName makeRuntimeEnv';
+      makeRuntimeEnv' = pyVersion: (pkgs.${pyVersion}.withPackages (ps: with ps;
+        [ tahoe-lafs ] ++
+        tahoe-lafs.passthru.extras.i2p ++
+        tahoe-lafs.passthru.extras.tor
+      )).overrideAttrs (old: {
+        # By default, withPackages gives us a derivation with a fairly generic
+        # name (like "python-env"). Put our name in there for legibility.
+        # See the similar override in makeTestEnv.
+        name = packageName pyVersion;
+      });
+
+      # makeTestEnv :: string -> derivation
+      #
+      # Create a derivation that includes a Python runtime and all of the
+      # Tahoe-LAFS dependencies, but not Tahoe-LAFS itself, which we'll get
+      # from the working directory.
+      makeTestEnv = pyVersion: (pkgs.${pyVersion}.withPackages (ps: with ps;
+        [ tahoe-lafs ] ++
+        tahoe-lafs.passthru.extras.i2p ++
+        tahoe-lafs.passthru.extras.tor ++
+        tahoe-lafs.passthru.extras.unittest
+      )).overrideAttrs (old: {
+        # See the similar override in makeRuntimeEnv'.
+        name = packageName pyVersion;
+      });
+    in {
+      # Include a package set with out overlay on it in our own output. This
+      # is mainly a development/debugging convenience as it will expose all of
+      # our Python package overrides beneath it. The magic name
+      # "legacyPackages" is copied from nixpkgs and has special support in the
+      # nix command line tool.
+      legacyPackages = pkgs;
+
+      # The flake's package outputs. We'll define one version of the package
+      # for each version of Python we could find. We'll also point the
+      # flake's "default" package at the derivation corresponding to the
+      # default Python version we defined above. The package consists of a
+      # Python environment with Tahoe-LAFS available to it.
+      packages =
+        mergeAttrs (
+          [ { default = self.packages.${system}.${packageName defaultPyVersion}; } ]
+          ++ (builtins.map makeRuntimeEnv pythonVersions)
+          ++ (builtins.map (singletonOf unitTestName makeTestEnv) pythonVersions)
+        );
+
+      # The flake's app outputs. We'll define a version of an app for running
+      # the test suite for each version of Python we could find. We'll also
+      # define a version of an app for running the "tahoe" command-line
+      # entrypoint for each version of Python we could find.
+      apps =
+        let
+          # writeScript :: string -> string -> path
+          #
+          # Write a shell program to a file so it can be run later.
+          #
+          # We avoid writeShellApplication here because it has ghc as a
+          # dependency but ghc has Python as a dependency and our Python
+          # package override triggers a rebuild of ghc and many Haskell
+          # packages which takes a looong time.
+          writeScript = name: text: "${pkgs.writeShellScript name text}";
+
+          # makeTahoeApp :: string -> attrset
+          #
+          # A helper function to define the Tahoe-LAFS runtime entrypoint for
+          # a certain Python runtime.
+          makeTahoeApp = pyVersion: {
+            "tahoe-${pyVersion}" = {
+              type = "app";
+              program =
+                writeScript "tahoe"
+                  ''
+                  ${makeRuntimeEnv' pyVersion}/bin/tahoe "$@"
+                  '';
+            };
+          };
+
+          # makeUnitTestsApp :: string -> attrset
+          #
+          # A helper function to define the Tahoe-LAFS unit test entrypoint
+          # for a certain Python runtime.
+          makeUnitTestsApp = pyVersion: {
+            "${unitTestName pyVersion}" = {
+              type = "app";
+              program =
+                let
+                  python = "${makeTestEnv pyVersion}/bin/python";
+                in
+                  writeScript "unit-tests"
+                    ''
+                    ${python} setup.py update_version
+                    export TAHOE_LAFS_HYPOTHESIS_PROFILE=ci
+                    export PYTHONPATH=$PWD/src
+                    ${python} -m twisted.trial "$@"
+                    '';
+            };
+          };
+        in
+          # Merge a default app definition with the rest of the apps.
+          mergeAttrs (
+            [ { default = self.apps.${system}."tahoe-python3"; } ]
+            ++ (builtins.map makeUnitTestsApp pythonVersions)
+            ++ (builtins.map makeTahoeApp pythonVersions)
+          );
+    }));
+}
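
Given these outputs, typical invocations look like the following sketch (the
available Python version names depend on what the pinned nixpkgs provides):

    nix build .#python310-tahoe-lafs           # Python env with Tahoe-LAFS
    nix run .#tahoe-python310 -- --version     # the "tahoe" CLI app
    nix run .#python310-unittest -- allmydata  # the unit test app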

integration/conftest.py
@@ -446,6 +446,32 @@ def tor_network(reactor, temp_dir, chutney, request):
     request.addfinalizer(cleanup)
 
     pytest_twisted.blockon(chutney(("start", basic_network)))
+
+    # Wait for the nodes to "bootstrap" - ie, form a network among themselves.
+    # Successful bootstrap is reported with a message something like:
+    #
+    # Everything bootstrapped after 151 sec
+    # Bootstrap finished: 151 seconds
+    # Node status:
+    # test000a : 100, done , Done
+    # test001a : 100, done , Done
+    # test002a : 100, done , Done
+    # test003r : 100, done , Done
+    # test004r : 100, done , Done
+    # test005r : 100, done , Done
+    # test006r : 100, done , Done
+    # test007r : 100, done , Done
+    # test008c : 100, done , Done
+    # test009c : 100, done , Done
+    # Published dir info:
+    # test000a : 100, all nodes , desc md md_cons ns_cons , Dir info cached
+    # test001a : 100, all nodes , desc md md_cons ns_cons , Dir info cached
+    # test002a : 100, all nodes , desc md md_cons ns_cons , Dir info cached
+    # test003r : 100, all nodes , desc md md_cons ns_cons , Dir info cached
+    # test004r : 100, all nodes , desc md md_cons ns_cons , Dir info cached
+    # test005r : 100, all nodes , desc md md_cons ns_cons , Dir info cached
+    # test006r : 100, all nodes , desc md md_cons ns_cons , Dir info cached
+    # test007r : 100, all nodes , desc md md_cons ns_cons , Dir info cached
     pytest_twisted.blockon(chutney(("wait_for_bootstrap", basic_network)))
 
     # print some useful stuff

integration/util.py
@@ -657,19 +657,28 @@ def await_client_ready(tahoe, timeout=10, liveness=60*2, minimum_number_of_servers
             time.sleep(1)
             continue
 
-        print(
-            f"Now: {time.ctime()}\n"
-            f"Server last-received-data: {[s['last_received_data'] for s in servers]}"
-        )
+        now = time.time()
 
         server_times = [
             server['last_received_data']
-            for server in servers
+            for server
+            in servers
+            if server['last_received_data'] is not None
         ]
+        print(
+            f"Now: {time.ctime(now)}\n"
+            f"Liveness required: {liveness}\n"
+            f"Server last-received-data: {[time.ctime(s) for s in server_times]}\n"
+            f"Server ages: {[now - s for s in server_times]}\n"
+        )
+
         # check that all times are 'recent enough' (it's OK if _some_ servers
         # are down, we just want to make sure a sufficient number are up)
-        if len([time.time() - t <= liveness for t in server_times if t is not None]) < minimum_number_of_servers:
-            print("waiting because at least one server too old")
+        alive = [t for t in server_times if now - t <= liveness]
+        if len(alive) < minimum_number_of_servers:
+            print(
+                f"waiting because we found {len(alive)} servers "
+                f"and want {minimum_number_of_servers}"
+            )
             time.sleep(1)
             continue
 

newsfragments/4039.documentation (new file)
@@ -0,0 +1 @@
+Document the ``force_foolscap`` configuration options for ``[storage]`` and ``[client]``.

newsfragments/4042.minor (new, empty file)
newsfragments/4052.minor (new, empty file)
newsfragments/4055.minor (new, empty file)

nix/overlay.nix (new file)
@@ -0,0 +1,10 @@
+# This overlay adds Tahoe-LAFS and all of its properly-configured Python
+# package dependencies to a Python package set. Downstream consumers can
+# apply it to their own nixpkgs derivation to produce a Tahoe-LAFS package.
+final: prev: {
+  # Add our overrides such that they will be applied to any Python derivation
+  # in nixpkgs.
+  pythonPackagesExtensions = prev.pythonPackagesExtensions ++ [
+    (import ./python-overrides.nix)
+  ];
+}
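
Because flake.nix also exposes the overlaid package set as legacyPackages, the
overlay's effect can be exercised directly from the flake; for example (a
sketch):

    nix build .#python310Packages.tahoe-lafs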

nix/python-overrides.nix
@@ -9,14 +9,39 @@ let
   # Disable a Python package's test suite.
   dontCheck = drv: drv.overrideAttrs (old: { doInstallCheck = false; });
 
+  # string -> any -> derivation -> derivation
+  #
+  # If the overrideable function for the given derivation accepts an argument
+  # with the given name, override it with the given value.
+  #
+  # Since we try to work with multiple versions of nixpkgs, sometimes we need
+  # to override a parameter that exists in one version but not others. This
+  # makes it a bit easier to do so.
+  overrideIfPresent = name: value: drv:
+    if (drv.override.__functionArgs ? ${name})
+    then drv.override { "${name}" = value; }
+    else drv;
+
   # Disable building a Python package's documentation.
-  dontBuildDocs = alsoDisable: drv: (drv.override ({
-    sphinxHook = null;
-  } // alsoDisable)).overrideAttrs ({ outputs, ... }: {
+  dontBuildDocs = drv: (
+    overrideIfPresent "sphinxHook" null (
+      overrideIfPresent "sphinx-rtd-theme" null
+        drv
+    )
+  ).overrideAttrs ({ outputs, ... }: {
     outputs = builtins.filter (x: "doc" != x) outputs;
   });
 
 in {
+  tahoe-lafs = self.callPackage ./tahoe-lafs.nix {
+    # Define the location of the Tahoe-LAFS source to be packaged (the same
+    # directory as contains this file). Clean up as many of the non-source
+    # files (eg the `.git` directory, `~` backup files, nix's own `result`
+    # symlink, etc) as possible to avoid needing to re-build when files that
+    # make no difference to the package have changed.
+    tahoe-lafs-src = self.lib.cleanSource ../.;
+  };
+
   # Some dependencies aren't packaged in nixpkgs so supply our own packages.
   pycddl = self.callPackage ./pycddl.nix { };
   txi2p = self.callPackage ./txi2p.nix { };

@@ -30,15 +55,23 @@ in {
     inherit (super) txtorcon;
   };
 
-  # Update the version of pyopenssl.
-  pyopenssl = self.callPackage ./pyopenssl.nix {
-    pyopenssl =
-      # Building the docs requires sphinx which brings in a dependency on babel,
-      # the test suite of which fails.
-      onPyPy (dontBuildDocs { sphinx-rtd-theme = null; })
-      # Avoid infinite recursion.
-      super.pyopenssl;
-  };
+  # With our customized package set a Twisted unit test fails. Patch the
+  # Twisted test suite to skip that test.
+  # Filed upstream at https://github.com/twisted/twisted/issues/11877
+  twisted = super.twisted.overrideAttrs (old: {
+    patches = (old.patches or []) ++ [ ./twisted.patch ];
+  });
+
+  # Update the version of pyopenssl - and since we're doing that anyway, we
+  # don't need the docs. Unfortunately this triggers a lot of rebuilding of
+  # dependent packages.
+  pyopenssl = dontBuildDocs (self.callPackage ./pyopenssl.nix {
+    inherit (super) pyopenssl;
+  });
+
+  # The cryptography that we get from nixpkgs to satisfy the pyopenssl upgrade
+  # that we did breaks service-identity ... so get a newer version that works.
+  service-identity = self.callPackage ./service-identity.nix { };
 
   # collections-extended is currently broken for Python 3.11 in nixpkgs but
   # we know where a working version lives.

@@ -52,16 +85,19 @@ in {
 
   # tornado and tk pull in a huge dependency trees for functionality we don't
   # care about, also tkinter doesn't work on PyPy.
-  matplotlib = super.matplotlib.override { tornado = null; enableTk = false; };
+  matplotlib = onPyPy (matplotlib: matplotlib.override {
+    tornado = null;
+    enableTk = false;
+  }) super.matplotlib;
 
-  tqdm = super.tqdm.override {
+  tqdm = onPyPy (tqdm: tqdm.override {
     # ibid.
     tkinter = null;
     # pandas is only required by the part of the test suite covering
     # integration with pandas that we don't care about. pandas is a huge
     # dependency.
     pandas = null;
-  };
+  }) super.tqdm;
 
   # The treq test suite depends on httpbin. httpbin pulls in babel (flask ->
   # jinja2 -> babel) and arrow (brotlipy -> construct -> arrow). babel fails

@@ -74,48 +110,25 @@ in {
   six = onPyPy dontCheck super.six;
 
   # Likewise for beautifulsoup4.
-  beautifulsoup4 = onPyPy (dontBuildDocs {}) super.beautifulsoup4;
+  beautifulsoup4 = onPyPy dontBuildDocs super.beautifulsoup4;
 
   # The autobahn test suite pulls in a vast number of dependencies for
   # functionality we don't care about. It might be nice to *selectively*
   # disable just some of it but this is easier.
-  autobahn = onPyPy dontCheck super.autobahn;
+  autobahn = dontCheck super.autobahn;
 
   # and python-dotenv tests pulls in a lot of dependencies, including jedi,
   # which does not work on PyPy.
   python-dotenv = onPyPy dontCheck super.python-dotenv;
 
-  # Upstream package unaccountably includes a sqlalchemy dependency ... but
-  # the project has no such dependency. Fixed in nixpkgs in
-  # da10e809fff70fbe1d86303b133b779f09f56503.
-  aiocontextvars = super.aiocontextvars.override { sqlalchemy = null; };
-
   # By default, the sphinx docs are built, which pulls in a lot of
   # dependencies - including jedi, which does not work on PyPy.
-  hypothesis =
-    (let h = super.hypothesis;
-     in
-       if (h.override.__functionArgs.enableDocumentation or false)
-       then h.override { enableDocumentation = false; }
-       else h).overrideAttrs ({ nativeBuildInputs, ... }: {
-         # The nixpkgs expression is missing the tzdata check input.
-         nativeBuildInputs = nativeBuildInputs ++ [ super.tzdata ];
-       });
+  hypothesis = onPyPy dontBuildDocs super.hypothesis;
 
   # flaky's test suite depends on nose and nose appears to have Python 3
   # incompatibilities (it includes `print` statements, for example).
   flaky = onPyPy dontCheck super.flaky;
 
-  # Replace the deprecated way of running the test suite with the modern way.
-  # This also drops a bunch of unnecessary build-time dependencies, some of
-  # which are broken on PyPy. Fixed in nixpkgs in
-  # 5feb5054bb08ba779bd2560a44cf7d18ddf37fea.
-  zfec = (super.zfec.override {
-    setuptoolsTrial = null;
-  }).overrideAttrs (old: {
-    checkPhase = "trial zfec";
-  });
-
   # collections-extended is packaged with poetry-core. poetry-core test suite
   # uses virtualenv and virtualenv test suite fails on PyPy.
   poetry-core = onPyPy dontCheck super.poetry-core;

@@ -134,15 +147,6 @@ in {
   # since we actually depend directly and significantly on Foolscap.
   foolscap = onPyPy dontCheck super.foolscap;
 
-  # Fixed by nixpkgs PR https://github.com/NixOS/nixpkgs/pull/222246
-  psutil = super.psutil.overrideAttrs ({ pytestFlagsArray, disabledTests, ...}: {
-    # Upstream already disables some tests but there are even more that have
-    # build impurities that come from build system hardware configuration.
-    # Skip them too.
-    pytestFlagsArray = [ "-v" ] ++ pytestFlagsArray;
-    disabledTests = disabledTests ++ [ "sensors_temperatures" ];
-  });
-
   # CircleCI build systems don't have enough memory to run this test suite.
-  lz4 = dontCheck super.lz4;
+  lz4 = onPyPy dontCheck super.lz4;
 }

nix/service-identity.nix (new file)
@@ -0,0 +1,61 @@
+{ lib
+, attrs
+, buildPythonPackage
+, cryptography
+, fetchFromGitHub
+, hatch-fancy-pypi-readme
+, hatch-vcs
+, hatchling
+, idna
+, pyasn1
+, pyasn1-modules
+, pytestCheckHook
+, pythonOlder
+, setuptools
+}:
+
+buildPythonPackage rec {
+  pname = "service-identity";
+  version = "23.1.0";
+  format = "pyproject";
+
+  disabled = pythonOlder "3.8";
+
+  src = fetchFromGitHub {
+    owner = "pyca";
+    repo = pname;
+    rev = "refs/tags/${version}";
+    hash = "sha256-PGDtsDgRwh7GuuM4OuExiy8L4i3Foo+OD0wMrndPkvo=";
+  };
+
+  nativeBuildInputs = [
+    hatch-fancy-pypi-readme
+    hatch-vcs
+    hatchling
+    setuptools
+  ];
+
+  propagatedBuildInputs = [
+    attrs
+    cryptography
+    idna
+    pyasn1
+    pyasn1-modules
+  ];
+
+  nativeCheckInputs = [
+    pytestCheckHook
+  ];
+
+  pythonImportsCheck = [
+    "service_identity"
+  ];
+
+  meta = with lib; {
+    description = "Service identity verification for pyOpenSSL";
+    homepage = "https://service-identity.readthedocs.io";
+    changelog = "https://github.com/pyca/service-identity/releases/tag/${version}";
+    license = licenses.mit;
+    maintainers = with maintainers; [ fab ];
+  };
+}

nix/sources.json (deleted)
@@ -1,38 +0,0 @@
-{
-    "niv": {
-        "branch": "master",
-        "description": "Easy dependency management for Nix projects",
-        "homepage": "https://github.com/nmattia/niv",
-        "owner": "nmattia",
-        "repo": "niv",
-        "rev": "5830a4dd348d77e39a0f3c4c762ff2663b602d4c",
-        "sha256": "1d3lsrqvci4qz2hwjrcnd8h5vfkg8aypq3sjd4g3izbc8frwz5sm",
-        "type": "tarball",
-        "url": "https://github.com/nmattia/niv/archive/5830a4dd348d77e39a0f3c4c762ff2663b602d4c.tar.gz",
-        "url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
-    },
-    "nixpkgs-22.11": {
-        "branch": "nixos-22.11",
-        "description": "Nix Packages collection",
-        "homepage": "",
-        "owner": "nixos",
-        "repo": "nixpkgs",
-        "rev": "970402e6147c49603f4d06defe44d27fe51884ce",
-        "sha256": "1v0ljy7wqq14ad3gd1871fgvd4psr7dy14q724k0wwgxk7inbbwh",
-        "type": "tarball",
-        "url": "https://github.com/nixos/nixpkgs/archive/970402e6147c49603f4d06defe44d27fe51884ce.tar.gz",
-        "url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
-    },
-    "nixpkgs-unstable": {
-        "branch": "master",
-        "description": "Nix Packages collection",
-        "homepage": "",
-        "owner": "nixos",
-        "repo": "nixpkgs",
-        "rev": "d0c9a536331227ab883b4f6964be638fa436d81f",
-        "sha256": "1gg6v5rk1p26ciygdg262zc5vqws753rvgcma5rim2s6gyfrjaq1",
-        "type": "tarball",
-        "url": "https://github.com/nixos/nixpkgs/archive/d0c9a536331227ab883b4f6964be638fa436d81f.tar.gz",
-        "url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
-    }
-}

nix/sources.nix (deleted)
@@ -1,174 +0,0 @@
-# This file has been generated by Niv.
-
-let
-
-  #
-  # The fetchers. fetch_<type> fetches specs of type <type>.
-  #
-
-  fetch_file = pkgs: name: spec:
-    let
-      name' = sanitizeName name + "-src";
-    in
-      if spec.builtin or true then
-        builtins_fetchurl { inherit (spec) url sha256; name = name'; }
-      else
-        pkgs.fetchurl { inherit (spec) url sha256; name = name'; };
-
-  fetch_tarball = pkgs: name: spec:
-    let
-      name' = sanitizeName name + "-src";
-    in
-      if spec.builtin or true then
-        builtins_fetchTarball { name = name'; inherit (spec) url sha256; }
-      else
-        pkgs.fetchzip { name = name'; inherit (spec) url sha256; };
-
-  fetch_git = name: spec:
-    let
-      ref =
-        if spec ? ref then spec.ref else
-        if spec ? branch then "refs/heads/${spec.branch}" else
-        if spec ? tag then "refs/tags/${spec.tag}" else
-        abort "In git source '${name}': Please specify `ref`, `tag` or `branch`!";
-    in
-      builtins.fetchGit { url = spec.repo; inherit (spec) rev; inherit ref; };
-
-  fetch_local = spec: spec.path;
-
-  fetch_builtin-tarball = name: throw
-    ''[${name}] The niv type "builtin-tarball" is deprecated. You should instead use `builtin = true`.
-        $ niv modify ${name} -a type=tarball -a builtin=true'';
-
-  fetch_builtin-url = name: throw
-    ''[${name}] The niv type "builtin-url" will soon be deprecated. You should instead use `builtin = true`.
-        $ niv modify ${name} -a type=file -a builtin=true'';
-
-  #
-  # Various helpers
-  #
-
-  # https://github.com/NixOS/nixpkgs/pull/83241/files#diff-c6f540a4f3bfa4b0e8b6bafd4cd54e8bR695
-  sanitizeName = name:
-    (
-      concatMapStrings (s: if builtins.isList s then "-" else s)
-        (
-          builtins.split "[^[:alnum:]+._?=-]+"
-            ((x: builtins.elemAt (builtins.match "\\.*(.*)" x) 0) name)
-        )
-    );
-
-  # The set of packages used when specs are fetched using non-builtins.
-  mkPkgs = sources: system:
-    let
-      sourcesNixpkgs =
-        import (builtins_fetchTarball { inherit (sources.nixpkgs) url sha256; }) { inherit system; };
-      hasNixpkgsPath = builtins.any (x: x.prefix == "nixpkgs") builtins.nixPath;
-      hasThisAsNixpkgsPath = <nixpkgs> == ./.;
-    in
-      if builtins.hasAttr "nixpkgs" sources
-      then sourcesNixpkgs
-      else if hasNixpkgsPath && ! hasThisAsNixpkgsPath then
-        import <nixpkgs> {}
-      else
-        abort
-          ''
-            Please specify either <nixpkgs> (through -I or NIX_PATH=nixpkgs=...) or
-            add a package called "nixpkgs" to your sources.json.
-          '';
-
-  # The actual fetching function.
-  fetch = pkgs: name: spec:
-
-    if ! builtins.hasAttr "type" spec then
-      abort "ERROR: niv spec ${name} does not have a 'type' attribute"
-    else if spec.type == "file" then fetch_file pkgs name spec
-    else if spec.type == "tarball" then fetch_tarball pkgs name spec
-    else if spec.type == "git" then fetch_git name spec
-    else if spec.type == "local" then fetch_local spec
-    else if spec.type == "builtin-tarball" then fetch_builtin-tarball name
-    else if spec.type == "builtin-url" then fetch_builtin-url name
-    else
-      abort "ERROR: niv spec ${name} has unknown type ${builtins.toJSON spec.type}";
-
-  # If the environment variable NIV_OVERRIDE_${name} is set, then use
-  # the path directly as opposed to the fetched source.
-  replace = name: drv:
-    let
-      saneName = stringAsChars (c: if isNull (builtins.match "[a-zA-Z0-9]" c) then "_" else c) name;
-      ersatz = builtins.getEnv "NIV_OVERRIDE_${saneName}";
-    in
-      if ersatz == "" then drv else
-        # this turns the string into an actual Nix path (for both absolute and
-        # relative paths)
-        if builtins.substring 0 1 ersatz == "/" then /. + ersatz else /. + builtins.getEnv "PWD" + "/${ersatz}";
-
-  # Ports of functions for older nix versions
-
-  # a Nix version of mapAttrs if the built-in doesn't exist
-  mapAttrs = builtins.mapAttrs or (
-    f: set: with builtins;
-    listToAttrs (map (attr: { name = attr; value = f attr set.${attr}; }) (attrNames set))
-  );
-
-  # https://github.com/NixOS/nixpkgs/blob/0258808f5744ca980b9a1f24fe0b1e6f0fecee9c/lib/lists.nix#L295
-  range = first: last: if first > last then [] else builtins.genList (n: first + n) (last - first + 1);
-
-  # https://github.com/NixOS/nixpkgs/blob/0258808f5744ca980b9a1f24fe0b1e6f0fecee9c/lib/strings.nix#L257
-  stringToCharacters = s: map (p: builtins.substring p 1 s) (range 0 (builtins.stringLength s - 1));
-
-  # https://github.com/NixOS/nixpkgs/blob/0258808f5744ca980b9a1f24fe0b1e6f0fecee9c/lib/strings.nix#L269
-  stringAsChars = f: s: concatStrings (map f (stringToCharacters s));
-  concatMapStrings = f: list: concatStrings (map f list);
-  concatStrings = builtins.concatStringsSep "";
-
-  # https://github.com/NixOS/nixpkgs/blob/8a9f58a375c401b96da862d969f66429def1d118/lib/attrsets.nix#L331
-  optionalAttrs = cond: as: if cond then as else {};
-
-  # fetchTarball version that is compatible between all the versions of Nix
-  builtins_fetchTarball = { url, name ? null, sha256 }@attrs:
-    let
-      inherit (builtins) lessThan nixVersion fetchTarball;
-    in
-      if lessThan nixVersion "1.12" then
-        fetchTarball ({ inherit url; } // (optionalAttrs (!isNull name) { inherit name; }))
-      else
-        fetchTarball attrs;
-
-  # fetchurl version that is compatible between all the versions of Nix
-  builtins_fetchurl = { url, name ? null, sha256 }@attrs:
-    let
-      inherit (builtins) lessThan nixVersion fetchurl;
-    in
-      if lessThan nixVersion "1.12" then
-        fetchurl ({ inherit url; } // (optionalAttrs (!isNull name) { inherit name; }))
-      else
-        fetchurl attrs;
-
-  # Create the final "sources" from the config
-  mkSources = config:
-    mapAttrs (
-      name: spec:
-        if builtins.hasAttr "outPath" spec
-        then abort
-          "The values in sources.json should not have an 'outPath' attribute"
-        else
-          spec // { outPath = replace name (fetch config.pkgs name spec); }
-    ) config.sources;
-
-  # The "config" used by the fetchers
-  mkConfig =
-    { sourcesFile ? if builtins.pathExists ./sources.json then ./sources.json else null
-    , sources ? if isNull sourcesFile then {} else builtins.fromJSON (builtins.readFile sourcesFile)
-    , system ? builtins.currentSystem
-    , pkgs ? mkPkgs sources system
-    }: rec {
-      # The sources, i.e. the attribute set of spec name to spec
-      inherit sources;
-
-      # The "pkgs" (evaluated nixpkgs) to use for e.g. non-builtin fetchers
-      inherit pkgs;
-    };
-
-in
-  mkSources (mkConfig {}) // { __functor = _: settings: mkSources (mkConfig settings); }
@ -1,24 +1,16 @@
|
|||||||
|
let
|
||||||
|
pname = "tahoe-lafs";
|
||||||
|
version = "1.18.0.post1";
|
||||||
|
in
|
||||||
{ lib
|
{ lib
|
||||||
, pythonPackages
|
, pythonPackages
|
||||||
, buildPythonPackage
|
, buildPythonPackage
|
||||||
, tahoe-lafs-src
|
, tahoe-lafs-src
|
||||||
, extrasNames
|
|
||||||
|
|
||||||
# control how the test suite is run
|
|
||||||
, doCheck
|
|
||||||
}:
|
}:
|
||||||
let
|
buildPythonPackage rec {
|
||||||
pname = "tahoe-lafs";
|
inherit pname version;
|
||||||
version = "1.18.0.post1";
|
src = tahoe-lafs-src;
|
||||||
|
propagatedBuildInputs = with pythonPackages; [
|
||||||
pickExtraDependencies = deps: extras: builtins.foldl' (accum: extra: accum ++ deps.${extra}) [] extras;
|
|
||||||
|
|
||||||
pythonExtraDependencies = with pythonPackages; {
|
|
||||||
tor = [ txtorcon ];
|
|
||||||
i2p = [ txi2p ];
|
|
||||||
};
|
|
||||||
|
|
||||||
pythonPackageDependencies = with pythonPackages; [
|
|
||||||
attrs
|
attrs
|
||||||
autobahn
|
autobahn
|
||||||
cbor2
|
cbor2
|
||||||
@ -41,35 +33,42 @@ let
|
|||||||
six
|
six
|
||||||
treq
|
treq
|
||||||
twisted
|
twisted
|
||||||
# Get the dependencies for the Twisted extras we depend on, too.
|
|
||||||
twisted.passthru.optional-dependencies.tls
|
|
||||||
twisted.passthru.optional-dependencies.conch
|
|
||||||
werkzeug
|
werkzeug
|
||||||
zfec
|
zfec
|
||||||
zope_interface
|
zope_interface
|
||||||
] ++ pickExtraDependencies pythonExtraDependencies extrasNames;
|
] ++
|
||||||
|
# Get the dependencies for the Twisted extras we depend on, too.
|
||||||
|
twisted.passthru.optional-dependencies.tls ++
|
||||||
|
twisted.passthru.optional-dependencies.conch;
|
||||||
|
|
||||||
unitTestDependencies = with pythonPackages; [
|
# The test suite lives elsewhere.
|
||||||
|
doCheck = false;
|
||||||
|
|
||||||
|
passthru = {
|
||||||
|
extras = with pythonPackages; {
|
||||||
|
tor = [
|
||||||
|
txtorcon
|
||||||
|
];
|
||||||
|
i2p = [
|
||||||
|
txi2p
|
||||||
|
];
|
||||||
|
unittest = [
|
||||||
beautifulsoup4
|
beautifulsoup4
|
||||||
|
html5lib
|
||||||
fixtures
|
fixtures
|
||||||
hypothesis
|
hypothesis
|
||||||
mock
|
mock
|
||||||
prometheus-client
|
prometheus-client
|
||||||
testtools
|
testtools
|
||||||
];
|
];
|
||||||
|
integrationtest = [
|
||||||
in
|
pytest
|
||||||
buildPythonPackage {
|
pytest-twisted
|
||||||
inherit pname version;
|
paramiko
|
||||||
src = tahoe-lafs-src;
|
pytest-timeout
|
||||||
propagatedBuildInputs = pythonPackageDependencies;
|
];
|
||||||
|
};
|
||||||
inherit doCheck;
|
};
|
||||||
checkInputs = unitTestDependencies;
|
|
||||||
checkPhase = ''
|
|
||||||
export TAHOE_LAFS_HYPOTHESIS_PROFILE=ci
|
|
||||||
python -m twisted.trial -j $NIX_BUILD_CORES allmydata
|
|
||||||
'';
|
|
||||||
|
|
||||||
meta = with lib; {
|
meta = with lib; {
|
||||||
homepage = "https://tahoe-lafs.org/";
|
homepage = "https://tahoe-lafs.org/";
@ -1,4 +0,0 @@
# Build the package with the test suite enabled.
args@{...}: (import ../. args).override {
doCheck = true;
}

12
nix/twisted.patch
Normal file

@ -0,0 +1,12 @@
diff --git a/src/twisted/internet/test/test_endpoints.py b/src/twisted/internet/test/test_endpoints.py
index c650fd8aa6..a1754fd533 100644
--- a/src/twisted/internet/test/test_endpoints.py
+++ b/src/twisted/internet/test/test_endpoints.py
@@ -4214,6 +4214,7 @@ class WrapClientTLSParserTests(unittest.TestCase):
connectionCreator = connectionCreatorFromEndpoint(reactor, endpoint)
self.assertEqual(connectionCreator._hostname, "\xe9xample.example.com")

+ @skipIf(True, "self.assertFalse(plainClient.transport.disconnecting) fails")
def test_tls(self):
"""
When passed a string endpoint description beginning with C{tls:},

@ -41,6 +41,7 @@ from twisted.internet.interfaces import (
IDelayedCall,
)
from twisted.internet.ssl import CertificateOptions
from twisted.protocols.tls import TLSMemoryBIOProtocol
from twisted.web.client import Agent, HTTPConnectionPool
from zope.interface import implementer
from hyperlink import DecodedURL

@ -72,7 +73,7 @@ except ImportError:
pass

def _encode_si(si): # type: (bytes) -> str
def _encode_si(si: bytes) -> str:
"""Encode the storage index into Unicode string."""
return str(si_b2a(si), "ascii")

@ -80,9 +81,13 @@ def _encode_si(si): # type: (bytes) -> str
class ClientException(Exception):
"""An unexpected response code from the server."""

def __init__(self, code, *additional_args):
def __init__(
Exception.__init__(self, code, *additional_args)
self, code: int, message: Optional[str] = None, body: Optional[bytes] = None
):
Exception.__init__(self, code, message, body)
self.code = code
self.message = message
self.body = body

register_exception_extractor(ClientException, lambda e: {"response_code": e.code})
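ClientException now carries the server's status phrase and response body alongside the numeric code, so error reports can show what the server actually said. A minimal sketch of a caller making use of the richer exception (the helper name is hypothetical; `e` is a ClientException as defined above):

def describe_client_error(e) -> str:
    # code is always present; message and body may be None when raised
    # by older call sites that only passed the status code.
    parts = [f"server returned {e.code}"]
    if e.message is not None:
        parts.append(str(e.message))
    if e.body is not None:
        parts.append(e.body.decode("utf-8", "replace"))
    return ": ".join(parts)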
@ -93,7 +98,7 @@ register_exception_extractor(ClientException, lambda e: {"response_code": e.code
# Tags are of the form #6.nnn, where the number is documented at
# https://www.iana.org/assignments/cbor-tags/cbor-tags.xhtml. Notably, #6.258
# indicates a set.
_SCHEMAS = {
_SCHEMAS: Mapping[str, Schema] = {
"get_version": Schema(
# Note that the single-quoted (`'`) string keys in this schema
# represent *byte* strings - per the CDDL specification. Text strings

@ -155,7 +160,7 @@ class _LengthLimitedCollector:
timeout_on_silence: IDelayedCall
f: BytesIO = field(factory=BytesIO)

def __call__(self, data: bytes):
def __call__(self, data: bytes) -> None:
self.timeout_on_silence.reset(60)
self.remaining_length -= len(data)
if self.remaining_length < 0:

@ -164,7 +169,7 @@ class _LengthLimitedCollector:
def limited_content(
response,
response: IResponse,
clock: IReactorTime,
max_length: int = 30 * 1024 * 1024,
) -> Deferred[BinaryIO]:

@ -300,11 +305,13 @@ class _StorageClientHTTPSPolicy:
expected_spki_hash: bytes

# IPolicyForHTTPS
def creatorForNetloc(self, hostname, port):
def creatorForNetloc(self, hostname: str, port: int) -> _StorageClientHTTPSPolicy:
return self

# IOpenSSLClientConnectionCreator
def clientConnectionForTLS(self, tlsProtocol):
def clientConnectionForTLS(
self, tlsProtocol: TLSMemoryBIOProtocol
) -> SSL.Connection:
return SSL.Connection(
_TLSContextFactory(self.expected_spki_hash).getContext(), None
)

@ -344,7 +351,7 @@ class StorageClientFactory:
cls.TEST_MODE_REGISTER_HTTP_POOL = callback

@classmethod
def stop_test_mode(cls):
def stop_test_mode(cls) -> None:
"""Stop testing mode."""
cls.TEST_MODE_REGISTER_HTTP_POOL = None

@ -437,7 +444,7 @@ class StorageClient(object):
"""Get a URL relative to the base URL."""
return self._base_url.click(path)

def _get_headers(self, headers): # type: (Optional[Headers]) -> Headers
def _get_headers(self, headers: Optional[Headers]) -> Headers:
"""Return the basic headers to be used by default."""
if headers is None:
headers = Headers()

@ -565,7 +572,7 @@ class StorageClient(object):
).read()
raise ClientException(response.code, response.phrase, data)

def shutdown(self) -> Deferred:
def shutdown(self) -> Deferred[object]:
"""Shutdown any connections."""
return self._pool.closeCachedConnections()

@ -4,7 +4,18 @@ HTTP server for storage.

from __future__ import annotations

from typing import Any, Callable, Union, cast, Optional
from typing import (
Any,
Callable,
Union,
cast,
Optional,
TypeVar,
Sequence,
Protocol,
Dict,
)
from typing_extensions import ParamSpec, Concatenate
from functools import wraps
from base64 import b64decode
import binascii

@ -15,20 +26,24 @@ import mmap
from eliot import start_action
from cryptography.x509 import Certificate as CryptoCertificate
from zope.interface import implementer
from klein import Klein
from klein import Klein, KleinRenderable
from klein.resource import KleinResource
from twisted.web import http
from twisted.internet.interfaces import (
IListeningPort,
IStreamServerEndpoint,
IPullProducer,
IProtocolFactory,
)
from twisted.internet.address import IPv4Address, IPv6Address
from twisted.internet.defer import Deferred
from twisted.internet.ssl import CertificateOptions, Certificate, PrivateCertificate
from twisted.internet.interfaces import IReactorFromThreads
from twisted.web.server import Site, Request
from twisted.web.iweb import IRequest
from twisted.protocols.tls import TLSMemoryBIOFactory
from twisted.python.filepath import FilePath
from twisted.python.failure import Failure

from attrs import define, field, Factory
from werkzeug.http import (

@ -68,7 +83,7 @@ class ClientSecretsException(Exception):

def _extract_secrets(
header_values: list[str], required_secrets: set[Secrets]
header_values: Sequence[str], required_secrets: set[Secrets]
) -> dict[Secrets, bytes]:
"""
Given list of values of ``X-Tahoe-Authorization`` headers, and required

@ -102,18 +117,43 @@ def _extract_secrets(
return result

def _authorization_decorator(required_secrets):
class BaseApp(Protocol):
"""Protocol for ``HTTPServer`` and testing equivalent."""

_swissnum: bytes

P = ParamSpec("P")
T = TypeVar("T")
SecretsDict = Dict[Secrets, bytes]
App = TypeVar("App", bound=BaseApp)
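BaseApp exists so the route decorators can give self a precise type that both the real HTTPServer and the test-only apps satisfy. A stripped-down sketch of the idea, with a hypothetical protocol name; because this is structural typing, any class exposing a _swissnum attribute conforms, whether or not it inherits from the protocol:

from typing import Protocol

class HasSwissnum(Protocol):
    _swissnum: bytes

def check_swissnum(app: HasSwissnum, candidate: bytes) -> bool:
    # Accepts HTTPServer, a test app, or anything else with a _swissnum.
    return app._swissnum == candidate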
def _authorization_decorator(
required_secrets: set[Secrets],
) -> Callable[
[Callable[Concatenate[App, Request, SecretsDict, P], T]],
Callable[Concatenate[App, Request, P], T],
]:
"""
1. Check the ``Authorization`` header matches server swissnum.
2. Extract ``X-Tahoe-Authorization`` headers and pass them in.
3. Log the request and response.
"""

def decorator(f):
def decorator(
f: Callable[Concatenate[App, Request, SecretsDict, P], T]
) -> Callable[Concatenate[App, Request, P], T]:
@wraps(f)
def route(self, request, *args, **kwargs):
def route(
# Don't set text/html content type by default:
self: App,
request.defaultContentType = None
request: Request,
*args: P.args,
**kwargs: P.kwargs,
) -> T:
# Don't set text/html content type by default.
# None is actually supported, see https://github.com/twisted/twisted/issues/11902
request.defaultContentType = None  # type: ignore[assignment]

with start_action(
action_type="allmydata:storage:http-server:handle-request",

@ -163,7 +203,22 @@ def _authorization_decorator(required_secrets):
return decorator
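The decorator's new signature leans on PEP 612: Concatenate lets it strip the leading (App, Request, SecretsDict) arguments that the wrapper supplies itself, while ParamSpec preserves whatever parameters the wrapped route adds after them. A minimal sketch of the same pattern outside Klein (all names here are illustrative only):

from typing import Callable, TypeVar
from typing_extensions import Concatenate, ParamSpec

P = ParamSpec("P")
T = TypeVar("T")

def supply_first(value: int):
    # Adapt a function taking a leading int so callers no longer pass it;
    # the remaining parameters keep their types under a type checker.
    def decorator(f: Callable[Concatenate[int, P], T]) -> Callable[P, T]:
        def wrapper(*args: P.args, **kwargs: P.kwargs) -> T:
            return f(value, *args, **kwargs)
        return wrapper
    return decorator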
def _authorized_route(app, required_secrets, *route_args, **route_kwargs):
def _authorized_route(
klein_app: Klein,
required_secrets: set[Secrets],
url: str,
*route_args: Any,
branch: bool = False,
**route_kwargs: Any,
) -> Callable[
[
Callable[
Concatenate[App, Request, SecretsDict, P],
KleinRenderable,
]
],
Callable[..., KleinRenderable],
]:
"""
Like Klein's @route, but with additional support for checking the
``Authorization`` header as well as ``X-Tahoe-Authorization`` headers. The

@ -173,12 +228,23 @@ def _authorized_route(app, required_secrets, *route_args, **route_kwargs):
:param required_secrets: Set of required ``Secret`` types.
"""

def decorator(f):
def decorator(
@app.route(*route_args, **route_kwargs)
f: Callable[
Concatenate[App, Request, SecretsDict, P],
KleinRenderable,
]
) -> Callable[..., KleinRenderable]:
@klein_app.route(url, *route_args, branch=branch, **route_kwargs)  # type: ignore[arg-type]
@_authorization_decorator(required_secrets)
@wraps(f)
def handle_route(*args, **kwargs):
def handle_route(
return f(*args, **kwargs)
app: App,
request: Request,
secrets: SecretsDict,
*args: P.args,
**kwargs: P.kwargs,
) -> KleinRenderable:
return f(app, request, secrets, *args, **kwargs)

return handle_route

@ -234,7 +300,7 @@ class UploadsInProgress(object):
except (KeyError, IndexError):
raise _HTTPError(http.NOT_FOUND)

def remove_write_bucket(self, bucket: BucketWriter):
def remove_write_bucket(self, bucket: BucketWriter) -> None:
"""Stop tracking the given ``BucketWriter``."""
try:
storage_index, share_number = self._bucketwriters.pop(bucket)

@ -250,7 +316,7 @@ class UploadsInProgress(object):
def validate_upload_secret(
self, storage_index: bytes, share_number: int, upload_secret: bytes
):
) -> None:
"""
Raise an unauthorized-HTTP-response exception if the given
storage_index+share_number have a different upload secret than the

@ -272,7 +338,7 @@ class StorageIndexConverter(BaseConverter):
regex = "[" + str(rfc3548_alphabet, "ascii") + "]{26}"

def to_python(self, value):
def to_python(self, value: str) -> bytes:
try:
return si_a2b(value.encode("ascii"))
except (AssertionError, binascii.Error, ValueError):

@ -351,7 +417,7 @@ class _ReadAllProducer:
start: int = field(default=0)

@classmethod
def produce_to(cls, request: Request, read_data: ReadData) -> Deferred:
def produce_to(cls, request: Request, read_data: ReadData) -> Deferred[bytes]:
"""
Create and register the producer, returning ``Deferred`` that should be
returned from a HTTP server endpoint.

@ -360,7 +426,7 @@ class _ReadAllProducer:
request.registerProducer(producer, False)
return producer.result

def resumeProducing(self):
def resumeProducing(self) -> None:
data = self.read_data(self.start, 65536)
if not data:
self.request.unregisterProducer()

@ -371,10 +437,10 @@ class _ReadAllProducer:
self.request.write(data)
self.start += len(data)

def pauseProducing(self):
def pauseProducing(self) -> None:
pass

def stopProducing(self):
def stopProducing(self) -> None:
pass

@ -392,7 +458,7 @@ class _ReadRangeProducer:
start: int
remaining: int

def resumeProducing(self):
def resumeProducing(self) -> None:
if self.result is None or self.request is None:
return

@ -429,10 +495,10 @@ class _ReadRangeProducer:
if self.remaining == 0:
self.stopProducing()

def pauseProducing(self):
def pauseProducing(self) -> None:
pass

def stopProducing(self):
def stopProducing(self) -> None:
if self.request is not None:
self.request.unregisterProducer()
self.request = None

@ -511,12 +577,13 @@ def read_range(
return d

def _add_error_handling(app: Klein):
def _add_error_handling(app: Klein) -> None:
"""Add exception handlers to a Klein app."""

@app.handle_errors(_HTTPError)
def _http_error(_, request, failure):
def _http_error(self: Any, request: IRequest, failure: Failure) -> KleinRenderable:
"""Handle ``_HTTPError`` exceptions."""
assert isinstance(failure.value, _HTTPError)
request.setResponseCode(failure.value.code)
if failure.value.body is not None:
return failure.value.body

@ -524,71 +591,16 @@ def _add_error_handling(app: Klein):
return b""

@app.handle_errors(CDDLValidationError)
def _cddl_validation_error(_, request, failure):
def _cddl_validation_error(
self: Any, request: IRequest, failure: Failure
) -> KleinRenderable:
"""Handle CDDL validation errors."""
request.setResponseCode(http.BAD_REQUEST)
return str(failure.value).encode("utf-8")

class HTTPServer(object):
async def read_encoded(
"""
reactor, request, schema: Schema, max_size: int = 1024 * 1024
A HTTP interface to the storage server.
"""

_app = Klein()
_app.url_map.converters["storage_index"] = StorageIndexConverter
_add_error_handling(_app)

def __init__(
self,
reactor: IReactorFromThreads,
storage_server: StorageServer,
swissnum: bytes,
):
self._reactor = reactor
self._storage_server = storage_server
self._swissnum = swissnum
# Maps storage index to StorageIndexUploads:
self._uploads = UploadsInProgress()

# When an upload finishes successfully, gets aborted, or times out,
# make sure it gets removed from our tracking datastructure:
self._storage_server.register_bucket_writer_close_handler(
self._uploads.remove_write_bucket
)

def get_resource(self):
"""Return twisted.web ``Resource`` for this object."""
return self._app.resource()

def _send_encoded(self, request, data):
"""
Return encoded data suitable for writing as the HTTP body response, by
default using CBOR.

Also sets the appropriate ``Content-Type`` header on the response.
"""
accept_headers = request.requestHeaders.getRawHeaders("accept") or [
CBOR_MIME_TYPE
]
accept = parse_accept_header(accept_headers[0])
if accept.best == CBOR_MIME_TYPE:
request.setHeader("Content-Type", CBOR_MIME_TYPE)
f = TemporaryFile()
cbor2.dump(data, f)

def read_data(offset: int, length: int) -> bytes:
f.seek(offset)
return f.read(length)

return _ReadAllProducer.produce_to(request, read_data)
else:
# TODO Might want to optionally send JSON someday:
# https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3861
raise _HTTPError(http.NOT_ACCEPTABLE)

async def _read_encoded(
self, request, schema: Schema, max_size: int = 1024 * 1024
) -> Any:
"""
Read encoded request body data, decoding it with CBOR by default.

@ -596,6 +608,8 @@ class HTTPServer(object):
Somewhat arbitrarily, limit body size to 1MiB by default.
"""
content_type = get_content_type(request.requestHeaders)
if content_type is None:
content_type = CBOR_MIME_TYPE
if content_type != CBOR_MIME_TYPE:
raise _HTTPError(http.UNSUPPORTED_MEDIA_TYPE)

@ -625,7 +639,7 @@ class HTTPServer(object):
# Pycddl will release the GIL when validating larger documents, so
# let's take advantage of multiple CPUs:
if size > 10_000:
await defer_to_thread(self._reactor, schema.validate_cbor, message)
await defer_to_thread(reactor, schema.validate_cbor, message)
else:
schema.validate_cbor(message)

@ -638,10 +652,68 @@ class HTTPServer(object):
# in the decode path. As such, running it in a different thread has no benefit.
return cbor2.load(request.content)

class HTTPServer(BaseApp):
"""
A HTTP interface to the storage server.
"""

_app = Klein()
_app.url_map.converters["storage_index"] = StorageIndexConverter
_add_error_handling(_app)

def __init__(
self,
reactor: IReactorFromThreads,
storage_server: StorageServer,
swissnum: bytes,
):
self._reactor = reactor
self._storage_server = storage_server
self._swissnum = swissnum
# Maps storage index to StorageIndexUploads:
self._uploads = UploadsInProgress()

# When an upload finishes successfully, gets aborted, or times out,
# make sure it gets removed from our tracking datastructure:
self._storage_server.register_bucket_writer_close_handler(
self._uploads.remove_write_bucket
)

def get_resource(self) -> KleinResource:
"""Return twisted.web ``Resource`` for this object."""
return self._app.resource()

def _send_encoded(self, request: Request, data: object) -> Deferred[bytes]:
"""
Return encoded data suitable for writing as the HTTP body response, by
default using CBOR.

Also sets the appropriate ``Content-Type`` header on the response.
"""
accept_headers = request.requestHeaders.getRawHeaders("accept") or [
CBOR_MIME_TYPE
]
accept = parse_accept_header(accept_headers[0])
if accept.best == CBOR_MIME_TYPE:
request.setHeader("Content-Type", CBOR_MIME_TYPE)
f = TemporaryFile()
cbor2.dump(data, f)

def read_data(offset: int, length: int) -> bytes:
f.seek(offset)
return f.read(length)

return _ReadAllProducer.produce_to(request, read_data)
else:
# TODO Might want to optionally send JSON someday:
# https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3861
raise _HTTPError(http.NOT_ACCEPTABLE)
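Both directions default to CBOR here: a request body with no Content-Type is treated as CBOR, anything else is rejected with 415, and responses are CBOR unless the Accept header asks otherwise (in which case 406 is raised). A tiny round-trip sketch with the cbor2 library, using an illustrative payload:

import cbor2

payload = {"reason": "example"}          # illustrative message body
encoded = cbor2.dumps(payload)           # bytes sent as application/cbor
assert cbor2.loads(encoded) == payload   # what read_encoded recovers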
##### Generic APIs #####

@_authorized_route(_app, set(), "/storage/v1/version", methods=["GET"])
def version(self, request, authorization):
def version(self, request: Request, authorization: SecretsDict) -> KleinRenderable:
"""Return version information."""
return self._send_encoded(request, self._get_version())

@ -673,12 +745,14 @@ class HTTPServer(object):
methods=["POST"],
)
@async_to_deferred
async def allocate_buckets(self, request, authorization, storage_index):
async def allocate_buckets(
self, request: Request, authorization: SecretsDict, storage_index: bytes
) -> KleinRenderable:
"""Allocate buckets."""
upload_secret = authorization[Secrets.UPLOAD]
# It's just a list of up to ~256 shares, shouldn't use many bytes.
info = await self._read_encoded(
info = await read_encoded(
request, _SCHEMAS["allocate_buckets"], max_size=8192
self._reactor, request, _SCHEMAS["allocate_buckets"], max_size=8192
)

# We do NOT validate the upload secret for existing bucket uploads.

@ -712,7 +786,13 @@ class HTTPServer(object):
"/storage/v1/immutable/<storage_index:storage_index>/<int(signed=False):share_number>/abort",
methods=["PUT"],
)
def abort_share_upload(self, request, authorization, storage_index, share_number):
def abort_share_upload(
self,
request: Request,
authorization: SecretsDict,
storage_index: bytes,
share_number: int,
) -> KleinRenderable:
"""Abort an in-progress immutable share upload."""
try:
bucket = self._uploads.get_write_bucket(

@ -743,7 +823,13 @@ class HTTPServer(object):
"/storage/v1/immutable/<storage_index:storage_index>/<int(signed=False):share_number>",
methods=["PATCH"],
)
def write_share_data(self, request, authorization, storage_index, share_number):
def write_share_data(
self,
request: Request,
authorization: SecretsDict,
storage_index: bytes,
share_number: int,
) -> KleinRenderable:
"""Write data to an in-progress immutable upload."""
content_range = parse_content_range_header(request.getHeader("content-range"))
if content_range is None or content_range.units != "bytes":

@ -753,14 +839,17 @@ class HTTPServer(object):
bucket = self._uploads.get_write_bucket(
storage_index, share_number, authorization[Secrets.UPLOAD]
)
offset = content_range.start
offset = content_range.start or 0
remaining = content_range.stop - content_range.start
# We don't support an unspecified stop for the range:
assert content_range.stop is not None
# Missing body makes no sense:
assert request.content is not None
remaining = content_range.stop - offset
finished = False

while remaining > 0:
data = request.content.read(min(remaining, 65536))
assert data, "uploaded data length doesn't match range"

try:
finished = bucket.write(offset, data)
except ConflictingWriteError:
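The PATCH handler trusts werkzeug's parse_content_range_header and then insists on an explicit stop before computing the remaining byte count. A hedged sketch of that validation in isolation, using werkzeug directly (the helper name is invented; note that werkzeug reports stop as an exclusive offset):

from werkzeug.http import parse_content_range_header

def byte_range(header_value: str) -> tuple[int, int]:
    # Return (offset, remaining) the way the handler above computes them;
    # reject non-byte units and open-ended ranges.
    cr = parse_content_range_header(header_value)
    if cr is None or cr.units != "bytes" or cr.stop is None:
        raise ValueError("expected a bytes range with an explicit stop")
    offset = cr.start or 0
    return offset, cr.stop - offset

print(byte_range("bytes 0-15/16"))  # -> (0, 16)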
@ -786,7 +875,9 @@ class HTTPServer(object):
"/storage/v1/immutable/<storage_index:storage_index>/shares",
methods=["GET"],
)
def list_shares(self, request, authorization, storage_index):
def list_shares(
self, request: Request, authorization: SecretsDict, storage_index: bytes
) -> KleinRenderable:
"""
List shares for the given storage index.
"""

@ -799,7 +890,13 @@ class HTTPServer(object):
"/storage/v1/immutable/<storage_index:storage_index>/<int(signed=False):share_number>",
methods=["GET"],
)
def read_share_chunk(self, request, authorization, storage_index, share_number):
def read_share_chunk(
self,
request: Request,
authorization: SecretsDict,
storage_index: bytes,
share_number: int,
) -> KleinRenderable:
"""Read a chunk for an already uploaded immutable."""
request.setHeader("content-type", "application/octet-stream")
try:

@ -816,7 +913,9 @@ class HTTPServer(object):
"/storage/v1/lease/<storage_index:storage_index>",
methods=["PUT"],
)
def add_or_renew_lease(self, request, authorization, storage_index):
def add_or_renew_lease(
self, request: Request, authorization: SecretsDict, storage_index: bytes
) -> KleinRenderable:
"""Update the lease for an immutable or mutable share."""
if not list(self._storage_server.get_shares(storage_index)):
raise _HTTPError(http.NOT_FOUND)

@ -839,8 +938,12 @@ class HTTPServer(object):
)
@async_to_deferred
async def advise_corrupt_share_immutable(
self, request, authorization, storage_index, share_number
self,
):
request: Request,
authorization: SecretsDict,
storage_index: bytes,
share_number: int,
) -> KleinRenderable:
"""Indicate that given share is corrupt, with a text reason."""
try:
bucket = self._storage_server.get_buckets(storage_index)[share_number]

@ -849,7 +952,8 @@ class HTTPServer(object):

# The reason can be a string with explanation, so in theory it could be
# longish?
info = await self._read_encoded(
info = await read_encoded(
self._reactor,
request,
_SCHEMAS["advise_corrupt_share"],
max_size=32768,

@ -866,10 +970,15 @@ class HTTPServer(object):
methods=["POST"],
)
@async_to_deferred
async def mutable_read_test_write(self, request, authorization, storage_index):
async def mutable_read_test_write(
self, request: Request, authorization: SecretsDict, storage_index: bytes
) -> KleinRenderable:
"""Read/test/write combined operation for mutables."""
rtw_request = await self._read_encoded(
rtw_request = await read_encoded(
request, _SCHEMAS["mutable_read_test_write"], max_size=2**48
self._reactor,
request,
_SCHEMAS["mutable_read_test_write"],
max_size=2**48,
)
secrets = (
authorization[Secrets.WRITE_ENABLER],

@ -905,7 +1014,13 @@ class HTTPServer(object):
"/storage/v1/mutable/<storage_index:storage_index>/<int(signed=False):share_number>",
methods=["GET"],
)
def read_mutable_chunk(self, request, authorization, storage_index, share_number):
def read_mutable_chunk(
self,
request: Request,
authorization: SecretsDict,
storage_index: bytes,
share_number: int,
) -> KleinRenderable:
"""Read a chunk from a mutable."""
request.setHeader("content-type", "application/octet-stream")

@ -945,8 +1060,12 @@ class HTTPServer(object):
)
@async_to_deferred
async def advise_corrupt_share_mutable(
self, request, authorization, storage_index, share_number
self,
):
request: Request,
authorization: SecretsDict,
storage_index: bytes,
share_number: int,
) -> KleinRenderable:
"""Indicate that given share is corrupt, with a text reason."""
if share_number not in {
shnum for (shnum, _) in self._storage_server.get_shares(storage_index)

@ -955,8 +1074,8 @@ class HTTPServer(object):

# The reason can be a string with explanation, so in theory it could be
# longish?
info = await self._read_encoded(
info = await read_encoded(
request, _SCHEMAS["advise_corrupt_share"], max_size=32768
self._reactor, request, _SCHEMAS["advise_corrupt_share"], max_size=32768
)
self._storage_server.advise_corrupt_share(
b"mutable", storage_index, share_number, info["reason"].encode("utf-8")

@ -978,7 +1097,10 @@ class _TLSEndpointWrapper(object):

@classmethod
def from_paths(
cls, endpoint, private_key_path: FilePath, cert_path: FilePath
cls: type[_TLSEndpointWrapper],
endpoint: IStreamServerEndpoint,
private_key_path: FilePath,
cert_path: FilePath,
) -> "_TLSEndpointWrapper":
"""
Create an endpoint with the given private key and certificate paths on

@ -993,7 +1115,7 @@ class _TLSEndpointWrapper(object):
)
return cls(endpoint=endpoint, context_factory=certificate_options)

def listen(self, factory):
def listen(self, factory: IProtocolFactory) -> Deferred[IListeningPort]:
return self.endpoint.listen(
TLSMemoryBIOFactory(self.context_factory, False, factory)
)

@ -1429,7 +1429,7 @@ class _FakeRemoteReference(object):
result = yield getattr(self.local_object, action)(*args, **kwargs)
defer.returnValue(result)
except HTTPClientException as e:
raise RemoteException(e.args)
raise RemoteException((e.code, e.message, e.body))

@attr.s

@ -42,6 +42,7 @@ from werkzeug.exceptions import NotFound as WNotFound
from testtools.matchers import Equals
from zope.interface import implementer

from ..util.deferredutil import async_to_deferred
from .common import SyncTestCase
from ..storage.http_common import (
get_content_type,

@ -59,6 +60,9 @@ from ..storage.http_server import (
_authorized_route,
StorageIndexConverter,
_add_error_handling,
read_encoded,
_SCHEMAS as SERVER_SCHEMAS,
BaseApp,
)
from ..storage.http_client import (
StorageClient,

@ -172,7 +176,7 @@ class ExtractSecretsTests(SyncTestCase):
``ClientSecretsException``.
"""
with self.assertRaises(ClientSecretsException):
_extract_secrets(["FOO eA=="], {})
_extract_secrets(["FOO eA=="], set())

def test_bad_secret_not_base64(self):
"""

@ -254,7 +258,7 @@ def gen_bytes(length: int) -> bytes:
return result

class TestApp(object):
class TestApp(BaseApp):
"""HTTP API for testing purposes."""

clock: IReactorTime

@ -262,7 +266,7 @@ class TestApp(object):
_add_error_handling(_app)
_swissnum = SWISSNUM_FOR_TEST # Match what the test client is using

@_authorized_route(_app, {}, "/noop", methods=["GET"])
@_authorized_route(_app, set(), "/noop", methods=["GET"])
def noop(self, request, authorization):
return "noop"

@ -303,6 +307,19 @@ class TestApp(object):
request.transport.loseConnection()
return Deferred()

@_authorized_route(_app, set(), "/read_body", methods=["POST"])
@async_to_deferred
async def read_body(self, request, authorization):
"""
Accept an advise_corrupt_share message, return the reason.

I.e. exercise codepaths used for reading CBOR from the body.
"""
data = await read_encoded(
self.clock, request, SERVER_SCHEMAS["advise_corrupt_share"]
)
return data["reason"]

def result_of(d):
"""

@ -320,6 +337,7 @@ def result_of(d):
+ "This is probably a test design issue."
)

class CustomHTTPServerTests(SyncTestCase):
"""
Tests that use a custom HTTP server.

@ -504,6 +522,40 @@ class CustomHTTPServerTests(SyncTestCase):
result_of(d)
self.assertEqual(len(self._http_server.clock.getDelayedCalls()), 0)

def test_request_with_no_content_type_same_as_cbor(self):
"""
If no ``Content-Type`` header is set when sending a body, it is assumed
to be CBOR.
"""
response = result_of(
self.client.request(
"POST",
DecodedURL.from_text("http://127.0.0.1/read_body"),
data=dumps({"reason": "test"}),
)
)
self.assertEqual(
result_of(limited_content(response, self._http_server.clock, 100)).read(),
b"test",
)
def test_request_with_wrong_content(self):
"""
If a non-CBOR ``Content-Type`` header is set when sending a body, the
server complains appropriately.
"""
headers = Headers()
headers.setRawHeaders("content-type", ["some/value"])
response = result_of(
self.client.request(
"POST",
DecodedURL.from_text("http://127.0.0.1/read_body"),
data=dumps({"reason": "test"}),
headers=headers,
)
)
self.assertEqual(response.code, http.UNSUPPORTED_MEDIA_TYPE)
@implementer(IReactorFromThreads)
class Reactor(Clock):

@ -109,9 +109,11 @@ class PinningHTTPSValidation(AsyncTestCase):
root.isLeaf = True
listening_port = await endpoint.listen(Site(root))
try:
yield f"https://127.0.0.1:{listening_port.getHost().port}/"
yield f"https://127.0.0.1:{listening_port.getHost().port}/"  # type: ignore[attr-defined]
finally:
await listening_port.stopListening()
result = listening_port.stopListening()
if result is not None:
await result

def request(self, url: str, expected_certificate: x509.Certificate):
"""
|
||||||
|
Reference in New Issue
Block a user