mirror of https://github.com/tahoe-lafs/tahoe-lafs.git (synced 2025-04-26)
commit e7c71e66a3: Merge remote-tracking branch 'origin/master' into 3978-connection-status-http-storage

@@ -11,19 +11,31 @@
 #
 version: 2.1
 
-# A template that can be shared between the two different image-building
+# Every job that pushes a Docker image from Docker Hub must authenticate to
+# it.  Define a couple yaml anchors that can be used to supply the necessary
+# credentials.
+
+# First is a CircleCI job context which makes Docker Hub credentials available
+# in the environment.
+#
+# Contexts are managed in the CircleCI web interface:
+#
+#  https://app.circleci.com/settings/organization/github/tahoe-lafs/contexts
+dockerhub-context-template: &DOCKERHUB_CONTEXT
+  context: "dockerhub-auth"
+
+# Next is a Docker executor template that gets the credentials from the
+# environment and supplies them to the executor.
+dockerhub-auth-template: &DOCKERHUB_AUTH
+  - auth:
+      username: $DOCKERHUB_USERNAME
+      password: $DOCKERHUB_PASSWORD
+
+# A template that can be shared between the two different image-building
 # workflows.
 .images: &IMAGES
   jobs:
-    # Every job that pushes a Docker image from Docker Hub needs to provide
-    # credentials.  Use this first job to define a yaml anchor that can be
-    # used to supply a CircleCI job context which makes Docker Hub credentials
-    # available in the environment.
-    #
-    # Contexts are managed in the CircleCI web interface:
-    #
-    #  https://app.circleci.com/settings/organization/github/tahoe-lafs/contexts
-    - "build-image-debian-11": &DOCKERHUB_CONTEXT
+    - "build-image-debian-11":
         <<: *DOCKERHUB_CONTEXT
     - "build-image-ubuntu-20-04":
         <<: *DOCKERHUB_CONTEXT

@@ -71,12 +83,20 @@ workflows:
         {}
 
     - "nixos":
-        name: "NixOS 22.11"
+        name: "<<matrix.pythonVersion>>"
         nixpkgs: "22.11"
+        matrix:
+          parameters:
+            pythonVersion:
+            - "python310"
 
     - "nixos":
-        name: "NixOS unstable"
+        name: "<<matrix.pythonVersion>>"
         nixpkgs: "unstable"
+        matrix:
+          parameters:
+            pythonVersion:
+            - "python311"
 
     # Eventually, test against PyPy 3.8
     #- "pypy27-buster":

@@ -113,30 +133,7 @@ workflows:
   # Build as part of the workflow but only if requested.
   when: "<< pipeline.parameters.build-images >>"
 
-
 jobs:
-  dockerhub-auth-template:
-    # This isn't a real job.  It doesn't get scheduled as part of any
-    # workflow.  Instead, it's just a place we can hang a yaml anchor to
-    # finish the Docker Hub authentication configuration.  Workflow jobs using
-    # the DOCKERHUB_CONTEXT anchor will have access to the environment
-    # variables used here.  These variables will allow the Docker Hub image
-    # pull to be authenticated and hopefully avoid hitting and rate limits.
-    docker: &DOCKERHUB_AUTH
-      - image: "null"
-        auth:
-          username: $DOCKERHUB_USERNAME
-          password: $DOCKERHUB_PASSWORD
-
-    steps:
-      - run:
-          name: "CircleCI YAML schema conformity"
-          command: |
-            # This isn't a real command.  We have to have something in this
-            # space, though, or the CircleCI yaml schema validator gets angry.
-            # Since this job is never scheduled this step is never run so the
-            # actual value here is irrelevant.
-
   codechecks:
     docker:
       - <<: *DOCKERHUB_AUTH

@@ -374,56 +371,29 @@ jobs:
          Reference the name of a niv-managed nixpkgs source (see `niv show`
          and nix/sources.json)
        type: "string"
+      pythonVersion:
+        description: >-
+          Reference the name of a Python package in nixpkgs to use.
+        type: "string"
+
-    docker:
-      # Run in a highly Nix-capable environment.
-      - <<: *DOCKERHUB_AUTH
-        image: "nixos/nix:2.10.3"
-
-    environment:
-      # CACHIX_AUTH_TOKEN is manually set in the CircleCI web UI and
-      # allows us to push to CACHIX_NAME.  We only need this set for
-      # `cachix use` in this step.
-      CACHIX_NAME: "tahoe-lafs-opensource"
+    executor: "nix"
 
     steps:
-      - "run":
-          # Get cachix for Nix-friendly caching.
-          name: "Install Basic Dependencies"
-          command: |
-            NIXPKGS="https://github.com/nixos/nixpkgs/archive/nixos-<<parameters.nixpkgs>>.tar.gz"
-            nix-env \
-              --file $NIXPKGS \
-              --install \
-              -A cachix bash
-            # Activate it for "binary substitution".  This sets up
-            # configuration tht lets Nix download something from the cache
-            # instead of building it locally, if possible.
-            cachix use "${CACHIX_NAME}"
-
-      - "checkout"
-
-      - "run":
-          # The Nix package doesn't know how to do this part, unfortunately.
-          name: "Generate version"
-          command: |
-            nix-shell \
-              -p 'python3.withPackages (ps: [ ps.setuptools ])' \
-              --run 'python setup.py update_version'
-
-      - "run":
-          name: "Test"
-          command: |
-            # CircleCI build environment looks like it has a zillion and a
-            # half cores.  Don't let Nix autodetect this high core count
-            # because it blows up memory usage and fails the test run.  Pick a
-            # number of cores that suites the build environment we're paying
-            # for (the free one!).
-            source .circleci/lib.sh
-            cache_if_able nix-build \
-              --cores 8 \
-              --argstr pkgsVersion "nixpkgs-<<parameters.nixpkgs>>" \
-              nix/tests.nix
+      - "nix-build":
+          nixpkgs: "<<parameters.nixpkgs>>"
+          pythonVersion: "<<parameters.pythonVersion>>"
+          buildSteps:
+            - "run":
+                name: "Unit Test"
+                command: |
+                  # The dependencies are all built so we can allow more
+                  # parallelism here.
+                  source .circleci/lib.sh
+                  cache_if_able nix-build \
+                    --cores 8 \
+                    --argstr pkgsVersion "nixpkgs-<<parameters.nixpkgs>>" \
+                    --argstr pythonVersion "<<parameters.pythonVersion>>" \
+                    nix/tests.nix

@@ -527,7 +497,6 @@ jobs:
 
   # build-image-pypy27-buster:
   #   <<: *BUILD_IMAGE
-
   #   environment:
   #     DISTRO: "pypy"
   #     TAG: "buster"

@@ -535,3 +504,87 @@
   #     # setting up PyPy 3 in the image building toolchain.  This value is just
   #     # for constructing the right Docker image tag.
   #     PYTHON_VERSION: "2"
+
+executors:
+  nix:
+    docker:
+      # Run in a highly Nix-capable environment.
+      - <<: *DOCKERHUB_AUTH
+        image: "nixos/nix:2.10.3"
+    environment:
+      # CACHIX_AUTH_TOKEN is manually set in the CircleCI web UI and allows us
+      # to push to CACHIX_NAME.  CACHIX_NAME tells cachix which cache to push
+      # to.
+      CACHIX_NAME: "tahoe-lafs-opensource"
+
+commands:
+  nix-build:
+    parameters:
+      nixpkgs:
+        description: >-
+          Reference the name of a niv-managed nixpkgs source (see `niv show`
+          and nix/sources.json)
+        type: "string"
+      pythonVersion:
+        description: >-
+          Reference the name of a Python package in nixpkgs to use.
+        type: "string"
+      buildSteps:
+        description: >-
+          The build steps to execute after setting up the build environment.
+        type: "steps"
+
+    steps:
+      - "run":
+          # Get cachix for Nix-friendly caching.
+          name: "Install Basic Dependencies"
+          command: |
+            NIXPKGS="https://github.com/nixos/nixpkgs/archive/nixos-<<parameters.nixpkgs>>.tar.gz"
+            nix-env \
+              --file $NIXPKGS \
+              --install \
+              -A cachix bash
+            # Activate it for "binary substitution".  This sets up
+            # configuration that lets Nix download something from the cache
+            # instead of building it locally, if possible.
+            cachix use "${CACHIX_NAME}"
+
+      - "checkout"
+
+      - "run":
+          # The Nix package doesn't know how to do this part, unfortunately.
+          name: "Generate version"
+          command: |
+            nix-shell \
+              -p 'python3.withPackages (ps: [ ps.setuptools ])' \
+              --run 'python setup.py update_version'
+
+      - "run":
+          name: "Build Dependencies"
+          command: |
+            # CircleCI build environment looks like it has a zillion and a
+            # half cores.  Don't let Nix autodetect this high core count
+            # because it blows up memory usage and fails the test run.  Pick a
+            # number of cores that suits the build environment we're paying
+            # for (the free one!).
+            source .circleci/lib.sh
+            # nix-shell will build all of the dependencies of the target but
+            # not the target itself.
+            cache_if_able nix-shell \
+              --run "" \
+              --cores 3 \
+              --argstr pkgsVersion "nixpkgs-<<parameters.nixpkgs>>" \
+              --argstr pythonVersion "<<parameters.pythonVersion>>" \
+              ./default.nix
+
+      - "run":
+          name: "Build Package"
+          command: |
+            source .circleci/lib.sh
+            cache_if_able nix-build \
+              --cores 4 \
+              --argstr pkgsVersion "nixpkgs-<<parameters.nixpkgs>>" \
+              --argstr pythonVersion "<<parameters.pythonVersion>>" \
+              ./default.nix
+
+      - steps: "<<parameters.buildSteps>>"

@@ -32,11 +32,7 @@ in
 
 }:
 with (pkgs.${pythonVersion}.override {
-  packageOverrides = self: super: {
-    # Some dependencies aren't packaged in nixpkgs so supply our own packages.
-    pycddl = self.callPackage ./nix/pycddl.nix { };
-    txi2p = self.callPackage ./nix/txi2p.nix { };
-  };
+  packageOverrides = import ./nix/python-overrides.nix;
 }).pkgs;
 callPackage ./nix/tahoe-lafs.nix {
   # Select whichever package extras were requested.

@@ -82,8 +82,9 @@ network: A
 
 memory footprint: N/K*A
 
-notes: Tahoe-LAFS generates a new RSA keypair for each mutable file that it
-publishes to a grid. This takes up to 1 or 2 seconds on a typical desktop PC.
+notes:
+ Tahoe-LAFS generates a new RSA keypair for each mutable file that it publishes to a grid.
+ This takes around 100 milliseconds on a relatively high-end laptop from 2021.
 
 Part of the process of encrypting, encoding, and uploading a mutable file to a
 Tahoe-LAFS grid requires that the entire file be in memory at once. For larger

@@ -3,7 +3,7 @@
 Storage Node Protocol ("Great Black Swamp", "GBS")
 ==================================================
 
-The target audience for this document is Tahoe-LAFS developers.
+The target audience for this document is developers working on Tahoe-LAFS or on an alternate implementation intended to be interoperable.
 After reading this document,
 one should expect to understand how Tahoe-LAFS clients interact over the network with Tahoe-LAFS storage nodes.
 

@@ -64,6 +64,10 @@ Glossary
 lease renew secret
    a short secret string which storage servers required to be presented before allowing a particular lease to be renewed
 
+The key words
+"MUST", "MUST NOT", "REQUIRED", "SHALL", "SHALL NOT", "SHOULD", "SHOULD NOT", "RECOMMENDED", "MAY", and "OPTIONAL"
+in this document are to be interpreted as described in RFC 2119.
+
 Motivation
 ----------
 

@@ -119,8 +123,8 @@ An HTTP-based protocol can make use of TLS in largely the same way to provide th
 Provision of these properties *is* dependant on implementers following Great Black Swamp's rules for x509 certificate validation
 (rather than the standard "web" rules for validation).
 
-Requirements
-------------
+Design Requirements
+-------------------
 
 Security
 ~~~~~~~~

@@ -189,6 +193,9 @@ Solutions
 An HTTP-based protocol, dubbed "Great Black Swamp" (or "GBS"), is described below.
 This protocol aims to satisfy the above requirements at a lower level of complexity than the current Foolscap-based protocol.
 
+Summary (Non-normative)
+~~~~~~~~~~~~~~~~~~~~~~~
+
 Communication with the storage node will take place using TLS.
 The TLS version and configuration will be dictated by an ongoing understanding of best practices.
 The storage node will present an x509 certificate during the TLS handshake.

@@ -237,10 +244,10 @@ When Bob's client issues HTTP requests to Alice's storage node it includes the *
 .. note::
 
    Foolscap TubIDs are 20 bytes (SHA1 digest of the certificate).
-   They are encoded with Base32 for a length of 32 bytes.
+   They are encoded with `Base32`_ for a length of 32 bytes.
    SPKI information discussed here is 32 bytes (SHA256 digest).
-   They would be encoded in Base32 for a length of 52 bytes.
-   `base64url`_ provides a more compact encoding of the information while remaining URL-compatible.
+   They would be encoded in `Base32`_ for a length of 52 bytes.
+   `unpadded base64url`_ provides a more compact encoding of the information while remaining URL-compatible.
    This would encode the SPKI information for a length of merely 43 bytes.
    SHA1,
    the current Foolscap hash function,

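The length arithmetic in that note is easy to check with Python's standard library; this non-normative sketch assumes nothing beyond the digest sizes quoted above::

    from base64 import b32encode, urlsafe_b64encode

    tub_id_sha1 = b"\x00" * 20   # SHA1 digests are 20 bytes
    spki_sha256 = b"\x00" * 32   # SHA256 digests are 32 bytes

    assert len(b32encode(tub_id_sha1)) == 32                       # 160 bits needs no padding
    assert len(b32encode(spki_sha256).rstrip(b"=")) == 52          # unpadded Base32
    assert len(urlsafe_b64encode(spki_sha256).rstrip(b"=")) == 43  # unpadded base64url
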
@@ -329,15 +336,117 @@ and shares.
 A particular resource is addressed by the HTTP request path.
 Details about the interface are encoded in the HTTP message body.
 
+String Encoding
+~~~~~~~~~~~~~~~
+
+.. _Base32:
+
+Base32
+!!!!!!
+
+Where the specification refers to Base32 the meaning is *unpadded* Base32 encoding as specified by `RFC 4648`_ using a *lowercase variation* of the alphabet from Section 6.
+
+That is, the alphabet is:
+
+.. list-table:: Base32 Alphabet
+   :header-rows: 1
+
+   * - Value
+     - Encoding
+     - Value
+     - Encoding
+     - Value
+     - Encoding
+     - Value
+     - Encoding
+   * - 0
+     - a
+     - 9
+     - j
+     - 18
+     - s
+     - 27
+     - 3
+   * - 1
+     - b
+     - 10
+     - k
+     - 19
+     - t
+     - 28
+     - 4
+   * - 2
+     - c
+     - 11
+     - l
+     - 20
+     - u
+     - 29
+     - 5
+   * - 3
+     - d
+     - 12
+     - m
+     - 21
+     - v
+     - 30
+     - 6
+   * - 4
+     - e
+     - 13
+     - n
+     - 22
+     - w
+     - 31
+     - 7
+   * - 5
+     - f
+     - 14
+     - o
+     - 23
+     - x
+     -
+     -
+   * - 6
+     - g
+     - 15
+     - p
+     - 24
+     - y
+     -
+     -
+   * - 7
+     - h
+     - 16
+     - q
+     - 25
+     - z
+     -
+     -
+   * - 8
+     - i
+     - 17
+     - r
+     - 26
+     - 2
+     -
+     -
+
 Message Encoding
 ~~~~~~~~~~~~~~~~
 
-The preferred encoding for HTTP message bodies is `CBOR`_.
-A request may be submitted using an alternate encoding by declaring this in the ``Content-Type`` header.
-A request may indicate its preference for an alternate encoding in the response using the ``Accept`` header.
-These two headers are used in the typical way for an HTTP application.
+Clients and servers MUST use the ``Content-Type`` and ``Accept`` header fields as specified in `RFC 9110`_ for message body negotiation.
 
-The only other encoding support for which is currently recommended is JSON.
+The encoding for HTTP message bodies SHOULD be `CBOR`_.
+Clients submitting requests using this encoding MUST include a ``Content-Type: application/cbor`` request header field.
+A request MAY be submitted using an alternate encoding by declaring this in the ``Content-Type`` header field.
+A request MAY indicate its preference for an alternate encoding in the response using the ``Accept`` header field.
+A request which includes no ``Accept`` header field MUST be interpreted in the same way as a request including an ``Accept: application/cbor`` header field.
+
+Clients and servers MAY support additional request and response message body encodings.
+
+Clients and servers SHOULD support ``application/json`` request and response message body encoding.
 For HTTP messages carrying binary share data,
 this is expected to be a particularly poor encoding.
 However,

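A minimal non-normative sketch of this lowercase unpadded Base32 codec, using only Python's standard library (the helper names are illustrative, not part of the protocol)::

    from base64 import b32decode, b32encode

    def gbs_b32encode(bs: bytes) -> str:
        # RFC 4648 Base32, padding stripped, alphabet lowercased.
        return b32encode(bs).rstrip(b"=").lower().decode("ascii")

    def gbs_b32decode(s: str) -> bytes:
        # Restore the case and padding the stdlib decoder expects.
        return b32decode(s.upper() + "=" * (-len(s) % 8))

    assert gbs_b32encode(b"\x00") == "aa"
    assert gbs_b32decode("aa") == b"\x00"
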
@@ -350,10 +459,23 @@ Because of the simple types used throughout
 and the equivalence described in `RFC 7049`_
 these examples should be representative regardless of which of these two encodings is chosen.
 
-The one exception is sets.
-For CBOR messages, any sequence that is semantically a set (i.e. no repeated values allowed, order doesn't matter, and elements are hashable in Python) should be sent as a set.
-Tag 6.258 is used to indicate sets in CBOR; see `the CBOR registry <https://www.iana.org/assignments/cbor-tags/cbor-tags.xhtml>`_ for more details.
-Sets will be represented as JSON lists in examples because JSON doesn't support sets.
+There are two exceptions to this rule.
+
+1. Sets
+!!!!!!!
+
+For CBOR messages,
+any sequence that is semantically a set (i.e. no repeated values allowed, order doesn't matter, and elements are hashable in Python) should be sent as a set.
+Tag 6.258 is used to indicate sets in CBOR;
+see `the CBOR registry <https://www.iana.org/assignments/cbor-tags/cbor-tags.xhtml>`_ for more details.
+The JSON encoding does not support sets.
+Sets MUST be represented as arrays in JSON-encoded messages.
+
+2. Bytes
+!!!!!!!!
+
+The CBOR encoding natively supports a bytes type while the JSON encoding does not.
+Bytes MUST be represented as strings giving the `Base64`_ representation of the original bytes value.
 
 HTTP Design
 ~~~~~~~~~~~

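To illustrate the two exceptions (a non-normative sketch; the third-party ``cbor2`` library and all names here are choices of the example, not requirements of the protocol)::

    import json
    from base64 import b64encode

    import cbor2

    share_numbers = {1, 5, 7}

    # CBOR: tag 258 marks the enclosed array as a mathematical set.
    cbor_body = cbor2.dumps(cbor2.CBORTag(258, sorted(share_numbers)))

    # JSON: sets become arrays and bytes become Base64-encoded strings.
    json_body = json.dumps({
        "share-numbers": sorted(share_numbers),
        "specimen": b64encode(b"\x01\x02").decode("ascii"),
    })
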
@@ -368,29 +490,50 @@ one branch contains all of the share data;
 another branch contains all of the lease data;
 etc.
 
-An ``Authorization`` header in requests is required for all endpoints.
-The standard HTTP authorization protocol is used.
-The authentication *type* used is ``Tahoe-LAFS``.
-The swissnum from the NURL used to locate the storage service is used as the *credentials*.
-If credentials are not presented or the swissnum is not associated with a storage service then no storage processing is performed and the request receives an ``401 UNAUTHORIZED`` response.
+Clients and servers MUST use the ``Authorization`` header field,
+as specified in `RFC 9110`_,
+for authorization of all requests to all endpoints specified here.
+The authentication *type* MUST be ``Tahoe-LAFS``.
+Clients MUST present the `Base64`_-encoded representation of the swissnum from the NURL used to locate the storage service as the *credentials*.
 
-There are also, for some endpoints, secrets sent via ``X-Tahoe-Authorization`` headers.
-If these are:
+If credentials are not presented or the swissnum is not associated with a storage service then the server MUST issue a ``401 UNAUTHORIZED`` response and perform no other processing of the message.
+
+Requests to certain endpoints MUST include additional secrets in the ``X-Tahoe-Authorization`` header fields.
+The endpoints which require these secrets are:
+
+* ``PUT /storage/v1/lease/:storage_index``:
+  The secrets included MUST be ``lease-renew-secret`` and ``lease-cancel-secret``.
+
+* ``POST /storage/v1/immutable/:storage_index``:
+  The secrets included MUST be ``lease-renew-secret``, ``lease-cancel-secret``, and ``upload-secret``.
+
+* ``PATCH /storage/v1/immutable/:storage_index/:share_number``:
+  The secrets included MUST be ``upload-secret``.
+
+* ``PUT /storage/v1/immutable/:storage_index/:share_number/abort``:
+  The secrets included MUST be ``upload-secret``.
+
+* ``POST /storage/v1/mutable/:storage_index/read-test-write``:
+  The secrets included MUST be ``lease-renew-secret``, ``lease-cancel-secret``, and ``write-enabler``.
+
+If these secrets are:
 
 1. Missing.
 2. The wrong length.
 3. Not the expected kind of secret.
 4. They are otherwise unparseable before they are actually semantically used.
 
-the server will respond with ``400 BAD REQUEST``.
+the server MUST respond with ``400 BAD REQUEST`` and perform no other processing of the message.
 401 is not used because this isn't an authorization problem, this is a "you sent garbage and should know better" bug.
 
-If authorization using the secret fails, then a ``401 UNAUTHORIZED`` response should be sent.
+If authorization using the secret fails,
+then the server MUST send a ``401 UNAUTHORIZED`` response and perform no other processing of the message.
 
 Encoding
 ~~~~~~~~
 
-* ``storage_index`` should be base32 encoded (RFC3548) in URLs.
+* ``storage_index`` MUST be `Base32`_ encoded in URLs.
+* ``share_number`` MUST be a decimal representation.
 
 General
 ~~~~~~~

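A non-normative sketch of how a client might assemble these header fields (the helper and variable names are illustrative only)::

    from base64 import b64encode

    def tahoe_headers(swissnum: bytes, lease_renew: bytes, lease_cancel: bytes) -> list[tuple[str, str]]:
        b64 = lambda secret: b64encode(secret).decode("ascii")
        return [
            ("Authorization", "Tahoe-LAFS " + b64(swissnum)),
            # Only sent to the endpoints enumerated above:
            ("X-Tahoe-Authorization", "lease-renew-secret " + b64(lease_renew)),
            ("X-Tahoe-Authorization", "lease-cancel-secret " + b64(lease_cancel)),
        ]
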
@@ -398,21 +541,27 @@ General
 ``GET /storage/v1/version``
 !!!!!!!!!!!!!!!!!!!!!!!!!!!
 
-Retrieve information about the version of the storage server.
-Information is returned as an encoded mapping.
-For example::
+This endpoint allows clients to retrieve some basic metadata about a storage server from the storage service.
+The response MUST validate against this CDDL schema::
+
+  {'http://allmydata.org/tahoe/protocols/storage/v1' => {
+      'maximum-immutable-share-size' => uint
+      'maximum-mutable-share-size' => uint
+      'available-space' => uint
+      }
+   'application-version' => bstr
+  }
 
-  { "http://allmydata.org/tahoe/protocols/storage/v1" :
-    { "maximum-immutable-share-size": 1234,
-      "maximum-mutable-share-size": 1235,
-      "available-space": 123456,
-      "tolerates-immutable-read-overrun": true,
-      "delete-mutable-shares-with-zero-length-writev": true,
-      "fills-holes-with-zero-bytes": true,
-      "prevents-read-past-end-of-share-data": true
-      },
-    "application-version": "1.13.0"
-  }
+The server SHOULD populate as many fields as possible with accurate information about its behavior.
+
+For fields which relate to a specific API
+the semantics are documented below in the section for that API.
+For fields that are more general than a single API the semantics are as follows:
+
+* available-space:
+  The server SHOULD use this field to advertise the amount of space that it currently considers unused and is willing to allocate for client requests.
+  The value is a number of bytes.
 
 ``PUT /storage/v1/lease/:storage_index``
 !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!

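For illustration only, a client-side request for the version endpoint might look like the following sketch; it uses the third-party ``requests`` and ``cbor2`` libraries as stand-ins and ignores the GBS certificate-validation rules described earlier, which a real client must implement::

    import cbor2
    import requests

    def get_version(base_url: str, swissnum_b64: str) -> dict:
        response = requests.get(
            base_url + "/storage/v1/version",
            headers={
                "Authorization": "Tahoe-LAFS " + swissnum_b64,
                "Accept": "application/cbor",
            },
        )
        response.raise_for_status()
        return cbor2.loads(response.content)
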
@@ -471,21 +620,37 @@ Writing
 !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
 
 Initialize an immutable storage index with some buckets.
-The buckets may have share data written to them once.
-A lease is also created for the shares.
+The server MUST allow share data to be written to the buckets at most one time.
+The server MAY create a lease for the buckets.
 Details of the buckets to create are encoded in the request body.
+The request body MUST validate against this CDDL schema::
+
+  {
+    share-numbers: #6.258([0*256 uint])
+    allocated-size: uint
+  }
+
 For example::
 
   {"share-numbers": [1, 7, ...], "allocated-size": 12345}
 
-The request must include ``X-Tahoe-Authorization`` HTTP headers that set the various secrets—upload, lease renewal, lease cancellation—that will be later used to authorize various operations.
+The server SHOULD accept a value for **allocated-size** that is less than or equal to the lesser of the values of the server's version message's **maximum-immutable-share-size** or **available-space** values.
+
+The request MUST include ``X-Tahoe-Authorization`` HTTP headers that set the various secrets—upload, lease renewal, lease cancellation—that will be later used to authorize various operations.
 For example::
 
   X-Tahoe-Authorization: lease-renew-secret <base64-lease-renew-secret>
   X-Tahoe-Authorization: lease-cancel-secret <base64-lease-cancel-secret>
   X-Tahoe-Authorization: upload-secret <base64-upload-secret>
 
-The response body includes encoded information about the created buckets.
+The response body MUST include encoded information about the created buckets.
+The response body MUST validate against this CDDL schema::
+
+  {
+    already-have: #6.258([0*256 uint])
+    allocated: #6.258([0*256 uint])
+  }
+
 For example::
 
   {"already-have": [1, ...], "allocated": [7, ...]}

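A non-normative sketch of composing this request body, including the **allocated-size** bound recommended above (``cbor2`` and the helper name are assumptions of the example)::

    import cbor2

    def allocate_body(version: dict, share_numbers: set[int], share_size: int) -> bytes:
        v1 = version["http://allmydata.org/tahoe/protocols/storage/v1"]
        limit = min(v1["maximum-immutable-share-size"], v1["available-space"])
        assert share_size <= limit, "the server is unlikely to accept this allocation"
        return cbor2.dumps({
            "share-numbers": cbor2.CBORTag(258, sorted(share_numbers)),
            "allocated-size": share_size,
        })
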
@@ -542,27 +707,35 @@ Rejected designs for upload secrets:
 !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
 
 Write data for the indicated share.
-The share number must belong to the storage index.
-The request body is the raw share data (i.e., ``application/octet-stream``).
-*Content-Range* requests are required; for large transfers this allows partially complete uploads to be resumed.
+The share number MUST belong to the storage index.
+The request body MUST be the raw share data (i.e., ``application/octet-stream``).
+The request MUST include a *Content-Range* header field;
+for large transfers this allows partially complete uploads to be resumed.
+
 For example,
 a 1MiB share can be divided in to eight separate 128KiB chunks.
 Each chunk can be uploaded in a separate request.
 Each request can include a *Content-Range* value indicating its placement within the complete share.
 If any one of these requests fails then at most 128KiB of upload work needs to be retried.
 
-The server must recognize when all of the data has been received and mark the share as complete
+The server MUST recognize when all of the data has been received and mark the share as complete
 (which it can do because it was informed of the size when the storage index was initialized).
 
-The request must include a ``X-Tahoe-Authorization`` header that includes the upload secret::
+The request MUST include an ``X-Tahoe-Authorization`` header that includes the upload secret::
 
   X-Tahoe-Authorization: upload-secret <base64-upload-secret>
 
 Responses:
 
-* When a chunk that does not complete the share is successfully uploaded the response is ``OK``.
-  The response body indicates the range of share data that has yet to be uploaded.
-  That is::
+* When a chunk that does not complete the share is successfully uploaded the response MUST be ``OK``.
+  The response body MUST indicate the range of share data that has yet to be uploaded.
+  The response body MUST validate against this CDDL schema::
+
+    {
+      required: [0* {begin: uint, end: uint}]
+    }
+
+  For example::
 
     { "required":
      [ { "begin": <byte position, inclusive>

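The resumable-upload scheme can be sketched as follows (non-normative; ``requests`` is a stand-in HTTP client and the chunk size merely follows the example above)::

    import requests

    CHUNK_SIZE = 128 * 1024

    def upload_share(share_url: str, upload_secret_b64: str, share: bytes) -> None:
        # (The Authorization header carrying the swissnum is elided here.)
        for start in range(0, len(share), CHUNK_SIZE):
            chunk = share[start:start + CHUNK_SIZE]
            last = start + len(chunk) - 1  # Content-Range end is inclusive
            response = requests.patch(
                share_url,
                data=chunk,
                headers={
                    "Content-Type": "application/octet-stream",
                    "Content-Range": f"bytes {start}-{last}/{len(share)}",
                    "X-Tahoe-Authorization": "upload-secret " + upload_secret_b64,
                },
            )
            # 200 OK: more ranges still required; 201 CREATED: share complete.
            assert response.status_code in (200, 201)
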
@@ -573,11 +746,12 @@ Responses:
      ]
    }
 
-* When the chunk that completes the share is successfully uploaded the response is ``CREATED``.
+* When the chunk that completes the share is successfully uploaded the response MUST be ``CREATED``.
 * If the *Content-Range* for a request covers part of the share that has already,
   and the data does not match already written data,
-  the response is ``CONFLICT``.
-  At this point the only thing to do is abort the upload and start from scratch (see below).
+  the response MUST be ``CONFLICT``.
+  In this case the client MUST abort the upload.
+  The client MAY then restart the upload from scratch.
 
 Discussion
 ``````````

@@ -603,34 +777,42 @@ From RFC 7231::
 
 This cancels an *in-progress* upload.
 
-The request must include a ``X-Tahoe-Authorization`` header that includes the upload secret::
+The request MUST include an ``X-Tahoe-Authorization`` header that includes the upload secret::
 
   X-Tahoe-Authorization: upload-secret <base64-upload-secret>
 
-The response code:
-
-* When the upload is still in progress and therefore the abort has succeeded,
-  the response is ``OK``.
-  Future uploads can start from scratch with no pre-existing upload state stored on the server.
-* If the uploaded has already finished, the response is 405 (Method Not Allowed)
-  and no change is made.
+If there is an incomplete upload with a matching upload-secret then the server MUST consider the abort to have succeeded.
+In this case the response MUST be ``OK``.
+The server MUST respond to all future requests as if the operations related to this upload did not take place.
+
+If there is no incomplete upload with a matching upload-secret then the server MUST respond with ``Method Not Allowed`` (405).
+The server MUST make no client-visible changes to its state in this case.
 
 ``POST /storage/v1/immutable/:storage_index/:share_number/corrupt``
 !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
 
-Advise the server the data read from the indicated share was corrupt. The
-request body includes an human-meaningful text string with details about the
-corruption. It also includes potentially important details about the share.
+Advise the server the data read from the indicated share was corrupt.
+The request body includes a human-meaningful text string with details about the corruption.
+It also includes potentially important details about the share.
+The request body MUST validate against this CDDL schema::
+
+  {
+    reason: tstr .size (1..32765)
+  }
+
 For example::
 
   {"reason": "expected hash abcd, got hash efgh"}
 
-.. share-type, storage-index, and share-number are inferred from the URL
+The report pertains to the immutable share with a **storage index** and **share number** given in the request path.
+If the identified **storage index** and **share number** are known to the server then the response SHOULD be accepted and made available to server administrators.
+In this case the response SHOULD be ``OK``.
+If the response is not accepted then the response SHOULD be ``Not Found`` (404).
 
-The response code is OK (200) by default, or NOT FOUND (404) if the share
-couldn't be found.
+Discussion
+``````````
+
+The seemingly odd length limit on ``reason`` is chosen so that the *encoded* representation of the message is limited to 32768.
 
 Reading
 ~~~~~~~

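That length limit is easy to sanity-check: a 32765-byte string encodes in CBOR as a 3-byte text-string header (major type 3 with a 16-bit length) followed by the payload, for exactly 32768 bytes of encoded value (non-normative; assumes the ``cbor2`` library)::

    import cbor2

    assert len(cbor2.dumps("x" * 32765)) == 3 + 32765 == 32768
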
@@ -638,26 +820,36 @@ Reading
 ``GET /storage/v1/immutable/:storage_index/shares``
 !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
 
-Retrieve a list (semantically, a set) indicating all shares available for the
-indicated storage index. For example::
+Retrieve a list (semantically, a set) indicating all shares available for the indicated storage index.
+The response body MUST validate against this CDDL schema::
+
+  #6.258([0*256 uint])
+
+For example::
 
   [1, 5]
 
-An unknown storage index results in an empty list.
+If the **storage index** in the request path is not known to the server then the response MUST include an empty list.
 
 ``GET /storage/v1/immutable/:storage_index/:share_number``
 !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
 
 Read a contiguous sequence of bytes from one share in one bucket.
-The response body is the raw share data (i.e., ``application/octet-stream``).
-The ``Range`` header may be used to request exactly one ``bytes`` range, in which case the response code will be 206 (partial content).
-Interpretation and response behavior is as specified in RFC 7233 § 4.1.
-Multiple ranges in a single request are *not* supported; open-ended ranges are also not supported.
+The response body MUST be the raw share data (i.e., ``application/octet-stream``).
+The ``Range`` header MAY be used to request exactly one ``bytes`` range,
+in which case the response code MUST be ``Partial Content`` (206).
+Interpretation and response behavior MUST be as specified in RFC 7233 § 4.1.
+Multiple ranges in a single request are *not* supported;
+open-ended ranges are also not supported.
+Clients MUST NOT send requests using these features.
 
-If the response reads beyond the end of the data, the response may be shorter than the requested range.
-The resulting ``Content-Range`` header will be consistent with the returned data.
+If the response reads beyond the end of the data,
+the response MUST be shorter than the requested range.
+It MUST contain all data up to the end of the share and then end.
+The resulting ``Content-Range`` header MUST be consistent with the returned data.
 
-If the response to a query is an empty range, the ``NO CONTENT`` (204) response code will be used.
+If the response to a query is an empty range,
+the server MUST send a ``No Content`` (204) response.
 
 Discussion
 ``````````

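A non-normative sketch of a ranged read against this endpoint, again using ``requests`` as a stand-in HTTP client::

    import requests

    def read_share_range(share_url: str, headers: dict, offset: int, size: int) -> bytes:
        # Exactly one closed bytes range, as the specification requires.
        response = requests.get(
            share_url,
            headers={**headers, "Range": f"bytes={offset}-{offset + size - 1}"},
        )
        if response.status_code == 204:  # empty range
            return b""
        assert response.status_code == 206  # Partial Content
        # May be shorter than requested if the range ran past the share's end.
        return response.content
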
@@ -696,13 +888,27 @@ The first write operation on a mutable storage index creates it
 (that is,
 there is no separate "create this storage index" operation as there is for the immutable storage index type).
 
-The request must include ``X-Tahoe-Authorization`` headers with write enabler and lease secrets::
+The request MUST include ``X-Tahoe-Authorization`` headers with write enabler and lease secrets::
 
   X-Tahoe-Authorization: write-enabler <base64-write-enabler-secret>
   X-Tahoe-Authorization: lease-cancel-secret <base64-lease-cancel-secret>
   X-Tahoe-Authorization: lease-renew-secret <base64-lease-renew-secret>
 
-The request body includes test, read, and write vectors for the operation.
+The request body MUST include test, read, and write vectors for the operation.
+The request body MUST validate against this CDDL schema::
+
+  {
+    "test-write-vectors": {
+      0*256 share_number : {
+        "test": [0*30 {"offset": uint, "size": uint, "specimen": bstr}]
+        "write": [* {"offset": uint, "data": bstr}]
+        "new-length": uint / null
+      }
+    }
+    "read-vector": [0*30 {"offset": uint, "size": uint}]
+  }
+  share_number = uint
+
 For example::
 
   {

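A non-normative sketch of composing one of these request bodies with ``cbor2`` (the share number, offsets, and data are invented for the example)::

    import cbor2

    # Test that share 3 currently ends with b"!" at offset 25, then append.
    body = cbor2.dumps({
        "test-write-vectors": {
            3: {
                "test": [{"offset": 25, "size": 1, "specimen": b"!"}],
                "write": [{"offset": 26, "data": b" added data"}],
                "new-length": 37,
            },
        },
        "read-vector": [{"offset": 0, "size": 5}],
    })
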
@@ -725,6 +931,14 @@ For example::
 
 The response body contains a boolean indicating whether the tests all succeed
 (and writes were applied) and a mapping giving read data (pre-write).
+The response body MUST validate against this CDDL schema::
+
+  {
+    "success": bool,
+    "data": {0*256 share_number: [0* bstr]}
+  }
+  share_number = uint
+
 For example::
 
   {

|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
A test vector or read vector that read beyond the boundaries of existing data will return nothing for any bytes past the end.
|
A client MAY send a test vector or read vector to bytes beyond the end of existing data.
|
||||||
As a result, if there is no data at all, an empty bytestring is returned no matter what the offset or length.
|
In this case a server MUST behave as if the test or read vector referred to exactly as much data exists.
|
||||||
|
|
||||||
|
For example,
|
||||||
|
consider the case where the server has 5 bytes of data for a particular share.
|
||||||
|
If a client sends a read vector with an ``offset`` of 1 and a ``size`` of 4 then the server MUST respond with all of the data except the first byte.
|
||||||
|
If a client sends a read vector with the same ``offset`` and a ``size`` of 5 (or any larger value) then the server MUST respond in the same way.
|
||||||
|
|
||||||
|
Similarly,
|
||||||
|
if there is no data at all,
|
||||||
|
an empty byte string is returned no matter what the offset or length.
|
||||||
|
|
||||||
Reading
|
Reading
|
||||||
~~~~~~~
|
~~~~~~~
|
||||||
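These semantics amount to ordinary clamping at the end of the data; a non-normative server-side sketch::

    def apply_read_vector(share: bytes, offset: int, size: int) -> bytes:
        # Python slicing clamps automatically: with a 5-byte share,
        # offset=1/size=4 and offset=1/size=50 both return share[1:5],
        # and an empty share yields b"" for any offset and size.
        return share[offset:offset + size]
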
@@ -746,23 +969,34 @@ Reading
 !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
 
 Retrieve a set indicating all shares available for the indicated storage index.
-For example (this is shown as list, since it will be list for JSON, but will be set for CBOR)::
+The response body MUST validate against this CDDL schema::
+
+  #6.258([0*256 uint])
+
+For example::
 
   [1, 5]
 
 ``GET /storage/v1/mutable/:storage_index/:share_number``
-!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
 
-Read data from the indicated mutable shares, just like ``GET /storage/v1/immutable/:storage_index``
+Read data from the indicated mutable shares, just like ``GET /storage/v1/immutable/:storage_index``.
 
-The ``Range`` header may be used to request exactly one ``bytes`` range, in which case the response code will be 206 (partial content).
-Interpretation and response behavior is as specified in RFC 7233 § 4.1.
-Multiple ranges in a single request are *not* supported; open-ended ranges are also not supported.
+The response body MUST be the raw share data (i.e., ``application/octet-stream``).
+The ``Range`` header MAY be used to request exactly one ``bytes`` range,
+in which case the response code MUST be ``Partial Content`` (206).
+Interpretation and response behavior MUST be as specified in RFC 7233 § 4.1.
+Multiple ranges in a single request are *not* supported;
+open-ended ranges are also not supported.
+Clients MUST NOT send requests using these features.
 
-If the response reads beyond the end of the data, the response may be shorter than the requested range.
-The resulting ``Content-Range`` header will be consistent with the returned data.
+If the response reads beyond the end of the data,
+the response MUST be shorter than the requested range.
+It MUST contain all data up to the end of the share and then end.
+The resulting ``Content-Range`` header MUST be consistent with the returned data.
 
-If the response to a query is an empty range, the ``NO CONTENT`` (204) response code will be used.
+If the response to a query is an empty range,
+the server MUST send a ``No Content`` (204) response.
 
 
 ``POST /storage/v1/mutable/:storage_index/:share_number/corrupt``

@@ -774,6 +1008,9 @@ Just like the immutable version.
 Sample Interactions
 -------------------
 
+This section contains examples of client/server interactions to help illuminate the above specification.
+This section is non-normative.
+
 Immutable Data
 ~~~~~~~~~~~~~~
 

@@ -926,10 +1163,16 @@ otherwise it will read a byte which won't match `b""`::
 
   204 NO CONTENT
 
+.. _Base64: https://www.rfc-editor.org/rfc/rfc4648#section-4
+
+.. _RFC 4648: https://tools.ietf.org/html/rfc4648
+
 .. _RFC 7469: https://tools.ietf.org/html/rfc7469#section-2.4
 
 .. _RFC 7049: https://tools.ietf.org/html/rfc7049#section-4
 
+.. _RFC 9110: https://tools.ietf.org/html/rfc9110
+
 .. _CBOR: http://cbor.io/
 
 .. [#]

@@ -974,7 +1217,7 @@ otherwise it will read a byte which won't match `b""`::
       spki_encoded = urlsafe_b64encode(spki_sha256)
       assert spki_encoded == tub_id
 
-   Note we use `base64url`_ rather than the Foolscap- and Tahoe-LAFS-preferred Base32.
+   Note we use `unpadded base64url`_ rather than the Foolscap- and Tahoe-LAFS-preferred Base32.
 
 .. [#]
    https://www.cvedetails.com/cve/CVE-2017-5638/

@@ -985,6 +1228,6 @@ otherwise it will read a byte which won't match `b""`::
 .. [#]
    https://efail.de/
 
-.. _base64url: https://tools.ietf.org/html/rfc7515#appendix-C
+.. _unpadded base64url: https://tools.ietf.org/html/rfc7515#appendix-C
 
 .. _attacking SHA1: https://en.wikipedia.org/wiki/SHA-1#Attacks

@@ -267,7 +267,7 @@ How well does this design meet the goals?
    value, so there are no opportunities for staleness
 9. monotonicity: VERY: the single point of access also protects against
    retrograde motion
 
 
 
 Confidentiality leaks in the storage servers

@@ -332,8 +332,9 @@ MDMF design rules allow for efficient random-access reads from the middle of
 the file, which would give the index something useful to point at.
 
 The current SDMF design generates a new RSA public/private keypair for each
-directory. This takes considerable time and CPU effort, generally one or two
-seconds per directory. We have designed (but not yet built) a DSA-based
+directory. This takes some time and CPU effort (around 100 milliseconds on a
+relatively high-end 2021 laptop) per directory.
+We have designed (but not yet built) a DSA-based
 mutable file scheme which will use shared parameters to reduce the
 directory-creation effort to a bare minimum (picking a random number instead
 of generating two random primes).

@@ -363,7 +364,7 @@ single child, looking up a single child) would require pulling or pushing a
 lot of unrelated data, increasing network overhead (and necessitating
 test-and-set semantics for the modification side, which increases the chances
 that a user operation will fail, making it more challenging to provide
 promises of atomicity to the user).
 
 It would also make it much more difficult to enable the delegation
 ("sharing") of specific directories. Since each aggregate "realm" provides

@@ -469,4 +470,3 @@ Preventing delegation between communication parties is just as pointless as
 asking Bob to forget previously accessed files. However, there may be value
 to configuring the UI to ask Carol to not share files with Bob, or to
 removing all files from Bob's view at the same time his access is revoked.
-

@@ -33,20 +33,11 @@ a mean of 10kB and a max of 100MB, so filesize=min(int(1.0/random(.0002)),1e8)
 
 
 """
+from __future__ import annotations
 
 import os, sys, httplib, binascii
 import urllib, json, random, time, urlparse
 
-try:
-    from typing import Dict
-except ImportError:
-    pass
-
-# Python 2 compatibility
-from future.utils import PY2
-if PY2:
-    from future.builtins import str  # noqa: F401
-
 if sys.argv[1] == "--stats":
     statsfiles = sys.argv[2:]
     # gather stats every 10 seconds, do a moving-window average of the last

if sys.argv[1] == "--stats":
|
if sys.argv[1] == "--stats":
|
||||||
statsfiles = sys.argv[2:]
|
statsfiles = sys.argv[2:]
|
||||||
# gather stats every 10 seconds, do a moving-window average of the last
|
# gather stats every 10 seconds, do a moving-window average of the last
|
||||||
@ -54,9 +45,9 @@ if sys.argv[1] == "--stats":
|
|||||||
DELAY = 10
|
DELAY = 10
|
||||||
MAXSAMPLES = 6
|
MAXSAMPLES = 6
|
||||||
totals = []
|
totals = []
|
||||||
last_stats = {} # type: Dict[str, float]
|
last_stats : dict[str, float] = {}
|
||||||
while True:
|
while True:
|
||||||
stats = {} # type: Dict[str, float]
|
stats : dict[str, float] = {}
|
||||||
for sf in statsfiles:
|
for sf in statsfiles:
|
||||||
for line in open(sf, "r").readlines():
|
for line in open(sf, "r").readlines():
|
||||||
name, str_value = line.split(":")
|
name, str_value = line.split(":")
|
||||||
newsfragments/3991.minor (new empty file)
newsfragments/3993.minor (new empty file)
newsfragments/3994.minor (new empty file)
newsfragments/3996.minor (new empty file)
newsfragments/3997.installation (new file, 1 line):
@@ -0,0 +1 @@
+Tahoe-LAFS is incompatible with cryptography >= 40 and now declares a requirement on an older version.

nix/collections-extended.nix (new file, 12 lines):
@@ -0,0 +1,12 @@
+# Package a version that's compatible with Python 3.11.  This can go away once
+# https://github.com/mlenzen/collections-extended/pull/199 is merged and
+# included in a version of nixpkgs we depend on.
+{ fetchFromGitHub, collections-extended }:
+collections-extended.overrideAttrs (old: {
+  src = fetchFromGitHub {
+    owner = "mlenzen";
+    repo = "collections-extended";
+    rev = "8b93390636d58d28012b8e9d22334ee64ca37d73";
+    hash = "sha256-e7RCpNsqyS1d3q0E+uaE4UOEQziueYsRkKEvy3gCHt0=";
+  };
+})

|
|||||||
#
|
#
|
||||||
# 8. run `nix-build`. it should succeed. if it does not, seek assistance.
|
# 8. run `nix-build`. it should succeed. if it does not, seek assistance.
|
||||||
#
|
#
|
||||||
{ lib, fetchPypi, buildPythonPackage, rustPlatform }:
|
{ lib, fetchPypi, python, buildPythonPackage, rustPlatform }:
|
||||||
buildPythonPackage rec {
|
buildPythonPackage rec {
|
||||||
pname = "pycddl";
|
pname = "pycddl";
|
||||||
version = "0.4.0";
|
version = "0.4.0";
|
||||||
@ -38,6 +38,12 @@ buildPythonPackage rec {
|
|||||||
sha256 = "sha256-w0CGbPeiXyS74HqZXyiXhvaAMUaIj5onwjl9gWKAjqY=";
|
sha256 = "sha256-w0CGbPeiXyS74HqZXyiXhvaAMUaIj5onwjl9gWKAjqY=";
|
||||||
};
|
};
|
||||||
|
|
||||||
|
# Without this, when building for PyPy, `maturin build` seems to fail to
|
||||||
|
# find the interpreter at all and then fails early in the build process with
|
||||||
|
# an error saying "unsupported Python interpreter". We can easily point
|
||||||
|
# directly at the relevant interpreter, so do that.
|
||||||
|
maturinBuildFlags = [ "--interpreter" python.executable ];
|
||||||
|
|
||||||
nativeBuildInputs = with rustPlatform; [
|
nativeBuildInputs = with rustPlatform; [
|
||||||
maturinBuildHook
|
maturinBuildHook
|
||||||
cargoSetupHook
|
cargoSetupHook
|
||||||
|
133  nix/python-overrides.nix  Normal file
@ -0,0 +1,133 @@
+# Override various Python packages to create a package set that works for
+# Tahoe-LAFS on CPython and PyPy.
+self: super:
+let
+
+  # Run a function on a derivation if and only if we're building for PyPy.
+  onPyPy = f: drv: if super.isPyPy then f drv else drv;
+
+  # Disable a Python package's test suite.
+  dontCheck = drv: drv.overrideAttrs (old: { doInstallCheck = false; });
+
+  # Disable building a Python package's documentation.
+  dontBuildDocs = alsoDisable: drv: (drv.override ({
+    sphinxHook = null;
+  } // alsoDisable)).overrideAttrs ({ outputs, ... }: {
+    outputs = builtins.filter (x: "doc" != x) outputs;
+  });
+
+in {
+  # Some dependencies aren't packaged in nixpkgs so supply our own packages.
+  pycddl = self.callPackage ./pycddl.nix { };
+  txi2p = self.callPackage ./txi2p.nix { };
+
+  # collections-extended is currently broken for Python 3.11 in nixpkgs but
+  # we know where a working version lives.
+  collections-extended = self.callPackage ./collections-extended.nix {
+    inherit (super) collections-extended;
+  };
+
+  # greenlet is incompatible with PyPy but PyPy has a builtin equivalent.
+  # Fixed in nixpkgs in a5f8184fb816a4fd5ae87136838c9981e0d22c67.
+  greenlet = onPyPy (drv: null) super.greenlet;
+
+  # tornado and tk pull in a huge dependency trees for functionality we don't
+  # care about, also tkinter doesn't work on PyPy.
+  matplotlib = super.matplotlib.override { tornado = null; enableTk = false; };
+
+  tqdm = super.tqdm.override {
+    # ibid.
+    tkinter = null;
+    # pandas is only required by the part of the test suite covering
+    # integration with pandas that we don't care about. pandas is a huge
+    # dependency.
+    pandas = null;
+  };
+
+  # The treq test suite depends on httpbin. httpbin pulls in babel (flask ->
+  # jinja2 -> babel) and arrow (brotlipy -> construct -> arrow). babel fails
+  # its test suite and arrow segfaults.
+  treq = onPyPy dontCheck super.treq;
+
+  # the six test suite fails on PyPy because it depends on dbm which the
+  # nixpkgs PyPy build appears to be missing. Maybe fixed in nixpkgs in
+  # a5f8184fb816a4fd5ae87136838c9981e0d22c67.
+  six = onPyPy dontCheck super.six;
+
+  # Building the docs requires sphinx which brings in a dependency on babel,
+  # the test suite of which fails.
+  pyopenssl = onPyPy (dontBuildDocs { sphinx-rtd-theme = null; }) super.pyopenssl;
+
+  # Likewise for beautifulsoup4.
+  beautifulsoup4 = onPyPy (dontBuildDocs {}) super.beautifulsoup4;
+
+  # The autobahn test suite pulls in a vast number of dependencies for
+  # functionality we don't care about. It might be nice to *selectively*
+  # disable just some of it but this is easier.
+  autobahn = onPyPy dontCheck super.autobahn;
+
+  # and python-dotenv tests pulls in a lot of dependencies, including jedi,
+  # which does not work on PyPy.
+  python-dotenv = onPyPy dontCheck super.python-dotenv;
+
+  # Upstream package unaccountably includes a sqlalchemy dependency ... but
+  # the project has no such dependency. Fixed in nixpkgs in
+  # da10e809fff70fbe1d86303b133b779f09f56503.
+  aiocontextvars = super.aiocontextvars.override { sqlalchemy = null; };
+
+  # By default, the sphinx docs are built, which pulls in a lot of
+  # dependencies - including jedi, which does not work on PyPy.
+  hypothesis =
+    (let h = super.hypothesis;
+     in
+       if (h.override.__functionArgs.enableDocumentation or false)
+       then h.override { enableDocumentation = false; }
+       else h).overrideAttrs ({ nativeBuildInputs, ... }: {
+         # The nixpkgs expression is missing the tzdata check input.
+         nativeBuildInputs = nativeBuildInputs ++ [ super.tzdata ];
+       });
+
+  # flaky's test suite depends on nose and nose appears to have Python 3
+  # incompatibilities (it includes `print` statements, for example).
+  flaky = onPyPy dontCheck super.flaky;
+
+  # Replace the deprecated way of running the test suite with the modern way.
+  # This also drops a bunch of unnecessary build-time dependencies, some of
+  # which are broken on PyPy. Fixed in nixpkgs in
+  # 5feb5054bb08ba779bd2560a44cf7d18ddf37fea.
+  zfec = (super.zfec.override {
+    setuptoolsTrial = null;
+  }).overrideAttrs (old: {
+    checkPhase = "trial zfec";
+  });
+
+  # collections-extended is packaged with poetry-core. poetry-core test suite
+  # uses virtualenv and virtualenv test suite fails on PyPy.
+  poetry-core = onPyPy dontCheck super.poetry-core;
+
+  # The test suite fails with some rather irrelevant (to us) string comparison
+  # failure on PyPy. Probably a PyPy bug but doesn't seem like we should
+  # care.
+  rich = onPyPy dontCheck super.rich;
+
+  # The pyutil test suite fails in some ... test ... for some deprecation
+  # functionality we don't care about.
+  pyutil = onPyPy dontCheck super.pyutil;
+
+  # testCall1 fails fairly inscrutibly on PyPy. Perhaps someone can fix that,
+  # or we could at least just skip that one test. Probably better to fix it
+  # since we actually depend directly and significantly on Foolscap.
+  foolscap = onPyPy dontCheck super.foolscap;
+
+  # Fixed by nixpkgs PR https://github.com/NixOS/nixpkgs/pull/222246
+  psutil = super.psutil.overrideAttrs ({ pytestFlagsArray, disabledTests, ...}: {
+    # Upstream already disables some tests but there are even more that have
+    # build impurities that come from build system hardware configuration.
+    # Skip them too.
+    pytestFlagsArray = [ "-v" ] ++ pytestFlagsArray;
+    disabledTests = disabledTests ++ [ "sensors_temperatures" ];
+  });
+
+  # CircleCI build systems don't have enough memory to run this test suite.
+  lz4 = dontCheck super.lz4;
+}
@ -34,6 +34,7 @@ let
     magic-wormhole
     netifaces
     psutil
+    pyyaml
     pycddl
     pyrsistent
     pyutil
@ -48,19 +49,15 @@ let
     zope_interface
   ] ++ pickExtraDependencies pythonExtraDependencies extrasNames;
 
-  pythonCheckDependencies = with pythonPackages; [
+  unitTestDependencies = with pythonPackages; [
     beautifulsoup4
     fixtures
     hypothesis
     mock
-    paramiko
     prometheus-client
-    pytest
-    pytest-timeout
-    pytest-twisted
     testtools
-    towncrier
   ];
 
 in
 buildPythonPackage {
   inherit pname version;
@ -68,7 +65,7 @@ buildPythonPackage {
   propagatedBuildInputs = pythonPackageDependencies;
 
   inherit doCheck;
-  checkInputs = pythonCheckDependencies;
+  checkInputs = unitTestDependencies;
   checkPhase = ''
     export TAHOE_LAFS_HYPOTHESIS_PROFILE=ci
     python -m twisted.trial -j $NIX_BUILD_CORES allmydata
2  setup.py
@ -404,7 +404,7 @@ setup(name="tahoe-lafs", # also set in __init__.py
             # disagreeing on what is or is not a lint issue. We can bump
             # this version from time to time, but we will do it
             # intentionally.
-            "pyflakes == 2.2.0",
+            "pyflakes == 3.0.1",
             "coverage ~= 5.0",
             "mock",
             "tox ~= 3.0",
@ -175,8 +175,6 @@ class KeyGenerator(object):
         """I return a Deferred that fires with a (verifyingkey, signingkey)
         pair. The returned key will be 2048 bit"""
         keysize = 2048
-        # RSA key generation for a 2048 bit key takes between 0.8 and 3.2
-        # secs
         signer, verifier = rsa.create_signing_keypair(keysize)
         return defer.succeed( (verifier, signer) )
 
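For context, the pair generated above is an ordinary 2048-bit RSA keypair.
A hedged illustration using the ``cryptography`` library directly (Tahoe's
own ``rsa`` wrapper module is what the code above actually calls)::

    from cryptography.hazmat.primitives.asymmetric import rsa

    # Generate a 2048-bit RSA private ("signing") key and derive the
    # public ("verifying") key, mirroring create_signing_keypair(2048).
    signing_key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
    verifying_key = signing_key.public_key()
    print(verifying_key.key_size)  # 2048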
@ -2,22 +2,12 @@
|
|||||||
Ported to Python 3.
|
Ported to Python 3.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
from __future__ import absolute_import
|
from __future__ import annotations
|
||||||
from __future__ import division
|
|
||||||
from __future__ import print_function
|
|
||||||
from __future__ import unicode_literals
|
|
||||||
|
|
||||||
from future.utils import PY2, native_str
|
from future.utils import native_str
|
||||||
if PY2:
|
|
||||||
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
|
|
||||||
from past.builtins import long, unicode
|
from past.builtins import long, unicode
|
||||||
from six import ensure_str
|
from six import ensure_str
|
||||||
|
|
||||||
try:
|
|
||||||
from typing import List
|
|
||||||
except ImportError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
import os, time, weakref, itertools
|
import os, time, weakref, itertools
|
||||||
|
|
||||||
import attr
|
import attr
|
||||||
@ -915,12 +905,12 @@ class _Accum(object):
|
|||||||
:ivar remaining: The number of bytes still expected.
|
:ivar remaining: The number of bytes still expected.
|
||||||
:ivar ciphertext: The bytes accumulated so far.
|
:ivar ciphertext: The bytes accumulated so far.
|
||||||
"""
|
"""
|
||||||
remaining = attr.ib(validator=attr.validators.instance_of(int)) # type: int
|
remaining : int = attr.ib(validator=attr.validators.instance_of(int))
|
||||||
ciphertext = attr.ib(default=attr.Factory(list)) # type: List[bytes]
|
ciphertext : list[bytes] = attr.ib(default=attr.Factory(list))
|
||||||
|
|
||||||
def extend(self,
|
def extend(self,
|
||||||
size, # type: int
|
size, # type: int
|
||||||
ciphertext, # type: List[bytes]
|
ciphertext, # type: list[bytes]
|
||||||
):
|
):
|
||||||
"""
|
"""
|
||||||
Accumulate some more ciphertext.
|
Accumulate some more ciphertext.
|
||||||
|
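The same attrs class shown standalone, to make the conversion concrete (a
minimal sketch; only the two attributes from the hunk are included)::

    from __future__ import annotations

    import attr

    @attr.s
    class _Accum(object):
        # PEP 526 annotations replace the old "# type:" comments; the
        # attr.ib() definitions themselves are unchanged.
        remaining : int = attr.ib(validator=attr.validators.instance_of(int))
        ciphertext : list[bytes] = attr.ib(default=attr.Factory(list))

    accum = _Accum(remaining=16)
    accum.ciphertext.append(b"\x00" * 16)
    print(accum.remaining, len(accum.ciphertext))  # 16 1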
@ -2,24 +2,13 @@
 Ported to Python 3.
 """
 
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-from __future__ import unicode_literals
+from __future__ import annotations
 
-
-from future.utils import PY2
-if PY2:
-    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
 from past.builtins import long
 from six import ensure_text
 
 import time, os.path, textwrap
+from typing import Any, Union
 
-try:
-    from typing import Any, Dict, Union
-except ImportError:
-    pass
-
 from zope.interface import implementer
 from twisted.application import service
@ -161,11 +150,11 @@ class IntroducerService(service.MultiService, Referenceable):
     # v1 is the original protocol, added in 1.0 (but only advertised starting
     # in 1.3), removed in 1.12. v2 is the new signed protocol, added in 1.10
     # TODO: reconcile bytes/str for keys
-    VERSION = {
+    VERSION : dict[Union[bytes, str], Any]= {
         #"http://allmydata.org/tahoe/protocols/introducer/v1": { },
         b"http://allmydata.org/tahoe/protocols/introducer/v2": { },
         b"application-version": allmydata.__full_version__.encode("utf-8"),
-        } # type: Dict[Union[bytes, str], Any]
+        }
 
     def __init__(self):
         service.MultiService.__init__(self)
@ -4,14 +4,8 @@ a node for Tahoe-LAFS.
 
 Ported to Python 3.
 """
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-from __future__ import unicode_literals
+from __future__ import annotations
 
-from future.utils import PY2
-if PY2:
-    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
 from six import ensure_str, ensure_text
 
 import json
@ -23,11 +17,7 @@ import errno
 from base64 import b32decode, b32encode
 from errno import ENOENT, EPERM
 from warnings import warn
+from typing import Union
 
-try:
-    from typing import Union
-except ImportError:
-    pass
-
 import attr
 
@ -281,8 +271,7 @@ def _error_about_old_config_files(basedir, generated_files):
     raise e
 
 
-def ensure_text_and_abspath_expanduser_unicode(basedir):
-    # type: (Union[bytes, str]) -> str
+def ensure_text_and_abspath_expanduser_unicode(basedir: Union[bytes, str]) -> str:
     return abspath_expanduser_unicode(ensure_text(basedir))
 
 
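This pattern recurs throughout the commit: a Python-2-era "# type:" comment
on the ``def`` line becomes an inline PEP 484 annotation. A standalone toy
version (the function here is hypothetical, not from the codebase)::

    from typing import Union

    # Before:
    #   def normalize(basedir):
    #       # type: (Union[bytes, str]) -> str
    # After:
    def normalize(basedir: Union[bytes, str]) -> str:
        """Coerce a bytes-or-text path to text."""
        return basedir.decode("utf-8") if isinstance(basedir, bytes) else basedir

    print(normalize(b"/tmp/node"))  # /tmp/node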
|
@ -255,9 +255,9 @@ def do_admin(options):
|
|||||||
return f(so)
|
return f(so)
|
||||||
|
|
||||||
|
|
||||||
subCommands = [
|
subCommands : SubCommands = [
|
||||||
("admin", None, AdminCommand, "admin subcommands: use 'tahoe admin' for a list"),
|
("admin", None, AdminCommand, "admin subcommands: use 'tahoe admin' for a list"),
|
||||||
] # type: SubCommands
|
]
|
||||||
|
|
||||||
dispatch = {
|
dispatch = {
|
||||||
"admin": do_admin,
|
"admin": do_admin,
|
||||||
|
@ -1,22 +1,10 @@
 """
 Ported to Python 3.
 """
-from __future__ import unicode_literals
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-
-from future.utils import PY2
-if PY2:
-    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
-
 
 import os.path, re, fnmatch
 
-try:
-    from allmydata.scripts.types_ import SubCommands, Parameters
-except ImportError:
-    pass
+from allmydata.scripts.types_ import SubCommands, Parameters
 
 from twisted.python import usage
 from allmydata.scripts.common import get_aliases, get_default_nodedir, \
@ -29,14 +17,14 @@ NODEURL_RE=re.compile("http(s?)://([^:]*)(:([1-9][0-9]*))?")
 _default_nodedir = get_default_nodedir()
 
 class FileStoreOptions(BaseOptions):
-    optParameters = [
+    optParameters : Parameters = [
         ["node-url", "u", None,
          "Specify the URL of the Tahoe gateway node, such as "
          "'http://127.0.0.1:3456'. "
          "This overrides the URL found in the --node-directory ."],
         ["dir-cap", None, None,
          "Specify which dirnode URI should be used as the 'tahoe' alias."]
-        ] # type: Parameters
+        ]
 
     def postOptions(self):
         self["quiet"] = self.parent["quiet"]
@ -484,7 +472,7 @@ class DeepCheckOptions(FileStoreOptions):
     (which must be a directory), like 'tahoe check' but for multiple files.
     Optionally repair any problems found."""
 
-subCommands = [
+subCommands : SubCommands = [
     ("mkdir", None, MakeDirectoryOptions, "Create a new directory."),
     ("add-alias", None, AddAliasOptions, "Add a new alias cap."),
     ("create-alias", None, CreateAliasOptions, "Create a new alias cap."),
@ -503,7 +491,7 @@ subCommands = [
     ("check", None, CheckOptions, "Check a single file or directory."),
     ("deep-check", None, DeepCheckOptions, "Check all files/directories reachable from a starting point."),
     ("status", None, TahoeStatusCommand, "Various status information."),
-    ] # type: SubCommands
+    ]
 
 def mkdir(options):
     from allmydata.scripts import tahoe_mkdir
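For readers unfamiliar with Twisted's option parsing, a minimal sketch of
how an ``optParameters`` table like the one above is consumed (the class
and values here are made up)::

    from twisted.python import usage

    class DemoOptions(usage.Options):
        optParameters = [
            # [long name, short name, default, description]
            ["node-url", "u", None, "URL of the Tahoe gateway node."],
        ]

    opts = DemoOptions()
    opts.parseOptions(["--node-url", "http://127.0.0.1:3456"])
    print(opts["node-url"])  # http://127.0.0.1:3456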
@ -4,29 +4,13 @@
 Ported to Python 3.
 """
 
-from __future__ import unicode_literals
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-
-from future.utils import PY2
-if PY2:
-    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
-else:
-    from typing import Union
-
+from typing import Union, Optional
 
 import os, sys, textwrap
 import codecs
 from os.path import join
 import urllib.parse
 
-try:
-    from typing import Optional
-    from .types_ import Parameters
-except ImportError:
-    pass
-
 from yaml import (
     safe_dump,
 )
@ -37,6 +21,8 @@ from allmydata.util.assertutil import precondition
 from allmydata.util.encodingutil import quote_output, \
      quote_local_unicode_path, argv_to_abspath
 from allmydata.scripts.default_nodedir import _default_nodedir
+from .types_ import Parameters
+
 
 def get_default_nodedir():
     return _default_nodedir
@ -59,7 +45,7 @@ class BaseOptions(usage.Options):
     def opt_version(self):
         raise usage.UsageError("--version not allowed on subcommands")
 
-    description = None # type: Optional[str]
+    description : Optional[str] = None
     description_unwrapped = None # type: Optional[str]
 
     def __str__(self):
@ -80,10 +66,10 @@ class BaseOptions(usage.Options):
 class BasedirOptions(BaseOptions):
     default_nodedir = _default_nodedir
 
-    optParameters = [
+    optParameters : Parameters = [
         ["basedir", "C", None, "Specify which Tahoe base directory should be used. [default: %s]"
          % quote_local_unicode_path(_default_nodedir)],
-    ] # type: Parameters
+    ]
 
     def parseArgs(self, basedir=None):
         # This finds the node-directory option correctly even if we are in a subcommand.
@ -283,9 +269,8 @@ def get_alias(aliases, path_unicode, default):
                        quote_output(alias))
     return uri.from_string_dirnode(aliases[alias]).to_string(), path[colon+1:]
 
-def escape_path(path):
-    # type: (Union[str,bytes]) -> str
-    u"""
+def escape_path(path: Union[str, bytes]) -> str:
+    """
     Return path quoted to US-ASCII, valid URL characters.
 
     >>> path = u'/føö/bar/☃'
@ -302,9 +287,4 @@ def escape_path(path):
         ]),
         "ascii"
     )
-    # Eventually (i.e. as part of Python 3 port) we want this to always return
-    # Unicode strings. However, to reduce diff sizes in the short term it'll
-    # return native string (i.e. bytes) on Python 2.
-    if PY2:
-        result = result.encode("ascii").__native__()
     return result
@ -1,25 +1,11 @@
-# Ported to Python 3
-
-from __future__ import print_function
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import unicode_literals
-
-from future.utils import PY2
-if PY2:
-    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
-
 import io
 import os
 
-try:
-    from allmydata.scripts.types_ import (
-        SubCommands,
-        Parameters,
-        Flags,
-    )
-except ImportError:
-    pass
+from allmydata.scripts.types_ import (
+    SubCommands,
+    Parameters,
+    Flags,
+)
 
 from twisted.internet import reactor, defer
 from twisted.python.usage import UsageError
@ -48,7 +34,7 @@ def write_tac(basedir, nodetype):
     fileutil.write(os.path.join(basedir, "tahoe-%s.tac" % (nodetype,)), dummy_tac)
 
 
-WHERE_OPTS = [
+WHERE_OPTS : Parameters = [
     ("location", None, None,
      "Server location to advertise (e.g. tcp:example.org:12345)"),
     ("port", None, None,
@ -57,29 +43,29 @@ WHERE_OPTS = [
      "Hostname to automatically set --location/--port when --listen=tcp"),
     ("listen", None, "tcp",
     "Comma-separated list of listener types (tcp,tor,i2p,none)."),
-] # type: Parameters
+]
 
-TOR_OPTS = [
+TOR_OPTS : Parameters = [
     ("tor-control-port", None, None,
      "Tor's control port endpoint descriptor string (e.g. tcp:127.0.0.1:9051 or unix:/var/run/tor/control)"),
     ("tor-executable", None, None,
     "The 'tor' executable to run (default is to search $PATH)."),
-] # type: Parameters
+]
 
-TOR_FLAGS = [
+TOR_FLAGS : Flags = [
     ("tor-launch", None, "Launch a tor instead of connecting to a tor control port."),
-] # type: Flags
+]
 
-I2P_OPTS = [
+I2P_OPTS : Parameters = [
     ("i2p-sam-port", None, None,
     "I2P's SAM API port endpoint descriptor string (e.g. tcp:127.0.0.1:7656)"),
     ("i2p-executable", None, None,
     "(future) The 'i2prouter' executable to run (default is to search $PATH)."),
-] # type: Parameters
+]
 
-I2P_FLAGS = [
+I2P_FLAGS : Flags = [
     ("i2p-launch", None, "(future) Launch an I2P router instead of connecting to a SAM API port."),
-] # type: Flags
+]
 
 def validate_where_options(o):
     if o['listen'] == "none":
@ -508,11 +494,11 @@ def create_introducer(config):
     defer.returnValue(0)
 
 
-subCommands = [
+subCommands : SubCommands = [
    ("create-node", None, CreateNodeOptions, "Create a node that acts as a client, server or both."),
    ("create-client", None, CreateClientOptions, "Create a client node (with storage initially disabled)."),
    ("create-introducer", None, CreateIntroducerOptions, "Create an introducer node."),
-] # type: SubCommands
+]
 
 dispatch = {
     "create-node": create_node,
@ -1,19 +1,8 @@
 """
 Ported to Python 3.
 """
-from __future__ import unicode_literals
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-
-from future.utils import PY2, bchr
-if PY2:
-    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
-
-try:
-    from allmydata.scripts.types_ import SubCommands
-except ImportError:
-    pass
+from future.utils import bchr
 
 import struct, time, os, sys
 
@ -31,6 +20,7 @@ from allmydata.mutable.common import NeedMoreDataError
 from allmydata.immutable.layout import ReadBucketProxy
 from allmydata.util import base32
 from allmydata.util.encodingutil import quote_output
+from allmydata.scripts.types_ import SubCommands
 
 class DumpOptions(BaseOptions):
     def getSynopsis(self):
@ -1076,9 +1066,9 @@ def do_debug(options):
     return f(so)
 
 
-subCommands = [
+subCommands : SubCommands = [
     ("debug", None, DebugCommand, "debug subcommands: use 'tahoe debug' for a list."),
-    ] # type: SubCommands
+    ]
 
 dispatch = {
     "debug": do_debug,
@ -1,28 +1,15 @@
-from __future__ import print_function
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import unicode_literals
-
-from future.utils import PY2
-if PY2:
-    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
-
 import os, sys
-from six.moves import StringIO
+from io import StringIO
 from past.builtins import unicode
 import six
 
-try:
-    from allmydata.scripts.types_ import SubCommands
-except ImportError:
-    pass
-
 from twisted.python import usage
 from twisted.internet import defer, task, threads
 
 from allmydata.scripts.common import get_default_nodedir
 from allmydata.scripts import debug, create_node, cli, \
     admin, tahoe_run, tahoe_invite
+from allmydata.scripts.types_ import SubCommands
 from allmydata.util.encodingutil import quote_local_unicode_path, argv_to_unicode
 from allmydata.util.eliotutil import (
     opt_eliot_destination,
@ -47,9 +34,9 @@ if _default_nodedir:
     NODEDIR_HELP += " [default for most commands: " + quote_local_unicode_path(_default_nodedir) + "]"
 
 
-process_control_commands = [
+process_control_commands : SubCommands = [
     ("run", None, tahoe_run.RunOptions, "run a node without daemonizing"),
-] # type: SubCommands
+]
 
 
 class Options(usage.Options):
@ -1,19 +1,6 @@
 """
 Ported to Python 3.
 """
-from __future__ import unicode_literals
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-
-from future.utils import PY2
-if PY2:
-    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
-
-try:
-    from allmydata.scripts.types_ import SubCommands
-except ImportError:
-    pass
 
 from twisted.python import usage
 from twisted.internet import defer, reactor
@ -21,6 +8,7 @@ from twisted.internet import defer, reactor
 from allmydata.util.encodingutil import argv_to_abspath
 from allmydata.util import jsonbytes as json
 from allmydata.scripts.common import get_default_nodedir, get_introducer_furl
+from allmydata.scripts.types_ import SubCommands
 from allmydata.client import read_config
 
 
@ -112,10 +100,10 @@ def invite(options):
         print("Completed successfully", file=out)
 
 
-subCommands = [
+subCommands : SubCommands = [
     ("invite", None, InviteOptions,
      "Invite a new node to this grid"),
-] # type: SubCommands
+]
 
 dispatch = {
     "invite": invite,
@ -70,15 +70,14 @@ class ClientException(Exception):
 # indicates a set.
 _SCHEMAS = {
     "get_version": Schema(
+        # Note that the single-quoted (`'`) string keys in this schema
+        # represent *byte* strings - per the CDDL specification. Text strings
+        # are represented using strings with *double* quotes (`"`).
         """
         response = {'http://allmydata.org/tahoe/protocols/storage/v1' => {
                 'maximum-immutable-share-size' => uint
                 'maximum-mutable-share-size' => uint
                 'available-space' => uint
-                'tolerates-immutable-read-overrun' => bool
-                'delete-mutable-shares-with-zero-length-writev' => bool
-                'fills-holes-with-zero-bytes' => bool
-                'prevents-read-past-end-of-share-data' => bool
                 }
                 'application-version' => bstr
         }
@ -446,6 +445,15 @@ class StorageClientGeneral(object):
         decoded_response = yield self._client.decode_cbor(
             response, _SCHEMAS["get_version"]
         )
+        # Add some features we know are true because the HTTP API
+        # specification requires them and because other parts of the storage
+        # client implementation assumes they will be present.
+        decoded_response[b"http://allmydata.org/tahoe/protocols/storage/v1"].update({
+            b'tolerates-immutable-read-overrun': True,
+            b'delete-mutable-shares-with-zero-length-writev': True,
+            b'fills-holes-with-zero-bytes': True,
+            b'prevents-read-past-end-of-share-data': True,
+        })
         returnValue(decoded_response)
 
     @inlineCallbacks
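A small aside on those byte-string keys: when the CBOR response is decoded,
the map is keyed by *bytes*, not text, which is why the client code above
indexes with ``b"..."`` literals. A hedged round-trip sketch (assuming the
``cbor2`` library; the values are made up)::

    import cbor2

    encoded = cbor2.dumps({
        b"http://allmydata.org/tahoe/protocols/storage/v1": {
            b"maximum-immutable-share-size": 2 ** 30,
        },
        b"application-version": b"tahoe-lafs/x.y",
    })
    decoded = cbor2.loads(encoded)
    # Byte-string keys survive the round trip unchanged.
    print(decoded[b"http://allmydata.org/tahoe/protocols/storage/v1"])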
@ -28,7 +28,7 @@ def get_content_type(headers: Headers) -> Optional[str]:
 
 
 def swissnum_auth_header(swissnum: bytes) -> bytes:
-    """Return value for ``Authentication`` header."""
+    """Return value for ``Authorization`` header."""
     return b"Tahoe-LAFS " + b64encode(swissnum).strip()
 
 
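The docstring fix is cosmetic (the HTTP header in question is
``Authorization``); the computed value itself is unchanged. A quick usage
sketch with a made-up swissnum::

    from base64 import b64encode

    def swissnum_auth_header(swissnum: bytes) -> bytes:
        """Return value for ``Authorization`` header."""
        return b"Tahoe-LAFS " + b64encode(swissnum).strip()

    print(swissnum_auth_header(b"\x01\x02\x03\x04"))  # b'Tahoe-LAFS AQIDBA=='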
@ -4,7 +4,7 @@ HTTP server for storage.
 
 from __future__ import annotations
 
-from typing import Dict, List, Set, Tuple, Any, Callable, Union, cast
+from typing import Any, Callable, Union, cast
 from functools import wraps
 from base64 import b64decode
 import binascii
@ -67,8 +67,8 @@ class ClientSecretsException(Exception):
 
 
 def _extract_secrets(
-    header_values, required_secrets
-): # type: (List[str], Set[Secrets]) -> Dict[Secrets, bytes]
+    header_values: list[str], required_secrets: set[Secrets]
+) -> dict[Secrets, bytes]:
     """
     Given list of values of ``X-Tahoe-Authorization`` headers, and required
     secrets, return dictionary mapping secrets to decoded values.
@ -173,7 +173,7 @@ class UploadsInProgress(object):
     _uploads: dict[bytes, StorageIndexUploads] = Factory(dict)
 
     # Map BucketWriter to (storage index, share number)
-    _bucketwriters: dict[BucketWriter, Tuple[bytes, int]] = Factory(dict)
+    _bucketwriters: dict[BucketWriter, tuple[bytes, int]] = Factory(dict)
 
     def add_write_bucket(
         self,
@ -273,7 +273,7 @@ _SCHEMAS = {
     "advise_corrupt_share": Schema(
         """
         request = {
-          reason: tstr
+          reason: tstr .size (1..32765)
         }
         """
     ),
@ -592,7 +592,26 @@ class HTTPServer(object):
     @_authorized_route(_app, set(), "/storage/v1/version", methods=["GET"])
     def version(self, request, authorization):
         """Return version information."""
-        return self._send_encoded(request, self._storage_server.get_version())
+        return self._send_encoded(request, self._get_version())
+
+    def _get_version(self) -> dict[bytes, Any]:
+        """
+        Get the HTTP version of the storage server's version response.
+
+        This differs from the Foolscap version by omitting certain obsolete
+        fields.
+        """
+        v = self._storage_server.get_version()
+        v1_identifier = b"http://allmydata.org/tahoe/protocols/storage/v1"
+        v1 = v[v1_identifier]
+        return {
+            v1_identifier: {
+                b"maximum-immutable-share-size": v1[b"maximum-immutable-share-size"],
+                b"maximum-mutable-share-size": v1[b"maximum-mutable-share-size"],
+                b"available-space": v1[b"available-space"],
+            },
+            b"application-version": v[b"application-version"],
+        }
 
     ##### Immutable APIs #####
 
@ -779,7 +798,9 @@ class HTTPServer(object):
         # The reason can be a string with explanation, so in theory it could be
         # longish?
         info = await self._read_encoded(
-            request, _SCHEMAS["advise_corrupt_share"], max_size=32768,
+            request,
+            _SCHEMAS["advise_corrupt_share"],
+            max_size=32768,
         )
         bucket.advise_corrupt_share(info["reason"].encode("utf-8"))
         return b""
@ -954,7 +975,7 @@ def listen_tls(
     endpoint: IStreamServerEndpoint,
     private_key_path: FilePath,
     cert_path: FilePath,
-) -> Deferred[Tuple[DecodedURL, IListeningPort]]:
+) -> Deferred[tuple[DecodedURL, IListeningPort]]:
     """
     Start a HTTPS storage server on the given port, return the NURL and the
     listening port.
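One plausible reading of the new ``.size (1..32765)`` bound alongside
``max_size=32768`` (an assumption about the rationale; the diff itself does
not explain the numbers): a CBOR text string of 32765 characters encodes to
a 3-byte header plus the UTF-8 payload, so the string alone costs exactly
32768 bytes. A quick check, assuming the ``cbor2`` library::

    import cbor2

    reason = "x" * 32765
    # 1 byte of major type, 2 bytes of length, 32765 bytes of payload.
    print(len(cbor2.dumps(reason)))  # 32768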
@ -2,19 +2,7 @@
 Ported to Python 3.
 """
 
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-from __future__ import unicode_literals
-
-from future.utils import PY2
-if PY2:
-    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
-
-try:
-    from typing import Union
-except ImportError:
-    pass
+from typing import Union
 
 import attr
 
@ -95,8 +83,7 @@ class HashedLeaseSerializer(object):
             cls._hash_secret,
         )
 
-    def serialize(self, lease):
-        # type: (Union[LeaseInfo, HashedLeaseInfo]) -> bytes
+    def serialize(self, lease: Union[LeaseInfo, HashedLeaseInfo]) -> bytes:
         if isinstance(lease, LeaseInfo):
             # v2 of the immutable schema stores lease secrets hashed. If
             # we're given a LeaseInfo then it holds plaintext secrets. Hash
|
|||||||
Ported to Python 3.
|
Ported to Python 3.
|
||||||
"""
|
"""
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
from future.utils import bytes_to_native_str
|
from future.utils import bytes_to_native_str
|
||||||
from typing import Dict, Tuple, Iterable
|
from typing import Iterable, Any
|
||||||
|
|
||||||
import os, re
|
import os, re
|
||||||
|
|
||||||
@ -823,7 +824,7 @@ class FoolscapStorageServer(Referenceable): # type: ignore # warner/foolscap#78
|
|||||||
self._server = storage_server
|
self._server = storage_server
|
||||||
|
|
||||||
# Canaries and disconnect markers for BucketWriters created via Foolscap:
|
# Canaries and disconnect markers for BucketWriters created via Foolscap:
|
||||||
self._bucket_writer_disconnect_markers = {} # type: Dict[BucketWriter,Tuple[IRemoteReference, object]]
|
self._bucket_writer_disconnect_markers : dict[BucketWriter, tuple[IRemoteReference, Any]] = {}
|
||||||
|
|
||||||
self._server.register_bucket_writer_close_handler(self._bucket_writer_closed)
|
self._server.register_bucket_writer_close_handler(self._bucket_writer_closed)
|
||||||
|
|
||||||
|
@ -1,21 +1,11 @@
 """
 Ported to Python 3.
 """
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-from __future__ import unicode_literals
+from __future__ import annotations
 
-from future.utils import PY2
-if PY2:
-    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
-
 import os
 
-try:
-    from typing import Any, List, Tuple
-except ImportError:
-    pass
+from typing import Any
 
 from twisted.trial import unittest
 from twisted.internet import defer, reactor
@ -356,8 +346,7 @@ class Config(unittest.TestCase):
         self.assertIn("is not empty", err)
         self.assertIn("To avoid clobbering anything, I am going to quit now", err)
 
-def fake_config(testcase, module, result):
-    # type: (unittest.TestCase, Any, Any) -> List[Tuple]
+def fake_config(testcase: unittest.TestCase, module: Any, result: Any) -> list[tuple]:
     """
     Monkey-patch a fake configuration function into the given module.
 
@ -3,18 +3,6 @@ Tools aimed at the interaction between tests and Eliot.
 
 Ported to Python 3.
 """
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-from __future__ import unicode_literals
-
-# Python 2 compatibility
-# Can't use `builtins.str` because it's not JSON encodable:
-# `exceptions.TypeError: <class 'future.types.newstr.newstr'> is not JSON-encodeable`
-from past.builtins import unicode as str
-from future.utils import PY2
-if PY2:
-    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, max, min # noqa: F401
 
 from six import ensure_text
 
@ -23,11 +11,7 @@ __all__ = [
     "EliotLoggedRunTest",
 ]
 
-try:
-    from typing import Callable
-except ImportError:
-    pass
+from typing import Callable
 
 from functools import (
     partial,
     wraps,
@ -147,8 +131,8 @@ class EliotLoggedRunTest(object):
 
 
 def with_logging(
-    test_id, # type: str
-    test_method, # type: Callable
+    test_id: str,
+    test_method: Callable,
 ):
     """
     Decorate a test method with additional log-related behaviors.
@ -1,34 +1,23 @@
 """
-Ported to Python 3.
+This contains a test harness that creates a full Tahoe grid in a single
+process (actually in a single MultiService) which does not use the network.
+It does not use an Introducer, and there are no foolscap Tubs. Each storage
+server puts real shares on disk, but is accessed through loopback
+RemoteReferences instead of over serialized SSL. It is not as complete as
+the common.SystemTestMixin framework (which does use the network), but
+should be considerably faster: on my laptop, it takes 50-80ms to start up,
+whereas SystemTestMixin takes close to 2s.
+
+This should be useful for tests which want to examine and/or manipulate the
+uploaded shares, checker/verifier/repairer tests, etc. The clients have no
+Tubs, so it is not useful for tests that involve a Helper.
 """
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-from __future__ import unicode_literals
 
-# This contains a test harness that creates a full Tahoe grid in a single
-# process (actually in a single MultiService) which does not use the network.
-# It does not use an Introducer, and there are no foolscap Tubs. Each storage
-# server puts real shares on disk, but is accessed through loopback
-# RemoteReferences instead of over serialized SSL. It is not as complete as
-# the common.SystemTestMixin framework (which does use the network), but
-# should be considerably faster: on my laptop, it takes 50-80ms to start up,
-# whereas SystemTestMixin takes close to 2s.
+from __future__ import annotations
 
-# This should be useful for tests which want to examine and/or manipulate the
-# uploaded shares, checker/verifier/repairer tests, etc. The clients have no
-# Tubs, so it is not useful for tests that involve a Helper.
-
-from future.utils import PY2
-if PY2:
-    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
-from past.builtins import unicode
 from six import ensure_text
 
-try:
-    from typing import Dict, Callable
-except ImportError:
-    pass
+from typing import Callable
 
 import os
 from base64 import b32encode
@ -251,7 +240,7 @@ def create_no_network_client(basedir):
     :return: a Deferred yielding an instance of _Client subclass which
         does no actual networking but has the same API.
     """
-    basedir = abspath_expanduser_unicode(unicode(basedir))
+    basedir = abspath_expanduser_unicode(str(basedir))
     fileutil.make_dirs(os.path.join(basedir, "private"), 0o700)
 
     from allmydata.client import read_config
@ -577,8 +566,7 @@ class GridTestMixin(object):
                 pass
         return sorted(shares)
 
-    def copy_shares(self, uri):
-        # type: (bytes) -> Dict[bytes, bytes]
+    def copy_shares(self, uri: bytes) -> dict[bytes, bytes]:
         """
         Read all of the share files for the given capability from the storage area
         of the storage servers created by ``set_up_grid``.
@ -630,8 +618,7 @@ class GridTestMixin(object):
             with open(i_sharefile, "wb") as f:
                 f.write(corruptdata)
 
-    def corrupt_all_shares(self, uri, corruptor, debug=False):
-        # type: (bytes, Callable[[bytes, bool], bytes], bool) -> None
+    def corrupt_all_shares(self, uri: bytes, corruptor: Callable[[bytes, bool], bytes], debug: bool=False):
         """
         Apply ``corruptor`` to the contents of all share files associated with a
         given capability and replace the share file contents with its result.
@ -1,23 +1,14 @@
 """
 Ported to Python 3.
 """
-from __future__ import print_function
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import unicode_literals
-
-from future.utils import PY2, bchr
-if PY2:
-    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
+from future.utils import bchr
 
 # system-level upload+download roundtrip test, but using shares created from
 # a previous run. This asserts that the current code is capable of decoding
 # shares from a previous version.
 
-try:
-    from typing import Any
-except ImportError:
-    pass
+from typing import Any
 
 import six
 import os
@ -1197,8 +1188,7 @@ class Corruption(_Base, unittest.TestCase):
 
         return d
 
-    def _corrupt_flip_all(self, ign, imm_uri, which):
-        # type: (Any, bytes, int) -> None
+    def _corrupt_flip_all(self, ign: Any, imm_uri: bytes, which: int) -> None:
        """
        Flip the least significant bit at a given byte position in all share files
        for the given capability.
@ -1,14 +1,7 @@
|
|||||||
"""
|
"""
|
||||||
Ported to Python 3.
|
Ported to Python 3.
|
||||||
"""
|
"""
|
||||||
from __future__ import absolute_import
|
from __future__ import annotations
|
||||||
from __future__ import division
|
|
||||||
from __future__ import print_function
|
|
||||||
from __future__ import unicode_literals
|
|
||||||
|
|
||||||
from future.utils import PY2
|
|
||||||
if PY2:
|
|
||||||
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
|
|
||||||
|
|
||||||
import os
|
import os
|
||||||
from struct import (
|
from struct import (
|
||||||
@@ -17,13 +10,8 @@ from struct import (
 from functools import (
     partial,
 )
-import attr
 
-try:
-    from typing import List
-    from allmydata.introducer.client import IntroducerClient
-except ImportError:
-    pass
+import attr
 
 from twisted.internet import defer
 from twisted.trial import unittest
@@ -35,6 +23,7 @@ from eliot.twisted import (
     inline_callbacks,
 )
 
+from allmydata.introducer.client import IntroducerClient
 from allmydata.crypto import aes
 from allmydata.storage.server import (
     si_b2a,
@@ -132,7 +121,7 @@ class FakeCHKCheckerAndUEBFetcher(object):
         ))
 
 class FakeClient(service.MultiService):
-    introducer_clients = []  # type: List[IntroducerClient]
+    introducer_clients : list[IntroducerClient] = []
     DEFAULT_ENCODING_PARAMETERS = {"k":25,
                                    "happy": 75,
                                    "n": 100,
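The switch from List[IntroducerClient] in a type comment to a real list[IntroducerClient] annotation leans on PEP 563 plus the PEP 585 builtin generics: because the annotation is postponed, list[...] is never subscripted at runtime, so this stays compatible with Pythons older than 3.9. A hedged, self-contained sketch using a stand-in class:

    from __future__ import annotations

    class IntroducerClient:  # stand-in for allmydata.introducer.client.IntroducerClient
        pass

    class FakeClient:
        # Stored as the string 'list[IntroducerClient]'; nothing is
        # subscripted at class-definition time.
        introducer_clients: list[IntroducerClient] = []

    print(FakeClient.__annotations__)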
@@ -8,9 +8,9 @@ reused across tests, so each test should be careful to generate unique storage
 indexes.
 """
 
-from future.utils import bchr
+from __future__ import annotations
 
-from typing import Set
+from future.utils import bchr
 
 from random import Random
 from unittest import SkipTest
@@ -1041,7 +1041,7 @@ class IStorageServerMutableAPIsTestsMixin(object):
 class _SharedMixin(SystemTestMixin):
     """Base class for Foolscap and HTTP mixins."""
 
-    SKIP_TESTS = set()  # type: Set[str]
+    SKIP_TESTS : set[str] = set()
 
     def _get_istorage_server(self):
         native_server = next(iter(self.clients[0].storage_broker.get_known_servers()))
@@ -6,26 +6,11 @@ Ported to Python 3.
 Methods ending in to_string() are actually to_bytes(), possibly should be fixed
 in follow-up port.
 """
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-from __future__ import unicode_literals
-
-from future.utils import PY2
-if PY2:
-    # Don't import bytes or str, to prevent future's newbytes leaking and
-    # breaking code that only expects normal bytes.
-    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, dict, list, object, range, max, min  # noqa: F401
-    from past.builtins import unicode as str
 
 from past.builtins import unicode, long
 
 import re
+from typing import Type
 
-try:
-    from typing import Type
-except ImportError:
-    pass
-
 from zope.interface import implementer
 from twisted.python.components import registerAdapter
@@ -707,7 +692,7 @@ class DirectoryURIVerifier(_DirectoryBaseURI):
 
     BASE_STRING=b'URI:DIR2-Verifier:'
     BASE_STRING_RE=re.compile(b'^'+BASE_STRING)
-    INNER_URI_CLASS=SSKVerifierURI  # type: Type[IVerifierURI]
+    INNER_URI_CLASS : Type[IVerifierURI] = SSKVerifierURI
 
     def __init__(self, filenode_uri=None):
         if filenode_uri:
@@ -3,30 +3,11 @@ Base32 encoding.
 
 Ported to Python 3.
 """
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-from __future__ import unicode_literals
-
-from future.utils import PY2
-if PY2:
-    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
-
-if PY2:
-    def backwardscompat_bytes(b):
-        """
-        Replace Future bytes with native Python 2 bytes, so % works
-        consistently until other modules are ported.
-        """
-        return getattr(b, "__native__", lambda: b)()
-    import string
-    maketrans = string.maketrans
-else:
-    def backwardscompat_bytes(b):
-        return b
-    maketrans = bytes.maketrans
-from typing import Optional
+def backwardscompat_bytes(b):
+    return b
+maketrans = bytes.maketrans
 
+from typing import Optional
 import base64
 
 from allmydata.util.assertutil import precondition
@@ -61,16 +42,16 @@ def get_trailing_chars_without_lsbs(N):
     d = {}
     return b''.join(_get_trailing_chars_without_lsbs(N, d=d))
 
-BASE32CHAR = backwardscompat_bytes(b'['+get_trailing_chars_without_lsbs(0)+b']')
-BASE32CHAR_4bits = backwardscompat_bytes(b'['+get_trailing_chars_without_lsbs(1)+b']')
-BASE32CHAR_3bits = backwardscompat_bytes(b'['+get_trailing_chars_without_lsbs(2)+b']')
-BASE32CHAR_2bits = backwardscompat_bytes(b'['+get_trailing_chars_without_lsbs(3)+b']')
-BASE32CHAR_1bits = backwardscompat_bytes(b'['+get_trailing_chars_without_lsbs(4)+b']')
-BASE32STR_1byte = backwardscompat_bytes(BASE32CHAR+BASE32CHAR_3bits)
-BASE32STR_2bytes = backwardscompat_bytes(BASE32CHAR+b'{3}'+BASE32CHAR_1bits)
-BASE32STR_3bytes = backwardscompat_bytes(BASE32CHAR+b'{4}'+BASE32CHAR_4bits)
-BASE32STR_4bytes = backwardscompat_bytes(BASE32CHAR+b'{6}'+BASE32CHAR_2bits)
-BASE32STR_anybytes = backwardscompat_bytes(bytes(b'((?:%s{8})*') % (BASE32CHAR,) + bytes(b"(?:|%s|%s|%s|%s))") % (BASE32STR_1byte, BASE32STR_2bytes, BASE32STR_3bytes, BASE32STR_4bytes))
+BASE32CHAR = b'['+get_trailing_chars_without_lsbs(0)+b']'
+BASE32CHAR_4bits = b'['+get_trailing_chars_without_lsbs(1)+b']'
+BASE32CHAR_3bits = b'['+get_trailing_chars_without_lsbs(2)+b']'
+BASE32CHAR_2bits = b'['+get_trailing_chars_without_lsbs(3)+b']'
+BASE32CHAR_1bits = b'['+get_trailing_chars_without_lsbs(4)+b']'
+BASE32STR_1byte = BASE32CHAR+BASE32CHAR_3bits
+BASE32STR_2bytes = BASE32CHAR+b'{3}'+BASE32CHAR_1bits
+BASE32STR_3bytes = BASE32CHAR+b'{4}'+BASE32CHAR_4bits
+BASE32STR_4bytes = BASE32CHAR+b'{6}'+BASE32CHAR_2bits
+BASE32STR_anybytes = bytes(b'((?:%s{8})*') % (BASE32CHAR,) + bytes(b"(?:|%s|%s|%s|%s))") % (BASE32STR_1byte, BASE32STR_2bytes, BASE32STR_3bytes, BASE32STR_4bytes)
 
 def b2a(os): # type: (bytes) -> bytes
     """
@@ -80,7 +61,7 @@ def b2a(os): # type: (bytes) -> bytes
     """
     return base64.b32encode(os).rstrip(b"=").lower()
 
-def b2a_or_none(os): # type: (Optional[bytes]) -> Optional[bytes]
+def b2a_or_none(os: Optional[bytes]) -> Optional[bytes]:
     if os is not None:
         return b2a(os)
     return None
@@ -100,8 +81,6 @@ NUM_OS_TO_NUM_QS=(0, 2, 4, 5, 7,)
 NUM_QS_TO_NUM_OS=(0, 1, 1, 2, 2, 3, 3, 4)
 NUM_QS_LEGIT=(1, 0, 1, 0, 1, 1, 0, 1,)
 NUM_QS_TO_NUM_BITS=tuple([_x*8 for _x in NUM_QS_TO_NUM_OS])
-if PY2:
-    del _x
 
 # A fast way to determine whether a given string *could* be base-32 encoded data, assuming that the
 # original data had 8K bits for a positive integer K.
@@ -135,8 +114,6 @@ def a2b(cs): # type: (bytes) -> bytes
     """
     @param cs the base-32 encoded data (as bytes)
     """
-    # Workaround Future newbytes issues by converting to real bytes on Python 2:
-    cs = backwardscompat_bytes(cs)
     precondition(could_be_base32_encoded(cs), "cs is required to be possibly base32 encoded data.", cs=cs)
     precondition(isinstance(cs, bytes), cs)
 
@@ -144,9 +121,8 @@ def a2b(cs): # type: (bytes) -> bytes
     # Add padding back, to make Python's base64 module happy:
     while (len(cs) * 5) % 8 != 0:
         cs += b"="
-    # Let newbytes come through and still work on Python 2, where the base64
-    # module gets confused by them.
-    return base64.b32decode(backwardscompat_bytes(cs))
+    return base64.b32decode(cs)
 
 
 __all__ = ["b2a", "a2b", "b2a_or_none", "BASE32CHAR_3bits", "BASE32CHAR_1bits", "BASE32CHAR", "BASE32STR_anybytes", "could_be_base32_encoded"]
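The base32 module's public surface in __all__ is unchanged; only the Python 2 shims disappeared. A quick round-trip check of the kept helpers (assuming an installed allmydata package):

    from allmydata.util import base32

    encoded = base32.b2a(b"hello")            # b'nbswy3dp': lowercase, unpadded
    assert base32.a2b(encoded) == b"hello"    # decodes back to the original bytes
    assert base32.could_be_base32_encoded(encoded)
    assert base32.b2a_or_none(None) is None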
|
@ -209,10 +209,9 @@ class WaitForDelayedCallsMixin(PollMixin):
|
|||||||
|
|
||||||
@inline_callbacks
|
@inline_callbacks
|
||||||
def until(
|
def until(
|
||||||
action, # type: Callable[[], defer.Deferred[Any]]
|
action: Callable[[], defer.Deferred[Any]],
|
||||||
condition, # type: Callable[[], bool]
|
condition: Callable[[], bool],
|
||||||
):
|
) -> defer.Deferred[None]:
|
||||||
# type: (...) -> defer.Deferred[None]
|
|
||||||
"""
|
"""
|
||||||
Run a Deferred-returning function until a condition is true.
|
Run a Deferred-returning function until a condition is true.
|
||||||
|
|
||||||
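until keeps its behavior, repeatedly running a Deferred-returning action until condition() holds; only the annotations moved into the signature. A rough approximation of the loop, using Twisted's own inlineCallbacks in place of eliot's wrapper, and assuming a Twisted recent enough (21.7+) that Deferred is subscriptable at runtime:

    from typing import Any, Callable
    from twisted.internet import defer

    @defer.inlineCallbacks
    def until(
            action: Callable[[], defer.Deferred[Any]],
            condition: Callable[[], bool],
    ):
        # Run the action, check the condition, and repeat until it is true.
        while True:
            yield action()
            if condition():
                break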
@@ -4,22 +4,10 @@ Polling utility that returns Deferred.
 Ported to Python 3.
 """
 
-from __future__ import print_function
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import unicode_literals
-
-from future.utils import PY2
-if PY2:
-    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+from __future__ import annotations
 
 import time
 
-try:
-    from typing import List
-except ImportError:
-    pass
-
 from twisted.internet import task
 
 class TimeoutError(Exception):
@@ -29,7 +17,7 @@ class PollComplete(Exception):
     pass
 
 class PollMixin(object):
-    _poll_should_ignore_these_errors = [] # type: List[Exception]
+    _poll_should_ignore_these_errors : list[Exception] = []
 
     def poll(self, check_f, pollinterval=0.01, timeout=1000):
         # Return a Deferred, then call check_f periodically until it returns
@@ -117,7 +117,7 @@ def boolean_of_arg(arg): # type: (bytes) -> bool
     return arg.lower() in (b"true", b"t", b"1", b"on")
 
 
-def parse_replace_arg(replace): # type: (bytes) -> Union[bool,_OnlyFiles]
+def parse_replace_arg(replace: bytes) -> Union[bool,_OnlyFiles]:
     assert isinstance(replace, bytes)
     if replace.lower() == b"only-files":
         return ONLY_FILES
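For the web-argument helpers the contract stays bytes-in. A behavior sketch, assuming these names live in allmydata.web.common and that the unshown tail of parse_replace_arg falls through to boolean_of_arg:

    from allmydata.web.common import boolean_of_arg, parse_replace_arg, ONLY_FILES

    assert boolean_of_arg(b"TRUE") is True             # matching is case-insensitive
    assert parse_replace_arg(b"only-files") is ONLY_FILES
    assert parse_replace_arg(b"false") is False        # other values parse as booleans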
tox.ini
@@ -100,10 +100,9 @@ commands =
 [testenv:codechecks]
 basepython = python3
 deps =
-    # Newer versions of PyLint have buggy configuration
-    # (https://github.com/PyCQA/pylint/issues/4574), so stick to old version
-    # for now.
-    pylint < 2.5
+    # Make sure we get a version of PyLint that respects config, and isn't too
+    # old.
+    pylint < 2.18, >2.14
 
 # On macOS, git inside of towncrier needs $HOME.
 passenv = HOME
 setenv =