Mirror of https://github.com/tahoe-lafs/tahoe-lafs.git (synced 2024-12-18 20:47:54 +00:00)

Commit 4a3668712b: Merge branch 'master' of https://github.com/tahoe-lafs/tahoe-lafs into 1101.feature
@@ -48,8 +48,6 @@ dockerhub-auth-template: &DOCKERHUB_AUTH
<<: *DOCKERHUB_CONTEXT
- "build-image-fedora-35":
<<: *DOCKERHUB_CONTEXT
- "build-image-oraclelinux-8":
<<: *DOCKERHUB_CONTEXT
# Restore later as PyPy38
#- "build-image-pypy27-buster":
# <<: *DOCKERHUB_CONTEXT

@@ -88,19 +86,16 @@ workflows:
- "ubuntu-22-04":
{}

# Equivalent to RHEL 8; CentOS 8 is dead.
- "oraclelinux-8":
{}

- "nixos":
name: "<<matrix.pythonVersion>>"
nixpkgs: "nixpkgs-unstable"
name: "<<matrix.nixpkgs>>-<<matrix.pythonVersion>>"
matrix:
parameters:
nixpkgs:
- "nixpkgs-24_11"
pythonVersion:
- "python39"
- "python310"
- "python311"
- "python312"

# Eventually, test against PyPy 3.8
#- "pypy27-buster":

@@ -130,7 +125,7 @@ workflows:
# of Python, we probably have to do something here.
pythonVersion:
- "3.9"
- "3.11"
- "3.12"

- "integration":
# Run even the slow integration tests here. We need the `--` to

@@ -229,7 +224,7 @@ jobs:
# version-specific binary packages so include the Python version
# in this key, as well as the canonical source of our
# dependencies.
- &CACHE_KEY "pip-packages-v1-<< parameters.pythonVersion >>-{{ checksum \"setup.py\" }}"
- &CACHE_KEY "pip-packages-v1-<< parameters.pythonVersion >>-{{ checksum \"pyproject.toml\" }}"

- "run":
name: "Fix $env:PATH"

@@ -249,11 +244,6 @@ jobs:
# PowerShell string escape character.
Add-Content -Path $Profile.CurrentUserAllHosts -Value "`$env:PATH = `"$p;$q;`$env:PATH`""

- "run":
name: "Display tool versions"
command: |
python misc/build_helpers/show-tool-versions.py

- "run":
# It's faster to install a wheel than a source package. If we don't
# have a cached wheelhouse then build all of the wheels and dump

@@ -280,10 +270,15 @@ jobs:
name: "Install Dependencies"
environment:
# By this point we should no longer need an index.
PIP_NO_INDEX: "1"
## PIP_NO_INDEX: "1"
command: |
python -m pip install .[testenv] .[test]
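The hunk above comments out `PIP_NO_INDEX` for this job, so pip is allowed to fall back to PyPI instead of insisting on the pre-built wheelhouse. A minimal sketch of the wheelhouse pattern the surrounding comments describe, for orientation only; the wheelhouse path is illustrative and not taken from this config:

```bash
# Build wheels once into a local wheelhouse (path is illustrative).
WHEELHOUSE_PATH=/tmp/wheelhouse
python -m pip wheel --wheel-dir "$WHEELHOUSE_PATH" .[testenv] .[test]

# With both variables set, pip resolves only against the local wheelhouse;
# dropping PIP_NO_INDEX (as this diff does) lets it reach PyPI again.
export PIP_FIND_LINKS="file://${WHEELHOUSE_PATH}"
export PIP_NO_INDEX=1
python -m pip install .[testenv] .[test]
```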
- "run":
|
||||
name: "Display tool versions"
|
||||
command: |
|
||||
python misc/build_helpers/show-tool-versions.py
|
||||
|
||||
- "run":
|
||||
name: "Run Unit Tests"
|
||||
environment:
|
||||
@ -540,37 +535,6 @@ jobs:
|
||||
<<: *UTF_8_ENVIRONMENT
|
||||
TAHOE_LAFS_TOX_ENVIRONMENT: "py310"
|
||||
|
||||
oraclelinux-8: &RHEL_DERIV
|
||||
docker:
|
||||
- <<: *DOCKERHUB_AUTH
|
||||
image: "tahoelafsci/oraclelinux:8-py3.8"
|
||||
user: "nobody"
|
||||
|
||||
environment:
|
||||
<<: *UTF_8_ENVIRONMENT
|
||||
TAHOE_LAFS_TOX_ENVIRONMENT: "py38"
|
||||
|
||||
# pip cannot install packages if the working directory is not readable.
|
||||
# We want to run a lot of steps as nobody instead of as root.
|
||||
working_directory: "/tmp/project"
|
||||
|
||||
steps:
|
||||
- "checkout"
|
||||
- run: *SETUP_VIRTUALENV
|
||||
- run: *RUN_TESTS
|
||||
- store_test_results: *STORE_TEST_RESULTS
|
||||
- store_artifacts: *STORE_TEST_LOG
|
||||
- store_artifacts: *STORE_ELIOT_LOG
|
||||
- store_artifacts: *STORE_OTHER_ARTIFACTS
|
||||
- run: *SUBMIT_COVERAGE
|
||||
|
||||
fedora-35:
|
||||
<<: *RHEL_DERIV
|
||||
docker:
|
||||
- <<: *DOCKERHUB_AUTH
|
||||
image: "tahoelafsci/fedora:35-py3"
|
||||
user: "nobody"
|
||||
|
||||
nixos:
|
||||
parameters:
|
||||
nixpkgs:
|
||||
@ -592,15 +556,12 @@ jobs:
|
||||
buildSteps:
|
||||
- "run":
|
||||
name: "Unit Test"
|
||||
environment:
|
||||
# Once dependencies are built, we can allow some more concurrency for our own
|
||||
# test suite.
|
||||
UNITTEST_CORES: 8
|
||||
command: |
|
||||
source .circleci/lib.sh
|
||||
|
||||
# Translate the nixpkgs selection into a flake reference we
|
||||
# can use to override the default nixpkgs input.
|
||||
NIXPKGS=$(nixpkgs_flake_reference <<parameters.nixpkgs>>)
|
||||
|
||||
cache_if_able nix run \
|
||||
--override-input nixpkgs "$NIXPKGS" \
|
||||
nix run \
|
||||
.#<<parameters.pythonVersion>>-unittest -- \
|
||||
--jobs $UNITTEST_CORES \
|
||||
allmydata
|
||||
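For one concrete matrix entry, the command above boils down to a plain `nix run` invocation; the `cache_if_able` wrapper goes away along with `.circleci/lib.sh` later in this diff. A rough sketch of its shape, assuming the `nixpkgs-24_11` and `python312` matrix values; the pinned revision shown is the one recorded for `nixpkgs-24_11` in the flake.lock change of this same commit:

```bash
# Pinned revision taken from this commit's flake.lock entry for nixpkgs-24_11.
NIXPKGS="github:NixOS/nixpkgs?rev=b681065d0919f7eb5309a93cea2cfa84dec9aa88"

nix run \
  --override-input nixpkgs "$NIXPKGS" \
  .#python312-unittest -- \
  --jobs 8 \
  allmydata
```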
@@ -698,14 +659,6 @@ jobs:
PYTHON_VERSION: "3.10"

build-image-oraclelinux-8:
<<: *BUILD_IMAGE

environment:
DISTRO: "oraclelinux"
TAG: "8"
PYTHON_VERSION: "3.8"

build-image-fedora-35:
<<: *BUILD_IMAGE

@@ -730,7 +683,7 @@ executors:
# requirements and goals.
# https://circleci.com/developer/orbs/orb/circleci/windows#executors-server-2022
machine:
image: "windows-server-2022-gui:2023.06.1"
image: "windows-server-2022-gui:current"
shell: "powershell.exe -ExecutionPolicy Bypass"
resource_class: "windows.large"

@@ -738,12 +691,8 @@ executors:
docker:
# Run in a highly Nix-capable environment.
- <<: *DOCKERHUB_AUTH
image: "nixos/nix:2.16.1"
image: "nixos/nix:2.25.3"
environment:
# CACHIX_AUTH_TOKEN is manually set in the CircleCI web UI and allows us
# to push to CACHIX_NAME. CACHIX_NAME tells cachix which cache to push
# to.
CACHIX_NAME: "tahoe-lafs-opensource"
# Let us use features marked "experimental". For example, most/all of
# the `nix <subcommand>` forms.
NIX_CONFIG: "experimental-features = nix-command flakes"

@@ -766,42 +715,21 @@ commands:
type: "steps"

steps:
- "run":
# Get cachix for Nix-friendly caching.
name: "Install Basic Dependencies"
command: |
# Get some build environment dependencies and let them float on a
# certain release branch. These aren't involved in the actual
# package build (only in CI environment setup) so the fact that
# they float shouldn't hurt reproducibility.
NIXPKGS="nixpkgs/nixos-23.05"
nix profile install $NIXPKGS#cachix $NIXPKGS#bash $NIXPKGS#jp

# Activate our cachix cache for "binary substitution". This sets
# up configuration that lets Nix download something from the cache
# instead of building it locally, if possible.
cachix use "${CACHIX_NAME}"
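`cachix use` rewrites the local Nix configuration so builds can be substituted from the named cache. Roughly what it arranges, shown here as the equivalent manual `nix.conf` entries; the URL and public key are the ones this repository's flake.nix advertises, everything else is illustrative:

```bash
# Equivalent manual configuration (illustrative; `cachix use` does this for you).
mkdir -p ~/.config/nix
cat >> ~/.config/nix/nix.conf <<'EOF'
extra-substituters = https://tahoe-lafs-opensource.cachix.org
extra-trusted-public-keys = tahoe-lafs-opensource.cachix.org-1:eIKCHOPJYceJ2gb74l6e0mayuSdXqiavxYeAio0LFGo=
EOF
```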
- "checkout"
|
||||
|
||||
- "run":
|
||||
# The Nix package doesn't know how to do this part, unfortunately.
|
||||
name: "Generate version"
|
||||
command: |
|
||||
nix-shell \
|
||||
-p 'python3.withPackages (ps: [ ps.setuptools ])' \
|
||||
--run 'python setup.py update_version'
|
||||
|
||||
- "run":
|
||||
name: "Build Package"
|
||||
environment:
|
||||
# CircleCI build environment looks like it has a zillion and a half cores.
|
||||
# Don't let Nix autodetect this high core count because it blows up memory
|
||||
# usage and fails the test run. Pick a number of cores that suits the build
|
||||
# environment we're paying for (the free one!).
|
||||
DEPENDENCY_CORES: 3
|
||||
command: |
|
||||
source .circleci/lib.sh
|
||||
NIXPKGS=$(nixpkgs_flake_reference <<parameters.nixpkgs>>)
|
||||
cache_if_able nix build \
|
||||
nix build \
|
||||
--verbose \
|
||||
--print-build-logs \
|
||||
--cores "$DEPENDENCY_CORES" \
|
||||
--override-input nixpkgs "$NIXPKGS" \
|
||||
.#<<parameters.pythonVersion>>-tahoe-lafs
|
||||
|
||||
- steps: "<<parameters.buildSteps>>"
|
||||
|
@ -26,7 +26,7 @@ virtualenv --python "${PYTHON}" "${BOOTSTRAP_VENV}"
|
||||
PIP="${BOOTSTRAP_VENV}/bin/pip"
|
||||
|
||||
# Tell pip where it can find any existing wheels.
|
||||
export PIP_FIND_LINKS="file://${WHEELHOUSE_PATH}"
|
||||
##export PIP_FIND_LINKS="file://${WHEELHOUSE_PATH}"
|
||||
|
||||
# Get "certifi" to avoid bug #2913. Basically if a `setup_requires=...` causes
|
||||
# a package to be installed (with setuptools) then it'll fail on certain
|
||||
|
.circleci/lib.sh (deleted, 148 lines)
@@ -1,148 +0,0 @@
# CircleCI build environment looks like it has a zillion and a half cores.
# Don't let Nix autodetect this high core count because it blows up memory
# usage and fails the test run. Pick a number of cores that suits the build
# environment we're paying for (the free one!).
DEPENDENCY_CORES=3

# Once dependencies are built, we can allow some more concurrency for our own
# test suite.
UNITTEST_CORES=8

# Run a command, enabling cache writes to cachix if possible. The command is
# accepted as a variable number of positional arguments (like argv).
function cache_if_able() {
# Dump some info about our build environment.
describe_build

if is_cache_writeable; then
# If the cache is available we'll use it. This lets fork owners set
# up their own caching if they want.
echo "Cachix credentials present; will attempt to write to cache."

# The `cachix watch-exec ...` does our cache population. When it sees
# something added to the store (I guess) it pushes it to the named
# cache.
cachix watch-exec "${CACHIX_NAME}" -- "$@"
else
if is_cache_required; then
echo "Required credentials (CACHIX_AUTH_TOKEN) are missing."
return 1
else
echo "Cachix credentials missing; will not attempt cache writes."
"$@"
fi
fi
}

function is_cache_writeable() {
# We can only *push* to the cache if we have a CACHIX_AUTH_TOKEN. in-repo
# jobs will get this from CircleCI configuration but jobs from forks may
# not.
[ -v CACHIX_AUTH_TOKEN ]
}

function is_cache_required() {
# If we're building in tahoe-lafs/tahoe-lafs then we must use the cache.
# If we're building anything from a fork then we're allowed to not have
# the credentials.
is_upstream
}

# Return success if the origin of this build is the tahoe-lafs/tahoe-lafs
# repository itself (and so we expect to have cache credentials available),
# failure otherwise.
#
# See circleci.txt for notes about how this determination is made.
function is_upstream() {
# CIRCLE_PROJECT_USERNAME is set to the org the build is happening for.
# If a PR targets a fork of the repo then this is set to something other
# than "tahoe-lafs".
[ "$CIRCLE_PROJECT_USERNAME" == "tahoe-lafs" ] &&

# CIRCLE_BRANCH is set to the real branch name for in-repo PRs and
# "pull/NNNN" for pull requests from forks.
#
# CIRCLE_PULL_REQUESTS is set to a comma-separated list of the full
# URLs of the PR pages which share an underlying branch, with one of
# them ended with that same "pull/NNNN" for PRs from forks.
! any_element_endswith "/$CIRCLE_BRANCH" "," "$CIRCLE_PULL_REQUESTS"
}

# Return success if splitting $3 on $2 results in an array with any element
# that ends with $1, failure otherwise.
function any_element_endswith() {
suffix=$1
shift

sep=$1
shift

haystack=$1
shift

IFS="${sep}" read -r -a elements <<< "$haystack"
for elem in "${elements[@]}"; do
if endswith "$suffix" "$elem"; then
return 0
fi
done
return 1
}

# Return success if $2 ends with $1, failure otherwise.
function endswith() {
suffix=$1
shift

haystack=$1
shift

case "$haystack" in
*${suffix})
return 0
;;

*)
return 1
;;
esac
}

function describe_build() {
echo "Building PR for user/org: ${CIRCLE_PROJECT_USERNAME}"
echo "Building branch: ${CIRCLE_BRANCH}"
if is_upstream; then
echo "Upstream build."
else
echo "Non-upstream build."
fi
if is_cache_required; then
echo "Cache is required."
else
echo "Cache not required."
fi
if is_cache_writeable; then
echo "Cache is writeable."
else
echo "Cache not writeable."
fi
}

# Inspect the flake input metadata for an input of a given name and return the
# revision at which that input is pinned. If the input does not exist then
# return garbage (probably "null").
read_input_revision() {
input_name=$1
shift

nix flake metadata --json | jp --unquoted 'locks.nodes."'"$input_name"'".locked.rev'
}

# Return a flake reference that refers to a certain revision of nixpkgs. The
# certain revision is the revision to which the specified input is pinned.
nixpkgs_flake_reference() {
input_name=$1
shift

echo "github:NixOS/nixpkgs?rev=$(read_input_revision $input_name)"
}
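The fork-detection logic in the removed file can be hard to follow from the comments alone. A small illustration with hypothetical CircleCI values, showing why a PR opened from a fork counts as non-upstream and is therefore never required to have Cachix credentials:

```bash
source .circleci/lib.sh   # as it existed before this commit removed it

# Hypothetical values of the kind CircleCI sets for a PR opened from a fork.
CIRCLE_PROJECT_USERNAME="tahoe-lafs"
CIRCLE_BRANCH="pull/1234"
CIRCLE_PULL_REQUESTS="https://github.com/tahoe-lafs/tahoe-lafs/pull/1234"

is_upstream && echo "upstream build" || echo "fork PR: cache credentials optional"
# -> "fork PR: cache credentials optional", because one CIRCLE_PULL_REQUESTS
#    element ends with "/pull/1234", which matches "/$CIRCLE_BRANCH".

# nixpkgs_flake_reference turns a pinned flake input into an explicit reference:
nixpkgs_flake_reference nixpkgs-24_11
# -> github:NixOS/nixpkgs?rev=<the rev recorded for nixpkgs-24_11 in flake.lock>
```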
@@ -67,7 +67,7 @@ TIMEOUT="timeout --kill-after 1m 45m"
# via tox and then scraping it out is hideous and failure prone.
export SUBUNITREPORTER_OUTPUT_PATH="${SUBUNIT2}"
export TAHOE_LAFS_TRIAL_ARGS="${TAHOE_LAFS_TRIAL_ARGS:---reporter=subunitv2-file --rterrors}"
export PIP_NO_INDEX="1"
##export PIP_NO_INDEX="1"

# Make output unbuffered, so progress reports from subunitv2-file get streamed
# and notify CircleCI we're still alive.

@@ -25,8 +25,8 @@ TAHOE_LAFS_TOX_ARGS=$1
shift || :

# Tell pip where it can find any existing wheels.
export PIP_FIND_LINKS="file://${WHEELHOUSE_PATH}"
export PIP_NO_INDEX="1"
##export PIP_FIND_LINKS="file://${WHEELHOUSE_PATH}"
##export PIP_NO_INDEX="1"

# Get everything else installed in it, too.
"${BOOTSTRAP_VENV}"/bin/tox \

.github/workflows/ci.yml (vendored, 40 lines changed)
@ -45,16 +45,14 @@ jobs:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- os: macos-12
|
||||
- os: macos-14
|
||||
python-version: "3.12"
|
||||
# We only support PyPy on Linux at the moment.
|
||||
- os: ubuntu-latest
|
||||
python-version: "pypy-3.8"
|
||||
- os: ubuntu-latest
|
||||
- os: ubuntu-22.04
|
||||
python-version: "pypy-3.9"
|
||||
- os: ubuntu-latest
|
||||
- os: ubuntu-22.04
|
||||
python-version: "3.12"
|
||||
- os: windows-latest
|
||||
- os: windows-2022
|
||||
python-version: "3.12"
|
||||
|
||||
steps:
|
||||
@ -73,7 +71,7 @@ jobs:
|
||||
|
||||
- name: Install Python packages
|
||||
run: |
|
||||
pip install --upgrade tox tox-gh-actions setuptools
|
||||
pip install --upgrade tox tox-gh-actions
|
||||
pip list
|
||||
|
||||
- name: Display tool versions
|
||||
@ -95,15 +93,15 @@ jobs:
|
||||
python -m tox | python misc/windows-enospc/passthrough.py
|
||||
|
||||
- name: Upload eliot.log
|
||||
uses: actions/upload-artifact@v3
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: eliot.log
|
||||
name: "eliot-${{ matrix.os }}-python-${{ matrix.python-version }}.log"
|
||||
path: eliot.log
|
||||
|
||||
- name: Upload trial log
|
||||
uses: actions/upload-artifact@v3
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: test.log
|
||||
name: "test-${{ matrix.os }}-python-${{ matrix.python-version }}.log"
|
||||
path: _trial_temp/test.log
|
||||
|
||||
# Upload this job's coverage data to Coveralls. While there is a GitHub
|
||||
@ -142,7 +140,7 @@ jobs:
|
||||
finish-coverage-report:
|
||||
needs:
|
||||
- "coverage"
|
||||
runs-on: "ubuntu-latest"
|
||||
runs-on: "ubuntu-22.04"
|
||||
container: "python:3-slim"
|
||||
steps:
|
||||
- name: "Indicate completion to coveralls.io"
|
||||
@ -162,8 +160,8 @@ jobs:
|
||||
# 22.04 has some issue with Tor at the moment:
|
||||
# https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3943
|
||||
- ubuntu-20.04
|
||||
- macos-12
|
||||
- windows-latest
|
||||
- macos-14
|
||||
- windows-2022
|
||||
python-version:
|
||||
- "3.11"
|
||||
force-foolscap:
|
||||
@ -187,7 +185,7 @@ jobs:
|
||||
brew install tor
|
||||
|
||||
- name: Install Tor [Windows]
|
||||
if: matrix.os == 'windows-latest'
|
||||
if: matrix.os == 'windows-2022'
|
||||
uses: crazy-max/ghaction-chocolatey@v2
|
||||
with:
|
||||
args: install tor
|
||||
@ -232,10 +230,10 @@ jobs:
|
||||
tox -e integration -- --force-foolscap integration/
|
||||
|
||||
- name: Upload eliot.log in case of failure
|
||||
uses: actions/upload-artifact@v3
|
||||
uses: actions/upload-artifact@v4
|
||||
if: failure()
|
||||
with:
|
||||
name: integration.eliot.json
|
||||
name: "integration.eliot-${{ matrix.os }}-python-${{ matrix.python-version }}.json"
|
||||
path: integration.eliot.json
|
||||
|
||||
packaging:
|
||||
@ -244,9 +242,9 @@ jobs:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
os:
|
||||
- macos-12
|
||||
- windows-latest
|
||||
- ubuntu-latest
|
||||
- macos-14
|
||||
- windows-2022
|
||||
- ubuntu-22.04
|
||||
python-version:
|
||||
- 3.9
|
||||
|
||||
@ -279,7 +277,7 @@ jobs:
|
||||
run: dist/Tahoe-LAFS/tahoe --version
|
||||
|
||||
- name: Upload PyInstaller package
|
||||
uses: actions/upload-artifact@v3
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: Tahoe-LAFS-${{ matrix.os }}-Python-${{ matrix.python-version }}
|
||||
path: dist/Tahoe-LAFS-*-*.*
|
||||
|
MANIFEST.in (deleted, 13 lines)
@@ -1,13 +0,0 @@
include README.rst
include COPYING.GPL COPYING.TGPPL.rst CREDITS Makefile NEWS.rst Tahoe.home
include relnotes.txt
include Dockerfile
include tox.ini .appveyor.yml .travis.yml
include .coveragerc
recursive-include src *.xhtml *.js *.png *.css *.svg *.txt *.yaml
graft docs
graft misc
graft static
graft integration

global-exclude *~ *.pyc
@@ -56,7 +56,7 @@ Once ``tahoe --version`` works, see `How to Run Tahoe-LAFS <docs/running.rst>`__

🐍 Python 2
-----------

Python 3.8 or later is required.
Python 3.9 or later is required.
If you are still using Python 2.7, use Tahoe-LAFS version 1.17.1.
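A quick way to check the requirement above and, where needed, pin the legacy release the README mentions; a sketch only, not taken from the project's documentation:

```bash
python3 --version                   # must report 3.9 or newer for current Tahoe-LAFS
pip install "tahoe-lafs==1.17.1"    # last release line for users still on Python 2.7
```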
docs/glossary.rst (new file, 54 lines)
@@ -0,0 +1,54 @@
.. -*- coding: utf-8 -*-

============================
Glossary of Tahoe-LAFS Terms
============================


.. glossary::

   `Foolscap <https://github.com/warner/foolscap/>`_
      an RPC/RMI (Remote Procedure Call / Remote Method Invocation) protocol for use with Twisted

   storage server
      a Tahoe-LAFS process configured to offer storage and reachable over the network for store and retrieve operations

   storage service
      a Python object held in memory in the storage server which provides the implementation of the storage protocol

   introducer
      a Tahoe-LAFS process at a known location configured to re-publish announcements about the location of storage servers

   :ref:`fURLs <fURLs>`
      a self-authenticating URL-like string which can be used to locate a remote object using the Foolscap protocol
      (the storage service is an example of such an object)

   :ref:`NURLs <NURLs>`
      a self-authenticating URL-like string almost exactly like a fURL but without being tied to Foolscap

   swissnum
      a short random string which is part of a fURL/NURL and which acts as a shared secret to authorize clients to use a storage service

   lease
      state associated with a share informing a storage server of the duration of storage desired by a client

   share
      a single unit of client-provided arbitrary data to be stored by a storage server
      (in practice, one of the outputs of applying ZFEC encoding to some ciphertext with some additional metadata attached)

   bucket
      a group of one or more immutable shares held by a storage server and having a common storage index

   slot
      a group of one or more mutable shares held by a storage server and having a common storage index
      (sometimes "slot" is considered a synonym for "storage index of a slot")

   storage index
      a 16 byte string which can address a slot or a bucket
      (in practice, derived by hashing the encryption key associated with contents of that slot or bucket)

   write enabler
      a short secret string which storage servers require to be presented before allowing mutation of any mutable share

   lease renew secret
      a short secret string which storage servers require to be presented before allowing a particular lease to be renewed
@@ -32,6 +32,8 @@ preserving your privacy and security.
   anonymity-configuration
   known_issues

   glossary

.. toctree::
   :maxdepth: 1
   :caption: Tahoe-LAFS in Depth
@ -16,53 +16,49 @@ The Tahoe-LAFS client will also need to change but it is not expected that it wi
|
||||
Glossary
|
||||
--------
|
||||
|
||||
.. glossary::
|
||||
`Foolscap <https://github.com/warner/foolscap/>`_
|
||||
an RPC/RMI (Remote Procedure Call / Remote Method Invocation) protocol for use with Twisted
|
||||
|
||||
`Foolscap <https://github.com/warner/foolscap/>`_
|
||||
an RPC/RMI (Remote Procedure Call / Remote Method Invocation) protocol for use with Twisted
|
||||
storage server
|
||||
a Tahoe-LAFS process configured to offer storage and reachable over the network for store and retrieve operations
|
||||
|
||||
storage server
|
||||
a Tahoe-LAFS process configured to offer storage and reachable over the network for store and retrieve operations
|
||||
storage service
|
||||
a Python object held in memory in the storage server which provides the implementation of the storage protocol
|
||||
|
||||
storage service
|
||||
a Python object held in memory in the storage server which provides the implementation of the storage protocol
|
||||
introducer
|
||||
a Tahoe-LAFS process at a known location configured to re-publish announcements about the location of storage servers
|
||||
|
||||
introducer
|
||||
a Tahoe-LAFS process at a known location configured to re-publish announcements about the location of storage servers
|
||||
:ref:`fURLs <fURLs>`
|
||||
a self-authenticating URL-like string which can be used to locate a remote object using the Foolscap protocol (the storage service is an example of such an object)
|
||||
|
||||
:ref:`fURLs <fURLs>`
|
||||
a self-authenticating URL-like string which can be used to locate a remote object using the Foolscap protocol
|
||||
(the storage service is an example of such an object)
|
||||
:ref:`NURLs <NURLs>`
|
||||
a self-authenticating URL-like string almost exactly like a fURL but without being tied to Foolscap
|
||||
|
||||
:ref:`NURLs <NURLs>`
|
||||
a self-authenticating URL-like string almost exactly like a fURL but without being tied to Foolscap
|
||||
swissnum
|
||||
a short random string which is part of a fURL/NURL and which acts as a shared secret to authorize clients to use a storage service
|
||||
|
||||
swissnum
|
||||
a short random string which is part of a fURL/NURL and which acts as a shared secret to authorize clients to use a storage service
|
||||
lease
|
||||
state associated with a share informing a storage server of the duration of storage desired by a client
|
||||
|
||||
lease
|
||||
state associated with a share informing a storage server of the duration of storage desired by a client
|
||||
share
|
||||
a single unit of client-provided arbitrary data to be stored by a storage server (in practice, one of the outputs of applying ZFEC encoding to some ciphertext with some additional metadata attached)
|
||||
|
||||
share
|
||||
a single unit of client-provided arbitrary data to be stored by a storage server
|
||||
(in practice, one of the outputs of applying ZFEC encoding to some ciphertext with some additional metadata attached)
|
||||
bucket
|
||||
a group of one or more immutable shares held by a storage server and having a common storage index
|
||||
|
||||
bucket
|
||||
a group of one or more immutable shares held by a storage server and having a common storage index
|
||||
slot
|
||||
a group of one or more mutable shares held by a storage server and having a common storage index (sometimes "slot" is considered a synonym for "storage index of a slot")
|
||||
|
||||
slot
|
||||
a group of one or more mutable shares held by a storage server and having a common storage index
|
||||
(sometimes "slot" is considered a synonym for "storage index of a slot")
|
||||
storage index
|
||||
a 16 byte string which can address a slot or a bucket (in practice, derived by hashing the encryption key associated with contents of that slot or bucket)
|
||||
|
||||
storage index
|
||||
a 16 byte string which can address a slot or a bucket
|
||||
(in practice, derived by hashing the encryption key associated with contents of that slot or bucket)
|
||||
write enabler
|
||||
a short secret string which storage servers require to be presented before allowing mutation of any mutable share
|
||||
|
||||
write enabler
|
||||
a short secret string which storage servers require to be presented before allowing mutation of any mutable share
|
||||
lease renew secret
|
||||
a short secret string which storage servers require to be presented before allowing a particular lease to be renewed
|
||||
|
||||
lease renew secret
|
||||
a short secret string which storage servers require to be presented before allowing a particular lease to be renewed
|
||||
Additional terms related to the Tahoe-LAFS project in general are defined in the :doc:`../glossary`
|
||||
|
||||
The key words
|
||||
"MUST", "MUST NOT", "REQUIRED", "SHALL", "SHALL NOT", "SHOULD", "SHOULD NOT", "RECOMMENDED", "MAY", and "OPTIONAL"
|
||||
|
flake.lock (generated, 60 lines changed)
@ -3,11 +3,11 @@
|
||||
"flake-compat": {
|
||||
"flake": false,
|
||||
"locked": {
|
||||
"lastModified": 1673956053,
|
||||
"narHash": "sha256-4gtG9iQuiKITOjNQQeQIpoIB6b16fm+504Ch3sNKLd8=",
|
||||
"lastModified": 1733328505,
|
||||
"narHash": "sha256-NeCCThCEP3eCl2l/+27kNNK7QrwZB1IJCrXfrbv5oqU=",
|
||||
"owner": "edolstra",
|
||||
"repo": "flake-compat",
|
||||
"rev": "35bb57c0c8d8b62bbfd284272c928ceb64ddbde9",
|
||||
"rev": "ff81ac966bb2cae68946d5ed5fc4994f96d0ffec",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
@ -21,11 +21,11 @@
|
||||
"systems": "systems"
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1687709756,
|
||||
"narHash": "sha256-Y5wKlQSkgEK2weWdOu4J3riRd+kV/VCgHsqLNTTWQ/0=",
|
||||
"lastModified": 1731533236,
|
||||
"narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=",
|
||||
"owner": "numtide",
|
||||
"repo": "flake-utils",
|
||||
"rev": "dbabf0ca0c0c4bce6ea5eaf65af5cb694d2082c7",
|
||||
"rev": "11707dc2f618dd54ca8739b309ec4fc024de578b",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
@ -34,50 +34,18 @@
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"nixpkgs-22_11": {
|
||||
"nixpkgs-24_11": {
|
||||
"locked": {
|
||||
"lastModified": 1688392541,
|
||||
"narHash": "sha256-lHrKvEkCPTUO+7tPfjIcb7Trk6k31rz18vkyqmkeJfY=",
|
||||
"lastModified": 1733261153,
|
||||
"narHash": "sha256-eq51hyiaIwtWo19fPEeE0Zr2s83DYMKJoukNLgGGpek=",
|
||||
"owner": "NixOS",
|
||||
"repo": "nixpkgs",
|
||||
"rev": "ea4c80b39be4c09702b0cb3b42eab59e2ba4f24b",
|
||||
"rev": "b681065d0919f7eb5309a93cea2cfa84dec9aa88",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "NixOS",
|
||||
"ref": "nixos-22.11",
|
||||
"repo": "nixpkgs",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"nixpkgs-23_05": {
|
||||
"locked": {
|
||||
"lastModified": 1689885880,
|
||||
"narHash": "sha256-2ikAcvHKkKh8J/eUrwMA+wy1poscC+oL1RkN1V3RmT8=",
|
||||
"owner": "NixOS",
|
||||
"repo": "nixpkgs",
|
||||
"rev": "fa793b06f56896b7d1909e4b69977c7bf842b2f0",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "NixOS",
|
||||
"ref": "nixos-23.05",
|
||||
"repo": "nixpkgs",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"nixpkgs-unstable": {
|
||||
"locked": {
|
||||
"lastModified": 1689791806,
|
||||
"narHash": "sha256-QpXjfiyBFwa7MV/J6nM5FoBreks9O7j9cAZxV22MR8A=",
|
||||
"owner": "NixOS",
|
||||
"repo": "nixpkgs",
|
||||
"rev": "439ba0789ff84dddea64eb2d47a4a0d4887dbb1f",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "NixOS",
|
||||
"ref": "pull/244135/head",
|
||||
"ref": "nixos-24.11",
|
||||
"repo": "nixpkgs",
|
||||
"type": "github"
|
||||
}
|
||||
@ -87,11 +55,9 @@
|
||||
"flake-compat": "flake-compat",
|
||||
"flake-utils": "flake-utils",
|
||||
"nixpkgs": [
|
||||
"nixpkgs-unstable"
|
||||
"nixpkgs-24_11"
|
||||
],
|
||||
"nixpkgs-22_11": "nixpkgs-22_11",
|
||||
"nixpkgs-23_05": "nixpkgs-23_05",
|
||||
"nixpkgs-unstable": "nixpkgs-unstable"
|
||||
"nixpkgs-24_11": "nixpkgs-24_11"
|
||||
}
|
||||
},
|
||||
"systems": {
|
||||
|
flake.nix (35 lines changed)
@ -1,14 +1,6 @@
|
||||
{
|
||||
description = "Tahoe-LAFS, free and open decentralized data store";
|
||||
|
||||
nixConfig = {
|
||||
# Supply configuration for the build cache updated by our CI system. This
|
||||
# should allow most users to avoid having to build a large number of
|
||||
# packages (otherwise necessary due to our Python package overrides).
|
||||
substituters = ["https://tahoe-lafs-opensource.cachix.org"];
|
||||
trusted-public-keys = ["tahoe-lafs-opensource.cachix.org-1:eIKCHOPJYceJ2gb74l6e0mayuSdXqiavxYeAio0LFGo="];
|
||||
};
|
||||
|
||||
inputs = {
|
||||
# A couple possible nixpkgs pins. Ideally these could be selected easily
|
||||
# from the command line but there seems to be no syntax/support for that.
|
||||
@ -20,25 +12,12 @@
|
||||
# requirements. We could decide in the future that supporting multiple
|
||||
# releases of NixOS at a time is worthwhile and then pins like these will
|
||||
# help us test each of those releases.
|
||||
"nixpkgs-22_11" = {
|
||||
url = github:NixOS/nixpkgs?ref=nixos-22.11;
|
||||
};
|
||||
"nixpkgs-23_05" = {
|
||||
url = github:NixOS/nixpkgs?ref=nixos-23.05;
|
||||
"nixpkgs-24_11" = {
|
||||
url = github:NixOS/nixpkgs?ref=nixos-24.11;
|
||||
};
|
||||
|
||||
# We depend on a very new python-cryptography which is not yet available
|
||||
# from any release branch of nixpkgs. However, it is contained in a PR
|
||||
# currently up for review. Point our nixpkgs at that for now.
|
||||
"nixpkgs-unstable" = {
|
||||
url = github:NixOS/nixpkgs?ref=pull/244135/head;
|
||||
};
|
||||
|
||||
# Point the default nixpkgs at one of those. This avoids having getting a
|
||||
# _third_ package set involved and gives a way to provide what should be a
|
||||
# working experience by default (that is, if nixpkgs doesn't get
|
||||
# overridden).
|
||||
nixpkgs.follows = "nixpkgs-unstable";
|
||||
# Point the default nixpkgs at one of those.
|
||||
nixpkgs.follows = "nixpkgs-24_11";
|
||||
|
||||
# Also get flake-utils for simplified multi-system definitions.
|
||||
flake-utils = {
|
||||
@ -153,7 +132,8 @@
|
||||
[ tahoe-lafs ] ++
|
||||
tahoe-lafs.passthru.extras.i2p ++
|
||||
tahoe-lafs.passthru.extras.tor ++
|
||||
tahoe-lafs.passthru.extras.unittest
|
||||
tahoe-lafs.passthru.extras.unittest ++
|
||||
[ hatchling hatch-vcs ]
|
||||
)).overrideAttrs (old: {
|
||||
# See the similar override in makeRuntimeEnv'.
|
||||
name = packageName pyVersion;
|
||||
@ -219,10 +199,11 @@
|
||||
program =
|
||||
let
|
||||
python = "${makeTestEnv pyVersion}/bin/python";
|
||||
hatchling = "${makeTestEnv pyVersion}/bin/hatchling";
|
||||
in
|
||||
writeScript "unit-tests"
|
||||
''
|
||||
${python} setup.py update_version
|
||||
${hatchling} build --hooks-only # Write _version.py
|
||||
export TAHOE_LAFS_HYPOTHESIS_PROFILE=ci
|
||||
export PYTHONPATH=$PWD/src
|
||||
${python} -m twisted.trial "$@"
|
||||
|
@@ -161,6 +161,6 @@ def test_anonymous_client(reactor, request, temp_dir, flog_gatherer, tor_network
yield util.await_client_ready(normie)

anonymoose = yield _create_anonymous_node(reactor, 'anonymoose', 8102, request, temp_dir, flog_gatherer, tor_network, introducer_furl, 1)
yield util.await_client_ready(anonymoose, minimum_number_of_servers=1, timeout=600)
yield util.await_client_ready(anonymoose, minimum_number_of_servers=1, timeout=1200)

yield upload_to_one_download_from_the_other(reactor, temp_dir, normie, anonymoose)

newsfragments/4114.minor (new file, empty)
newsfragments/4115.minor (new file, empty)

newsfragments/4116.documentation (new file)
@@ -0,0 +1 @@
Add a global Sphinx-generated glossary. Link the static GBS glossary to the global glossary.

newsfragments/4126.minor (new file, empty)
newsfragments/4130.minor (new file, empty)
newsfragments/4132.minor (new file, 1 blank line)

newsfragments/4133.installation (new file)
@@ -0,0 +1 @@
Now using the "hatch" build system, and pyproject.toml (exclusively).

newsfragments/4134.minor (new file)
@@ -0,0 +1,4 @@
Avoid private cache from Cachix until we can restore it.
Update nixpkgs to 24.11, which is well cached for now.
Stop packaging and testing on nixpkgs/python39 (too old).
Start packaging and testing on nixpkgs/python312 instead.

newsfragments/4141.minor (new file)
@@ -0,0 +1 @@
Pinning base images of builders to improve reproducibility of checks for PRs.
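These `newsfragments/<ticket>.<type>` files are the per-change release-note snippets the project collects; the empty `.minor` ones simply record that a ticket's change needs no substantive note. A sketch of adding one for a hypothetical ticket number, following the pattern seen in the files added by this commit:

```bash
# Hypothetical ticket 9999; the suffix picks the news category
# (.documentation, .installation, .minor, ...) as used above.
echo "Describe the user-visible change here." > newsfragments/9999.documentation
git add newsfragments/9999.documentation
```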
@ -1,9 +0,0 @@
|
||||
{ klein, fetchPypi }:
|
||||
klein.overrideAttrs (old: rec {
|
||||
pname = "klein";
|
||||
version = "23.5.0";
|
||||
src = fetchPypi {
|
||||
inherit pname version;
|
||||
sha256 = "sha256-kGkSt6tBDZp/NRICg5w81zoqwHe9AHHIYcMfDu92Aoc=";
|
||||
};
|
||||
})
|
@ -1,57 +0,0 @@
|
||||
# package https://gitlab.com/tahoe-lafs/pycddl
|
||||
#
|
||||
# also in the process of being pushed upstream
|
||||
# https://github.com/NixOS/nixpkgs/pull/221220
|
||||
#
|
||||
# we should switch to the upstream package when it is available from our
|
||||
# minimum version of nixpkgs.
|
||||
#
|
||||
# if you need to update this package to a new pycddl release then
|
||||
#
|
||||
# 1. change value given to `buildPythonPackage` for `version` to match the new
|
||||
# release
|
||||
#
|
||||
# 2. change the value given to `fetchPypi` for `sha256` to `lib.fakeHash`
|
||||
#
|
||||
# 3. run `nix-build`
|
||||
#
|
||||
# 4. there will be an error about a hash mismatch. change the value given to
|
||||
# `fetchPypi` for `sha256` to the "actual" hash value report.
|
||||
#
|
||||
# 5. change the value given to `cargoDeps` for `hash` to lib.fakeHash`.
|
||||
#
|
||||
# 6. run `nix-build`
|
||||
#
|
||||
# 7. there will be an error about a hash mismatch. change the value given to
|
||||
# `cargoDeps` for `hash` to the "actual" hash value report.
|
||||
#
|
||||
# 8. run `nix-build`. it should succeed. if it does not, seek assistance.
|
||||
#
|
||||
{ lib, fetchPypi, python, buildPythonPackage, rustPlatform }:
|
||||
buildPythonPackage rec {
|
||||
pname = "pycddl";
|
||||
version = "0.6.1";
|
||||
format = "pyproject";
|
||||
|
||||
src = fetchPypi {
|
||||
inherit pname version;
|
||||
sha256 = "sha256-63fe8UJXEH6t4l7ujV8JDvlGb7q3kL6fHHATFdklzFc=";
|
||||
};
|
||||
|
||||
# Without this, when building for PyPy, `maturin build` seems to fail to
|
||||
# find the interpreter at all and then fails early in the build process with
|
||||
# an error saying "unsupported Python interpreter". We can easily point
|
||||
# directly at the relevant interpreter, so do that.
|
||||
maturinBuildFlags = [ "--interpreter" python.executable ];
|
||||
|
||||
nativeBuildInputs = with rustPlatform; [
|
||||
maturinBuildHook
|
||||
cargoSetupHook
|
||||
];
|
||||
|
||||
cargoDeps = rustPlatform.fetchCargoTarball {
|
||||
inherit src;
|
||||
name = "${pname}-${version}";
|
||||
hash = "sha256-ssDEKRd3Y9/10oXBZHCxvlRkl9KMh3pGYbCkM4rXThQ=";
|
||||
};
|
||||
}
|
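The long comment at the top of the removed nix/pycddl.nix describes a hash-update loop that generalizes to most `fetchPypi`/`cargoDeps` packaging. A compressed sketch of that loop; the file name is from this diff, the commands are ordinary Nix usage, and editing by hand works just as well as the illustrative sed:

```bash
# 1. Bump `version`, then put a placeholder hash in place of the old one.
sed -i 's/sha256 = "sha256-[^"]*"/sha256 = lib.fakeHash/' nix/pycddl.nix

# 2. Build; Nix stops with a "hash mismatch" error that prints the real hash.
nix-build

# 3. Copy the reported "got:" hash back into the file, then repeat the same
#    two steps for the `cargoDeps` hash, and finally run nix-build once more.
```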
@ -1,10 +0,0 @@
|
||||
{ pyopenssl, fetchPypi, isPyPy }:
|
||||
pyopenssl.overrideAttrs (old: rec {
|
||||
pname = "pyOpenSSL";
|
||||
version = "23.2.0";
|
||||
name = "${pname}-${version}";
|
||||
src = fetchPypi {
|
||||
inherit pname version;
|
||||
sha256 = "J2+TH1WkUufeppxxc+mE6ypEB85BPJGKo0tV+C+bi6w=";
|
||||
};
|
||||
})
|
@ -42,40 +42,10 @@ in {
|
||||
tahoe-lafs-src = self.lib.cleanSource ../.;
|
||||
};
|
||||
|
||||
# Some dependencies aren't packaged in nixpkgs so supply our own packages.
|
||||
pycddl = self.callPackage ./pycddl.nix { };
|
||||
txi2p = self.callPackage ./txi2p.nix { };
|
||||
|
||||
# Some packages are of somewhat too-old versions - update them.
|
||||
klein = self.callPackage ./klein.nix {
|
||||
# Avoid infinite recursion.
|
||||
inherit (super) klein;
|
||||
};
|
||||
txtorcon = self.callPackage ./txtorcon.nix {
|
||||
inherit (super) txtorcon;
|
||||
};
|
||||
|
||||
# With our customized package set a Twisted unit test fails. Patch the
|
||||
# Twisted test suite to skip that test.
|
||||
# Filed upstream at https://github.com/twisted/twisted/issues/11877
|
||||
twisted = super.twisted.overrideAttrs (old: {
|
||||
patches = (old.patches or []) ++ [ ./twisted.patch ];
|
||||
});
|
||||
|
||||
# Update the version of pyopenssl - and since we're doing that anyway, we
|
||||
# don't need the docs. Unfortunately this triggers a lot of rebuilding of
|
||||
# dependent packages.
|
||||
pyopenssl = dontBuildDocs (self.callPackage ./pyopenssl.nix {
|
||||
inherit (super) pyopenssl;
|
||||
});
|
||||
|
||||
# The cryptography that we get from nixpkgs to satisfy the pyopenssl upgrade
|
||||
# that we did breaks service-identity ... so get a newer version that works.
|
||||
service-identity = self.callPackage ./service-identity.nix { };
|
||||
|
||||
# collections-extended is currently broken for Python 3.11 in nixpkgs but
|
||||
# we know where a working version lives.
|
||||
collections-extended = self.callPackage ./collections-extended.nix {
|
||||
# Avoid infinite recursion.
|
||||
inherit (super) collections-extended;
|
||||
};
|
||||
|
||||
|
@ -1,61 +0,0 @@
|
||||
{ lib
|
||||
, attrs
|
||||
, buildPythonPackage
|
||||
, cryptography
|
||||
, fetchFromGitHub
|
||||
, hatch-fancy-pypi-readme
|
||||
, hatch-vcs
|
||||
, hatchling
|
||||
, idna
|
||||
, pyasn1
|
||||
, pyasn1-modules
|
||||
, pytestCheckHook
|
||||
, pythonOlder
|
||||
, setuptools
|
||||
}:
|
||||
|
||||
buildPythonPackage rec {
|
||||
pname = "service-identity";
|
||||
version = "23.1.0";
|
||||
format = "pyproject";
|
||||
|
||||
disabled = pythonOlder "3.8";
|
||||
|
||||
src = fetchFromGitHub {
|
||||
owner = "pyca";
|
||||
repo = pname;
|
||||
rev = "refs/tags/${version}";
|
||||
hash = "sha256-PGDtsDgRwh7GuuM4OuExiy8L4i3Foo+OD0wMrndPkvo=";
|
||||
};
|
||||
|
||||
nativeBuildInputs = [
|
||||
hatch-fancy-pypi-readme
|
||||
hatch-vcs
|
||||
hatchling
|
||||
setuptools
|
||||
];
|
||||
|
||||
propagatedBuildInputs = [
|
||||
attrs
|
||||
cryptography
|
||||
idna
|
||||
pyasn1
|
||||
pyasn1-modules
|
||||
];
|
||||
|
||||
nativeCheckInputs = [
|
||||
pytestCheckHook
|
||||
];
|
||||
|
||||
pythonImportsCheck = [
|
||||
"service_identity"
|
||||
];
|
||||
|
||||
meta = with lib; {
|
||||
description = "Service identity verification for pyOpenSSL";
|
||||
homepage = "https://service-identity.readthedocs.io";
|
||||
changelog = "https://github.com/pyca/service-identity/releases/tag/${version}";
|
||||
license = licenses.mit;
|
||||
maintainers = with maintainers; [ fab ];
|
||||
};
|
||||
}
|
@ -9,6 +9,7 @@ in
|
||||
}:
|
||||
buildPythonPackage rec {
|
||||
inherit pname version;
|
||||
pyproject = true;
|
||||
src = tahoe-lafs-src;
|
||||
propagatedBuildInputs = with pythonPackages; [
|
||||
attrs
|
||||
@ -22,6 +23,8 @@ buildPythonPackage rec {
|
||||
filelock
|
||||
foolscap
|
||||
future
|
||||
hatchling
|
||||
hatch-vcs
|
||||
klein
|
||||
magic-wormhole
|
||||
netifaces
|
||||
@ -50,7 +53,7 @@ buildPythonPackage rec {
|
||||
txtorcon
|
||||
];
|
||||
i2p = [
|
||||
txi2p
|
||||
txi2p-tahoe
|
||||
];
|
||||
unittest = [
|
||||
beautifulsoup4
|
||||
|
@ -1,12 +0,0 @@
|
||||
diff --git a/src/twisted/internet/test/test_endpoints.py b/src/twisted/internet/test/test_endpoints.py
|
||||
index c650fd8aa6..a1754fd533 100644
|
||||
--- a/src/twisted/internet/test/test_endpoints.py
|
||||
+++ b/src/twisted/internet/test/test_endpoints.py
|
||||
@@ -4214,6 +4214,7 @@ class WrapClientTLSParserTests(unittest.TestCase):
|
||||
connectionCreator = connectionCreatorFromEndpoint(reactor, endpoint)
|
||||
self.assertEqual(connectionCreator._hostname, "\xe9xample.example.com")
|
||||
|
||||
+ @skipIf(True, "self.assertFalse(plainClient.transport.disconnecting) fails")
|
||||
def test_tls(self):
|
||||
"""
|
||||
When passed a string endpoint description beginning with C{tls:},
|
@ -1,39 +0,0 @@
|
||||
# package https://github.com/tahoe-lafs/txi2p
|
||||
#
|
||||
# if you need to update this package to a new txi2p release then
|
||||
#
|
||||
# 1. change value given to `buildPythonPackage` for `version` to match the new
|
||||
# release
|
||||
#
|
||||
# 2. change the value given to `fetchPypi` for `sha256` to `lib.fakeHash`
|
||||
#
|
||||
# 3. run `nix-build`
|
||||
#
|
||||
# 4. there will be an error about a hash mismatch. change the value given to
|
||||
# `fetchPypi` for `sha256` to the "actual" hash value report.
|
||||
#
|
||||
# 5. if there are new runtime dependencies then add them to the argument list
|
||||
# at the top. if there are new test dependencies add them to the
|
||||
# `checkInputs` list.
|
||||
#
|
||||
# 6. run `nix-build`. it should succeed. if it does not, seek assistance.
|
||||
#
|
||||
{ fetchPypi
|
||||
, buildPythonPackage
|
||||
, parsley
|
||||
, twisted
|
||||
, unittestCheckHook
|
||||
}:
|
||||
buildPythonPackage rec {
|
||||
pname = "txi2p-tahoe";
|
||||
version = "0.3.7";
|
||||
|
||||
src = fetchPypi {
|
||||
inherit pname version;
|
||||
hash = "sha256-+Vs9zaFS+ACI14JNxEme93lnWmncdZyFAmnTH0yhOiY=";
|
||||
};
|
||||
|
||||
propagatedBuildInputs = [ twisted parsley ];
|
||||
checkInputs = [ unittestCheckHook ];
|
||||
pythonImportsCheck = [ "parsley" "ometa"];
|
||||
}
|
@ -1,9 +0,0 @@
|
||||
{ txtorcon, fetchPypi }:
|
||||
txtorcon.overrideAttrs (old: rec {
|
||||
pname = "txtorcon";
|
||||
version = "23.5.0";
|
||||
src = fetchPypi {
|
||||
inherit pname version;
|
||||
hash = "sha256-k/2Aqd1QX2mNCGT+k9uLapwRRLX+uRUwggtw7YmCZRw=";
|
||||
};
|
||||
})
|
pyproject.toml (300 lines changed)
@ -1,3 +1,299 @@
|
||||
[project]
|
||||
name = "tahoe-lafs"
|
||||
dynamic = ["version"]
|
||||
description = "secure, decentralized, fault-tolerant file store"
|
||||
readme = "README.rst"
|
||||
requires-python = ">=3.9"
|
||||
license = "GPL-2.0-or-later" # see README.rst -- there is an alternative licence
|
||||
authors = [
|
||||
{ name = "the Tahoe-LAFS project", email = "tahoe-dev@lists.tahoe-lafs.org" }
|
||||
]
|
||||
#keywords = [
|
||||
# "privacy"
|
||||
#]
|
||||
classifiers = [
|
||||
"Development Status :: 5 - Production/Stable",
|
||||
"Environment :: Console",
|
||||
"Environment :: Web Environment",
|
||||
"License :: OSI Approved :: GNU General Public License (GPL)",
|
||||
"License :: DFSG approved",
|
||||
"License :: Other/Proprietary License",
|
||||
"Intended Audience :: Developers",
|
||||
"Intended Audience :: End Users/Desktop",
|
||||
"Intended Audience :: System Administrators",
|
||||
"Operating System :: Microsoft",
|
||||
"Operating System :: Microsoft :: Windows",
|
||||
"Operating System :: Unix",
|
||||
"Operating System :: POSIX :: Linux",
|
||||
"Operating System :: POSIX",
|
||||
"Operating System :: MacOS :: MacOS X",
|
||||
"Operating System :: OS Independent",
|
||||
"Natural Language :: English",
|
||||
"Programming Language :: C",
|
||||
"Programming Language :: Python",
|
||||
"Programming Language :: Python :: 3",
|
||||
"Programming Language :: Python :: 3.9",
|
||||
"Programming Language :: Python :: 3.10",
|
||||
"Programming Language :: Python :: 3.11",
|
||||
"Programming Language :: Python :: 3.12",
|
||||
"Topic :: Utilities",
|
||||
"Topic :: System :: Systems Administration",
|
||||
"Topic :: System :: Filesystems",
|
||||
"Topic :: System :: Distributed Computing",
|
||||
"Topic :: Software Development :: Libraries",
|
||||
"Topic :: System :: Archiving :: Backup",
|
||||
"Topic :: System :: Archiving :: Mirroring",
|
||||
"Topic :: System :: Archiving",
|
||||
]
|
||||
dependencies = [
|
||||
"zfec >= 1.1.0",
|
||||
|
||||
# zope.interface >= 3.6.0 is required for Twisted >= 12.1.0.
|
||||
"zope.interface >= 3.6.0",
|
||||
|
||||
# * foolscap < 0.5.1 had a performance bug which spent O(N**2) CPU for
|
||||
# transferring large mutable files of size N.
|
||||
# * foolscap < 0.6 is incompatible with Twisted 10.2.0.
|
||||
# * foolscap 0.6.1 quiets a DeprecationWarning.
|
||||
# * foolscap < 0.6.3 is incompatible with Twisted 11.1.0 and newer.
|
||||
# * foolscap 0.8.0 generates 2048-bit RSA-with-SHA-256 signatures,
|
||||
# rather than 1024-bit RSA-with-MD5. This also allows us to work
|
||||
# with a FIPS build of OpenSSL.
|
||||
# * foolscap >= 0.12.3 provides tcp/tor/i2p connection handlers we need,
|
||||
# and allocate_tcp_port
|
||||
# * foolscap >= 0.12.5 has ConnectionInfo and ReconnectionInfo
|
||||
# * foolscap >= 0.12.6 has an i2p.sam_endpoint() that takes kwargs
|
||||
# * foolscap 0.13.2 drops i2p support completely
|
||||
# * foolscap >= 21.7 is necessary for Python 3 with i2p support.
|
||||
# * foolscap >= 23.3 is necessary for Python 3.11.
|
||||
"foolscap >= 21.7.0",
|
||||
"foolscap >= 23.3.0; python_version > '3.10'",
|
||||
|
||||
# * cryptography 2.6 introduced some ed25519 APIs we rely on. Note that
|
||||
# Twisted[conch] also depends on cryptography and Twisted[tls]
|
||||
# transitively depends on cryptography. So it's anyone's guess what
|
||||
# version of cryptography will *really* be installed.
|
||||
"cryptography >= 2.6",
|
||||
|
||||
# * Used for custom HTTPS validation
|
||||
"pyOpenSSL >= 23.2.0",
|
||||
|
||||
# * The SFTP frontend depends on Twisted 11.0.0 to fix the SSH server
|
||||
# rekeying bug <https://twistedmatrix.com/trac/ticket/4395>
|
||||
# * The SFTP frontend and manhole depend on the conch extra. However, we
|
||||
# can't explicitly declare that without an undesirable dependency on gmpy,
|
||||
# as explained in ticket #2740.
|
||||
# * Due to a setuptools bug, we need to declare a dependency on the tls
|
||||
# extra even though we only depend on it via foolscap.
|
||||
# * Twisted >= 15.1.0 is the first version that provided the [tls] extra.
|
||||
# * Twisted-16.1.0 fixes https://twistedmatrix.com/trac/ticket/8223,
|
||||
# which otherwise causes test_system to fail (DirtyReactorError, due to
|
||||
# leftover timers)
|
||||
# * Twisted-16.4.0 introduces `python -m twisted.trial` which is needed
|
||||
# for coverage testing
|
||||
# * Twisted 16.6.0 drops the undesirable gmpy dependency from the conch
|
||||
# extra, letting us use that extra instead of trying to duplicate its
|
||||
# dependencies here. Twisted[conch] >18.7 introduces a dependency on
|
||||
# bcrypt. It is nice to avoid that if the user ends up with an older
|
||||
# version of Twisted. That's hard to express except by using the extra.
|
||||
#
|
||||
# * Twisted 18.4.0 adds `client` and `host` attributes to `Request` in the
|
||||
# * initializer, needed by logic in our custom `Request` subclass.
|
||||
#
|
||||
# In a perfect world, Twisted[conch] would be a dependency of an "sftp"
|
||||
# extra. However, pip fails to resolve all of the
# dependencies when asked for Twisted[tls] *and* Twisted[conch].
|
||||
# Specifically, "Twisted[conch]" (as the later requirement) is ignored.
|
||||
# If there were a Tahoe-LAFS sftp extra that depended on
|
||||
# Twisted[conch] and install_requires only included Twisted[tls] then
|
||||
# `pip install tahoe-lafs[sftp]` would not install requirements
|
||||
# specified by Twisted[conch]. Since this would be the *whole point* of
|
||||
# an sftp extra in Tahoe-LAFS, there is no point in having one.
|
||||
# * Twisted 19.10 introduces Site.getContentFile which we use to get
|
||||
# temporary upload files placed into a per-node temporary directory.
|
||||
# * Twisted 22.8.0 added support for coroutine-returning functions in many
|
||||
# places (mainly via `maybeDeferred`)
|
||||
"Twisted[tls,conch] >= 22.8.0",
|
||||
|
||||
"PyYAML >= 3.11",
|
||||
|
||||
"six >= 1.10.0",
|
||||
|
||||
# For 'tahoe invite' and 'tahoe join'
|
||||
"magic-wormhole >= 0.10.2",
|
||||
|
||||
# We want a new enough version to support custom JSON encoders.
|
||||
"eliot >= 1.14.0",
|
||||
|
||||
"pyrsistent",
|
||||
|
||||
# A great way to define types of values.
|
||||
"attrs >= 20.1.0",
|
||||
|
||||
# WebSocket library for twisted and asyncio
|
||||
"autobahn >= 22.4.3",
|
||||
|
||||
# Support for Python 3 transition
|
||||
"future >= 0.18.2",
|
||||
|
||||
# Discover local network configuration
|
||||
"netifaces",
|
||||
|
||||
# Utility code:
|
||||
"pyutil >= 3.3.0",
|
||||
|
||||
# Linux distribution detection:
|
||||
"distro >= 1.4.0",
|
||||
|
||||
# For the RangeMap datastructure. Need 2.0.2 at least for bugfixes.
|
||||
"collections-extended >= 2.0.2",
|
||||
|
||||
# HTTP server and client
|
||||
# Latest version is necessary to work with latest werkzeug:
|
||||
"klein >= 23.5.0",
|
||||
# 2.2.0 has a bug: https://github.com/pallets/werkzeug/issues/2465
|
||||
"werkzeug != 2.2.0",
|
||||
"treq",
|
||||
# 5.6.0 excluded because https://github.com/agronholm/cbor2/issues/208
|
||||
"cbor2 != 5.6.0",
|
||||
|
||||
# 0.6 adds the ability to decode CBOR. 0.6.1 fixes PyPy.
|
||||
"pycddl >= 0.6.1",
|
||||
|
||||
# Command-line parsing
|
||||
"click >= 8.1.1",
|
||||
|
||||
# for pid-file support
|
||||
"psutil",
|
||||
"filelock",
|
||||
|
||||
# Duplicate the Twisted pywin32 dependency here. See
|
||||
# https://tahoe-lafs.org/trac/tahoe-lafs/ticket/2392 for some
|
||||
# discussion.
|
||||
"pywin32 != 226;sys_platform=='win32'"
|
||||
]
|
||||
|
||||
|
||||
[project.scripts]
|
||||
tahoe = "allmydata.scripts.runner:run"
|
||||
grid-manager = "allmydata.cli.grid_manager:grid_manager"
|
||||
|
||||
|
||||
[project.urls]
|
||||
Homepage = "https://tahoe-lafs.org/"
|
||||
Documentation = "https://tahoe-lafs.readthedocs.org/"
|
||||
"Source code" = "https://github.com/tahoe-lafs/tahoe-lafs/"
|
||||
|
||||
|
||||
[project.optional-dependencies]
|
||||
tor = [
|
||||
# 23.5 added support for custom TLS contexts in web_agent(), which is
|
||||
# needed for the HTTP storage client to run over Tor.
|
||||
"txtorcon >= 23.5.0",
|
||||
]
|
||||
i2p = [
|
||||
# txi2p has Python 3 support in master branch, but it has not been
|
||||
# released -- see https://github.com/str4d/txi2p/issues/10. We
|
||||
# could use a fork for Python 3 until txi2p's maintainers are back
|
||||
# in action. For Python 2, we could continue using the txi2p
|
||||
# version about which no one has complained to us so far.
|
||||
"txi2p; python_version < '3.0'",
|
||||
"txi2p-tahoe >= 0.3.5; python_version > '3.0'",
|
||||
]
|
||||
build = [
|
||||
"dulwich",
|
||||
"gpg",
|
||||
"hatchling",
|
||||
"hatch-vcs"
|
||||
]
|
||||
|
||||
testenv = [
|
||||
# Pin all of these versions for the same reason you ever want to
|
||||
# pin anything: to prevent new releases with regressions from
|
||||
# introducing spurious failures into CI runs for whatever
|
||||
# development work is happening at the time. The versions
|
||||
# selected here are just the current versions at the time.
|
||||
# Bumping them to keep up with future releases is fine as long
|
||||
# as those releases are known to actually work.
|
||||
"pip==23.3.1",
|
||||
"wheel==0.41.3",
|
||||
"subunitreporter==23.8.0",
|
||||
"python-subunit==1.4.2",
|
||||
"junitxml==0.7",
|
||||
"coverage==7.2.5",
|
||||
]
|
||||
|
||||
# Here are the library dependencies of the test suite.
|
||||
test = [
|
||||
"mock",
|
||||
"pytest",
|
||||
"pytest-twisted",
|
||||
"hypothesis >= 3.6.1",
|
||||
"towncrier",
|
||||
"testtools",
|
||||
"fixtures",
|
||||
"beautifulsoup4",
|
||||
"html5lib",
|
||||
# Pin old version until
|
||||
# https://github.com/paramiko/paramiko/issues/1961 is fixed.
|
||||
"paramiko < 2.9",
|
||||
"pytest-timeout",
|
||||
# Does our OpenMetrics endpoint adhere to the spec:
|
||||
"prometheus-client == 0.11.0",
|
||||
|
||||
"tahoe-lafs[tor]", # our own "tor" extra
|
||||
"tahoe-lafs[i2p]" # our own "i2p" extra
|
||||
]
|
||||
|
||||
|
||||
|
||||
|
||||
[tool.hatch.version]
|
||||
source = "vcs"
|
||||
tag-pattern = "tahoe-lafs-(.*)"
|
||||
|
||||
[tool.hatch.build.hooks.vcs]
|
||||
version-file = "src/allmydata/_version.py"
|
||||
|
||||
|
||||
[build-system]
|
||||
requires = ["setuptools"]
|
||||
build-backend = "setuptools.build_meta"
|
||||
requires = ["hatchling", "hatch-vcs"]
|
||||
build-backend = "hatchling.build"
|
||||
|
||||
# https://github.com/ofek/hatch-vcs/issues/35#issuecomment-1452025896
|
||||
[tool.hatch.build]
|
||||
include = [
|
||||
"src/",
|
||||
"COPYING.GPL",
|
||||
"COPYING.TGPPL.rst",
|
||||
"CREDITS",
|
||||
"Makefile",
|
||||
"NEWS.rst",
|
||||
"Tahoe.home",
|
||||
"relnotes.txt",
|
||||
"Dockerfile",
|
||||
"tox.ini",
|
||||
".appveyor.yml",
|
||||
".travis.yml",
|
||||
".coveragerc",
|
||||
"*.xhtml",
|
||||
"*.png",
|
||||
"*.css",
|
||||
"*.svg",
|
||||
"docs/",
|
||||
"misc/",
|
||||
"static/",
|
||||
"integration/",
|
||||
"src/allmydata/test/data/*.txt",
|
||||
"src/allmydata/test/data/*.yaml"
|
||||
]
|
||||
exclude = [
|
||||
"*~",
|
||||
"*.pyc",
|
||||
"#*#",
|
||||
"venv*/",
|
||||
".tox/"
|
||||
]
|
||||
|
||||
[tool.hatch.build.targets.wheel]
|
||||
packages = ["src/allmydata"]
|
||||
|
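The `[build-system]` table above replaces the setuptools backend with hatchling, and `[tool.hatch.version]` plus the vcs build hook derive `_version.py` from git tags. A sketch of what a local build looks like under the new backend; the `build` front end is an illustrative choice, not something this diff pins:

```bash
# PEP 517 front end (illustrative); hatchling and hatch-vcs are pulled in
# automatically from [build-system].requires.
python -m pip install build
python -m build
# hatch-vcs resolves the version from git tags matching "tahoe-lafs-(.*)" and
# writes it to src/allmydata/_version.py via the [tool.hatch.build.hooks.vcs] hook.
```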
setup.cfg (deleted, 17 lines)
@ -1,17 +0,0 @@
|
||||
[aliases]
|
||||
build = update_version build
|
||||
sdist = update_version sdist
|
||||
install = update_version install
|
||||
develop = update_version develop
|
||||
bdist_egg = update_version bdist_egg
|
||||
bdist_wheel = update_version bdist_wheel
|
||||
|
||||
# This has been replaced by ruff (see .ruff.toml), which has same checks as
|
||||
# flake8 plus many more, and is also faster. However, we're keeping this config
|
||||
# in case people still use flake8 in IDEs, etc..
|
||||
[flake8]
|
||||
# Enforce all pyflakes constraints, and also prohibit tabs for indentation.
|
||||
# Reference:
|
||||
# https://flake8.pycqa.org/en/latest/user/error-codes.html
|
||||
# https://pycodestyle.pycqa.org/en/latest/intro.html#error-codes
|
||||
select = F, W191
|
setup.py (deleted, 452 lines)
@ -1,452 +0,0 @@
|
||||
#! /usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
import sys
|
||||
|
||||
# Tahoe-LAFS -- secure, distributed storage grid
|
||||
#
|
||||
# Copyright © 2006-2012 The Tahoe-LAFS Software Foundation
|
||||
#
|
||||
# This file is part of Tahoe-LAFS.
|
||||
#
|
||||
# See the docs/about.rst file for licensing information.
|
||||
|
||||
import os, subprocess, re
|
||||
from io import open
|
||||
|
||||
basedir = os.path.dirname(os.path.abspath(__file__))
|
||||
|
||||
# locate our version number
|
||||
|
||||
def read_version_py(infname):
|
||||
try:
|
||||
verstrline = open(infname, "rt").read()
|
||||
except EnvironmentError:
|
||||
return None
|
||||
else:
|
||||
VSRE = r"^verstr = ['\"]([^'\"]*)['\"]"
|
||||
mo = re.search(VSRE, verstrline, re.M)
|
||||
if mo:
|
||||
return mo.group(1)
|
||||
|
||||
VERSION_PY_FILENAME = 'src/allmydata/_version.py'
|
||||
version = read_version_py(VERSION_PY_FILENAME)
|
||||
|
||||
install_requires = [
|
||||
# importlib.resources.files and friends are new in Python 3.9.
|
||||
"importlib_resources; python_version < '3.9'",
|
||||
|
||||
"zfec >= 1.1.0",
|
||||
|
||||
# zope.interface >= 3.6.0 is required for Twisted >= 12.1.0.
|
||||
"zope.interface >= 3.6.0",
|
||||
|
||||
# * foolscap < 0.5.1 had a performance bug which spent O(N**2) CPU for
|
||||
# transferring large mutable files of size N.
|
||||
# * foolscap < 0.6 is incompatible with Twisted 10.2.0.
|
||||
# * foolscap 0.6.1 quiets a DeprecationWarning.
|
||||
# * foolscap < 0.6.3 is incompatible with Twisted 11.1.0 and newer.
|
||||
# * foolscap 0.8.0 generates 2048-bit RSA-with-SHA-256 signatures,
|
||||
# rather than 1024-bit RSA-with-MD5. This also allows us to work
|
||||
# with a FIPS build of OpenSSL.
|
||||
# * foolscap >= 0.12.3 provides tcp/tor/i2p connection handlers we need,
|
||||
# and allocate_tcp_port
|
||||
# * foolscap >= 0.12.5 has ConnectionInfo and ReconnectionInfo
|
||||
# * foolscap >= 0.12.6 has an i2p.sam_endpoint() that takes kwargs
|
||||
# * foolscap 0.13.2 drops i2p support completely
|
||||
# * foolscap >= 21.7 is necessary for Python 3 with i2p support.
|
||||
# * foolscap >= 23.3 is necessary for Python 3.11.
|
||||
"foolscap >= 21.7.0",
|
||||
"foolscap >= 23.3.0; python_version > '3.10'",
|
||||
|
||||
# * cryptography 2.6 introduced some ed25519 APIs we rely on. Note that
|
||||
# Twisted[conch] also depends on cryptography and Twisted[tls]
|
||||
# transitively depends on cryptography. So it's anyone's guess what
|
||||
# version of cryptography will *really* be installed.
|
||||
"cryptography >= 2.6",
|
||||
|
||||
# * Used for custom HTTPS validation
|
||||
"pyOpenSSL >= 23.2.0",
|
||||
|
||||
# * The SFTP frontend depends on Twisted 11.0.0 to fix the SSH server
|
||||
# rekeying bug <https://twistedmatrix.com/trac/ticket/4395>
|
||||
# * The SFTP frontend and manhole depend on the conch extra. However, we
|
||||
# can't explicitly declare that without an undesirable dependency on gmpy,
|
||||
# as explained in ticket #2740.
|
||||
# * Due to a setuptools bug, we need to declare a dependency on the tls
|
||||
# extra even though we only depend on it via foolscap.
|
||||
# * Twisted >= 15.1.0 is the first version that provided the [tls] extra.
|
||||
# * Twisted-16.1.0 fixes https://twistedmatrix.com/trac/ticket/8223,
|
||||
# which otherwise causes test_system to fail (DirtyReactorError, due to
|
||||
# leftover timers)
|
||||
# * Twisted-16.4.0 introduces `python -m twisted.trial` which is needed
|
||||
# for coverage testing
|
||||
# * Twisted 16.6.0 drops the undesirable gmpy dependency from the conch
|
||||
# extra, letting us use that extra instead of trying to duplicate its
|
||||
# dependencies here. Twisted[conch] >18.7 introduces a dependency on
|
||||
# bcrypt. It is nice to avoid that if the user ends up with an older
|
||||
# version of Twisted. That's hard to express except by using the extra.
|
||||
#
|
||||
# * Twisted 18.4.0 adds `client` and `host` attributes to `Request` in the
|
||||
# * initializer, needed by logic in our custom `Request` subclass.
|
||||
#
|
||||
# In a perfect world, Twisted[conch] would be a dependency of an "sftp"
|
||||
# extra. However, pip fails to resolve the dependencies all
|
||||
# dependencies when asked for Twisted[tls] *and* Twisted[conch].
|
||||
# Specifically, "Twisted[conch]" (as the later requirement) is ignored.
|
||||
# If there were an Tahoe-LAFS sftp extra that dependended on
|
||||
# Twisted[conch] and install_requires only included Twisted[tls] then
|
||||
# `pip install tahoe-lafs[sftp]` would not install requirements
|
||||
# specified by Twisted[conch]. Since this would be the *whole point* of
|
||||
# an sftp extra in Tahoe-LAFS, there is no point in having one.
|
||||
# * Twisted 19.10 introduces Site.getContentFile which we use to get
|
||||
# temporary upload files placed into a per-node temporary directory.
|
||||
# * Twisted 22.8.0 added support for coroutine-returning functions in many
|
||||
# places (mainly via `maybeDeferred`)
|
||||
"Twisted[tls,conch] >= 22.8.0",
|
||||
|
||||
"PyYAML >= 3.11",
|
||||
|
||||
"six >= 1.10.0",
|
||||
|
||||
# for 'tahoe invite' and 'tahoe join'
|
||||
"magic-wormhole >= 0.10.2",
|
||||
|
||||
# We want a new enough version to support custom JSON encoders.
|
||||
"eliot >= 1.14.0",
|
||||
|
||||
"pyrsistent",
|
||||
|
||||
# A great way to define types of values.
|
||||
"attrs >= 20.1.0",
|
||||
|
||||
# WebSocket library for twisted and asyncio
|
||||
"autobahn >= 22.4.3",
|
||||
|
||||
# Support for Python 3 transition
|
||||
"future >= 0.18.2",
|
||||
|
||||
# Discover local network configuration
|
||||
"netifaces",
|
||||
|
||||
# Utility code:
|
||||
"pyutil >= 3.3.0",
|
||||
|
||||
# Linux distribution detection:
|
||||
"distro >= 1.4.0",
|
||||
|
||||
# For the RangeMap datastructure. Need 2.0.2 at least for bugfixes.
|
||||
"collections-extended >= 2.0.2",
|
||||
|
||||
# HTTP server and client
|
||||
# Latest version is necessary to work with latest werkzeug:
|
||||
"klein >= 23.5.0",
|
||||
# 2.2.0 has a bug: https://github.com/pallets/werkzeug/issues/2465
|
||||
"werkzeug != 2.2.0",
|
||||
"treq",
|
||||
# 5.6.0 excluded because https://github.com/agronholm/cbor2/issues/208
|
||||
"cbor2 != 5.6.0",
|
||||
|
||||
# 0.6 adds the ability to decode CBOR. 0.6.1 fixes PyPy.
|
||||
"pycddl >= 0.6.1",
|
||||
|
||||
# Command-line parsing
|
||||
"click >= 8.1.1",
|
||||
|
||||
# for pid-file support
|
||||
"psutil",
|
||||
"filelock",
|
||||
]
|
||||
|
||||
tor_requires = [
|
||||
# 23.5 added support for custom TLS contexts in web_agent(), which is
|
||||
# needed for the HTTP storage client to run over Tor.
|
||||
"txtorcon >= 23.5.0",
|
||||
]
|
||||
|
||||
i2p_requires = [
|
||||
# txi2p has Python 3 support in master branch, but it has not been
|
||||
# released -- see https://github.com/str4d/txi2p/issues/10. We
|
||||
# could use a fork for Python 3 until txi2p's maintainers are back
|
||||
# in action. For Python 2, we could continue using the txi2p
|
||||
# version about which no one has complained to us so far.
|
||||
"txi2p; python_version < '3.0'",
|
||||
"txi2p-tahoe >= 0.3.5; python_version > '3.0'",
|
||||
]
|
||||
|
||||
if len(sys.argv) > 1 and sys.argv[1] == '--fakedependency':
|
||||
del sys.argv[1]
|
||||
install_requires += ["fakedependency >= 1.0.0"]
|
||||
|
||||
from setuptools import find_packages, setup
|
||||
from setuptools import Command
|
||||
from setuptools.command import install
|
||||
|
||||
|
||||
trove_classifiers=[
|
||||
"Development Status :: 5 - Production/Stable",
|
||||
"Environment :: Console",
|
||||
"Environment :: Web Environment",
|
||||
"License :: OSI Approved :: GNU General Public License (GPL)",
|
||||
"License :: DFSG approved",
|
||||
"License :: Other/Proprietary License",
|
||||
"Intended Audience :: Developers",
|
||||
"Intended Audience :: End Users/Desktop",
|
||||
"Intended Audience :: System Administrators",
|
||||
"Operating System :: Microsoft",
|
||||
"Operating System :: Microsoft :: Windows",
|
||||
"Operating System :: Unix",
|
||||
"Operating System :: POSIX :: Linux",
|
||||
"Operating System :: POSIX",
|
||||
"Operating System :: MacOS :: MacOS X",
|
||||
"Operating System :: OS Independent",
|
||||
"Natural Language :: English",
|
||||
"Programming Language :: C",
|
||||
"Programming Language :: Python",
|
||||
"Programming Language :: Python :: 3",
|
||||
"Topic :: Utilities",
|
||||
"Topic :: System :: Systems Administration",
|
||||
"Topic :: System :: Filesystems",
|
||||
"Topic :: System :: Distributed Computing",
|
||||
"Topic :: Software Development :: Libraries",
|
||||
"Topic :: System :: Archiving :: Backup",
|
||||
"Topic :: System :: Archiving :: Mirroring",
|
||||
"Topic :: System :: Archiving",
|
||||
]
|
||||
|
||||
|
||||
GIT_VERSION_BODY = '''
|
||||
# This _version.py is generated from git metadata by the tahoe setup.py.
|
||||
|
||||
__pkgname__ = "%(pkgname)s"
|
||||
real_version = "%(version)s"
|
||||
full_version = "%(full)s"
|
||||
branch = "%(branch)s"
|
||||
verstr = "%(normalized)s"
|
||||
__version__ = verstr
|
||||
'''
|
||||
|
||||
def run_command(args, cwd=None):
|
||||
use_shell = sys.platform == "win32"
|
||||
try:
|
||||
p = subprocess.Popen(args, stdout=subprocess.PIPE, cwd=cwd, shell=use_shell)
|
||||
except EnvironmentError as e: # if this gives a SyntaxError, note that Tahoe-LAFS requires Python 3.8+
|
||||
print("Warning: unable to run %r." % (" ".join(args),))
|
||||
print(e)
|
||||
return None
|
||||
stdout = p.communicate()[0].strip()
|
||||
if p.returncode != 0:
|
||||
print("Warning: %r returned error code %r." % (" ".join(args), p.returncode))
|
||||
return None
|
||||
return stdout
|
||||
|
||||
|
||||
def versions_from_git(tag_prefix):
|
||||
# This runs 'git' from the directory that contains this file. That either
|
||||
# means someone ran a setup.py command (and this code is in
|
||||
# versioneer.py, thus the containing directory is the root of the source
|
||||
# tree), or someone ran a project-specific entry point (and this code is
|
||||
# in _version.py, thus the containing directory is somewhere deeper in
|
||||
# the source tree). This only gets called if the git-archive 'subst'
|
||||
# variables were *not* expanded, and _version.py hasn't already been
|
||||
# rewritten with a short version string, meaning we're inside a checked
|
||||
# out source tree.
|
||||
|
||||
# versions_from_git (as copied from python-versioneer) returns strings
|
||||
# like "1.9.0-25-gb73aba9-dirty", which means we're in a tree with
|
||||
# uncommited changes (-dirty), the latest checkin is revision b73aba9,
|
||||
# the most recent tag was 1.9.0, and b73aba9 has 25 commits that weren't
|
||||
# in 1.9.0 . The narrow-minded NormalizedVersion parser that takes our
|
||||
# output (meant to enable sorting of version strings) refuses most of
|
||||
# that. Tahoe uses a function named suggest_normalized_version() that can
|
||||
# handle "1.9.0.post25", so dumb down our output to match.
|
||||
|
||||
try:
|
||||
source_dir = os.path.dirname(os.path.abspath(__file__))
|
||||
except NameError as e:
|
||||
# some py2exe/bbfreeze/non-CPython implementations don't do __file__
|
||||
print("Warning: unable to find version because we could not obtain the source directory.")
|
||||
print(e)
|
||||
return {}
|
||||
stdout = run_command(["git", "describe", "--tags", "--dirty", "--always"],
|
||||
cwd=source_dir)
|
||||
if stdout is None:
|
||||
# run_command already complained.
|
||||
return {}
|
||||
stdout = stdout.decode("ascii")
|
||||
if not stdout.startswith(tag_prefix):
|
||||
print("Warning: tag %r doesn't start with prefix %r." % (stdout, tag_prefix))
|
||||
return {}
|
||||
version = stdout[len(tag_prefix):]
|
||||
pieces = version.split("-")
|
||||
if len(pieces) == 1:
|
||||
normalized_version = pieces[0]
|
||||
else:
|
||||
normalized_version = "%s.post%s" % (pieces[0], pieces[1])
|
||||
|
||||
stdout = run_command(["git", "rev-parse", "HEAD"], cwd=source_dir)
|
||||
if stdout is None:
|
||||
# run_command already complained.
|
||||
return {}
|
||||
full = stdout.decode("ascii").strip()
|
||||
if version.endswith("-dirty"):
|
||||
full += "-dirty"
|
||||
normalized_version += ".dev0"
|
||||
|
||||
# Thanks to Jistanidiot at <http://stackoverflow.com/questions/6245570/get-current-branch-name>.
|
||||
stdout = run_command(["git", "rev-parse", "--abbrev-ref", "HEAD"], cwd=source_dir)
|
||||
branch = (stdout or b"unknown").decode("ascii").strip()
|
||||
|
||||
# this returns native strings (bytes on py2, unicode on py3)
|
||||
return {"version": version, "normalized": normalized_version,
|
||||
"full": full, "branch": branch}
|
||||
|
||||
# setup.cfg has an [aliases] section which runs "update_version" before many
|
||||
# commands (like "build" and "sdist") that need to know our package version
|
||||
# ahead of time. If you add different commands (or if we forgot some), you
|
||||
# may need to add it to setup.cfg and configure it to run update_version
|
||||
# before your command.
|
||||
|
||||
class UpdateVersion(Command):
|
||||
description = "update _version.py from revision-control metadata"
|
||||
user_options = install.install.user_options
|
||||
|
||||
def initialize_options(self):
|
||||
pass
|
||||
def finalize_options(self):
|
||||
pass
|
||||
def run(self):
|
||||
global version
|
||||
verstr = version
|
||||
if os.path.isdir(os.path.join(basedir, ".git")):
|
||||
verstr = self.try_from_git()
|
||||
|
||||
if verstr:
|
||||
self.distribution.metadata.version = verstr
|
||||
else:
|
||||
print("""\
|
||||
********************************************************************
|
||||
Warning: no version information found. This may cause tests to fail.
|
||||
********************************************************************
|
||||
""")
|
||||
|
||||
def try_from_git(self):
|
||||
# If we change the release tag names, we must change this too
|
||||
versions = versions_from_git("tahoe-lafs-")
|
||||
|
||||
# setup.py might be run by either py2 or py3 (when run by tox, which
|
||||
# uses py3 on modern debian/ubuntu distros). We want this generated
|
||||
# file to contain native strings on both (str=bytes in py2,
|
||||
# str=unicode in py3)
|
||||
if versions:
|
||||
body = GIT_VERSION_BODY % {
|
||||
"pkgname": self.distribution.get_name(),
|
||||
"version": versions["version"],
|
||||
"normalized": versions["normalized"],
|
||||
"full": versions["full"],
|
||||
"branch": versions["branch"],
|
||||
}
|
||||
f = open(VERSION_PY_FILENAME, "wb")
|
||||
f.write(body.encode("ascii"))
|
||||
f.close()
|
||||
print("Wrote normalized version %r into '%s'" % (versions["normalized"], VERSION_PY_FILENAME))
|
||||
|
||||
return versions.get("normalized", None)
|
||||
|
||||
class PleaseUseTox(Command):
|
||||
user_options = []
|
||||
def initialize_options(self):
|
||||
pass
|
||||
def finalize_options(self):
|
||||
pass
|
||||
|
||||
def run(self):
|
||||
print("ERROR: Please use 'tox' to run the test suite.")
|
||||
sys.exit(1)
|
||||
|
||||
setup_args = {}
|
||||
if version:
|
||||
setup_args["version"] = version
|
||||
|
||||
setup(name="tahoe-lafs", # also set in __init__.py
|
||||
description='secure, decentralized, fault-tolerant file store',
|
||||
long_description=open('README.rst', 'r', encoding='utf-8').read(),
|
||||
author='the Tahoe-LAFS project',
|
||||
author_email='tahoe-dev@lists.tahoe-lafs.org',
|
||||
url='https://tahoe-lafs.org/',
|
||||
license='GNU GPL', # see README.rst -- there is an alternative licence
|
||||
cmdclass={"update_version": UpdateVersion,
|
||||
"test": PleaseUseTox,
|
||||
},
|
||||
package_dir = {'':'src'},
|
||||
packages=find_packages('src') + ['allmydata.test.plugins'],
|
||||
classifiers=trove_classifiers,
|
||||
# We support Python 3.8 or later, 3.13 is untested for now
|
||||
python_requires=">=3.8, <3.13",
|
||||
install_requires=install_requires,
|
||||
extras_require={
|
||||
# Duplicate the Twisted pywin32 dependency here. See
|
||||
# https://tahoe-lafs.org/trac/tahoe-lafs/ticket/2392 for some
|
||||
# discussion.
|
||||
':sys_platform=="win32"': ["pywin32 != 226"],
|
||||
"build": [
|
||||
"dulwich",
|
||||
"gpg",
|
||||
],
|
||||
|
||||
# Here are the dependencies required to set up a reproducible test
|
||||
# environment. This could be for CI or local development. These
|
||||
# are *not* library dependencies of the test suite itself. They are
|
||||
# the tools we use to run the test suite at all.
|
||||
"testenv": [
|
||||
# Pin all of these versions for the same reason you ever want to
|
||||
# pin anything: to prevent new releases with regressions from
|
||||
# introducing spurious failures into CI runs for whatever
|
||||
# development work is happening at the time. The versions
|
||||
# selected here are just the current versions at the time.
|
||||
# Bumping them to keep up with future releases is fine as long
|
||||
# as those releases are known to actually work.
|
||||
"pip==23.3.1",
|
||||
"wheel==0.41.3",
|
||||
"subunitreporter==23.8.0",
|
||||
"python-subunit==1.4.2",
|
||||
"junitxml==0.7",
|
||||
"coverage==7.2.5",
|
||||
],
|
||||
|
||||
# Here are the library dependencies of the test suite.
|
||||
"test": [
|
||||
"mock",
|
||||
"pytest",
|
||||
"pytest-twisted",
|
||||
"hypothesis >= 3.6.1",
|
||||
"towncrier",
|
||||
"testtools",
|
||||
"fixtures",
|
||||
"beautifulsoup4",
|
||||
"html5lib",
|
||||
# Pin old version until
|
||||
# https://github.com/paramiko/paramiko/issues/1961 is fixed.
|
||||
"paramiko < 2.9",
|
||||
"pytest-timeout",
|
||||
# Does our OpenMetrics endpoint adhere to the spec:
|
||||
"prometheus-client == 0.11.0",
|
||||
] + tor_requires + i2p_requires,
|
||||
"tor": tor_requires,
|
||||
"i2p": i2p_requires,
|
||||
},
|
||||
package_data={"allmydata.web": ["*.xhtml",
|
||||
"static/*.js", "static/*.png", "static/*.css",
|
||||
"static/img/*.png",
|
||||
"static/css/*.css",
|
||||
],
|
||||
"allmydata": ["ported-modules.txt"],
|
||||
},
|
||||
include_package_data=True,
|
||||
entry_points={
|
||||
'console_scripts': [
|
||||
'tahoe = allmydata.scripts.runner:run',
|
||||
'grid-manager = allmydata.cli.grid_manager:grid_manager',
|
||||
]
|
||||
},
|
||||
**setup_args
|
||||
)
|
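The versions_from_git helper deleted above is what the hatch-vcs configuration now replaces. As a rough, standalone sketch of the normalization it performed (the git-describe string below is invented, based on the example in the old comments):

# Standalone sketch of the deleted normalization logic; the describe string is
# an invented example, and real builds now rely on hatch-vcs instead.
def normalize(describe_output, tag_prefix="tahoe-lafs-"):
    version = describe_output[len(tag_prefix):]
    pieces = version.split("-")
    if len(pieces) == 1:
        normalized = pieces[0]
    else:
        normalized = "%s.post%s" % (pieces[0], pieces[1])
    if version.endswith("-dirty"):
        normalized += ".dev0"
    return normalized

print(normalize("tahoe-lafs-1.9.0-25-gb73aba9-dirty"))  # -> "1.9.0.post25.dev0"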
@ -6,7 +6,6 @@ from __future__ import annotations
|
|
from typing import (
    Union,
    Optional,
    Sequence,
    Mapping,
@ -45,7 +44,6 @@ from zope.interface import implementer
from hyperlink import DecodedURL
import treq
from treq.client import HTTPClient
from treq.testing import StubTreq
from OpenSSL import SSL
from werkzeug.http import parse_content_range_header
|
@ -434,7 +432,7 @@ class StorageClient(object):
    # The URL should be a HTTPS URL ("https://...")
    _base_url: DecodedURL
    _swissnum: bytes
    _treq: Union[treq, StubTreq, HTTPClient]
    _treq: HTTPClient
    _pool: HTTPConnectionPool
    _clock: IReactorTime
    # Are we running unit tests?
@ -27,7 +27,8 @@ from queue import Queue
from pycddl import ValidationError as CDDLValidationError
from hypothesis import assume, given, strategies as st, settings as hypothesis_settings
from fixtures import Fixture, TempDir, MonkeyPatch
from treq.testing import StubTreq
from treq.client import HTTPClient
from treq.testing import StubTreq, RequestTraversalAgent
from klein import Klein
from hyperlink import DecodedURL
from collections_extended import RangeMap
@ -714,7 +715,9 @@ class GenericHTTPAPITests(SyncTestCase):
        If nothing is given in the ``Authorization`` header at all an
        ``Unauthorized`` response is returned.
        """
        client = StubTreq(self.http.http_server.get_resource())
        client = HTTPClient(
            RequestTraversalAgent(self.http.http_server.get_resource())
        )
        response = self.http.result_of_with_flush(
            client.request(
                "GET",
@ -115,7 +115,7 @@ class CountingDataUploadable(upload.Data):
class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase):
    """Foolscap integration-y tests."""
    FORCE_FOOLSCAP_FOR_STORAGE = True
    timeout = 180
    timeout = 300
|
    @property
    def basedir(self):
@ -4,11 +4,8 @@ Ported to Python 3.
from __future__ import annotations
|
from six import ensure_str
import sys
if sys.version_info[:2] >= (3, 9):
    from importlib.resources import files as resource_files, as_file
else:
    from importlib_resources import files as resource_files, as_file
from importlib.resources import files as resource_files
from importlib.resources import as_file
from contextlib import ExitStack
import weakref
from typing import Optional, Union, TypeVar, overload
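The hunk above drops the importlib_resources backport, which was only needed before Python 3.9, in favour of the standard library. A minimal sketch of the stdlib pair now used unconditionally, with a hypothetical resource path:

# Minimal sketch of the importlib.resources API the code switches to; the
# resource path here is hypothetical and only for illustration.
from importlib.resources import files as resource_files, as_file

traversable = resource_files("allmydata") / "web" / "welcome.xhtml"
with as_file(traversable) as path:
    print(path)  # a concrete filesystem path, extracted to a temp file if needed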
6 tox.ini
@ -7,19 +7,17 @@
# the tox-gh-actions package.
[gh-actions]
python =
    3.8: py38-coverage
    3.9: py39-coverage
    3.10: py310-coverage
    3.11: py311-coverage
    3.12: py312-coverage
    pypy-3.8: pypy38
    pypy-3.9: pypy39
|
[pytest]
twisted = 1
|
[tox]
envlist = typechecks,codechecks,py{38,39,310,311,312}-{coverage},pypy27,pypy38,pypy39,integration
envlist = typechecks,codechecks,py{39,310,311,312}-{coverage},pypy39,integration
minversion = 4
|
[testenv]
@ -138,7 +136,7 @@ commands =
# Different versions of Python have a different standard library, and we
# want to be compatible with all the variations. For speed's sake we only do
# the earliest and latest versions.
mypy --python-version=3.8 src
mypy --python-version=3.9 src
mypy --python-version=3.12 src