Merge branch 'master' into 3521.test_storage_client-no-mock

This commit is contained in:
Jean-Paul Calderone 2020-12-07 12:03:50 -05:00 committed by GitHub
commit d415bda72a
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
77 changed files with 2713 additions and 2137 deletions

View File

@ -21,7 +21,7 @@ jobs:
steps: steps:
# Get vcpython27 on Windows + Python 2.7, to build zfec # Get vcpython27 on Windows + Python 2.7, to build netifaces
# extension. See https://chocolatey.org/packages/vcpython27 and # extension. See https://chocolatey.org/packages/vcpython27 and
# https://github.com/crazy-max/ghaction-chocolatey # https://github.com/crazy-max/ghaction-chocolatey
- name: Install MSVC 9.0 for Python 2.7 [Windows] - name: Install MSVC 9.0 for Python 2.7 [Windows]
@ -78,6 +78,15 @@ jobs:
steps: steps:
# Get vcpython27 for Windows + Python 2.7, to build netifaces
# extension. See https://chocolatey.org/packages/vcpython27 and
# https://github.com/crazy-max/ghaction-chocolatey
- name: Install MSVC 9.0 for Python 2.7 [Windows]
if: matrix.os == 'windows-latest' && matrix.python-version == '2.7'
uses: crazy-max/ghaction-chocolatey@v1
with:
args: install vcpython27
- name: Install Tor [Ubuntu] - name: Install Tor [Ubuntu]
if: matrix.os == 'ubuntu-latest' if: matrix.os == 'ubuntu-latest'
run: sudo apt install tor run: sudo apt install tor
@ -92,12 +101,6 @@ jobs:
with: with:
args: install tor args: install tor
- name: Install MSVC 9.0 for Python 2.7 [Windows]
if: matrix.os == 'windows-latest' && matrix.python-version == '2.7'
uses: crazy-max/ghaction-chocolatey@v1
with:
args: install vcpython27
- name: Check out Tahoe-LAFS sources - name: Check out Tahoe-LAFS sources
uses: actions/checkout@v2 uses: actions/checkout@v2
@ -141,7 +144,7 @@ jobs:
steps: steps:
# Get vcpython27 on Windows + Python 2.7, to build zfec # Get vcpython27 for Windows + Python 2.7, to build netifaces
# extension. See https://chocolatey.org/packages/vcpython27 and # extension. See https://chocolatey.org/packages/vcpython27 and
# https://github.com/crazy-max/ghaction-chocolatey # https://github.com/crazy-max/ghaction-chocolatey
- name: Install MSVC 9.0 for Python 2.7 [Windows] - name: Install MSVC 9.0 for Python 2.7 [Windows]

View File

@ -1,9 +1,10 @@
repos: repos:
- repo: local - repo: "local"
hooks: hooks:
- id: codechecks - id: "codechecks"
name: codechecks name: "codechecks"
stages: ["push"] stages: ["push"]
language: "system"
files: ".py$"
entry: "tox -e codechecks" entry: "tox -e codechecks"
language: system pass_filenames: true
pass_filenames: false

View File

@ -13,8 +13,6 @@ MAKEFLAGS += --warn-undefined-variables
MAKEFLAGS += --no-builtin-rules MAKEFLAGS += --no-builtin-rules
# Local target variables # Local target variables
VCS_HOOK_SAMPLES=$(wildcard .git/hooks/*.sample)
VCS_HOOKS=$(VCS_HOOK_SAMPLES:%.sample=%)
PYTHON=python PYTHON=python
export PYTHON export PYTHON
PYFLAKES=flake8 PYFLAKES=flake8
@ -31,15 +29,6 @@ TEST_SUITE=allmydata
default: default:
@echo "no default target" @echo "no default target"
.PHONY: install-vcs-hooks
## Install the VCS hooks to run linters on commit and all tests on push
install-vcs-hooks: .git/hooks/pre-commit .git/hooks/pre-push
.PHONY: uninstall-vcs-hooks
## Remove the VCS hooks
uninstall-vcs-hooks: .tox/create-venvs.log
"./$(dir $(<))py36/bin/pre-commit" uninstall || true
"./$(dir $(<))py36/bin/pre-commit" uninstall -t pre-push || true
.PHONY: test .PHONY: test
## Run all tests and code reports ## Run all tests and code reports
test: .tox/create-venvs.log test: .tox/create-venvs.log
@ -215,7 +204,7 @@ clean:
rm -f *.pkg rm -f *.pkg
.PHONY: distclean .PHONY: distclean
distclean: clean uninstall-vcs-hooks distclean: clean
rm -rf src/*.egg-info rm -rf src/*.egg-info
rm -f src/allmydata/_version.py rm -f src/allmydata/_version.py
rm -f src/allmydata/_appname.py rm -f src/allmydata/_appname.py
@ -261,6 +250,3 @@ src/allmydata/_version.py:
.tox/create-venvs.log: tox.ini setup.py .tox/create-venvs.log: tox.ini setup.py
tox --notest -p all | tee -a "$(@)" tox --notest -p all | tee -a "$(@)"
$(VCS_HOOKS): .tox/create-venvs.log .pre-commit-config.yaml
"./$(dir $(<))py36/bin/pre-commit" install --hook-type $(@:.git/hooks/%=%)

View File

@ -39,9 +39,7 @@ If you are on Windows, please see :doc:`windows` for platform-specific
instructions. instructions.
If you are on a Mac, you can either follow these instructions, or use the If you are on a Mac, you can either follow these instructions, or use the
pre-packaged bundle described in :doc:`OS-X`. The Tahoe project hosts pre-packaged bundle described in :doc:`OS-X`.
pre-compiled "wheels" for all dependencies, so use the ``--find-links=``
option described below to avoid needing a compiler.
Many Linux distributions include Tahoe-LAFS packages. Debian and Ubuntu users Many Linux distributions include Tahoe-LAFS packages. Debian and Ubuntu users
can ``apt-get install tahoe-lafs``. See `OSPackages`_ for other can ``apt-get install tahoe-lafs``. See `OSPackages`_ for other
@ -54,9 +52,14 @@ Preliminaries
============= =============
If you don't use a pre-packaged copy of Tahoe, you can build it yourself. If you don't use a pre-packaged copy of Tahoe, you can build it yourself.
You'll need Python2.7, pip, and virtualenv. On unix-like platforms, you will You'll need Python2.7, pip, and virtualenv.
need a C compiler, the Python development headers, and some libraries Tahoe-LAFS depends on some libraries which require a C compiler to build.
(libffi-dev and libssl-dev). However, for many platforms, PyPI hosts already-built packages of libraries.
If there is no already-built package for your platform,
you will need a C compiler,
the Python development headers,
and some libraries (libffi-dev and libssl-dev).
On a modern Debian/Ubuntu-derived distribution, this command will get you On a modern Debian/Ubuntu-derived distribution, this command will get you
everything you need:: everything you need::
@ -64,8 +67,7 @@ everything you need::
apt-get install build-essential python-dev libffi-dev libssl-dev libyaml-dev python-virtualenv apt-get install build-essential python-dev libffi-dev libssl-dev libyaml-dev python-virtualenv
On OS-X, install pip and virtualenv as described below. If you want to On OS-X, install pip and virtualenv as described below. If you want to
compile the dependencies yourself (instead of using ``--find-links`` to take compile the dependencies yourself, you'll also need to install
advantage of the pre-compiled ones we host), you'll also need to install
Xcode and its command-line tools. Xcode and its command-line tools.
**Note** that Tahoe-LAFS depends on `openssl 1.1.1c` or greater. **Note** that Tahoe-LAFS depends on `openssl 1.1.1c` or greater.
@ -168,12 +170,6 @@ from PyPI with ``venv/bin/pip install tahoe-lafs``. After installation, run
% %
On OS-X, instead of ``pip install tahoe-lafs``, use this command to take
advantage of the hosted pre-compiled wheels::
venv/bin/pip install --find-links=https://tahoe-lafs.org/deps tahoe-lafs
Install From a Source Tarball Install From a Source Tarball
----------------------------- -----------------------------

View File

@ -398,13 +398,13 @@ This section controls *when* Tor and I2P are used. The ``[tor]`` and
``[i2p]`` sections (described later) control *how* Tor/I2P connections are ``[i2p]`` sections (described later) control *how* Tor/I2P connections are
managed. managed.
All Tahoe nodes need to make a connection to the Introducer; the ``[client] All Tahoe nodes need to make a connection to the Introducer; the
introducer.furl`` setting (described below) indicates where the Introducer ``private/introducers.yaml`` file (described below) configures where one or more
lives. Tahoe client nodes must also make connections to storage servers: Introducers live. Tahoe client nodes must also make connections to storage
these targets are specified in announcements that come from the Introducer. servers: these targets are specified in announcements that come from the
Both are expressed as FURLs (a Foolscap URL), which include a list of Introducer. Both are expressed as FURLs (a Foolscap URL), which include a
"connection hints". Each connection hint describes one (of perhaps many) list of "connection hints". Each connection hint describes one (of perhaps
network endpoints where the service might live. many) network endpoints where the service might live.
Connection hints include a type, and look like: Connection hints include a type, and look like:
@ -580,6 +580,8 @@ Client Configuration
``introducer.furl = (FURL string, mandatory)`` ``introducer.furl = (FURL string, mandatory)``
DEPRECATED. See :ref:`introducer-definitions`.
This FURL tells the client how to connect to the introducer. Each This FURL tells the client how to connect to the introducer. Each
Tahoe-LAFS grid is defined by an introducer. The introducer's FURL is Tahoe-LAFS grid is defined by an introducer. The introducer's FURL is
created by the introducer node and written into its private base created by the introducer node and written into its private base
@ -965,29 +967,28 @@ This section describes these other files.
with as many people as possible, put the empty string (so that with as many people as possible, put the empty string (so that
``private/convergence`` is a zero-length file). ``private/convergence`` is a zero-length file).
Additional Introducer Definitions .. _introducer-definitions:
=================================
The ``private/introducers.yaml`` file defines additional Introducers. The Introducer Definitions
first introducer is defined in ``tahoe.cfg``, in ``[client] ======================
introducer.furl``. To use two or more Introducers, choose a locally-unique
"petname" for each one, then define their FURLs in The ``private/introducers.yaml`` file defines Introducers.
``private/introducers.yaml`` like this:: Choose a locally-unique "petname" for each one then define their FURLs in ``private/introducers.yaml`` like this::
introducers: introducers:
petname2: petname2:
furl: FURL2 furl: "FURL2"
petname3: petname3:
furl: FURL3 furl: "FURL3"
Servers will announce themselves to all configured introducers. Clients will Servers will announce themselves to all configured introducers. Clients will
merge the announcements they receive from all introducers. Nothing will merge the announcements they receive from all introducers. Nothing will
re-broadcast an announcement (i.e. telling introducer 2 about something you re-broadcast an announcement (i.e. telling introducer 2 about something you
heard from introducer 1). heard from introducer 1).
If you omit the introducer definitions from both ``tahoe.cfg`` and If you omit the introducer definitions from ``introducers.yaml``,
``introducers.yaml``, the node will not use an Introducer at all. Such the node will not use an Introducer at all.
"introducerless" clients must be configured with static servers (described Such "introducerless" clients must be configured with static servers (described
below), or they will not be able to upload and download files. below), or they will not be able to upload and download files.
Static Server Definitions Static Server Definitions
@ -1152,7 +1153,6 @@ a legal one.
timeout.disconnect = 1800 timeout.disconnect = 1800
[client] [client]
introducer.furl = pb://ok45ssoklj4y7eok5c3xkmj@tcp:tahoe.example:44801/ii3uumo
helper.furl = pb://ggti5ssoklj4y7eok5c3xkmj@tcp:helper.tahoe.example:7054/kk8lhr helper.furl = pb://ggti5ssoklj4y7eok5c3xkmj@tcp:helper.tahoe.example:7054/kk8lhr
[storage] [storage]
@ -1163,6 +1163,11 @@ a legal one.
[helper] [helper]
enabled = True enabled = True
To be introduced to storage servers, here is a sample ``private/introducers.yaml`` which can be used in conjunction::
introducers:
examplegrid:
furl: "pb://ok45ssoklj4y7eok5c3xkmj@tcp:tahoe.example:44801/ii3uumo"
Old Configuration Files Old Configuration Files
======================= =======================

View File

@ -5,23 +5,17 @@ Developer Guide
Pre-commit Checks Pre-commit Checks
----------------- -----------------
This project is configured for use with `pre-commit`_ to install `VCS/git hooks`_ which This project is configured for use with `pre-commit`_ to install `VCS/git hooks`_ which perform some static code analysis checks and other code checks to catch common errors.
perform some static code analysis checks and other code checks to catch common errors These hooks can be configured to run before commits or pushes
before each commit and to run the full self-test suite to find less obvious regressions
before each push to a remote.
For example:: For example::
tahoe-lafs $ make install-vcs-hooks tahoe-lafs $ pre-commit install --hook-type pre-push
...
+ ./.tox//py36/bin/pre-commit install --hook-type pre-commit
pre-commit installed at .git/hooks/pre-commit
+ ./.tox//py36/bin/pre-commit install --hook-type pre-push
pre-commit installed at .git/hooks/pre-push pre-commit installed at .git/hooks/pre-push
tahoe-lafs $ python -c "import pathlib; pathlib.Path('src/allmydata/tabbed.py').write_text('def foo():\\n\\tpass\\n')" tahoe-lafs $ echo "undefined" > src/allmydata/undefined_name.py
tahoe-lafs $ git add src/allmydata/tabbed.py tahoe-lafs $ git add src/allmydata/undefined_name.py
tahoe-lafs $ git commit -a -m "Add a file that violates flake8" tahoe-lafs $ git commit -a -m "Add a file that violates flake8"
... tahoe-lafs $ git push
codechecks...............................................................Failed codechecks...............................................................Failed
- hook id: codechecks - hook id: codechecks
- exit code: 1 - exit code: 1
@ -30,58 +24,17 @@ For example::
codechecks inst-nodeps: ... codechecks inst-nodeps: ...
codechecks installed: ... codechecks installed: ...
codechecks run-test-pre: PYTHONHASHSEED='...' codechecks run-test-pre: PYTHONHASHSEED='...'
codechecks run-test: commands[0] | flake8 src static misc setup.py codechecks run-test: commands[0] | flake8 src/allmydata/undefined_name.py
src/allmydata/tabbed.py:2:1: W191 indentation contains tabs src/allmydata/undefined_name.py:1:1: F821 undefined name 'undefined'
ERROR: InvocationError for command ./tahoe-lafs/.tox/codechecks/bin/flake8 src static misc setup.py (exited with code 1) ERROR: InvocationError for command ./tahoe-lafs/.tox/codechecks/bin/flake8 src/allmydata/undefined_name.py (exited with code 1)
___________________________________ summary ____________________________________ ___________________________________ summary ____________________________________
ERROR: codechecks: commands failed ERROR: codechecks: commands failed
...
To uninstall:: To uninstall::
tahoe-lafs $ make uninstall-vcs-hooks tahoe-lafs $ pre-commit uninstall --hook-type pre-push
...
+ ./.tox/py36/bin/pre-commit uninstall
pre-commit uninstalled
+ ./.tox/py36/bin/pre-commit uninstall -t pre-push
pre-push uninstalled pre-push uninstalled
Note that running the full self-test suite takes several minutes so expect pushing to
take some time. If you can't or don't want to wait for the hooks in some cases, use the
``--no-verify`` option to ``$ git commit ...`` or ``$ git push ...``. Alternatively,
see the `pre-commit`_ documentation and CLI help output and use the committed
`pre-commit configuration`_ as a starting point to write a local, uncommitted
``../.pre-commit-config.local.yaml`` configuration to use instead. For example::
tahoe-lafs $ ./.tox/py36/bin/pre-commit --help
tahoe-lafs $ ./.tox/py36/bin/pre-commit instll --help
tahoe-lafs $ cp "./.pre-commit-config.yaml" "./.pre-commit-config.local.yaml"
tahoe-lafs $ editor "./.pre-commit-config.local.yaml"
...
tahoe-lafs $ ./.tox/py36/bin/pre-commit install -c "./.pre-commit-config.local.yaml" -t pre-push
pre-commit installed at .git/hooks/pre-push
tahoe-lafs $ git commit -a -m "Add a file that violates flake8"
[3398.pre-commit 29f8f43d2] Add a file that violates flake8
1 file changed, 2 insertions(+)
create mode 100644 src/allmydata/tabbed.py
tahoe-lafs $ git push
...
codechecks...............................................................Failed
- hook id: codechecks
- exit code: 1
GLOB sdist-make: ./tahoe-lafs/setup.py
codechecks inst-nodeps: ...
codechecks installed: ...
codechecks run-test-pre: PYTHONHASHSEED='...'
codechecks run-test: commands[0] | flake8 src static misc setup.py
src/allmydata/tabbed.py:2:1: W191 indentation contains tabs
ERROR: InvocationError for command ./tahoe-lafs/.tox/codechecks/bin/flake8 src static misc setup.py (exited with code 1)
___________________________________ summary ____________________________________
ERROR: codechecks: commands failed
...
error: failed to push some refs to 'github.com:jaraco/tahoe-lafs.git'
.. _`pre-commit`: https://pre-commit.com .. _`pre-commit`: https://pre-commit.com

View File

@ -20,7 +20,7 @@ Config setting File Comment
``[node]log_gatherer.furl`` ``BASEDIR/log_gatherer.furl`` (one per line) ``[node]log_gatherer.furl`` ``BASEDIR/log_gatherer.furl`` (one per line)
``[node]timeout.keepalive`` ``BASEDIR/keepalive_timeout`` ``[node]timeout.keepalive`` ``BASEDIR/keepalive_timeout``
``[node]timeout.disconnect`` ``BASEDIR/disconnect_timeout`` ``[node]timeout.disconnect`` ``BASEDIR/disconnect_timeout``
``[client]introducer.furl`` ``BASEDIR/introducer.furl`` ``BASEDIR/introducer.furl`` ``BASEDIR/private/introducers.yaml``
``[client]helper.furl`` ``BASEDIR/helper.furl`` ``[client]helper.furl`` ``BASEDIR/helper.furl``
``[client]key_generator.furl`` ``BASEDIR/key_generator.furl`` ``[client]key_generator.furl`` ``BASEDIR/key_generator.furl``
``[client]stats_gatherer.furl`` ``BASEDIR/stats_gatherer.furl`` ``[client]stats_gatherer.furl`` ``BASEDIR/stats_gatherer.furl``

View File

@ -1,110 +0,0 @@
How to Make a Tahoe-LAFS Release
Any developer with push priveleges can do most of these steps, but a
"Release Maintainer" is required for some signing operations -- these
steps are marked with (Release Maintainer). Currently, the following
people are Release Maintainers:
- Brian Warner (https://github.com/warner)
* select features/PRs for new release [0/2]
- [ ] made sure they are tagged/labeled
- [ ] merged all release PRs
* basic quality checks [0/3]
- [ ] all travis CI checks pass
- [ ] all appveyor checks pass
- [ ] all buildbot workers pass their checks
* freeze master branch [0/1]
- [ ] announced the freeze of the master branch on IRC (i.e. non-release PRs won't be merged until after release)
* sync documentation [0/7]
- [ ] NEWS.rst: (run "tox -e news")
- [ ] added final release name and date to top-most item in NEWS.rst
- [ ] updated relnotes.txt (change next, last versions; summarize NEWS)
- [ ] updated CREDITS
- [ ] updated docs/known_issues.rst
- [ ] docs/INSTALL.rst only points to current tahoe-lafs-X.Y.Z.tar.gz source code file
- [ ] updated https://tahoe-lafs.org/hacktahoelafs/
* sign + build the tag [0/8]
- [ ] code passes all checks / tests (i.e. all CI is green)
- [ ] documentation is ready (see above)
- [ ] (Release Maintainer): git tag -s -u 0xE34E62D06D0E69CFCA4179FFBDE0D31D68666A7A -m "release Tahoe-LAFS-X.Y.Z" tahoe-lafs-X.Y.Z
- [ ] build code locally:
tox -e py27,codechecks,deprecations,docs,integration,upcoming-deprecations
- [ ] created tarballs (they'll be in dist/ for later comparison)
tox -e tarballs
- [ ] release version is reporting itself as intended version
ls dist/
- [ ] 'git pull' doesn't pull anything
- [ ] pushed tag to trigger buildslaves
git push official master TAGNAME
- [ ] confirmed Dockerhub built successfully:
https://hub.docker.com/r/tahoelafs/base/builds/
* sign the release artifacts [0/8]
- [ ] (Release Maintainer): pushed signed tag (should trigger Buildbot builders)
- [ ] Buildbot workers built all artifacts successfully
- [ ] downloaded upstream tarballs+wheels
- [ ] announce on IRC that master is unlocked
- [ ] compared upstream tarballs+wheels against local copies
- [ ] (Release Maintainer): signed each upstream artifacts with "gpg -ba -u 0xE34E62D06D0E69CFCA4179FFBDE0D31D68666A7A FILE"
- [ ] added to relnotes.txt: [0/3]
- [ ] prefix with SHA256 of tarballs
- [ ] release pubkey
- [ ] git revision hash
- [ ] GPG-signed the release email with release key (write to
relnotes.txt.asc) Ideally this is a Release Maintainer, but could
be any developer
* publish release artifacts [0/9]
- [ ] uploaded to PyPI via: twine upload dist/*
- [ ] uploaded *.asc to org ~source/downloads/
- [ ] test install works properly: pip install tahoe-lafs
- [ ] copied the release tarballs and signatures to tahoe-lafs.org: ~source/downloads/
- [ ] moved old release out of ~source/downloads (to downloads/old/?)
- [ ] ensured readthedocs.org updated
- [ ] uploaded wheels to https://tahoe-lafs.org/deps/
- [ ] uploaded release to https://github.com/tahoe-lafs/tahoe-lafs/releases
* check release downloads [0/]
- [ ] test PyPI via: pip install tahoe-lafs
- [ ] https://github.com/tahoe-lafs/tahoe-lafs/releases
- [ ] https://tahoe-lafs.org/downloads/
- [ ] https://tahoe-lafs.org/deps/
* document release in trac [0/]
- [ ] closed the Milestone on the trac Roadmap
* unfreeze master branch [0/]
- [ ] announced on IRC that new PRs will be looked at/merged
* announce new release [0/]
- [ ] sent release email and relnotes.txt.asc to tahoe-announce@tahoe-lafs.org
- [ ] sent release email and relnotes.txt.asc to tahoe-dev@tahoe-lafs.org
- [ ] updated Wiki front page: version on download link, News column
- [ ] updated Wiki "Doc": parade of release notes (with rev of NEWS.rst)
- [ ] make an "announcement of new release" on freshmeat (XXX still a thing?)
- [ ] make an "announcement of new release" on launchpad
- [ ] tweeted as @tahoelafs
- [ ] emailed relnotes.txt.asc to below listed mailing-lists/organizations
- [ ] also announce release to (trimmed from previous version of this doc):
- twisted-python@twistedmatrix.com
- liberationtech@lists.stanford.edu
- lwn@lwn.net
- p2p-hackers@lists.zooko.com
- python-list@python.org
- http://listcultures.org/pipermail/p2presearch_listcultures.org/
- cryptopp-users@googlegroups.com
- (others?)

197
docs/release-checklist.rst Normal file
View File

@ -0,0 +1,197 @@
=================
Release Checklist
=================
These instructions were produced while making the 1.15.0 release. They
are based on the original instructions (in old revisions in the file
`docs/how_to_make_a_tahoe-lafs_release.org`).
Any contributor can do the first part of the release preparation. Only
certain contributors can perform other parts. These are the two main
sections of this checklist (and could be done by different people).
A final section describes how to announce the release.
Any Contributor
---------------
Anyone who can create normal PRs should be able to complete this
portion of the release process.
Prepare for the Release
```````````````````````
The `master` branch should always be releasable.
It may be worth asking (on IRC or mailing-ist) if anything will be
merged imminently (for example, "I will prepare a release this coming
Tuesday if you want to get anything in").
- Create a ticket for the release in Trac
- Ticket number needed in next section
Create Branch and Apply Updates
```````````````````````````````
- Create a branch for release-candidates (e.g. `XXXX.release-1.15.0.rc0`)
- run `tox -e news` to produce a new NEWS.txt file (this does a commit)
- create the news for the release
- newsfragments/<ticket number>.minor
- commit it
- manually fix NEWS.txt
- proper title for latest release ("Release 1.15.0" instead of "Release ...post1432")
- double-check date (maybe release will be in the future)
- spot-check the release notes (these come from the newsfragments
files though so don't do heavy editing)
- commit these changes
- update "relnotes.txt"
- update all mentions of 1.14.0 -> 1.15.0
- update "previous release" statement and date
- summarize major changes
- commit it
- update "CREDITS"
- are there any new contributors in this release?
- one way: git log release-1.14.0.. | grep Author | sort | uniq
- commit it
- update "docs/known_issues.rst" if appropriate
- update "docs/INSTALL.rst" references to the new release
- Push the branch to github
- Create a (draft) PR; this should trigger CI (note that github
doesn't let you create a PR without some changes on the branch so
running + committing the NEWS.txt file achieves that without changing
any code)
- Confirm CI runs successfully on all platforms
Create Release Candidate
````````````````````````
Before "officially" tagging any release, we will make a
release-candidate available. So there will be at least 1.15.0rc0 (for
example). If there are any problems, an rc1 or rc2 etc may also be
released. Anyone can sign these releases (ideally they'd be signed
"officially" as well, but it's better to get them out than to wait for
that).
Typically expert users will be the ones testing release candidates and
they will need to evaluate which contributors' signatures they trust.
- (all steps above are completed)
- sign the release
- git tag -s -u 0xE34E62D06D0E69CFCA4179FFBDE0D31D68666A7A -m "release Tahoe-LAFS-1.15.0rc0" tahoe-lafs-1.15.0rc0
- (replace the key-id above with your own)
- build all code locally
- these should all pass:
- tox -e py27,codechecks,docs,integration
- these can fail (ideally they should not of course):
- tox -e deprecations,upcoming-deprecations
- build tarballs
- tox -e tarballs
- confirm it at least exists:
- ls dist/ | grep 1.15.0rc0
- inspect and test the tarballs
- install each in a fresh virtualenv
- run `tahoe` command
- when satisfied, sign the tarballs:
- gpg --pinentry=loopback --armor --sign dist/tahoe_lafs-1.15.0rc0-py2-none-any.whl
- gpg --pinentry=loopback --armor --sign dist/tahoe_lafs-1.15.0rc0.tar.bz2
- gpg --pinentry=loopback --armor --sign dist/tahoe_lafs-1.15.0rc0.tar.gz
- gpg --pinentry=loopback --armor --sign dist/tahoe_lafs-1.15.0rc0.zip
Privileged Contributor
-----------------------
Steps in this portion require special access to keys or
infrastructure. For example, **access to tahoe-lafs.org** to upload
binaries or edit HTML.
Hack Tahoe-LAFS
```````````````
Did anyone contribute a hack since the last release? If so, then
https://tahoe-lafs.org/hacktahoelafs/ needs to be updated.
Upload Artifacts
````````````````
Any release-candidate or actual release plus signature (.asc file)
need to be uploaded to https://tahoe-lafs.org in `~source/downloads`
- secure-copy all release artifacts to the download area on the
tahoe-lafs.org host machine. `~source/downloads` on there maps to
https://tahoe-lafs.org/downloads/ on the Web.
- scp dist/*1.15.0* username@tahoe-lafs.org:/home/source/downloads
- the following developers have access to do this:
- exarkun
- meejah
- warner
For the actual release, the tarball and signature files need to be
uploaded to PyPI as well.
- how to do this?
- (original guide says only "twine upload dist/*")
- the following developers have access to do this:
- warner
- exarkun (partial?)
- meejah (partial?)
Announcing the Release Candidate
````````````````````````````````
The release-candidate should be announced by posting to the
mailing-list (tahoe-dev@tahoe-lafs.org). For example:
https://tahoe-lafs.org/pipermail/tahoe-dev/2020-October/009995.html
Is The Release Done Yet?
````````````````````````
If anyone reports a problem with a release-candidate then a new
release-candidate should be made once a fix has been merged to
master. Repeat the above instructions with `rc1` or `rc2` or whatever
is appropriate.
Once a release-candidate has marinated for some time then it can be
made into a the actual release.
XXX Write this section when doing 1.15.0 actual release
(In general, this means dropping the "rcX" part of the release and the
tag, uploading those artifacts, uploading to PyPI, ... )
Announcing the Release
----------------------
mailing-lists
`````````````
A new Tahoe release is traditionally announced on our mailing-list
(tahoe-dev@tahoe-lafs.org). The former version of these instructions
also announced the release on the following other lists:
- tahoe-announce@tahoe-lafs.org
- twisted-python@twistedmatrix.com
- liberationtech@lists.stanford.edu
- lwn@lwn.net
- p2p-hackers@lists.zooko.com
- python-list@python.org
- http://listcultures.org/pipermail/p2presearch_listcultures.org/
- cryptopp-users@googlegroups.com
wiki
````
Edit the "News" section of the front page of https://tahoe-lafs.org
with a link to the mailing-list archive of the announcement message.

View File

@ -65,9 +65,9 @@ Running a Client
To construct a client node, run “``tahoe create-client``”, which will create To construct a client node, run “``tahoe create-client``”, which will create
``~/.tahoe`` to be the node's base directory. Acquire the ``introducer.furl`` ``~/.tahoe`` to be the node's base directory. Acquire the ``introducer.furl``
(see below if you are running your own introducer, or use the one from the (see below if you are running your own introducer, or use the one from the
`TestGrid page`_), and paste it after ``introducer.furl =`` in the `TestGrid page`_), and write it to ``~/.tahoe/private/introducers.yaml``
``[client]`` section of ``~/.tahoe/tahoe.cfg``. Then use “``tahoe run (see :ref:`introducer-definitions`). Then use “``tahoe run ~/.tahoe``”.
~/.tahoe``”. After that, the node should be off and running. The first thing After that, the node should be off and running. The first thing
it will do is connect to the introducer and get itself connected to all other it will do is connect to the introducer and get itself connected to all other
nodes on the grid. nodes on the grid.

View File

@ -8,6 +8,7 @@ the data formats used by Tahoe.
:maxdepth: 2 :maxdepth: 2
outline outline
url
uri uri
file-encoding file-encoding
URI-extension URI-extension

165
docs/specifications/url.rst Normal file
View File

@ -0,0 +1,165 @@
URLs
====
The goal of this document is to completely specify the construction and use of the URLs that Tahoe-LAFS uses for service location.
This includes, but is not limited to, the original Foolscap-based URLs.
These are not to be confused with the URI-like capabilities Tahoe-LAFS uses to refer to stored data.
An attempt is also made to outline the rationale for certain choices about these URLs.
The intended audience for this document is Tahoe-LAFS maintainers and other developers interested in interoperating with Tahoe-LAFS or these URLs.
Background
----------
Tahoe-LAFS first used Foolscap_ for network communication.
Foolscap connection setup takes as an input a Foolscap URL or a *fURL*.
A fURL includes three components:
* the base32-encoded SHA1 hash of the DER form of an x509v3 certificate
* zero or more network addresses [1]_
* an object identifier
A Foolscap client tries to connect to each network address in turn.
If a connection is established then TLS is negotiated.
The server is authenticated by matching its certificate against the hash in the fURL.
A matching certificate serves as proof that the handshaking peer is the correct server.
This is how the client authenticates the server.
The client can then exercise further Foolscap functionality using the fURL's object identifier.
If the object identifier is an unguessable, secret string then it serves as a capability.
This unguessable identifier is sometimes called a `swiss number`_ (or swissnum).
The client's use of the swissnum is what allows the server to authorize the client.
.. _`swiss number`: http://wiki.erights.org/wiki/Swiss_number
NURLs
-----
The authentication and authorization properties of fURLs are a good fit for Tahoe-LAFS' requirements.
These are not inherently tied to the Foolscap protocol itself.
In particular they are beneficial to :doc:`../proposed/http-storage-node-protocol` which uses HTTP instead of Foolscap.
It is conceivable they will also be used with WebSockets at some point.
Continuing to refer to these URLs as fURLs when they are being used for other protocols may cause confusion.
Therefore,
this document coins the name **NURL** for these URLs.
This can be considered to expand to "**N**\ ew URLs" or "Authe\ **N**\ ticating URLs" or "Authorizi\ **N**\ g URLs" as the reader prefers.
The anticipated use for a **NURL** will still be to establish a TLS connection to a peer.
The protocol run over that TLS connection could be Foolscap though it is more likely to be an HTTP-based protocol (such as GBS).
Syntax
------
The EBNF for a NURL is as follows::
nurl = scheme, hash, "@", net-loc-list, "/", swiss-number, [ version1 ]
scheme = "pb://"
hash = unreserved
net-loc-list = net-loc, [ { ",", net-loc } ]
net-loc = tcp-loc | tor-loc | i2p-loc
tcp-loc = [ "tcp:" ], hostname, [ ":", port ]
tor-loc = "tor:", hostname, [ ":", port ]
i2p-loc = "i2p:", i2p-addr, [ ":", port ]
i2p-addr = { unreserved }, ".i2p"
hostname = domain | IPv4address | IPv6address
swiss-number = segment
version1 = "#v=1"
See https://tools.ietf.org/html/rfc3986#section-3.3 for the definition of ``segment``.
See https://tools.ietf.org/html/rfc2396#appendix-A for the definition of ``unreserved``.
See https://tools.ietf.org/html/draft-main-ipaddr-text-rep-02#section-3.1 for the definition of ``IPv4address``.
See https://tools.ietf.org/html/draft-main-ipaddr-text-rep-02#section-3.2 for the definition of ``IPv6address``.
See https://tools.ietf.org/html/rfc1035#section-2.3.1 for the definition of ``domain``.
Versions
--------
Though all NURLs are syntactically compatible some semantic differences are allowed.
These differences are separated into distinct versions.
Version 0
---------
A Foolscap fURL is considered the canonical definition of a version 0 NURL.
Notably,
the hash component is defined as the base32-encoded SHA1 hash of the DER form of an x509v3 certificate.
A version 0 NURL is identified by the absence of the ``v=1`` fragment.
Examples
~~~~~~~~
* ``pb://sisi4zenj7cxncgvdog7szg3yxbrnamy@tcp:127.1:34399/xphmwz6lx24rh2nxlinni``
* ``pb://2uxmzoqqimpdwowxr24q6w5ekmxcymby@localhost:47877/riqhpojvzwxujhna5szkn``
Version 1
---------
The hash component of a version 1 NURL differs in three ways from the prior version.
1. The hash function used is SHA3-224 instead of SHA1.
The security of SHA1 `continues to be eroded`_.
Contrariwise SHA3 is currently the most recent addition to the SHA family by NIST.
The 224 bit instance is chosen to keep the output short and because it offers greater collision resistance than SHA1 was thought to offer even at its inception
(prior to security research showing actual collision resistance is lower).
2. The hash is computed over the certificate's SPKI instead of the whole certificate.
This allows certificate re-generation so long as the public key remains the same.
This is useful to allow contact information to be updated or extension of validity period.
Use of an SPKI hash has also been `explored by the web community`_ during its flirtation with using it for HTTPS certificate pinning
(though this is now largely abandoned).
.. note::
*Only* the certificate's keypair is pinned by the SPKI hash.
The freedom to change every other part of the certificate is coupled with the fact that all other parts of the certificate contain arbitrary information set by the private key holder.
It is neither guaranteed nor expected that a certificate-issuing authority has validated this information.
Therefore,
*all* certificate fields should be considered within the context of the relationship identified by the SPKI hash.
3. The hash is encoded using urlsafe-base64 (without padding) instead of base32.
This provides a more compact representation and minimizes the usability impacts of switching from a 160 bit hash to a 224 bit hash.
A version 1 NURL is identified by the presence of the ``v=1`` fragment.
Though the length of the hash string (38 bytes) could also be used to differentiate it from a version 0 NURL,
there is no guarantee that this will be effective in differentiating it from future versions so this approach should not be used.
It is possible for a client to unilaterally upgrade a version 0 NURL to a version 1 NURL.
After establishing and authenticating a connection the client will have received a copy of the server's certificate.
This is sufficient to compute the new hash and rewrite the NURL to upgrade it to version 1.
This provides stronger authentication assurances for future uses but it is not required.
Examples
~~~~~~~~
* ``pb://1WUX44xKjKdpGLohmFcBNuIRN-8rlv1Iij_7rQ@tcp:127.1:34399/jhjbc3bjbhk#v=1``
* ``pb://azEu8vlRpnEeYm0DySQDeNY3Z2iJXHC_bsbaAw@localhost:47877/64i4aokv4ej#v=1``
.. _`continues to be eroded`: https://en.wikipedia.org/wiki/SHA-1#Cryptanalysis_and_validation
.. _`explored by the web community`: https://www.imperialviolet.org/2011/05/04/pinning.html
.. _Foolscap: https://github.com/warner/foolscap
.. [1] ``foolscap.furl.decode_furl`` is taken as the canonical definition of the syntax of a fURL.
The **location hints** part of the fURL,
as it is referred to in Foolscap,
is matched by the regular expression fragment ``([^/]*)``.
Since this matches the empty string,
no network addresses are required to form a fURL.
The supporting code around the regular expression also takes extra steps to allow an empty string to match here.
Open Questions
--------------
1. Should we make a hard recommendation that all certificate fields are ignored?
The system makes no guarantees about validation of these fields.
Is it just an unnecessary risk to let a user see them?
2. Should the version specifier be a query-arg-alike or a fragment-alike?
The value is only necessary on the client side which makes it similar to an HTTP URL fragment.
The current Tahoe-LAFS configuration parsing code has special handling of the fragment character (``#``) which makes it unusable.
However,
the configuration parsing code is easily changed.

View File

@ -33,7 +33,7 @@ You can use whatever name you like for the virtualenv, but example uses
3: Use the virtualenv's ``pip`` to install the latest release of Tahoe-LAFS 3: Use the virtualenv's ``pip`` to install the latest release of Tahoe-LAFS
into this virtualenv:: into this virtualenv::
PS C:\Users\me> venv\Scripts\pip install --find-links=https://tahoe-lafs.org/deps/ tahoe-lafs PS C:\Users\me> venv\Scripts\pip install tahoe-lafs
Collecting tahoe-lafs Collecting tahoe-lafs
... ...
Installing collected packages: ... Installing collected packages: ...
@ -69,7 +69,7 @@ The ``pip install tahoe-lafs`` command above will install the latest release
the following command (using pip from the virtualenv, from the root of your the following command (using pip from the virtualenv, from the root of your
git checkout):: git checkout)::
$ venv\Scripts\pip install --find-links=https://tahoe-lafs.org/deps/ . $ venv\Scripts\pip install .
If you're planning to hack on the source code, you might want to add If you're planning to hack on the source code, you might want to add
``--editable`` so you won't have to re-install each time you make a change. ``--editable`` so you won't have to re-install each time you make a change.
@ -77,12 +77,7 @@ If you're planning to hack on the source code, you might want to add
Dependencies Dependencies
------------ ------------
Tahoe-LAFS depends upon several packages that use compiled C code Tahoe-LAFS depends upon several packages that use compiled C code (such as zfec).
(such as zfec). This code must be built separately for each platform This code must be built separately for each platform (Windows, OS-X, and different flavors of Linux).
(Windows, OS-X, and different flavors of Linux). Fortunately, this is now done by upstream packages for most platforms.
The result is that a C compiler is usually not required to install Tahoe-LAFS.
Pre-compiled "wheels" of all Tahoe's dependencies are hosted on the
tahoe-lafs.org website in the ``deps/`` directory. The ``--find-links=``
argument (used in the examples above) instructs ``pip`` to look at that URL
for dependencies. This should avoid the need for anything to be compiled
during the install.

File diff suppressed because it is too large Load Diff

View File

@ -1,7 +1,6 @@
from __future__ import print_function from __future__ import print_function
import sys import sys
from os import mkdir
from os.path import join from os.path import join
import pytest import pytest
@ -9,6 +8,14 @@ import pytest_twisted
import util import util
from twisted.python.filepath import (
FilePath,
)
from allmydata.test.common import (
write_introducer,
)
# see "conftest.py" for the fixtures (e.g. "tor_network") # see "conftest.py" for the fixtures (e.g. "tor_network")
# XXX: Integration tests that involve Tor do not run reliably on # XXX: Integration tests that involve Tor do not run reliably on
@ -66,12 +73,12 @@ def test_onion_service_storage(reactor, request, temp_dir, flog_gatherer, tor_ne
@pytest_twisted.inlineCallbacks @pytest_twisted.inlineCallbacks
def _create_anonymous_node(reactor, name, control_port, request, temp_dir, flog_gatherer, tor_network, introducer_furl): def _create_anonymous_node(reactor, name, control_port, request, temp_dir, flog_gatherer, tor_network, introducer_furl):
node_dir = join(temp_dir, name) node_dir = FilePath(temp_dir).child(name)
web_port = "tcp:{}:interface=localhost".format(control_port + 2000) web_port = "tcp:{}:interface=localhost".format(control_port + 2000)
if True: if True:
print("creating", node_dir) print("creating", node_dir.path)
mkdir(node_dir) node_dir.makedirs()
proto = util._DumpOutputProtocol(None) proto = util._DumpOutputProtocol(None)
reactor.spawnProcess( reactor.spawnProcess(
proto, proto,
@ -84,12 +91,15 @@ def _create_anonymous_node(reactor, name, control_port, request, temp_dir, flog_
'--hide-ip', '--hide-ip',
'--tor-control-port', 'tcp:localhost:{}'.format(control_port), '--tor-control-port', 'tcp:localhost:{}'.format(control_port),
'--listen', 'tor', '--listen', 'tor',
node_dir, node_dir.path,
) )
) )
yield proto.done yield proto.done
with open(join(node_dir, 'tahoe.cfg'), 'w') as f:
# Which services should this client connect to?
write_introducer(node_dir, "default", introducer_furl)
with node_dir.child('tahoe.cfg').open('w') as f:
f.write(''' f.write('''
[node] [node]
nickname = %(name)s nickname = %(name)s
@ -105,15 +115,12 @@ onion = true
onion.private_key_file = private/tor_onion.privkey onion.private_key_file = private/tor_onion.privkey
[client] [client]
# Which services should this client connect to?
introducer.furl = %(furl)s
shares.needed = 1 shares.needed = 1
shares.happy = 1 shares.happy = 1
shares.total = 2 shares.total = 2
''' % { ''' % {
'name': name, 'name': name,
'furl': introducer_furl,
'web_port': web_port, 'web_port': web_port,
'log_furl': flog_gatherer, 'log_furl': flog_gatherer,
'control_port': control_port, 'control_port': control_port,
@ -121,5 +128,5 @@ shares.total = 2
}) })
print("running") print("running")
yield util._run_node(reactor, node_dir, request, None) yield util._run_node(reactor, node_dir.path, request, None)
print("okay, launched") print("okay, launched")

View File

@ -6,6 +6,9 @@ from os.path import exists, join
from six.moves import StringIO from six.moves import StringIO
from functools import partial from functools import partial
from twisted.python.filepath import (
FilePath,
)
from twisted.internet.defer import Deferred, succeed from twisted.internet.defer import Deferred, succeed
from twisted.internet.protocol import ProcessProtocol from twisted.internet.protocol import ProcessProtocol
from twisted.internet.error import ProcessExitedAlready, ProcessDone from twisted.internet.error import ProcessExitedAlready, ProcessDone
@ -257,8 +260,13 @@ def _create_node(reactor, request, temp_dir, introducer_furl, flog_gatherer, nam
def created(_): def created(_):
config_path = join(node_dir, 'tahoe.cfg') config_path = join(node_dir, 'tahoe.cfg')
config = get_config(config_path) config = get_config(config_path)
set_config(config, 'node', 'log_gatherer.furl', flog_gatherer) set_config(
write_config(config_path, config) config,
u'node',
u'log_gatherer.furl',
flog_gatherer.decode("utf-8"),
)
write_config(FilePath(config_path), config)
created_d.addCallback(created) created_d.addCallback(created)
d = Deferred() d = Deferred()

View File

@ -11,8 +11,12 @@ umids = {}
for starting_point in sys.argv[1:]: for starting_point in sys.argv[1:]:
for root, dirs, files in os.walk(starting_point): for root, dirs, files in os.walk(starting_point):
for fn in [f for f in files if f.endswith(".py")]: for f in files:
fn = os.path.join(root, fn) if not f.endswith(".py"):
continue
if f == "check-debugging.py":
continue
fn = os.path.join(root, f)
for lineno,line in enumerate(open(fn, "r").readlines()): for lineno,line in enumerate(open(fn, "r").readlines()):
lineno = lineno+1 lineno = lineno+1
mo = re.search(r"\.setDebugging\(True\)", line) mo = re.search(r"\.setDebugging\(True\)", line)

View File

@ -0,0 +1 @@
Tahoe-LAFS now requires Twisted 19.10.0 or newer. As a result, it now has a transitive dependency on bcrypt.

0
newsfragments/3477.minor Normal file
View File

1
newsfragments/3478.minor Normal file
View File

@ -0,0 +1 @@

View File

@ -0,0 +1 @@
The Tahoe-LAFS project no longer commits to maintaining binary packages for all dependencies at <https://tahoe-lafs.org/deps>. Please use PyPI instead.

1
newsfragments/3503.other Normal file
View File

@ -0,0 +1 @@
The specification section of the Tahoe-LAFS documentation now includes explicit discussion of the security properties of Foolscap "fURLs" on which it depends.

View File

@ -0,0 +1 @@
The ``[client]introducer.furl`` configuration item is now deprecated in favor of the ``private/introducers.yaml`` file.

0
newsfragments/3511.minor Normal file
View File

0
newsfragments/3513.minor Normal file
View File

0
newsfragments/3514.minor Normal file
View File

0
newsfragments/3515.minor Normal file
View File

0
newsfragments/3517.minor Normal file
View File

View File

@ -0,0 +1 @@
Announcements delivered through the introducer system are no longer automatically annotated with copious information about the Tahoe-LAFS software version nor the versions of its dependencies.

0
newsfragments/3520.minor Normal file
View File

0
newsfragments/3537.minor Normal file
View File

View File

@ -0,0 +1 @@
Certain implementation-internal weakref KeyErrors are now handled and should no longer cause user-initiated operations to fail.

0
newsfragments/3542.minor Normal file
View File

0
newsfragments/3547.minor Normal file
View File

View File

@ -15,6 +15,9 @@ self: super: {
# Need version of pyutil that supports Python 3. The version in 19.09 # Need version of pyutil that supports Python 3. The version in 19.09
# is too old. # is too old.
pyutil = python-super.callPackage ./pyutil.nix { }; pyutil = python-super.callPackage ./pyutil.nix { };
# Need a newer version of Twisted, too.
twisted = python-super.callPackage ./twisted.nix { };
}; };
}; };
} }

63
nix/twisted.nix Normal file
View File

@ -0,0 +1,63 @@
# Build expression for Twisted, pinned to 19.10.0 because the nixpkgs
# channel in use ships an older Twisted than Tahoe-LAFS now requires
# (setup.py requires Twisted >= 19.10.0; the overlay wires this file in
# via `python-super.callPackage ./twisted.nix { }`).
{ stdenv
, buildPythonPackage
, fetchPypi
, python
, zope_interface
, incremental
, automat
, constantly
, hyperlink
, pyhamcrest
, attrs
, pyopenssl
, service-identity
, setuptools
, idna
, bcrypt
}:
buildPythonPackage rec {
pname = "Twisted";
version = "19.10.0";
# Upstream release tarball from PyPI; sha256 pins the exact artifact.
src = fetchPypi {
inherit pname version;
extension = "tar.bz2";
sha256 = "7394ba7f272ae722a74f3d969dcf599bc4ef093bc392038748a490f1724a515d";
};
# Runtime dependencies.  bcrypt is included because this Twisted version
# introduces a transitive dependency on it (see newsfragment 3472).
propagatedBuildInputs = [ zope_interface incremental automat constantly hyperlink pyhamcrest attrs setuptools bcrypt ];
# Optional TLS extra, exposed for consumers that need Twisted[tls].
passthru.extras.tls = [ pyopenssl service-identity idna ];
# Patch t.p._inotify to point to libc. Without this,
# twisted.python.runtime.platform.supportsINotify() == False
patchPhase = stdenv.lib.optionalString stdenv.isLinux ''
substituteInPlace src/twisted/python/_inotify.py --replace \
"ctypes.util.find_library('c')" "'${stdenv.glibc.out}/lib/libc.so.6'"
'';
# Generate Twisted's plug-in cache. Twisted users must do it as well. See
# http://twistedmatrix.com/documents/current/core/howto/plugin.html#auto3
# and http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=477103 for
# details.
postFixup = ''
$out/bin/twistd --help > /dev/null
'';
checkPhase = ''
${python.interpreter} -m unittest discover -s twisted/test
'';
# Tests require network
doCheck = false;
meta = with stdenv.lib; {
homepage = https://twistedmatrix.com/;
description = "Twisted, an event-driven networking engine written in Python";
longDescription = ''
Twisted is an event-driven networking engine written in Python
and licensed under the MIT license.
'';
license = licenses.mit;
maintainers = [ ];
};
}

View File

@ -98,7 +98,9 @@ install_requires = [
# `pip install tahoe-lafs[sftp]` would not install requirements # `pip install tahoe-lafs[sftp]` would not install requirements
# specified by Twisted[conch]. Since this would be the *whole point* of # specified by Twisted[conch]. Since this would be the *whole point* of
# an sftp extra in Tahoe-LAFS, there is no point in having one. # an sftp extra in Tahoe-LAFS, there is no point in having one.
"Twisted[tls,conch] >= 18.4.0", # * Twisted 19.10 introduces Site.getContentFile which we use to get
# temporary upload files placed into a per-node temporary directory.
"Twisted[tls,conch] >= 19.10.0",
"PyYAML >= 3.11", "PyYAML >= 3.11",

View File

@ -1,7 +1,8 @@
from past.builtins import unicode
import os, stat, time, weakref import os, stat, time, weakref
from base64 import urlsafe_b64encode from base64 import urlsafe_b64encode
from functools import partial from functools import partial
from errno import ENOENT, EPERM
# On Python 2 this will be the backported package: # On Python 2 this will be the backported package:
from configparser import NoSectionError from configparser import NoSectionError
@ -33,6 +34,7 @@ from allmydata.introducer.client import IntroducerClient
from allmydata.util import ( from allmydata.util import (
hashutil, base32, pollmixin, log, idlib, hashutil, base32, pollmixin, log, idlib,
yamlutil, configutil, yamlutil, configutil,
fileutil,
) )
from allmydata.util.encodingutil import get_filesystem_encoding from allmydata.util.encodingutil import get_filesystem_encoding
from allmydata.util.abbreviate import parse_abbreviated_size from allmydata.util.abbreviate import parse_abbreviated_size
@ -464,57 +466,17 @@ def create_introducer_clients(config, main_tub, _introducer_factory=None):
# we return this list # we return this list
introducer_clients = [] introducer_clients = []
introducers_yaml_filename = config.get_private_path("introducers.yaml") introducers = config.get_introducer_configuration()
introducers_filepath = FilePath(introducers_yaml_filename)
try: for petname, (furl, cache_path) in introducers.items():
with introducers_filepath.open() as f:
introducers_yaml = yamlutil.safe_load(f)
if introducers_yaml is None:
raise EnvironmentError(
EPERM,
"Can't read '{}'".format(introducers_yaml_filename),
introducers_yaml_filename,
)
introducers = introducers_yaml.get("introducers", {})
log.msg(
"found {} introducers in private/introducers.yaml".format(
len(introducers),
)
)
except EnvironmentError as e:
if e.errno != ENOENT:
raise
introducers = {}
if "default" in introducers.keys():
raise ValueError(
"'default' introducer furl cannot be specified in introducers.yaml;"
" please fix impossible configuration."
)
# read furl from tahoe.cfg
tahoe_cfg_introducer_furl = config.get_config("client", "introducer.furl", None)
if tahoe_cfg_introducer_furl == "None":
raise ValueError(
"tahoe.cfg has invalid 'introducer.furl = None':"
" to disable it, use 'introducer.furl ='"
" or omit the key entirely"
)
if tahoe_cfg_introducer_furl:
introducers[u'default'] = {'furl':tahoe_cfg_introducer_furl}
for petname, introducer in introducers.items():
introducer_cache_filepath = FilePath(config.get_private_path("introducer_{}_cache.yaml".format(petname)))
ic = _introducer_factory( ic = _introducer_factory(
main_tub, main_tub,
introducer['furl'].encode("ascii"), furl.encode("ascii"),
config.nickname, config.nickname,
str(allmydata.__full_version__), str(allmydata.__full_version__),
str(_Client.OLDEST_SUPPORTED_VERSION), str(_Client.OLDEST_SUPPORTED_VERSION),
list(node.get_app_versions()),
partial(_sequencer, config), partial(_sequencer, config),
introducer_cache_filepath, cache_path,
) )
introducer_clients.append(ic) introducer_clients.append(ic)
return introducer_clients return introducer_clients
@ -728,10 +690,14 @@ class _Client(node.Node, pollmixin.PollMixin):
return { 'node.uptime': time.time() - self.started_timestamp } return { 'node.uptime': time.time() - self.started_timestamp }
def init_secrets(self): def init_secrets(self):
lease_s = self.config.get_or_create_private_config("secret", _make_secret) # configs are always unicode
def _unicode_make_secret():
return unicode(_make_secret(), "ascii")
lease_s = self.config.get_or_create_private_config(
"secret", _unicode_make_secret).encode("utf-8")
lease_secret = base32.a2b(lease_s) lease_secret = base32.a2b(lease_s)
convergence_s = self.config.get_or_create_private_config('convergence', convergence_s = self.config.get_or_create_private_config(
_make_secret) 'convergence', _unicode_make_secret).encode("utf-8")
self.convergence = base32.a2b(convergence_s) self.convergence = base32.a2b(convergence_s)
self._secret_holder = SecretHolder(lease_secret, self.convergence) self._secret_holder = SecretHolder(lease_secret, self.convergence)
@ -740,9 +706,11 @@ class _Client(node.Node, pollmixin.PollMixin):
# existing key # existing key
def _make_key(): def _make_key():
private_key, _ = ed25519.create_signing_keypair() private_key, _ = ed25519.create_signing_keypair()
return ed25519.string_from_signing_key(private_key) + b"\n" # Config values are always unicode:
return unicode(ed25519.string_from_signing_key(private_key) + b"\n", "utf-8")
private_key_str = self.config.get_or_create_private_config("node.privkey", _make_key) private_key_str = self.config.get_or_create_private_config(
"node.privkey", _make_key).encode("utf-8")
private_key, public_key = ed25519.signing_keypair_from_string(private_key_str) private_key, public_key = ed25519.signing_keypair_from_string(private_key_str)
public_key_str = ed25519.string_from_verifying_key(public_key) public_key_str = ed25519.string_from_verifying_key(public_key)
self.config.write_config_file("node.pubkey", public_key_str + b"\n", "wb") self.config.write_config_file("node.pubkey", public_key_str + b"\n", "wb")
@ -1043,6 +1011,21 @@ class _Client(node.Node, pollmixin.PollMixin):
def set_default_mutable_keysize(self, keysize): def set_default_mutable_keysize(self, keysize):
self._key_generator.set_default_keysize(keysize) self._key_generator.set_default_keysize(keysize)
def _get_tempdir(self):
"""
Determine the path to the directory where temporary files for this node
should be written.
:return bytes: The path which will exist and be a directory.
"""
tempdir_config = self.config.get_config("node", "tempdir", "tmp")
if isinstance(tempdir_config, bytes):
tempdir_config = tempdir_config.decode('utf-8')
tempdir = self.config.get_config_path(tempdir_config)
if not os.path.exists(tempdir):
fileutil.make_dirs(tempdir)
return tempdir
def init_web(self, webport): def init_web(self, webport):
self.log("init_web(webport=%s)", args=(webport,)) self.log("init_web(webport=%s)", args=(webport,))
@ -1050,7 +1033,13 @@ class _Client(node.Node, pollmixin.PollMixin):
nodeurl_path = self.config.get_config_path("node.url") nodeurl_path = self.config.get_config_path("node.url")
staticdir_config = self.config.get_config("node", "web.static", "public_html") staticdir_config = self.config.get_config("node", "web.static", "public_html")
staticdir = self.config.get_config_path(staticdir_config) staticdir = self.config.get_config_path(staticdir_config)
ws = WebishServer(self, webport, nodeurl_path, staticdir) ws = WebishServer(
self,
webport,
self._get_tempdir(),
nodeurl_path,
staticdir,
)
ws.setServiceParent(self) ws.setServiceParent(self)
def init_ftp_server(self): def init_ftp_server(self):
@ -1071,7 +1060,7 @@ class _Client(node.Node, pollmixin.PollMixin):
if accountfile: if accountfile:
accountfile = self.config.get_config_path(accountfile) accountfile = self.config.get_config_path(accountfile)
accounturl = self.config.get_config("sftpd", "accounts.url", None) accounturl = self.config.get_config("sftpd", "accounts.url", None)
sftp_portstr = self.config.get_config("sftpd", "port", "8022") sftp_portstr = self.config.get_config("sftpd", "port", "tcp:8022")
pubkey_file = self.config.get_config("sftpd", "host_pubkey_file") pubkey_file = self.config.get_config("sftpd", "host_pubkey_file")
privkey_file = self.config.get_config("sftpd", "host_privkey_file") privkey_file = self.config.get_config("sftpd", "host_privkey_file")

View File

@ -1975,6 +1975,8 @@ class Dispatcher(object):
class SFTPServer(service.MultiService): class SFTPServer(service.MultiService):
name = "frontend:sftp"
def __init__(self, client, accountfile, accounturl, def __init__(self, client, accountfile, accounturl,
sftp_portstr, pubkey_file, privkey_file): sftp_portstr, pubkey_file, privkey_file):
precondition(isinstance(accountfile, (unicode, NoneType)), accountfile) precondition(isinstance(accountfile, (unicode, NoneType)), accountfile)

View File

@ -1,4 +1,5 @@
from past.builtins import unicode from past.builtins import unicode, long
from six import ensure_text
import time import time
from zope.interface import implementer from zope.interface import implementer
@ -17,30 +18,31 @@ from allmydata.util.assertutil import precondition
class InvalidCacheError(Exception): class InvalidCacheError(Exception):
pass pass
V2 = "http://allmydata.org/tahoe/protocols/introducer/v2" V2 = b"http://allmydata.org/tahoe/protocols/introducer/v2"
@implementer(RIIntroducerSubscriberClient_v2, IIntroducerClient) @implementer(RIIntroducerSubscriberClient_v2, IIntroducerClient)
class IntroducerClient(service.Service, Referenceable): class IntroducerClient(service.Service, Referenceable):
def __init__(self, tub, introducer_furl, def __init__(self, tub, introducer_furl,
nickname, my_version, oldest_supported, nickname, my_version, oldest_supported,
app_versions, sequencer, cache_filepath): sequencer, cache_filepath):
self._tub = tub self._tub = tub
if isinstance(introducer_furl, unicode):
introducer_furl = introducer_furl.encode("utf-8")
self.introducer_furl = introducer_furl self.introducer_furl = introducer_furl
assert type(nickname) is unicode assert type(nickname) is unicode
self._nickname = nickname self._nickname = nickname
self._my_version = my_version self._my_version = my_version
self._oldest_supported = oldest_supported self._oldest_supported = oldest_supported
self._app_versions = app_versions
self._sequencer = sequencer self._sequencer = sequencer
self._cache_filepath = cache_filepath self._cache_filepath = cache_filepath
self._my_subscriber_info = { "version": 0, self._my_subscriber_info = { b"version": 0,
"nickname": self._nickname, b"nickname": self._nickname,
"app-versions": self._app_versions, b"app-versions": [],
"my-version": self._my_version, b"my-version": self._my_version,
"oldest-supported": self._oldest_supported, b"oldest-supported": self._oldest_supported,
} }
self._outbound_announcements = {} # not signed self._outbound_announcements = {} # not signed
@ -114,19 +116,24 @@ class IntroducerClient(service.Service, Referenceable):
announcements = [] announcements = []
for _, value in self._inbound_announcements.items(): for _, value in self._inbound_announcements.items():
ann, key_s, time_stamp = value ann, key_s, time_stamp = value
# On Python 2, bytes strings are encoded into YAML Unicode strings.
# On Python 3, bytes are encoded as YAML bytes. To minimize
# changes, Python 3 for now ensures the same is true.
server_params = { server_params = {
"ann" : ann, "ann" : ann,
"key_s" : key_s, "key_s" : ensure_text(key_s),
} }
announcements.append(server_params) announcements.append(server_params)
announcement_cache_yaml = yamlutil.safe_dump(announcements) announcement_cache_yaml = yamlutil.safe_dump(announcements)
if isinstance(announcement_cache_yaml, unicode):
announcement_cache_yaml = announcement_cache_yaml.encode("utf-8")
self._cache_filepath.setContent(announcement_cache_yaml) self._cache_filepath.setContent(announcement_cache_yaml)
def _got_introducer(self, publisher): def _got_introducer(self, publisher):
self.log("connected to introducer, getting versions") self.log("connected to introducer, getting versions")
default = { "http://allmydata.org/tahoe/protocols/introducer/v1": default = { b"http://allmydata.org/tahoe/protocols/introducer/v1":
{ }, { },
"application-version": "unknown: no get_version()", b"application-version": b"unknown: no get_version()",
} }
d = add_version_to_remote_reference(publisher, default) d = add_version_to_remote_reference(publisher, default)
d.addCallback(self._got_versioned_introducer) d.addCallback(self._got_versioned_introducer)
@ -139,6 +146,7 @@ class IntroducerClient(service.Service, Referenceable):
def _got_versioned_introducer(self, publisher): def _got_versioned_introducer(self, publisher):
self.log("got introducer version: %s" % (publisher.version,)) self.log("got introducer version: %s" % (publisher.version,))
# we require an introducer that speaks at least V2 # we require an introducer that speaks at least V2
assert all(type(V2) == type(v) for v in publisher.version)
if V2 not in publisher.version: if V2 not in publisher.version:
raise InsufficientVersionError("V2", publisher.version) raise InsufficientVersionError("V2", publisher.version)
self._publisher = publisher self._publisher = publisher
@ -163,7 +171,7 @@ class IntroducerClient(service.Service, Referenceable):
self._subscribed_service_names.add(service_name) self._subscribed_service_names.add(service_name)
self._maybe_subscribe() self._maybe_subscribe()
for index,(ann,key_s,when) in self._inbound_announcements.items(): for index,(ann,key_s,when) in self._inbound_announcements.items():
precondition(isinstance(key_s, str), key_s) precondition(isinstance(key_s, bytes), key_s)
servicename = index[0] servicename = index[0]
if servicename == service_name: if servicename == service_name:
eventually(cb, key_s, ann, *args, **kwargs) eventually(cb, key_s, ann, *args, **kwargs)
@ -190,7 +198,7 @@ class IntroducerClient(service.Service, Referenceable):
# "seqnum" and "nonce" will be populated with new values in # "seqnum" and "nonce" will be populated with new values in
# publish(), each time we make a change # publish(), each time we make a change
"nickname": self._nickname, "nickname": self._nickname,
"app-versions": self._app_versions, "app-versions": [],
"my-version": self._my_version, "my-version": self._my_version,
"oldest-supported": self._oldest_supported, "oldest-supported": self._oldest_supported,
@ -239,7 +247,7 @@ class IntroducerClient(service.Service, Referenceable):
# this might raise UnknownKeyError or bad-sig error # this might raise UnknownKeyError or bad-sig error
ann, key_s = unsign_from_foolscap(ann_t) ann, key_s = unsign_from_foolscap(ann_t)
# key is "v0-base32abc123" # key is "v0-base32abc123"
precondition(isinstance(key_s, str), key_s) precondition(isinstance(key_s, bytes), key_s)
except BadSignature: except BadSignature:
self.log("bad signature on inbound announcement: %s" % (ann_t,), self.log("bad signature on inbound announcement: %s" % (ann_t,),
parent=lp, level=log.WEIRD, umid="ZAU15Q") parent=lp, level=log.WEIRD, umid="ZAU15Q")
@ -249,7 +257,7 @@ class IntroducerClient(service.Service, Referenceable):
self._process_announcement(ann, key_s) self._process_announcement(ann, key_s)
def _process_announcement(self, ann, key_s): def _process_announcement(self, ann, key_s):
precondition(isinstance(key_s, str), key_s) precondition(isinstance(key_s, bytes), key_s)
self._debug_counts["inbound_announcement"] += 1 self._debug_counts["inbound_announcement"] += 1
service_name = str(ann["service-name"]) service_name = str(ann["service-name"])
if service_name not in self._subscribed_service_names: if service_name not in self._subscribed_service_names:
@ -258,7 +266,7 @@ class IntroducerClient(service.Service, Referenceable):
self._debug_counts["wrong_service"] += 1 self._debug_counts["wrong_service"] += 1
return return
# for ASCII values, simplejson might give us unicode *or* bytes # for ASCII values, simplejson might give us unicode *or* bytes
if "nickname" in ann and isinstance(ann["nickname"], str): if "nickname" in ann and isinstance(ann["nickname"], bytes):
ann["nickname"] = unicode(ann["nickname"]) ann["nickname"] = unicode(ann["nickname"])
nick_s = ann.get("nickname",u"").encode("utf-8") nick_s = ann.get("nickname",u"").encode("utf-8")
lp2 = self.log(format="announcement for nickname '%(nick)s', service=%(svc)s: %(ann)s", lp2 = self.log(format="announcement for nickname '%(nick)s', service=%(svc)s: %(ann)s",
@ -267,11 +275,11 @@ class IntroducerClient(service.Service, Referenceable):
# how do we describe this node in the logs? # how do we describe this node in the logs?
desc_bits = [] desc_bits = []
assert key_s assert key_s
desc_bits.append("serverid=" + key_s[:20]) desc_bits.append(b"serverid=" + key_s[:20])
if "anonymous-storage-FURL" in ann: if "anonymous-storage-FURL" in ann:
tubid_s = get_tubid_string_from_ann(ann) tubid_s = get_tubid_string_from_ann(ann)
desc_bits.append("tubid=" + tubid_s[:8]) desc_bits.append(b"tubid=" + tubid_s[:8])
description = "/".join(desc_bits) description = b"/".join(desc_bits)
# the index is used to track duplicates # the index is used to track duplicates
index = (service_name, key_s) index = (service_name, key_s)
@ -321,7 +329,7 @@ class IntroducerClient(service.Service, Referenceable):
self._deliver_announcements(key_s, ann) self._deliver_announcements(key_s, ann)
def _deliver_announcements(self, key_s, ann): def _deliver_announcements(self, key_s, ann):
precondition(isinstance(key_s, str), key_s) precondition(isinstance(key_s, bytes), key_s)
service_name = str(ann["service-name"]) service_name = str(ann["service-name"])
for (service_name2,cb,args,kwargs) in self._local_subscribers: for (service_name2,cb,args,kwargs) in self._local_subscribers:
if service_name2 == service_name: if service_name2 == service_name:

View File

@ -1,16 +1,19 @@
from past.builtins import unicode
import re import re
import json
from allmydata.crypto.util import remove_prefix from allmydata.crypto.util import remove_prefix
from allmydata.crypto import ed25519 from allmydata.crypto import ed25519
from allmydata.util import base32, rrefutil from allmydata.util import base32, rrefutil, jsonbytes as json
def get_tubid_string_from_ann(ann): def get_tubid_string_from_ann(ann):
return get_tubid_string(str(ann.get("anonymous-storage-FURL") furl = ann.get("anonymous-storage-FURL") or ann.get("FURL")
or ann.get("FURL"))) if isinstance(furl, unicode):
furl = furl.encode("utf-8")
return get_tubid_string(furl)
def get_tubid_string(furl): def get_tubid_string(furl):
m = re.match(r'pb://(\w+)@', furl) m = re.match(br'pb://(\w+)@', furl)
assert m assert m
return m.group(1).lower() return m.group(1).lower()

View File

@ -1,3 +1,5 @@
from past.builtins import long
from six import ensure_str, ensure_text
import time, os.path, textwrap import time, os.path, textwrap
from zope.interface import implementer from zope.interface import implementer
@ -7,7 +9,7 @@ from twisted.python.failure import Failure
from foolscap.api import Referenceable from foolscap.api import Referenceable
import allmydata import allmydata
from allmydata import node from allmydata import node
from allmydata.util import log, rrefutil from allmydata.util import log, rrefutil, dictutil
from allmydata.util.i2p_provider import create as create_i2p_provider from allmydata.util.i2p_provider import create as create_i2p_provider
from allmydata.util.tor_provider import create as create_tor_provider from allmydata.util.tor_provider import create as create_tor_provider
from allmydata.introducer.interfaces import \ from allmydata.introducer.interfaces import \
@ -122,7 +124,7 @@ class _IntroducerNode(node.Node):
from allmydata.webish import IntroducerWebishServer from allmydata.webish import IntroducerWebishServer
nodeurl_path = self.config.get_config_path(u"node.url") nodeurl_path = self.config.get_config_path(u"node.url")
config_staticdir = self.get_config("node", "web.static", "public_html").decode('utf-8') config_staticdir = self.get_config("node", "web.static", "public_html")
staticdir = self.config.get_config_path(config_staticdir) staticdir = self.config.get_config_path(config_staticdir)
ws = IntroducerWebishServer(self, webport, nodeurl_path, staticdir) ws = IntroducerWebishServer(self, webport, nodeurl_path, staticdir)
ws.setServiceParent(self) ws.setServiceParent(self)
@ -133,8 +135,8 @@ class IntroducerService(service.MultiService, Referenceable):
# v1 is the original protocol, added in 1.0 (but only advertised starting # v1 is the original protocol, added in 1.0 (but only advertised starting
# in 1.3), removed in 1.12. v2 is the new signed protocol, added in 1.10 # in 1.3), removed in 1.12. v2 is the new signed protocol, added in 1.10
VERSION = { #"http://allmydata.org/tahoe/protocols/introducer/v1": { }, VERSION = { #"http://allmydata.org/tahoe/protocols/introducer/v1": { },
"http://allmydata.org/tahoe/protocols/introducer/v2": { }, b"http://allmydata.org/tahoe/protocols/introducer/v2": { },
"application-version": str(allmydata.__full_version__), b"application-version": allmydata.__full_version__.encode("utf-8"),
} }
def __init__(self): def __init__(self):
@ -279,6 +281,10 @@ class IntroducerService(service.MultiService, Referenceable):
def remote_subscribe_v2(self, subscriber, service_name, subscriber_info): def remote_subscribe_v2(self, subscriber, service_name, subscriber_info):
self.log("introducer: subscription[%s] request at %s" self.log("introducer: subscription[%s] request at %s"
% (service_name, subscriber), umid="U3uzLg") % (service_name, subscriber), umid="U3uzLg")
service_name = ensure_str(service_name)
subscriber_info = dictutil.UnicodeKeyDict({
ensure_text(k): v for (k, v) in subscriber_info.items()
})
return self.add_subscriber(subscriber, service_name, subscriber_info) return self.add_subscriber(subscriber, service_name, subscriber_info)
def add_subscriber(self, subscriber, service_name, subscriber_info): def add_subscriber(self, subscriber, service_name, subscriber_info):
@ -302,6 +308,10 @@ class IntroducerService(service.MultiService, Referenceable):
subscriber.notifyOnDisconnect(_remove) subscriber.notifyOnDisconnect(_remove)
# now tell them about any announcements they're interested in # now tell them about any announcements they're interested in
assert {type(service_name)}.issuperset(
set(type(k[0]) for k in self._announcements)), (
service_name, self._announcements.keys()
)
announcements = set( [ ann_t announcements = set( [ ann_t
for idx,(ann_t,canary,ann,when) for idx,(ann_t,canary,ann,when)
in self._announcements.items() in self._announcements.items()

View File

@ -914,7 +914,7 @@ class Publish(object):
def log_goal(self, goal, message=""): def log_goal(self, goal, message=""):
logmsg = [message] logmsg = [message]
for (shnum, server) in sorted([(s,p) for (p,s) in goal]): for (shnum, server) in sorted([(s,p) for (p,s) in goal], key=lambda t: (id(t[0]), id(t[1]))):
logmsg.append("sh%d to [%s]" % (shnum, server.get_name())) logmsg.append("sh%d to [%s]" % (shnum, server.get_name()))
self.log("current goal: %s" % (", ".join(logmsg)), level=log.NOISY) self.log("current goal: %s" % (", ".join(logmsg)), level=log.NOISY)
self.log("we are planning to push new seqnum=#%d" % self._new_seqnum, self.log("we are planning to push new seqnum=#%d" % self._new_seqnum,

View File

@ -19,24 +19,37 @@ import os.path
import re import re
import types import types
import errno import errno
import tempfile
from base64 import b32decode, b32encode from base64 import b32decode, b32encode
from errno import ENOENT, EPERM
from warnings import warn
import attr
# On Python 2 this will be the backported package. # On Python 2 this will be the backported package.
import configparser import configparser
from twisted.python.filepath import (
FilePath,
)
from twisted.python import log as twlog from twisted.python import log as twlog
from twisted.application import service from twisted.application import service
from twisted.python.failure import Failure from twisted.python.failure import Failure
from foolscap.api import Tub, app_versions from foolscap.api import Tub
import foolscap.logging.log import foolscap.logging.log
from allmydata.version_checks import get_package_versions, get_package_versions_string
from allmydata.util import log from allmydata.util import log
from allmydata.util import fileutil, iputil from allmydata.util import fileutil, iputil
from allmydata.util.assertutil import _assert
from allmydata.util.fileutil import abspath_expanduser_unicode from allmydata.util.fileutil import abspath_expanduser_unicode
from allmydata.util.encodingutil import get_filesystem_encoding, quote_output from allmydata.util.encodingutil import get_filesystem_encoding, quote_output
from allmydata.util import configutil from allmydata.util import configutil
from allmydata.util.yamlutil import (
safe_load,
)
from . import (
__full_version__,
)
def _common_valid_config(): def _common_valid_config():
return configutil.ValidConfiguration({ return configutil.ValidConfiguration({
@ -78,11 +91,6 @@ def _common_valid_config():
), ),
}) })
# Add our application versions to the data that Foolscap's LogPublisher
# reports. Foolscap requires native strings.
for thing, things_version in list(get_package_versions().items()):
app_versions.add_version(ensure_str(thing), ensure_str(things_version))
# group 1 will be addr (dotted quad string), group 3 if any will be portnum (string) # group 1 will be addr (dotted quad string), group 3 if any will be portnum (string)
ADDR_RE = re.compile("^([1-9][0-9]*\.[1-9][0-9]*\.[1-9][0-9]*\.[1-9][0-9]*)(:([1-9][0-9]*))?$") ADDR_RE = re.compile("^([1-9][0-9]*\.[1-9][0-9]*\.[1-9][0-9]*\.[1-9][0-9]*)(:([1-9][0-9]*))?$")
@ -192,25 +200,27 @@ def read_config(basedir, portnumfile, generated_files=[], _valid_config=None):
# canonicalize the portnum file # canonicalize the portnum file
portnumfile = os.path.join(basedir, portnumfile) portnumfile = os.path.join(basedir, portnumfile)
# (try to) read the main config file config_path = FilePath(basedir).child("tahoe.cfg")
config_fname = os.path.join(basedir, "tahoe.cfg")
try: try:
parser = configutil.get_config(config_fname) config_str = config_path.getContent()
except EnvironmentError as e: except EnvironmentError as e:
if e.errno != errno.ENOENT: if e.errno != errno.ENOENT:
raise raise
# The file is missing, just create empty ConfigParser. # The file is missing, just create empty ConfigParser.
parser = configutil.get_config_from_string(u"") config_str = u""
else:
config_str = config_str.decode("utf-8-sig")
configutil.validate_config(config_fname, parser, _valid_config) return config_from_string(
basedir,
# make sure we have a private configuration area portnumfile,
fileutil.make_dirs(os.path.join(basedir, "private"), 0o700) config_str,
_valid_config,
return _Config(parser, portnumfile, basedir, config_fname) config_path,
)
def config_from_string(basedir, portnumfile, config_str, _valid_config=None): def config_from_string(basedir, portnumfile, config_str, _valid_config=None, fpath=None):
""" """
load and validate configuration from in-memory string load and validate configuration from in-memory string
""" """
@ -223,16 +233,19 @@ def config_from_string(basedir, portnumfile, config_str, _valid_config=None):
# load configuration from in-memory string # load configuration from in-memory string
parser = configutil.get_config_from_string(config_str) parser = configutil.get_config_from_string(config_str)
fname = "<in-memory>" configutil.validate_config(
configutil.validate_config(fname, parser, _valid_config) "<string>" if fpath is None else fpath.path,
return _Config(parser, portnumfile, basedir, fname) parser,
_valid_config,
)
return _Config(
def get_app_versions(): parser,
""" portnumfile,
:returns: dict of versions important to Foolscap basedir,
""" fpath,
return dict(app_versions.versions) _valid_config,
)
def _error_about_old_config_files(basedir, generated_files): def _error_about_old_config_files(basedir, generated_files):
@ -260,6 +273,7 @@ def _error_about_old_config_files(basedir, generated_files):
raise e raise e
@attr.s
class _Config(object): class _Config(object):
""" """
Manages configuration of a Tahoe 'node directory'. Manages configuration of a Tahoe 'node directory'.
@ -268,30 +282,47 @@ class _Config(object):
class; names and funtionality have been kept the same while moving class; names and funtionality have been kept the same while moving
the code. It probably makes sense for several of these APIs to the code. It probably makes sense for several of these APIs to
have better names. have better names.
:ivar ConfigParser config: The actual configuration values.
:ivar str portnum_fname: filename to use for the port-number file (a
relative path inside basedir).
:ivar str _basedir: path to our "node directory", inside which all
configuration is managed.
:ivar (FilePath|NoneType) config_path: The path actually used to create
the configparser (might be ``None`` if using in-memory data).
:ivar ValidConfiguration valid_config_sections: The validator for the
values in this configuration.
""" """
config = attr.ib(validator=attr.validators.instance_of(configparser.ConfigParser))
portnum_fname = attr.ib()
_basedir = attr.ib(
converter=lambda basedir: abspath_expanduser_unicode(ensure_text(basedir)),
)
config_path = attr.ib(
validator=attr.validators.optional(
attr.validators.instance_of(FilePath),
),
)
valid_config_sections = attr.ib(
default=configutil.ValidConfiguration.everything(),
validator=attr.validators.instance_of(configutil.ValidConfiguration),
)
def __init__(self, configparser, portnum_fname, basedir, config_fname): @property
""" def nickname(self):
:param configparser: a ConfigParser instance nickname = self.get_config("node", "nickname", u"<unspecified>")
assert isinstance(nickname, str)
return nickname
:param portnum_fname: filename to use for the port-number file @property
(a relative path inside basedir) def _config_fname(self):
if self.config_path is None:
:param basedir: path to our "node directory", inside which all return "<string>"
configuration is managed return self.config_path.path
:param config_fname: the pathname actually used to create the
configparser (might be 'fake' if using in-memory data)
"""
self.portnum_fname = portnum_fname
self._basedir = abspath_expanduser_unicode(ensure_text(basedir))
self._config_fname = config_fname
self.config = configparser
self.nickname = self.get_config("node", "nickname", u"<unspecified>")
assert isinstance(self.nickname, str)
def validate(self, valid_config_sections):
configutil.validate_config(self._config_fname, self.config, valid_config_sections)
def write_config_file(self, name, value, mode="w"): def write_config_file(self, name, value, mode="w"):
""" """
@ -336,6 +367,34 @@ class _Config(object):
) )
return default return default
def set_config(self, section, option, value):
"""
Set a config option in a section and re-write the tahoe.cfg file
:param str section: The name of the section in which to set the
option.
:param str option: The name of the option to set.
:param str value: The value of the option.
:raise UnescapedHashError: If the option holds a fURL and there is a
``#`` in the value.
"""
if option.endswith(".furl") and "#" in value:
raise UnescapedHashError(section, option, value)
copied_config = configutil.copy_config(self.config)
configutil.set_config(copied_config, section, option, value)
configutil.validate_config(
self._config_fname,
copied_config,
self.valid_config_sections,
)
if self.config_path is not None:
configutil.write_config(self.config_path, copied_config)
self.config = copied_config
def get_config_from_file(self, name, required=False): def get_config_from_file(self, name, required=False):
"""Get the (string) contents of a config file, or None if the file """Get the (string) contents of a config file, or None if the file
did not exist. If required=True, raise an exception rather than did not exist. If required=True, raise an exception rather than
@ -428,6 +487,97 @@ class _Config(object):
os.path.join(self._basedir, *args) os.path.join(self._basedir, *args)
) )
def get_introducer_configuration(self):
"""
Get configuration for introducers.
:return {unicode: (unicode, FilePath)}: A mapping from introducer
petname to a tuple of the introducer's fURL and local cache path.
"""
introducers_yaml_filename = self.get_private_path("introducers.yaml")
introducers_filepath = FilePath(introducers_yaml_filename)
def get_cache_filepath(petname):
return FilePath(
self.get_private_path("introducer_{}_cache.yaml".format(petname)),
)
try:
with introducers_filepath.open() as f:
introducers_yaml = safe_load(f)
if introducers_yaml is None:
raise EnvironmentError(
EPERM,
"Can't read '{}'".format(introducers_yaml_filename),
introducers_yaml_filename,
)
introducers = {
petname: config["furl"]
for petname, config
in introducers_yaml.get("introducers", {}).items()
}
non_strs = list(
k
for k
in introducers.keys()
if not isinstance(k, str)
)
if non_strs:
raise TypeError(
"Introducer petnames {!r} should have been str".format(
non_strs,
),
)
non_strs = list(
v
for v
in introducers.values()
if not isinstance(v, str)
)
if non_strs:
raise TypeError(
"Introducer fURLs {!r} should have been str".format(
non_strs,
),
)
log.msg(
"found {} introducers in {!r}".format(
len(introducers),
introducers_yaml_filename,
)
)
except EnvironmentError as e:
if e.errno != ENOENT:
raise
introducers = {}
# supported the deprecated [client]introducer.furl item in tahoe.cfg
tahoe_cfg_introducer_furl = self.get_config("client", "introducer.furl", None)
if tahoe_cfg_introducer_furl == "None":
raise ValueError(
"tahoe.cfg has invalid 'introducer.furl = None':"
" to disable it omit the key entirely"
)
if tahoe_cfg_introducer_furl:
warn(
"tahoe.cfg [client]introducer.furl is deprecated; "
"use private/introducers.yaml instead.",
category=DeprecationWarning,
stacklevel=-1,
)
if "default" in introducers:
raise ValueError(
"'default' introducer furl cannot be specified in tahoe.cfg and introducers.yaml;"
" please fix impossible configuration."
)
introducers['default'] = tahoe_cfg_introducer_furl
return {
petname: (furl, get_cache_filepath(petname))
for (petname, furl)
in introducers.items()
}
def create_tub_options(config): def create_tub_options(config):
""" """
@ -740,8 +890,6 @@ class Node(service.MultiService):
self._i2p_provider = i2p_provider self._i2p_provider = i2p_provider
self._tor_provider = tor_provider self._tor_provider = tor_provider
self.init_tempdir()
self.create_log_tub() self.create_log_tub()
self.logSource = "Node" self.logSource = "Node"
self.setup_logging() self.setup_logging()
@ -759,7 +907,7 @@ class Node(service.MultiService):
if self.control_tub is not None: if self.control_tub is not None:
self.control_tub.setServiceParent(self) self.control_tub.setServiceParent(self)
self.log("Node constructed. " + get_package_versions_string()) self.log("Node constructed. " + __full_version__)
iputil.increase_rlimits() iputil.increase_rlimits()
def _is_tub_listening(self): def _is_tub_listening(self):
@ -768,25 +916,6 @@ class Node(service.MultiService):
""" """
return len(self.tub.getListeners()) > 0 return len(self.tub.getListeners()) > 0
def init_tempdir(self):
"""
Initialize/create a directory for temporary files.
"""
tempdir_config = self.config.get_config("node", "tempdir", "tmp")
if isinstance(tempdir_config, bytes):
tempdir_config = tempdir_config.decode('utf-8')
tempdir = self.config.get_config_path(tempdir_config)
if not os.path.exists(tempdir):
fileutil.make_dirs(tempdir)
tempfile.tempdir = tempdir
# this should cause twisted.web.http (which uses
# tempfile.TemporaryFile) to put large request bodies in the given
# directory. Without this, the default temp dir is usually /tmp/,
# which is frequently too small.
temp_fd, test_name = tempfile.mkstemp()
_assert(os.path.dirname(test_name) == tempdir, test_name, tempdir)
os.close(temp_fd) # avoid leak of unneeded fd
# pull this outside of Node's __init__ too, see: # pull this outside of Node's __init__ too, see:
# https://tahoe-lafs.org/trac/tahoe-lafs/ticket/2948 # https://tahoe-lafs.org/trac/tahoe-lafs/ticket/2948
def create_log_tub(self): def create_log_tub(self):

View File

@ -66,9 +66,9 @@ class NodeMaker(object):
memokey = b"I" + bigcap memokey = b"I" + bigcap
else: else:
memokey = b"M" + bigcap memokey = b"M" + bigcap
if memokey in self._node_cache: try:
node = self._node_cache[memokey] node = self._node_cache[memokey]
else: except KeyError:
cap = uri.from_string(bigcap, deep_immutable=deep_immutable, cap = uri.from_string(bigcap, deep_immutable=deep_immutable,
name=name) name=name)
node = self._create_from_single_cap(cap) node = self._create_from_single_cap(cap)

View File

@ -4,14 +4,15 @@ import os, sys, urllib, textwrap
import codecs import codecs
from os.path import join from os.path import join
from yaml import (
safe_dump,
)
# Python 2 compatibility # Python 2 compatibility
from future.utils import PY2 from future.utils import PY2
if PY2: if PY2:
from future.builtins import str # noqa: F401 from future.builtins import str # noqa: F401
# On Python 2 this will be the backported package:
from configparser import NoSectionError
from twisted.python import usage from twisted.python import usage
from allmydata.util.assertutil import precondition from allmydata.util.assertutil import precondition
@ -115,24 +116,42 @@ class NoDefaultBasedirOptions(BasedirOptions):
DEFAULT_ALIAS = u"tahoe" DEFAULT_ALIAS = u"tahoe"
def write_introducer(basedir, petname, furl):
"""
Overwrite the node's ``introducers.yaml`` with a file containing the given
introducer information.
"""
if isinstance(furl, bytes):
furl = furl.decode("utf-8")
basedir.child(b"private").child(b"introducers.yaml").setContent(
safe_dump({
"introducers": {
petname: {
"furl": furl,
},
},
}).encode("ascii"),
)
def get_introducer_furl(nodedir, config): def get_introducer_furl(nodedir, config):
""" """
:return: the introducer FURL for the given node (no matter if it's :return: the introducer FURL for the given node (no matter if it's
a client-type node or an introducer itself) a client-type node or an introducer itself)
""" """
for petname, (furl, cache) in config.get_introducer_configuration().items():
return furl
# We have no configured introducers. Maybe this is running *on* the
# introducer? Let's guess, sure why not.
try: try:
introducer_furl = config.get('client', 'introducer.furl') with open(join(nodedir, "private", "introducer.furl"), "r") as f:
except NoSectionError: return f.read().strip()
# we're not a client; maybe this is running *on* the introducer? except IOError:
try: raise Exception(
with open(join(nodedir, "private", "introducer.furl"), "r") as f: "Can't find introducer FURL in tahoe.cfg nor "
introducer_furl = f.read().strip() "{}/private/introducer.furl".format(nodedir)
except IOError: )
raise Exception(
"Can't find introducer FURL in tahoe.cfg nor "
"{}/private/introducer.furl".format(nodedir)
)
return introducer_furl
def get_aliases(nodedir): def get_aliases(nodedir):

View File

@ -5,11 +5,20 @@ import json
from twisted.internet import reactor, defer from twisted.internet import reactor, defer
from twisted.python.usage import UsageError from twisted.python.usage import UsageError
from allmydata.scripts.common import BasedirOptions, NoDefaultBasedirOptions from twisted.python.filepath import (
FilePath,
)
from allmydata.scripts.common import (
BasedirOptions,
NoDefaultBasedirOptions,
write_introducer,
)
from allmydata.scripts.default_nodedir import _default_nodedir from allmydata.scripts.default_nodedir import _default_nodedir
from allmydata.util.assertutil import precondition from allmydata.util.assertutil import precondition
from allmydata.util.encodingutil import listdir_unicode, argv_to_unicode, quote_local_unicode_path, get_io_encoding from allmydata.util.encodingutil import listdir_unicode, argv_to_unicode, quote_local_unicode_path, get_io_encoding
from allmydata.util import fileutil, i2p_provider, iputil, tor_provider from allmydata.util import fileutil, i2p_provider, iputil, tor_provider
from wormhole import wormhole from wormhole import wormhole
@ -299,12 +308,15 @@ def write_node_config(c, config):
def write_client_config(c, config): def write_client_config(c, config):
# note, config can be a plain dict, it seems -- see introducer = config.get("introducer", None)
# test_configutil.py in test_create_client_config if introducer is not None:
write_introducer(
FilePath(config["basedir"]),
"default",
introducer,
)
c.write("[client]\n") c.write("[client]\n")
c.write("# Which services should this client connect to?\n")
introducer = config.get("introducer", None) or ""
c.write("introducer.furl = %s\n" % introducer)
c.write("helper.furl =\n") c.write("helper.furl =\n")
c.write("#stats_gatherer.furl =\n") c.write("#stats_gatherer.furl =\n")
c.write("\n") c.write("\n")
@ -437,8 +449,11 @@ def create_node(config):
print("Node created in %s" % quote_local_unicode_path(basedir), file=out) print("Node created in %s" % quote_local_unicode_path(basedir), file=out)
tahoe_cfg = quote_local_unicode_path(os.path.join(basedir, "tahoe.cfg")) tahoe_cfg = quote_local_unicode_path(os.path.join(basedir, "tahoe.cfg"))
introducers_yaml = quote_local_unicode_path(
os.path.join(basedir, "private", "introducers.yaml"),
)
if not config.get("introducer", ""): if not config.get("introducer", ""):
print(" Please set [client]introducer.furl= in %s!" % tahoe_cfg, file=out) print(" Please add introducers to %s!" % (introducers_yaml,), file=out)
print(" The node cannot connect to a grid without it.", file=out) print(" The node cannot connect to a grid without it.", file=out)
if not config.get("nickname", ""): if not config.get("nickname", ""):
print(" Please set [node]nickname= in %s" % tahoe_cfg, file=out) print(" Please set [node]nickname= in %s" % tahoe_cfg, file=out)

View File

@ -7,7 +7,6 @@ import six
from twisted.python import usage from twisted.python import usage
from twisted.internet import defer, task, threads from twisted.internet import defer, task, threads
from allmydata.version_checks import get_package_versions_string
from allmydata.scripts.common import get_default_nodedir from allmydata.scripts.common import get_default_nodedir
from allmydata.scripts import debug, create_node, cli, \ from allmydata.scripts import debug, create_node, cli, \
stats_gatherer, admin, tahoe_daemonize, tahoe_start, \ stats_gatherer, admin, tahoe_daemonize, tahoe_start, \
@ -19,6 +18,10 @@ from allmydata.util.eliotutil import (
eliot_logging_service, eliot_logging_service,
) )
from .. import (
__full_version__,
)
_default_nodedir = get_default_nodedir() _default_nodedir = get_default_nodedir()
NODEDIR_HELP = ("Specify which Tahoe node directory should be used. The " NODEDIR_HELP = ("Specify which Tahoe node directory should be used. The "
@ -77,12 +80,10 @@ class Options(usage.Options):
] ]
def opt_version(self): def opt_version(self):
print(get_package_versions_string(debug=True), file=self.stdout) print(__full_version__, file=self.stdout)
self.no_command_needed = True self.no_command_needed = True
def opt_version_and_path(self): opt_version_and_path = opt_version
print(get_package_versions_string(show_paths=True, debug=True), file=self.stdout)
self.no_command_needed = True
opt_eliot_destination = opt_eliot_destination opt_eliot_destination = opt_eliot_destination
opt_help_eliot_destinations = opt_help_eliot_destinations opt_help_eliot_destinations = opt_help_eliot_destinations

View File

@ -1,16 +1,15 @@
from __future__ import print_function from __future__ import print_function
import json import json
from os.path import join
from twisted.python import usage from twisted.python import usage
from twisted.internet import defer, reactor from twisted.internet import defer, reactor
from wormhole import wormhole from wormhole import wormhole
from allmydata.util import configutil
from allmydata.util.encodingutil import argv_to_abspath from allmydata.util.encodingutil import argv_to_abspath
from allmydata.scripts.common import get_default_nodedir, get_introducer_furl from allmydata.scripts.common import get_default_nodedir, get_introducer_furl
from allmydata.node import read_config
class InviteOptions(usage.Options): class InviteOptions(usage.Options):
@ -77,7 +76,7 @@ def invite(options):
basedir = argv_to_abspath(options.parent['node-directory']) basedir = argv_to_abspath(options.parent['node-directory'])
else: else:
basedir = get_default_nodedir() basedir = get_default_nodedir()
config = configutil.get_config(join(basedir, 'tahoe.cfg')) config = read_config(basedir, u"")
out = options.stdout out = options.stdout
err = options.stderr err = options.stderr

View File

@ -8,6 +8,9 @@ if PY2:
from future.builtins import str # noqa: F401 from future.builtins import str # noqa: F401
from six.moves import cStringIO as StringIO from six.moves import cStringIO as StringIO
from twisted.python.filepath import (
FilePath,
)
from twisted.internet import defer, reactor, protocol, error from twisted.internet import defer, reactor, protocol, error
from twisted.application import service, internet from twisted.application import service, internet
from twisted.web import client as tw_client from twisted.web import client as tw_client
@ -21,6 +24,10 @@ from allmydata.util import fileutil, pollmixin
from allmydata.util.fileutil import abspath_expanduser_unicode from allmydata.util.fileutil import abspath_expanduser_unicode
from allmydata.util.encodingutil import get_filesystem_encoding from allmydata.util.encodingutil import get_filesystem_encoding
from allmydata.scripts.common import (
write_introducer,
)
class StallableHTTPGetterDiscarder(tw_client.HTTPPageGetter, object): class StallableHTTPGetterDiscarder(tw_client.HTTPPageGetter, object):
full_speed_ahead = False full_speed_ahead = False
_bytes_so_far = 0 _bytes_so_far = 0
@ -180,16 +187,18 @@ class SystemFramework(pollmixin.PollMixin):
self.introducer_furl = self.introducer.introducer_url self.introducer_furl = self.introducer.introducer_url
def make_nodes(self): def make_nodes(self):
root = FilePath(self.testdir)
self.nodes = [] self.nodes = []
for i in range(self.numnodes): for i in range(self.numnodes):
nodedir = os.path.join(self.testdir, "node%d" % i) nodedir = root.child("node%d" % (i,))
os.mkdir(nodedir) private = nodedir.child("private")
f = open(os.path.join(nodedir, "tahoe.cfg"), "w") private.makedirs()
f.write("[client]\n" write_introducer(nodedir, "default", self.introducer_url)
"introducer.furl = %s\n" config = (
"shares.happy = 1\n" "[client]\n"
"[storage]\n" "shares.happy = 1\n"
% (self.introducer_furl,)) "[storage]\n"
)
# the only tests for which we want the internal nodes to actually # the only tests for which we want the internal nodes to actually
# retain shares are the ones where somebody's going to download # retain shares are the ones where somebody's going to download
# them. # them.
@ -200,13 +209,13 @@ class SystemFramework(pollmixin.PollMixin):
# for these tests, we tell the storage servers to pretend to # for these tests, we tell the storage servers to pretend to
# accept shares, but really just throw them out, since we're # accept shares, but really just throw them out, since we're
# only testing upload and not download. # only testing upload and not download.
f.write("debug_discard = true\n") config += "debug_discard = true\n"
if self.mode in ("receive",): if self.mode in ("receive",):
# for this mode, the client-under-test gets all the shares, # for this mode, the client-under-test gets all the shares,
# so our internal nodes can refuse requests # so our internal nodes can refuse requests
f.write("readonly = true\n") config += "readonly = true\n"
f.close() nodedir.child("tahoe.cfg").setContent(config)
c = client.Client(basedir=nodedir) c = client.Client(basedir=nodedir.path)
c.setServiceParent(self) c.setServiceParent(self)
self.nodes.append(c) self.nodes.append(c)
# the peers will start running, eventually they will connect to each # the peers will start running, eventually they will connect to each
@ -235,16 +244,16 @@ this file are ignored.
quiet = StringIO() quiet = StringIO()
create_node.create_node({'basedir': clientdir}, out=quiet) create_node.create_node({'basedir': clientdir}, out=quiet)
log.msg("DONE MAKING CLIENT") log.msg("DONE MAKING CLIENT")
write_introducer(clientdir, "default", self.introducer_furl)
# now replace tahoe.cfg # now replace tahoe.cfg
# set webport=0 and then ask the node what port it picked. # set webport=0 and then ask the node what port it picked.
f = open(os.path.join(clientdir, "tahoe.cfg"), "w") f = open(os.path.join(clientdir, "tahoe.cfg"), "w")
f.write("[node]\n" f.write("[node]\n"
"web.port = tcp:0:interface=127.0.0.1\n" "web.port = tcp:0:interface=127.0.0.1\n"
"[client]\n" "[client]\n"
"introducer.furl = %s\n"
"shares.happy = 1\n" "shares.happy = 1\n"
"[storage]\n" "[storage]\n"
% (self.introducer_furl,)) )
if self.mode in ("upload-self", "receive"): if self.mode in ("upload-self", "receive"):
# accept and store shares, to trigger the memory consumption bugs # accept and store shares, to trigger the memory consumption bugs

View File

@ -1266,7 +1266,7 @@ class Options(ReallyEqualMixin, unittest.TestCase):
# "tahoe --version" dumps text to stdout and exits # "tahoe --version" dumps text to stdout and exits
stdout = StringIO() stdout = StringIO()
self.failUnlessRaises(SystemExit, self.parse, ["--version"], stdout) self.failUnlessRaises(SystemExit, self.parse, ["--version"], stdout)
self.failUnlessIn(allmydata.__appname__ + ":", stdout.getvalue()) self.failUnlessIn(allmydata.__full_version__, stdout.getvalue())
# but "tahoe SUBCOMMAND --version" should be rejected # but "tahoe SUBCOMMAND --version" should be rejected
self.failUnlessRaises(usage.UsageError, self.parse, self.failUnlessRaises(usage.UsageError, self.parse,
["start", "--version"]) ["start", "--version"])

View File

@ -52,13 +52,8 @@ class Config(unittest.TestCase):
create_node.write_node_config(f, opts) create_node.write_node_config(f, opts)
create_node.write_client_config(f, opts) create_node.write_client_config(f, opts)
config = configutil.get_config(fname)
# should succeed, no exceptions # should succeed, no exceptions
configutil.validate_config( client.read_config(d, "")
fname,
config,
client._valid_config(),
)
@defer.inlineCallbacks @defer.inlineCallbacks
def test_client(self): def test_client(self):

View File

@ -8,7 +8,9 @@ from twisted.internet import defer
from ..common_util import run_cli from ..common_util import run_cli
from ..no_network import GridTestMixin from ..no_network import GridTestMixin
from .common import CLITestMixin from .common import CLITestMixin
from ...client import (
read_config,
)
class _FakeWormhole(object): class _FakeWormhole(object):
@ -81,9 +83,19 @@ class Join(GridTestMixin, CLITestMixin, unittest.TestCase):
) )
self.assertEqual(0, rc) self.assertEqual(0, rc)
config = read_config(node_dir, u"")
self.assertIn(
"pb://foo",
set(
furl
for (furl, cache)
in config.get_introducer_configuration().values()
),
)
with open(join(node_dir, 'tahoe.cfg'), 'r') as f: with open(join(node_dir, 'tahoe.cfg'), 'r') as f:
config = f.read() config = f.read()
self.assertIn("pb://foo", config)
self.assertIn(u"somethinghopefullyunique", config) self.assertIn(u"somethinghopefullyunique", config)
@defer.inlineCallbacks @defer.inlineCallbacks

View File

@ -81,6 +81,9 @@ from allmydata.client import (
config_from_string, config_from_string,
create_client_from_config, create_client_from_config,
) )
from allmydata.scripts.common import (
write_introducer,
)
from ..crypto import ( from ..crypto import (
ed25519, ed25519,
@ -110,7 +113,6 @@ class MemoryIntroducerClient(object):
nickname = attr.ib() nickname = attr.ib()
my_version = attr.ib() my_version = attr.ib()
oldest_supported = attr.ib() oldest_supported = attr.ib()
app_versions = attr.ib()
sequencer = attr.ib() sequencer = attr.ib()
cache_filepath = attr.ib() cache_filepath = attr.ib()
@ -222,8 +224,8 @@ class UseNode(object):
""" """
plugin_config = attr.ib() plugin_config = attr.ib()
storage_plugin = attr.ib() storage_plugin = attr.ib()
basedir = attr.ib() basedir = attr.ib(validator=attr.validators.instance_of(FilePath))
introducer_furl = attr.ib() introducer_furl = attr.ib(validator=attr.validators.instance_of(bytes))
node_config = attr.ib(default=attr.Factory(dict)) node_config = attr.ib(default=attr.Factory(dict))
config = attr.ib(default=None) config = attr.ib(default=None)
@ -247,6 +249,11 @@ class UseNode(object):
config=format_config_items(self.plugin_config), config=format_config_items(self.plugin_config),
) )
write_introducer(
self.basedir,
"default",
self.introducer_furl,
)
self.config = config_from_string( self.config = config_from_string(
self.basedir.asTextMode().path, self.basedir.asTextMode().path,
"tub.port", "tub.port",
@ -255,11 +262,9 @@ class UseNode(object):
{node_config} {node_config}
[client] [client]
introducer.furl = {furl}
storage.plugins = {storage_plugin} storage.plugins = {storage_plugin}
{plugin_config_section} {plugin_config_section}
""".format( """.format(
furl=self.introducer_furl,
storage_plugin=self.storage_plugin, storage_plugin=self.storage_plugin,
node_config=format_config_items(self.node_config), node_config=format_config_items(self.node_config),
plugin_config_section=plugin_config_section, plugin_config_section=plugin_config_section,
@ -1151,8 +1156,9 @@ class _TestCaseMixin(object):
test (including setUp and tearDown messages). test (including setUp and tearDown messages).
* trial-compatible mktemp method * trial-compatible mktemp method
* unittest2-compatible assertRaises helper * unittest2-compatible assertRaises helper
* Automatic cleanup of tempfile.tempdir mutation (pervasive through the * Automatic cleanup of tempfile.tempdir mutation (once pervasive through
Tahoe-LAFS test suite). the Tahoe-LAFS test suite, perhaps gone now but someone should verify
this).
""" """
def setUp(self): def setUp(self):
# Restore the original temporary directory. Node ``init_tempdir`` # Restore the original temporary directory. Node ``init_tempdir``

View File

@ -0,0 +1 @@
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDx5JfaPwE2wfXIQcmlGte9EPAbrTmHPGOF/PuZ71XPa3mZTHMQQuc959gmLxupmcc5o4jYe8VTwT6bbNl6YM+HmCvL3XVH0BqdM2lpKCTB/WzSAyFUv8gSjQVXekRm9wF69tZkPrudqutTLhqXU5ESiUzfhU+CxHQW+kAf10Yd9R68V1f8jkuWjEoeVfCltj7O5fRlpouoTXn83MUAXB3J/wDjpjnjp2PxvXL2x5aCHtzd1WCGEmtWbHZvRA1a0EE233zfXNHg4xLd3ycUqAxoRlCcC230itUBXtr4qgDMzRdsL+HGWrcJ+4yezlQj+l8mc7vi5shNT7HDRfvi/rE7 exarkun@baryon

View File

@ -0,0 +1,27 @@
-----BEGIN OPENSSH PRIVATE KEY-----
b3BlbnNzaC1rZXktdjEAAAAABG5vbmUAAAAEbm9uZQAAAAAAAAABAAABFwAAAAdzc2gtcn
NhAAAAAwEAAQAAAQEA8eSX2j8BNsH1yEHJpRrXvRDwG605hzxjhfz7me9Vz2t5mUxzEELn
PefYJi8bqZnHOaOI2HvFU8E+m2zZemDPh5gry911R9AanTNpaSgkwf1s0gMhVL/IEo0FV3
pEZvcBevbWZD67narrUy4al1OREolM34VPgsR0FvpAH9dGHfUevFdX/I5LloxKHlXwpbY+
zuX0ZaaLqE15/NzFAFwdyf8A46Y546dj8b1y9seWgh7c3dVghhJrVmx2b0QNWtBBNt9831
zR4OMS3d8nFKgMaEZQnAtt9IrVAV7a+KoAzM0XbC/hxlq3CfuMns5UI/pfJnO74ubITU+x
w0X74v6xOwAAA8gG6fYoBun2KAAAAAdzc2gtcnNhAAABAQDx5JfaPwE2wfXIQcmlGte9EP
AbrTmHPGOF/PuZ71XPa3mZTHMQQuc959gmLxupmcc5o4jYe8VTwT6bbNl6YM+HmCvL3XVH
0BqdM2lpKCTB/WzSAyFUv8gSjQVXekRm9wF69tZkPrudqutTLhqXU5ESiUzfhU+CxHQW+k
Af10Yd9R68V1f8jkuWjEoeVfCltj7O5fRlpouoTXn83MUAXB3J/wDjpjnjp2PxvXL2x5aC
Htzd1WCGEmtWbHZvRA1a0EE233zfXNHg4xLd3ycUqAxoRlCcC230itUBXtr4qgDMzRdsL+
HGWrcJ+4yezlQj+l8mc7vi5shNT7HDRfvi/rE7AAAAAwEAAQAAAQBc8ukC/RjbULbAJ79z
SRhDV2HcULj9ZVAc6XRI13XSyUqlhIHmar7uw8sECTAJAMVUOanY/d56a5RCJxZ+dvrn8K
pLoSJy4N2JMHs95CYTwOzy2i8RoMwhjLzTu3DTW/DerkD9rjlrwYTBpsKjCYKCa+31KgW+
ivzM44aGdbNEyO+yHaxdcyEr3OLcRMppgZmwTieFnG053lCP5XyYRQmZ1a78G6WOzpOgbO
2N6Z1sbEqTMVd3oxFZAbmqA8kE4jLJzRcso/SSK5NDs22JzMfxByJQSlitWzDDvHdWpQpy
8C6Eu7+48ataLI68VOOXuDWDy9Dck0ev89u7Z4vNLWBhAAAAgAndOZZ0C179Um6sn6gmfM
0ttXEaSIqYNGRhkoYqn9vvw03bOMbSnqdEJiwFhbE/rWv7PypB5MeY7tRoCyBMWsUYj0pA
HKSl68diLr5g5EOIRGAWu8e//7T2HgZKOo+VaG1IXgmb7PUoAJ6Tzsmb4jdnYfg+BP/TDd
e9yCcoiT2fAAAAgQD6T7Kr6ECg0ME8vt/ixsjKdA2zS9SIHyjCMXbdMv1Ok1hkr5rRWbbZ
jm79fF+a8pOQUg30Qw2JUx7II50akt2xL6zesGDDUcOHD2GE/B6Ftji53G3fwWZCqeQ5sD
YP25qAWlrqDBGJvF+hkEdlceS8etYJ3XWXjNIYwfR7frQvkQAAAIEA92Pq3FWH63TS3Lqe
mQjhfNV75tU0AwENG+xlI1g0nQb7Qsdbm6rIg6XqewUfw03Q+/AqPvwG/1mbyVF7jRZ+qw
cl69yM70c9qY74GHjIIOOcC8Kgv29LQrm/VqVp0Lesn5RA8SIiLcMfyYBTEX8V9VY99Zkd
v6WwRr4XK1bPRgsAAAAOZXhhcmt1bkBiYXJ5b24BAgMEBQ==
-----END OPENSSH PRIVATE KEY-----

View File

@ -360,7 +360,7 @@ class NoNetworkGrid(service.MultiService):
to complete properly to complete properly
""" """
if self._setup_errors: if self._setup_errors:
raise self._setup_errors[0].value self._setup_errors[0].raiseException()
@defer.inlineCallbacks @defer.inlineCallbacks
def make_client(self, i, write_config=True): def make_client(self, i, write_config=True):

View File

@ -0,0 +1,111 @@
"""
Hypothesis strategies use for testing Tahoe-LAFS.
"""
from hypothesis.strategies import (
one_of,
builds,
binary,
)
from ..uri import (
WriteableSSKFileURI,
WriteableMDMFFileURI,
DirectoryURI,
MDMFDirectoryURI,
)
def write_capabilities():
"""
Build ``IURI`` providers representing all kinds of write capabilities.
"""
return one_of([
ssk_capabilities(),
mdmf_capabilities(),
dir2_capabilities(),
dir2_mdmf_capabilities(),
])
def ssk_capabilities():
"""
Build ``WriteableSSKFileURI`` instances.
"""
return builds(
WriteableSSKFileURI,
ssk_writekeys(),
ssk_fingerprints(),
)
def _writekeys(size=16):
"""
Build ``bytes`` representing write keys.
"""
return binary(min_size=size, max_size=size)
def ssk_writekeys():
"""
Build ``bytes`` representing SSK write keys.
"""
return _writekeys()
def _fingerprints(size=32):
"""
Build ``bytes`` representing fingerprints.
"""
return binary(min_size=size, max_size=size)
def ssk_fingerprints():
"""
Build ``bytes`` representing SSK fingerprints.
"""
return _fingerprints()
def mdmf_capabilities():
"""
Build ``WriteableMDMFFileURI`` instances.
"""
return builds(
WriteableMDMFFileURI,
mdmf_writekeys(),
mdmf_fingerprints(),
)
def mdmf_writekeys():
"""
Build ``bytes`` representing MDMF write keys.
"""
return _writekeys()
def mdmf_fingerprints():
"""
Build ``bytes`` representing MDMF fingerprints.
"""
return _fingerprints()
def dir2_capabilities():
"""
Build ``DirectoryURI`` instances.
"""
return builds(
DirectoryURI,
ssk_capabilities(),
)
def dir2_mdmf_capabilities():
"""
Build ``MDMFDirectoryURI`` instances.
"""
return builds(
MDMFDirectoryURI,
mdmf_capabilities(),
)

View File

@ -1,5 +1,4 @@
import os, sys import os, sys
import mock
from functools import ( from functools import (
partial, partial,
) )
@ -12,6 +11,15 @@ from fixtures import (
Fixture, Fixture,
TempDir, TempDir,
) )
from hypothesis import (
given,
)
from hypothesis.strategies import (
sampled_from,
booleans,
)
from eliot.testing import ( from eliot.testing import (
capture_logging, capture_logging,
assertHasAction, assertHasAction,
@ -39,11 +47,11 @@ from testtools.twistedsupport import (
import allmydata import allmydata
import allmydata.util.log import allmydata.util.log
from allmydata.nodemaker import (
NodeMaker,
)
from allmydata.node import OldConfigError, UnescapedHashError, create_node_dir from allmydata.node import OldConfigError, UnescapedHashError, create_node_dir
from allmydata.frontends.auth import NeedRootcapLookupScheme from allmydata.frontends.auth import NeedRootcapLookupScheme
from allmydata.version_checks import (
get_package_versions_string,
)
from allmydata import client from allmydata import client
from allmydata.storage_client import ( from allmydata.storage_client import (
StorageClientConfig, StorageClientConfig,
@ -58,11 +66,15 @@ from allmydata.util import (
from allmydata.util.fileutil import abspath_expanduser_unicode from allmydata.util.fileutil import abspath_expanduser_unicode
from allmydata.interfaces import IFilesystemNode, IFileNode, \ from allmydata.interfaces import IFilesystemNode, IFileNode, \
IImmutableFileNode, IMutableFileNode, IDirectoryNode IImmutableFileNode, IMutableFileNode, IDirectoryNode
from allmydata.scripts.common import (
write_introducer,
)
from foolscap.api import flushEventualQueue from foolscap.api import flushEventualQueue
import allmydata.test.common_util as testutil import allmydata.test.common_util as testutil
from .common import ( from .common import (
EMPTY_CLIENT_CONFIG, EMPTY_CLIENT_CONFIG,
SyncTestCase, SyncTestCase,
AsyncBrokenTestCase,
UseTestPlugins, UseTestPlugins,
MemoryIntroducerClient, MemoryIntroducerClient,
get_published_announcements, get_published_announcements,
@ -72,16 +84,13 @@ from .matchers import (
matches_storage_announcement, matches_storage_announcement,
matches_furl, matches_furl,
) )
from .strategies import (
write_capabilities,
)
SOME_FURL = b"pb://abcde@nowhere/fake" SOME_FURL = b"pb://abcde@nowhere/fake"
BASECONFIG = ("[client]\n" BASECONFIG = "[client]\n"
"introducer.furl = \n"
)
BASECONFIG_I = ("[client]\n"
"introducer.furl = %s\n"
)
class Basic(testutil.ReallyEqualMixin, unittest.TestCase): class Basic(testutil.ReallyEqualMixin, unittest.TestCase):
def test_loadable(self): def test_loadable(self):
@ -123,14 +132,14 @@ class Basic(testutil.ReallyEqualMixin, unittest.TestCase):
def write_config(s): def write_config(s):
config = ("[client]\n" config = ("[client]\n"
"introducer.furl = %s\n" % s) "helper.furl = %s\n" % s)
fileutil.write(os.path.join(basedir, "tahoe.cfg"), config) fileutil.write(os.path.join(basedir, "tahoe.cfg"), config)
for s in should_fail: for s in should_fail:
write_config(s) write_config(s)
with self.assertRaises(UnescapedHashError) as ctx: with self.assertRaises(UnescapedHashError) as ctx:
yield client.create_client(basedir) yield client.create_client(basedir)
self.assertIn("[client]introducer.furl", str(ctx.exception)) self.assertIn("[client]helper.furl", str(ctx.exception))
def test_unreadable_config(self): def test_unreadable_config(self):
if sys.platform == "win32": if sys.platform == "win32":
@ -422,19 +431,32 @@ class Basic(testutil.ReallyEqualMixin, unittest.TestCase):
""" """
configuration for sftpd results in it being started configuration for sftpd results in it being started
""" """
root = FilePath(self.mktemp())
root.makedirs()
accounts = root.child(b"sftp-accounts")
accounts.touch()
data = FilePath(__file__).sibling(b"data")
privkey = data.child(b"openssh-rsa-2048.txt")
pubkey = data.child(b"openssh-rsa-2048.pub.txt")
basedir = u"client.Basic.test_ftp_create" basedir = u"client.Basic.test_ftp_create"
create_node_dir(basedir, "testing") create_node_dir(basedir, "testing")
with open(os.path.join(basedir, "tahoe.cfg"), "w") as f: with open(os.path.join(basedir, "tahoe.cfg"), "w") as f:
f.write( f.write((
'[sftpd]\n' '[sftpd]\n'
'enabled = true\n' 'enabled = true\n'
'accounts.file = foo\n' 'accounts.file = {}\n'
'host_pubkey_file = pubkey\n' 'host_pubkey_file = {}\n'
'host_privkey_file = privkey\n' 'host_privkey_file = {}\n'
) ).format(accounts.path, pubkey.path, privkey.path))
with mock.patch('allmydata.frontends.sftpd.SFTPServer') as p:
yield client.create_client(basedir) client_node = yield client.create_client(
self.assertTrue(p.called) basedir,
)
sftp = client_node.getServiceNamed("frontend:sftp")
self.assertIs(sftp.parent, client_node)
@defer.inlineCallbacks @defer.inlineCallbacks
def test_ftp_auth_keyfile(self): def test_ftp_auth_keyfile(self):
@ -621,8 +643,6 @@ class Basic(testutil.ReallyEqualMixin, unittest.TestCase):
self.failIfEqual(str(allmydata.__version__), "unknown") self.failIfEqual(str(allmydata.__version__), "unknown")
self.failUnless("." in str(allmydata.__full_version__), self.failUnless("." in str(allmydata.__full_version__),
"non-numeric version in '%s'" % allmydata.__version__) "non-numeric version in '%s'" % allmydata.__version__)
all_versions = get_package_versions_string()
self.failUnless(allmydata.__appname__ in all_versions)
# also test stats # also test stats
stats = c.get_stats() stats = c.get_stats()
self.failUnless("node.uptime" in stats) self.failUnless("node.uptime" in stats)
@ -670,12 +690,13 @@ class AnonymousStorage(SyncTestCase):
""" """
If anonymous storage access is enabled then the client announces it. If anonymous storage access is enabled then the client announces it.
""" """
basedir = self.id() basedir = FilePath(self.id())
os.makedirs(basedir + b"/private") basedir.child("private").makedirs()
write_introducer(basedir, "someintroducer", SOME_FURL)
config = client.config_from_string( config = client.config_from_string(
basedir, basedir.path,
"tub.port", "tub.port",
BASECONFIG_I % (SOME_FURL,) + ( BASECONFIG + (
"[storage]\n" "[storage]\n"
"enabled = true\n" "enabled = true\n"
"anonymous = true\n" "anonymous = true\n"
@ -689,7 +710,7 @@ class AnonymousStorage(SyncTestCase):
get_published_announcements(node), get_published_announcements(node),
MatchesListwise([ MatchesListwise([
matches_storage_announcement( matches_storage_announcement(
basedir, basedir.path,
anonymous=True, anonymous=True,
), ),
]), ]),
@ -701,12 +722,13 @@ class AnonymousStorage(SyncTestCase):
If anonymous storage access is disabled then the client does not announce If anonymous storage access is disabled then the client does not announce
it nor does it write a fURL for it to beneath the node directory. it nor does it write a fURL for it to beneath the node directory.
""" """
basedir = self.id() basedir = FilePath(self.id())
os.makedirs(basedir + b"/private") basedir.child("private").makedirs()
write_introducer(basedir, "someintroducer", SOME_FURL)
config = client.config_from_string( config = client.config_from_string(
basedir, basedir.path,
"tub.port", "tub.port",
BASECONFIG_I % (SOME_FURL,) + ( BASECONFIG + (
"[storage]\n" "[storage]\n"
"enabled = true\n" "enabled = true\n"
"anonymous = false\n" "anonymous = false\n"
@ -720,7 +742,7 @@ class AnonymousStorage(SyncTestCase):
get_published_announcements(node), get_published_announcements(node),
MatchesListwise([ MatchesListwise([
matches_storage_announcement( matches_storage_announcement(
basedir, basedir.path,
anonymous=False, anonymous=False,
), ),
]), ]),
@ -738,12 +760,12 @@ class AnonymousStorage(SyncTestCase):
possible to reach the anonymous storage server via the originally possible to reach the anonymous storage server via the originally
published fURL. published fURL.
""" """
basedir = self.id() basedir = FilePath(self.id())
os.makedirs(basedir + b"/private") basedir.child("private").makedirs()
enabled_config = client.config_from_string( enabled_config = client.config_from_string(
basedir, basedir.path,
"tub.port", "tub.port",
BASECONFIG_I % (SOME_FURL,) + ( BASECONFIG + (
"[storage]\n" "[storage]\n"
"enabled = true\n" "enabled = true\n"
"anonymous = true\n" "anonymous = true\n"
@ -765,9 +787,9 @@ class AnonymousStorage(SyncTestCase):
) )
disabled_config = client.config_from_string( disabled_config = client.config_from_string(
basedir, basedir.path,
"tub.port", "tub.port",
BASECONFIG_I % (SOME_FURL,) + ( BASECONFIG + (
"[storage]\n" "[storage]\n"
"enabled = true\n" "enabled = true\n"
"anonymous = false\n" "anonymous = false\n"
@ -787,8 +809,8 @@ class IntroducerClients(unittest.TestCase):
def test_invalid_introducer_furl(self): def test_invalid_introducer_furl(self):
""" """
An introducer.furl of 'None' is invalid and causes An introducer.furl of 'None' in the deprecated [client]introducer.furl
create_introducer_clients to fail. field is invalid and causes `create_introducer_clients` to fail.
""" """
cfg = ( cfg = (
"[client]\n" "[client]\n"
@ -953,20 +975,28 @@ class Run(unittest.TestCase, testutil.StallMixin):
@defer.inlineCallbacks @defer.inlineCallbacks
def test_loadable(self): def test_loadable(self):
basedir = "test_client.Run.test_loadable" """
os.mkdir(basedir) A configuration consisting only of an introducer can be turned into a
client node.
"""
basedir = FilePath("test_client.Run.test_loadable")
private = basedir.child("private")
private.makedirs()
dummy = "pb://wl74cyahejagspqgy4x5ukrvfnevlknt@127.0.0.1:58889/bogus" dummy = "pb://wl74cyahejagspqgy4x5ukrvfnevlknt@127.0.0.1:58889/bogus"
fileutil.write(os.path.join(basedir, "tahoe.cfg"), BASECONFIG_I % dummy) write_introducer(basedir, "someintroducer", dummy)
fileutil.write(os.path.join(basedir, client._Client.EXIT_TRIGGER_FILE), "") basedir.child("tahoe.cfg").setContent(BASECONFIG)
yield client.create_client(basedir) basedir.child(client._Client.EXIT_TRIGGER_FILE).touch()
yield client.create_client(basedir.path)
@defer.inlineCallbacks @defer.inlineCallbacks
def test_reloadable(self): def test_reloadable(self):
basedir = "test_client.Run.test_reloadable" basedir = FilePath("test_client.Run.test_reloadable")
os.mkdir(basedir) private = basedir.child("private")
private.makedirs()
dummy = "pb://wl74cyahejagspqgy4x5ukrvfnevlknt@127.0.0.1:58889/bogus" dummy = "pb://wl74cyahejagspqgy4x5ukrvfnevlknt@127.0.0.1:58889/bogus"
fileutil.write(os.path.join(basedir, "tahoe.cfg"), BASECONFIG_I % dummy) write_introducer(basedir, "someintroducer", dummy)
c1 = yield client.create_client(basedir) basedir.child("tahoe.cfg").setContent(BASECONFIG)
c1 = yield client.create_client(basedir.path)
c1.setServiceParent(self.sparent) c1.setServiceParent(self.sparent)
# delay to let the service start up completely. I'm not entirely sure # delay to let the service start up completely. I'm not entirely sure
@ -988,11 +1018,102 @@ class Run(unittest.TestCase, testutil.StallMixin):
# also change _check_exit_trigger to use it instead of a raw # also change _check_exit_trigger to use it instead of a raw
# reactor.stop, also instrument the shutdown event in an # reactor.stop, also instrument the shutdown event in an
# attribute that we can check.) # attribute that we can check.)
c2 = yield client.create_client(basedir) c2 = yield client.create_client(basedir.path)
c2.setServiceParent(self.sparent) c2.setServiceParent(self.sparent)
yield c2.disownServiceParent() yield c2.disownServiceParent()
class NodeMaker(testutil.ReallyEqualMixin, unittest.TestCase): class NodeMakerTests(testutil.ReallyEqualMixin, AsyncBrokenTestCase):
def _make_node_maker(self, mode, writecap, deep_immutable):
"""
Create a callable which can create an ``IFilesystemNode`` provider for the
given cap.
:param unicode mode: The read/write combination to pass to
``NodeMaker.create_from_cap``. If it contains ``u"r"`` then a
readcap will be passed in. If it contains ``u"w"`` then a
writecap will be passed in.
:param IURI writecap: The capability for which to create a node.
:param bool deep_immutable: Whether to request a "deep immutable" node
which forces the result to be an immutable ``IFilesystemNode`` (I
think -exarkun).
"""
if writecap.is_mutable():
# It's just not a valid combination to have a mutable alongside
# deep_immutable = True. It's easier to fix deep_immutable than
# writecap to clear up this conflict.
deep_immutable = False
if "r" in mode:
readcap = writecap.get_readonly().to_string()
else:
readcap = None
if "w" in mode:
writecap = writecap.to_string()
else:
writecap = None
nm = NodeMaker(
storage_broker=None,
secret_holder=None,
history=None,
uploader=None,
terminator=None,
default_encoding_parameters={u"k": 1, u"n": 1},
mutable_file_default=None,
key_generator=None,
blacklist=None,
)
return partial(
nm.create_from_cap,
writecap,
readcap,
deep_immutable,
)
@given(
mode=sampled_from(["w", "r", "rw"]),
writecap=write_capabilities(),
deep_immutable=booleans(),
)
def test_cached_result(self, mode, writecap, deep_immutable):
"""
``NodeMaker.create_from_cap`` returns the same object when called with the
same arguments.
"""
make_node = self._make_node_maker(mode, writecap, deep_immutable)
original = make_node()
additional = make_node()
self.assertThat(
original,
Is(additional),
)
@given(
mode=sampled_from(["w", "r", "rw"]),
writecap=write_capabilities(),
deep_immutable=booleans(),
)
def test_cache_expired(self, mode, writecap, deep_immutable):
"""
After the node object returned by an earlier call to
``NodeMaker.create_from_cap`` has been garbage collected, a new call
to ``NodeMaker.create_from_cap`` returns a node object, maybe even a
new one although we can't really prove it.
"""
make_node = self._make_node_maker(mode, writecap, deep_immutable)
make_node()
additional = make_node()
self.assertThat(
additional,
AfterPreprocessing(
lambda node: node.get_readonly_uri(),
Equals(writecap.get_readonly().to_string()),
),
)
@defer.inlineCallbacks @defer.inlineCallbacks
def test_maker(self): def test_maker(self):
@ -1127,12 +1248,18 @@ class StorageAnnouncementTests(SyncTestCase):
""" """
def setUp(self): def setUp(self):
super(StorageAnnouncementTests, self).setUp() super(StorageAnnouncementTests, self).setUp()
self.basedir = self.useFixture(TempDir()).path self.basedir = FilePath(self.useFixture(TempDir()).path)
create_node_dir(self.basedir, u"") create_node_dir(self.basedir.path, u"")
# Write an introducer configuration or we can't observer
# announcements.
write_introducer(self.basedir, "someintroducer", SOME_FURL)
def get_config(self, storage_enabled, more_storage="", more_sections=""): def get_config(self, storage_enabled, more_storage="", more_sections=""):
return """ return """
[client]
# Empty
[node] [node]
tub.location = tcp:192.0.2.0:1234 tub.location = tcp:192.0.2.0:1234
@ -1140,9 +1267,6 @@ tub.location = tcp:192.0.2.0:1234
enabled = {storage_enabled} enabled = {storage_enabled}
{more_storage} {more_storage}
[client]
introducer.furl = pb://abcde@nowhere/fake
{more_sections} {more_sections}
""".format( """.format(
storage_enabled=storage_enabled, storage_enabled=storage_enabled,
@ -1156,7 +1280,7 @@ introducer.furl = pb://abcde@nowhere/fake
No storage announcement is published if storage is not enabled. No storage announcement is published if storage is not enabled.
""" """
config = client.config_from_string( config = client.config_from_string(
self.basedir, self.basedir.path,
"tub.port", "tub.port",
self.get_config(storage_enabled=False), self.get_config(storage_enabled=False),
) )
@ -1178,7 +1302,7 @@ introducer.furl = pb://abcde@nowhere/fake
storage is enabled. storage is enabled.
""" """
config = client.config_from_string( config = client.config_from_string(
self.basedir, self.basedir.path,
"tub.port", "tub.port",
self.get_config(storage_enabled=True), self.get_config(storage_enabled=True),
) )
@ -1195,7 +1319,7 @@ introducer.furl = pb://abcde@nowhere/fake
# Match the following list (of one element) ... # Match the following list (of one element) ...
MatchesListwise([ MatchesListwise([
# The only element in the list ... # The only element in the list ...
matches_storage_announcement(self.basedir), matches_storage_announcement(self.basedir.path),
]), ]),
)), )),
) )
@ -1210,7 +1334,7 @@ introducer.furl = pb://abcde@nowhere/fake
value = u"thing" value = u"thing"
config = client.config_from_string( config = client.config_from_string(
self.basedir, self.basedir.path,
"tub.port", "tub.port",
self.get_config( self.get_config(
storage_enabled=True, storage_enabled=True,
@ -1230,7 +1354,7 @@ introducer.furl = pb://abcde@nowhere/fake
get_published_announcements, get_published_announcements,
MatchesListwise([ MatchesListwise([
matches_storage_announcement( matches_storage_announcement(
self.basedir, self.basedir.path,
options=[ options=[
matches_dummy_announcement( matches_dummy_announcement(
u"tahoe-lafs-dummy-v1", u"tahoe-lafs-dummy-v1",
@ -1251,7 +1375,7 @@ introducer.furl = pb://abcde@nowhere/fake
self.useFixture(UseTestPlugins()) self.useFixture(UseTestPlugins())
config = client.config_from_string( config = client.config_from_string(
self.basedir, self.basedir.path,
"tub.port", "tub.port",
self.get_config( self.get_config(
storage_enabled=True, storage_enabled=True,
@ -1273,7 +1397,7 @@ introducer.furl = pb://abcde@nowhere/fake
get_published_announcements, get_published_announcements,
MatchesListwise([ MatchesListwise([
matches_storage_announcement( matches_storage_announcement(
self.basedir, self.basedir.path,
options=[ options=[
matches_dummy_announcement( matches_dummy_announcement(
u"tahoe-lafs-dummy-v1", u"tahoe-lafs-dummy-v1",
@ -1299,7 +1423,7 @@ introducer.furl = pb://abcde@nowhere/fake
self.useFixture(UseTestPlugins()) self.useFixture(UseTestPlugins())
config = client.config_from_string( config = client.config_from_string(
self.basedir, self.basedir.path,
"tub.port", "tub.port",
self.get_config( self.get_config(
storage_enabled=True, storage_enabled=True,
@ -1335,7 +1459,7 @@ introducer.furl = pb://abcde@nowhere/fake
self.useFixture(UseTestPlugins()) self.useFixture(UseTestPlugins())
config = client.config_from_string( config = client.config_from_string(
self.basedir, self.basedir.path,
"tub.port", "tub.port",
self.get_config( self.get_config(
storage_enabled=True, storage_enabled=True,
@ -1351,7 +1475,7 @@ introducer.furl = pb://abcde@nowhere/fake
get_published_announcements, get_published_announcements,
MatchesListwise([ MatchesListwise([
matches_storage_announcement( matches_storage_announcement(
self.basedir, self.basedir.path,
options=[ options=[
matches_dummy_announcement( matches_dummy_announcement(
u"tahoe-lafs-dummy-v1", u"tahoe-lafs-dummy-v1",
@ -1373,7 +1497,7 @@ introducer.furl = pb://abcde@nowhere/fake
self.useFixture(UseTestPlugins()) self.useFixture(UseTestPlugins())
config = client.config_from_string( config = client.config_from_string(
self.basedir, self.basedir.path,
"tub.port", "tub.port",
self.get_config( self.get_config(
storage_enabled=True, storage_enabled=True,
@ -1400,7 +1524,7 @@ introducer.furl = pb://abcde@nowhere/fake
available on the system. available on the system.
""" """
config = client.config_from_string( config = client.config_from_string(
self.basedir, self.basedir.path,
"tub.port", "tub.port",
self.get_config( self.get_config(
storage_enabled=True, storage_enabled=True,

View File

@ -14,12 +14,89 @@ if PY2:
from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, list, object, range, str, max, min # noqa: F401 from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, list, object, range, str, max, min # noqa: F401
import os.path import os.path
from configparser import (
ConfigParser,
)
from functools import (
partial,
)
from hypothesis import (
given,
)
from hypothesis.strategies import (
dictionaries,
text,
characters,
)
from twisted.python.filepath import (
FilePath,
)
from twisted.trial import unittest from twisted.trial import unittest
from allmydata.util import configutil from allmydata.util import configutil
def arbitrary_config_dicts(
min_sections=0,
max_sections=3,
max_section_name_size=8,
max_items_per_section=3,
max_item_length=8,
max_value_length=8,
):
"""
Build ``dict[str, dict[str, str]]`` instances populated with arbitrary
configurations.
"""
identifier_text = partial(
text,
# Don't allow most control characters or spaces
alphabet=characters(
blacklist_categories=('Cc', 'Cs', 'Zs'),
),
)
return dictionaries(
identifier_text(
min_size=1,
max_size=max_section_name_size,
),
dictionaries(
identifier_text(
min_size=1,
max_size=max_item_length,
),
text(max_size=max_value_length),
max_size=max_items_per_section,
),
min_size=min_sections,
max_size=max_sections,
)
def to_configparser(dictconfig):
"""
Take a ``dict[str, dict[str, str]]`` and turn it into the corresponding
populated ``ConfigParser`` instance.
"""
cp = ConfigParser()
for section, items in dictconfig.items():
cp.add_section(section)
for k, v in items.items():
cp.set(
section,
k,
# ConfigParser has a feature that everyone knows and loves
# where it will use %-style interpolation to substitute
# values from one part of the config into another part of
# the config. Escape all our `%`s to avoid hitting this
# and complicating things.
v.replace("%", "%%"),
)
return cp
class ConfigUtilTests(unittest.TestCase): class ConfigUtilTests(unittest.TestCase):
def setUp(self): def setUp(self):
super(ConfigUtilTests, self).setUp() super(ConfigUtilTests, self).setUp()
@ -55,7 +132,7 @@ enabled = false
# test that set_config can mutate an existing option # test that set_config can mutate an existing option
configutil.set_config(config, "node", "nickname", "Alice!") configutil.set_config(config, "node", "nickname", "Alice!")
configutil.write_config(tahoe_cfg, config) configutil.write_config(FilePath(tahoe_cfg), config)
config = configutil.get_config(tahoe_cfg) config = configutil.get_config(tahoe_cfg)
self.failUnlessEqual(config.get("node", "nickname"), "Alice!") self.failUnlessEqual(config.get("node", "nickname"), "Alice!")
@ -63,19 +140,21 @@ enabled = false
# test that set_config can set a new option # test that set_config can set a new option
descriptor = "Twas brillig, and the slithy toves Did gyre and gimble in the wabe" descriptor = "Twas brillig, and the slithy toves Did gyre and gimble in the wabe"
configutil.set_config(config, "node", "descriptor", descriptor) configutil.set_config(config, "node", "descriptor", descriptor)
configutil.write_config(tahoe_cfg, config) configutil.write_config(FilePath(tahoe_cfg), config)
config = configutil.get_config(tahoe_cfg) config = configutil.get_config(tahoe_cfg)
self.failUnlessEqual(config.get("node", "descriptor"), descriptor) self.failUnlessEqual(config.get("node", "descriptor"), descriptor)
def test_config_validation_success(self): def test_config_validation_success(self):
fname = self.create_tahoe_cfg('[node]\nvalid = foo\n') """
``configutil.validate_config`` returns ``None`` when the configuration it
config = configutil.get_config(fname) is given has nothing more than the static sections and items defined
by the validator.
"""
# should succeed, no exceptions # should succeed, no exceptions
configutil.validate_config( configutil.validate_config(
fname, "<test_config_validation_success>",
config, to_configparser({"node": {"valid": "foo"}}),
self.static_valid_config, self.static_valid_config,
) )
@ -85,24 +164,20 @@ enabled = false
validation but are matched by the dynamic validation is considered validation but are matched by the dynamic validation is considered
valid. valid.
""" """
fname = self.create_tahoe_cfg('[node]\nvalid = foo\n')
config = configutil.get_config(fname)
# should succeed, no exceptions # should succeed, no exceptions
configutil.validate_config( configutil.validate_config(
fname, "<test_config_dynamic_validation_success>",
config, to_configparser({"node": {"valid": "foo"}}),
self.dynamic_valid_config, self.dynamic_valid_config,
) )
def test_config_validation_invalid_item(self): def test_config_validation_invalid_item(self):
fname = self.create_tahoe_cfg('[node]\nvalid = foo\ninvalid = foo\n') config = to_configparser({"node": {"valid": "foo", "invalid": "foo"}})
config = configutil.get_config(fname)
e = self.assertRaises( e = self.assertRaises(
configutil.UnknownConfigError, configutil.UnknownConfigError,
configutil.validate_config, configutil.validate_config,
fname, config, "<test_config_validation_invalid_item>",
config,
self.static_valid_config, self.static_valid_config,
) )
self.assertIn("section [node] contains unknown option 'invalid'", str(e)) self.assertIn("section [node] contains unknown option 'invalid'", str(e))
@ -112,13 +187,12 @@ enabled = false
A configuration with a section that is matched by neither the static nor A configuration with a section that is matched by neither the static nor
dynamic validators is rejected. dynamic validators is rejected.
""" """
fname = self.create_tahoe_cfg('[node]\nvalid = foo\n[invalid]\n') config = to_configparser({"node": {"valid": "foo"}, "invalid": {}})
config = configutil.get_config(fname)
e = self.assertRaises( e = self.assertRaises(
configutil.UnknownConfigError, configutil.UnknownConfigError,
configutil.validate_config, configutil.validate_config,
fname, config, "<test_config_validation_invalid_section>",
config,
self.static_valid_config, self.static_valid_config,
) )
self.assertIn("contains unknown section [invalid]", str(e)) self.assertIn("contains unknown section [invalid]", str(e))
@ -128,13 +202,12 @@ enabled = false
A configuration with a section that is matched by neither the static nor A configuration with a section that is matched by neither the static nor
dynamic validators is rejected. dynamic validators is rejected.
""" """
fname = self.create_tahoe_cfg('[node]\nvalid = foo\n[invalid]\n') config = to_configparser({"node": {"valid": "foo"}, "invalid": {}})
config = configutil.get_config(fname)
e = self.assertRaises( e = self.assertRaises(
configutil.UnknownConfigError, configutil.UnknownConfigError,
configutil.validate_config, configutil.validate_config,
fname, config, "<test_config_dynamic_validation_invalid_section>",
config,
self.dynamic_valid_config, self.dynamic_valid_config,
) )
self.assertIn("contains unknown section [invalid]", str(e)) self.assertIn("contains unknown section [invalid]", str(e))
@ -144,13 +217,12 @@ enabled = false
A configuration with a section, item pair that is matched by neither the A configuration with a section, item pair that is matched by neither the
static nor dynamic validators is rejected. static nor dynamic validators is rejected.
""" """
fname = self.create_tahoe_cfg('[node]\nvalid = foo\ninvalid = foo\n') config = to_configparser({"node": {"valid": "foo", "invalid": "foo"}})
config = configutil.get_config(fname)
e = self.assertRaises( e = self.assertRaises(
configutil.UnknownConfigError, configutil.UnknownConfigError,
configutil.validate_config, configutil.validate_config,
fname, config, "<test_config_dynamic_validation_invalid_item>",
config,
self.dynamic_valid_config, self.dynamic_valid_config,
) )
self.assertIn("section [node] contains unknown option 'invalid'", str(e)) self.assertIn("section [node] contains unknown option 'invalid'", str(e))
@ -163,3 +235,61 @@ enabled = false
config = configutil.get_config(fname) config = configutil.get_config(fname)
self.assertEqual(config.get("node", "a"), "foo") self.assertEqual(config.get("node", "a"), "foo")
self.assertEqual(config.get("node", "b"), "bar") self.assertEqual(config.get("node", "b"), "bar")
@given(arbitrary_config_dicts())
def test_everything_valid(self, cfgdict):
"""
``validate_config`` returns ``None`` when the validator is
``ValidConfiguration.everything()``.
"""
cfg = to_configparser(cfgdict)
self.assertIs(
configutil.validate_config(
"<test_everything_valid>",
cfg,
configutil.ValidConfiguration.everything(),
),
None,
)
@given(arbitrary_config_dicts(min_sections=1))
def test_nothing_valid(self, cfgdict):
"""
``validate_config`` raises ``UnknownConfigError`` when the validator is
``ValidConfiguration.nothing()`` for all non-empty configurations.
"""
cfg = to_configparser(cfgdict)
with self.assertRaises(configutil.UnknownConfigError):
configutil.validate_config(
"<test_everything_valid>",
cfg,
configutil.ValidConfiguration.nothing(),
)
def test_nothing_empty_valid(self):
"""
``validate_config`` returns ``None`` when the validator is
``ValidConfiguration.nothing()`` if the configuration is empty.
"""
cfg = ConfigParser()
self.assertIs(
configutil.validate_config(
"<test_everything_valid>",
cfg,
configutil.ValidConfiguration.nothing(),
),
None,
)
@given(arbitrary_config_dicts())
def test_copy_config(self, cfgdict):
"""
``copy_config`` creates a new ``ConfigParser`` object containing the same
values as its input.
"""
cfg = to_configparser(cfgdict)
copied = configutil.copy_config(cfg)
# Should be equal
self.assertEqual(cfg, copied)
# But not because they're the same object.
self.assertIsNot(cfg, copied)

View File

@ -1,3 +1,16 @@
"""
Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from future.utils import PY2
if PY2:
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
from six import ensure_binary, ensure_text
import os, re, itertools import os, re, itertools
from base64 import b32decode from base64 import b32decode
@ -39,8 +52,11 @@ from allmydata.util import pollmixin, idlib, fileutil, yamlutil
from allmydata.util.iputil import ( from allmydata.util.iputil import (
listenOnUnused, listenOnUnused,
) )
from allmydata.scripts.common import (
write_introducer,
)
import allmydata.test.common_util as testutil import allmydata.test.common_util as testutil
from allmydata.test.common import ( from .common import (
SyncTestCase, SyncTestCase,
AsyncTestCase, AsyncTestCase,
AsyncBrokenTestCase, AsyncBrokenTestCase,
@ -100,7 +116,7 @@ class Node(testutil.SignalMixin, testutil.ReallyEqualMixin, AsyncTestCase):
q1 = yield create_introducer(basedir) q1 = yield create_introducer(basedir)
del q1 del q1
# new nodes create unguessable furls in private/introducer.furl # new nodes create unguessable furls in private/introducer.furl
ifurl = fileutil.read(private_fn) ifurl = fileutil.read(private_fn, mode="r")
self.failUnless(ifurl) self.failUnless(ifurl)
ifurl = ifurl.strip() ifurl = ifurl.strip()
self.failIf(ifurl.endswith("/introducer"), ifurl) self.failIf(ifurl.endswith("/introducer"), ifurl)
@ -120,7 +136,7 @@ class Node(testutil.SignalMixin, testutil.ReallyEqualMixin, AsyncTestCase):
q2 = yield create_introducer(basedir) q2 = yield create_introducer(basedir)
del q2 del q2
self.failIf(os.path.exists(public_fn)) self.failIf(os.path.exists(public_fn))
ifurl2 = fileutil.read(private_fn) ifurl2 = fileutil.read(private_fn, mode="r")
self.failUnless(ifurl2) self.failUnless(ifurl2)
self.failUnlessEqual(ifurl2.strip(), guessable) self.failUnlessEqual(ifurl2.strip(), guessable)
@ -155,7 +171,7 @@ class ServiceMixin(object):
class Introducer(ServiceMixin, AsyncTestCase): class Introducer(ServiceMixin, AsyncTestCase):
def test_create(self): def test_create(self):
ic = IntroducerClient(None, "introducer.furl", u"my_nickname", ic = IntroducerClient(None, "introducer.furl", u"my_nickname",
"my_version", "oldest_version", {}, fakeseq, "my_version", "oldest_version", fakeseq,
FilePath(self.mktemp())) FilePath(self.mktemp()))
self.failUnless(isinstance(ic, IntroducerClient)) self.failUnless(isinstance(ic, IntroducerClient))
@ -169,7 +185,7 @@ def fakeseq():
seqnum_counter = itertools.count(1) seqnum_counter = itertools.count(1)
def realseq(): def realseq():
return seqnum_counter.next(), str(os.randint(1,100000)) return next(seqnum_counter), str(os.randint(1,100000))
def make_ann(furl): def make_ann(furl):
ann = { "anonymous-storage-FURL": furl, ann = { "anonymous-storage-FURL": furl,
@ -188,25 +204,25 @@ class Client(AsyncTestCase):
def test_duplicate_receive_v2(self): def test_duplicate_receive_v2(self):
ic1 = IntroducerClient(None, ic1 = IntroducerClient(None,
"introducer.furl", u"my_nickname", "introducer.furl", u"my_nickname",
"ver23", "oldest_version", {}, fakeseq, "ver23", "oldest_version", fakeseq,
FilePath(self.mktemp())) FilePath(self.mktemp()))
# we use a second client just to create a different-looking # we use a second client just to create a different-looking
# announcement # announcement
ic2 = IntroducerClient(None, ic2 = IntroducerClient(None,
"introducer.furl", u"my_nickname", "introducer.furl", u"my_nickname",
"ver24","oldest_version",{}, fakeseq, "ver24","oldest_version",fakeseq,
FilePath(self.mktemp())) FilePath(self.mktemp()))
announcements = [] announcements = []
def _received(key_s, ann): def _received(key_s, ann):
announcements.append( (key_s, ann) ) announcements.append( (key_s, ann) )
ic1.subscribe_to("storage", _received) ic1.subscribe_to("storage", _received)
furl1 = "pb://62ubehyunnyhzs7r6vdonnm2hpi52w6y@127.0.0.1:36106/gydnp" furl1 = b"pb://62ubehyunnyhzs7r6vdonnm2hpi52w6y@127.0.0.1:36106/gydnp"
furl1a = "pb://62ubehyunnyhzs7r6vdonnm2hpi52w6y@127.0.0.1:7777/gydnp" furl1a = b"pb://62ubehyunnyhzs7r6vdonnm2hpi52w6y@127.0.0.1:7777/gydnp"
furl2 = "pb://ttwwooyunnyhzs7r6vdonnm2hpi52w6y@127.0.0.1:36106/ttwwoo" furl2 = b"pb://ttwwooyunnyhzs7r6vdonnm2hpi52w6y@127.0.0.1:36106/ttwwoo"
private_key, public_key = ed25519.create_signing_keypair() private_key, public_key = ed25519.create_signing_keypair()
public_key_str = ed25519.string_from_verifying_key(public_key) public_key_str = ed25519.string_from_verifying_key(public_key)
pubkey_s = remove_prefix(public_key_str, "pub-") pubkey_s = remove_prefix(public_key_str, b"pub-")
# ann1: ic1, furl1 # ann1: ic1, furl1
# ann1a: ic1, furl1a (same SturdyRef, different connection hints) # ann1a: ic1, furl1a (same SturdyRef, different connection hints)
@ -226,7 +242,7 @@ class Client(AsyncTestCase):
self.failUnlessEqual(len(announcements), 1) self.failUnlessEqual(len(announcements), 1)
key_s,ann = announcements[0] key_s,ann = announcements[0]
self.failUnlessEqual(key_s, pubkey_s) self.failUnlessEqual(key_s, pubkey_s)
self.failUnlessEqual(ann["anonymous-storage-FURL"], furl1) self.failUnlessEqual(ensure_binary(ann["anonymous-storage-FURL"]), furl1)
self.failUnlessEqual(ann["my-version"], "ver23") self.failUnlessEqual(ann["my-version"], "ver23")
d.addCallback(_then1) d.addCallback(_then1)
@ -260,7 +276,7 @@ class Client(AsyncTestCase):
self.failUnlessEqual(len(announcements), 2) self.failUnlessEqual(len(announcements), 2)
key_s,ann = announcements[-1] key_s,ann = announcements[-1]
self.failUnlessEqual(key_s, pubkey_s) self.failUnlessEqual(key_s, pubkey_s)
self.failUnlessEqual(ann["anonymous-storage-FURL"], furl1) self.failUnlessEqual(ensure_binary(ann["anonymous-storage-FURL"]), furl1)
self.failUnlessEqual(ann["my-version"], "ver24") self.failUnlessEqual(ann["my-version"], "ver24")
d.addCallback(_then3) d.addCallback(_then3)
@ -272,7 +288,7 @@ class Client(AsyncTestCase):
self.failUnlessEqual(len(announcements), 3) self.failUnlessEqual(len(announcements), 3)
key_s,ann = announcements[-1] key_s,ann = announcements[-1]
self.failUnlessEqual(key_s, pubkey_s) self.failUnlessEqual(key_s, pubkey_s)
self.failUnlessEqual(ann["anonymous-storage-FURL"], furl1a) self.failUnlessEqual(ensure_binary(ann["anonymous-storage-FURL"]), furl1a)
self.failUnlessEqual(ann["my-version"], "ver23") self.failUnlessEqual(ann["my-version"], "ver23")
d.addCallback(_then4) d.addCallback(_then4)
@ -288,7 +304,7 @@ class Client(AsyncTestCase):
self.failUnlessEqual(len(announcements2), 1) self.failUnlessEqual(len(announcements2), 1)
key_s,ann = announcements2[-1] key_s,ann = announcements2[-1]
self.failUnlessEqual(key_s, pubkey_s) self.failUnlessEqual(key_s, pubkey_s)
self.failUnlessEqual(ann["anonymous-storage-FURL"], furl1a) self.failUnlessEqual(ensure_binary(ann["anonymous-storage-FURL"]), furl1a)
self.failUnlessEqual(ann["my-version"], "ver23") self.failUnlessEqual(ann["my-version"], "ver23")
d.addCallback(_then5) d.addCallback(_then5)
return d return d
@ -298,9 +314,9 @@ class Server(AsyncTestCase):
i = IntroducerService() i = IntroducerService()
ic1 = IntroducerClient(None, ic1 = IntroducerClient(None,
"introducer.furl", u"my_nickname", "introducer.furl", u"my_nickname",
"ver23", "oldest_version", {}, realseq, "ver23", "oldest_version", realseq,
FilePath(self.mktemp())) FilePath(self.mktemp()))
furl1 = "pb://62ubehyunnyhzs7r6vdonnm2hpi52w6y@127.0.0.1:36106/gydnp" furl1 = b"pb://62ubehyunnyhzs7r6vdonnm2hpi52w6y@127.0.0.1:36106/gydnp"
private_key, _ = ed25519.create_signing_keypair() private_key, _ = ed25519.create_signing_keypair()
@ -396,9 +412,9 @@ class Queue(SystemTestMixin, AsyncTestCase):
tub2 = Tub() tub2 = Tub()
tub2.setServiceParent(self.parent) tub2.setServiceParent(self.parent)
c = IntroducerClient(tub2, ifurl, c = IntroducerClient(tub2, ifurl,
u"nickname", "version", "oldest", {}, fakeseq, u"nickname", "version", "oldest", fakeseq,
FilePath(self.mktemp())) FilePath(self.mktemp()))
furl1 = "pb://onug64tu@127.0.0.1:123/short" # base32("short") furl1 = b"pb://onug64tu@127.0.0.1:123/short" # base32("short")
private_key, _ = ed25519.create_signing_keypair() private_key, _ = ed25519.create_signing_keypair()
d = introducer.disownServiceParent() d = introducer.disownServiceParent()
@ -420,7 +436,7 @@ class Queue(SystemTestMixin, AsyncTestCase):
def _done(ign): def _done(ign):
v = introducer.get_announcements()[0] v = introducer.get_announcements()[0]
furl = v.announcement["anonymous-storage-FURL"] furl = v.announcement["anonymous-storage-FURL"]
self.failUnlessEqual(furl, furl1) self.failUnlessEqual(ensure_binary(furl), furl1)
d.addCallback(_done) d.addCallback(_done)
# now let the ack get back # now let the ack get back
@ -446,7 +462,7 @@ class SystemTest(SystemTestMixin, AsyncTestCase):
iff = os.path.join(self.basedir, "introducer.furl") iff = os.path.join(self.basedir, "introducer.furl")
tub = self.central_tub tub = self.central_tub
ifurl = self.central_tub.registerReference(introducer, furlFile=iff) ifurl = self.central_tub.registerReference(introducer, furlFile=iff)
self.introducer_furl = ifurl self.introducer_furl = ifurl.encode("utf-8")
# we have 5 clients who publish themselves as storage servers, and a # we have 5 clients who publish themselves as storage servers, and a
# sixth which does which not. All 6 clients subscriber to hear about # sixth which does which not. All 6 clients subscriber to hear about
@ -477,7 +493,7 @@ class SystemTest(SystemTestMixin, AsyncTestCase):
c = IntroducerClient(tub, self.introducer_furl, c = IntroducerClient(tub, self.introducer_furl,
NICKNAME % str(i), NICKNAME % str(i),
"version", "oldest", "version", "oldest",
{"component": "component-v1"}, fakeseq, fakeseq,
FilePath(self.mktemp())) FilePath(self.mktemp()))
received_announcements[c] = {} received_announcements[c] = {}
def got(key_s_or_tubid, ann, announcements): def got(key_s_or_tubid, ann, announcements):
@ -487,7 +503,7 @@ class SystemTest(SystemTestMixin, AsyncTestCase):
subscribing_clients.append(c) subscribing_clients.append(c)
expected_announcements[i] += 1 # all expect a 'storage' announcement expected_announcements[i] += 1 # all expect a 'storage' announcement
node_furl = tub.registerReference(Referenceable()) node_furl = tub.registerReference(Referenceable()).encode("utf-8")
private_key, public_key = ed25519.create_signing_keypair() private_key, public_key = ed25519.create_signing_keypair()
public_key_str = ed25519.string_from_verifying_key(public_key) public_key_str = ed25519.string_from_verifying_key(public_key)
privkeys[i] = private_key privkeys[i] = private_key
@ -504,7 +520,7 @@ class SystemTest(SystemTestMixin, AsyncTestCase):
if i == 2: if i == 2:
# also publish something that nobody cares about # also publish something that nobody cares about
boring_furl = tub.registerReference(Referenceable()) boring_furl = tub.registerReference(Referenceable()).encode("utf-8")
c.publish("boring", make_ann(boring_furl), private_key) c.publish("boring", make_ann(boring_furl), private_key)
c.setServiceParent(self.parent) c.setServiceParent(self.parent)
@ -581,7 +597,7 @@ class SystemTest(SystemTestMixin, AsyncTestCase):
serverid0 = printable_serverids[0] serverid0 = printable_serverids[0]
ann = anns[serverid0] ann = anns[serverid0]
nick = ann["nickname"] nick = ann["nickname"]
self.failUnlessEqual(type(nick), unicode) self.assertIsInstance(nick, str)
self.failUnlessEqual(nick, NICKNAME % "0") self.failUnlessEqual(nick, NICKNAME % "0")
for c in publishing_clients: for c in publishing_clients:
cdc = c._debug_counts cdc = c._debug_counts
@ -592,7 +608,7 @@ class SystemTest(SystemTestMixin, AsyncTestCase):
self.failUnlessEqual(cdc["outbound_message"], expected) self.failUnlessEqual(cdc["outbound_message"], expected)
# now check the web status, make sure it renders without error # now check the web status, make sure it renders without error
ir = introweb.IntroducerRoot(self.parent) ir = introweb.IntroducerRoot(self.parent)
self.parent.nodeid = "NODEID" self.parent.nodeid = b"NODEID"
log.msg("_check1 done") log.msg("_check1 done")
return flattenString(None, ir._create_element()) return flattenString(None, ir._create_element())
d.addCallback(_check1) d.addCallback(_check1)
@ -602,7 +618,7 @@ class SystemTest(SystemTestMixin, AsyncTestCase):
self.assertIn(NICKNAME % "0", text) # a v2 client self.assertIn(NICKNAME % "0", text) # a v2 client
self.assertIn(NICKNAME % "1", text) # another v2 client self.assertIn(NICKNAME % "1", text) # another v2 client
for i in range(NUM_STORAGE): for i in range(NUM_STORAGE):
self.assertIn(printable_serverids[i], text, self.assertIn(ensure_text(printable_serverids[i]), text,
(i,printable_serverids[i],text)) (i,printable_serverids[i],text))
# make sure there isn't a double-base32ed string too # make sure there isn't a double-base32ed string too
self.assertNotIn(idlib.nodeid_b2a(printable_serverids[i]), text, self.assertNotIn(idlib.nodeid_b2a(printable_serverids[i]), text,
@ -642,7 +658,7 @@ class SystemTest(SystemTestMixin, AsyncTestCase):
self.create_tub(self.central_portnum) self.create_tub(self.central_portnum)
newfurl = self.central_tub.registerReference(self.the_introducer, newfurl = self.central_tub.registerReference(self.the_introducer,
furlFile=iff) furlFile=iff)
assert newfurl == self.introducer_furl assert ensure_binary(newfurl) == self.introducer_furl
d.addCallback(_restart_introducer_tub) d.addCallback(_restart_introducer_tub)
d.addCallback(_wait_for_connected) d.addCallback(_wait_for_connected)
@ -694,7 +710,7 @@ class SystemTest(SystemTestMixin, AsyncTestCase):
self.the_introducer = introducer self.the_introducer = introducer
newfurl = self.central_tub.registerReference(self.the_introducer, newfurl = self.central_tub.registerReference(self.the_introducer,
furlFile=iff) furlFile=iff)
assert newfurl == self.introducer_furl assert ensure_binary(newfurl) == self.introducer_furl
d.addCallback(_restart_introducer) d.addCallback(_restart_introducer)
d.addCallback(_wait_for_connected) d.addCallback(_wait_for_connected)
@ -737,11 +753,10 @@ class ClientInfo(AsyncTestCase):
def test_client_v2(self): def test_client_v2(self):
introducer = IntroducerService() introducer = IntroducerService()
tub = introducer_furl = None tub = introducer_furl = None
app_versions = {"whizzy": "fizzy"}
client_v2 = IntroducerClient(tub, introducer_furl, NICKNAME % u"v2", client_v2 = IntroducerClient(tub, introducer_furl, NICKNAME % u"v2",
"my_version", "oldest", app_versions, "my_version", "oldest",
fakeseq, FilePath(self.mktemp())) fakeseq, FilePath(self.mktemp()))
#furl1 = "pb://62ubehyunnyhzs7r6vdonnm2hpi52w6y@127.0.0.1:0/swissnum" #furl1 = b"pb://62ubehyunnyhzs7r6vdonnm2hpi52w6y@127.0.0.1:0/swissnum"
#ann_s = make_ann_t(client_v2, furl1, None, 10) #ann_s = make_ann_t(client_v2, furl1, None, 10)
#introducer.remote_publish_v2(ann_s, Referenceable()) #introducer.remote_publish_v2(ann_s, Referenceable())
subscriber = FakeRemoteReference() subscriber = FakeRemoteReference()
@ -751,7 +766,6 @@ class ClientInfo(AsyncTestCase):
self.failUnlessEqual(len(subs), 1) self.failUnlessEqual(len(subs), 1)
s0 = subs[0] s0 = subs[0]
self.failUnlessEqual(s0.service_name, "storage") self.failUnlessEqual(s0.service_name, "storage")
self.failUnlessEqual(s0.app_versions, app_versions)
self.failUnlessEqual(s0.nickname, NICKNAME % u"v2") self.failUnlessEqual(s0.nickname, NICKNAME % u"v2")
self.failUnlessEqual(s0.version, "my_version") self.failUnlessEqual(s0.version, "my_version")
@ -760,14 +774,13 @@ class Announcements(AsyncTestCase):
def test_client_v2_signed(self): def test_client_v2_signed(self):
introducer = IntroducerService() introducer = IntroducerService()
tub = introducer_furl = None tub = introducer_furl = None
app_versions = {"whizzy": "fizzy"}
client_v2 = IntroducerClient(tub, introducer_furl, u"nick-v2", client_v2 = IntroducerClient(tub, introducer_furl, u"nick-v2",
"my_version", "oldest", app_versions, "my_version", "oldest",
fakeseq, FilePath(self.mktemp())) fakeseq, FilePath(self.mktemp()))
furl1 = "pb://62ubehyunnyhzs7r6vdonnm2hpi52w6y@127.0.0.1:0/swissnum" furl1 = b"pb://62ubehyunnyhzs7r6vdonnm2hpi52w6y@127.0.0.1:0/swissnum"
private_key, public_key = ed25519.create_signing_keypair() private_key, public_key = ed25519.create_signing_keypair()
public_key_str = remove_prefix(ed25519.string_from_verifying_key(public_key), "pub-") public_key_str = remove_prefix(ed25519.string_from_verifying_key(public_key), b"pub-")
ann_t0 = make_ann_t(client_v2, furl1, private_key, 10) ann_t0 = make_ann_t(client_v2, furl1, private_key, 10)
canary0 = Referenceable() canary0 = Referenceable()
@ -776,11 +789,10 @@ class Announcements(AsyncTestCase):
self.failUnlessEqual(len(a), 1) self.failUnlessEqual(len(a), 1)
self.assertThat(a[0].canary, Is(canary0)) self.assertThat(a[0].canary, Is(canary0))
self.failUnlessEqual(a[0].index, ("storage", public_key_str)) self.failUnlessEqual(a[0].index, ("storage", public_key_str))
self.failUnlessEqual(a[0].announcement["app-versions"], app_versions)
self.failUnlessEqual(a[0].nickname, u"nick-v2") self.failUnlessEqual(a[0].nickname, u"nick-v2")
self.failUnlessEqual(a[0].service_name, "storage") self.failUnlessEqual(a[0].service_name, "storage")
self.failUnlessEqual(a[0].version, "my_version") self.failUnlessEqual(a[0].version, "my_version")
self.failUnlessEqual(a[0].announcement["anonymous-storage-FURL"], furl1) self.failUnlessEqual(ensure_binary(a[0].announcement["anonymous-storage-FURL"]), furl1)
def _load_cache(self, cache_filepath): def _load_cache(self, cache_filepath):
with cache_filepath.open() as f: with cache_filepath.open() as f:
@ -788,26 +800,32 @@ class Announcements(AsyncTestCase):
@defer.inlineCallbacks @defer.inlineCallbacks
def test_client_cache(self): def test_client_cache(self):
basedir = "introducer/ClientSeqnums/test_client_cache_1" """
fileutil.make_dirs(basedir) Announcements received by an introducer client are written to that
cache_filepath = FilePath(os.path.join(basedir, "private", introducer client's cache file.
"introducer_default_cache.yaml")) """
basedir = FilePath("introducer/ClientSeqnums/test_client_cache_1")
private = basedir.child("private")
private.makedirs()
write_introducer(basedir, "default", "nope")
cache_filepath = basedir.descendant([
"private",
"introducer_default_cache.yaml",
])
# if storage is enabled, the Client will publish its storage server # if storage is enabled, the Client will publish its storage server
# during startup (although the announcement will wait in a queue # during startup (although the announcement will wait in a queue
# until the introducer connection is established). To avoid getting # until the introducer connection is established). To avoid getting
# confused by this, disable storage. # confused by this, disable storage.
with open(os.path.join(basedir, "tahoe.cfg"), "w") as f: with basedir.child("tahoe.cfg").open("w") as f:
f.write("[client]\n") f.write(b"[storage]\n")
f.write("introducer.furl = nope\n") f.write(b"enabled = false\n")
f.write("[storage]\n")
f.write("enabled = false\n")
c = yield create_client(basedir) c = yield create_client(basedir.path)
ic = c.introducer_clients[0] ic = c.introducer_clients[0]
private_key, public_key = ed25519.create_signing_keypair() private_key, public_key = ed25519.create_signing_keypair()
public_key_str = remove_prefix(ed25519.string_from_verifying_key(public_key), "pub-") public_key_str = remove_prefix(ed25519.string_from_verifying_key(public_key), b"pub-")
furl1 = "pb://onug64tu@127.0.0.1:123/short" # base32("short") furl1 = b"pb://onug64tu@127.0.0.1:123/short" # base32("short")
ann_t = make_ann_t(ic, furl1, private_key, 1) ann_t = make_ann_t(ic, furl1, private_key, 1)
ic.got_announcements([ann_t]) ic.got_announcements([ann_t])
@ -816,29 +834,29 @@ class Announcements(AsyncTestCase):
# check the cache for the announcement # check the cache for the announcement
announcements = self._load_cache(cache_filepath) announcements = self._load_cache(cache_filepath)
self.failUnlessEqual(len(announcements), 1) self.failUnlessEqual(len(announcements), 1)
self.failUnlessEqual(announcements[0]['key_s'], public_key_str) self.failUnlessEqual(ensure_binary(announcements[0]['key_s']), public_key_str)
ann = announcements[0]["ann"] ann = announcements[0]["ann"]
self.failUnlessEqual(ann["anonymous-storage-FURL"], furl1) self.failUnlessEqual(ensure_binary(ann["anonymous-storage-FURL"]), furl1)
self.failUnlessEqual(ann["seqnum"], 1) self.failUnlessEqual(ann["seqnum"], 1)
# a new announcement that replaces the first should replace the # a new announcement that replaces the first should replace the
# cached entry, not duplicate it # cached entry, not duplicate it
furl2 = furl1 + "er" furl2 = furl1 + b"er"
ann_t2 = make_ann_t(ic, furl2, private_key, 2) ann_t2 = make_ann_t(ic, furl2, private_key, 2)
ic.got_announcements([ann_t2]) ic.got_announcements([ann_t2])
yield flushEventualQueue() yield flushEventualQueue()
announcements = self._load_cache(cache_filepath) announcements = self._load_cache(cache_filepath)
self.failUnlessEqual(len(announcements), 1) self.failUnlessEqual(len(announcements), 1)
self.failUnlessEqual(announcements[0]['key_s'], public_key_str) self.failUnlessEqual(ensure_binary(announcements[0]['key_s']), public_key_str)
ann = announcements[0]["ann"] ann = announcements[0]["ann"]
self.failUnlessEqual(ann["anonymous-storage-FURL"], furl2) self.failUnlessEqual(ensure_binary(ann["anonymous-storage-FURL"]), furl2)
self.failUnlessEqual(ann["seqnum"], 2) self.failUnlessEqual(ann["seqnum"], 2)
# but a third announcement with a different key should add to the # but a third announcement with a different key should add to the
# cache # cache
private_key2, public_key2 = ed25519.create_signing_keypair() private_key2, public_key2 = ed25519.create_signing_keypair()
public_key_str2 = remove_prefix(ed25519.string_from_verifying_key(public_key2), "pub-") public_key_str2 = remove_prefix(ed25519.string_from_verifying_key(public_key2), b"pub-")
furl3 = "pb://onug64tu@127.0.0.1:456/short" furl3 = b"pb://onug64tu@127.0.0.1:456/short"
ann_t3 = make_ann_t(ic, furl3, private_key2, 1) ann_t3 = make_ann_t(ic, furl3, private_key2, 1)
ic.got_announcements([ann_t3]) ic.got_announcements([ann_t3])
yield flushEventualQueue() yield flushEventualQueue()
@ -846,15 +864,15 @@ class Announcements(AsyncTestCase):
announcements = self._load_cache(cache_filepath) announcements = self._load_cache(cache_filepath)
self.failUnlessEqual(len(announcements), 2) self.failUnlessEqual(len(announcements), 2)
self.failUnlessEqual(set([public_key_str, public_key_str2]), self.failUnlessEqual(set([public_key_str, public_key_str2]),
set([a["key_s"] for a in announcements])) set([ensure_binary(a["key_s"]) for a in announcements]))
self.failUnlessEqual(set([furl2, furl3]), self.failUnlessEqual(set([furl2, furl3]),
set([a["ann"]["anonymous-storage-FURL"] set([ensure_binary(a["ann"]["anonymous-storage-FURL"])
for a in announcements])) for a in announcements]))
# test loading # test loading
yield flushEventualQueue() yield flushEventualQueue()
ic2 = IntroducerClient(None, "introducer.furl", u"my_nickname", ic2 = IntroducerClient(None, "introducer.furl", u"my_nickname",
"my_version", "oldest_version", {}, fakeseq, "my_version", "oldest_version", fakeseq,
ic._cache_filepath) ic._cache_filepath)
announcements = {} announcements = {}
def got(key_s, ann): def got(key_s, ann):
@ -864,12 +882,12 @@ class Announcements(AsyncTestCase):
yield flushEventualQueue() yield flushEventualQueue()
self.failUnless(public_key_str in announcements) self.failUnless(public_key_str in announcements)
self.failUnlessEqual(announcements[public_key_str]["anonymous-storage-FURL"], self.failUnlessEqual(ensure_binary(announcements[public_key_str]["anonymous-storage-FURL"]),
furl2) furl2)
self.failUnlessEqual(announcements[public_key_str2]["anonymous-storage-FURL"], self.failUnlessEqual(ensure_binary(announcements[public_key_str2]["anonymous-storage-FURL"]),
furl3) furl3)
c2 = yield create_client(basedir) c2 = yield create_client(basedir.path)
c2.introducer_clients[0]._load_announcements() c2.introducer_clients[0]._load_announcements()
yield flushEventualQueue() yield flushEventualQueue()
self.assertEqual(c2.storage_broker.get_all_serverids(), self.assertEqual(c2.storage_broker.get_all_serverids(),
@ -879,27 +897,24 @@ class ClientSeqnums(AsyncBrokenTestCase):
@defer.inlineCallbacks @defer.inlineCallbacks
def test_client(self): def test_client(self):
basedir = "introducer/ClientSeqnums/test_client" basedir = FilePath("introducer/ClientSeqnums/test_client")
fileutil.make_dirs(basedir) private = basedir.child("private")
private.makedirs()
write_introducer(basedir, "default", "nope")
# if storage is enabled, the Client will publish its storage server # if storage is enabled, the Client will publish its storage server
# during startup (although the announcement will wait in a queue # during startup (although the announcement will wait in a queue
# until the introducer connection is established). To avoid getting # until the introducer connection is established). To avoid getting
# confused by this, disable storage. # confused by this, disable storage.
f = open(os.path.join(basedir, "tahoe.cfg"), "w") with basedir.child("tahoe.cfg").open("w") as f:
f.write("[client]\n") f.write(b"[storage]\n")
f.write("introducer.furl = nope\n") f.write(b"enabled = false\n")
f.write("[storage]\n")
f.write("enabled = false\n")
f.close()
c = yield create_client(basedir) c = yield create_client(basedir.path)
ic = c.introducer_clients[0] ic = c.introducer_clients[0]
outbound = ic._outbound_announcements outbound = ic._outbound_announcements
published = ic._published_announcements published = ic._published_announcements
def read_seqnum(): def read_seqnum():
f = open(os.path.join(basedir, "announcement-seqnum")) seqnum = basedir.child("announcement-seqnum").getContent()
seqnum = f.read().strip()
f.close()
return int(seqnum) return int(seqnum)
ic.publish("sA", {"key": "value1"}, c._node_private_key) ic.publish("sA", {"key": "value1"}, c._node_private_key)
@ -907,7 +922,9 @@ class ClientSeqnums(AsyncBrokenTestCase):
self.failUnless("sA" in outbound) self.failUnless("sA" in outbound)
self.failUnlessEqual(outbound["sA"]["seqnum"], 1) self.failUnlessEqual(outbound["sA"]["seqnum"], 1)
nonce1 = outbound["sA"]["nonce"] nonce1 = outbound["sA"]["nonce"]
self.failUnless(isinstance(nonce1, str)) self.failUnless(isinstance(nonce1, bytes))
# Make nonce unicode, to match JSON:
outbound["sA"]["nonce"] = str(nonce1, "utf-8")
self.failUnlessEqual(json.loads(published["sA"][0]), self.failUnlessEqual(json.loads(published["sA"][0]),
outbound["sA"]) outbound["sA"])
# [1] is the signature, [2] is the pubkey # [1] is the signature, [2] is the pubkey
@ -921,8 +938,11 @@ class ClientSeqnums(AsyncBrokenTestCase):
self.failUnless("sA" in outbound) self.failUnless("sA" in outbound)
self.failUnlessEqual(outbound["sA"]["seqnum"], 2) self.failUnlessEqual(outbound["sA"]["seqnum"], 2)
nonce2 = outbound["sA"]["nonce"] nonce2 = outbound["sA"]["nonce"]
self.failUnless(isinstance(nonce2, str)) self.failUnless(isinstance(nonce2, bytes))
self.failIfEqual(nonce1, nonce2) self.failIfEqual(nonce1, nonce2)
# Make nonce unicode, to match JSON:
outbound["sA"]["nonce"] = str(nonce2, "utf-8")
outbound["sB"]["nonce"] = str(outbound["sB"]["nonce"], "utf-8")
self.failUnlessEqual(json.loads(published["sA"][0]), self.failUnlessEqual(json.loads(published["sA"][0]),
outbound["sA"]) outbound["sA"])
self.failUnlessEqual(json.loads(published["sB"][0]), self.failUnlessEqual(json.loads(published["sB"][0]),
@ -954,7 +974,7 @@ class NonV1Server(SystemTestMixin, AsyncTestCase):
tub.setServiceParent(self.parent) tub.setServiceParent(self.parent)
listenOnUnused(tub) listenOnUnused(tub)
c = IntroducerClient(tub, self.introducer_furl, c = IntroducerClient(tub, self.introducer_furl,
u"nickname-client", "version", "oldest", {}, u"nickname-client", "version", "oldest",
fakeseq, FilePath(self.mktemp())) fakeseq, FilePath(self.mktemp()))
announcements = {} announcements = {}
def got(key_s, ann): def got(key_s, ann):
@ -979,11 +999,11 @@ class DecodeFurl(SyncTestCase):
def test_decode(self): def test_decode(self):
# make sure we have a working base64.b32decode. The one in # make sure we have a working base64.b32decode. The one in
# python2.4.[01] was broken. # python2.4.[01] was broken.
furl = 'pb://t5g7egomnnktbpydbuijt6zgtmw4oqi5@127.0.0.1:51857/hfzv36i' furl = b'pb://t5g7egomnnktbpydbuijt6zgtmw4oqi5@127.0.0.1:51857/hfzv36i'
m = re.match(r'pb://(\w+)@', furl) m = re.match(br'pb://(\w+)@', furl)
assert m assert m
nodeid = b32decode(m.group(1).upper()) nodeid = b32decode(m.group(1).upper())
self.failUnlessEqual(nodeid, "\x9fM\xf2\x19\xcckU0\xbf\x03\r\x10\x99\xfb&\x9b-\xc7A\x1d") self.failUnlessEqual(nodeid, b"\x9fM\xf2\x19\xcckU0\xbf\x03\r\x10\x99\xfb&\x9b-\xc7A\x1d")
class Signatures(SyncTestCase): class Signatures(SyncTestCase):
@ -995,11 +1015,11 @@ class Signatures(SyncTestCase):
(msg, sig, key) = ann_t (msg, sig, key) = ann_t
self.failUnlessEqual(type(msg), type("".encode("utf-8"))) # bytes self.failUnlessEqual(type(msg), type("".encode("utf-8"))) # bytes
self.failUnlessEqual(json.loads(msg.decode("utf-8")), ann) self.failUnlessEqual(json.loads(msg.decode("utf-8")), ann)
self.failUnless(sig.startswith("v0-")) self.failUnless(sig.startswith(b"v0-"))
self.failUnless(key.startswith("v0-")) self.failUnless(key.startswith(b"v0-"))
(ann2,key2) = unsign_from_foolscap(ann_t) (ann2,key2) = unsign_from_foolscap(ann_t)
self.failUnlessEqual(ann2, ann) self.failUnlessEqual(ann2, ann)
self.failUnlessEqual("pub-" + key2, public_key_str) self.failUnlessEqual(b"pub-" + key2, public_key_str)
# not signed # not signed
self.failUnlessRaises(UnknownKeyError, self.failUnlessRaises(UnknownKeyError,
@ -1014,26 +1034,25 @@ class Signatures(SyncTestCase):
# unrecognized signatures # unrecognized signatures
self.failUnlessRaises(UnknownKeyError, self.failUnlessRaises(UnknownKeyError,
unsign_from_foolscap, (bad_msg, "v999-sig", key)) unsign_from_foolscap, (bad_msg, b"v999-sig", key))
self.failUnlessRaises(UnknownKeyError, self.failUnlessRaises(UnknownKeyError,
unsign_from_foolscap, (bad_msg, sig, "v999-key")) unsign_from_foolscap, (bad_msg, sig, b"v999-key"))
def test_unsigned_announcement(self): def test_unsigned_announcement(self):
ed25519.verifying_key_from_string(b"pub-v0-wodst6ly4f7i7akt2nxizsmmy2rlmer6apltl56zctn67wfyu5tq") ed25519.verifying_key_from_string(b"pub-v0-wodst6ly4f7i7akt2nxizsmmy2rlmer6apltl56zctn67wfyu5tq")
mock_tub = Mock() mock_tub = Mock()
ic = IntroducerClient( ic = IntroducerClient(
mock_tub, mock_tub,
u"pb://", b"pb://",
u"fake_nick", u"fake_nick",
"0.0.0", "0.0.0",
"1.2.3", "1.2.3",
{},
(0, u"i am a nonce"), (0, u"i am a nonce"),
"invalid", "invalid",
) )
self.assertEqual(0, ic._debug_counts["inbound_announcement"]) self.assertEqual(0, ic._debug_counts["inbound_announcement"])
ic.got_announcements([ ic.got_announcements([
("message", "v0-aaaaaaa", "v0-wodst6ly4f7i7akt2nxizsmmy2rlmer6apltl56zctn67wfyu5tq") (b"message", b"v0-aaaaaaa", b"v0-wodst6ly4f7i7akt2nxizsmmy2rlmer6apltl56zctn67wfyu5tq")
]) ])
# we should have rejected this announcement due to a bad signature # we should have rejected this announcement due to a bad signature
self.assertEqual(0, ic._debug_counts["inbound_announcement"]) self.assertEqual(0, ic._debug_counts["inbound_announcement"])

View File

@ -24,9 +24,6 @@ class MultiIntroTests(unittest.TestCase):
config = {'hide-ip':False, 'listen': 'tcp', config = {'hide-ip':False, 'listen': 'tcp',
'port': None, 'location': None, 'hostname': 'example.net'} 'port': None, 'location': None, 'hostname': 'example.net'}
write_node_config(c, config) write_node_config(c, config)
fake_furl = "furl1"
c.write("[client]\n")
c.write("introducer.furl = %s\n" % fake_furl)
c.write("[storage]\n") c.write("[storage]\n")
c.write("enabled = false\n") c.write("enabled = false\n")
c.close() c.close()
@ -36,8 +33,10 @@ class MultiIntroTests(unittest.TestCase):
@defer.inlineCallbacks @defer.inlineCallbacks
def test_introducer_count(self): def test_introducer_count(self):
""" Ensure that the Client creates same number of introducer clients """
as found in "basedir/private/introducers" config file. """ If there are two introducers configured in ``introducers.yaml`` then
``Client`` creates two introducer clients.
"""
connections = { connections = {
'introducers': { 'introducers': {
u'intro1':{ 'furl': 'furl1' }, u'intro1':{ 'furl': 'furl1' },
@ -50,25 +49,13 @@ class MultiIntroTests(unittest.TestCase):
ic_count = len(myclient.introducer_clients) ic_count = len(myclient.introducer_clients)
# assertions # assertions
self.failUnlessEqual(ic_count, 3) self.failUnlessEqual(ic_count, len(connections["introducers"]))
@defer.inlineCallbacks
def test_introducer_count_commented(self):
""" Ensure that the Client creates same number of introducer clients
as found in "basedir/private/introducers" config file when there is one
commented."""
self.yaml_path.setContent(INTRODUCERS_CFG_FURLS_COMMENTED)
# get a client and count of introducer_clients
myclient = yield create_client(self.basedir)
ic_count = len(myclient.introducer_clients)
# assertions
self.failUnlessEqual(ic_count, 2)
@defer.inlineCallbacks @defer.inlineCallbacks
def test_read_introducer_furl_from_tahoecfg(self): def test_read_introducer_furl_from_tahoecfg(self):
""" Ensure that the Client reads the introducer.furl config item from """
the tahoe.cfg file. """ The deprecated [client]introducer.furl item is still read and respected.
"""
# create a custom tahoe.cfg # create a custom tahoe.cfg
c = open(os.path.join(self.basedir, "tahoe.cfg"), "w") c = open(os.path.join(self.basedir, "tahoe.cfg"), "w")
config = {'hide-ip':False, 'listen': 'tcp', config = {'hide-ip':False, 'listen': 'tcp',
@ -87,20 +74,42 @@ class MultiIntroTests(unittest.TestCase):
# assertions # assertions
self.failUnlessEqual(fake_furl, tahoe_cfg_furl) self.failUnlessEqual(fake_furl, tahoe_cfg_furl)
self.assertEqual(
list(
warning["message"]
for warning
in self.flushWarnings()
if warning["category"] is DeprecationWarning
),
["tahoe.cfg [client]introducer.furl is deprecated; "
"use private/introducers.yaml instead."],
)
@defer.inlineCallbacks @defer.inlineCallbacks
def test_reject_default_in_yaml(self): def test_reject_default_in_yaml(self):
connections = {'introducers': { """
u'default': { 'furl': 'furl1' }, If an introducer is configured in tahoe.cfg with the deprecated
}} [client]introducer.furl then a "default" introducer in
introducers.yaml is rejected.
"""
connections = {
'introducers': {
u'default': { 'furl': 'furl1' },
},
}
self.yaml_path.setContent(yamlutil.safe_dump(connections)) self.yaml_path.setContent(yamlutil.safe_dump(connections))
FilePath(self.basedir).child("tahoe.cfg").setContent(
"[client]\n"
"introducer.furl = furl1\n"
)
with self.assertRaises(ValueError) as ctx: with self.assertRaises(ValueError) as ctx:
yield create_client(self.basedir) yield create_client(self.basedir)
self.assertEquals( self.assertEquals(
str(ctx.exception), str(ctx.exception),
"'default' introducer furl cannot be specified in introducers.yaml; please " "'default' introducer furl cannot be specified in tahoe.cfg and introducers.yaml; "
"fix impossible configuration.", "please fix impossible configuration.",
) )
SIMPLE_YAML = """ SIMPLE_YAML = """
@ -126,8 +135,6 @@ class NoDefault(unittest.TestCase):
config = {'hide-ip':False, 'listen': 'tcp', config = {'hide-ip':False, 'listen': 'tcp',
'port': None, 'location': None, 'hostname': 'example.net'} 'port': None, 'location': None, 'hostname': 'example.net'}
write_node_config(c, config) write_node_config(c, config)
c.write("[client]\n")
c.write("# introducer.furl =\n") # omit default
c.write("[storage]\n") c.write("[storage]\n")
c.write("enabled = false\n") c.write("enabled = false\n")
c.close() c.close()

View File

@ -6,7 +6,7 @@ from __future__ import division
from __future__ import print_function from __future__ import print_function
from __future__ import unicode_literals from __future__ import unicode_literals
from future.utils import PY2, native_str from future.utils import PY2
if PY2: if PY2:
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
@ -29,6 +29,9 @@ from hypothesis.strategies import (
from unittest import skipIf from unittest import skipIf
from twisted.python.filepath import (
FilePath,
)
from twisted.trial import unittest from twisted.trial import unittest
from twisted.internet import defer from twisted.internet import defer
@ -46,14 +49,17 @@ from allmydata.node import (
_tub_portlocation, _tub_portlocation,
formatTimeTahoeStyle, formatTimeTahoeStyle,
UnescapedHashError, UnescapedHashError,
get_app_versions,
) )
from allmydata.introducer.server import create_introducer from allmydata.introducer.server import create_introducer
from allmydata import client from allmydata import client
from allmydata.util import fileutil, iputil from allmydata.util import fileutil, iputil
from allmydata.util.namespace import Namespace from allmydata.util.namespace import Namespace
from allmydata.util.configutil import UnknownConfigError from allmydata.util.configutil import (
ValidConfiguration,
UnknownConfigError,
)
from allmydata.util.i2p_provider import create as create_i2p_provider from allmydata.util.i2p_provider import create as create_i2p_provider
from allmydata.util.tor_provider import create as create_tor_provider from allmydata.util.tor_provider import create as create_tor_provider
import allmydata.test.common_util as testutil import allmydata.test.common_util as testutil
@ -101,16 +107,6 @@ class TestCase(testutil.SignalMixin, unittest.TestCase):
# conflict with another service to prove it. # conflict with another service to prove it.
self._available_port = 22 self._available_port = 22
def test_application_versions(self):
"""
Application versions should all have the same type, the native string.
This test is due to the Foolscap limitations, if Foolscap is fixed or
removed it can be deleted.
"""
app_types = set(type(o) for o in get_app_versions())
self.assertEqual(app_types, {native_str})
def _test_location( def _test_location(
self, self,
expected_addresses, expected_addresses,
@ -442,6 +438,78 @@ class TestCase(testutil.SignalMixin, unittest.TestCase):
yield client.create_client(basedir) yield client.create_client(basedir)
self.failUnless(ns.called) self.failUnless(ns.called)
def test_set_config_unescaped_furl_hash(self):
"""
``_Config.set_config`` raises ``UnescapedHashError`` if the item being set
is a furl and the value includes ``"#"`` and does not set the value.
"""
basedir = self.mktemp()
new_config = config_from_string(basedir, "", "")
with self.assertRaises(UnescapedHashError):
new_config.set_config("foo", "bar.furl", "value#1")
with self.assertRaises(MissingConfigEntry):
new_config.get_config("foo", "bar.furl")
def test_set_config_new_section(self):
"""
``_Config.set_config`` can be called with the name of a section that does
not already exist to create that section and set an item in it.
"""
basedir = self.mktemp()
new_config = config_from_string(basedir, "", "", ValidConfiguration.everything())
new_config.set_config("foo", "bar", "value1")
self.assertEqual(
new_config.get_config("foo", "bar"),
"value1"
)
def test_set_config_replace(self):
"""
``_Config.set_config`` can be called with a section and item that already
exists to change an existing value to a new one.
"""
basedir = self.mktemp()
new_config = config_from_string(basedir, "", "", ValidConfiguration.everything())
new_config.set_config("foo", "bar", "value1")
new_config.set_config("foo", "bar", "value2")
self.assertEqual(
new_config.get_config("foo", "bar"),
"value2"
)
def test_set_config_write(self):
"""
``_Config.set_config`` persists the configuration change so it can be
re-loaded later.
"""
# Let our nonsense config through
valid_config = ValidConfiguration.everything()
basedir = FilePath(self.mktemp())
basedir.makedirs()
cfg = basedir.child(b"tahoe.cfg")
cfg.setContent(b"")
new_config = read_config(basedir.path, "", [], valid_config)
new_config.set_config("foo", "bar", "value1")
loaded_config = read_config(basedir.path, "", [], valid_config)
self.assertEqual(
loaded_config.get_config("foo", "bar"),
"value1",
)
def test_set_config_rejects_invalid_config(self):
"""
``_Config.set_config`` raises ``UnknownConfigError`` if the section or
item is not recognized by the validation object and does not set the
value.
"""
# Make everything invalid.
valid_config = ValidConfiguration.nothing()
new_config = config_from_string(self.mktemp(), "", "", valid_config)
with self.assertRaises(UnknownConfigError):
new_config.set_config("foo", "bar", "baz")
with self.assertRaises(MissingConfigEntry):
new_config.get_config("foo", "bar")
class TestMissingPorts(unittest.TestCase): class TestMissingPorts(unittest.TestCase):
""" """
@ -616,8 +684,6 @@ class TestMissingPorts(unittest.TestCase):
BASE_CONFIG = """ BASE_CONFIG = """
[client]
introducer.furl = empty
[tor] [tor]
enabled = false enabled = false
[i2p] [i2p]

View File

@ -12,7 +12,6 @@ from twisted.internet import reactor
from twisted.python import usage from twisted.python import usage
from twisted.internet.defer import ( from twisted.internet.defer import (
inlineCallbacks, inlineCallbacks,
returnValue,
DeferredList, DeferredList,
) )
from twisted.python.filepath import FilePath from twisted.python.filepath import FilePath
@ -20,12 +19,9 @@ from twisted.python.runtime import (
platform, platform,
) )
from allmydata.util import fileutil, pollmixin from allmydata.util import fileutil, pollmixin
from allmydata.util.encodingutil import unicode_to_argv, unicode_to_output, \ from allmydata.util.encodingutil import unicode_to_argv, unicode_to_output
get_filesystem_encoding
from allmydata.test import common_util from allmydata.test import common_util
from allmydata.version_checks import normalized_version
import allmydata import allmydata
from allmydata import __appname__
from .common_util import parse_cli, run_cli from .common_util import parse_cli, run_cli
from .cli_node_api import ( from .cli_node_api import (
CLINodeAPI, CLINodeAPI,
@ -58,17 +54,6 @@ rootdir = get_root_from_file(srcfile)
class RunBinTahoeMixin(object): class RunBinTahoeMixin(object):
@inlineCallbacks
def find_import_location(self):
res = yield self.run_bintahoe(["--version-and-path"])
out, err, rc_or_sig = res
self.assertEqual(rc_or_sig, 0, res)
lines = out.splitlines()
tahoe_pieces = lines[0].split()
self.assertEqual(tahoe_pieces[0], "%s:" % (__appname__,), (tahoe_pieces, res))
returnValue(tahoe_pieces[-1].strip("()"))
def run_bintahoe(self, args, stdin=None, python_options=[], env=None): def run_bintahoe(self, args, stdin=None, python_options=[], env=None):
command = sys.executable command = sys.executable
argv = python_options + ["-m", "allmydata.scripts.runner"] + args argv = python_options + ["-m", "allmydata.scripts.runner"] + args
@ -86,64 +71,6 @@ class RunBinTahoeMixin(object):
class BinTahoe(common_util.SignalMixin, unittest.TestCase, RunBinTahoeMixin): class BinTahoe(common_util.SignalMixin, unittest.TestCase, RunBinTahoeMixin):
@inlineCallbacks
def test_the_right_code(self):
# running "tahoe" in a subprocess should find the same code that
# holds this test file, else something is weird
test_path = os.path.dirname(os.path.dirname(os.path.normcase(os.path.realpath(srcfile))))
bintahoe_import_path = yield self.find_import_location()
same = (bintahoe_import_path == test_path)
if not same:
msg = ("My tests and my 'tahoe' executable are using different paths.\n"
"tahoe: %r\n"
"tests: %r\n"
"( according to the test source filename %r)\n" %
(bintahoe_import_path, test_path, srcfile))
if (not isinstance(rootdir, unicode) and
rootdir.decode(get_filesystem_encoding(), 'replace') != rootdir):
msg += ("However, this may be a false alarm because the import path\n"
"is not representable in the filesystem encoding.")
raise unittest.SkipTest(msg)
else:
msg += "Please run the tests in a virtualenv that includes both the Tahoe-LAFS library and the 'tahoe' executable."
self.fail(msg)
def test_path(self):
d = self.run_bintahoe(["--version-and-path"])
def _cb(res):
out, err, rc_or_sig = res
self.failUnlessEqual(rc_or_sig, 0, str(res))
# Fail unless the __appname__ package is *this* version *and*
# was loaded from *this* source directory.
required_verstr = str(allmydata.__version__)
self.failIfEqual(required_verstr, "unknown",
"We don't know our version, because this distribution didn't come "
"with a _version.py and 'setup.py update_version' hasn't been run.")
srcdir = os.path.dirname(os.path.dirname(os.path.normcase(os.path.realpath(srcfile))))
info = repr((res, allmydata.__appname__, required_verstr, srcdir))
appverpath = out.split(')')[0]
(appverfull, path) = appverpath.split('] (')
(appver, comment) = appverfull.split(' [')
(branch, full_version) = comment.split(': ')
(app, ver) = appver.split(': ')
self.failUnlessEqual(app, allmydata.__appname__, info)
norm_ver = normalized_version(ver)
norm_required = normalized_version(required_verstr)
self.failUnlessEqual(norm_ver, norm_required, info)
self.failUnlessEqual(path, srcdir, info)
self.failUnlessEqual(branch, allmydata.branch)
self.failUnlessEqual(full_version, allmydata.full_version)
d.addCallback(_cb)
return d
def test_unicode_arguments_and_output(self): def test_unicode_arguments_and_output(self):
tricky = u"\u2621" tricky = u"\u2621"
try: try:
@ -165,8 +92,8 @@ class BinTahoe(common_util.SignalMixin, unittest.TestCase, RunBinTahoeMixin):
d = self.run_bintahoe(["--version"], python_options=["-t"]) d = self.run_bintahoe(["--version"], python_options=["-t"])
def _cb(res): def _cb(res):
out, err, rc_or_sig = res out, err, rc_or_sig = res
self.failUnlessEqual(rc_or_sig, 0, str(res)) self.assertEqual(rc_or_sig, 0, str(res))
self.failUnless(out.startswith(allmydata.__appname__+':'), str(res)) self.assertTrue(out.startswith(allmydata.__appname__ + '/'), str(res))
d.addCallback(_cb) d.addCallback(_cb)
return d return d

View File

@ -473,7 +473,7 @@ class StoragePluginWebPresence(AsyncTestCase):
}, },
storage_plugin=self.storage_plugin, storage_plugin=self.storage_plugin,
basedir=self.basedir, basedir=self.basedir,
introducer_furl=ensure_text(SOME_FURL), introducer_furl=SOME_FURL,
)) ))
self.node = yield self.node_fixture.create_node() self.node = yield self.node_fixture.create_node()
self.webish = self.node.getServiceNamed(WebishServer.name) self.webish = self.node.getServiceNamed(WebishServer.name)

View File

@ -33,6 +33,9 @@ from allmydata.mutable.publish import MutableData
from foolscap.api import DeadReferenceError, fireEventually, flushEventualQueue from foolscap.api import DeadReferenceError, fireEventually, flushEventualQueue
from twisted.python.failure import Failure from twisted.python.failure import Failure
from twisted.python.filepath import (
FilePath,
)
from .common import ( from .common import (
TEST_RSA_KEY_SIZE, TEST_RSA_KEY_SIZE,
@ -47,6 +50,9 @@ from .web.common import (
from allmydata.test.test_runner import RunBinTahoeMixin from allmydata.test.test_runner import RunBinTahoeMixin
from . import common_util as testutil from . import common_util as testutil
from .common_util import run_cli from .common_util import run_cli
from ..scripts.common import (
write_introducer,
)
LARGE_DATA = """ LARGE_DATA = """
This is some data to publish to the remote grid.., which needs to be large This is some data to publish to the remote grid.., which needs to be large
@ -806,8 +812,6 @@ class SystemTestMixin(pollmixin.PollMixin, testutil.StallMixin):
except1 = set(range(self.numclients)) - {1} except1 = set(range(self.numclients)) - {1}
feature_matrix = { feature_matrix = {
# client 1 uses private/introducers.yaml, not tahoe.cfg
("client", "introducer.furl"): except1,
("client", "nickname"): except1, ("client", "nickname"): except1,
# client 1 has to auto-assign an address. # client 1 has to auto-assign an address.
@ -833,7 +837,6 @@ class SystemTestMixin(pollmixin.PollMixin, testutil.StallMixin):
setnode = partial(setconf, config, which, "node") setnode = partial(setconf, config, which, "node")
sethelper = partial(setconf, config, which, "helper") sethelper = partial(setconf, config, which, "helper")
setclient("introducer.furl", self.introducer_furl)
setnode("nickname", u"client %d \N{BLACK SMILING FACE}" % (which,)) setnode("nickname", u"client %d \N{BLACK SMILING FACE}" % (which,))
if self.stats_gatherer_furl: if self.stats_gatherer_furl:
@ -850,13 +853,11 @@ class SystemTestMixin(pollmixin.PollMixin, testutil.StallMixin):
sethelper("enabled", "True") sethelper("enabled", "True")
if which == 1: iyaml = ("introducers:\n"
# clients[1] uses private/introducers.yaml, not tahoe.cfg " petname2:\n"
iyaml = ("introducers:\n" " furl: %s\n") % self.introducer_furl
" petname2:\n" iyaml_fn = os.path.join(basedir, "private", "introducers.yaml")
" furl: %s\n") % self.introducer_furl fileutil.write(iyaml_fn, iyaml)
iyaml_fn = os.path.join(basedir, "private", "introducers.yaml")
fileutil.write(iyaml_fn, iyaml)
return _render_config(config) return _render_config(config)
@ -905,16 +906,21 @@ class SystemTestMixin(pollmixin.PollMixin, testutil.StallMixin):
# usually this node is *not* parented to our self.sparent, so we can # usually this node is *not* parented to our self.sparent, so we can
# shut it down separately from the rest, to exercise the # shut it down separately from the rest, to exercise the
# connection-lost code # connection-lost code
basedir = self.getdir("client%d" % client_num) basedir = FilePath(self.getdir("client%d" % client_num))
if not os.path.isdir(basedir): basedir.makedirs()
fileutil.make_dirs(basedir)
config = "[client]\n" config = "[client]\n"
config += "introducer.furl = %s\n" % self.introducer_furl
if helper_furl: if helper_furl:
config += "helper.furl = %s\n" % helper_furl config += "helper.furl = %s\n" % helper_furl
fileutil.write(os.path.join(basedir, 'tahoe.cfg'), config) basedir.child("tahoe.cfg").setContent(config)
private = basedir.child("private")
private.makedirs()
write_introducer(
basedir,
"default",
self.introducer_furl,
)
c = yield client.create_client(basedir) c = yield client.create_client(basedir.path)
self.clients.append(c) self.clients.append(c)
c.set_default_mutable_keysize(TEST_RSA_KEY_SIZE) c.set_default_mutable_keysize(TEST_RSA_KEY_SIZE)
self.numclients += 1 self.numclients += 1

View File

@ -1,275 +0,0 @@
"""
Tests for allmydata.util.verlib and allmydata.version_checks.
Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from future.utils import PY2
if PY2:
from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
import sys
import pkg_resources
from operator import (
setitem,
)
from twisted.trial import unittest
from allmydata.version_checks import (
_cross_check as cross_check,
_extract_openssl_version as extract_openssl_version,
_get_package_versions_and_locations as get_package_versions_and_locations,
)
from allmydata.util.verlib import NormalizedVersion as V, \
IrrationalVersionError, \
suggest_normalized_version as suggest
class MockSSL(object):
SSLEAY_VERSION = 0
SSLEAY_CFLAGS = 2
def __init__(self, version, compiled_without_heartbeats=False):
self.opts = {
self.SSLEAY_VERSION: version,
self.SSLEAY_CFLAGS: compiled_without_heartbeats and 'compiler: gcc -DOPENSSL_NO_HEARTBEATS'
or 'compiler: gcc',
}
def SSLeay_version(self, which):
return self.opts[which]
class CheckRequirement(unittest.TestCase):
def test_packages_from_pkg_resources(self):
if hasattr(sys, 'frozen'):
raise unittest.SkipTest("This test doesn't apply to frozen builds.")
class MockPackage(object):
def __init__(self, project_name, version, location):
self.project_name = project_name
self.version = version
self.location = location
def call_pkg_resources_require(*args):
return [MockPackage("Foo", "1.0", "/path")]
self.patch(pkg_resources, 'require', call_pkg_resources_require)
(packages, errors) = get_package_versions_and_locations()
self.failUnlessIn(("foo", ("1.0", "/path", "according to pkg_resources")), packages)
self.failIfEqual(errors, [])
self.failUnlessEqual([e for e in errors if "was not found by pkg_resources" not in e], [])
def test_cross_check_unparseable_versions(self):
# The bug in #1355 is triggered when a version string from either pkg_resources or import
# is not parseable at all by normalized_version.
res = cross_check({"foo": ("unparseable", "")}, [("foo", ("1.0", "", None))])
self.failUnlessEqual(res, [])
res = cross_check({"foo": ("1.0", "")}, [("foo", ("unparseable", "", None))])
self.failUnlessEqual(res, [])
res = cross_check({"foo": ("unparseable", "")}, [("foo", ("unparseable", "", None))])
self.failUnlessEqual(res, [])
def test_cross_check(self):
res = cross_check({}, [])
self.failUnlessEqual(res, [])
res = cross_check({}, [("tahoe-lafs", ("1.0", "", "blah"))])
self.failUnlessEqual(res, [])
res = cross_check({"foo": ("unparseable", "")}, [])
self.failUnlessEqual(res, [])
res = cross_check({"argparse": ("unparseable", "")}, [])
self.failUnlessEqual(res, [])
res = cross_check({}, [("foo", ("unparseable", "", None))])
self.failUnlessEqual(len(res), 1)
self.assertTrue(("version 'unparseable'" in res[0]) or ("version u'unparseable'" in res[0]))
self.failUnlessIn("was not found by pkg_resources", res[0])
res = cross_check({"distribute": ("1.0", "/somewhere")}, [("setuptools", ("2.0", "/somewhere", "distribute"))])
self.failUnlessEqual(res, [])
res = cross_check({"distribute": ("1.0", "/somewhere")}, [("setuptools", ("2.0", "/somewhere", None))])
self.failUnlessEqual(len(res), 1)
self.failUnlessIn("location mismatch", res[0])
res = cross_check({"distribute": ("1.0", "/somewhere")}, [("setuptools", ("2.0", "/somewhere_different", None))])
self.failUnlessEqual(len(res), 1)
self.failUnlessIn("location mismatch", res[0])
res = cross_check({"zope.interface": ("1.0", "")}, [("zope.interface", ("unknown", "", None))])
self.failUnlessEqual(res, [])
res = cross_check({"zope.interface": ("unknown", "")}, [("zope.interface", ("unknown", "", None))])
self.failUnlessEqual(res, [])
res = cross_check({"foo": ("1.0", "")}, [("foo", ("unknown", "", None))])
self.failUnlessEqual(len(res), 1)
self.failUnlessIn("could not find a version number", res[0])
res = cross_check({"foo": ("unknown", "")}, [("foo", ("unknown", "", None))])
self.failUnlessEqual(res, [])
# When pkg_resources and import both find a package, there is only a warning if both
# the version and the path fail to match.
res = cross_check({"foo": ("1.0", "/somewhere")}, [("foo", ("2.0", "/somewhere", None))])
self.failUnlessEqual(res, [])
res = cross_check({"foo": ("1.0", "/somewhere")}, [("foo", ("1.0", "/somewhere_different", None))])
self.failUnlessEqual(res, [])
res = cross_check({"foo": ("1.0-r123", "/somewhere")}, [("foo", ("1.0.post123", "/somewhere_different", None))])
self.failUnlessEqual(res, [])
res = cross_check({"foo": ("1.0", "/somewhere")}, [("foo", ("2.0", "/somewhere_different", None))])
self.failUnlessEqual(len(res), 1)
self.assertTrue(("but version '2.0'" in res[0]) or ("but version u'2.0'" in res[0]))
def test_extract_openssl_version(self):
self.failUnlessEqual(extract_openssl_version(MockSSL("")),
("", None, None))
self.failUnlessEqual(extract_openssl_version(MockSSL("NotOpenSSL a.b.c foo")),
("NotOpenSSL", None, "a.b.c foo"))
self.failUnlessEqual(extract_openssl_version(MockSSL("OpenSSL a.b.c")),
("a.b.c", None, None))
self.failUnlessEqual(extract_openssl_version(MockSSL("OpenSSL 1.0.1e 11 Feb 2013")),
("1.0.1e", None, "11 Feb 2013"))
self.failUnlessEqual(extract_openssl_version(MockSSL("OpenSSL 1.0.1e 11 Feb 2013", compiled_without_heartbeats=True)),
("1.0.1e", None, "11 Feb 2013, no heartbeats"))
# based on https://bitbucket.org/tarek/distutilsversion/src/17df9a7d96ef/test_verlib.py
class VersionTestCase(unittest.TestCase):
    # (parsed version, canonical string) pairs shared by several tests below.
    versions = ((V('1.0'), '1.0'),
                (V('1.1'), '1.1'),
                (V('1.2.3'), '1.2.3'),
                (V('1.2'), '1.2'),
                (V('1.2.3a4'), '1.2.3a4'),
                (V('1.2c4'), '1.2c4'),
                (V('1.2.3.4'), '1.2.3.4'),
                (V('1.2.3.4.0b3'), '1.2.3.4b3'),
                (V('1.2.0.0.0'), '1.2'),
                (V('1.0.dev345'), '1.0.dev345'),
                (V('1.0.post456.dev623'), '1.0.post456.dev623'))

    def test_basic_versions(self):
        """Each parsed version renders to its canonical string form."""
        for version, rendered in self.versions:
            self.failUnlessEqual(str(version), rendered)

    def test_from_parts(self):
        """A version round-trips through its ``parts`` tuple."""
        for version, _ in self.versions:
            rebuilt = V.from_parts(*version.parts)
            self.failUnlessEqual(version, rebuilt)
            self.failUnlessEqual(str(version), str(rebuilt))

    def test_irrational_versions(self):
        """Malformed version strings raise IrrationalVersionError."""
        for bad in ('1', '1.2a', '1.2.3b', '1.02', '1.2a03',
                    '1.2a3.04', '1.2.dev.2', '1.2dev', '1.2.dev',
                    '1.2.dev2.post2', '1.2.post2.dev3.post4'):
            self.failUnlessRaises(IrrationalVersionError, V, bad)

    def test_comparison(self):
        """Versions compare according to the normalized-version ordering."""
        self.failUnlessRaises(TypeError, lambda: V('1.2.0') == '1.2')
        self.failUnlessEqual(V('1.2.0'), V('1.2'))
        self.failIfEqual(V('1.2.0'), V('1.2.3'))
        self.failUnless(V('1.2.0') < V('1.2.3'))
        self.failUnless(V('1.0') > V('1.0b2'))
        self.failUnless(V('1.0') < V('1.0.post456.dev623'))

        def assert_ascending(names):
            # Check every adjacent pair, mirroring the original chained
            # comparison expressions.
            for lower, higher in zip(names, names[1:]):
                self.failUnless(V(lower) < V(higher))

        assert_ascending(['1.0a1', '1.0a2', '1.0b1', '1.0b2',
                          '1.0c1', '1.0c2', '1.0'])
        assert_ascending(['1.0.0a1', '1.0.0a2', '1.0.0b1', '1.0.0b2',
                          '1.0.0c1', '1.0.0c2', '1.0.0'])
        assert_ascending(['1.0.post456.dev623', '1.0.post456', '1.0.post1234'])
        assert_ascending([
            '1.0a1',
            '1.0a2.dev456',
            '1.0a2',
            '1.0a2.1.dev456',  # e.g. need to do a quick post release on 1.0a2
            '1.0a2.1',
            '1.0b1.dev456',
            '1.0b2',
            '1.0c1',
            '1.0c2.dev456',
            '1.0c2',
            '1.0.dev7',
            '1.0.dev18',
            '1.0.dev456',
            '1.0.dev1234',
            '1.0',
            '1.0.post456.dev623',  # development version of a post release
            '1.0.post456',
        ])

    def test_suggest_normalized_version(self):
        """suggest() rewrites assorted real-world version spellings."""
        cases = [
            ('1.0', '1.0'),
            ('1.0-alpha1', '1.0a1'),
            ('1.0c2', '1.0c2'),
            ('walla walla washington', None),
            ('2.4c1', '2.4c1'),
            # from setuptools
            ('0.4a1.r10', '0.4a1.post10'),
            ('0.7a1dev-r66608', '0.7a1.dev66608'),
            ('0.6a9.dev-r41475', '0.6a9.dev41475'),
            ('2.4preview1', '2.4c1'),
            ('2.4pre1', '2.4c1'),
            ('2.1-rc2', '2.1c2'),
            # from pypi
            ('0.1dev', '0.1.dev0'),
            ('0.1.dev', '0.1.dev0'),
            # Twisted development versions look like post releases
            ('9.0.0+r2363', '9.0.0.post2363'),
            # Twisted pre-releases use markers like "pre1"
            ('9.0.0pre1', '9.0.0c1'),
            # Tcl-TK uses "p1", "p2" for post releases
            ('1.4p1', '1.4.post1'),
            # from darcsver
            ('1.8.1-r4956', '1.8.1.post4956'),
            # zetuptoolz
            ('0.6c16dev3', '0.6c16.dev3'),
        ]
        for given, expected in cases:
            self.failUnlessEqual(suggest(given), expected)
class T(unittest.TestCase):
    def test_report_import_error(self):
        """
        get_package_versions_and_locations reports a dependency if a dependency
        cannot be imported.
        """
        # Capture the real module now so the cleanup restores the system to
        # its previous state even though we clobber sys.modules below.
        original_foolscap = sys.modules["foolscap"]
        self.addCleanup(setitem, sys.modules, "foolscap", original_foolscap)

        # Make it look like Foolscap isn't installed.
        sys.modules["foolscap"] = None

        vers_and_locs, errors = get_package_versions_and_locations()
        foolscap_stuffs = [
            stuff
            for (pkg, stuff)
            in vers_and_locs
            if pkg == 'foolscap'
        ]
        self.failUnlessEqual(len(foolscap_stuffs), 1)
        self.failUnless([e for e in errors if "\'foolscap\' could not be imported" in e])

View File

@ -127,7 +127,7 @@ class IntroducerWeb(unittest.TestCase):
assert_soup_has_text( assert_soup_has_text(
self, self,
soup, soup,
u"%s: %s" % (allmydata.__appname__, allmydata.__version__), allmydata.__full_version__,
) )
assert_soup_has_text(self, soup, u"no peers!") assert_soup_has_text(self, soup, u"no peers!")
assert_soup_has_text(self, soup, u"subscribers!") assert_soup_has_text(self, soup, u"subscribers!")

View File

@ -6,6 +6,9 @@ import treq
from bs4 import BeautifulSoup from bs4 import BeautifulSoup
from twisted.python.filepath import (
FilePath,
)
from twisted.application import service from twisted.application import service
from twisted.internet import defer from twisted.internet import defer
from twisted.internet.defer import inlineCallbacks, returnValue from twisted.internet.defer import inlineCallbacks, returnValue
@ -316,8 +319,16 @@ class WebMixin(TimezoneMixin):
self.staticdir = self.mktemp() self.staticdir = self.mktemp()
self.clock = Clock() self.clock = Clock()
self.fakeTime = 86460 # 1d 0h 1m 0s self.fakeTime = 86460 # 1d 0h 1m 0s
self.ws = webish.WebishServer(self.s, "0", staticdir=self.staticdir, tempdir = FilePath(self.mktemp())
clock=self.clock, now_fn=lambda:self.fakeTime) tempdir.makedirs()
self.ws = webish.WebishServer(
self.s,
"0",
tempdir=tempdir.path,
staticdir=self.staticdir,
clock=self.clock,
now_fn=lambda:self.fakeTime,
)
self.ws.setServiceParent(self.s) self.ws.setServiceParent(self.s)
self.webish_port = self.ws.getPortnum() self.webish_port = self.ws.getPortnum()
self.webish_url = self.ws.getURL() self.webish_url = self.ws.getURL()

View File

@ -5,6 +5,19 @@ Tests for ``allmydata.webish``.
from uuid import ( from uuid import (
uuid4, uuid4,
) )
from errno import (
EACCES,
)
from io import (
BytesIO,
)
from hypothesis import (
given,
)
from hypothesis.strategies import (
integers,
)
from testtools.matchers import ( from testtools.matchers import (
AfterPreprocessing, AfterPreprocessing,
@ -12,8 +25,13 @@ from testtools.matchers import (
Equals, Equals,
MatchesAll, MatchesAll,
Not, Not,
IsInstance,
HasLength,
) )
from twisted.python.runtime import (
platform,
)
from twisted.python.filepath import ( from twisted.python.filepath import (
FilePath, FilePath,
) )
@ -30,7 +48,7 @@ from ..common import (
from ...webish import ( from ...webish import (
TahoeLAFSRequest, TahoeLAFSRequest,
tahoe_lafs_site, TahoeLAFSSite,
) )
@ -96,7 +114,7 @@ class TahoeLAFSRequestTests(SyncTestCase):
class TahoeLAFSSiteTests(SyncTestCase): class TahoeLAFSSiteTests(SyncTestCase):
""" """
Tests for the ``Site`` created by ``tahoe_lafs_site``. Tests for ``TahoeLAFSSite``.
""" """
def _test_censoring(self, path, censored): def _test_censoring(self, path, censored):
""" """
@ -112,7 +130,7 @@ class TahoeLAFSSiteTests(SyncTestCase):
""" """
logPath = self.mktemp() logPath = self.mktemp()
site = tahoe_lafs_site(Resource(), logPath=logPath) site = TahoeLAFSSite(self.mktemp(), Resource(), logPath=logPath)
site.startFactory() site.startFactory()
channel = DummyChannel() channel = DummyChannel()
@ -170,6 +188,106 @@ class TahoeLAFSSiteTests(SyncTestCase):
b"/uri?uri=[CENSORED]", b"/uri?uri=[CENSORED]",
) )
def _create_request(self, tempdir):
"""
Create and return a new ``TahoeLAFSRequest`` hooked up to a
``TahoeLAFSSite``.
:param bytes tempdir: The temporary directory to give to the site.
:return TahoeLAFSRequest: The new request instance.
"""
site = TahoeLAFSSite(tempdir.path, Resource(), logPath=self.mktemp())
site.startFactory()
channel = DummyChannel()
channel.site = site
request = TahoeLAFSRequest(channel)
return request
@given(integers(min_value=0, max_value=1024 * 1024 - 1))
def test_small_content(self, request_body_size):
"""
A request body smaller than 1 MiB is kept in memory.
"""
tempdir = FilePath(self.mktemp())
request = self._create_request(tempdir)
request.gotLength(request_body_size)
self.assertThat(
request.content,
IsInstance(BytesIO),
)
def _large_request_test(self, request_body_size):
"""
Assert that when a request with a body of of the given size is received
its content is written to the directory the ``TahoeLAFSSite`` is
configured with.
"""
tempdir = FilePath(self.mktemp())
tempdir.makedirs()
request = self._create_request(tempdir)
# So. Bad news. The temporary file for the uploaded content is
# unnamed (and this isn't even necessarily a bad thing since it is how
# you get automatic on-process-exit cleanup behavior on POSIX). It's
# not visible by inspecting the filesystem. It has no name we can
# discover. Then how do we verify it is written to the right place?
# The question itself is meaningless if we try to be too precise. It
# *has* no filesystem location. However, it is still stored *on* some
# filesystem. We still want to make sure it is on the filesystem we
# specified because otherwise it might be on a filesystem that's too
# small or undesirable in some other way.
#
# I don't know of any way to ask a file descriptor which filesystem
# it's on, either, though. It might be the case that the [f]statvfs()
# result could be compared somehow to infer the filesystem but
# ... it's not clear what the failure modes might be there, across
# different filesystems and runtime environments.
#
# Another approach is to make the temp directory unwriteable and
# observe the failure when an attempt is made to create a file there.
# This is hardly a lovely solution but at least it's kind of simple.
#
# It would be nice if it worked consistently cross-platform but on
# Windows os.chmod is more or less broken.
if platform.isWindows():
request.gotLength(request_body_size)
self.assertThat(
tempdir.children(),
HasLength(1),
)
else:
tempdir.chmod(0o550)
with self.assertRaises(OSError) as ctx:
request.gotLength(request_body_size)
raise Exception(
"OSError not raised, instead tempdir.children() = {}".format(
tempdir.children(),
),
)
self.assertThat(
ctx.exception.errno,
Equals(EACCES),
)
def test_unknown_request_size(self):
"""
A request body with an unknown size is written to a file in the temporary
directory passed to ``TahoeLAFSSite``.
"""
self._large_request_test(None)
@given(integers(min_value=1024 * 1024))
def test_large_request(self, request_body_size):
"""
A request body of 1 MiB or more is written to a file in the temporary
directory passed to ``TahoeLAFSSite``.
"""
self._large_request_test(request_body_size)
def param(name, value): def param(name, value):
return u"; {}={}".format(name, value) return u"; {}={}".format(name, value)

View File

@ -141,6 +141,7 @@ PORTED_TEST_MODULES = [
"allmydata.test.test_helper", "allmydata.test.test_helper",
"allmydata.test.test_humanreadable", "allmydata.test.test_humanreadable",
"allmydata.test.test_immutable", "allmydata.test.test_immutable",
"allmydata.test.test_introducer",
"allmydata.test.test_iputil", "allmydata.test.test_iputil",
"allmydata.test.test_log", "allmydata.test.test_log",
"allmydata.test.test_monitor", "allmydata.test.test_monitor",
@ -159,5 +160,4 @@ PORTED_TEST_MODULES = [
"allmydata.test.test_upload", "allmydata.test.test_upload",
"allmydata.test.test_uri", "allmydata.test.test_uri",
"allmydata.test.test_util", "allmydata.test.test_util",
"allmydata.test.test_version",
] ]

View File

@ -20,6 +20,10 @@ from configparser import ConfigParser
import attr import attr
from twisted.python.runtime import (
platform,
)
class UnknownConfigError(Exception): class UnknownConfigError(Exception):
""" """
@ -59,8 +63,25 @@ def set_config(config, section, option, value):
assert config.get(section, option) == value assert config.get(section, option) == value
def write_config(tahoe_cfg, config): def write_config(tahoe_cfg, config):
with open(tahoe_cfg, "w") as f: """
config.write(f) Write a configuration to a file.
:param FilePath tahoe_cfg: The path to which to write the config.
:param ConfigParser config: The configuration to write.
:return: ``None``
"""
tmp = tahoe_cfg.temporarySibling()
# FilePath.open can only open files in binary mode which does not work
# with ConfigParser.write.
with open(tmp.path, "wt") as fp:
config.write(fp)
# Windows doesn't have atomic overwrite semantics for moveTo. Thus we end
# up slightly less than atomic.
if platform.isWindows():
tahoe_cfg.remove()
tmp.moveTo(tahoe_cfg)
def validate_config(fname, cfg, valid_config): def validate_config(fname, cfg, valid_config):
""" """
@ -102,10 +123,34 @@ class ValidConfiguration(object):
an item name as bytes and returns True if that section, item pair is an item name as bytes and returns True if that section, item pair is
valid, False otherwise. valid, False otherwise.
""" """
_static_valid_sections = attr.ib() _static_valid_sections = attr.ib(
validator=attr.validators.instance_of(dict)
)
_is_valid_section = attr.ib(default=lambda section_name: False) _is_valid_section = attr.ib(default=lambda section_name: False)
_is_valid_item = attr.ib(default=lambda section_name, item_name: False) _is_valid_item = attr.ib(default=lambda section_name, item_name: False)
@classmethod
def everything(cls):
"""
Create a validator which considers everything valid.
"""
return cls(
{},
lambda section_name: True,
lambda section_name, item_name: True,
)
@classmethod
def nothing(cls):
"""
Create a validator which considers nothing valid.
"""
return cls(
{},
lambda section_name: False,
lambda section_name, item_name: False,
)
def is_valid_section(self, section_name): def is_valid_section(self, section_name):
""" """
:return: True if the given section name is valid, False otherwise. :return: True if the given section name is valid, False otherwise.
@ -136,6 +181,23 @@ class ValidConfiguration(object):
) )
def copy_config(old):
"""
Return a brand new ``ConfigParser`` containing the same values as
the given object.
:param ConfigParser old: The configuration to copy.
:return ConfigParser: The new object containing the same configuration.
"""
new = ConfigParser()
for section_name in old.sections():
new.add_section(section_name)
for k, v in old.items(section_name):
new.set(section_name, k, v.replace("%", "%%"))
return new
def _either(f, g): def _either(f, g):
""" """
:return: A function which returns True if either f or g returns True. :return: A function which returns True if either f or g returns True.

View File

@ -1,334 +0,0 @@
"""
Produce reports about the versions of Python software in use by Tahoe-LAFS
for debugging and auditing purposes.
Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from future.utils import PY2
if PY2:
from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
__all__ = [
"PackagingError",
"get_package_versions",
"get_package_versions_string",
"normalized_version",
]
import os, platform, re, sys, traceback, pkg_resources
import six
import distro
from . import (
__appname__,
full_version,
branch,
)
from .util import (
verlib,
)
if getattr(sys, 'frozen', None):
    # "Frozen" python interpreters (i.e., standalone executables
    # generated by PyInstaller and other, similar utilities) run
    # independently of a traditional setuptools-based packaging
    # environment, and so pkg_resources.get_distribution() cannot be
    # used in such cases to gather a list of requirements at runtime
    # (and because a frozen application is one that has already been
    # "installed", an empty list suffices here).
    _INSTALL_REQUIRES = []
else:
    # Ask pkg_resources for this package's declared requirements so
    # _get_package_versions_and_locations can cross-check them against
    # what actually gets imported.
    _INSTALL_REQUIRES = list(
        str(req)
        for req
        in pkg_resources.get_distribution(__appname__).requires()
    )
class PackagingError(EnvironmentError):
    """
    Raised when there is an error in packaging of Tahoe-LAFS or its
    dependencies which makes it impossible to proceed safely.
    """
def get_package_versions():
    """
    :return dict: Map each known package name to its version string,
        derived from the module-level ``_vers_and_locs_list`` report.
    """
    return {
        name: version
        for name, (version, _location, _comment)
        in _vers_and_locs_list
    }
def get_package_versions_string(show_paths=False, debug=False):
    """
    Render a human-readable, newline-terminated report of package versions.

    :param bool show_paths: If True, include each package's location.
    :param bool debug: If True, append extra diagnostic detail to any
        cross-check error report.
    :return str: One line per package, followed by any cross-check errors.
    """
    lines = []
    for name, (version, location, comment) in _vers_and_locs_list:
        entry = str(name) + ": " + str(version)
        if comment:
            entry = entry + " [%s]" % str(comment)
        if show_paths:
            entry = entry + " (%s)" % str(location)
        lines.append(entry)

    output = "\n".join(lines) + "\n"

    if _cross_check_errors:
        output += _get_error_string(_cross_check_errors, debug=debug)

    return output
# Patterns for extracting a Linux distributor ID and release number from
# `lsb_release` command output and from /etc/lsb-release-style files.
# NOTE(review): neither these patterns nor the two globals below are
# referenced by any code visible in this module view -- possibly legacy;
# confirm before relying on them.
_distributor_id_cmdline_re = re.compile("(?:Distributor ID:)\s*(.*)", re.I)
_release_cmdline_re = re.compile("(?:Release:)\s*(.*)", re.I)
_distributor_id_file_re = re.compile("(?:DISTRIB_ID\s*=)\s*(.*)", re.I)
_release_file_re = re.compile("(?:DISTRIB_RELEASE\s*=)\s*(.*)", re.I)
# Caches for the detected distribution name/version (unset in this view).
_distname = None
_version = None
def normalized_version(verstr, what=None):
    """
    Parse a version string into a ``verlib.NormalizedVersion``.

    The string is first run through ``suggest_normalized_version``;
    ``IrrationalVersionError`` propagates unchanged, while any other parse
    failure is re-raised as a ``PackagingError`` that keeps the original
    traceback.

    :param verstr: the version string to parse.
    :param what: optional description used in the error message.
    """
    try:
        candidate = verlib.suggest_normalized_version(verstr) or verstr
        return verlib.NormalizedVersion(candidate)
    except verlib.IrrationalVersionError:
        raise
    except Exception:
        cls, value, trace = sys.exc_info()
        new_exc = PackagingError("could not parse %s due to %s: %s"
                                 % (what or repr(verstr), cls.__name__, value))
        six.reraise(cls, new_exc, trace)
def _get_error_string(errors, debug=False):
msg = "\n%s\n" % ("\n".join(errors),)
if debug:
msg += (
"\n"
"For debugging purposes, the PYTHONPATH was\n"
" %r\n"
"install_requires was\n"
" %r\n"
"sys.path after importing pkg_resources was\n"
" %s\n"
% (
os.environ.get('PYTHONPATH'),
_INSTALL_REQUIRES,
(os.pathsep+"\n ").join(sys.path),
)
)
return msg
def _cross_check(pkg_resources_vers_and_locs, imported_vers_and_locs_list):
    """This function returns a list of errors due to any failed cross-checks.

    :param dict pkg_resources_vers_and_locs: maps a lowercased project name
        to a (version, location) pair as reported by pkg_resources.
    :param imported_vers_and_locs_list: a list of
        (name, (version, location, comment)) tuples produced by actually
        importing each package.
    :return list: human-readable warning strings; empty if all checks pass.
    """
    from ._auto_deps import not_import_versionable
    errors = []
    # Synthesized entries (interpreter, platform, the app itself, OpenSSL)
    # that pkg_resources knows nothing about -- skip them entirely.
    not_pkg_resourceable = ['python', 'platform', __appname__.lower(), 'openssl']
    for name, (imp_ver, imp_loc, imp_comment) in imported_vers_and_locs_list:
        name = name.lower()
        if name not in not_pkg_resourceable:
            if name not in pkg_resources_vers_and_locs:
                # Special case: "distribute" installs itself under the
                # "setuptools" import name; a matching location plus the
                # 'distribute' comment means everything is consistent.
                if name == "setuptools" and "distribute" in pkg_resources_vers_and_locs:
                    pr_ver, pr_loc = pkg_resources_vers_and_locs["distribute"]
                    if not (os.path.normpath(os.path.realpath(pr_loc)) == os.path.normpath(os.path.realpath(imp_loc))
                            and imp_comment == "distribute"):
                        errors.append("Warning: dependency 'setuptools' found to be version %r of 'distribute' from %r "
                                      "by pkg_resources, but 'import setuptools' gave version %r [%s] from %r. "
                                      "A version mismatch is expected, but a location mismatch is not."
                                      % (pr_ver, pr_loc, imp_ver, imp_comment or 'probably *not* distribute', imp_loc))
                else:
                    errors.append("Warning: dependency %r (version %r imported from %r) was not found by pkg_resources."
                                  % (name, imp_ver, imp_loc))
                continue

            pr_ver, pr_loc = pkg_resources_vers_and_locs[name]
            # (None, None) is the sentinel recorded when the import failed;
            # imp_comment then carries the trace info.
            if imp_ver is None and imp_loc is None:
                errors.append("Warning: dependency %r could not be imported. pkg_resources thought it should be possible "
                              "to import version %r from %r.\nThe exception trace was %r."
                              % (name, pr_ver, pr_loc, imp_comment))
                continue

            # If the pkg_resources version is identical to the imported version, don't attempt
            # to normalize them, since it is unnecessary and may fail (ticket #2499).
            if imp_ver != 'unknown' and pr_ver == imp_ver:
                continue

            try:
                pr_normver = normalized_version(pr_ver)
            except verlib.IrrationalVersionError:
                # Irrational versions are tolerated rather than reported.
                continue
            except Exception as e:
                errors.append("Warning: version number %r found for dependency %r by pkg_resources could not be parsed. "
                              "The version found by import was %r from %r. "
                              "pkg_resources thought it should be found at %r. "
                              "The exception was %s: %s"
                              % (pr_ver, name, imp_ver, imp_loc, pr_loc, e.__class__.__name__, e))
            else:
                if imp_ver == 'unknown':
                    # Some packages are known not to expose a version via
                    # import; only warn for those expected to have one.
                    if name not in not_import_versionable:
                        errors.append("Warning: unexpectedly could not find a version number for dependency %r imported from %r. "
                                      "pkg_resources thought it should be version %r at %r."
                                      % (name, imp_loc, pr_ver, pr_loc))
                else:
                    try:
                        imp_normver = normalized_version(imp_ver)
                    except verlib.IrrationalVersionError:
                        continue
                    except Exception as e:
                        errors.append("Warning: version number %r found for dependency %r (imported from %r) could not be parsed. "
                                      "pkg_resources thought it should be version %r at %r. "
                                      "The exception was %s: %s"
                                      % (imp_ver, name, imp_loc, pr_ver, pr_loc, e.__class__.__name__, e))
                    else:
                        # A version disagreement is only reported when the
                        # two locations also differ (per the message below,
                        # a location mismatch is the real problem).
                        if pr_ver == 'unknown' or (pr_normver != imp_normver):
                            if not os.path.normpath(os.path.realpath(pr_loc)) == os.path.normpath(os.path.realpath(imp_loc)):
                                errors.append("Warning: dependency %r found to have version number %r (normalized to %r, from %r) "
                                              "by pkg_resources, but version %r (normalized to %r, from %r) by import."
                                              % (name, pr_ver, str(pr_normver), pr_loc, imp_ver, str(imp_normver), imp_loc))
    return errors
def _get_openssl_version():
    """
    Return the OpenSSL version triple via ``_extract_openssl_version``,
    or ("unknown", None, None) if pyOpenSSL is unavailable or inspection
    fails for any reason.
    """
    try:
        from OpenSSL import SSL
    except Exception:
        return ("unknown", None, None)
    try:
        return _extract_openssl_version(SSL)
    except Exception:
        return ("unknown", None, None)
def _extract_openssl_version(ssl_module):
openssl_version = ssl_module.SSLeay_version(ssl_module.SSLEAY_VERSION)
if openssl_version.startswith('OpenSSL '):
openssl_version = openssl_version[8 :]
(version, _, comment) = openssl_version.partition(' ')
try:
openssl_cflags = ssl_module.SSLeay_version(ssl_module.SSLEAY_CFLAGS)
if '-DOPENSSL_NO_HEARTBEATS' in openssl_cflags.split(' '):
comment += ", no heartbeats"
except Exception:
pass
return (version, None, comment if comment else None)
def _get_platform():
    """
    Our version of platform.platform(), telling us both less and more than
    the Python Standard Library's version does: kernel version details are
    omitted, but on Linux the distribution name and version (via ``distro``)
    are included.
    """
    if "linux" not in platform.system().lower():
        return platform.platform()
    return (
        platform.system() + "-" +
        "_".join(distro.linux_distribution()[:2]) + "-" +
        platform.machine() + "-" +
        "_".join([arch for arch in platform.architecture() if arch])
    )
def _get_package_versions_and_locations():
    """
    Import each known dependency and collect its version and location.

    :return: a ``(packages, cross_check_errors)`` pair where ``packages`` is
        a list of (name, (version, location, comment)) tuples and
        ``cross_check_errors`` is the warning list from ``_cross_check``.
    """
    import warnings
    from ._auto_deps import package_imports, global_deprecation_messages, deprecation_messages, \
        runtime_warning_messages, warning_imports, ignorable

    def package_dir(srcfile):
        # Two levels up from the module file: the directory containing the
        # package directory.
        return os.path.dirname(os.path.dirname(os.path.normcase(os.path.realpath(srcfile))))

    # pkg_resources.require returns the distribution that pkg_resources attempted to put
    # on sys.path, which can differ from the one that we actually import due to #1258,
    # or any other bug that causes sys.path to be set up incorrectly. Therefore we
    # must import the packages in order to check their versions and paths.

    # This is to suppress all UserWarnings and various DeprecationWarnings and RuntimeWarnings
    # (listed in _auto_deps.py).
    warnings.filterwarnings("ignore", category=UserWarning, append=True)
    for msg in global_deprecation_messages + deprecation_messages:
        warnings.filterwarnings("ignore", category=DeprecationWarning, message=msg, append=True)
    for msg in runtime_warning_messages:
        warnings.filterwarnings("ignore", category=RuntimeWarning, message=msg, append=True)
    try:
        for modulename in warning_imports:
            try:
                __import__(modulename)
            except (ImportError, SyntaxError):
                pass
    finally:
        # Leave suppressions for UserWarnings and global_deprecation_messages active.
        # (The filters above were appended, so popping removes exactly the
        # per-message DeprecationWarning/RuntimeWarning filters added here.)
        for _ in runtime_warning_messages + deprecation_messages:
            warnings.filters.pop()

    packages = []
    pkg_resources_vers_and_locs = dict()

    if not hasattr(sys, 'frozen'):
        # Map each required project (lowercased) to the version/location
        # that pkg_resources says it put on sys.path.
        pkg_resources_vers_and_locs = {
            p.project_name.lower(): (str(p.version), p.location)
            for p
            in pkg_resources.require(_INSTALL_REQUIRES)
        }

    def get_version(module):
        # Best-effort version discovery: prefer __version__, fall back to a
        # 'version' attribute (possibly a tuple), else 'unknown'.
        if hasattr(module, '__version__'):
            return str(getattr(module, '__version__'))
        elif hasattr(module, 'version'):
            ver = getattr(module, 'version')
            if isinstance(ver, tuple):
                return '.'.join(map(str, ver))
            else:
                return str(ver)
        else:
            return 'unknown'

    for pkgname, modulename in [(__appname__, 'allmydata')] + package_imports:
        if modulename:
            try:
                __import__(modulename)
                module = sys.modules[modulename]
            except (ImportError, SyntaxError):
                # Record the failure; the (None, None, trace_info) sentinel
                # is what _cross_check reports as "could not be imported".
                etype, emsg, etrace = sys.exc_info()
                trace_info = (etype, str(emsg), ([None] + traceback.extract_tb(etrace))[-1])
                packages.append( (pkgname, (None, None, trace_info)) )
            else:
                comment = None
                if pkgname == __appname__:
                    comment = "%s: %s" % (branch, full_version)
                elif pkgname == 'setuptools' and hasattr(module, '_distribute'):
                    # distribute does not report its version in any module variables
                    comment = 'distribute'
                ver = get_version(module)
                loc = package_dir(module.__file__)
                if ver == "unknown" and pkgname in pkg_resources_vers_and_locs:
                    # Fall back to the pkg_resources version, but only when
                    # the locations agree (so it describes the same copy).
                    (pr_ver, pr_loc) = pkg_resources_vers_and_locs[pkgname]
                    if loc == os.path.normcase(os.path.realpath(pr_loc)):
                        ver = pr_ver
                packages.append( (pkgname, (ver, loc, comment)) )
        elif pkgname == 'python':
            packages.append( (pkgname, (platform.python_version(), sys.executable, None)) )
        elif pkgname == 'platform':
            packages.append( (pkgname, (_get_platform(), None, None)) )
        elif pkgname == 'OpenSSL':
            packages.append( (pkgname, _get_openssl_version()) )

    cross_check_errors = []

    if len(pkg_resources_vers_and_locs) > 0:
        # Anything pkg_resources knows about that we did not import (and is
        # not explicitly ignorable) is appended as an extra entry.
        imported_packages = set([p.lower() for (p, _) in packages])
        extra_packages = []

        for pr_name, (pr_ver, pr_loc) in pkg_resources_vers_and_locs.items():
            if pr_name not in imported_packages and pr_name not in ignorable:
                extra_packages.append( (pr_name, (pr_ver, pr_loc, "according to pkg_resources")) )

        cross_check_errors = _cross_check(pkg_resources_vers_and_locs, packages)
        packages += extra_packages

    return packages, cross_check_errors
_vers_and_locs_list, _cross_check_errors = _get_package_versions_and_locations()

View File

@ -6,7 +6,6 @@ from twisted.python.filepath import FilePath
from twisted.web import static from twisted.web import static
import allmydata import allmydata
import json import json
from allmydata.version_checks import get_package_versions_string
from allmydata.util import idlib from allmydata.util import idlib
from allmydata.web.common import ( from allmydata.web.common import (
render_time, render_time,
@ -89,7 +88,7 @@ class IntroducerRootElement(Element):
self.introducer_service = introducer_service self.introducer_service = introducer_service
self.node_data_dict = { self.node_data_dict = {
"my_nodeid": idlib.nodeid_b2a(self.introducer_node.nodeid), "my_nodeid": idlib.nodeid_b2a(self.introducer_node.nodeid),
"version": get_package_versions_string(), "version": allmydata.__full_version__,
"import_path": str(allmydata).replace("/", "/ "), # XXX kludge for wrapping "import_path": str(allmydata).replace("/", "/ "), # XXX kludge for wrapping
"rendered_at": render_time(time.time()), "rendered_at": render_time(time.time()),
} }
@ -105,7 +104,7 @@ class IntroducerRootElement(Element):
if ad.service_name not in services: if ad.service_name not in services:
services[ad.service_name] = 0 services[ad.service_name] = 0
services[ad.service_name] += 1 services[ad.service_name] += 1
service_names = services.keys() service_names = list(services.keys())
service_names.sort() service_names.sort()
return u", ".join(u"{}: {}".format(service_name, services[service_name]) return u", ".join(u"{}: {}".format(service_name, services[service_name])
for service_name in service_names) for service_name in service_names)

View File

@ -21,7 +21,6 @@ from twisted.web.template import (
) )
import allmydata # to display import path import allmydata # to display import path
from allmydata.version_checks import get_package_versions_string
from allmydata.util import log from allmydata.util import log
from allmydata.interfaces import IFileNode from allmydata.interfaces import IFileNode
from allmydata.web import ( from allmydata.web import (
@ -566,7 +565,7 @@ class RootElement(Element):
@renderer @renderer
def version(self, req, tag): def version(self, req, tag):
return tag(get_package_versions_string()) return tag(allmydata.__full_version__)
@renderer @renderer
def import_path(self, req, tag): def import_path(self, req, tag):

View File

@ -1,13 +1,13 @@
from six import ensure_str from six import ensure_str
import re, time import re, time, tempfile
from functools import (
partial,
)
from cgi import ( from cgi import (
FieldStorage, FieldStorage,
) )
from io import (
BytesIO,
)
from twisted.application import service, strports, internet from twisted.application import service, strports, internet
from twisted.web import static from twisted.web import static
@ -150,17 +150,34 @@ def _logFormatter(logDateTime, request):
) )
tahoe_lafs_site = partial( class TahoeLAFSSite(Site, object):
Site, """
requestFactory=TahoeLAFSRequest, The HTTP protocol factory used by Tahoe-LAFS.
logFormatter=_logFormatter,
) Among the behaviors provided:
* A configurable temporary directory where large request bodies can be
written so they don't stay in memory.
* A log formatter that writes some access logs but omits capability
strings to help keep them secret.
"""
requestFactory = TahoeLAFSRequest
def __init__(self, tempdir, *args, **kwargs):
Site.__init__(self, *args, logFormatter=_logFormatter, **kwargs)
self._tempdir = tempdir
def getContentFile(self, length):
if length is None or length >= 1024 * 1024:
return tempfile.TemporaryFile(dir=self._tempdir)
return BytesIO()
class WebishServer(service.MultiService): class WebishServer(service.MultiService):
name = "webish" name = "webish"
def __init__(self, client, webport, nodeurl_path=None, staticdir=None, def __init__(self, client, webport, tempdir, nodeurl_path=None, staticdir=None,
clock=None, now_fn=time.time): clock=None, now_fn=time.time):
service.MultiService.__init__(self) service.MultiService.__init__(self)
# the 'data' argument to all render() methods default to the Client # the 'data' argument to all render() methods default to the Client
@ -170,7 +187,7 @@ class WebishServer(service.MultiService):
# time in a deterministic manner. # time in a deterministic manner.
self.root = root.Root(client, clock, now_fn) self.root = root.Root(client, clock, now_fn)
self.buildServer(webport, nodeurl_path, staticdir) self.buildServer(webport, tempdir, nodeurl_path, staticdir)
# If set, clock is a twisted.internet.task.Clock that the tests # If set, clock is a twisted.internet.task.Clock that the tests
# use to test ophandle expiration. # use to test ophandle expiration.
@ -180,9 +197,9 @@ class WebishServer(service.MultiService):
self.root.putChild(b"storage-plugins", StoragePlugins(client)) self.root.putChild(b"storage-plugins", StoragePlugins(client))
def buildServer(self, webport, nodeurl_path, staticdir): def buildServer(self, webport, tempdir, nodeurl_path, staticdir):
self.webport = webport self.webport = webport
self.site = tahoe_lafs_site(self.root) self.site = TahoeLAFSSite(tempdir, self.root)
self.staticdir = staticdir # so tests can check self.staticdir = staticdir # so tests can check
if staticdir: if staticdir:
self.root.putChild("static", static.File(staticdir)) self.root.putChild("static", static.File(staticdir))
@ -260,4 +277,4 @@ class IntroducerWebishServer(WebishServer):
def __init__(self, introducer, webport, nodeurl_path=None, staticdir=None): def __init__(self, introducer, webport, nodeurl_path=None, staticdir=None):
service.MultiService.__init__(self) service.MultiService.__init__(self)
self.root = introweb.IntroducerRoot(introducer) self.root = introweb.IntroducerRoot(introducer)
self.buildServer(webport, nodeurl_path, staticdir) self.buildServer(webport, tempfile.tempdir, nodeurl_path, staticdir)

14
tox.ini
View File

@ -95,12 +95,16 @@ setenv =
# .decode(getattr(sys.stdout, "encoding", "utf8")) # .decode(getattr(sys.stdout, "encoding", "utf8"))
# `TypeError: decode() argument 1 must be string, not None` # `TypeError: decode() argument 1 must be string, not None`
PYTHONIOENCODING=utf_8 PYTHONIOENCODING=utf_8
# If no positional arguments are given, try to run the checks on the
# entire codebase, including various pieces of supporting code.
DEFAULT_FILES=src integration static misc setup.py
commands = commands =
flake8 src integration static misc setup.py flake8 {posargs:{env:DEFAULT_FILES}}
python misc/coding_tools/check-umids.py src python misc/coding_tools/check-umids.py {posargs:{env:DEFAULT_FILES}}
python misc/coding_tools/check-debugging.py python misc/coding_tools/check-debugging.py {posargs:{env:DEFAULT_FILES}}
python misc/coding_tools/find-trailing-spaces.py -r src static misc setup.py python misc/coding_tools/find-trailing-spaces.py -r {posargs:{env:DEFAULT_FILES}}
python misc/coding_tools/check-miscaptures.py python misc/coding_tools/check-miscaptures.py {posargs:{env:DEFAULT_FILES}}
# If towncrier.check fails, you forgot to add a towncrier news # If towncrier.check fails, you forgot to add a towncrier news
# fragment explaining the change in this branch. Create one at # fragment explaining the change in this branch. Create one at