Merge branch 'master' into 3544.furls-not-bytes-maybe

commit dcd865897e
@@ -1,9 +1,10 @@
 repos:
-- repo: local
+- repo: "local"
   hooks:
-  - id: codechecks
-    name: codechecks
+  - id: "codechecks"
+    name: "codechecks"
     stages: ["push"]
+    language: "system"
+    files: ".py$"
     entry: "tox -e codechecks"
-    language: system
-    pass_filenames: false
+    pass_filenames: true
Makefile (16 changed lines)

@@ -13,8 +13,6 @@ MAKEFLAGS += --warn-undefined-variables
 MAKEFLAGS += --no-builtin-rules
 
 # Local target variables
-VCS_HOOK_SAMPLES=$(wildcard .git/hooks/*.sample)
-VCS_HOOKS=$(VCS_HOOK_SAMPLES:%.sample=%)
 PYTHON=python
 export PYTHON
 PYFLAKES=flake8
@@ -31,15 +29,6 @@ TEST_SUITE=allmydata
 default:
 	@echo "no default target"
 
-.PHONY: install-vcs-hooks
-## Install the VCS hooks to run linters on commit and all tests on push
-install-vcs-hooks: .git/hooks/pre-commit .git/hooks/pre-push
-.PHONY: uninstall-vcs-hooks
-## Remove the VCS hooks
-uninstall-vcs-hooks: .tox/create-venvs.log
-	"./$(dir $(<))py36/bin/pre-commit" uninstall || true
-	"./$(dir $(<))py36/bin/pre-commit" uninstall -t pre-push || true
-
 .PHONY: test
 ## Run all tests and code reports
 test: .tox/create-venvs.log
@@ -215,7 +204,7 @@ clean:
 	rm -f *.pkg
 
 .PHONY: distclean
-distclean: clean uninstall-vcs-hooks
+distclean: clean
 	rm -rf src/*.egg-info
 	rm -f src/allmydata/_version.py
 	rm -f src/allmydata/_appname.py
@@ -261,6 +250,3 @@ src/allmydata/_version.py:
 
 .tox/create-venvs.log: tox.ini setup.py
 	tox --notest -p all | tee -a "$(@)"
-
-$(VCS_HOOKS): .tox/create-venvs.log .pre-commit-config.yaml
-	"./$(dir $(<))py36/bin/pre-commit" install --hook-type $(@:.git/hooks/%=%)
@@ -398,13 +398,13 @@ This section controls *when* Tor and I2P are used. The ``[tor]`` and
 ``[i2p]`` sections (described later) control *how* Tor/I2P connections are
 managed.
 
-All Tahoe nodes need to make a connection to the Introducer; the ``[client]
-introducer.furl`` setting (described below) indicates where the Introducer
-lives. Tahoe client nodes must also make connections to storage servers:
-these targets are specified in announcements that come from the Introducer.
-Both are expressed as FURLs (a Foolscap URL), which include a list of
-"connection hints". Each connection hint describes one (of perhaps many)
-network endpoints where the service might live.
+All Tahoe nodes need to make a connection to the Introducer; the
+``private/introducers.yaml`` file (described below) configures where one or more
+Introducers live. Tahoe client nodes must also make connections to storage
+servers: these targets are specified in announcements that come from the
+Introducer. Both are expressed as FURLs (a Foolscap URL), which include a
+list of "connection hints". Each connection hint describes one (of perhaps
+many) network endpoints where the service might live.
 
 Connection hints include a type, and look like:
 
@@ -580,6 +580,8 @@ Client Configuration
 
 ``introducer.furl = (FURL string, mandatory)``
 
+    DEPRECATED. See :ref:`introducer-definitions`.
+
     This FURL tells the client how to connect to the introducer. Each
     Tahoe-LAFS grid is defined by an introducer. The introducer's FURL is
     created by the introducer node and written into its private base
@@ -965,29 +967,28 @@ This section describes these other files.
   with as many people as possible, put the empty string (so that
   ``private/convergence`` is a zero-length file).
 
-Additional Introducer Definitions
-=================================
+.. _introducer-definitions:
+
+Introducer Definitions
+======================
 
-The ``private/introducers.yaml`` file defines additional Introducers. The
-first introducer is defined in ``tahoe.cfg``, in ``[client]
-introducer.furl``. To use two or more Introducers, choose a locally-unique
-"petname" for each one, then define their FURLs in
-``private/introducers.yaml`` like this::
+The ``private/introducers.yaml`` file defines Introducers.
+Choose a locally-unique "petname" for each one then define their FURLs in ``private/introducers.yaml`` like this::
 
   introducers:
     petname2:
-      furl: FURL2
+      furl: "FURL2"
     petname3:
-      furl: FURL3
+      furl: "FURL3"
 
 Servers will announce themselves to all configured introducers. Clients will
 merge the announcements they receive from all introducers. Nothing will
 re-broadcast an announcement (i.e. telling introducer 2 about something you
 heard from introducer 1).
 
-If you omit the introducer definitions from both ``tahoe.cfg`` and
-``introducers.yaml``, the node will not use an Introducer at all. Such
-"introducerless" clients must be configured with static servers (described
+If you omit the introducer definitions from ``introducers.yaml``,
+the node will not use an Introducer at all.
+Such "introducerless" clients must be configured with static servers (described
 below), or they will not be able to upload and download files.
 
 Static Server Definitions
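As a quick illustration of the file format documented above, here is a minimal, hypothetical sketch (not part of this change) that loads a ``private/introducers.yaml`` of that shape with PyYAML and prints each petname and fURL; the path and petnames are examples only::

    # Hypothetical sketch: read an introducers.yaml of the shape shown above.
    import yaml

    with open("private/introducers.yaml") as f:
        data = yaml.safe_load(f) or {}

    for petname, entry in data.get("introducers", {}).items():
        # each entry is a mapping with a "furl" key, e.g. {"furl": "pb://..."}
        print(petname, entry["furl"])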
@@ -1152,7 +1153,6 @@ a legal one.
   timeout.disconnect = 1800
 
   [client]
-  introducer.furl = pb://ok45ssoklj4y7eok5c3xkmj@tcp:tahoe.example:44801/ii3uumo
   helper.furl = pb://ggti5ssoklj4y7eok5c3xkmj@tcp:helper.tahoe.example:7054/kk8lhr
 
   [storage]
@@ -1163,6 +1163,11 @@ a legal one.
   [helper]
   enabled = True
 
+To be introduced to storage servers, here is a sample ``private/introducers.yaml`` which can be used in conjunction::
+
+  introducers:
+    examplegrid:
+      furl: "pb://ok45ssoklj4y7eok5c3xkmj@tcp:tahoe.example:44801/ii3uumo"
 
 Old Configuration Files
 =======================
@@ -5,23 +5,17 @@ Developer Guide
 Pre-commit Checks
 -----------------
 
-This project is configured for use with `pre-commit`_ to install `VCS/git hooks`_ which
-perform some static code analysis checks and other code checks to catch common errors
-before each commit and to run the full self-test suite to find less obvious regressions
-before each push to a remote.
+This project is configured for use with `pre-commit`_ to install `VCS/git hooks`_ which perform some static code analysis checks and other code checks to catch common errors.
+These hooks can be configured to run before commits or pushes
 
 For example::
 
-  tahoe-lafs $ make install-vcs-hooks
-  ...
-  + ./.tox//py36/bin/pre-commit install --hook-type pre-commit
-  pre-commit installed at .git/hooks/pre-commit
-  + ./.tox//py36/bin/pre-commit install --hook-type pre-push
+  tahoe-lafs $ pre-commit install --hook-type pre-push
   pre-commit installed at .git/hooks/pre-push
-  tahoe-lafs $ python -c "import pathlib; pathlib.Path('src/allmydata/tabbed.py').write_text('def foo():\\n\\tpass\\n')"
-  tahoe-lafs $ git add src/allmydata/tabbed.py
+  tahoe-lafs $ echo "undefined" > src/allmydata/undefined_name.py
+  tahoe-lafs $ git add src/allmydata/undefined_name.py
   tahoe-lafs $ git commit -a -m "Add a file that violates flake8"
-  ...
+  tahoe-lafs $ git push
   codechecks...............................................................Failed
   - hook id: codechecks
   - exit code: 1
@@ -30,58 +24,17 @@ For example::
   codechecks inst-nodeps: ...
   codechecks installed: ...
   codechecks run-test-pre: PYTHONHASHSEED='...'
-  codechecks run-test: commands[0] | flake8 src static misc setup.py
-  src/allmydata/tabbed.py:2:1: W191 indentation contains tabs
-  ERROR: InvocationError for command ./tahoe-lafs/.tox/codechecks/bin/flake8 src static misc setup.py (exited with code 1)
+  codechecks run-test: commands[0] | flake8 src/allmydata/undefined_name.py
+  src/allmydata/undefined_name.py:1:1: F821 undefined name 'undefined'
+  ERROR: InvocationError for command ./tahoe-lafs/.tox/codechecks/bin/flake8 src/allmydata/undefined_name.py (exited with code 1)
   ___________________________________ summary ____________________________________
   ERROR: codechecks: commands failed
-  ...
 
 To uninstall::
 
-  tahoe-lafs $ make uninstall-vcs-hooks
-  ...
-  + ./.tox/py36/bin/pre-commit uninstall
-  pre-commit uninstalled
-  + ./.tox/py36/bin/pre-commit uninstall -t pre-push
+  tahoe-lafs $ pre-commit uninstall --hook-type pre-push
   pre-push uninstalled
 
-Note that running the full self-test suite takes several minutes so expect pushing to
-take some time. If you can't or don't want to wait for the hooks in some cases, use the
-``--no-verify`` option to ``$ git commit ...`` or ``$ git push ...``. Alternatively,
-see the `pre-commit`_ documentation and CLI help output and use the committed
-`pre-commit configuration`_ as a starting point to write a local, uncommitted
-``../.pre-commit-config.local.yaml`` configuration to use instead. For example::
-
-  tahoe-lafs $ ./.tox/py36/bin/pre-commit --help
-  tahoe-lafs $ ./.tox/py36/bin/pre-commit instll --help
-  tahoe-lafs $ cp "./.pre-commit-config.yaml" "./.pre-commit-config.local.yaml"
-  tahoe-lafs $ editor "./.pre-commit-config.local.yaml"
-  ...
-  tahoe-lafs $ ./.tox/py36/bin/pre-commit install -c "./.pre-commit-config.local.yaml" -t pre-push
-  pre-commit installed at .git/hooks/pre-push
-  tahoe-lafs $ git commit -a -m "Add a file that violates flake8"
-  [3398.pre-commit 29f8f43d2] Add a file that violates flake8
-  1 file changed, 2 insertions(+)
-  create mode 100644 src/allmydata/tabbed.py
-  tahoe-lafs $ git push
-  ...
-  codechecks...............................................................Failed
-  - hook id: codechecks
-  - exit code: 1
-
-  GLOB sdist-make: ./tahoe-lafs/setup.py
-  codechecks inst-nodeps: ...
-  codechecks installed: ...
-  codechecks run-test-pre: PYTHONHASHSEED='...'
-  codechecks run-test: commands[0] | flake8 src static misc setup.py
-  src/allmydata/tabbed.py:2:1: W191 indentation contains tabs
-  ERROR: InvocationError for command ./tahoe-lafs/.tox/codechecks/bin/flake8 src static misc setup.py (exited with code 1)
-  ___________________________________ summary ____________________________________
-  ERROR: codechecks: commands failed
-  ...
-
-  error: failed to push some refs to 'github.com:jaraco/tahoe-lafs.git'
-
 
 .. _`pre-commit`: https://pre-commit.com
@@ -20,7 +20,7 @@ Config setting                     File                                  Comment
 ``[node]log_gatherer.furl``        ``BASEDIR/log_gatherer.furl``         (one per line)
 ``[node]timeout.keepalive``        ``BASEDIR/keepalive_timeout``
 ``[node]timeout.disconnect``       ``BASEDIR/disconnect_timeout``
-``[client]introducer.furl``        ``BASEDIR/introducer.furl``
+``BASEDIR/introducer.furl``        ``BASEDIR/private/introducers.yaml``
 ``[client]helper.furl``            ``BASEDIR/helper.furl``
 ``[client]key_generator.furl``     ``BASEDIR/key_generator.furl``
 ``[client]stats_gatherer.furl``    ``BASEDIR/stats_gatherer.furl``
@@ -65,9 +65,9 @@ Running a Client
 To construct a client node, run “``tahoe create-client``”, which will create
 ``~/.tahoe`` to be the node's base directory. Acquire the ``introducer.furl``
 (see below if you are running your own introducer, or use the one from the
-`TestGrid page`_), and paste it after ``introducer.furl =`` in the
-``[client]`` section of ``~/.tahoe/tahoe.cfg``. Then use “``tahoe run
-~/.tahoe``”. After that, the node should be off and running. The first thing
+`TestGrid page`_), and write it to ``~/.tahoe/private/introducers.yaml``
+(see :ref:`introducer-definitions`). Then use “``tahoe run ~/.tahoe``”.
+After that, the node should be off and running. The first thing
 it will do is connect to the introducer and get itself connected to all other
 nodes on the grid.
 
@@ -1,7 +1,6 @@
 from __future__ import print_function
 
 import sys
-from os import mkdir
 from os.path import join
 
 import pytest
@@ -9,6 +8,14 @@ import pytest_twisted
 
 import util
 
+from twisted.python.filepath import (
+    FilePath,
+)
+
+from allmydata.test.common import (
+    write_introducer,
+)
+
 # see "conftest.py" for the fixtures (e.g. "tor_network")
 
 # XXX: Integration tests that involve Tor do not run reliably on
@@ -66,12 +73,12 @@ def test_onion_service_storage(reactor, request, temp_dir, flog_gatherer, tor_ne
 
 @pytest_twisted.inlineCallbacks
 def _create_anonymous_node(reactor, name, control_port, request, temp_dir, flog_gatherer, tor_network, introducer_furl):
-    node_dir = join(temp_dir, name)
+    node_dir = FilePath(temp_dir).child(name)
     web_port = "tcp:{}:interface=localhost".format(control_port + 2000)
 
     if True:
-        print("creating", node_dir)
-        mkdir(node_dir)
+        print("creating", node_dir.path)
+        node_dir.makedirs()
         proto = util._DumpOutputProtocol(None)
         reactor.spawnProcess(
             proto,
@@ -84,12 +91,15 @@ def _create_anonymous_node(reactor, name, control_port, request, temp_dir, flog_
                 '--hide-ip',
                 '--tor-control-port', 'tcp:localhost:{}'.format(control_port),
                 '--listen', 'tor',
-                node_dir,
+                node_dir.path,
             )
         )
     yield proto.done
 
-    with open(join(node_dir, 'tahoe.cfg'), 'w') as f:
+    # Which services should this client connect to?
+    write_introducer(node_dir, "default", introducer_furl)
+    with node_dir.child('tahoe.cfg').open('w') as f:
         f.write('''
 [node]
 nickname = %(name)s
@@ -105,15 +115,12 @@ onion = true
 onion.private_key_file = private/tor_onion.privkey
 
 [client]
-# Which services should this client connect to?
-introducer.furl = %(furl)s
 shares.needed = 1
 shares.happy = 1
 shares.total = 2
 
 ''' % {
     'name': name,
-    'furl': introducer_furl,
     'web_port': web_port,
     'log_furl': flog_gatherer,
     'control_port': control_port,
@@ -121,5 +128,5 @@ shares.total = 2
 })
 
     print("running")
-    yield util._run_node(reactor, node_dir, request, None)
+    yield util._run_node(reactor, node_dir.path, request, None)
     print("okay, launched")
@@ -11,8 +11,12 @@ umids = {}
 
 for starting_point in sys.argv[1:]:
     for root, dirs, files in os.walk(starting_point):
-        for fn in [f for f in files if f.endswith(".py")]:
-            fn = os.path.join(root, fn)
+        for f in files:
+            if not f.endswith(".py"):
+                continue
+            if f == "check-debugging.py":
+                continue
+            fn = os.path.join(root, f)
             for lineno,line in enumerate(open(fn, "r").readlines()):
                 lineno = lineno+1
                 mo = re.search(r"\.setDebugging\(True\)", line)
newsfragments/3504.configuration (new file, 1 line)
@@ -0,0 +1 @@
+The ``[client]introducer.furl`` configuration item is now deprecated in favor of the ``private/introducers.yaml`` file.

newsfragments/3515.minor (new file, empty)

newsfragments/3520.minor (new file, empty)

newsfragments/3539.bugfix (new file, 1 line)
@@ -0,0 +1 @@
+Certain implementation-internal weakref KeyErrors are now handled and should no longer cause user-initiated operations to fail.

newsfragments/3547.minor (new file, empty)
@@ -3,7 +3,6 @@ from past.builtins import unicode
 import os, stat, time, weakref
 from base64 import urlsafe_b64encode
 from functools import partial
-from errno import ENOENT, EPERM
 
 # On Python 2 this will be the backported package:
 from configparser import NoSectionError
@@ -467,56 +466,17 @@ def create_introducer_clients(config, main_tub, _introducer_factory=None):
     # we return this list
     introducer_clients = []
 
-    introducers_yaml_filename = config.get_private_path("introducers.yaml")
-    introducers_filepath = FilePath(introducers_yaml_filename)
-
-    try:
-        with introducers_filepath.open() as f:
-            introducers_yaml = yamlutil.safe_load(f)
-            if introducers_yaml is None:
-                raise EnvironmentError(
-                    EPERM,
-                    "Can't read '{}'".format(introducers_yaml_filename),
-                    introducers_yaml_filename,
-                )
-            introducers = introducers_yaml.get("introducers", {})
-            log.msg(
-                "found {} introducers in private/introducers.yaml".format(
-                    len(introducers),
-                )
-            )
-    except EnvironmentError as e:
-        if e.errno != ENOENT:
-            raise
-        introducers = {}
-
-    if "default" in introducers.keys():
-        raise ValueError(
-            "'default' introducer furl cannot be specified in introducers.yaml;"
-            " please fix impossible configuration."
-        )
-
-    # read furl from tahoe.cfg
-    tahoe_cfg_introducer_furl = config.get_config("client", "introducer.furl", None)
-    if tahoe_cfg_introducer_furl == "None":
-        raise ValueError(
-            "tahoe.cfg has invalid 'introducer.furl = None':"
-            " to disable it, use 'introducer.furl ='"
-            " or omit the key entirely"
-        )
-    if tahoe_cfg_introducer_furl:
-        introducers[u'default'] = {'furl':tahoe_cfg_introducer_furl}
-
-    for petname, introducer in introducers.items():
-        introducer_cache_filepath = FilePath(config.get_private_path("introducer_{}_cache.yaml".format(petname)))
+    introducers = config.get_introducer_configuration()
+
+    for petname, (furl, cache_path) in introducers.items():
         ic = _introducer_factory(
             main_tub,
-            introducer['furl'].encode("ascii"),
+            furl.encode("ascii"),
             config.nickname,
             str(allmydata.__full_version__),
             str(_Client.OLDEST_SUPPORTED_VERSION),
             partial(_sequencer, config),
-            introducer_cache_filepath,
+            cache_path,
         )
         introducer_clients.append(ic)
     return introducer_clients
@@ -1100,7 +1060,7 @@ class _Client(node.Node, pollmixin.PollMixin):
         if accountfile:
             accountfile = self.config.get_config_path(accountfile)
         accounturl = self.config.get_config("sftpd", "accounts.url", None)
-        sftp_portstr = self.config.get_config("sftpd", "port", "8022")
+        sftp_portstr = self.config.get_config("sftpd", "port", "tcp:8022")
         pubkey_file = self.config.get_config("sftpd", "host_pubkey_file")
         privkey_file = self.config.get_config("sftpd", "host_privkey_file")
 
@@ -1975,6 +1975,8 @@ class Dispatcher(object):
 
 
 class SFTPServer(service.MultiService):
+    name = "frontend:sftp"
+
     def __init__(self, client, accountfile, accounturl,
                  sftp_portstr, pubkey_file, privkey_file):
         precondition(isinstance(accountfile, (unicode, NoneType)), accountfile)
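The new ``name`` attribute is what lets a parent ``MultiService`` look the SFTP frontend up by name (as the updated ``test_ftp_create`` does with ``getServiceNamed("frontend:sftp")``). A small, self-contained sketch of that Twisted pattern, using a made-up service name rather than code from this change::

    # Illustrative only: named children of a MultiService can be fetched by name.
    from twisted.application import service

    class Frontend(service.Service):
        name = "frontend:example"   # hypothetical name, mirroring "frontend:sftp"

    parent = service.MultiService()
    child = Frontend()
    child.setServiceParent(parent)  # registers the child under its name

    assert parent.getServiceNamed("frontend:example") is child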
@@ -20,6 +20,8 @@ import re
 import types
 import errno
 from base64 import b32decode, b32encode
+from errno import ENOENT, EPERM
+from warnings import warn
 
 import attr
 
@@ -41,6 +43,9 @@ from allmydata.util import fileutil, iputil
 from allmydata.util.fileutil import abspath_expanduser_unicode
 from allmydata.util.encodingutil import get_filesystem_encoding, quote_output
 from allmydata.util import configutil
+from allmydata.util.yamlutil import (
+    safe_load,
+)
 
 from . import (
     __full_version__,
@@ -482,6 +487,97 @@ class _Config(object):
             os.path.join(self._basedir, *args)
         )
 
+    def get_introducer_configuration(self):
+        """
+        Get configuration for introducers.
+
+        :return {unicode: (unicode, FilePath)}: A mapping from introducer
+            petname to a tuple of the introducer's fURL and local cache path.
+        """
+        introducers_yaml_filename = self.get_private_path("introducers.yaml")
+        introducers_filepath = FilePath(introducers_yaml_filename)
+
+        def get_cache_filepath(petname):
+            return FilePath(
+                self.get_private_path("introducer_{}_cache.yaml".format(petname)),
+            )
+
+        try:
+            with introducers_filepath.open() as f:
+                introducers_yaml = safe_load(f)
+                if introducers_yaml is None:
+                    raise EnvironmentError(
+                        EPERM,
+                        "Can't read '{}'".format(introducers_yaml_filename),
+                        introducers_yaml_filename,
+                    )
+                introducers = {
+                    petname: config["furl"]
+                    for petname, config
+                    in introducers_yaml.get("introducers", {}).items()
+                }
+                non_strs = list(
+                    k
+                    for k
+                    in introducers.keys()
+                    if not isinstance(k, str)
+                )
+                if non_strs:
+                    raise TypeError(
+                        "Introducer petnames {!r} should have been str".format(
+                            non_strs,
+                        ),
+                    )
+                non_strs = list(
+                    v
+                    for v
+                    in introducers.values()
+                    if not isinstance(v, str)
+                )
+                if non_strs:
+                    raise TypeError(
+                        "Introducer fURLs {!r} should have been str".format(
+                            non_strs,
+                        ),
+                    )
+                log.msg(
+                    "found {} introducers in {!r}".format(
+                        len(introducers),
+                        introducers_yaml_filename,
+                    )
+                )
+        except EnvironmentError as e:
+            if e.errno != ENOENT:
+                raise
+            introducers = {}
+
+        # supported the deprecated [client]introducer.furl item in tahoe.cfg
+        tahoe_cfg_introducer_furl = self.get_config("client", "introducer.furl", None)
+        if tahoe_cfg_introducer_furl == "None":
+            raise ValueError(
+                "tahoe.cfg has invalid 'introducer.furl = None':"
+                " to disable it omit the key entirely"
+            )
+        if tahoe_cfg_introducer_furl:
+            warn(
+                "tahoe.cfg [client]introducer.furl is deprecated; "
+                "use private/introducers.yaml instead.",
+                category=DeprecationWarning,
+                stacklevel=-1,
+            )
+            if "default" in introducers:
+                raise ValueError(
+                    "'default' introducer furl cannot be specified in tahoe.cfg and introducers.yaml;"
+                    " please fix impossible configuration."
+                )
+            introducers['default'] = tahoe_cfg_introducer_furl
+
+        return {
+            petname: (furl, get_cache_filepath(petname))
+            for (petname, furl)
+            in introducers.items()
+        }
+
+
 def create_tub_options(config):
     """
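A short, hedged sketch of how the new accessor is meant to be consumed (the base directory below is hypothetical; ``read_config`` is the existing loader in ``allmydata.node``)::

    # Sketch: list configured introducers via the new _Config method.
    from allmydata.node import read_config

    config = read_config("/path/to/node", u"")   # hypothetical node directory
    for petname, (furl, cache_path) in config.get_introducer_configuration().items():
        # petname comes from private/introducers.yaml, or "default" when the
        # deprecated [client]introducer.furl item is still present in tahoe.cfg.
        print(petname, furl, cache_path.path)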
@@ -66,9 +66,9 @@ class NodeMaker(object):
             memokey = b"I" + bigcap
         else:
             memokey = b"M" + bigcap
-        if memokey in self._node_cache:
+        try:
             node = self._node_cache[memokey]
-        else:
+        except KeyError:
             cap = uri.from_string(bigcap, deep_immutable=deep_immutable,
                                   name=name)
             node = self._create_from_single_cap(cap)
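The switch from a membership test to ``try``/``except KeyError`` matters because ``_node_cache`` is a weak-value cache: an entry can be collected between the ``in`` check and the lookup. A standalone illustration of the race-free pattern (not code from this change; the names are invented)::

    import weakref

    class _Node(object):
        pass

    cache = weakref.WeakValueDictionary()
    strong = _Node()
    cache["memokey"] = strong
    del strong            # the entry may now vanish whenever the GC runs

    try:
        node = cache["memokey"]       # single lookup: cannot race with itself
    except KeyError:
        node = _Node()                # rebuild and re-cache, as NodeMaker does
        cache["memokey"] = node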
@@ -4,14 +4,15 @@ import os, sys, urllib, textwrap
 import codecs
 from os.path import join
 
+from yaml import (
+    safe_dump,
+)
+
 # Python 2 compatibility
 from future.utils import PY2
 if PY2:
     from future.builtins import str  # noqa: F401
 
-# On Python 2 this will be the backported package:
-from configparser import NoSectionError
-
 from twisted.python import usage
 
 from allmydata.util.assertutil import precondition
@@ -115,24 +116,42 @@ class NoDefaultBasedirOptions(BasedirOptions):
 DEFAULT_ALIAS = u"tahoe"
 
 
+def write_introducer(basedir, petname, furl):
+    """
+    Overwrite the node's ``introducers.yaml`` with a file containing the given
+    introducer information.
+    """
+    if isinstance(furl, bytes):
+        furl = furl.decode("utf-8")
+    basedir.child(b"private").child(b"introducers.yaml").setContent(
+        safe_dump({
+            "introducers": {
+                petname: {
+                    "furl": furl,
+                },
+            },
+        }).encode("ascii"),
+    )
+
+
 def get_introducer_furl(nodedir, config):
     """
     :return: the introducer FURL for the given node (no matter if it's
         a client-type node or an introducer itself)
     """
-    try:
-        introducer_furl = config.get('client', 'introducer.furl')
-    except NoSectionError:
-        # we're not a client; maybe this is running *on* the introducer?
-        try:
-            with open(join(nodedir, "private", "introducer.furl"), "r") as f:
-                introducer_furl = f.read().strip()
-        except IOError:
-            raise Exception(
-                "Can't find introducer FURL in tahoe.cfg nor "
-                "{}/private/introducer.furl".format(nodedir)
-            )
-    return introducer_furl
+    for petname, (furl, cache) in config.get_introducer_configuration().items():
+        return furl
+
+    # We have no configured introducers. Maybe this is running *on* the
+    # introducer? Let's guess, sure why not.
+    try:
+        with open(join(nodedir, "private", "introducer.furl"), "r") as f:
+            return f.read().strip()
+    except IOError:
+        raise Exception(
+            "Can't find introducer FURL in tahoe.cfg nor "
+            "{}/private/introducer.furl".format(nodedir)
+        )
 
 
 def get_aliases(nodedir):
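A hedged usage sketch for the new helper (the directory below is hypothetical; ``write_introducer`` expects a ``FilePath`` whose ``private`` child already exists, as node creation normally arranges)::

    from twisted.python.filepath import FilePath
    from allmydata.scripts.common import write_introducer

    basedir = FilePath("/tmp/example-node")   # hypothetical node directory
    basedir.child("private").makedirs()       # normally created by "tahoe create-client"
    write_introducer(basedir, "default", "pb://example@tcp:127.0.0.1:1234/swissnum")

    # the helper serialises the mapping as YAML under private/introducers.yaml
    print(basedir.child("private").child("introducers.yaml").getContent().decode("ascii"))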
@@ -5,11 +5,20 @@ import json
 
 from twisted.internet import reactor, defer
 from twisted.python.usage import UsageError
-from allmydata.scripts.common import BasedirOptions, NoDefaultBasedirOptions
+from twisted.python.filepath import (
+    FilePath,
+)
+
+from allmydata.scripts.common import (
+    BasedirOptions,
+    NoDefaultBasedirOptions,
+    write_introducer,
+)
 from allmydata.scripts.default_nodedir import _default_nodedir
 from allmydata.util.assertutil import precondition
 from allmydata.util.encodingutil import listdir_unicode, argv_to_unicode, quote_local_unicode_path, get_io_encoding
 from allmydata.util import fileutil, i2p_provider, iputil, tor_provider
 
 from wormhole import wormhole
 
@@ -299,12 +308,15 @@ def write_node_config(c, config):
 
 
 def write_client_config(c, config):
-    # note, config can be a plain dict, it seems -- see
-    # test_configutil.py in test_create_client_config
+    introducer = config.get("introducer", None)
+    if introducer is not None:
+        write_introducer(
+            FilePath(config["basedir"]),
+            "default",
+            introducer,
+        )
+
     c.write("[client]\n")
-    c.write("# Which services should this client connect to?\n")
-    introducer = config.get("introducer", None) or ""
-    c.write("introducer.furl = %s\n" % introducer)
     c.write("helper.furl =\n")
     c.write("#stats_gatherer.furl =\n")
     c.write("\n")
@@ -437,8 +449,11 @@ def create_node(config):
 
         print("Node created in %s" % quote_local_unicode_path(basedir), file=out)
         tahoe_cfg = quote_local_unicode_path(os.path.join(basedir, "tahoe.cfg"))
+        introducers_yaml = quote_local_unicode_path(
+            os.path.join(basedir, "private", "introducers.yaml"),
+        )
         if not config.get("introducer", ""):
-            print(" Please set [client]introducer.furl= in %s!" % tahoe_cfg, file=out)
+            print(" Please add introducers to %s!" % (introducers_yaml,), file=out)
             print(" The node cannot connect to a grid without it.", file=out)
         if not config.get("nickname", ""):
             print(" Please set [node]nickname= in %s" % tahoe_cfg, file=out)
@@ -1,16 +1,15 @@
 from __future__ import print_function
 
 import json
-from os.path import join
 
 from twisted.python import usage
 from twisted.internet import defer, reactor
 
 from wormhole import wormhole
 
-from allmydata.util import configutil
 from allmydata.util.encodingutil import argv_to_abspath
 from allmydata.scripts.common import get_default_nodedir, get_introducer_furl
+from allmydata.node import read_config
 
 
 class InviteOptions(usage.Options):
@@ -77,7 +76,7 @@ def invite(options):
         basedir = argv_to_abspath(options.parent['node-directory'])
     else:
         basedir = get_default_nodedir()
-    config = configutil.get_config(join(basedir, 'tahoe.cfg'))
+    config = read_config(basedir, u"")
     out = options.stdout
     err = options.stderr
 
@@ -8,6 +8,9 @@ if PY2:
     from future.builtins import str  # noqa: F401
 from six.moves import cStringIO as StringIO
 
+from twisted.python.filepath import (
+    FilePath,
+)
 from twisted.internet import defer, reactor, protocol, error
 from twisted.application import service, internet
 from twisted.web import client as tw_client
@@ -21,6 +24,10 @@ from allmydata.util import fileutil, pollmixin
 from allmydata.util.fileutil import abspath_expanduser_unicode
 from allmydata.util.encodingutil import get_filesystem_encoding
 
+from allmydata.scripts.common import (
+    write_introducer,
+)
+
 class StallableHTTPGetterDiscarder(tw_client.HTTPPageGetter, object):
     full_speed_ahead = False
     _bytes_so_far = 0
@@ -180,16 +187,18 @@ class SystemFramework(pollmixin.PollMixin):
         self.introducer_furl = self.introducer.introducer_url
 
     def make_nodes(self):
+        root = FilePath(self.testdir)
         self.nodes = []
         for i in range(self.numnodes):
-            nodedir = os.path.join(self.testdir, "node%d" % i)
-            os.mkdir(nodedir)
-            f = open(os.path.join(nodedir, "tahoe.cfg"), "w")
-            f.write("[client]\n"
-                    "introducer.furl = %s\n"
-                    "shares.happy = 1\n"
-                    "[storage]\n"
-                    % (self.introducer_furl,))
+            nodedir = root.child("node%d" % (i,))
+            private = nodedir.child("private")
+            private.makedirs()
+            write_introducer(nodedir, "default", self.introducer_url)
+            config = (
+                "[client]\n"
+                "shares.happy = 1\n"
+                "[storage]\n"
+            )
             # the only tests for which we want the internal nodes to actually
             # retain shares are the ones where somebody's going to download
            # them.
@@ -200,13 +209,13 @@ class SystemFramework(pollmixin.PollMixin):
             # for these tests, we tell the storage servers to pretend to
             # accept shares, but really just throw them out, since we're
             # only testing upload and not download.
-            f.write("debug_discard = true\n")
+            config += "debug_discard = true\n"
             if self.mode in ("receive",):
                 # for this mode, the client-under-test gets all the shares,
                 # so our internal nodes can refuse requests
-                f.write("readonly = true\n")
-            f.close()
-            c = client.Client(basedir=nodedir)
+                config += "readonly = true\n"
+            nodedir.child("tahoe.cfg").setContent(config)
+            c = client.Client(basedir=nodedir.path)
             c.setServiceParent(self)
             self.nodes.append(c)
         # the peers will start running, eventually they will connect to each
@@ -235,16 +244,16 @@ this file are ignored.
         quiet = StringIO()
         create_node.create_node({'basedir': clientdir}, out=quiet)
         log.msg("DONE MAKING CLIENT")
+        write_introducer(clientdir, "default", self.introducer_furl)
        # now replace tahoe.cfg
         # set webport=0 and then ask the node what port it picked.
         f = open(os.path.join(clientdir, "tahoe.cfg"), "w")
         f.write("[node]\n"
                 "web.port = tcp:0:interface=127.0.0.1\n"
                 "[client]\n"
-                "introducer.furl = %s\n"
                 "shares.happy = 1\n"
                 "[storage]\n"
-                % (self.introducer_furl,))
+                )
 
         if self.mode in ("upload-self", "receive"):
             # accept and store shares, to trigger the memory consumption bugs
@@ -8,7 +8,9 @@ from twisted.internet import defer
 from ..common_util import run_cli
 from ..no_network import GridTestMixin
 from .common import CLITestMixin
+from ...client import (
+    read_config,
+)
 
 class _FakeWormhole(object):
 
@@ -81,9 +83,19 @@ class Join(GridTestMixin, CLITestMixin, unittest.TestCase):
         )
 
         self.assertEqual(0, rc)
 
+        config = read_config(node_dir, u"")
+        self.assertIn(
+            "pb://foo",
+            set(
+                furl
+                for (furl, cache)
+                in config.get_introducer_configuration().values()
+            ),
+        )
+
         with open(join(node_dir, 'tahoe.cfg'), 'r') as f:
             config = f.read()
-        self.assertIn("pb://foo", config)
         self.assertIn(u"somethinghopefullyunique", config)
 
     @defer.inlineCallbacks
@@ -81,6 +81,9 @@ from allmydata.client import
     config_from_string,
     create_client_from_config,
 )
+from allmydata.scripts.common import (
+    write_introducer,
+)
 
 from ..crypto import (
     ed25519,
@@ -221,8 +224,8 @@ class UseNode(object):
     """
     plugin_config = attr.ib()
     storage_plugin = attr.ib()
-    basedir = attr.ib()
-    introducer_furl = attr.ib()
+    basedir = attr.ib(validator=attr.validators.instance_of(FilePath))
+    introducer_furl = attr.ib(validator=attr.validators.instance_of(bytes))
     node_config = attr.ib(default=attr.Factory(dict))
 
     config = attr.ib(default=None)
@@ -246,6 +249,11 @@ class UseNode(object):
             config=format_config_items(self.plugin_config),
         )
 
+        write_introducer(
+            self.basedir,
+            "default",
+            self.introducer_furl,
+        )
         self.config = config_from_string(
             self.basedir.asTextMode().path,
             "tub.port",
@@ -254,11 +262,9 @@ class UseNode(object):
 {node_config}
 
 [client]
-introducer.furl = {furl}
 storage.plugins = {storage_plugin}
 {plugin_config_section}
 """.format(
-                furl=self.introducer_furl,
                 storage_plugin=self.storage_plugin,
                 node_config=format_config_items(self.node_config),
                 plugin_config_section=plugin_config_section,
src/allmydata/test/data/openssh-rsa-2048.pub.txt (new file, 1 line)
@@ -0,0 +1 @@
+ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDx5JfaPwE2wfXIQcmlGte9EPAbrTmHPGOF/PuZ71XPa3mZTHMQQuc959gmLxupmcc5o4jYe8VTwT6bbNl6YM+HmCvL3XVH0BqdM2lpKCTB/WzSAyFUv8gSjQVXekRm9wF69tZkPrudqutTLhqXU5ESiUzfhU+CxHQW+kAf10Yd9R68V1f8jkuWjEoeVfCltj7O5fRlpouoTXn83MUAXB3J/wDjpjnjp2PxvXL2x5aCHtzd1WCGEmtWbHZvRA1a0EE233zfXNHg4xLd3ycUqAxoRlCcC230itUBXtr4qgDMzRdsL+HGWrcJ+4yezlQj+l8mc7vi5shNT7HDRfvi/rE7 exarkun@baryon

src/allmydata/test/data/openssh-rsa-2048.txt (new file, 27 lines)
@@ -0,0 +1,27 @@
+-----BEGIN OPENSSH PRIVATE KEY-----
+b3BlbnNzaC1rZXktdjEAAAAABG5vbmUAAAAEbm9uZQAAAAAAAAABAAABFwAAAAdzc2gtcn
+NhAAAAAwEAAQAAAQEA8eSX2j8BNsH1yEHJpRrXvRDwG605hzxjhfz7me9Vz2t5mUxzEELn
+PefYJi8bqZnHOaOI2HvFU8E+m2zZemDPh5gry911R9AanTNpaSgkwf1s0gMhVL/IEo0FV3
+pEZvcBevbWZD67narrUy4al1OREolM34VPgsR0FvpAH9dGHfUevFdX/I5LloxKHlXwpbY+
+zuX0ZaaLqE15/NzFAFwdyf8A46Y546dj8b1y9seWgh7c3dVghhJrVmx2b0QNWtBBNt9831
+zR4OMS3d8nFKgMaEZQnAtt9IrVAV7a+KoAzM0XbC/hxlq3CfuMns5UI/pfJnO74ubITU+x
+w0X74v6xOwAAA8gG6fYoBun2KAAAAAdzc2gtcnNhAAABAQDx5JfaPwE2wfXIQcmlGte9EP
+AbrTmHPGOF/PuZ71XPa3mZTHMQQuc959gmLxupmcc5o4jYe8VTwT6bbNl6YM+HmCvL3XVH
+0BqdM2lpKCTB/WzSAyFUv8gSjQVXekRm9wF69tZkPrudqutTLhqXU5ESiUzfhU+CxHQW+k
+Af10Yd9R68V1f8jkuWjEoeVfCltj7O5fRlpouoTXn83MUAXB3J/wDjpjnjp2PxvXL2x5aC
+Htzd1WCGEmtWbHZvRA1a0EE233zfXNHg4xLd3ycUqAxoRlCcC230itUBXtr4qgDMzRdsL+
+HGWrcJ+4yezlQj+l8mc7vi5shNT7HDRfvi/rE7AAAAAwEAAQAAAQBc8ukC/RjbULbAJ79z
+SRhDV2HcULj9ZVAc6XRI13XSyUqlhIHmar7uw8sECTAJAMVUOanY/d56a5RCJxZ+dvrn8K
+pLoSJy4N2JMHs95CYTwOzy2i8RoMwhjLzTu3DTW/DerkD9rjlrwYTBpsKjCYKCa+31KgW+
+ivzM44aGdbNEyO+yHaxdcyEr3OLcRMppgZmwTieFnG053lCP5XyYRQmZ1a78G6WOzpOgbO
+2N6Z1sbEqTMVd3oxFZAbmqA8kE4jLJzRcso/SSK5NDs22JzMfxByJQSlitWzDDvHdWpQpy
+8C6Eu7+48ataLI68VOOXuDWDy9Dck0ev89u7Z4vNLWBhAAAAgAndOZZ0C179Um6sn6gmfM
+0ttXEaSIqYNGRhkoYqn9vvw03bOMbSnqdEJiwFhbE/rWv7PypB5MeY7tRoCyBMWsUYj0pA
+HKSl68diLr5g5EOIRGAWu8e//7T2HgZKOo+VaG1IXgmb7PUoAJ6Tzsmb4jdnYfg+BP/TDd
+e9yCcoiT2fAAAAgQD6T7Kr6ECg0ME8vt/ixsjKdA2zS9SIHyjCMXbdMv1Ok1hkr5rRWbbZ
+jm79fF+a8pOQUg30Qw2JUx7II50akt2xL6zesGDDUcOHD2GE/B6Ftji53G3fwWZCqeQ5sD
+YP25qAWlrqDBGJvF+hkEdlceS8etYJ3XWXjNIYwfR7frQvkQAAAIEA92Pq3FWH63TS3Lqe
+mQjhfNV75tU0AwENG+xlI1g0nQb7Qsdbm6rIg6XqewUfw03Q+/AqPvwG/1mbyVF7jRZ+qw
+cl69yM70c9qY74GHjIIOOcC8Kgv29LQrm/VqVp0Lesn5RA8SIiLcMfyYBTEX8V9VY99Zkd
+v6WwRr4XK1bPRgsAAAAOZXhhcmt1bkBiYXJ5b24BAgMEBQ==
+-----END OPENSSH PRIVATE KEY-----
src/allmydata/test/strategies.py (new file, 111 lines)
@@ -0,0 +1,111 @@
+"""
+Hypothesis strategies use for testing Tahoe-LAFS.
+"""
+
+from hypothesis.strategies import (
+    one_of,
+    builds,
+    binary,
+)
+
+from ..uri import (
+    WriteableSSKFileURI,
+    WriteableMDMFFileURI,
+    DirectoryURI,
+    MDMFDirectoryURI,
+)
+
+def write_capabilities():
+    """
+    Build ``IURI`` providers representing all kinds of write capabilities.
+    """
+    return one_of([
+        ssk_capabilities(),
+        mdmf_capabilities(),
+        dir2_capabilities(),
+        dir2_mdmf_capabilities(),
+    ])
+
+
+def ssk_capabilities():
+    """
+    Build ``WriteableSSKFileURI`` instances.
+    """
+    return builds(
+        WriteableSSKFileURI,
+        ssk_writekeys(),
+        ssk_fingerprints(),
+    )
+
+
+def _writekeys(size=16):
+    """
+    Build ``bytes`` representing write keys.
+    """
+    return binary(min_size=size, max_size=size)
+
+
+def ssk_writekeys():
+    """
+    Build ``bytes`` representing SSK write keys.
+    """
+    return _writekeys()
+
+
+def _fingerprints(size=32):
+    """
+    Build ``bytes`` representing fingerprints.
+    """
+    return binary(min_size=size, max_size=size)
+
+
+def ssk_fingerprints():
+    """
+    Build ``bytes`` representing SSK fingerprints.
+    """
+    return _fingerprints()
+
+
+def mdmf_capabilities():
+    """
+    Build ``WriteableMDMFFileURI`` instances.
+    """
+    return builds(
+        WriteableMDMFFileURI,
+        mdmf_writekeys(),
+        mdmf_fingerprints(),
+    )
+
+
+def mdmf_writekeys():
+    """
+    Build ``bytes`` representing MDMF write keys.
+    """
+    return _writekeys()
+
+
+def mdmf_fingerprints():
+    """
+    Build ``bytes`` representing MDMF fingerprints.
+    """
+    return _fingerprints()
+
+
+def dir2_capabilities():
+    """
+    Build ``DirectoryURI`` instances.
+    """
+    return builds(
+        DirectoryURI,
+        ssk_capabilities(),
+    )
+
+
+def dir2_mdmf_capabilities():
+    """
+    Build ``MDMFDirectoryURI`` instances.
+    """
+    return builds(
+        MDMFDirectoryURI,
+        mdmf_capabilities(),
+    )
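A brief, hypothetical sketch of how these strategies could drive a Hypothesis property; the round-trip check below is illustrative only and is not a test from this change::

    from hypothesis import given

    from allmydata import uri
    from allmydata.test.strategies import write_capabilities

    @given(cap=write_capabilities())
    def test_roundtrip(cap):
        # every generated write capability should survive a string round-trip
        serialized = cap.to_string()
        assert uri.from_string(serialized).to_string() == serialized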
@ -1,5 +1,4 @@
|
|||||||
import os, sys
|
import os, sys
|
||||||
import mock
|
|
||||||
from functools import (
|
from functools import (
|
||||||
partial,
|
partial,
|
||||||
)
|
)
|
||||||
@ -12,6 +11,15 @@ from fixtures import (
|
|||||||
Fixture,
|
Fixture,
|
||||||
TempDir,
|
TempDir,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
from hypothesis import (
|
||||||
|
given,
|
||||||
|
)
|
||||||
|
from hypothesis.strategies import (
|
||||||
|
sampled_from,
|
||||||
|
booleans,
|
||||||
|
)
|
||||||
|
|
||||||
from eliot.testing import (
|
from eliot.testing import (
|
||||||
capture_logging,
|
capture_logging,
|
||||||
assertHasAction,
|
assertHasAction,
|
||||||
@ -39,6 +47,9 @@ from testtools.twistedsupport import (
|
|||||||
import allmydata
|
import allmydata
|
||||||
import allmydata.util.log
|
import allmydata.util.log
|
||||||
|
|
||||||
|
from allmydata.nodemaker import (
|
||||||
|
NodeMaker,
|
||||||
|
)
|
||||||
from allmydata.node import OldConfigError, UnescapedHashError, create_node_dir
|
from allmydata.node import OldConfigError, UnescapedHashError, create_node_dir
|
||||||
from allmydata.frontends.auth import NeedRootcapLookupScheme
|
from allmydata.frontends.auth import NeedRootcapLookupScheme
|
||||||
from allmydata import client
|
from allmydata import client
|
||||||
@ -55,11 +66,15 @@ from allmydata.util import (
|
|||||||
from allmydata.util.fileutil import abspath_expanduser_unicode
|
from allmydata.util.fileutil import abspath_expanduser_unicode
|
||||||
from allmydata.interfaces import IFilesystemNode, IFileNode, \
|
from allmydata.interfaces import IFilesystemNode, IFileNode, \
|
||||||
IImmutableFileNode, IMutableFileNode, IDirectoryNode
|
IImmutableFileNode, IMutableFileNode, IDirectoryNode
|
||||||
|
from allmydata.scripts.common import (
|
||||||
|
write_introducer,
|
||||||
|
)
|
||||||
from foolscap.api import flushEventualQueue
|
from foolscap.api import flushEventualQueue
|
||||||
import allmydata.test.common_util as testutil
|
import allmydata.test.common_util as testutil
|
||||||
from .common import (
|
from .common import (
|
||||||
EMPTY_CLIENT_CONFIG,
|
EMPTY_CLIENT_CONFIG,
|
||||||
SyncTestCase,
|
SyncTestCase,
|
||||||
|
AsyncBrokenTestCase,
|
||||||
UseTestPlugins,
|
UseTestPlugins,
|
||||||
MemoryIntroducerClient,
|
MemoryIntroducerClient,
|
||||||
get_published_announcements,
|
get_published_announcements,
|
||||||
@ -69,16 +84,13 @@ from .matchers import (
|
|||||||
matches_storage_announcement,
|
matches_storage_announcement,
|
||||||
matches_furl,
|
matches_furl,
|
||||||
)
|
)
|
||||||
|
from .strategies import (
|
||||||
|
write_capabilities,
|
||||||
|
)
|
||||||
|
|
||||||
SOME_FURL = "pb://abcde@nowhere/fake"
|
SOME_FURL = "pb://abcde@nowhere/fake"
|
||||||
|
|
||||||
BASECONFIG = ("[client]\n"
|
BASECONFIG = "[client]\n"
|
||||||
"introducer.furl = \n"
|
|
||||||
)
|
|
||||||
|
|
||||||
BASECONFIG_I = ("[client]\n"
|
|
||||||
"introducer.furl = %s\n"
|
|
||||||
)
|
|
||||||
|
|
||||||
class Basic(testutil.ReallyEqualMixin, unittest.TestCase):
|
class Basic(testutil.ReallyEqualMixin, unittest.TestCase):
|
||||||
def test_loadable(self):
|
def test_loadable(self):
|
||||||
@ -120,14 +132,14 @@ class Basic(testutil.ReallyEqualMixin, unittest.TestCase):

         def write_config(s):
             config = ("[client]\n"
-                      "introducer.furl = %s\n" % s)
+                      "helper.furl = %s\n" % s)
             fileutil.write(os.path.join(basedir, "tahoe.cfg"), config)

         for s in should_fail:
             write_config(s)
             with self.assertRaises(UnescapedHashError) as ctx:
                 yield client.create_client(basedir)
-            self.assertIn("[client]introducer.furl", str(ctx.exception))
+            self.assertIn("[client]helper.furl", str(ctx.exception))

     def test_unreadable_config(self):
         if sys.platform == "win32":
@ -419,19 +431,32 @@ class Basic(testutil.ReallyEqualMixin, unittest.TestCase):
         """
         configuration for sftpd results in it being started
         """
+        root = FilePath(self.mktemp())
+        root.makedirs()
+        accounts = root.child(b"sftp-accounts")
+        accounts.touch()
+
+        data = FilePath(__file__).sibling(b"data")
+        privkey = data.child(b"openssh-rsa-2048.txt")
+        pubkey = data.child(b"openssh-rsa-2048.pub.txt")
+
         basedir = u"client.Basic.test_ftp_create"
         create_node_dir(basedir, "testing")
         with open(os.path.join(basedir, "tahoe.cfg"), "w") as f:
-            f.write(
+            f.write((
                 '[sftpd]\n'
                 'enabled = true\n'
-                'accounts.file = foo\n'
-                'host_pubkey_file = pubkey\n'
-                'host_privkey_file = privkey\n'
-            )
-        with mock.patch('allmydata.frontends.sftpd.SFTPServer') as p:
-            yield client.create_client(basedir)
-        self.assertTrue(p.called)
+                'accounts.file = {}\n'
+                'host_pubkey_file = {}\n'
+                'host_privkey_file = {}\n'
+            ).format(accounts.path, pubkey.path, privkey.path))
+        client_node = yield client.create_client(
+            basedir,
+        )
+        sftp = client_node.getServiceNamed("frontend:sftp")
+        self.assertIs(sftp.parent, client_node)


     @defer.inlineCallbacks
     def test_ftp_auth_keyfile(self):
@ -665,12 +690,13 @@ class AnonymousStorage(SyncTestCase):
         """
         If anonymous storage access is enabled then the client announces it.
         """
-        basedir = self.id()
-        os.makedirs(basedir + b"/private")
+        basedir = FilePath(self.id())
+        basedir.child("private").makedirs()
+        write_introducer(basedir, "someintroducer", SOME_FURL)
         config = client.config_from_string(
-            basedir,
+            basedir.path,
             "tub.port",
-            BASECONFIG_I % (SOME_FURL,) + (
+            BASECONFIG + (
                 "[storage]\n"
                 "enabled = true\n"
                 "anonymous = true\n"
@ -684,7 +710,7 @@ class AnonymousStorage(SyncTestCase):
             get_published_announcements(node),
             MatchesListwise([
                 matches_storage_announcement(
-                    basedir,
+                    basedir.path,
                     anonymous=True,
                 ),
             ]),
@ -696,12 +722,13 @@ class AnonymousStorage(SyncTestCase):
         If anonymous storage access is disabled then the client does not announce
         it nor does it write a fURL for it to beneath the node directory.
         """
-        basedir = self.id()
-        os.makedirs(basedir + b"/private")
+        basedir = FilePath(self.id())
+        basedir.child("private").makedirs()
+        write_introducer(basedir, "someintroducer", SOME_FURL)
         config = client.config_from_string(
-            basedir,
+            basedir.path,
             "tub.port",
-            BASECONFIG_I % (SOME_FURL,) + (
+            BASECONFIG + (
                 "[storage]\n"
                 "enabled = true\n"
                 "anonymous = false\n"
@ -715,7 +742,7 @@ class AnonymousStorage(SyncTestCase):
             get_published_announcements(node),
             MatchesListwise([
                 matches_storage_announcement(
-                    basedir,
+                    basedir.path,
                     anonymous=False,
                 ),
             ]),
@ -733,12 +760,12 @@ class AnonymousStorage(SyncTestCase):
         possible to reach the anonymous storage server via the originally
         published fURL.
         """
-        basedir = self.id()
-        os.makedirs(basedir + b"/private")
+        basedir = FilePath(self.id())
+        basedir.child("private").makedirs()
         enabled_config = client.config_from_string(
-            basedir,
+            basedir.path,
             "tub.port",
-            BASECONFIG_I % (SOME_FURL,) + (
+            BASECONFIG + (
                 "[storage]\n"
                 "enabled = true\n"
                 "anonymous = true\n"
@ -760,9 +787,9 @@ class AnonymousStorage(SyncTestCase):
         )

         disabled_config = client.config_from_string(
-            basedir,
+            basedir.path,
             "tub.port",
-            BASECONFIG_I % (SOME_FURL,) + (
+            BASECONFIG + (
                 "[storage]\n"
                 "enabled = true\n"
                 "anonymous = false\n"
@ -782,8 +809,8 @@ class IntroducerClients(unittest.TestCase):

     def test_invalid_introducer_furl(self):
         """
-        An introducer.furl of 'None' is invalid and causes
-        create_introducer_clients to fail.
+        An introducer.furl of 'None' in the deprecated [client]introducer.furl
+        field is invalid and causes `create_introducer_clients` to fail.
         """
         cfg = (
             "[client]\n"
@ -948,20 +975,28 @@ class Run(unittest.TestCase, testutil.StallMixin):

     @defer.inlineCallbacks
     def test_loadable(self):
-        basedir = "test_client.Run.test_loadable"
-        os.mkdir(basedir)
+        """
+        A configuration consisting only of an introducer can be turned into a
+        client node.
+        """
+        basedir = FilePath("test_client.Run.test_loadable")
+        private = basedir.child("private")
+        private.makedirs()
         dummy = "pb://wl74cyahejagspqgy4x5ukrvfnevlknt@127.0.0.1:58889/bogus"
-        fileutil.write(os.path.join(basedir, "tahoe.cfg"), BASECONFIG_I % dummy)
-        fileutil.write(os.path.join(basedir, client._Client.EXIT_TRIGGER_FILE), "")
-        yield client.create_client(basedir)
+        write_introducer(basedir, "someintroducer", dummy)
+        basedir.child("tahoe.cfg").setContent(BASECONFIG)
+        basedir.child(client._Client.EXIT_TRIGGER_FILE).touch()
+        yield client.create_client(basedir.path)

     @defer.inlineCallbacks
     def test_reloadable(self):
-        basedir = "test_client.Run.test_reloadable"
-        os.mkdir(basedir)
+        basedir = FilePath("test_client.Run.test_reloadable")
+        private = basedir.child("private")
+        private.makedirs()
         dummy = "pb://wl74cyahejagspqgy4x5ukrvfnevlknt@127.0.0.1:58889/bogus"
-        fileutil.write(os.path.join(basedir, "tahoe.cfg"), BASECONFIG_I % dummy)
-        c1 = yield client.create_client(basedir)
+        write_introducer(basedir, "someintroducer", dummy)
+        basedir.child("tahoe.cfg").setContent(BASECONFIG)
+        c1 = yield client.create_client(basedir.path)
         c1.setServiceParent(self.sparent)

         # delay to let the service start up completely. I'm not entirely sure
@ -983,11 +1018,102 @@ class Run(unittest.TestCase, testutil.StallMixin):
         # also change _check_exit_trigger to use it instead of a raw
         # reactor.stop, also instrument the shutdown event in an
         # attribute that we can check.)
-        c2 = yield client.create_client(basedir)
+        c2 = yield client.create_client(basedir.path)
         c2.setServiceParent(self.sparent)
         yield c2.disownServiceParent()

-class NodeMaker(testutil.ReallyEqualMixin, unittest.TestCase):
+class NodeMakerTests(testutil.ReallyEqualMixin, AsyncBrokenTestCase):

+    def _make_node_maker(self, mode, writecap, deep_immutable):
+        """
+        Create a callable which can create an ``IFilesystemNode`` provider for the
+        given cap.
+
+        :param unicode mode: The read/write combination to pass to
+            ``NodeMaker.create_from_cap``.  If it contains ``u"r"`` then a
+            readcap will be passed in.  If it contains ``u"w"`` then a
+            writecap will be passed in.
+
+        :param IURI writecap: The capability for which to create a node.
+
+        :param bool deep_immutable: Whether to request a "deep immutable" node
+            which forces the result to be an immutable ``IFilesystemNode`` (I
+            think -exarkun).
+        """
+        if writecap.is_mutable():
+            # It's just not a valid combination to have a mutable alongside
+            # deep_immutable = True.  It's easier to fix deep_immutable than
+            # writecap to clear up this conflict.
+            deep_immutable = False
+
+        if "r" in mode:
+            readcap = writecap.get_readonly().to_string()
+        else:
+            readcap = None
+        if "w" in mode:
+            writecap = writecap.to_string()
+        else:
+            writecap = None
+
+        nm = NodeMaker(
+            storage_broker=None,
+            secret_holder=None,
+            history=None,
+            uploader=None,
+            terminator=None,
+            default_encoding_parameters={u"k": 1, u"n": 1},
+            mutable_file_default=None,
+            key_generator=None,
+            blacklist=None,
+        )
+        return partial(
+            nm.create_from_cap,
+            writecap,
+            readcap,
+            deep_immutable,
+        )
+
+    @given(
+        mode=sampled_from(["w", "r", "rw"]),
+        writecap=write_capabilities(),
+        deep_immutable=booleans(),
+    )
+    def test_cached_result(self, mode, writecap, deep_immutable):
+        """
+        ``NodeMaker.create_from_cap`` returns the same object when called with the
+        same arguments.
+        """
+        make_node = self._make_node_maker(mode, writecap, deep_immutable)
+        original = make_node()
+        additional = make_node()
+
+        self.assertThat(
+            original,
+            Is(additional),
+        )
+
+    @given(
+        mode=sampled_from(["w", "r", "rw"]),
+        writecap=write_capabilities(),
+        deep_immutable=booleans(),
+    )
+    def test_cache_expired(self, mode, writecap, deep_immutable):
+        """
+        After the node object returned by an earlier call to
+        ``NodeMaker.create_from_cap`` has been garbage collected, a new call
+        to ``NodeMaker.create_from_cap`` returns a node object, maybe even a
+        new one although we can't really prove it.
+        """
+        make_node = self._make_node_maker(mode, writecap, deep_immutable)
+        make_node()
+        additional = make_node()
+        self.assertThat(
+            additional,
+            AfterPreprocessing(
+                lambda node: node.get_readonly_uri(),
+                Equals(writecap.get_readonly().to_string()),
+            ),
+        )
+
     @defer.inlineCallbacks
     def test_maker(self):
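The two properties above only hold if the node cache holds weak references: the same arguments return the cached node while something still references it, and a collected node is transparently rebuilt. Here is a generic illustration of that behaviour with ``weakref.WeakValueDictionary``; it illustrates the idea only and is not a claim about ``NodeMaker``'s exact internals.

    # Generic weak-cache sketch; "CachingFactory" and the cap strings are stand-ins.
    import gc
    import weakref

    class Node(object):
        def __init__(self, cap):
            self.cap = cap

    class CachingFactory(object):
        def __init__(self):
            self._cache = weakref.WeakValueDictionary()

        def create_from_cap(self, cap):
            node = self._cache.get(cap)
            if node is None:
                node = Node(cap)
                self._cache[cap] = node
            return node

    factory = CachingFactory()
    a = factory.create_from_cap("URI:EXAMPLE:1")
    b = factory.create_from_cap("URI:EXAMPLE:1")
    assert a is b                      # cached while a live reference exists
    del a, b
    gc.collect()
    c = factory.create_from_cap("URI:EXAMPLE:1")
    assert c.cap == "URI:EXAMPLE:1"    # rebuilt after collection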
@ -1122,12 +1248,18 @@ class StorageAnnouncementTests(SyncTestCase):
     """
     def setUp(self):
         super(StorageAnnouncementTests, self).setUp()
-        self.basedir = self.useFixture(TempDir()).path
-        create_node_dir(self.basedir, u"")
+        self.basedir = FilePath(self.useFixture(TempDir()).path)
+        create_node_dir(self.basedir.path, u"")
+        # Write an introducer configuration or we can't observer
+        # announcements.
+        write_introducer(self.basedir, "someintroducer", SOME_FURL)


     def get_config(self, storage_enabled, more_storage="", more_sections=""):
         return """
+[client]
+# Empty
+
 [node]
 tub.location = tcp:192.0.2.0:1234

@ -1135,9 +1267,6 @@ tub.location = tcp:192.0.2.0:1234
 enabled = {storage_enabled}
 {more_storage}

-[client]
-introducer.furl = pb://abcde@nowhere/fake
-
 {more_sections}
 """.format(
     storage_enabled=storage_enabled,
@ -1151,7 +1280,7 @@ introducer.furl = pb://abcde@nowhere/fake
         No storage announcement is published if storage is not enabled.
         """
         config = client.config_from_string(
-            self.basedir,
+            self.basedir.path,
             "tub.port",
             self.get_config(storage_enabled=False),
         )
@ -1173,7 +1302,7 @@ introducer.furl = pb://abcde@nowhere/fake
         storage is enabled.
         """
         config = client.config_from_string(
-            self.basedir,
+            self.basedir.path,
             "tub.port",
             self.get_config(storage_enabled=True),
         )
@ -1190,7 +1319,7 @@ introducer.furl = pb://abcde@nowhere/fake
             # Match the following list (of one element) ...
             MatchesListwise([
                 # The only element in the list ...
-                matches_storage_announcement(self.basedir),
+                matches_storage_announcement(self.basedir.path),
             ]),
         )),
     )
@ -1205,7 +1334,7 @@ introducer.furl = pb://abcde@nowhere/fake

         value = u"thing"
         config = client.config_from_string(
-            self.basedir,
+            self.basedir.path,
             "tub.port",
             self.get_config(
                 storage_enabled=True,
@ -1225,7 +1354,7 @@ introducer.furl = pb://abcde@nowhere/fake
             get_published_announcements,
             MatchesListwise([
                 matches_storage_announcement(
-                    self.basedir,
+                    self.basedir.path,
                     options=[
                         matches_dummy_announcement(
                             u"tahoe-lafs-dummy-v1",
@ -1246,7 +1375,7 @@ introducer.furl = pb://abcde@nowhere/fake
         self.useFixture(UseTestPlugins())

         config = client.config_from_string(
-            self.basedir,
+            self.basedir.path,
             "tub.port",
             self.get_config(
                 storage_enabled=True,
@ -1268,7 +1397,7 @@ introducer.furl = pb://abcde@nowhere/fake
             get_published_announcements,
             MatchesListwise([
                 matches_storage_announcement(
-                    self.basedir,
+                    self.basedir.path,
                     options=[
                         matches_dummy_announcement(
                             u"tahoe-lafs-dummy-v1",
@ -1294,7 +1423,7 @@ introducer.furl = pb://abcde@nowhere/fake
         self.useFixture(UseTestPlugins())

         config = client.config_from_string(
-            self.basedir,
+            self.basedir.path,
             "tub.port",
             self.get_config(
                 storage_enabled=True,
@ -1330,7 +1459,7 @@ introducer.furl = pb://abcde@nowhere/fake
         self.useFixture(UseTestPlugins())

         config = client.config_from_string(
-            self.basedir,
+            self.basedir.path,
             "tub.port",
             self.get_config(
                 storage_enabled=True,
@ -1346,7 +1475,7 @@ introducer.furl = pb://abcde@nowhere/fake
             get_published_announcements,
             MatchesListwise([
                 matches_storage_announcement(
-                    self.basedir,
+                    self.basedir.path,
                     options=[
                         matches_dummy_announcement(
                             u"tahoe-lafs-dummy-v1",
@ -1368,7 +1497,7 @@ introducer.furl = pb://abcde@nowhere/fake
         self.useFixture(UseTestPlugins())

         config = client.config_from_string(
-            self.basedir,
+            self.basedir.path,
             "tub.port",
             self.get_config(
                 storage_enabled=True,
@ -1395,7 +1524,7 @@ introducer.furl = pb://abcde@nowhere/fake
         available on the system.
         """
         config = client.config_from_string(
-            self.basedir,
+            self.basedir.path,
             "tub.port",
             self.get_config(
                 storage_enabled=True,
@ -52,8 +52,11 @@ from allmydata.util import pollmixin, idlib, fileutil, yamlutil
 from allmydata.util.iputil import (
     listenOnUnused,
 )
+from allmydata.scripts.common import (
+    write_introducer,
+)
 import allmydata.test.common_util as testutil
-from allmydata.test.common import (
+from .common import (
     SyncTestCase,
     AsyncTestCase,
     AsyncBrokenTestCase,
@ -797,22 +800,28 @@ class Announcements(AsyncTestCase):

     @defer.inlineCallbacks
     def test_client_cache(self):
-        basedir = "introducer/ClientSeqnums/test_client_cache_1"
-        fileutil.make_dirs(basedir)
-        cache_filepath = FilePath(os.path.join(basedir, "private",
-                                               "introducer_default_cache.yaml"))
+        """
+        Announcements received by an introducer client are written to that
+        introducer client's cache file.
+        """
+        basedir = FilePath("introducer/ClientSeqnums/test_client_cache_1")
+        private = basedir.child("private")
+        private.makedirs()
+        write_introducer(basedir, "default", "nope")
+        cache_filepath = basedir.descendant([
+            "private",
+            "introducer_default_cache.yaml",
+        ])

         # if storage is enabled, the Client will publish its storage server
         # during startup (although the announcement will wait in a queue
         # until the introducer connection is established). To avoid getting
         # confused by this, disable storage.
-        with open(os.path.join(basedir, "tahoe.cfg"), "w") as f:
-            f.write("[client]\n")
-            f.write("introducer.furl = nope\n")
-            f.write("[storage]\n")
-            f.write("enabled = false\n")
+        with basedir.child("tahoe.cfg").open("w") as f:
+            f.write(b"[storage]\n")
+            f.write(b"enabled = false\n")

-        c = yield create_client(basedir)
+        c = yield create_client(basedir.path)
         ic = c.introducer_clients[0]
         private_key, public_key = ed25519.create_signing_keypair()
         public_key_str = remove_prefix(ed25519.string_from_verifying_key(public_key), b"pub-")
@ -878,7 +887,7 @@ class Announcements(AsyncTestCase):
         self.failUnlessEqual(announcements[public_key_str2]["anonymous-storage-FURL"],
                              furl3)

-        c2 = yield create_client(basedir)
+        c2 = yield create_client(basedir.path)
         c2.introducer_clients[0]._load_announcements()
         yield flushEventualQueue()
         self.assertEqual(c2.storage_broker.get_all_serverids(),
@ -888,27 +897,24 @@ class ClientSeqnums(AsyncBrokenTestCase):

     @defer.inlineCallbacks
     def test_client(self):
-        basedir = "introducer/ClientSeqnums/test_client"
-        fileutil.make_dirs(basedir)
+        basedir = FilePath("introducer/ClientSeqnums/test_client")
+        private = basedir.child("private")
+        private.makedirs()
+        write_introducer(basedir, "default", "nope")
         # if storage is enabled, the Client will publish its storage server
         # during startup (although the announcement will wait in a queue
         # until the introducer connection is established). To avoid getting
         # confused by this, disable storage.
-        f = open(os.path.join(basedir, "tahoe.cfg"), "w")
-        f.write("[client]\n")
-        f.write("introducer.furl = nope\n")
-        f.write("[storage]\n")
-        f.write("enabled = false\n")
-        f.close()
+        with basedir.child("tahoe.cfg").open("w") as f:
+            f.write(b"[storage]\n")
+            f.write(b"enabled = false\n")

-        c = yield create_client(basedir)
+        c = yield create_client(basedir.path)
         ic = c.introducer_clients[0]
         outbound = ic._outbound_announcements
         published = ic._published_announcements
         def read_seqnum():
-            f = open(os.path.join(basedir, "announcement-seqnum"))
-            seqnum = f.read().strip()
-            f.close()
+            seqnum = basedir.child("announcement-seqnum").getContent()
             return int(seqnum)

         ic.publish("sA", {"key": "value1"}, c._node_private_key)
@ -24,9 +24,6 @@ class MultiIntroTests(unittest.TestCase):
         config = {'hide-ip':False, 'listen': 'tcp',
                   'port': None, 'location': None, 'hostname': 'example.net'}
         write_node_config(c, config)
-        fake_furl = "furl1"
-        c.write("[client]\n")
-        c.write("introducer.furl = %s\n" % fake_furl)
         c.write("[storage]\n")
         c.write("enabled = false\n")
         c.close()
@ -36,8 +33,10 @@ class MultiIntroTests(unittest.TestCase):

     @defer.inlineCallbacks
     def test_introducer_count(self):
-        """ Ensure that the Client creates same number of introducer clients
-        as found in "basedir/private/introducers" config file. """
+        """
+        If there are two introducers configured in ``introducers.yaml`` then
+        ``Client`` creates two introducer clients.
+        """
         connections = {
             'introducers': {
                 u'intro1':{ 'furl': 'furl1' },
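For reference, the ``connections`` mapping used by this test has the same shape that ``private/introducers.yaml`` holds on disk. A small sketch of writing such a file by hand with PyYAML; the petnames and fURLs are placeholders, not real values:

    # Placeholder petnames/furls; shows the YAML shape the tests rely on.
    import yaml

    connections = {
        "introducers": {
            "intro1": {"furl": "furl1"},
            "intro2": {"furl": "furl2"},
        },
    }
    with open("introducers.yaml", "w") as f:
        yaml.safe_dump(connections, f)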
@ -50,25 +49,13 @@ class MultiIntroTests(unittest.TestCase):
         ic_count = len(myclient.introducer_clients)

         # assertions
-        self.failUnlessEqual(ic_count, 3)
+        self.failUnlessEqual(ic_count, len(connections["introducers"]))

-    @defer.inlineCallbacks
-    def test_introducer_count_commented(self):
-        """ Ensure that the Client creates same number of introducer clients
-        as found in "basedir/private/introducers" config file when there is one
-        commented."""
-        self.yaml_path.setContent(INTRODUCERS_CFG_FURLS_COMMENTED)
-        # get a client and count of introducer_clients
-        myclient = yield create_client(self.basedir)
-        ic_count = len(myclient.introducer_clients)
-
-        # assertions
-        self.failUnlessEqual(ic_count, 2)
-
     @defer.inlineCallbacks
     def test_read_introducer_furl_from_tahoecfg(self):
-        """ Ensure that the Client reads the introducer.furl config item from
-        the tahoe.cfg file. """
+        """
+        The deprecated [client]introducer.furl item is still read and respected.
+        """
         # create a custom tahoe.cfg
         c = open(os.path.join(self.basedir, "tahoe.cfg"), "w")
         config = {'hide-ip':False, 'listen': 'tcp',
@ -87,20 +74,42 @@ class MultiIntroTests(unittest.TestCase):

         # assertions
         self.failUnlessEqual(fake_furl, tahoe_cfg_furl)
+        self.assertEqual(
+            list(
+                warning["message"]
+                for warning
+                in self.flushWarnings()
+                if warning["category"] is DeprecationWarning
+            ),
+            ["tahoe.cfg [client]introducer.furl is deprecated; "
+             "use private/introducers.yaml instead."],
+        )

     @defer.inlineCallbacks
     def test_reject_default_in_yaml(self):
-        connections = {'introducers': {
-            u'default': { 'furl': 'furl1' },
-        }}
+        """
+        If an introducer is configured in tahoe.cfg with the deprecated
+        [client]introducer.furl then a "default" introducer in
+        introducers.yaml is rejected.
+        """
+        connections = {
+            'introducers': {
+                u'default': { 'furl': 'furl1' },
+            },
+        }
         self.yaml_path.setContent(yamlutil.safe_dump(connections))
+        FilePath(self.basedir).child("tahoe.cfg").setContent(
+            "[client]\n"
+            "introducer.furl = furl1\n"
+        )

         with self.assertRaises(ValueError) as ctx:
             yield create_client(self.basedir)

         self.assertEquals(
             str(ctx.exception),
-            "'default' introducer furl cannot be specified in introducers.yaml; please "
-            "fix impossible configuration.",
+            "'default' introducer furl cannot be specified in tahoe.cfg and introducers.yaml; "
+            "please fix impossible configuration.",
         )

 SIMPLE_YAML = """
@ -126,8 +135,6 @@ class NoDefault(unittest.TestCase):
         config = {'hide-ip':False, 'listen': 'tcp',
                   'port': None, 'location': None, 'hostname': 'example.net'}
         write_node_config(c, config)
-        c.write("[client]\n")
-        c.write("# introducer.furl =\n") # omit default
         c.write("[storage]\n")
         c.write("enabled = false\n")
         c.close()
@ -684,8 +684,6 @@ class TestMissingPorts(unittest.TestCase):


 BASE_CONFIG = """
-[client]
-introducer.furl = empty
 [tor]
 enabled = false
 [i2p]
@ -458,7 +458,7 @@ class StoragePluginWebPresence(AsyncTestCase):
             },
             storage_plugin=self.storage_plugin,
             basedir=self.basedir,
-            introducer_furl=ensure_text(SOME_FURL),
+            introducer_furl=SOME_FURL,
         ))
         self.node = yield self.node_fixture.create_node()
         self.webish = self.node.getServiceNamed(WebishServer.name)
@ -33,6 +33,9 @@ from allmydata.mutable.publish import MutableData

 from foolscap.api import DeadReferenceError, fireEventually, flushEventualQueue
 from twisted.python.failure import Failure
+from twisted.python.filepath import (
+    FilePath,
+)

 from .common import (
     TEST_RSA_KEY_SIZE,
@ -47,6 +50,9 @@ from .web.common import (
 from allmydata.test.test_runner import RunBinTahoeMixin
 from . import common_util as testutil
 from .common_util import run_cli
+from ..scripts.common import (
+    write_introducer,
+)

 LARGE_DATA = """
 This is some data to publish to the remote grid.., which needs to be large
@ -806,8 +812,6 @@ class SystemTestMixin(pollmixin.PollMixin, testutil.StallMixin):

         except1 = set(range(self.numclients)) - {1}
         feature_matrix = {
-            # client 1 uses private/introducers.yaml, not tahoe.cfg
-            ("client", "introducer.furl"): except1,
             ("client", "nickname"): except1,

             # client 1 has to auto-assign an address.
@ -833,7 +837,6 @@
         setnode = partial(setconf, config, which, "node")
         sethelper = partial(setconf, config, which, "helper")

-        setclient("introducer.furl", self.introducer_furl)
         setnode("nickname", u"client %d \N{BLACK SMILING FACE}" % (which,))

         if self.stats_gatherer_furl:
@ -850,13 +853,11 @@

         sethelper("enabled", "True")

-        if which == 1:
-            # clients[1] uses private/introducers.yaml, not tahoe.cfg
-            iyaml = ("introducers:\n"
-                     " petname2:\n"
-                     " furl: %s\n") % self.introducer_furl
-            iyaml_fn = os.path.join(basedir, "private", "introducers.yaml")
-            fileutil.write(iyaml_fn, iyaml)
+        iyaml = ("introducers:\n"
+                 " petname2:\n"
+                 " furl: %s\n") % self.introducer_furl
+        iyaml_fn = os.path.join(basedir, "private", "introducers.yaml")
+        fileutil.write(iyaml_fn, iyaml)

         return _render_config(config)

@ -905,16 +906,21 @@
         # usually this node is *not* parented to our self.sparent, so we can
         # shut it down separately from the rest, to exercise the
         # connection-lost code
-        basedir = self.getdir("client%d" % client_num)
-        if not os.path.isdir(basedir):
-            fileutil.make_dirs(basedir)
+        basedir = FilePath(self.getdir("client%d" % client_num))
+        basedir.makedirs()
         config = "[client]\n"
-        config += "introducer.furl = %s\n" % self.introducer_furl
         if helper_furl:
             config += "helper.furl = %s\n" % helper_furl
-        fileutil.write(os.path.join(basedir, 'tahoe.cfg'), config)
+        basedir.child("tahoe.cfg").setContent(config)
+        private = basedir.child("private")
+        private.makedirs()
+        write_introducer(
+            basedir,
+            "default",
+            self.introducer_furl,
+        )

-        c = yield client.create_client(basedir)
+        c = yield client.create_client(basedir.path)
         self.clients.append(c)
         c.set_default_mutable_keysize(TEST_RSA_KEY_SIZE)
         self.numclients += 1
14
tox.ini
@ -95,12 +95,16 @@ setenv =
     # .decode(getattr(sys.stdout, "encoding", "utf8"))
     # `TypeError: decode() argument 1 must be string, not None`
     PYTHONIOENCODING=utf_8

+    # If no positional arguments are given, try to run the checks on the
+    # entire codebase, including various pieces of supporting code.
+    DEFAULT_FILES=src integration static misc setup.py
 commands =
-    flake8 src integration static misc setup.py
-    python misc/coding_tools/check-umids.py src
-    python misc/coding_tools/check-debugging.py
-    python misc/coding_tools/find-trailing-spaces.py -r src static misc setup.py
-    python misc/coding_tools/check-miscaptures.py
+    flake8 {posargs:{env:DEFAULT_FILES}}
+    python misc/coding_tools/check-umids.py {posargs:{env:DEFAULT_FILES}}
+    python misc/coding_tools/check-debugging.py {posargs:{env:DEFAULT_FILES}}
+    python misc/coding_tools/find-trailing-spaces.py -r {posargs:{env:DEFAULT_FILES}}
+    python misc/coding_tools/check-miscaptures.py {posargs:{env:DEFAULT_FILES}}

     # If towncrier.check fails, you forgot to add a towncrier news
     # fragment explaining the change in this branch. Create one at
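With the check commands parameterised on ``{posargs:...}``, the codechecks environment keeps its old behaviour when invoked bare (tox substitutes the ``DEFAULT_FILES`` list), while individual paths can now be passed after ``--``, for example ``tox -e codechecks -- src/allmydata/test/test_client.py``; the path shown here is only an illustration.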