Mirror of https://github.com/tahoe-lafs/tahoe-lafs.git (synced 2025-04-07 19:04:21 +00:00)

Commit ca5a64236d: Merge remote-tracking branch 'origin/master' into HEAD
@@ -1,9 +1,10 @@
repos:
- repo: local
- repo: "local"
hooks:
- id: codechecks
name: codechecks
- id: "codechecks"
name: "codechecks"
stages: ["push"]
language: "system"
files: ".py$"
entry: "tox -e codechecks"
language: system
pass_filenames: false
pass_filenames: true
Makefile (16 lines changed)

@@ -13,8 +13,6 @@ MAKEFLAGS += --warn-undefined-variables
MAKEFLAGS += --no-builtin-rules

# Local target variables
VCS_HOOK_SAMPLES=$(wildcard .git/hooks/*.sample)
VCS_HOOKS=$(VCS_HOOK_SAMPLES:%.sample=%)
PYTHON=python
export PYTHON
PYFLAKES=flake8
@@ -31,15 +29,6 @@ TEST_SUITE=allmydata
default:
@echo "no default target"

.PHONY: install-vcs-hooks
## Install the VCS hooks to run linters on commit and all tests on push
install-vcs-hooks: .git/hooks/pre-commit .git/hooks/pre-push
.PHONY: uninstall-vcs-hooks
## Remove the VCS hooks
uninstall-vcs-hooks: .tox/create-venvs.log
"./$(dir $(<))py36/bin/pre-commit" uninstall || true
"./$(dir $(<))py36/bin/pre-commit" uninstall -t pre-push || true

.PHONY: test
## Run all tests and code reports
test: .tox/create-venvs.log
@@ -215,7 +204,7 @@ clean:
rm -f *.pkg

.PHONY: distclean
distclean: clean uninstall-vcs-hooks
distclean: clean
rm -rf src/*.egg-info
rm -f src/allmydata/_version.py
rm -f src/allmydata/_appname.py
@@ -261,6 +250,3 @@ src/allmydata/_version.py:

.tox/create-venvs.log: tox.ini setup.py
tox --notest -p all | tee -a "$(@)"

$(VCS_HOOKS): .tox/create-venvs.log .pre-commit-config.yaml
"./$(dir $(<))py36/bin/pre-commit" install --hook-type $(@:.git/hooks/%=%)
@@ -398,13 +398,13 @@ This section controls *when* Tor and I2P are used. The ``[tor]`` and
``[i2p]`` sections (described later) control *how* Tor/I2P connections are
managed.

All Tahoe nodes need to make a connection to the Introducer; the ``[client]
introducer.furl`` setting (described below) indicates where the Introducer
lives. Tahoe client nodes must also make connections to storage servers:
these targets are specified in announcements that come from the Introducer.
Both are expressed as FURLs (a Foolscap URL), which include a list of
"connection hints". Each connection hint describes one (of perhaps many)
network endpoints where the service might live.
All Tahoe nodes need to make a connection to the Introducer; the
``private/introducers.yaml`` file (described below) configures where one or more
Introducers live. Tahoe client nodes must also make connections to storage
servers: these targets are specified in announcements that come from the
Introducer. Both are expressed as FURLs (a Foolscap URL), which include a
list of "connection hints". Each connection hint describes one (of perhaps
many) network endpoints where the service might live.
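A FURL's hint list can be pulled apart with ordinary string handling; the
following is only an illustrative sketch (Foolscap does the real parsing),
assuming the usual ``pb://<tub-id>@<hint>,<hint>/<swissnum>`` shape::

    def connection_hints(furl):
        _, rest = furl.split("://", 1)   # drop the "pb" scheme
        _, rest = rest.split("@", 1)     # drop the tub id
        hints, _ = rest.split("/", 1)    # drop the swissnum
        return hints.split(",")

    connection_hints("pb://ok45ssoklj4y7eok5c3xkmj@tcp:tahoe.example:44801/ii3uumo")
    # ['tcp:tahoe.example:44801']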

Connection hints include a type, and look like:

@@ -580,6 +580,8 @@ Client Configuration

``introducer.furl = (FURL string, mandatory)``

DEPRECATED. See :ref:`introducer-definitions`.

This FURL tells the client how to connect to the introducer. Each
Tahoe-LAFS grid is defined by an introducer. The introducer's FURL is
created by the introducer node and written into its private base
@@ -965,29 +967,28 @@ This section describes these other files.
with as many people as possible, put the empty string (so that
``private/convergence`` is a zero-length file).

Additional Introducer Definitions
=================================
.. _introducer-definitions:

The ``private/introducers.yaml`` file defines additional Introducers. The
first introducer is defined in ``tahoe.cfg``, in ``[client]
introducer.furl``. To use two or more Introducers, choose a locally-unique
"petname" for each one, then define their FURLs in
``private/introducers.yaml`` like this::
Introducer Definitions
======================

The ``private/introducers.yaml`` file defines Introducers.
Choose a locally-unique "petname" for each one then define their FURLs in ``private/introducers.yaml`` like this::

introducers:
petname2:
furl: FURL2
furl: "FURL2"
petname3:
furl: FURL3
furl: "FURL3"

Servers will announce themselves to all configured introducers. Clients will
merge the announcements they receive from all introducers. Nothing will
re-broadcast an announcement (i.e. telling introducer 2 about something you
heard from introducer 1).

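The file is plain YAML, so it can be inspected with any YAML reader; a minimal
sketch (the node performs its own validation on startup), assuming PyYAML is
installed::

    import yaml

    with open("private/introducers.yaml") as f:
        data = yaml.safe_load(f) or {}

    # Mirrors the layout above: each petname maps to a dict with a "furl" key.
    for petname, entry in data.get("introducers", {}).items():
        print(petname, entry["furl"])
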
If you omit the introducer definitions from both ``tahoe.cfg`` and
``introducers.yaml``, the node will not use an Introducer at all. Such
"introducerless" clients must be configured with static servers (described
If you omit the introducer definitions from ``introducers.yaml``,
the node will not use an Introducer at all.
Such "introducerless" clients must be configured with static servers (described
below), or they will not be able to upload and download files.

@@ -1155,7 +1156,6 @@ a legal one.
timeout.disconnect = 1800

[client]
introducer.furl = pb://ok45ssoklj4y7eok5c3xkmj@tcp:tahoe.example:44801/ii3uumo
helper.furl = pb://ggti5ssoklj4y7eok5c3xkmj@tcp:helper.tahoe.example:7054/kk8lhr

[storage]
@@ -1166,6 +1166,11 @@ a legal one.
[helper]
enabled = True

To be introduced to storage servers, here is a sample ``private/introducers.yaml`` which can be used in conjunction::

introducers:
examplegrid:
furl: "pb://ok45ssoklj4y7eok5c3xkmj@tcp:tahoe.example:44801/ii3uumo"

Old Configuration Files
=======================
@@ -5,23 +5,17 @@ Developer Guide
Pre-commit Checks
-----------------

This project is configured for use with `pre-commit`_ to install `VCS/git hooks`_ which
perform some static code analysis checks and other code checks to catch common errors
before each commit and to run the full self-test suite to find less obvious regressions
before each push to a remote.
This project is configured for use with `pre-commit`_ to install `VCS/git hooks`_ which perform some static code analysis checks and other code checks to catch common errors.
These hooks can be configured to run before commits or pushes

For example::

tahoe-lafs $ make install-vcs-hooks
...
+ ./.tox//py36/bin/pre-commit install --hook-type pre-commit
pre-commit installed at .git/hooks/pre-commit
+ ./.tox//py36/bin/pre-commit install --hook-type pre-push
tahoe-lafs $ pre-commit install --hook-type pre-push
pre-commit installed at .git/hooks/pre-push
tahoe-lafs $ python -c "import pathlib; pathlib.Path('src/allmydata/tabbed.py').write_text('def foo():\\n\\tpass\\n')"
tahoe-lafs $ git add src/allmydata/tabbed.py
tahoe-lafs $ echo "undefined" > src/allmydata/undefined_name.py
tahoe-lafs $ git add src/allmydata/undefined_name.py
tahoe-lafs $ git commit -a -m "Add a file that violates flake8"
...
tahoe-lafs $ git push
codechecks...............................................................Failed
- hook id: codechecks
- exit code: 1
@@ -30,58 +24,17 @@ For example::
codechecks inst-nodeps: ...
codechecks installed: ...
codechecks run-test-pre: PYTHONHASHSEED='...'
codechecks run-test: commands[0] | flake8 src static misc setup.py
src/allmydata/tabbed.py:2:1: W191 indentation contains tabs
ERROR: InvocationError for command ./tahoe-lafs/.tox/codechecks/bin/flake8 src static misc setup.py (exited with code 1)
codechecks run-test: commands[0] | flake8 src/allmydata/undefined_name.py
src/allmydata/undefined_name.py:1:1: F821 undefined name 'undefined'
ERROR: InvocationError for command ./tahoe-lafs/.tox/codechecks/bin/flake8 src/allmydata/undefined_name.py (exited with code 1)
___________________________________ summary ____________________________________
ERROR: codechecks: commands failed
...

To uninstall::

tahoe-lafs $ make uninstall-vcs-hooks
...
+ ./.tox/py36/bin/pre-commit uninstall
pre-commit uninstalled
+ ./.tox/py36/bin/pre-commit uninstall -t pre-push
tahoe-lafs $ pre-commit uninstall --hook-type pre-push
pre-push uninstalled

Note that running the full self-test suite takes several minutes so expect pushing to
take some time. If you can't or don't want to wait for the hooks in some cases, use the
``--no-verify`` option to ``$ git commit ...`` or ``$ git push ...``. Alternatively,
see the `pre-commit`_ documentation and CLI help output and use the committed
`pre-commit configuration`_ as a starting point to write a local, uncommitted
``../.pre-commit-config.local.yaml`` configuration to use instead. For example::

tahoe-lafs $ ./.tox/py36/bin/pre-commit --help
tahoe-lafs $ ./.tox/py36/bin/pre-commit instll --help
tahoe-lafs $ cp "./.pre-commit-config.yaml" "./.pre-commit-config.local.yaml"
tahoe-lafs $ editor "./.pre-commit-config.local.yaml"
...
tahoe-lafs $ ./.tox/py36/bin/pre-commit install -c "./.pre-commit-config.local.yaml" -t pre-push
pre-commit installed at .git/hooks/pre-push
tahoe-lafs $ git commit -a -m "Add a file that violates flake8"
[3398.pre-commit 29f8f43d2] Add a file that violates flake8
1 file changed, 2 insertions(+)
create mode 100644 src/allmydata/tabbed.py
tahoe-lafs $ git push
...
codechecks...............................................................Failed
- hook id: codechecks
- exit code: 1

GLOB sdist-make: ./tahoe-lafs/setup.py
codechecks inst-nodeps: ...
codechecks installed: ...
codechecks run-test-pre: PYTHONHASHSEED='...'
codechecks run-test: commands[0] | flake8 src static misc setup.py
src/allmydata/tabbed.py:2:1: W191 indentation contains tabs
ERROR: InvocationError for command ./tahoe-lafs/.tox/codechecks/bin/flake8 src static misc setup.py (exited with code 1)
___________________________________ summary ____________________________________
ERROR: codechecks: commands failed
...

error: failed to push some refs to 'github.com:jaraco/tahoe-lafs.git'


.. _`pre-commit`: https://pre-commit.com
@@ -20,7 +20,7 @@ Config setting File Comment
``[node]log_gatherer.furl`` ``BASEDIR/log_gatherer.furl`` (one per line)
``[node]timeout.keepalive`` ``BASEDIR/keepalive_timeout``
``[node]timeout.disconnect`` ``BASEDIR/disconnect_timeout``
``[client]introducer.furl`` ``BASEDIR/introducer.furl``
``BASEDIR/introducer.furl`` ``BASEDIR/private/introducers.yaml``
``[client]helper.furl`` ``BASEDIR/helper.furl``
``[client]key_generator.furl`` ``BASEDIR/key_generator.furl``
``[client]stats_gatherer.furl`` ``BASEDIR/stats_gatherer.furl``
@@ -65,9 +65,9 @@ Running a Client
To construct a client node, run “``tahoe create-client``”, which will create
``~/.tahoe`` to be the node's base directory. Acquire the ``introducer.furl``
(see below if you are running your own introducer, or use the one from the
`TestGrid page`_), and paste it after ``introducer.furl =`` in the
``[client]`` section of ``~/.tahoe/tahoe.cfg``. Then use “``tahoe run
~/.tahoe``”. After that, the node should be off and running. The first thing
`TestGrid page`_), and write it to ``~/.tahoe/private/introducers.yaml``
(see :ref:`introducer-definitions`). Then use “``tahoe run ~/.tahoe``”.
After that, the node should be off and running. The first thing
it will do is connect to the introducer and get itself connected to all other
nodes on the grid.

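The same file can also be written programmatically with the ``write_introducer``
helper from ``allmydata.scripts.common`` (added in this change); a small sketch,
using a placeholder FURL::

    from os.path import expanduser

    from twisted.python.filepath import FilePath
    from allmydata.scripts.common import write_introducer

    # Overwrites ~/.tahoe/private/introducers.yaml with a single "default" entry.
    write_introducer(
        FilePath(expanduser("~/.tahoe")),
        "default",
        "pb://ok45ssoklj4y7eok5c3xkmj@tcp:tahoe.example:44801/ii3uumo",
    )
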
|
@ -1,7 +1,6 @@
|
||||
from __future__ import print_function
|
||||
|
||||
import sys
|
||||
from os import mkdir
|
||||
from os.path import join
|
||||
|
||||
import pytest
|
||||
@ -9,6 +8,14 @@ import pytest_twisted
|
||||
|
||||
import util
|
||||
|
||||
from twisted.python.filepath import (
|
||||
FilePath,
|
||||
)
|
||||
|
||||
from allmydata.test.common import (
|
||||
write_introducer,
|
||||
)
|
||||
|
||||
# see "conftest.py" for the fixtures (e.g. "tor_network")
|
||||
|
||||
# XXX: Integration tests that involve Tor do not run reliably on
|
||||
@ -66,12 +73,12 @@ def test_onion_service_storage(reactor, request, temp_dir, flog_gatherer, tor_ne
|
||||
|
||||
@pytest_twisted.inlineCallbacks
|
||||
def _create_anonymous_node(reactor, name, control_port, request, temp_dir, flog_gatherer, tor_network, introducer_furl):
|
||||
node_dir = join(temp_dir, name)
|
||||
node_dir = FilePath(temp_dir).child(name)
|
||||
web_port = "tcp:{}:interface=localhost".format(control_port + 2000)
|
||||
|
||||
if True:
|
||||
print("creating", node_dir)
|
||||
mkdir(node_dir)
|
||||
print("creating", node_dir.path)
|
||||
node_dir.makedirs()
|
||||
proto = util._DumpOutputProtocol(None)
|
||||
reactor.spawnProcess(
|
||||
proto,
|
||||
@ -84,12 +91,15 @@ def _create_anonymous_node(reactor, name, control_port, request, temp_dir, flog_
|
||||
'--hide-ip',
|
||||
'--tor-control-port', 'tcp:localhost:{}'.format(control_port),
|
||||
'--listen', 'tor',
|
||||
node_dir,
|
||||
node_dir.path,
|
||||
)
|
||||
)
|
||||
yield proto.done
|
||||
|
||||
with open(join(node_dir, 'tahoe.cfg'), 'w') as f:
|
||||
|
||||
# Which services should this client connect to?
|
||||
write_introducer(node_dir, "default", introducer_furl)
|
||||
with node_dir.child('tahoe.cfg').open('w') as f:
|
||||
f.write('''
|
||||
[node]
|
||||
nickname = %(name)s
|
||||
@ -105,15 +115,12 @@ onion = true
|
||||
onion.private_key_file = private/tor_onion.privkey
|
||||
|
||||
[client]
|
||||
# Which services should this client connect to?
|
||||
introducer.furl = %(furl)s
|
||||
shares.needed = 1
|
||||
shares.happy = 1
|
||||
shares.total = 2
|
||||
|
||||
''' % {
|
||||
'name': name,
|
||||
'furl': introducer_furl,
|
||||
'web_port': web_port,
|
||||
'log_furl': flog_gatherer,
|
||||
'control_port': control_port,
|
||||
@ -121,5 +128,5 @@ shares.total = 2
|
||||
})
|
||||
|
||||
print("running")
|
||||
yield util._run_node(reactor, node_dir, request, None)
|
||||
yield util._run_node(reactor, node_dir.path, request, None)
|
||||
print("okay, launched")
|
||||
|
@ -11,8 +11,12 @@ umids = {}
|
||||
|
||||
for starting_point in sys.argv[1:]:
|
||||
for root, dirs, files in os.walk(starting_point):
|
||||
for fn in [f for f in files if f.endswith(".py")]:
|
||||
fn = os.path.join(root, fn)
|
||||
for f in files:
|
||||
if not f.endswith(".py"):
|
||||
continue
|
||||
if f == "check-debugging.py":
|
||||
continue
|
||||
fn = os.path.join(root, f)
|
||||
for lineno,line in enumerate(open(fn, "r").readlines()):
|
||||
lineno = lineno+1
|
||||
mo = re.search(r"\.setDebugging\(True\)", line)
|
||||
|
newsfragments/3504.configuration (new file, 1 line)
@@ -0,0 +1 @@
The ``[client]introducer.furl`` configuration item is now deprecated in favor of the ``private/introducers.yaml`` file.
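For a node that still sets the old option, the move is mechanical: read the
deprecated value out of ``tahoe.cfg`` and hand it to the ``write_introducer``
helper added in this change. A rough sketch (the node path is a placeholder,
and plain ``configparser`` is used here only for illustration)::

    import configparser
    from twisted.python.filepath import FilePath
    from allmydata.scripts.common import write_introducer

    basedir = FilePath("/path/to/node")
    parser = configparser.ConfigParser()
    parser.read(basedir.child("tahoe.cfg").path)
    furl = parser.get("client", "introducer.furl", fallback=None)
    if furl:
        # Becomes the "default" entry in private/introducers.yaml; the old
        # tahoe.cfg key can then be removed.
        write_introducer(basedir, "default", furl)
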
newsfragments/3514.minor (new empty file)
newsfragments/3515.minor (new empty file)
newsfragments/3520.minor (new empty file)

newsfragments/3539.bugfix (new file, 1 line)
@@ -0,0 +1 @@
Certain implementation-internal weakref KeyErrors are now handled and should no longer cause user-initiated operations to fail.

newsfragments/3547.minor (new empty file)
@ -1,11 +1,12 @@
|
||||
from past.builtins import unicode
|
||||
|
||||
import os
|
||||
import stat
|
||||
import time
|
||||
import weakref
|
||||
from allmydata import node
|
||||
|
||||
from base64 import urlsafe_b64encode
|
||||
from functools import partial
|
||||
from errno import ENOENT, EPERM
|
||||
|
||||
# On Python 2 this will be the backported package:
|
||||
from configparser import NoSectionError
|
||||
@ -26,6 +27,7 @@ from twisted.application.internet import TimerService
|
||||
from twisted.python.filepath import FilePath
|
||||
|
||||
import allmydata
|
||||
from allmydata import node
|
||||
from allmydata.crypto import rsa, ed25519
|
||||
from allmydata.crypto.util import remove_prefix
|
||||
from allmydata.storage.server import StorageServer
|
||||
@ -471,56 +473,17 @@ def create_introducer_clients(config, main_tub, _introducer_factory=None):
|
||||
# we return this list
|
||||
introducer_clients = []
|
||||
|
||||
introducers_yaml_filename = config.get_private_path("introducers.yaml")
|
||||
introducers_filepath = FilePath(introducers_yaml_filename)
|
||||
introducers = config.get_introducer_configuration()
|
||||
|
||||
try:
|
||||
with introducers_filepath.open() as f:
|
||||
introducers_yaml = yamlutil.safe_load(f)
|
||||
if introducers_yaml is None:
|
||||
raise EnvironmentError(
|
||||
EPERM,
|
||||
"Can't read '{}'".format(introducers_yaml_filename),
|
||||
introducers_yaml_filename,
|
||||
)
|
||||
introducers = introducers_yaml.get("introducers", {})
|
||||
log.msg(
|
||||
"found {} introducers in private/introducers.yaml".format(
|
||||
len(introducers),
|
||||
)
|
||||
)
|
||||
except EnvironmentError as e:
|
||||
if e.errno != ENOENT:
|
||||
raise
|
||||
introducers = {}
|
||||
|
||||
if "default" in introducers.keys():
|
||||
raise ValueError(
|
||||
"'default' introducer furl cannot be specified in introducers.yaml;"
|
||||
" please fix impossible configuration."
|
||||
)
|
||||
|
||||
# read furl from tahoe.cfg
|
||||
tahoe_cfg_introducer_furl = config.get_config("client", "introducer.furl", None)
|
||||
if tahoe_cfg_introducer_furl == "None":
|
||||
raise ValueError(
|
||||
"tahoe.cfg has invalid 'introducer.furl = None':"
|
||||
" to disable it, use 'introducer.furl ='"
|
||||
" or omit the key entirely"
|
||||
)
|
||||
if tahoe_cfg_introducer_furl:
|
||||
introducers[u'default'] = {'furl':tahoe_cfg_introducer_furl}
|
||||
|
||||
for petname, introducer in introducers.items():
|
||||
introducer_cache_filepath = FilePath(config.get_private_path("introducer_{}_cache.yaml".format(petname)))
|
||||
for petname, (furl, cache_path) in introducers.items():
|
||||
ic = _introducer_factory(
|
||||
main_tub,
|
||||
introducer['furl'].encode("ascii"),
|
||||
furl.encode("ascii"),
|
||||
config.nickname,
|
||||
str(allmydata.__full_version__),
|
||||
str(_Client.OLDEST_SUPPORTED_VERSION),
|
||||
partial(_sequencer, config),
|
||||
introducer_cache_filepath,
|
||||
cache_path,
|
||||
)
|
||||
introducer_clients.append(ic)
|
||||
return introducer_clients
|
||||
@ -742,10 +705,14 @@ class _Client(node.Node, pollmixin.PollMixin):
|
||||
return { 'node.uptime': time.time() - self.started_timestamp }
|
||||
|
||||
def init_secrets(self):
|
||||
lease_s = self.config.get_or_create_private_config("secret", _make_secret)
|
||||
# configs are always unicode
|
||||
def _unicode_make_secret():
|
||||
return unicode(_make_secret(), "ascii")
|
||||
lease_s = self.config.get_or_create_private_config(
|
||||
"secret", _unicode_make_secret).encode("utf-8")
|
||||
lease_secret = base32.a2b(lease_s)
|
||||
convergence_s = self.config.get_or_create_private_config('convergence',
|
||||
_make_secret)
|
||||
convergence_s = self.config.get_or_create_private_config(
|
||||
'convergence', _unicode_make_secret).encode("utf-8")
|
||||
self.convergence = base32.a2b(convergence_s)
|
||||
self._secret_holder = SecretHolder(lease_secret, self.convergence)
|
||||
|
||||
@ -754,9 +721,11 @@ class _Client(node.Node, pollmixin.PollMixin):
|
||||
# existing key
|
||||
def _make_key():
|
||||
private_key, _ = ed25519.create_signing_keypair()
|
||||
return ed25519.string_from_signing_key(private_key) + b"\n"
|
||||
# Config values are always unicode:
|
||||
return unicode(ed25519.string_from_signing_key(private_key) + b"\n", "utf-8")
|
||||
|
||||
private_key_str = self.config.get_or_create_private_config("node.privkey", _make_key)
|
||||
private_key_str = self.config.get_or_create_private_config(
|
||||
"node.privkey", _make_key).encode("utf-8")
|
||||
private_key, public_key = ed25519.signing_keypair_from_string(private_key_str)
|
||||
public_key_str = ed25519.string_from_verifying_key(public_key)
|
||||
self.config.write_config_file("node.pubkey", public_key_str + b"\n", "wb")
|
||||
@ -1118,7 +1087,7 @@ class _Client(node.Node, pollmixin.PollMixin):
|
||||
if accountfile:
|
||||
accountfile = self.config.get_config_path(accountfile)
|
||||
accounturl = self.config.get_config("sftpd", "accounts.url", None)
|
||||
sftp_portstr = self.config.get_config("sftpd", "port", "8022")
|
||||
sftp_portstr = self.config.get_config("sftpd", "port", "tcp:8022")
|
||||
pubkey_file = self.config.get_config("sftpd", "host_pubkey_file")
|
||||
privkey_file = self.config.get_config("sftpd", "host_privkey_file")
|
||||
|
||||
|
@ -1975,6 +1975,8 @@ class Dispatcher(object):
|
||||
|
||||
|
||||
class SFTPServer(service.MultiService):
|
||||
name = "frontend:sftp"
|
||||
|
||||
def __init__(self, client, accountfile, accounturl,
|
||||
sftp_portstr, pubkey_file, privkey_file):
|
||||
precondition(isinstance(accountfile, (unicode, NoneType)), accountfile)
|
||||
|
@ -1,4 +1,5 @@
|
||||
from past.builtins import unicode
|
||||
from past.builtins import unicode, long
|
||||
from six import ensure_text
|
||||
|
||||
import time
|
||||
from zope.interface import implementer
|
||||
@ -17,7 +18,7 @@ from allmydata.util.assertutil import precondition
|
||||
class InvalidCacheError(Exception):
|
||||
pass
|
||||
|
||||
V2 = "http://allmydata.org/tahoe/protocols/introducer/v2"
|
||||
V2 = b"http://allmydata.org/tahoe/protocols/introducer/v2"
|
||||
|
||||
@implementer(RIIntroducerSubscriberClient_v2, IIntroducerClient)
|
||||
class IntroducerClient(service.Service, Referenceable):
|
||||
@ -26,6 +27,8 @@ class IntroducerClient(service.Service, Referenceable):
|
||||
nickname, my_version, oldest_supported,
|
||||
sequencer, cache_filepath):
|
||||
self._tub = tub
|
||||
if isinstance(introducer_furl, unicode):
|
||||
introducer_furl = introducer_furl.encode("utf-8")
|
||||
self.introducer_furl = introducer_furl
|
||||
|
||||
assert type(nickname) is unicode
|
||||
@ -35,11 +38,11 @@ class IntroducerClient(service.Service, Referenceable):
|
||||
self._sequencer = sequencer
|
||||
self._cache_filepath = cache_filepath
|
||||
|
||||
self._my_subscriber_info = { "version": 0,
|
||||
"nickname": self._nickname,
|
||||
"app-versions": [],
|
||||
"my-version": self._my_version,
|
||||
"oldest-supported": self._oldest_supported,
|
||||
self._my_subscriber_info = { b"version": 0,
|
||||
b"nickname": self._nickname,
|
||||
b"app-versions": [],
|
||||
b"my-version": self._my_version,
|
||||
b"oldest-supported": self._oldest_supported,
|
||||
}
|
||||
|
||||
self._outbound_announcements = {} # not signed
|
||||
@ -113,19 +116,24 @@ class IntroducerClient(service.Service, Referenceable):
|
||||
announcements = []
|
||||
for _, value in self._inbound_announcements.items():
|
||||
ann, key_s, time_stamp = value
|
||||
# On Python 2, bytes strings are encoded into YAML Unicode strings.
|
||||
# On Python 3, bytes are encoded as YAML bytes. To minimize
|
||||
# changes, Python 3 for now ensures the same is true.
|
||||
server_params = {
|
||||
"ann" : ann,
|
||||
"key_s" : key_s,
|
||||
"key_s" : ensure_text(key_s),
|
||||
}
|
||||
announcements.append(server_params)
|
||||
announcement_cache_yaml = yamlutil.safe_dump(announcements)
|
||||
if isinstance(announcement_cache_yaml, unicode):
|
||||
announcement_cache_yaml = announcement_cache_yaml.encode("utf-8")
|
||||
self._cache_filepath.setContent(announcement_cache_yaml)
|
||||
|
||||
def _got_introducer(self, publisher):
|
||||
self.log("connected to introducer, getting versions")
|
||||
default = { "http://allmydata.org/tahoe/protocols/introducer/v1":
|
||||
default = { b"http://allmydata.org/tahoe/protocols/introducer/v1":
|
||||
{ },
|
||||
"application-version": "unknown: no get_version()",
|
||||
b"application-version": b"unknown: no get_version()",
|
||||
}
|
||||
d = add_version_to_remote_reference(publisher, default)
|
||||
d.addCallback(self._got_versioned_introducer)
|
||||
@ -138,6 +146,7 @@ class IntroducerClient(service.Service, Referenceable):
|
||||
def _got_versioned_introducer(self, publisher):
|
||||
self.log("got introducer version: %s" % (publisher.version,))
|
||||
# we require an introducer that speaks at least V2
|
||||
assert all(type(V2) == type(v) for v in publisher.version)
|
||||
if V2 not in publisher.version:
|
||||
raise InsufficientVersionError("V2", publisher.version)
|
||||
self._publisher = publisher
|
||||
@ -162,7 +171,7 @@ class IntroducerClient(service.Service, Referenceable):
|
||||
self._subscribed_service_names.add(service_name)
|
||||
self._maybe_subscribe()
|
||||
for index,(ann,key_s,when) in self._inbound_announcements.items():
|
||||
precondition(isinstance(key_s, str), key_s)
|
||||
precondition(isinstance(key_s, bytes), key_s)
|
||||
servicename = index[0]
|
||||
if servicename == service_name:
|
||||
eventually(cb, key_s, ann, *args, **kwargs)
|
||||
@ -238,7 +247,7 @@ class IntroducerClient(service.Service, Referenceable):
|
||||
# this might raise UnknownKeyError or bad-sig error
|
||||
ann, key_s = unsign_from_foolscap(ann_t)
|
||||
# key is "v0-base32abc123"
|
||||
precondition(isinstance(key_s, str), key_s)
|
||||
precondition(isinstance(key_s, bytes), key_s)
|
||||
except BadSignature:
|
||||
self.log("bad signature on inbound announcement: %s" % (ann_t,),
|
||||
parent=lp, level=log.WEIRD, umid="ZAU15Q")
|
||||
@ -248,7 +257,7 @@ class IntroducerClient(service.Service, Referenceable):
|
||||
self._process_announcement(ann, key_s)
|
||||
|
||||
def _process_announcement(self, ann, key_s):
|
||||
precondition(isinstance(key_s, str), key_s)
|
||||
precondition(isinstance(key_s, bytes), key_s)
|
||||
self._debug_counts["inbound_announcement"] += 1
|
||||
service_name = str(ann["service-name"])
|
||||
if service_name not in self._subscribed_service_names:
|
||||
@ -257,7 +266,7 @@ class IntroducerClient(service.Service, Referenceable):
|
||||
self._debug_counts["wrong_service"] += 1
|
||||
return
|
||||
# for ASCII values, simplejson might give us unicode *or* bytes
|
||||
if "nickname" in ann and isinstance(ann["nickname"], str):
|
||||
if "nickname" in ann and isinstance(ann["nickname"], bytes):
|
||||
ann["nickname"] = unicode(ann["nickname"])
|
||||
nick_s = ann.get("nickname",u"").encode("utf-8")
|
||||
lp2 = self.log(format="announcement for nickname '%(nick)s', service=%(svc)s: %(ann)s",
|
||||
@ -266,11 +275,11 @@ class IntroducerClient(service.Service, Referenceable):
|
||||
# how do we describe this node in the logs?
|
||||
desc_bits = []
|
||||
assert key_s
|
||||
desc_bits.append("serverid=" + key_s[:20])
|
||||
desc_bits.append(b"serverid=" + key_s[:20])
|
||||
if "anonymous-storage-FURL" in ann:
|
||||
tubid_s = get_tubid_string_from_ann(ann)
|
||||
desc_bits.append("tubid=" + tubid_s[:8])
|
||||
description = "/".join(desc_bits)
|
||||
desc_bits.append(b"tubid=" + tubid_s[:8])
|
||||
description = b"/".join(desc_bits)
|
||||
|
||||
# the index is used to track duplicates
|
||||
index = (service_name, key_s)
|
||||
@ -320,7 +329,7 @@ class IntroducerClient(service.Service, Referenceable):
|
||||
self._deliver_announcements(key_s, ann)
|
||||
|
||||
def _deliver_announcements(self, key_s, ann):
|
||||
precondition(isinstance(key_s, str), key_s)
|
||||
precondition(isinstance(key_s, bytes), key_s)
|
||||
service_name = str(ann["service-name"])
|
||||
for (service_name2,cb,args,kwargs) in self._local_subscribers:
|
||||
if service_name2 == service_name:
|
||||
|
@ -1,16 +1,19 @@
|
||||
from past.builtins import unicode
|
||||
|
||||
import re
|
||||
import json
|
||||
from allmydata.crypto.util import remove_prefix
|
||||
from allmydata.crypto import ed25519
|
||||
from allmydata.util import base32, rrefutil
|
||||
from allmydata.util import base32, rrefutil, jsonbytes as json
|
||||
|
||||
|
||||
def get_tubid_string_from_ann(ann):
|
||||
return get_tubid_string(str(ann.get("anonymous-storage-FURL")
|
||||
or ann.get("FURL")))
|
||||
furl = ann.get("anonymous-storage-FURL") or ann.get("FURL")
|
||||
if isinstance(furl, unicode):
|
||||
furl = furl.encode("utf-8")
|
||||
return get_tubid_string(furl)
|
||||
|
||||
def get_tubid_string(furl):
|
||||
m = re.match(r'pb://(\w+)@', furl)
|
||||
m = re.match(br'pb://(\w+)@', furl)
|
||||
assert m
|
||||
return m.group(1).lower()
|
||||
|
||||
|
@ -1,3 +1,5 @@
|
||||
from past.builtins import long
|
||||
from six import ensure_str, ensure_text
|
||||
|
||||
import time, os.path, textwrap
|
||||
from zope.interface import implementer
|
||||
@ -7,7 +9,7 @@ from twisted.python.failure import Failure
|
||||
from foolscap.api import Referenceable
|
||||
import allmydata
|
||||
from allmydata import node
|
||||
from allmydata.util import log, rrefutil
|
||||
from allmydata.util import log, rrefutil, dictutil
|
||||
from allmydata.util.i2p_provider import create as create_i2p_provider
|
||||
from allmydata.util.tor_provider import create as create_tor_provider
|
||||
from allmydata.introducer.interfaces import \
|
||||
@ -122,7 +124,7 @@ class _IntroducerNode(node.Node):
|
||||
|
||||
from allmydata.webish import IntroducerWebishServer
|
||||
nodeurl_path = self.config.get_config_path(u"node.url")
|
||||
config_staticdir = self.get_config("node", "web.static", "public_html").decode('utf-8')
|
||||
config_staticdir = self.get_config("node", "web.static", "public_html")
|
||||
staticdir = self.config.get_config_path(config_staticdir)
|
||||
ws = IntroducerWebishServer(self, webport, nodeurl_path, staticdir)
|
||||
ws.setServiceParent(self)
|
||||
@ -133,8 +135,8 @@ class IntroducerService(service.MultiService, Referenceable):
|
||||
# v1 is the original protocol, added in 1.0 (but only advertised starting
|
||||
# in 1.3), removed in 1.12. v2 is the new signed protocol, added in 1.10
|
||||
VERSION = { #"http://allmydata.org/tahoe/protocols/introducer/v1": { },
|
||||
"http://allmydata.org/tahoe/protocols/introducer/v2": { },
|
||||
"application-version": str(allmydata.__full_version__),
|
||||
b"http://allmydata.org/tahoe/protocols/introducer/v2": { },
|
||||
b"application-version": allmydata.__full_version__.encode("utf-8"),
|
||||
}
|
||||
|
||||
def __init__(self):
|
||||
@ -279,6 +281,10 @@ class IntroducerService(service.MultiService, Referenceable):
|
||||
def remote_subscribe_v2(self, subscriber, service_name, subscriber_info):
|
||||
self.log("introducer: subscription[%s] request at %s"
|
||||
% (service_name, subscriber), umid="U3uzLg")
|
||||
service_name = ensure_str(service_name)
|
||||
subscriber_info = dictutil.UnicodeKeyDict({
|
||||
ensure_text(k): v for (k, v) in subscriber_info.items()
|
||||
})
|
||||
return self.add_subscriber(subscriber, service_name, subscriber_info)
|
||||
|
||||
def add_subscriber(self, subscriber, service_name, subscriber_info):
|
||||
@ -302,6 +308,10 @@ class IntroducerService(service.MultiService, Referenceable):
|
||||
subscriber.notifyOnDisconnect(_remove)
|
||||
|
||||
# now tell them about any announcements they're interested in
|
||||
assert {type(service_name)}.issuperset(
|
||||
set(type(k[0]) for k in self._announcements)), (
|
||||
service_name, self._announcements.keys()
|
||||
)
|
||||
announcements = set( [ ann_t
|
||||
for idx,(ann_t,canary,ann,when)
|
||||
in self._announcements.items()
|
||||
|
@ -919,7 +919,7 @@ class Publish(object):
|
||||
|
||||
def log_goal(self, goal, message=""):
|
||||
logmsg = [message]
|
||||
for (shnum, server) in sorted([(s,p) for (p,s) in goal]):
|
||||
for (shnum, server) in sorted([(s,p) for (p,s) in goal], key=lambda t: (id(t[0]), id(t[1]))):
|
||||
logmsg.append("sh%d to [%s]" % (shnum, server.get_name()))
|
||||
self.log("current goal: %s" % (", ".join(logmsg)), level=log.NOISY)
|
||||
self.log("we are planning to push new seqnum=#%d" % self._new_seqnum,
|
||||
|
@ -21,6 +21,8 @@ import re
|
||||
import types
|
||||
import errno
|
||||
from base64 import b32decode, b32encode
|
||||
from errno import ENOENT, EPERM
|
||||
from warnings import warn
|
||||
|
||||
import attr
|
||||
|
||||
@ -42,6 +44,9 @@ from allmydata.util import fileutil, iputil
|
||||
from allmydata.util.fileutil import abspath_expanduser_unicode
|
||||
from allmydata.util.encodingutil import get_filesystem_encoding, quote_output
|
||||
from allmydata.util import configutil
|
||||
from allmydata.util.yamlutil import (
|
||||
safe_load,
|
||||
)
|
||||
|
||||
from . import (
|
||||
__full_version__,
|
||||
@ -537,6 +542,97 @@ class _Config(object):
|
||||
grid_manager_certificates.append(cert)
|
||||
return grid_manager_certificates
|
||||
|
||||
def get_introducer_configuration(self):
|
||||
"""
|
||||
Get configuration for introducers.
|
||||
|
||||
:return {unicode: (unicode, FilePath)}: A mapping from introducer
|
||||
petname to a tuple of the introducer's fURL and local cache path.
|
||||
"""
|
||||
introducers_yaml_filename = self.get_private_path("introducers.yaml")
|
||||
introducers_filepath = FilePath(introducers_yaml_filename)
|
||||
|
||||
def get_cache_filepath(petname):
|
||||
return FilePath(
|
||||
self.get_private_path("introducer_{}_cache.yaml".format(petname)),
|
||||
)
|
||||
|
||||
try:
|
||||
with introducers_filepath.open() as f:
|
||||
introducers_yaml = safe_load(f)
|
||||
if introducers_yaml is None:
|
||||
raise EnvironmentError(
|
||||
EPERM,
|
||||
"Can't read '{}'".format(introducers_yaml_filename),
|
||||
introducers_yaml_filename,
|
||||
)
|
||||
introducers = {
|
||||
petname: config["furl"]
|
||||
for petname, config
|
||||
in introducers_yaml.get("introducers", {}).items()
|
||||
}
|
||||
non_strs = list(
|
||||
k
|
||||
for k
|
||||
in introducers.keys()
|
||||
if not isinstance(k, str)
|
||||
)
|
||||
if non_strs:
|
||||
raise TypeError(
|
||||
"Introducer petnames {!r} should have been str".format(
|
||||
non_strs,
|
||||
),
|
||||
)
|
||||
non_strs = list(
|
||||
v
|
||||
for v
|
||||
in introducers.values()
|
||||
if not isinstance(v, str)
|
||||
)
|
||||
if non_strs:
|
||||
raise TypeError(
|
||||
"Introducer fURLs {!r} should have been str".format(
|
||||
non_strs,
|
||||
),
|
||||
)
|
||||
log.msg(
|
||||
"found {} introducers in {!r}".format(
|
||||
len(introducers),
|
||||
introducers_yaml_filename,
|
||||
)
|
||||
)
|
||||
except EnvironmentError as e:
|
||||
if e.errno != ENOENT:
|
||||
raise
|
||||
introducers = {}
|
||||
|
||||
# supported the deprecated [client]introducer.furl item in tahoe.cfg
|
||||
tahoe_cfg_introducer_furl = self.get_config("client", "introducer.furl", None)
|
||||
if tahoe_cfg_introducer_furl == "None":
|
||||
raise ValueError(
|
||||
"tahoe.cfg has invalid 'introducer.furl = None':"
|
||||
" to disable it omit the key entirely"
|
||||
)
|
||||
if tahoe_cfg_introducer_furl:
|
||||
warn(
|
||||
"tahoe.cfg [client]introducer.furl is deprecated; "
|
||||
"use private/introducers.yaml instead.",
|
||||
category=DeprecationWarning,
|
||||
stacklevel=-1,
|
||||
)
|
||||
if "default" in introducers:
|
||||
raise ValueError(
|
||||
"'default' introducer furl cannot be specified in tahoe.cfg and introducers.yaml;"
|
||||
" please fix impossible configuration."
|
||||
)
|
||||
introducers['default'] = tahoe_cfg_introducer_furl
|
||||
|
||||
return {
|
||||
petname: (furl, get_cache_filepath(petname))
|
||||
for (petname, furl)
|
||||
in introducers.items()
|
||||
}
|
||||
|
||||
|
||||
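# Illustrative usage sketch, not part of this patch: how a caller can consume
# the mapping returned by get_introducer_configuration().  The node directory
# below is a placeholder.
def _list_configured_introducers(basedir="/path/to/node"):
    from allmydata.node import read_config
    config = read_config(basedir, u"")
    # Each value is a (fURL, FilePath) pair; the FilePath points at the
    # per-introducer announcement cache under private/.
    for petname, (furl, cache_path) in config.get_introducer_configuration().items():
        print("%s %s %s" % (petname, furl, cache_path.path))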
def create_tub_options(config):
|
||||
"""
|
||||
|
@ -66,9 +66,9 @@ class NodeMaker(object):
|
||||
memokey = b"I" + bigcap
|
||||
else:
|
||||
memokey = b"M" + bigcap
|
||||
if memokey in self._node_cache:
|
||||
try:
|
||||
node = self._node_cache[memokey]
|
||||
else:
|
||||
except KeyError:
|
||||
cap = uri.from_string(bigcap, deep_immutable=deep_immutable,
|
||||
name=name)
|
||||
node = self._create_from_single_cap(cap)
|
||||
|
@ -4,14 +4,15 @@ import os, sys, urllib, textwrap
|
||||
import codecs
|
||||
from os.path import join
|
||||
|
||||
from yaml import (
|
||||
safe_dump,
|
||||
)
|
||||
|
||||
# Python 2 compatibility
|
||||
from future.utils import PY2
|
||||
if PY2:
|
||||
from future.builtins import str # noqa: F401
|
||||
|
||||
# On Python 2 this will be the backported package:
|
||||
from configparser import NoSectionError
|
||||
|
||||
from twisted.python import usage
|
||||
|
||||
from allmydata.util.assertutil import precondition
|
||||
@ -115,24 +116,42 @@ class NoDefaultBasedirOptions(BasedirOptions):
|
||||
DEFAULT_ALIAS = u"tahoe"
|
||||
|
||||
|
||||
def write_introducer(basedir, petname, furl):
|
||||
"""
|
||||
Overwrite the node's ``introducers.yaml`` with a file containing the given
|
||||
introducer information.
|
||||
"""
|
||||
if isinstance(furl, bytes):
|
||||
furl = furl.decode("utf-8")
|
||||
basedir.child(b"private").child(b"introducers.yaml").setContent(
|
||||
safe_dump({
|
||||
"introducers": {
|
||||
petname: {
|
||||
"furl": furl,
|
||||
},
|
||||
},
|
||||
}).encode("ascii"),
|
||||
)
|
||||
|
||||
|
||||
def get_introducer_furl(nodedir, config):
|
||||
"""
|
||||
:return: the introducer FURL for the given node (no matter if it's
|
||||
a client-type node or an introducer itself)
|
||||
"""
|
||||
for petname, (furl, cache) in config.get_introducer_configuration().items():
|
||||
return furl
|
||||
|
||||
# We have no configured introducers. Maybe this is running *on* the
|
||||
# introducer? Let's guess, sure why not.
|
||||
try:
|
||||
introducer_furl = config.get('client', 'introducer.furl')
|
||||
except NoSectionError:
|
||||
# we're not a client; maybe this is running *on* the introducer?
|
||||
try:
|
||||
with open(join(nodedir, "private", "introducer.furl"), "r") as f:
|
||||
introducer_furl = f.read().strip()
|
||||
except IOError:
|
||||
raise Exception(
|
||||
"Can't find introducer FURL in tahoe.cfg nor "
|
||||
"{}/private/introducer.furl".format(nodedir)
|
||||
)
|
||||
return introducer_furl
|
||||
with open(join(nodedir, "private", "introducer.furl"), "r") as f:
|
||||
return f.read().strip()
|
||||
except IOError:
|
||||
raise Exception(
|
||||
"Can't find introducer FURL in tahoe.cfg nor "
|
||||
"{}/private/introducer.furl".format(nodedir)
|
||||
)
|
||||
|
||||
|
||||
def get_aliases(nodedir):
|
||||
|
@ -5,11 +5,20 @@ import json
|
||||
|
||||
from twisted.internet import reactor, defer
|
||||
from twisted.python.usage import UsageError
|
||||
from allmydata.scripts.common import BasedirOptions, NoDefaultBasedirOptions
|
||||
from twisted.python.filepath import (
|
||||
FilePath,
|
||||
)
|
||||
|
||||
from allmydata.scripts.common import (
|
||||
BasedirOptions,
|
||||
NoDefaultBasedirOptions,
|
||||
write_introducer,
|
||||
)
|
||||
from allmydata.scripts.default_nodedir import _default_nodedir
|
||||
from allmydata.util.assertutil import precondition
|
||||
from allmydata.util.encodingutil import listdir_unicode, argv_to_unicode, quote_local_unicode_path, get_io_encoding
|
||||
from allmydata.util import fileutil, i2p_provider, iputil, tor_provider
|
||||
|
||||
from wormhole import wormhole
|
||||
|
||||
|
||||
@ -299,12 +308,15 @@ def write_node_config(c, config):
|
||||
|
||||
|
||||
def write_client_config(c, config):
|
||||
# note, config can be a plain dict, it seems -- see
|
||||
# test_configutil.py in test_create_client_config
|
||||
introducer = config.get("introducer", None)
|
||||
if introducer is not None:
|
||||
write_introducer(
|
||||
FilePath(config["basedir"]),
|
||||
"default",
|
||||
introducer,
|
||||
)
|
||||
|
||||
c.write("[client]\n")
|
||||
c.write("# Which services should this client connect to?\n")
|
||||
introducer = config.get("introducer", None) or ""
|
||||
c.write("introducer.furl = %s\n" % introducer)
|
||||
c.write("helper.furl =\n")
|
||||
c.write("#stats_gatherer.furl =\n")
|
||||
c.write("\n")
|
||||
@ -437,8 +449,11 @@ def create_node(config):
|
||||
|
||||
print("Node created in %s" % quote_local_unicode_path(basedir), file=out)
|
||||
tahoe_cfg = quote_local_unicode_path(os.path.join(basedir, "tahoe.cfg"))
|
||||
introducers_yaml = quote_local_unicode_path(
|
||||
os.path.join(basedir, "private", "introducers.yaml"),
|
||||
)
|
||||
if not config.get("introducer", ""):
|
||||
print(" Please set [client]introducer.furl= in %s!" % tahoe_cfg, file=out)
|
||||
print(" Please add introducers to %s!" % (introducers_yaml,), file=out)
|
||||
print(" The node cannot connect to a grid without it.", file=out)
|
||||
if not config.get("nickname", ""):
|
||||
print(" Please set [node]nickname= in %s" % tahoe_cfg, file=out)
|
||||
|
@ -1,16 +1,15 @@
|
||||
from __future__ import print_function
|
||||
|
||||
import json
|
||||
from os.path import join
|
||||
|
||||
from twisted.python import usage
|
||||
from twisted.internet import defer, reactor
|
||||
|
||||
from wormhole import wormhole
|
||||
|
||||
from allmydata.util import configutil
|
||||
from allmydata.util.encodingutil import argv_to_abspath
|
||||
from allmydata.scripts.common import get_default_nodedir, get_introducer_furl
|
||||
from allmydata.node import read_config
|
||||
|
||||
|
||||
class InviteOptions(usage.Options):
|
||||
@ -77,7 +76,7 @@ def invite(options):
|
||||
basedir = argv_to_abspath(options.parent['node-directory'])
|
||||
else:
|
||||
basedir = get_default_nodedir()
|
||||
config = configutil.get_config(join(basedir, 'tahoe.cfg'))
|
||||
config = read_config(basedir, u"")
|
||||
out = options.stdout
|
||||
err = options.stderr
|
||||
|
||||
|
@ -8,6 +8,9 @@ if PY2:
|
||||
from future.builtins import str # noqa: F401
|
||||
from six.moves import cStringIO as StringIO
|
||||
|
||||
from twisted.python.filepath import (
|
||||
FilePath,
|
||||
)
|
||||
from twisted.internet import defer, reactor, protocol, error
|
||||
from twisted.application import service, internet
|
||||
from twisted.web import client as tw_client
|
||||
@ -21,6 +24,10 @@ from allmydata.util import fileutil, pollmixin
|
||||
from allmydata.util.fileutil import abspath_expanduser_unicode
|
||||
from allmydata.util.encodingutil import get_filesystem_encoding
|
||||
|
||||
from allmydata.scripts.common import (
|
||||
write_introducer,
|
||||
)
|
||||
|
||||
class StallableHTTPGetterDiscarder(tw_client.HTTPPageGetter, object):
|
||||
full_speed_ahead = False
|
||||
_bytes_so_far = 0
|
||||
@ -180,16 +187,18 @@ class SystemFramework(pollmixin.PollMixin):
|
||||
self.introducer_furl = self.introducer.introducer_url
|
||||
|
||||
def make_nodes(self):
|
||||
root = FilePath(self.testdir)
|
||||
self.nodes = []
|
||||
for i in range(self.numnodes):
|
||||
nodedir = os.path.join(self.testdir, "node%d" % i)
|
||||
os.mkdir(nodedir)
|
||||
f = open(os.path.join(nodedir, "tahoe.cfg"), "w")
|
||||
f.write("[client]\n"
|
||||
"introducer.furl = %s\n"
|
||||
"shares.happy = 1\n"
|
||||
"[storage]\n"
|
||||
% (self.introducer_furl,))
|
||||
nodedir = root.child("node%d" % (i,))
|
||||
private = nodedir.child("private")
|
||||
private.makedirs()
|
||||
write_introducer(nodedir, "default", self.introducer_url)
|
||||
config = (
|
||||
"[client]\n"
|
||||
"shares.happy = 1\n"
|
||||
"[storage]\n"
|
||||
)
|
||||
# the only tests for which we want the internal nodes to actually
|
||||
# retain shares are the ones where somebody's going to download
|
||||
# them.
|
||||
@ -200,13 +209,13 @@ class SystemFramework(pollmixin.PollMixin):
|
||||
# for these tests, we tell the storage servers to pretend to
|
||||
# accept shares, but really just throw them out, since we're
|
||||
# only testing upload and not download.
|
||||
f.write("debug_discard = true\n")
|
||||
config += "debug_discard = true\n"
|
||||
if self.mode in ("receive",):
|
||||
# for this mode, the client-under-test gets all the shares,
|
||||
# so our internal nodes can refuse requests
|
||||
f.write("readonly = true\n")
|
||||
f.close()
|
||||
c = client.Client(basedir=nodedir)
|
||||
config += "readonly = true\n"
|
||||
nodedir.child("tahoe.cfg").setContent(config)
|
||||
c = client.Client(basedir=nodedir.path)
|
||||
c.setServiceParent(self)
|
||||
self.nodes.append(c)
|
||||
# the peers will start running, eventually they will connect to each
|
||||
@ -235,16 +244,16 @@ this file are ignored.
|
||||
quiet = StringIO()
|
||||
create_node.create_node({'basedir': clientdir}, out=quiet)
|
||||
log.msg("DONE MAKING CLIENT")
|
||||
write_introducer(clientdir, "default", self.introducer_furl)
|
||||
# now replace tahoe.cfg
|
||||
# set webport=0 and then ask the node what port it picked.
|
||||
f = open(os.path.join(clientdir, "tahoe.cfg"), "w")
|
||||
f.write("[node]\n"
|
||||
"web.port = tcp:0:interface=127.0.0.1\n"
|
||||
"[client]\n"
|
||||
"introducer.furl = %s\n"
|
||||
"shares.happy = 1\n"
|
||||
"[storage]\n"
|
||||
% (self.introducer_furl,))
|
||||
)
|
||||
|
||||
if self.mode in ("upload-self", "receive"):
|
||||
# accept and store shares, to trigger the memory consumption bugs
|
||||
|
@ -8,7 +8,9 @@ from twisted.internet import defer
|
||||
from ..common_util import run_cli
|
||||
from ..no_network import GridTestMixin
|
||||
from .common import CLITestMixin
|
||||
|
||||
from ...client import (
|
||||
read_config,
|
||||
)
|
||||
|
||||
class _FakeWormhole(object):
|
||||
|
||||
@ -81,9 +83,19 @@ class Join(GridTestMixin, CLITestMixin, unittest.TestCase):
|
||||
)
|
||||
|
||||
self.assertEqual(0, rc)
|
||||
|
||||
config = read_config(node_dir, u"")
|
||||
self.assertIn(
|
||||
"pb://foo",
|
||||
set(
|
||||
furl
|
||||
for (furl, cache)
|
||||
in config.get_introducer_configuration().values()
|
||||
),
|
||||
)
|
||||
|
||||
with open(join(node_dir, 'tahoe.cfg'), 'r') as f:
|
||||
config = f.read()
|
||||
self.assertIn("pb://foo", config)
|
||||
self.assertIn(u"somethinghopefullyunique", config)
|
||||
|
||||
@defer.inlineCallbacks
|
||||
|
@ -81,6 +81,9 @@ from allmydata.client import (
|
||||
config_from_string,
|
||||
create_client_from_config,
|
||||
)
|
||||
from allmydata.scripts.common import (
|
||||
write_introducer,
|
||||
)
|
||||
|
||||
from ..crypto import (
|
||||
ed25519,
|
||||
@ -221,8 +224,8 @@ class UseNode(object):
|
||||
"""
|
||||
plugin_config = attr.ib()
|
||||
storage_plugin = attr.ib()
|
||||
basedir = attr.ib()
|
||||
introducer_furl = attr.ib()
|
||||
basedir = attr.ib(validator=attr.validators.instance_of(FilePath))
|
||||
introducer_furl = attr.ib(validator=attr.validators.instance_of(bytes))
|
||||
node_config = attr.ib(default=attr.Factory(dict))
|
||||
|
||||
config = attr.ib(default=None)
|
||||
@ -246,6 +249,11 @@ class UseNode(object):
|
||||
config=format_config_items(self.plugin_config),
|
||||
)
|
||||
|
||||
write_introducer(
|
||||
self.basedir,
|
||||
"default",
|
||||
self.introducer_furl,
|
||||
)
|
||||
self.config = config_from_string(
|
||||
self.basedir.asTextMode().path,
|
||||
"tub.port",
|
||||
@ -254,11 +262,9 @@ class UseNode(object):
|
||||
{node_config}
|
||||
|
||||
[client]
|
||||
introducer.furl = {furl}
|
||||
storage.plugins = {storage_plugin}
|
||||
{plugin_config_section}
|
||||
""".format(
|
||||
furl=self.introducer_furl,
|
||||
storage_plugin=self.storage_plugin,
|
||||
node_config=format_config_items(self.node_config),
|
||||
plugin_config_section=plugin_config_section,
|
||||
|
src/allmydata/test/data/openssh-rsa-2048.pub.txt (new file, 1 line)
@@ -0,0 +1 @@
|
||||
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDx5JfaPwE2wfXIQcmlGte9EPAbrTmHPGOF/PuZ71XPa3mZTHMQQuc959gmLxupmcc5o4jYe8VTwT6bbNl6YM+HmCvL3XVH0BqdM2lpKCTB/WzSAyFUv8gSjQVXekRm9wF69tZkPrudqutTLhqXU5ESiUzfhU+CxHQW+kAf10Yd9R68V1f8jkuWjEoeVfCltj7O5fRlpouoTXn83MUAXB3J/wDjpjnjp2PxvXL2x5aCHtzd1WCGEmtWbHZvRA1a0EE233zfXNHg4xLd3ycUqAxoRlCcC230itUBXtr4qgDMzRdsL+HGWrcJ+4yezlQj+l8mc7vi5shNT7HDRfvi/rE7 exarkun@baryon
|
src/allmydata/test/data/openssh-rsa-2048.txt (new file, 27 lines)
@@ -0,0 +1,27 @@
|
||||
-----BEGIN OPENSSH PRIVATE KEY-----
|
||||
b3BlbnNzaC1rZXktdjEAAAAABG5vbmUAAAAEbm9uZQAAAAAAAAABAAABFwAAAAdzc2gtcn
|
||||
NhAAAAAwEAAQAAAQEA8eSX2j8BNsH1yEHJpRrXvRDwG605hzxjhfz7me9Vz2t5mUxzEELn
|
||||
PefYJi8bqZnHOaOI2HvFU8E+m2zZemDPh5gry911R9AanTNpaSgkwf1s0gMhVL/IEo0FV3
|
||||
pEZvcBevbWZD67narrUy4al1OREolM34VPgsR0FvpAH9dGHfUevFdX/I5LloxKHlXwpbY+
|
||||
zuX0ZaaLqE15/NzFAFwdyf8A46Y546dj8b1y9seWgh7c3dVghhJrVmx2b0QNWtBBNt9831
|
||||
zR4OMS3d8nFKgMaEZQnAtt9IrVAV7a+KoAzM0XbC/hxlq3CfuMns5UI/pfJnO74ubITU+x
|
||||
w0X74v6xOwAAA8gG6fYoBun2KAAAAAdzc2gtcnNhAAABAQDx5JfaPwE2wfXIQcmlGte9EP
|
||||
AbrTmHPGOF/PuZ71XPa3mZTHMQQuc959gmLxupmcc5o4jYe8VTwT6bbNl6YM+HmCvL3XVH
|
||||
0BqdM2lpKCTB/WzSAyFUv8gSjQVXekRm9wF69tZkPrudqutTLhqXU5ESiUzfhU+CxHQW+k
|
||||
Af10Yd9R68V1f8jkuWjEoeVfCltj7O5fRlpouoTXn83MUAXB3J/wDjpjnjp2PxvXL2x5aC
|
||||
Htzd1WCGEmtWbHZvRA1a0EE233zfXNHg4xLd3ycUqAxoRlCcC230itUBXtr4qgDMzRdsL+
|
||||
HGWrcJ+4yezlQj+l8mc7vi5shNT7HDRfvi/rE7AAAAAwEAAQAAAQBc8ukC/RjbULbAJ79z
|
||||
SRhDV2HcULj9ZVAc6XRI13XSyUqlhIHmar7uw8sECTAJAMVUOanY/d56a5RCJxZ+dvrn8K
|
||||
pLoSJy4N2JMHs95CYTwOzy2i8RoMwhjLzTu3DTW/DerkD9rjlrwYTBpsKjCYKCa+31KgW+
|
||||
ivzM44aGdbNEyO+yHaxdcyEr3OLcRMppgZmwTieFnG053lCP5XyYRQmZ1a78G6WOzpOgbO
|
||||
2N6Z1sbEqTMVd3oxFZAbmqA8kE4jLJzRcso/SSK5NDs22JzMfxByJQSlitWzDDvHdWpQpy
|
||||
8C6Eu7+48ataLI68VOOXuDWDy9Dck0ev89u7Z4vNLWBhAAAAgAndOZZ0C179Um6sn6gmfM
|
||||
0ttXEaSIqYNGRhkoYqn9vvw03bOMbSnqdEJiwFhbE/rWv7PypB5MeY7tRoCyBMWsUYj0pA
|
||||
HKSl68diLr5g5EOIRGAWu8e//7T2HgZKOo+VaG1IXgmb7PUoAJ6Tzsmb4jdnYfg+BP/TDd
|
||||
e9yCcoiT2fAAAAgQD6T7Kr6ECg0ME8vt/ixsjKdA2zS9SIHyjCMXbdMv1Ok1hkr5rRWbbZ
|
||||
jm79fF+a8pOQUg30Qw2JUx7II50akt2xL6zesGDDUcOHD2GE/B6Ftji53G3fwWZCqeQ5sD
|
||||
YP25qAWlrqDBGJvF+hkEdlceS8etYJ3XWXjNIYwfR7frQvkQAAAIEA92Pq3FWH63TS3Lqe
|
||||
mQjhfNV75tU0AwENG+xlI1g0nQb7Qsdbm6rIg6XqewUfw03Q+/AqPvwG/1mbyVF7jRZ+qw
|
||||
cl69yM70c9qY74GHjIIOOcC8Kgv29LQrm/VqVp0Lesn5RA8SIiLcMfyYBTEX8V9VY99Zkd
|
||||
v6WwRr4XK1bPRgsAAAAOZXhhcmt1bkBiYXJ5b24BAgMEBQ==
|
||||
-----END OPENSSH PRIVATE KEY-----
|
src/allmydata/test/strategies.py (new file, 111 lines)
@@ -0,0 +1,111 @@
|
||||
"""
|
||||
Hypothesis strategies use for testing Tahoe-LAFS.
|
||||
"""
|
||||
|
||||
from hypothesis.strategies import (
|
||||
one_of,
|
||||
builds,
|
||||
binary,
|
||||
)
|
||||
|
||||
from ..uri import (
|
||||
WriteableSSKFileURI,
|
||||
WriteableMDMFFileURI,
|
||||
DirectoryURI,
|
||||
MDMFDirectoryURI,
|
||||
)
|
||||
|
||||
def write_capabilities():
|
||||
"""
|
||||
Build ``IURI`` providers representing all kinds of write capabilities.
|
||||
"""
|
||||
return one_of([
|
||||
ssk_capabilities(),
|
||||
mdmf_capabilities(),
|
||||
dir2_capabilities(),
|
||||
dir2_mdmf_capabilities(),
|
||||
])
|
||||
|
||||
|
||||
def ssk_capabilities():
|
||||
"""
|
||||
Build ``WriteableSSKFileURI`` instances.
|
||||
"""
|
||||
return builds(
|
||||
WriteableSSKFileURI,
|
||||
ssk_writekeys(),
|
||||
ssk_fingerprints(),
|
||||
)
|
||||
|
||||
|
||||
def _writekeys(size=16):
|
||||
"""
|
||||
Build ``bytes`` representing write keys.
|
||||
"""
|
||||
return binary(min_size=size, max_size=size)
|
||||
|
||||
|
||||
def ssk_writekeys():
|
||||
"""
|
||||
Build ``bytes`` representing SSK write keys.
|
||||
"""
|
||||
return _writekeys()
|
||||
|
||||
|
||||
def _fingerprints(size=32):
|
||||
"""
|
||||
Build ``bytes`` representing fingerprints.
|
||||
"""
|
||||
return binary(min_size=size, max_size=size)
|
||||
|
||||
|
||||
def ssk_fingerprints():
|
||||
"""
|
||||
Build ``bytes`` representing SSK fingerprints.
|
||||
"""
|
||||
return _fingerprints()
|
||||
|
||||
|
||||
def mdmf_capabilities():
|
||||
"""
|
||||
Build ``WriteableMDMFFileURI`` instances.
|
||||
"""
|
||||
return builds(
|
||||
WriteableMDMFFileURI,
|
||||
mdmf_writekeys(),
|
||||
mdmf_fingerprints(),
|
||||
)
|
||||
|
||||
|
||||
def mdmf_writekeys():
|
||||
"""
|
||||
Build ``bytes`` representing MDMF write keys.
|
||||
"""
|
||||
return _writekeys()
|
||||
|
||||
|
||||
def mdmf_fingerprints():
|
||||
"""
|
||||
Build ``bytes`` representing MDMF fingerprints.
|
||||
"""
|
||||
return _fingerprints()
|
||||
|
||||
|
||||
def dir2_capabilities():
|
||||
"""
|
||||
Build ``DirectoryURI`` instances.
|
||||
"""
|
||||
return builds(
|
||||
DirectoryURI,
|
||||
ssk_capabilities(),
|
||||
)
|
||||
|
||||
|
||||
def dir2_mdmf_capabilities():
|
||||
"""
|
||||
Build ``MDMFDirectoryURI`` instances.
|
||||
"""
|
||||
return builds(
|
||||
MDMFDirectoryURI,
|
||||
mdmf_capabilities(),
|
||||
)
|
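# Illustrative usage sketch, not part of this patch: the strategies above are
# driven with Hypothesis's ``given`` decorator.  The property below is an
# example, not an existing test.
from hypothesis import given

from allmydata import uri
from allmydata.test.strategies import write_capabilities

@given(cap=write_capabilities())
def test_write_capability_round_trips(cap):
    # Any generated write capability should survive a to_string/from_string
    # round trip unchanged.
    assert uri.from_string(cap.to_string()).to_string() == cap.to_string()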
@ -1,5 +1,4 @@
|
||||
import os, sys
|
||||
import mock
|
||||
from functools import (
|
||||
partial,
|
||||
)
|
||||
@ -12,6 +11,15 @@ from fixtures import (
|
||||
Fixture,
|
||||
TempDir,
|
||||
)
|
||||
|
||||
from hypothesis import (
|
||||
given,
|
||||
)
|
||||
from hypothesis.strategies import (
|
||||
sampled_from,
|
||||
booleans,
|
||||
)
|
||||
|
||||
from eliot.testing import (
|
||||
capture_logging,
|
||||
assertHasAction,
|
||||
@ -39,6 +47,9 @@ from testtools.twistedsupport import (
|
||||
import allmydata
|
||||
import allmydata.util.log
|
||||
|
||||
from allmydata.nodemaker import (
|
||||
NodeMaker,
|
||||
)
|
||||
from allmydata.node import OldConfigError, UnescapedHashError, create_node_dir
|
||||
from allmydata.frontends.auth import NeedRootcapLookupScheme
|
||||
from allmydata import client
|
||||
@ -55,11 +66,15 @@ from allmydata.util import (
|
||||
from allmydata.util.fileutil import abspath_expanduser_unicode
|
||||
from allmydata.interfaces import IFilesystemNode, IFileNode, \
|
||||
IImmutableFileNode, IMutableFileNode, IDirectoryNode
|
||||
from allmydata.scripts.common import (
|
||||
write_introducer,
|
||||
)
|
||||
from foolscap.api import flushEventualQueue
|
||||
import allmydata.test.common_util as testutil
|
||||
from .common import (
|
||||
EMPTY_CLIENT_CONFIG,
|
||||
SyncTestCase,
|
||||
AsyncBrokenTestCase,
|
||||
UseTestPlugins,
|
||||
MemoryIntroducerClient,
|
||||
get_published_announcements,
|
||||
@ -69,16 +84,13 @@ from .matchers import (
|
||||
matches_storage_announcement,
|
||||
matches_furl,
|
||||
)
|
||||
from .strategies import (
|
||||
write_capabilities,
|
||||
)
|
||||
|
||||
SOME_FURL = b"pb://abcde@nowhere/fake"
|
||||
|
||||
BASECONFIG = ("[client]\n"
|
||||
"introducer.furl = \n"
|
||||
)
|
||||
|
||||
BASECONFIG_I = ("[client]\n"
|
||||
"introducer.furl = %s\n"
|
||||
)
|
||||
BASECONFIG = "[client]\n"
|
||||
|
||||
class Basic(testutil.ReallyEqualMixin, unittest.TestCase):
|
||||
def test_loadable(self):
|
||||
@ -120,14 +132,14 @@ class Basic(testutil.ReallyEqualMixin, unittest.TestCase):
|
||||
|
||||
def write_config(s):
|
||||
config = ("[client]\n"
|
||||
"introducer.furl = %s\n" % s)
|
||||
"helper.furl = %s\n" % s)
|
||||
fileutil.write(os.path.join(basedir, "tahoe.cfg"), config)
|
||||
|
||||
for s in should_fail:
|
||||
write_config(s)
|
||||
with self.assertRaises(UnescapedHashError) as ctx:
|
||||
yield client.create_client(basedir)
|
||||
self.assertIn("[client]introducer.furl", str(ctx.exception))
|
||||
self.assertIn("[client]helper.furl", str(ctx.exception))
|
||||
|
||||
def test_unreadable_config(self):
|
||||
if sys.platform == "win32":
|
||||
@ -419,19 +431,32 @@ class Basic(testutil.ReallyEqualMixin, unittest.TestCase):
        """
        configuration for sftpd results in it being started
        """
        root = FilePath(self.mktemp())
        root.makedirs()
        accounts = root.child(b"sftp-accounts")
        accounts.touch()

        data = FilePath(__file__).sibling(b"data")
        privkey = data.child(b"openssh-rsa-2048.txt")
        pubkey = data.child(b"openssh-rsa-2048.pub.txt")

        basedir = u"client.Basic.test_ftp_create"
        create_node_dir(basedir, "testing")
        with open(os.path.join(basedir, "tahoe.cfg"), "w") as f:
            f.write(
            f.write((
                '[sftpd]\n'
                'enabled = true\n'
                'accounts.file = foo\n'
                'host_pubkey_file = pubkey\n'
                'host_privkey_file = privkey\n'
            )
        with mock.patch('allmydata.frontends.sftpd.SFTPServer') as p:
            yield client.create_client(basedir)
        self.assertTrue(p.called)
                'accounts.file = {}\n'
                'host_pubkey_file = {}\n'
                'host_privkey_file = {}\n'
            ).format(accounts.path, pubkey.path, privkey.path))

        client_node = yield client.create_client(
            basedir,
        )
        sftp = client_node.getServiceNamed("frontend:sftp")
        self.assertIs(sftp.parent, client_node)


    @defer.inlineCallbacks
    def test_ftp_auth_keyfile(self):
@ -665,12 +690,13 @@ class AnonymousStorage(SyncTestCase):
        """
        If anonymous storage access is enabled then the client announces it.
        """
        basedir = self.id()
        os.makedirs(basedir + b"/private")
        basedir = FilePath(self.id())
        basedir.child("private").makedirs()
        write_introducer(basedir, "someintroducer", SOME_FURL)
        config = client.config_from_string(
            basedir,
            basedir.path,
            "tub.port",
            BASECONFIG_I % (SOME_FURL,) + (
            BASECONFIG + (
                "[storage]\n"
                "enabled = true\n"
                "anonymous = true\n"
@ -684,7 +710,7 @@ class AnonymousStorage(SyncTestCase):
            get_published_announcements(node),
            MatchesListwise([
                matches_storage_announcement(
                    basedir,
                    basedir.path,
                    anonymous=True,
                ),
            ]),
@ -696,12 +722,13 @@ class AnonymousStorage(SyncTestCase):
        If anonymous storage access is disabled then the client does not announce
        it nor does it write a fURL for it to beneath the node directory.
        """
        basedir = self.id()
        os.makedirs(basedir + b"/private")
        basedir = FilePath(self.id())
        basedir.child("private").makedirs()
        write_introducer(basedir, "someintroducer", SOME_FURL)
        config = client.config_from_string(
            basedir,
            basedir.path,
            "tub.port",
            BASECONFIG_I % (SOME_FURL,) + (
            BASECONFIG + (
                "[storage]\n"
                "enabled = true\n"
                "anonymous = false\n"
@ -715,7 +742,7 @@ class AnonymousStorage(SyncTestCase):
            get_published_announcements(node),
            MatchesListwise([
                matches_storage_announcement(
                    basedir,
                    basedir.path,
                    anonymous=False,
                ),
            ]),
@ -733,12 +760,12 @@ class AnonymousStorage(SyncTestCase):
        possible to reach the anonymous storage server via the originally
        published fURL.
        """
        basedir = self.id()
        os.makedirs(basedir + b"/private")
        basedir = FilePath(self.id())
        basedir.child("private").makedirs()
        enabled_config = client.config_from_string(
            basedir,
            basedir.path,
            "tub.port",
            BASECONFIG_I % (SOME_FURL,) + (
            BASECONFIG + (
                "[storage]\n"
                "enabled = true\n"
                "anonymous = true\n"
@ -760,9 +787,9 @@
        )

        disabled_config = client.config_from_string(
            basedir,
            basedir.path,
            "tub.port",
            BASECONFIG_I % (SOME_FURL,) + (
            BASECONFIG + (
                "[storage]\n"
                "enabled = true\n"
                "anonymous = false\n"
@ -782,8 +809,8 @@ class IntroducerClients(unittest.TestCase):

    def test_invalid_introducer_furl(self):
        """
        An introducer.furl of 'None' is invalid and causes
        create_introducer_clients to fail.
        An introducer.furl of 'None' in the deprecated [client]introducer.furl
        field is invalid and causes `create_introducer_clients` to fail.
        """
        cfg = (
            "[client]\n"
@ -948,20 +975,28 @@ class Run(unittest.TestCase, testutil.StallMixin):

    @defer.inlineCallbacks
    def test_loadable(self):
        basedir = "test_client.Run.test_loadable"
        os.mkdir(basedir)
        """
        A configuration consisting only of an introducer can be turned into a
        client node.
        """
        basedir = FilePath("test_client.Run.test_loadable")
        private = basedir.child("private")
        private.makedirs()
        dummy = "pb://wl74cyahejagspqgy4x5ukrvfnevlknt@127.0.0.1:58889/bogus"
        fileutil.write(os.path.join(basedir, "tahoe.cfg"), BASECONFIG_I % dummy)
        fileutil.write(os.path.join(basedir, client._Client.EXIT_TRIGGER_FILE), "")
        yield client.create_client(basedir)
        write_introducer(basedir, "someintroducer", dummy)
        basedir.child("tahoe.cfg").setContent(BASECONFIG)
        basedir.child(client._Client.EXIT_TRIGGER_FILE).touch()
        yield client.create_client(basedir.path)

    @defer.inlineCallbacks
    def test_reloadable(self):
        basedir = "test_client.Run.test_reloadable"
        os.mkdir(basedir)
        basedir = FilePath("test_client.Run.test_reloadable")
        private = basedir.child("private")
        private.makedirs()
        dummy = "pb://wl74cyahejagspqgy4x5ukrvfnevlknt@127.0.0.1:58889/bogus"
        fileutil.write(os.path.join(basedir, "tahoe.cfg"), BASECONFIG_I % dummy)
        c1 = yield client.create_client(basedir)
        write_introducer(basedir, "someintroducer", dummy)
        basedir.child("tahoe.cfg").setContent(BASECONFIG)
        c1 = yield client.create_client(basedir.path)
        c1.setServiceParent(self.sparent)

        # delay to let the service start up completely. I'm not entirely sure
@ -983,11 +1018,102 @@ class Run(unittest.TestCase, testutil.StallMixin):
        # also change _check_exit_trigger to use it instead of a raw
        # reactor.stop, also instrument the shutdown event in an
        # attribute that we can check.)
        c2 = yield client.create_client(basedir)
        c2 = yield client.create_client(basedir.path)
        c2.setServiceParent(self.sparent)
        yield c2.disownServiceParent()

class NodeMaker(testutil.ReallyEqualMixin, unittest.TestCase):
class NodeMakerTests(testutil.ReallyEqualMixin, AsyncBrokenTestCase):

    def _make_node_maker(self, mode, writecap, deep_immutable):
        """
        Create a callable which can create an ``IFilesystemNode`` provider for the
        given cap.

        :param unicode mode: The read/write combination to pass to
            ``NodeMaker.create_from_cap``. If it contains ``u"r"`` then a
            readcap will be passed in. If it contains ``u"w"`` then a
            writecap will be passed in.

        :param IURI writecap: The capability for which to create a node.

        :param bool deep_immutable: Whether to request a "deep immutable" node
            which forces the result to be an immutable ``IFilesystemNode`` (I
            think -exarkun).
        """
        if writecap.is_mutable():
            # It's just not a valid combination to have a mutable alongside
            # deep_immutable = True. It's easier to fix deep_immutable than
            # writecap to clear up this conflict.
            deep_immutable = False

        if "r" in mode:
            readcap = writecap.get_readonly().to_string()
        else:
            readcap = None
        if "w" in mode:
            writecap = writecap.to_string()
        else:
            writecap = None

        nm = NodeMaker(
            storage_broker=None,
            secret_holder=None,
            history=None,
            uploader=None,
            terminator=None,
            default_encoding_parameters={u"k": 1, u"n": 1},
            mutable_file_default=None,
            key_generator=None,
            blacklist=None,
        )
        return partial(
            nm.create_from_cap,
            writecap,
            readcap,
            deep_immutable,
        )

    @given(
        mode=sampled_from(["w", "r", "rw"]),
        writecap=write_capabilities(),
        deep_immutable=booleans(),
    )
    def test_cached_result(self, mode, writecap, deep_immutable):
        """
        ``NodeMaker.create_from_cap`` returns the same object when called with the
        same arguments.
        """
        make_node = self._make_node_maker(mode, writecap, deep_immutable)
        original = make_node()
        additional = make_node()

        self.assertThat(
            original,
            Is(additional),
        )

    @given(
        mode=sampled_from(["w", "r", "rw"]),
        writecap=write_capabilities(),
        deep_immutable=booleans(),
    )
    def test_cache_expired(self, mode, writecap, deep_immutable):
        """
        After the node object returned by an earlier call to
        ``NodeMaker.create_from_cap`` has been garbage collected, a new call
        to ``NodeMaker.create_from_cap`` returns a node object, maybe even a
        new one although we can't really prove it.
        """
        make_node = self._make_node_maker(mode, writecap, deep_immutable)
        make_node()
        additional = make_node()
        self.assertThat(
            additional,
            AfterPreprocessing(
                lambda node: node.get_readonly_uri(),
                Equals(writecap.get_readonly().to_string()),
            ),
        )

    @defer.inlineCallbacks
    def test_maker(self):
@ -1122,12 +1248,18 @@ class StorageAnnouncementTests(SyncTestCase):
    """
    def setUp(self):
        super(StorageAnnouncementTests, self).setUp()
        self.basedir = self.useFixture(TempDir()).path
        create_node_dir(self.basedir, u"")
        self.basedir = FilePath(self.useFixture(TempDir()).path)
        create_node_dir(self.basedir.path, u"")
        # Write an introducer configuration or we can't observer
        # announcements.
        write_introducer(self.basedir, "someintroducer", SOME_FURL)


    def get_config(self, storage_enabled, more_storage="", more_sections=""):
        return """
[client]
# Empty

[node]
tub.location = tcp:192.0.2.0:1234

@ -1135,9 +1267,6 @@ tub.location = tcp:192.0.2.0:1234
enabled = {storage_enabled}
{more_storage}

[client]
introducer.furl = pb://abcde@nowhere/fake

{more_sections}
""".format(
    storage_enabled=storage_enabled,
@ -1151,7 +1280,7 @@ introducer.furl = pb://abcde@nowhere/fake
        No storage announcement is published if storage is not enabled.
        """
        config = client.config_from_string(
            self.basedir,
            self.basedir.path,
            "tub.port",
            self.get_config(storage_enabled=False),
        )
@ -1173,7 +1302,7 @@ introducer.furl = pb://abcde@nowhere/fake
        storage is enabled.
        """
        config = client.config_from_string(
            self.basedir,
            self.basedir.path,
            "tub.port",
            self.get_config(storage_enabled=True),
        )
@ -1190,7 +1319,7 @@ introducer.furl = pb://abcde@nowhere/fake
                # Match the following list (of one element) ...
                MatchesListwise([
                    # The only element in the list ...
                    matches_storage_announcement(self.basedir),
                    matches_storage_announcement(self.basedir.path),
                ]),
            )),
        )
@ -1205,7 +1334,7 @@ introducer.furl = pb://abcde@nowhere/fake

        value = u"thing"
        config = client.config_from_string(
            self.basedir,
            self.basedir.path,
            "tub.port",
            self.get_config(
                storage_enabled=True,
@ -1225,7 +1354,7 @@ introducer.furl = pb://abcde@nowhere/fake
                get_published_announcements,
                MatchesListwise([
                    matches_storage_announcement(
                        self.basedir,
                        self.basedir.path,
                        options=[
                            matches_dummy_announcement(
                                u"tahoe-lafs-dummy-v1",
@ -1246,7 +1375,7 @@ introducer.furl = pb://abcde@nowhere/fake
        self.useFixture(UseTestPlugins())

        config = client.config_from_string(
            self.basedir,
            self.basedir.path,
            "tub.port",
            self.get_config(
                storage_enabled=True,
@ -1268,7 +1397,7 @@ introducer.furl = pb://abcde@nowhere/fake
                get_published_announcements,
                MatchesListwise([
                    matches_storage_announcement(
                        self.basedir,
                        self.basedir.path,
                        options=[
                            matches_dummy_announcement(
                                u"tahoe-lafs-dummy-v1",
@ -1294,7 +1423,7 @@ introducer.furl = pb://abcde@nowhere/fake
        self.useFixture(UseTestPlugins())

        config = client.config_from_string(
            self.basedir,
            self.basedir.path,
            "tub.port",
            self.get_config(
                storage_enabled=True,
@ -1330,7 +1459,7 @@ introducer.furl = pb://abcde@nowhere/fake
        self.useFixture(UseTestPlugins())

        config = client.config_from_string(
            self.basedir,
            self.basedir.path,
            "tub.port",
            self.get_config(
                storage_enabled=True,
@ -1346,7 +1475,7 @@ introducer.furl = pb://abcde@nowhere/fake
                get_published_announcements,
                MatchesListwise([
                    matches_storage_announcement(
                        self.basedir,
                        self.basedir.path,
                        options=[
                            matches_dummy_announcement(
                                u"tahoe-lafs-dummy-v1",
@ -1368,7 +1497,7 @@ introducer.furl = pb://abcde@nowhere/fake
        self.useFixture(UseTestPlugins())

        config = client.config_from_string(
            self.basedir,
            self.basedir.path,
            "tub.port",
            self.get_config(
                storage_enabled=True,
@ -1395,7 +1524,7 @@ introducer.furl = pb://abcde@nowhere/fake
        available on the system.
        """
        config = client.config_from_string(
            self.basedir,
            self.basedir.path,
            "tub.port",
            self.get_config(
                storage_enabled=True,
@ -1,3 +1,16 @@
"""
Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from future.utils import PY2
if PY2:
    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401

from six import ensure_binary, ensure_text

import os, re, itertools
from base64 import b32decode
@ -39,8 +52,11 @@ from allmydata.util import pollmixin, idlib, fileutil, yamlutil
from allmydata.util.iputil import (
    listenOnUnused,
)
from allmydata.scripts.common import (
    write_introducer,
)
import allmydata.test.common_util as testutil
from allmydata.test.common import (
from .common import (
    SyncTestCase,
    AsyncTestCase,
    AsyncBrokenTestCase,
@ -100,7 +116,7 @@ class Node(testutil.SignalMixin, testutil.ReallyEqualMixin, AsyncTestCase):
        q1 = yield create_introducer(basedir)
        del q1
        # new nodes create unguessable furls in private/introducer.furl
        ifurl = fileutil.read(private_fn)
        ifurl = fileutil.read(private_fn, mode="r")
        self.failUnless(ifurl)
        ifurl = ifurl.strip()
        self.failIf(ifurl.endswith("/introducer"), ifurl)
@ -120,7 +136,7 @@ class Node(testutil.SignalMixin, testutil.ReallyEqualMixin, AsyncTestCase):
        q2 = yield create_introducer(basedir)
        del q2
        self.failIf(os.path.exists(public_fn))
        ifurl2 = fileutil.read(private_fn)
        ifurl2 = fileutil.read(private_fn, mode="r")
        self.failUnless(ifurl2)
        self.failUnlessEqual(ifurl2.strip(), guessable)

@ -169,7 +185,7 @@ def fakeseq():

    seqnum_counter = itertools.count(1)
    def realseq():
        return seqnum_counter.next(), str(os.randint(1,100000))
        return next(seqnum_counter), str(os.randint(1,100000))

def make_ann(furl):
    ann = { "anonymous-storage-FURL": furl,
@ -200,13 +216,13 @@ class Client(AsyncTestCase):
        def _received(key_s, ann):
            announcements.append( (key_s, ann) )
        ic1.subscribe_to("storage", _received)
        furl1 = "pb://62ubehyunnyhzs7r6vdonnm2hpi52w6y@127.0.0.1:36106/gydnp"
        furl1a = "pb://62ubehyunnyhzs7r6vdonnm2hpi52w6y@127.0.0.1:7777/gydnp"
        furl2 = "pb://ttwwooyunnyhzs7r6vdonnm2hpi52w6y@127.0.0.1:36106/ttwwoo"
        furl1 = b"pb://62ubehyunnyhzs7r6vdonnm2hpi52w6y@127.0.0.1:36106/gydnp"
        furl1a = b"pb://62ubehyunnyhzs7r6vdonnm2hpi52w6y@127.0.0.1:7777/gydnp"
        furl2 = b"pb://ttwwooyunnyhzs7r6vdonnm2hpi52w6y@127.0.0.1:36106/ttwwoo"

        private_key, public_key = ed25519.create_signing_keypair()
        public_key_str = ed25519.string_from_verifying_key(public_key)
        pubkey_s = remove_prefix(public_key_str, "pub-")
        pubkey_s = remove_prefix(public_key_str, b"pub-")

        # ann1: ic1, furl1
        # ann1a: ic1, furl1a (same SturdyRef, different connection hints)
@ -226,7 +242,7 @@ class Client(AsyncTestCase):
            self.failUnlessEqual(len(announcements), 1)
            key_s,ann = announcements[0]
            self.failUnlessEqual(key_s, pubkey_s)
            self.failUnlessEqual(ann["anonymous-storage-FURL"], furl1)
            self.failUnlessEqual(ensure_binary(ann["anonymous-storage-FURL"]), furl1)
            self.failUnlessEqual(ann["my-version"], "ver23")
        d.addCallback(_then1)

@ -260,7 +276,7 @@ class Client(AsyncTestCase):
            self.failUnlessEqual(len(announcements), 2)
            key_s,ann = announcements[-1]
            self.failUnlessEqual(key_s, pubkey_s)
            self.failUnlessEqual(ann["anonymous-storage-FURL"], furl1)
            self.failUnlessEqual(ensure_binary(ann["anonymous-storage-FURL"]), furl1)
            self.failUnlessEqual(ann["my-version"], "ver24")
        d.addCallback(_then3)

@ -272,7 +288,7 @@ class Client(AsyncTestCase):
            self.failUnlessEqual(len(announcements), 3)
            key_s,ann = announcements[-1]
            self.failUnlessEqual(key_s, pubkey_s)
            self.failUnlessEqual(ann["anonymous-storage-FURL"], furl1a)
            self.failUnlessEqual(ensure_binary(ann["anonymous-storage-FURL"]), furl1a)
            self.failUnlessEqual(ann["my-version"], "ver23")
        d.addCallback(_then4)

@ -288,7 +304,7 @@ class Client(AsyncTestCase):
            self.failUnlessEqual(len(announcements2), 1)
            key_s,ann = announcements2[-1]
            self.failUnlessEqual(key_s, pubkey_s)
            self.failUnlessEqual(ann["anonymous-storage-FURL"], furl1a)
            self.failUnlessEqual(ensure_binary(ann["anonymous-storage-FURL"]), furl1a)
            self.failUnlessEqual(ann["my-version"], "ver23")
        d.addCallback(_then5)
        return d
@ -300,7 +316,7 @@ class Server(AsyncTestCase):
                               "introducer.furl", u"my_nickname",
                               "ver23", "oldest_version", realseq,
                               FilePath(self.mktemp()))
        furl1 = "pb://62ubehyunnyhzs7r6vdonnm2hpi52w6y@127.0.0.1:36106/gydnp"
        furl1 = b"pb://62ubehyunnyhzs7r6vdonnm2hpi52w6y@127.0.0.1:36106/gydnp"

        private_key, _ = ed25519.create_signing_keypair()

@ -398,7 +414,7 @@ class Queue(SystemTestMixin, AsyncTestCase):
        c = IntroducerClient(tub2, ifurl,
                             u"nickname", "version", "oldest", fakeseq,
                             FilePath(self.mktemp()))
        furl1 = "pb://onug64tu@127.0.0.1:123/short" # base32("short")
        furl1 = b"pb://onug64tu@127.0.0.1:123/short" # base32("short")
        private_key, _ = ed25519.create_signing_keypair()

        d = introducer.disownServiceParent()
@ -420,7 +436,7 @@ class Queue(SystemTestMixin, AsyncTestCase):
        def _done(ign):
            v = introducer.get_announcements()[0]
            furl = v.announcement["anonymous-storage-FURL"]
            self.failUnlessEqual(furl, furl1)
            self.failUnlessEqual(ensure_binary(furl), furl1)
        d.addCallback(_done)

        # now let the ack get back
|
||||
iff = os.path.join(self.basedir, "introducer.furl")
|
||||
tub = self.central_tub
|
||||
ifurl = self.central_tub.registerReference(introducer, furlFile=iff)
|
||||
self.introducer_furl = ifurl
|
||||
self.introducer_furl = ifurl.encode("utf-8")
|
||||
|
||||
# we have 5 clients who publish themselves as storage servers, and a
|
||||
# sixth which does which not. All 6 clients subscriber to hear about
|
||||
@ -487,7 +503,7 @@ class SystemTest(SystemTestMixin, AsyncTestCase):
|
||||
subscribing_clients.append(c)
|
||||
expected_announcements[i] += 1 # all expect a 'storage' announcement
|
||||
|
||||
node_furl = tub.registerReference(Referenceable())
|
||||
node_furl = tub.registerReference(Referenceable()).encode("utf-8")
|
||||
private_key, public_key = ed25519.create_signing_keypair()
|
||||
public_key_str = ed25519.string_from_verifying_key(public_key)
|
||||
privkeys[i] = private_key
|
||||
@ -504,7 +520,7 @@ class SystemTest(SystemTestMixin, AsyncTestCase):
|
||||
|
||||
if i == 2:
|
||||
# also publish something that nobody cares about
|
||||
boring_furl = tub.registerReference(Referenceable())
|
||||
boring_furl = tub.registerReference(Referenceable()).encode("utf-8")
|
||||
c.publish("boring", make_ann(boring_furl), private_key)
|
||||
|
||||
c.setServiceParent(self.parent)
|
||||
@ -581,7 +597,7 @@ class SystemTest(SystemTestMixin, AsyncTestCase):
|
||||
serverid0 = printable_serverids[0]
|
||||
ann = anns[serverid0]
|
||||
nick = ann["nickname"]
|
||||
self.failUnlessEqual(type(nick), unicode)
|
||||
self.assertIsInstance(nick, str)
|
||||
self.failUnlessEqual(nick, NICKNAME % "0")
|
||||
for c in publishing_clients:
|
||||
cdc = c._debug_counts
|
||||
@ -592,7 +608,7 @@ class SystemTest(SystemTestMixin, AsyncTestCase):
|
||||
self.failUnlessEqual(cdc["outbound_message"], expected)
|
||||
# now check the web status, make sure it renders without error
|
||||
ir = introweb.IntroducerRoot(self.parent)
|
||||
self.parent.nodeid = "NODEID"
|
||||
self.parent.nodeid = b"NODEID"
|
||||
log.msg("_check1 done")
|
||||
return flattenString(None, ir._create_element())
|
||||
d.addCallback(_check1)
|
||||
@ -602,7 +618,7 @@ class SystemTest(SystemTestMixin, AsyncTestCase):
|
||||
self.assertIn(NICKNAME % "0", text) # a v2 client
|
||||
self.assertIn(NICKNAME % "1", text) # another v2 client
|
||||
for i in range(NUM_STORAGE):
|
||||
self.assertIn(printable_serverids[i], text,
|
||||
self.assertIn(ensure_text(printable_serverids[i]), text,
|
||||
(i,printable_serverids[i],text))
|
||||
# make sure there isn't a double-base32ed string too
|
||||
self.assertNotIn(idlib.nodeid_b2a(printable_serverids[i]), text,
|
||||
@ -642,7 +658,7 @@ class SystemTest(SystemTestMixin, AsyncTestCase):
|
||||
self.create_tub(self.central_portnum)
|
||||
newfurl = self.central_tub.registerReference(self.the_introducer,
|
||||
furlFile=iff)
|
||||
assert newfurl == self.introducer_furl
|
||||
assert ensure_binary(newfurl) == self.introducer_furl
|
||||
d.addCallback(_restart_introducer_tub)
|
||||
|
||||
d.addCallback(_wait_for_connected)
|
||||
@ -694,7 +710,7 @@ class SystemTest(SystemTestMixin, AsyncTestCase):
|
||||
self.the_introducer = introducer
|
||||
newfurl = self.central_tub.registerReference(self.the_introducer,
|
||||
furlFile=iff)
|
||||
assert newfurl == self.introducer_furl
|
||||
assert ensure_binary(newfurl) == self.introducer_furl
|
||||
d.addCallback(_restart_introducer)
|
||||
|
||||
d.addCallback(_wait_for_connected)
|
||||
@ -740,7 +756,7 @@ class ClientInfo(AsyncTestCase):
|
||||
client_v2 = IntroducerClient(tub, introducer_furl, NICKNAME % u"v2",
|
||||
"my_version", "oldest",
|
||||
fakeseq, FilePath(self.mktemp()))
|
||||
#furl1 = "pb://62ubehyunnyhzs7r6vdonnm2hpi52w6y@127.0.0.1:0/swissnum"
|
||||
#furl1 = b"pb://62ubehyunnyhzs7r6vdonnm2hpi52w6y@127.0.0.1:0/swissnum"
|
||||
#ann_s = make_ann_t(client_v2, furl1, None, 10)
|
||||
#introducer.remote_publish_v2(ann_s, Referenceable())
|
||||
subscriber = FakeRemoteReference()
|
||||
@ -761,10 +777,10 @@ class Announcements(AsyncTestCase):
|
||||
client_v2 = IntroducerClient(tub, introducer_furl, u"nick-v2",
|
||||
"my_version", "oldest",
|
||||
fakeseq, FilePath(self.mktemp()))
|
||||
furl1 = "pb://62ubehyunnyhzs7r6vdonnm2hpi52w6y@127.0.0.1:0/swissnum"
|
||||
furl1 = b"pb://62ubehyunnyhzs7r6vdonnm2hpi52w6y@127.0.0.1:0/swissnum"
|
||||
|
||||
private_key, public_key = ed25519.create_signing_keypair()
|
||||
public_key_str = remove_prefix(ed25519.string_from_verifying_key(public_key), "pub-")
|
||||
public_key_str = remove_prefix(ed25519.string_from_verifying_key(public_key), b"pub-")
|
||||
|
||||
ann_t0 = make_ann_t(client_v2, furl1, private_key, 10)
|
||||
canary0 = Referenceable()
|
||||
@ -776,7 +792,7 @@ class Announcements(AsyncTestCase):
|
||||
self.failUnlessEqual(a[0].nickname, u"nick-v2")
|
||||
self.failUnlessEqual(a[0].service_name, "storage")
|
||||
self.failUnlessEqual(a[0].version, "my_version")
|
||||
self.failUnlessEqual(a[0].announcement["anonymous-storage-FURL"], furl1)
|
||||
self.failUnlessEqual(ensure_binary(a[0].announcement["anonymous-storage-FURL"]), furl1)
|
||||
|
||||
def _load_cache(self, cache_filepath):
|
||||
with cache_filepath.open() as f:
|
||||
@ -784,26 +800,32 @@ class Announcements(AsyncTestCase):
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def test_client_cache(self):
|
||||
basedir = "introducer/ClientSeqnums/test_client_cache_1"
|
||||
fileutil.make_dirs(basedir)
|
||||
cache_filepath = FilePath(os.path.join(basedir, "private",
|
||||
"introducer_default_cache.yaml"))
|
||||
"""
|
||||
Announcements received by an introducer client are written to that
|
||||
introducer client's cache file.
|
||||
"""
|
||||
basedir = FilePath("introducer/ClientSeqnums/test_client_cache_1")
|
||||
private = basedir.child("private")
|
||||
private.makedirs()
|
||||
write_introducer(basedir, "default", "nope")
|
||||
cache_filepath = basedir.descendant([
|
||||
"private",
|
||||
"introducer_default_cache.yaml",
|
||||
])
|
||||
|
||||
# if storage is enabled, the Client will publish its storage server
|
||||
# during startup (although the announcement will wait in a queue
|
||||
# until the introducer connection is established). To avoid getting
|
||||
# confused by this, disable storage.
|
||||
with open(os.path.join(basedir, "tahoe.cfg"), "w") as f:
|
||||
f.write("[client]\n")
|
||||
f.write("introducer.furl = nope\n")
|
||||
f.write("[storage]\n")
|
||||
f.write("enabled = false\n")
|
||||
with basedir.child("tahoe.cfg").open("w") as f:
|
||||
f.write(b"[storage]\n")
|
||||
f.write(b"enabled = false\n")
|
||||
|
||||
c = yield create_client(basedir)
|
||||
c = yield create_client(basedir.path)
|
||||
ic = c.introducer_clients[0]
|
||||
private_key, public_key = ed25519.create_signing_keypair()
|
||||
public_key_str = remove_prefix(ed25519.string_from_verifying_key(public_key), "pub-")
|
||||
furl1 = "pb://onug64tu@127.0.0.1:123/short" # base32("short")
|
||||
public_key_str = remove_prefix(ed25519.string_from_verifying_key(public_key), b"pub-")
|
||||
furl1 = b"pb://onug64tu@127.0.0.1:123/short" # base32("short")
|
||||
ann_t = make_ann_t(ic, furl1, private_key, 1)
|
||||
|
||||
ic.got_announcements([ann_t])
|
||||
@ -812,29 +834,29 @@ class Announcements(AsyncTestCase):
|
||||
# check the cache for the announcement
|
||||
announcements = self._load_cache(cache_filepath)
|
||||
self.failUnlessEqual(len(announcements), 1)
|
||||
self.failUnlessEqual(announcements[0]['key_s'], public_key_str)
|
||||
self.failUnlessEqual(ensure_binary(announcements[0]['key_s']), public_key_str)
|
||||
ann = announcements[0]["ann"]
|
||||
self.failUnlessEqual(ann["anonymous-storage-FURL"], furl1)
|
||||
self.failUnlessEqual(ensure_binary(ann["anonymous-storage-FURL"]), furl1)
|
||||
self.failUnlessEqual(ann["seqnum"], 1)
|
||||
|
||||
# a new announcement that replaces the first should replace the
|
||||
# cached entry, not duplicate it
|
||||
furl2 = furl1 + "er"
|
||||
furl2 = furl1 + b"er"
|
||||
ann_t2 = make_ann_t(ic, furl2, private_key, 2)
|
||||
ic.got_announcements([ann_t2])
|
||||
yield flushEventualQueue()
|
||||
announcements = self._load_cache(cache_filepath)
|
||||
self.failUnlessEqual(len(announcements), 1)
|
||||
self.failUnlessEqual(announcements[0]['key_s'], public_key_str)
|
||||
self.failUnlessEqual(ensure_binary(announcements[0]['key_s']), public_key_str)
|
||||
ann = announcements[0]["ann"]
|
||||
self.failUnlessEqual(ann["anonymous-storage-FURL"], furl2)
|
||||
self.failUnlessEqual(ensure_binary(ann["anonymous-storage-FURL"]), furl2)
|
||||
self.failUnlessEqual(ann["seqnum"], 2)
|
||||
|
||||
# but a third announcement with a different key should add to the
|
||||
# cache
|
||||
private_key2, public_key2 = ed25519.create_signing_keypair()
|
||||
public_key_str2 = remove_prefix(ed25519.string_from_verifying_key(public_key2), "pub-")
|
||||
furl3 = "pb://onug64tu@127.0.0.1:456/short"
|
||||
public_key_str2 = remove_prefix(ed25519.string_from_verifying_key(public_key2), b"pub-")
|
||||
furl3 = b"pb://onug64tu@127.0.0.1:456/short"
|
||||
ann_t3 = make_ann_t(ic, furl3, private_key2, 1)
|
||||
ic.got_announcements([ann_t3])
|
||||
yield flushEventualQueue()
|
||||
@ -842,9 +864,9 @@ class Announcements(AsyncTestCase):
|
||||
announcements = self._load_cache(cache_filepath)
|
||||
self.failUnlessEqual(len(announcements), 2)
|
||||
self.failUnlessEqual(set([public_key_str, public_key_str2]),
|
||||
set([a["key_s"] for a in announcements]))
|
||||
set([ensure_binary(a["key_s"]) for a in announcements]))
|
||||
self.failUnlessEqual(set([furl2, furl3]),
|
||||
set([a["ann"]["anonymous-storage-FURL"]
|
||||
set([ensure_binary(a["ann"]["anonymous-storage-FURL"])
|
||||
for a in announcements]))
|
||||
|
||||
# test loading
|
||||
@ -860,12 +882,12 @@ class Announcements(AsyncTestCase):
|
||||
yield flushEventualQueue()
|
||||
|
||||
self.failUnless(public_key_str in announcements)
|
||||
self.failUnlessEqual(announcements[public_key_str]["anonymous-storage-FURL"],
|
||||
self.failUnlessEqual(ensure_binary(announcements[public_key_str]["anonymous-storage-FURL"]),
|
||||
furl2)
|
||||
self.failUnlessEqual(announcements[public_key_str2]["anonymous-storage-FURL"],
|
||||
self.failUnlessEqual(ensure_binary(announcements[public_key_str2]["anonymous-storage-FURL"]),
|
||||
furl3)
|
||||
|
||||
c2 = yield create_client(basedir)
|
||||
c2 = yield create_client(basedir.path)
|
||||
c2.introducer_clients[0]._load_announcements()
|
||||
yield flushEventualQueue()
|
||||
self.assertEqual(c2.storage_broker.get_all_serverids(),
|
||||
@ -875,27 +897,24 @@ class ClientSeqnums(AsyncBrokenTestCase):

    @defer.inlineCallbacks
    def test_client(self):
        basedir = "introducer/ClientSeqnums/test_client"
        fileutil.make_dirs(basedir)
        basedir = FilePath("introducer/ClientSeqnums/test_client")
        private = basedir.child("private")
        private.makedirs()
        write_introducer(basedir, "default", "nope")
        # if storage is enabled, the Client will publish its storage server
        # during startup (although the announcement will wait in a queue
        # until the introducer connection is established). To avoid getting
        # confused by this, disable storage.
        f = open(os.path.join(basedir, "tahoe.cfg"), "w")
        f.write("[client]\n")
        f.write("introducer.furl = nope\n")
        f.write("[storage]\n")
        f.write("enabled = false\n")
        f.close()
        with basedir.child("tahoe.cfg").open("w") as f:
            f.write(b"[storage]\n")
            f.write(b"enabled = false\n")

        c = yield create_client(basedir)
        c = yield create_client(basedir.path)
        ic = c.introducer_clients[0]
        outbound = ic._outbound_announcements
        published = ic._published_announcements
        def read_seqnum():
            f = open(os.path.join(basedir, "announcement-seqnum"))
            seqnum = f.read().strip()
            f.close()
            seqnum = basedir.child("announcement-seqnum").getContent()
            return int(seqnum)

        ic.publish("sA", {"key": "value1"}, c._node_private_key)
@ -903,7 +922,9 @@ class ClientSeqnums(AsyncBrokenTestCase):
        self.failUnless("sA" in outbound)
        self.failUnlessEqual(outbound["sA"]["seqnum"], 1)
        nonce1 = outbound["sA"]["nonce"]
        self.failUnless(isinstance(nonce1, str))
        self.failUnless(isinstance(nonce1, bytes))
        # Make nonce unicode, to match JSON:
        outbound["sA"]["nonce"] = str(nonce1, "utf-8")
        self.failUnlessEqual(json.loads(published["sA"][0]),
                             outbound["sA"])
        # [1] is the signature, [2] is the pubkey
@ -917,8 +938,11 @@ class ClientSeqnums(AsyncBrokenTestCase):
        self.failUnless("sA" in outbound)
        self.failUnlessEqual(outbound["sA"]["seqnum"], 2)
        nonce2 = outbound["sA"]["nonce"]
        self.failUnless(isinstance(nonce2, str))
        self.failUnless(isinstance(nonce2, bytes))
        self.failIfEqual(nonce1, nonce2)
        # Make nonce unicode, to match JSON:
        outbound["sA"]["nonce"] = str(nonce2, "utf-8")
        outbound["sB"]["nonce"] = str(outbound["sB"]["nonce"], "utf-8")
        self.failUnlessEqual(json.loads(published["sA"][0]),
                             outbound["sA"])
        self.failUnlessEqual(json.loads(published["sB"][0]),
@ -975,11 +999,11 @@ class DecodeFurl(SyncTestCase):
    def test_decode(self):
        # make sure we have a working base64.b32decode. The one in
        # python2.4.[01] was broken.
        furl = 'pb://t5g7egomnnktbpydbuijt6zgtmw4oqi5@127.0.0.1:51857/hfzv36i'
        m = re.match(r'pb://(\w+)@', furl)
        furl = b'pb://t5g7egomnnktbpydbuijt6zgtmw4oqi5@127.0.0.1:51857/hfzv36i'
        m = re.match(br'pb://(\w+)@', furl)
        assert m
        nodeid = b32decode(m.group(1).upper())
        self.failUnlessEqual(nodeid, "\x9fM\xf2\x19\xcckU0\xbf\x03\r\x10\x99\xfb&\x9b-\xc7A\x1d")
        self.failUnlessEqual(nodeid, b"\x9fM\xf2\x19\xcckU0\xbf\x03\r\x10\x99\xfb&\x9b-\xc7A\x1d")

class Signatures(SyncTestCase):

@ -991,11 +1015,11 @@ class Signatures(SyncTestCase):
        (msg, sig, key) = ann_t
        self.failUnlessEqual(type(msg), type("".encode("utf-8"))) # bytes
        self.failUnlessEqual(json.loads(msg.decode("utf-8")), ann)
        self.failUnless(sig.startswith("v0-"))
        self.failUnless(key.startswith("v0-"))
        self.failUnless(sig.startswith(b"v0-"))
        self.failUnless(key.startswith(b"v0-"))
        (ann2,key2) = unsign_from_foolscap(ann_t)
        self.failUnlessEqual(ann2, ann)
        self.failUnlessEqual("pub-" + key2, public_key_str)
        self.failUnlessEqual(b"pub-" + key2, public_key_str)

        # not signed
        self.failUnlessRaises(UnknownKeyError,
@ -1010,16 +1034,16 @@ class Signatures(SyncTestCase):

        # unrecognized signatures
        self.failUnlessRaises(UnknownKeyError,
                              unsign_from_foolscap, (bad_msg, "v999-sig", key))
                              unsign_from_foolscap, (bad_msg, b"v999-sig", key))
        self.failUnlessRaises(UnknownKeyError,
                              unsign_from_foolscap, (bad_msg, sig, "v999-key"))
                              unsign_from_foolscap, (bad_msg, sig, b"v999-key"))

    def test_unsigned_announcement(self):
        ed25519.verifying_key_from_string(b"pub-v0-wodst6ly4f7i7akt2nxizsmmy2rlmer6apltl56zctn67wfyu5tq")
        mock_tub = Mock()
        ic = IntroducerClient(
            mock_tub,
            u"pb://",
            b"pb://",
            u"fake_nick",
            "0.0.0",
            "1.2.3",
@ -1028,7 +1052,7 @@ class Signatures(SyncTestCase):
        )
        self.assertEqual(0, ic._debug_counts["inbound_announcement"])
        ic.got_announcements([
            ("message", "v0-aaaaaaa", "v0-wodst6ly4f7i7akt2nxizsmmy2rlmer6apltl56zctn67wfyu5tq")
            (b"message", b"v0-aaaaaaa", b"v0-wodst6ly4f7i7akt2nxizsmmy2rlmer6apltl56zctn67wfyu5tq")
        ])
        # we should have rejected this announcement due to a bad signature
        self.assertEqual(0, ic._debug_counts["inbound_announcement"])
@ -24,9 +24,6 @@ class MultiIntroTests(unittest.TestCase):
        config = {'hide-ip':False, 'listen': 'tcp',
                  'port': None, 'location': None, 'hostname': 'example.net'}
        write_node_config(c, config)
        fake_furl = "furl1"
        c.write("[client]\n")
        c.write("introducer.furl = %s\n" % fake_furl)
        c.write("[storage]\n")
        c.write("enabled = false\n")
        c.close()
@ -36,8 +33,10 @@ class MultiIntroTests(unittest.TestCase):

    @defer.inlineCallbacks
    def test_introducer_count(self):
        """ Ensure that the Client creates same number of introducer clients
        as found in "basedir/private/introducers" config file. """
        """
        If there are two introducers configured in ``introducers.yaml`` then
        ``Client`` creates two introducer clients.
        """
        connections = {
            'introducers': {
                u'intro1':{ 'furl': 'furl1' },
@ -50,25 +49,13 @@ class MultiIntroTests(unittest.TestCase):
        ic_count = len(myclient.introducer_clients)

        # assertions
        self.failUnlessEqual(ic_count, 3)

    @defer.inlineCallbacks
    def test_introducer_count_commented(self):
        """ Ensure that the Client creates same number of introducer clients
        as found in "basedir/private/introducers" config file when there is one
        commented."""
        self.yaml_path.setContent(INTRODUCERS_CFG_FURLS_COMMENTED)
        # get a client and count of introducer_clients
        myclient = yield create_client(self.basedir)
        ic_count = len(myclient.introducer_clients)

        # assertions
        self.failUnlessEqual(ic_count, 2)
        self.failUnlessEqual(ic_count, len(connections["introducers"]))

    @defer.inlineCallbacks
    def test_read_introducer_furl_from_tahoecfg(self):
        """ Ensure that the Client reads the introducer.furl config item from
        the tahoe.cfg file. """
        """
        The deprecated [client]introducer.furl item is still read and respected.
        """
        # create a custom tahoe.cfg
        c = open(os.path.join(self.basedir, "tahoe.cfg"), "w")
        config = {'hide-ip':False, 'listen': 'tcp',
@ -87,20 +74,42 @@ class MultiIntroTests(unittest.TestCase):

        # assertions
        self.failUnlessEqual(fake_furl, tahoe_cfg_furl)
        self.assertEqual(
            list(
                warning["message"]
                for warning
                in self.flushWarnings()
                if warning["category"] is DeprecationWarning
            ),
            ["tahoe.cfg [client]introducer.furl is deprecated; "
             "use private/introducers.yaml instead."],
        )

    @defer.inlineCallbacks
    def test_reject_default_in_yaml(self):
        connections = {'introducers': {
            u'default': { 'furl': 'furl1' },
        }}
        """
        If an introducer is configured in tahoe.cfg with the deprecated
        [client]introducer.furl then a "default" introducer in
        introducers.yaml is rejected.
        """
        connections = {
            'introducers': {
                u'default': { 'furl': 'furl1' },
            },
        }
        self.yaml_path.setContent(yamlutil.safe_dump(connections))
        FilePath(self.basedir).child("tahoe.cfg").setContent(
            "[client]\n"
            "introducer.furl = furl1\n"
        )

        with self.assertRaises(ValueError) as ctx:
            yield create_client(self.basedir)

        self.assertEquals(
            str(ctx.exception),
            "'default' introducer furl cannot be specified in introducers.yaml; please "
            "fix impossible configuration.",
            "'default' introducer furl cannot be specified in tahoe.cfg and introducers.yaml; "
            "please fix impossible configuration.",
        )

SIMPLE_YAML = """
@ -126,8 +135,6 @@ class NoDefault(unittest.TestCase):
        config = {'hide-ip':False, 'listen': 'tcp',
                  'port': None, 'location': None, 'hostname': 'example.net'}
        write_node_config(c, config)
        c.write("[client]\n")
        c.write("# introducer.furl =\n") # omit default
        c.write("[storage]\n")
        c.write("enabled = false\n")
        c.close()
@ -684,8 +684,6 @@ class TestMissingPorts(unittest.TestCase):


BASE_CONFIG = """
[client]
introducer.furl = empty
[tor]
enabled = false
[i2p]
@ -458,7 +458,7 @@ class StoragePluginWebPresence(AsyncTestCase):
            },
            storage_plugin=self.storage_plugin,
            basedir=self.basedir,
            introducer_furl=ensure_text(SOME_FURL),
            introducer_furl=SOME_FURL,
        ))
        self.node = yield self.node_fixture.create_node()
        self.webish = self.node.getServiceNamed(WebishServer.name)
@ -33,6 +33,9 @@ from allmydata.mutable.publish import MutableData

from foolscap.api import DeadReferenceError, fireEventually, flushEventualQueue
from twisted.python.failure import Failure
from twisted.python.filepath import (
    FilePath,
)

from .common import (
    TEST_RSA_KEY_SIZE,
@ -47,6 +50,9 @@ from .web.common import (
from allmydata.test.test_runner import RunBinTahoeMixin
from . import common_util as testutil
from .common_util import run_cli
from ..scripts.common import (
    write_introducer,
)

LARGE_DATA = """
This is some data to publish to the remote grid.., which needs to be large
@ -806,8 +812,6 @@ class SystemTestMixin(pollmixin.PollMixin, testutil.StallMixin):

        except1 = set(range(self.numclients)) - {1}
        feature_matrix = {
            # client 1 uses private/introducers.yaml, not tahoe.cfg
            ("client", "introducer.furl"): except1,
            ("client", "nickname"): except1,

            # client 1 has to auto-assign an address.
@ -833,7 +837,6 @@ class SystemTestMixin(pollmixin.PollMixin, testutil.StallMixin):
        setnode = partial(setconf, config, which, "node")
        sethelper = partial(setconf, config, which, "helper")

        setclient("introducer.furl", self.introducer_furl)
        setnode("nickname", u"client %d \N{BLACK SMILING FACE}" % (which,))

        if self.stats_gatherer_furl:
@ -850,13 +853,11 @@ class SystemTestMixin(pollmixin.PollMixin, testutil.StallMixin):

        sethelper("enabled", "True")

        if which == 1:
            # clients[1] uses private/introducers.yaml, not tahoe.cfg
            iyaml = ("introducers:\n"
                     " petname2:\n"
                     "  furl: %s\n") % self.introducer_furl
            iyaml_fn = os.path.join(basedir, "private", "introducers.yaml")
            fileutil.write(iyaml_fn, iyaml)
        iyaml = ("introducers:\n"
                 " petname2:\n"
                 "  furl: %s\n") % self.introducer_furl
        iyaml_fn = os.path.join(basedir, "private", "introducers.yaml")
        fileutil.write(iyaml_fn, iyaml)

        return _render_config(config)

@ -905,16 +906,21 @@ class SystemTestMixin(pollmixin.PollMixin, testutil.StallMixin):
        # usually this node is *not* parented to our self.sparent, so we can
        # shut it down separately from the rest, to exercise the
        # connection-lost code
        basedir = self.getdir("client%d" % client_num)
        if not os.path.isdir(basedir):
            fileutil.make_dirs(basedir)
        basedir = FilePath(self.getdir("client%d" % client_num))
        basedir.makedirs()
        config = "[client]\n"
        config += "introducer.furl = %s\n" % self.introducer_furl
        if helper_furl:
            config += "helper.furl = %s\n" % helper_furl
        fileutil.write(os.path.join(basedir, 'tahoe.cfg'), config)
        basedir.child("tahoe.cfg").setContent(config)
        private = basedir.child("private")
        private.makedirs()
        write_introducer(
            basedir,
            "default",
            self.introducer_furl,
        )

        c = yield client.create_client(basedir)
        c = yield client.create_client(basedir.path)
        self.clients.append(c)
        c.set_default_mutable_keysize(TEST_RSA_KEY_SIZE)
        self.numclients += 1
@ -141,6 +141,7 @@ PORTED_TEST_MODULES = [
    "allmydata.test.test_helper",
    "allmydata.test.test_humanreadable",
    "allmydata.test.test_immutable",
    "allmydata.test.test_introducer",
    "allmydata.test.test_iputil",
    "allmydata.test.test_log",
    "allmydata.test.test_monitor",
@ -104,7 +104,7 @@ class IntroducerRootElement(Element):
            if ad.service_name not in services:
                services[ad.service_name] = 0
            services[ad.service_name] += 1
        service_names = services.keys()
        service_names = list(services.keys())
        service_names.sort()
        return u", ".join(u"{}: {}".format(service_name, services[service_name])
                          for service_name in service_names)
14
tox.ini
@ -95,12 +95,16 @@ setenv =
    # .decode(getattr(sys.stdout, "encoding", "utf8"))
    # `TypeError: decode() argument 1 must be string, not None`
    PYTHONIOENCODING=utf_8

    # If no positional arguments are given, try to run the checks on the
    # entire codebase, including various pieces of supporting code.
    DEFAULT_FILES=src integration static misc setup.py
commands =
    flake8 src integration static misc setup.py
    python misc/coding_tools/check-umids.py src
    python misc/coding_tools/check-debugging.py
    python misc/coding_tools/find-trailing-spaces.py -r src static misc setup.py
    python misc/coding_tools/check-miscaptures.py
    flake8 {posargs:{env:DEFAULT_FILES}}
    python misc/coding_tools/check-umids.py {posargs:{env:DEFAULT_FILES}}
    python misc/coding_tools/check-debugging.py {posargs:{env:DEFAULT_FILES}}
    python misc/coding_tools/find-trailing-spaces.py -r {posargs:{env:DEFAULT_FILES}}
    python misc/coding_tools/check-miscaptures.py {posargs:{env:DEFAULT_FILES}}

    # If towncrier.check fails, you forgot to add a towncrier news
    # fragment explaining the change in this branch. Create one at