Merge branch 'master' into 3031-replace-pycryptopp
Commit dd55accec8
@@ -23,4 +23,4 @@ RUN yum install --assumeyes \
 # *update* this checkout on each job run, saving us more time per-job.
 COPY . ${BUILD_SRC_ROOT}
 
-RUN "${BUILD_SRC_ROOT}"/.circleci/prepare-image.sh "${WHEELHOUSE_PATH}" "${VIRTUALENV_PATH}" "${BUILD_SRC_ROOT}"
+RUN "${BUILD_SRC_ROOT}"/.circleci/prepare-image.sh "${WHEELHOUSE_PATH}" "${VIRTUALENV_PATH}" "${BUILD_SRC_ROOT}" "python2.7"
@@ -23,7 +23,7 @@ RUN apt-get --quiet update && \
 # *update* this checkout on each job run, saving us more time per-job.
 COPY . ${BUILD_SRC_ROOT}
 
-RUN "${BUILD_SRC_ROOT}"/.circleci/prepare-image.sh "${WHEELHOUSE_PATH}" "${VIRTUALENV_PATH}" "${BUILD_SRC_ROOT}"
+RUN "${BUILD_SRC_ROOT}"/.circleci/prepare-image.sh "${WHEELHOUSE_PATH}" "${VIRTUALENV_PATH}" "${BUILD_SRC_ROOT}" "python2.7"
 
 # Only the integration tests currently need this but it doesn't hurt to always
 # have it present and it's simpler than building a whole extra image just for
@@ -23,4 +23,4 @@ RUN yum install --assumeyes \
 # *update* this checkout on each job run, saving us more time per-job.
 COPY . ${BUILD_SRC_ROOT}
 
-RUN "${BUILD_SRC_ROOT}"/.circleci/prepare-image.sh "${WHEELHOUSE_PATH}" "${VIRTUALENV_PATH}" "${BUILD_SRC_ROOT}"
+RUN "${BUILD_SRC_ROOT}"/.circleci/prepare-image.sh "${WHEELHOUSE_PATH}" "${VIRTUALENV_PATH}" "${BUILD_SRC_ROOT}" "python2.7"
.circleci/Dockerfile.pypy (new file, 23 lines)
@@ -0,0 +1,23 @@
FROM pypy:2.7-7.1.1-jessie

ENV WHEELHOUSE_PATH /tmp/wheelhouse
ENV VIRTUALENV_PATH /tmp/venv
# This will get updated by the CircleCI checkout step.
ENV BUILD_SRC_ROOT /tmp/project

RUN apt-get --quiet update && \
    apt-get --quiet --yes install \
    git \
    lsb-release \
    sudo \
    build-essential \
    libffi-dev \
    libssl-dev \
    libyaml-dev \
    virtualenv

# Get the project source. This is better than it seems. CircleCI will
# *update* this checkout on each job run, saving us more time per-job.
COPY . ${BUILD_SRC_ROOT}

RUN "${BUILD_SRC_ROOT}"/.circleci/prepare-image.sh "${WHEELHOUSE_PATH}" "${VIRTUALENV_PATH}" "${BUILD_SRC_ROOT}" "pypy"
@@ -46,4 +46,4 @@ RUN slackpkg install \
 # *update* this checkout on each job run, saving us more time per-job.
 COPY . ${BUILD_SRC_ROOT}
 
-RUN "${BUILD_SRC_ROOT}"/.circleci/prepare-image.sh "${WHEELHOUSE_PATH}" "${VIRTUALENV_PATH}" "${BUILD_SRC_ROOT}"
+RUN "${BUILD_SRC_ROOT}"/.circleci/prepare-image.sh "${WHEELHOUSE_PATH}" "${VIRTUALENV_PATH}" "${BUILD_SRC_ROOT}" "python2.7"
@@ -26,4 +26,4 @@ RUN apt-get --quiet update && \
 # *update* this checkout on each job run, saving us more time per-job.
 COPY . ${BUILD_SRC_ROOT}
 
-RUN "${BUILD_SRC_ROOT}"/.circleci/prepare-image.sh "${WHEELHOUSE_PATH}" "${VIRTUALENV_PATH}" "${BUILD_SRC_ROOT}"
+RUN "${BUILD_SRC_ROOT}"/.circleci/prepare-image.sh "${WHEELHOUSE_PATH}" "${VIRTUALENV_PATH}" "${BUILD_SRC_ROOT}" "python2.7"
@@ -25,6 +25,9 @@ workflows:
 
       - "slackware-14.2"
 
+      # Test against PyPy 2.7/7.1.1
+      - "pypy2.7-7.1"
+
       # Other assorted tasks and configurations
       - "lint"
       - "deprecations"
@@ -59,6 +62,7 @@ workflows:
       - "build-image-fedora-29"
       - "build-image-centos-7"
       - "build-image-slackware-14.2"
+      - "build-image-pypy-2.7-7.1.1-jessie"
 
 
 jobs:
@@ -85,13 +89,17 @@ jobs:
         user: "nobody"
 
     environment: &UTF_8_ENVIRONMENT
+      # In general, the test suite is not allowed to fail while the job
+      # succeeds. But you can set this to "yes" if you want it to be
+      # otherwise.
+      ALLOWED_FAILURE: "no"
       # Tell Hypothesis which configuration we want it to use.
       TAHOE_LAFS_HYPOTHESIS_PROFILE: "ci"
       # Tell the C runtime things about character encoding (mainly to do with
       # filenames and argv).
       LANG: "en_US.UTF-8"
       # Select a tox environment to run for this job.
-      TAHOE_LAFS_TOX_ENVIRONMENT: "coverage"
+      TAHOE_LAFS_TOX_ENVIRONMENT: "py27-coverage"
       # Additional arguments to pass to tox.
       TAHOE_LAFS_TOX_ARGS: ""
       # The path in which test artifacts will be placed.
@@ -123,6 +131,7 @@ jobs:
             /tmp/project/.circleci/run-tests.sh \
                 "/tmp/venv" \
                 "/tmp/project" \
+                "${ALLOWED_FAILURE}" \
                 "${ARTIFACTS_OUTPUT_PATH}" \
                 "${TAHOE_LAFS_TOX_ENVIRONMENT}" \
                 "${TAHOE_LAFS_TOX_ARGS}"
@@ -157,6 +166,18 @@ jobs:
         user: "nobody"
 
 
+  pypy2.7-7.1:
+    <<: *DEBIAN
+    docker:
+      - image: "tahoelafsci/pypy:2.7-7.1.1-jessie"
+        user: "nobody"
+
+    environment:
+      <<: *UTF_8_ENVIRONMENT
+      TAHOE_LAFS_TOX_ENVIRONMENT: "pypy27-coverage"
+      ALLOWED_FAILURE: "yes"
+
+
   c-locale:
     <<: *DEBIAN
 
@@ -406,3 +427,11 @@ jobs:
     environment:
       DISTRO: "slackware"
       TAG: "14.2"
+
+
+  build-image-pypy-2.7-7.1.1-jessie:
+    <<: *BUILD_IMAGE
+
+    environment:
+      DISTRO: "pypy"
+      TAG: "2.7-7.1.1-jessie"
@@ -13,9 +13,14 @@ shift
 BOOTSTRAP_VENV="$1"
 shift
 
+# The basename of the Python executable (found on PATH) that will be used with
+# this image. This lets us create a virtualenv that uses the correct Python.
+PYTHON="$1"
+shift
+
 # Set up the virtualenv as a non-root user so we can run the test suite as a
 # non-root user. See below.
-virtualenv --python python2.7 "${BOOTSTRAP_VENV}"
+virtualenv --python "${PYTHON}" "${BOOTSTRAP_VENV}"
 
 # For convenience.
 PIP="${BOOTSTRAP_VENV}/bin/pip"
@@ -18,6 +18,11 @@ shift
 PROJECT_ROOT="$1"
 shift
 
+# The basename of the Python executable (found on PATH) that will be used with
+# this image. This lets us create a virtualenv that uses the correct Python.
+PYTHON="$1"
+shift
+
 "${PROJECT_ROOT}"/.circleci/fix-permissions.sh "${WHEELHOUSE_PATH}" "${BOOTSTRAP_VENV}" "${PROJECT_ROOT}"
-sudo --set-home -u nobody "${PROJECT_ROOT}"/.circleci/create-virtualenv.sh "${WHEELHOUSE_PATH}" "${BOOTSTRAP_VENV}"
+sudo --set-home -u nobody "${PROJECT_ROOT}"/.circleci/create-virtualenv.sh "${WHEELHOUSE_PATH}" "${BOOTSTRAP_VENV}" "${PYTHON}"
 sudo --set-home -u nobody "${PROJECT_ROOT}"/.circleci/populate-wheelhouse.sh "${WHEELHOUSE_PATH}" "${BOOTSTRAP_VENV}" "${PROJECT_ROOT}"
@@ -13,6 +13,9 @@ shift
 PROJECT_ROOT="$1"
 shift
 
+ALLOWED_FAILURE="$1"
+shift
+
 ARTIFACTS=$1
 shift
 
@@ -54,14 +57,20 @@ export SUBUNITREPORTER_OUTPUT_PATH="${SUBUNIT2}"
 export TAHOE_LAFS_TRIAL_ARGS="--reporter=subunitv2-file --rterrors"
 export PIP_NO_INDEX="1"
 
+if [ "${ALLOWED_FAILURE}" = "yes" ]; then
+    alternative="true"
+else
+    alternative="false"
+fi
+
 ${BOOTSTRAP_VENV}/bin/tox \
     -c ${PROJECT_ROOT}/tox.ini \
     --workdir /tmp/tahoe-lafs.tox \
     -e "${TAHOE_LAFS_TOX_ENVIRONMENT}" \
-    ${TAHOE_LAFS_TOX_ARGS}
+    ${TAHOE_LAFS_TOX_ARGS} || "${alternative}"
 
 if [ -n "${ARTIFACTS}" ]; then
     # Create a junitxml results area.
     mkdir -p "$(dirname "${JUNITXML}")"
-    ${BOOTSTRAP_VENV}/bin/subunit2junitxml < "${SUBUNIT2}" > "${JUNITXML}"
+    ${BOOTSTRAP_VENV}/bin/subunit2junitxml < "${SUBUNIT2}" > "${JUNITXML}" || "${alternative}"
 fi
newsfragments/2479.other (new file, 1 line)
@@ -0,0 +1 @@
Tahoe-LAFS now tests for PyPy compatibility on CI.

newsfragments/3041.feature (new file, 1 line)
@@ -0,0 +1 @@
End-to-end in-memory tests for websocket features

newsfragments/3051.feature (new file, 1 line)
@@ -0,0 +1 @@
Static storage server "announcements" in ``private/servers.yaml`` are now individually logged and ignored if they cannot be interpreted.

newsfragments/3111.minor (new empty file)
@@ -107,6 +107,9 @@ install_requires = [
 
     # A great way to define types of values.
     "attrs >= 18.2.0",
+
+    # WebSocket library for twisted and asyncio
+    "autobahn >= 19.5.2",
 ]
 
 # Includes some indirect dependencies, but does not include allmydata.
@@ -205,7 +205,7 @@ def create_client(basedir=u".", _client_factory=None):
             _client_factory=_client_factory,
         )
     except Exception:
-        return Failure()
+        return defer.fail()
 
 
 def create_client_from_config(config, _client_factory=None):
@@ -259,7 +259,7 @@ def create_client_from_config(config, _client_factory=None):
         storage_broker.setServiceParent(client)
         return defer.succeed(client)
     except Exception:
-        return Failure()
+        return defer.fail()
 
 
 def _sequencer(config):
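Note on the change above: defer.fail(), called with no argument inside an except block, wraps the exception currently being handled in a Deferred that has already errbacked, so create_client() keeps its Deferred-returning contract even on failure. A minimal standalone sketch (not part of the patch; the function name is illustrative):

from twisted.internet import defer

def might_fail():
    try:
        raise ValueError("bad configuration")
    except Exception:
        # With no argument, defer.fail() builds a Failure from the exception
        # currently being handled and returns a Deferred that has already
        # fired with it, so callers can attach errbacks as usual.
        return defer.fail()

d = might_fail()
d.addErrback(lambda f: f.trap(ValueError))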
@@ -105,6 +105,8 @@ the twistd-options.
 class MyTwistdConfig(twistd.ServerOptions):
     subCommands = [("DaemonizeTahoeNode", None, usage.Options, "node")]
 
+    stderr = sys.stderr
+
 
 class DaemonizeTheRealService(Service, HookMixin):
     """
@@ -122,6 +124,7 @@ class DaemonizeTheRealService(Service, HookMixin):
         self._hooks = {
             "running": None,
         }
+        self.stderr = options.parent.stderr
 
     def startService(self):
 
@@ -143,7 +146,7 @@ class DaemonizeTheRealService(Service, HookMixin):
 
             def handle_config_error(fail):
                 fail.trap(UnknownConfigError)
-                sys.stderr.write("\nConfiguration error:\n{}\n\n".format(fail.value))
+                self.stderr.write("\nConfiguration error:\n{}\n\n".format(fail.value))
                 reactor.stop()
                 return
 
@@ -204,6 +207,8 @@ def daemonize(config):
     twistd_args.append("DaemonizeTahoeNode") # point at our DaemonizeTahoeNodePlugin
 
     twistd_config = MyTwistdConfig()
+    twistd_config.stdout = out
+    twistd_config.stderr = err
    try:
        twistd_config.parseOptions(twistd_args)
    except usage.error as ue:
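Note on the changes above: configuration errors now go to a stderr object supplied by the caller (daemonize() copies its own err stream onto MyTwistdConfig, and DaemonizeTheRealService picks it up through options.parent). A rough sketch of how a caller or test can capture that output, using only names that appear in this diff; the StringIO stand-in is illustrative:

from StringIO import StringIO  # this code base is Python 2

from allmydata.scripts.tahoe_daemonize import MyTwistdConfig

config = MyTwistdConfig()
config.stderr = StringIO()  # daemonize() normally assigns its own err stream here
config.parseOptions(["DaemonizeTahoeNode"])
options = config.subOptions  # options.parent is config, so a service built from
                             # these options writes configuration errors into
                             # the StringIO instead of the real sys.stderr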
@@ -34,7 +34,9 @@ import attr
 from zope.interface import implementer
 from twisted.internet import defer
 from twisted.application import service
-
+from eliot import (
+    log_call,
+)
 from foolscap.api import eventually
 from allmydata.interfaces import (
     IStorageBroker,
@@ -90,18 +92,36 @@ class StorageFarmBroker(service.MultiService):
         self._threshold_listeners = [] # tuples of (threshold, Deferred)
         self._connected_high_water_mark = 0
 
+    @log_call(action_type=u"storage-client:broker:set-static-servers")
     def set_static_servers(self, servers):
-        for (server_id, server) in servers.items():
-            assert isinstance(server_id, unicode) # from YAML
-            server_id = server_id.encode("ascii")
-            self._static_server_ids.add(server_id)
-            handler_overrides = server.get("connections", {})
-            s = NativeStorageServer(server_id, server["ann"],
-                                    self._tub_maker, handler_overrides)
-            s.on_status_changed(lambda _: self._got_connection())
-            s.setServiceParent(self)
-            self.servers[server_id] = s
-            s.start_connecting(self._trigger_connections)
+        # Sorting the items gives us a deterministic processing order. This
+        # doesn't really matter but it makes the logging behavior more
+        # predictable and easier to test (and at least one test does depend on
+        # this sorted order).
+        for (server_id, server) in sorted(servers.items()):
+            try:
+                storage_server = self._make_storage_server(server_id, server)
+            except Exception:
+                pass
+            else:
+                self._static_server_ids.add(server_id)
+                self.servers[server_id] = storage_server
+                storage_server.setServiceParent(self)
+                storage_server.start_connecting(self._trigger_connections)
+
+    @log_call(
+        action_type=u"storage-client:broker:make-storage-server",
+        include_args=["server_id"],
+        include_result=False,
+    )
+    def _make_storage_server(self, server_id, server):
+        assert isinstance(server_id, unicode) # from YAML
+        server_id = server_id.encode("ascii")
+        handler_overrides = server.get("connections", {})
+        s = NativeStorageServer(server_id, server["ann"],
+                                self._tub_maker, handler_overrides)
+        s.on_status_changed(lambda _: self._got_connection())
+        return s
 
     def when_connected_enough(self, threshold):
         """
@@ -254,6 +274,7 @@ class StubServer(object):
     def get_nickname(self):
         return "?"
 
 
+@implementer(IServer)
 class NativeStorageServer(service.MultiService):
     """I hold information about a storage server that we want to connect to.
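For orientation, the static announcements consumed by set_static_servers() come from private/servers.yaml, whose shape is the one the StaticServers fixture further down writes with yaml.safe_dump. A small sketch with placeholder values (the server id and FURL below are not real):

from yaml import safe_dump

# Placeholder values, mirroring the structure used by the tests below.
server_id = u"v0-exampleexampleexampleexampleexampleexampleexampleexam"
announcement = {
    u"nickname": u"some-storage-server",
    u"anonymous-storage-FURL": u"pb://tubid@tcp:storage.example:100/swissnum",
}

# private/servers.yaml maps "storage" -> <server id> -> {"ann": <announcement>}.
print(safe_dump({
    u"storage": {
        server_id: {
            u"ann": announcement,
        },
    },
}))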
@@ -8,9 +8,14 @@ from allmydata.scripts import runner
 from allmydata.scripts.tahoe_daemonize import identify_node_type
 from allmydata.scripts.tahoe_daemonize import DaemonizeTahoeNodePlugin
 from allmydata.scripts.tahoe_daemonize import DaemonizeOptions
+from allmydata.scripts.tahoe_daemonize import MyTwistdConfig
 
 
 class Util(unittest.TestCase):
+    def setUp(self):
+        self.twistd_options = MyTwistdConfig()
+        self.twistd_options.parseOptions(["DaemonizeTahoeNode"])
+        self.options = self.twistd_options.subOptions
 
     def test_node_type_nothing(self):
         tmpdir = self.mktemp()
@@ -39,7 +44,7 @@ class Util(unittest.TestCase):
                 fn()
             r.stop = lambda: None
             r.callWhenRunning = call
-            service = plug.makeService(None)
+            service = plug.makeService(self.options)
             service.parent = Mock()
             service.startService()
 
@@ -55,7 +60,7 @@ class Util(unittest.TestCase):
             d.addErrback(lambda _: None) # ignore the error we'll trigger
             r.callWhenRunning = call
             r.stop = 'foo'
-            service = plug.makeService(None)
+            service = plug.makeService(self.options)
             service.parent = Mock()
             # we'll raise ValueError because there's no key-generator
             # .. BUT we do this in an async function called via
@@ -80,7 +85,7 @@ class Util(unittest.TestCase):
                 fn()
             r.stop = lambda: None
             r.callWhenRunning = call
-            service = plug.makeService(None)
+            service = plug.makeService(self.options)
             service.parent = Mock()
             with self.assertRaises(ValueError) as ctx:
                 service.startService()
@@ -1,5 +1,4 @@
 import os
-import sys
 import shutil
 import subprocess
 from os.path import join
@@ -255,10 +254,9 @@ class RunTests(unittest.TestCase):
         ])
 
         i, o, e = StringIO(), StringIO(), StringIO()
-        with patch.object(sys, 'stdout', o), patch.object(sys, 'stderr', e):
-            runner.dispatch(config, i, o, e)
+        runner.dispatch(config, i, o, e)
 
-        output = o.getvalue()
+        output = e.getvalue()
         # should print out the collected logs and an error-code
         self.assertIn(
             "invalid section",
@@ -1,9 +1,30 @@
 import os, sys
 import mock
 import twisted
+from yaml import (
+    safe_dump,
+)
+from fixtures import (
+    Fixture,
+    TempDir,
+)
+from eliot.testing import (
+    capture_logging,
+    assertHasAction,
+)
 from twisted.trial import unittest
 from twisted.application import service
 from twisted.internet import defer
+from twisted.python.filepath import (
+    FilePath,
+)
+from testtools.matchers import (
+    Equals,
+    AfterPreprocessing,
+)
+from testtools.twistedsupport import (
+    succeeded,
+)
 
 import allmydata
 import allmydata.frontends.magic_folder
@@ -20,6 +41,9 @@ from allmydata.interfaces import IFilesystemNode, IFileNode, \
     IImmutableFileNode, IMutableFileNode, IDirectoryNode
 from foolscap.api import flushEventualQueue
 import allmydata.test.common_util as testutil
+from allmydata.test.common import (
+    SyncTestCase,
+)
 
 
 BASECONFIG = ("[client]\n"
@@ -666,6 +690,143 @@ class IntroducerClients(unittest.TestCase):
         )
 
 
+def get_known_server_details(a_client):
+    """
+    Get some details about known storage servers from a client.
+
+    :param _Client a_client: The client to inspect.
+
+    :return: A ``list`` of two-tuples. Each element of the list corresponds
+        to a "known server". The first element of each tuple is a server id.
+        The second is the server's announcement.
+    """
+    return list(
+        (s.get_serverid(), s.get_announcement())
+        for s
+        in a_client.storage_broker.get_known_servers()
+    )
+
+
+class StaticServers(Fixture):
+    """
+    Create a ``servers.yaml`` file.
+    """
+    def __init__(self, basedir, server_details):
+        super(StaticServers, self).__init__()
+        self._basedir = basedir
+        self._server_details = server_details
+
+    def _setUp(self):
+        private = self._basedir.child(u"private")
+        private.makedirs()
+        servers = private.child(u"servers.yaml")
+        servers.setContent(safe_dump({
+            u"storage": {
+                serverid: {
+                    u"ann": announcement,
+                }
+                for (serverid, announcement)
+                in self._server_details
+            },
+        }))
+
+
+class StorageClients(SyncTestCase):
+    """
+    Tests for storage-related behavior of ``_Client``.
+    """
+    def setUp(self):
+        super(StorageClients, self).setUp()
+        # Some other tests create Nodes and Node mutates tempfile.tempdir and
+        # that screws us up because we're *not* making a Node. "Fix" it. See
+        # https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3052 for the real fix,
+        # though.
+        import tempfile
+        tempfile.tempdir = None
+
+        tempdir = TempDir()
+        self.useFixture(tempdir)
+        self.basedir = FilePath(tempdir.path)
+
+    @capture_logging(
+        lambda case, logger: assertHasAction(
+            case,
+            logger,
+            actionType=u"storage-client:broker:set-static-servers",
+            succeeded=True,
+        ),
+    )
+    def test_static_servers(self, logger):
+        """
+        Storage servers defined in ``private/servers.yaml`` are loaded into the
+        storage broker.
+        """
+        serverid = u"v0-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
+        announcement = {
+            u"nickname": u"some-storage-server",
+            u"anonymous-storage-FURL": u"pb://xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx@tcp:storage.example:100/swissnum",
+        }
+        self.useFixture(
+            StaticServers(
+                self.basedir,
+                [(serverid, announcement)],
+            ),
+        )
+        self.assertThat(
+            client.create_client(self.basedir.asTextMode().path),
+            succeeded(
+                AfterPreprocessing(
+                    get_known_server_details,
+                    Equals([(serverid, announcement)]),
+                ),
+            ),
+        )
+
+    @capture_logging(
+        lambda case, logger: assertHasAction(
+            case,
+            logger,
+            actionType=u"storage-client:broker:make-storage-server",
+            succeeded=False,
+        ),
+    )
+    def test_invalid_static_server(self, logger):
+        """
+        An invalid announcement for a static server does not prevent other static
+        servers from being loaded.
+        """
+        # Some good details
+        serverid = u"v1-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
+        announcement = {
+            u"nickname": u"some-storage-server",
+            u"anonymous-storage-FURL": u"pb://xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx@tcp:storage.example:100/swissnum",
+        }
+        self.useFixture(
+            StaticServers(
+                self.basedir,
+                [(serverid, announcement),
+                 # Along with a "bad" server announcement. Order in this list
+                 # doesn't matter, yaml serializer and Python dicts are going
+                 # to shuffle everything around kind of randomly.
+                 (u"v0-bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb",
+                  {u"nickname": u"another-storage-server",
+                   u"anonymous-storage-FURL": None,
+                  }),
+                ],
+            ),
+        )
+        self.assertThat(
+            client.create_client(self.basedir.asTextMode().path),
+            succeeded(
+                AfterPreprocessing(
+                    get_known_server_details,
+                    # It should have the good server details.
+                    Equals([(serverid, announcement)]),
+                ),
+            ),
+        )
+
+
 class Run(unittest.TestCase, testutil.StallMixin):
 
     def setUp(self):
src/allmydata/test/test_websocket_logs.py (new file, 54 lines)
@@ -0,0 +1,54 @@
import json

from twisted.trial import unittest
from twisted.internet.defer import inlineCallbacks

from eliot import log_call

from autobahn.twisted.testing import create_memory_agent, MemoryReactorClockResolver, create_pumper

from allmydata.web.logs import TokenAuthenticatedWebSocketServerProtocol


class TestStreamingLogs(unittest.TestCase):
    """
    Test websocket streaming of logs
    """

    def setUp(self):
        self.reactor = MemoryReactorClockResolver()
        self.pumper = create_pumper()
        self.agent = create_memory_agent(self.reactor, self.pumper, TokenAuthenticatedWebSocketServerProtocol)
        return self.pumper.start()

    def tearDown(self):
        return self.pumper.stop()

    @inlineCallbacks
    def test_one_log(self):
        """
        write a single Eliot log and see it streamed via websocket
        """

        proto = yield self.agent.open(
            transport_config=u"ws://localhost:1234/ws",
            options={},
        )

        messages = []
        def got_message(msg, is_binary=False):
            messages.append(json.loads(msg))
        proto.on("message", got_message)

        @log_call(action_type=u"test:cli:magic-folder:cleanup")
        def do_a_thing():
            pass

        do_a_thing()

        proto.transport.loseConnection()
        yield proto.is_closed

        self.assertEqual(len(messages), 2)
        self.assertEqual("started", messages[0]["action_status"])
        self.assertEqual("succeeded", messages[1]["action_status"])