mirror of https://github.com/tahoe-lafs/tahoe-lafs.git
synced 2024-12-19 04:57:54 +00:00

commit c8f429c496: Merge remote-tracking branch 'origin/master' into 3849-refactor-out-foolscap-in-storage-server

newsfragments/3758.minor              (new file, 0 lines)
newsfragments/3847.minor              (new file, 0 lines)
@@ -125,5 +125,5 @@ if sys.platform == "win32":
     initialize()

 from eliot import to_file
-from allmydata.util.jsonbytes import AnyBytesJSONEncoder
-to_file(open("eliot.log", "wb"), encoder=AnyBytesJSONEncoder)
+from allmydata.util.eliotutil import eliot_json_encoder
+to_file(open("eliot.log", "wb"), encoder=eliot_json_encoder)
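The hunk above only swaps which encoder the suite-wide Eliot destination uses; a minimal sketch of the resulting setup, using exactly the module paths shown in the hunk:

    from eliot import to_file
    from allmydata.util.eliotutil import eliot_json_encoder

    # Every Eliot message emitted during the test run lands in eliot.log,
    # with bytes-valued fields tolerated by the lenient encoder.
    to_file(open("eliot.log", "wb"), encoder=eliot_json_encoder)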
@@ -42,7 +42,6 @@ from zope.interface import (
 from eliot import (
     ActionType,
     Field,
-    MemoryLogger,
     ILogger,
 )
 from eliot.testing import (
@@ -54,8 +53,9 @@ from twisted.python.monkey import (
     MonkeyPatcher,
 )

-from ..util.jsonbytes import AnyBytesJSONEncoder
+from ..util.eliotutil import (
+    MemoryLogger,
+)

 _NAME = Field.for_types(
     u"name",
@@ -71,14 +71,6 @@ RUN_TEST = ActionType(
 )


-# On Python 3, we want to use our custom JSON encoder when validating messages
-# can be encoded to JSON:
-if PY2:
-    _memory_logger = MemoryLogger
-else:
-    _memory_logger = lambda: MemoryLogger(encoder=AnyBytesJSONEncoder)
-
-
 @attr.s
 class EliotLoggedRunTest(object):
     """
@@ -170,7 +162,7 @@ def with_logging(
     """
     @wraps(test_method)
     def run_with_logging(*args, **kwargs):
-        validating_logger = _memory_logger()
+        validating_logger = MemoryLogger()
         original = swap_logger(None)
         try:
             swap_logger(_TwoLoggers(original, validating_logger))
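The run_with_logging change above swaps the local _memory_logger() shim for the shared MemoryLogger partial but keeps the surrounding two-logger pattern. A sketch of that pattern, assuming the original logger is restored in a finally block (the hunk is truncated) and with run_the_test as a hypothetical stand-in for the wrapped test method:

    validating_logger = MemoryLogger()   # validates every message it sees
    original = swap_logger(None)         # detach the global Eliot logger
    try:
        # Fan each Eliot message out to both the original logger and the
        # validating one for the duration of the test.
        swap_logger(_TwoLoggers(original, validating_logger))
        run_the_test()                   # hypothetical wrapped call
    finally:
        swap_logger(original)            # assumed restore; not shown above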
@@ -27,13 +27,12 @@ from fixtures import (
 )
 from testtools import (
     TestCase,
-)
-from testtools import (
     TestResult,
 )
 from testtools.matchers import (
     Is,
     IsInstance,
+    Not,
     MatchesStructure,
     Equals,
     HasLength,
@@ -65,11 +64,11 @@ from twisted.internet.task import deferLater
 from twisted.internet import reactor

 from ..util.eliotutil import (
+    eliot_json_encoder,
     log_call_deferred,
     _parse_destination_description,
     _EliotLogging,
 )
-from ..util.jsonbytes import AnyBytesJSONEncoder

 from .common import (
     SyncTestCase,
@@ -77,24 +76,105 @@ from .common import (
 )


-class EliotLoggedTestTests(AsyncTestCase):
+def passes():
+    """
+    Create a matcher that matches a ``TestCase`` that runs without failures or
+    errors.
+    """
+    def run(case):
+        result = TestResult()
+        case.run(result)
+        return result.wasSuccessful()
+    return AfterPreprocessing(run, Equals(True))
+
+
+class EliotLoggedTestTests(TestCase):
+    """
+    Tests for the automatic log-related behavior provided by ``AsyncTestCase``.
+
+    This class uses ``testtools.TestCase`` because it is inconvenient to nest
+    ``AsyncTestCase`` inside ``AsyncTestCase`` (in particular, Eliot messages
+    emitted by the inner test case get observed by the outer test case and if
+    an inner case emits invalid messages they cause the outer test case to
+    fail).
+    """
+    def test_fails(self):
+        """
+        A test method of an ``AsyncTestCase`` subclass can fail.
+        """
+        class UnderTest(AsyncTestCase):
+            def test_it(self):
+                self.fail("make sure it can fail")
+
+        self.assertThat(UnderTest("test_it"), Not(passes()))
+
+    def test_unserializable_fails(self):
+        """
+        A test method of an ``AsyncTestCase`` subclass that logs an unserializable
+        value with Eliot fails.
+        """
+        class world(object):
+            """
+            an unserializable object
+            """
+
+        class UnderTest(AsyncTestCase):
+            def test_it(self):
+                Message.log(hello=world)
+
+        self.assertThat(UnderTest("test_it"), Not(passes()))
+
+    def test_logs_non_utf_8_byte(self):
+        """
+        A test method of an ``AsyncTestCase`` subclass can log a message that
+        contains a non-UTF-8 byte string and return ``None`` and pass.
+        """
+        class UnderTest(AsyncTestCase):
+            def test_it(self):
+                Message.log(hello=b"\xFF")
+
+        self.assertThat(UnderTest("test_it"), passes())
+
     def test_returns_none(self):
-        Message.log(hello="world")
+        """
+        A test method of an ``AsyncTestCase`` subclass can log a message and
+        return ``None`` and pass.
+        """
+        class UnderTest(AsyncTestCase):
+            def test_it(self):
+                Message.log(hello="world")
+
+        self.assertThat(UnderTest("test_it"), passes())

     def test_returns_fired_deferred(self):
-        Message.log(hello="world")
-        return succeed(None)
+        """
+        A test method of an ``AsyncTestCase`` subclass can log a message and
+        return an already-fired ``Deferred`` and pass.
+        """
+        class UnderTest(AsyncTestCase):
+            def test_it(self):
+                Message.log(hello="world")
+                return succeed(None)
+
+        self.assertThat(UnderTest("test_it"), passes())

     def test_returns_unfired_deferred(self):
-        Message.log(hello="world")
-        # @eliot_logged_test automatically gives us an action context but it's
-        # still our responsibility to maintain it across stack-busting
-        # operations.
-        d = DeferredContext(deferLater(reactor, 0.0, lambda: None))
-        d.addCallback(lambda ignored: Message.log(goodbye="world"))
-        # We didn't start an action. We're not finishing an action.
-        return d.result
+        """
+        A test method of an ``AsyncTestCase`` subclass can log a message and
+        return an unfired ``Deferred`` and pass when the ``Deferred`` fires.
+        """
+        class UnderTest(AsyncTestCase):
+            def test_it(self):
+                Message.log(hello="world")
+                # @eliot_logged_test automatically gives us an action context
+                # but it's still our responsibility to maintain it across
+                # stack-busting operations.
+                d = DeferredContext(deferLater(reactor, 0.0, lambda: None))
+                d.addCallback(lambda ignored: Message.log(goodbye="world"))
+                # We didn't start an action. We're not finishing an action.
+                return d.result
+
+        self.assertThat(UnderTest("test_it"), passes())


 class ParseDestinationDescriptionTests(SyncTestCase):
@@ -109,7 +189,7 @@ class ParseDestinationDescriptionTests(SyncTestCase):
         reactor = object()
         self.assertThat(
             _parse_destination_description("file:-")(reactor),
-            Equals(FileDestination(stdout, encoder=AnyBytesJSONEncoder)),
+            Equals(FileDestination(stdout, encoder=eliot_json_encoder)),
         )

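The passes() helper added above is what the nested-test assertions lean on; a minimal illustration (Trivial is a hypothetical case, not from the diff):

    class Trivial(AsyncTestCase):
        def test_nothing(self):
            pass  # logs nothing invalid, so the validating logger is satisfied

    # passes() runs the case into a fresh TestResult and matches on
    # result.wasSuccessful(), so this assertion succeeds.
    self.assertThat(Trivial("test_nothing"), passes())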
@@ -553,11 +553,6 @@ class JSONBytes(unittest.TestCase):
                 o, cls=jsonbytes.AnyBytesJSONEncoder)),
             expected,
         )
-        self.assertEqual(
-            json.loads(jsonbytes.dumps(o, any_bytes=True)),
-            expected
-        )
-


 class FakeGetVersion(object):
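The deleted assertion exercised the same behavior through jsonbytes.dumps(o, any_bytes=True) that the kept assertion already checks via cls=jsonbytes.AnyBytesJSONEncoder. A hedged sketch of what that encoder does (the escaping of non-UTF-8 bytes is an assumption based on the "good-\\xff-day" expectation elsewhere in this diff):

    import json
    from allmydata.util import jsonbytes

    # bytes values are decoded to text instead of raising TypeError;
    # undecodable bytes come out backslash-escaped.
    encoded = json.dumps({"k": b"v"}, cls=jsonbytes.AnyBytesJSONEncoder)
    assert json.loads(encoded) == {"k": "v"}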
@@ -18,7 +18,6 @@ from six.moves import StringIO
 from bs4 import BeautifulSoup

 from twisted.web import resource
-from twisted.trial import unittest
 from allmydata import uri, dirnode
 from allmydata.util import base32
 from allmydata.util.encodingutil import to_bytes
@@ -43,6 +42,21 @@ from .common import (
     unknown_rwcap,
 )

+from ..common import (
+    AsyncTestCase,
+)
+
+from testtools.matchers import (
+    Equals,
+    Contains,
+    Not,
+    HasLength,
+    EndsWith,
+)
+
+from testtools.twistedsupport import flush_logged_errors
+
+
 DIR_HTML_TAG = '<html lang="en">'

 class CompletelyUnhandledError(Exception):
@@ -53,7 +67,7 @@ class ErrorBoom(resource.Resource, object):
     def render(self, req):
         raise CompletelyUnhandledError("whoops")

-class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMixin, unittest.TestCase):
+class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMixin, AsyncTestCase):

     def CHECK(self, ign, which, args, clientnum=0):
         fileurl = self.fileurls[which]
@@ -117,37 +131,37 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi

         d.addCallback(self.CHECK, "good", "t=check")
         def _got_html_good(res):
-            self.failUnlessIn("Healthy", res)
-            self.failIfIn("Not Healthy", res)
+            self.assertThat(res, Contains("Healthy"))
+            self.assertThat(res, Not(Contains("Not Healthy", )))
             soup = BeautifulSoup(res, 'html5lib')
             assert_soup_has_favicon(self, soup)

         d.addCallback(_got_html_good)
         d.addCallback(self.CHECK, "good", "t=check&return_to=somewhere")
         def _got_html_good_return_to(res):
-            self.failUnlessIn("Healthy", res)
-            self.failIfIn("Not Healthy", res)
-            self.failUnlessIn('<a href="somewhere">Return to file', res)
+            self.assertThat(res, Contains("Healthy"))
+            self.assertThat(res, Not(Contains("Not Healthy")))
+            self.assertThat(res, Contains('<a href="somewhere">Return to file'))
         d.addCallback(_got_html_good_return_to)
         d.addCallback(self.CHECK, "good", "t=check&output=json")
         def _got_json_good(res):
             r = json.loads(res)
             self.failUnlessEqual(r["summary"], "Healthy")
             self.failUnless(r["results"]["healthy"])
-            self.failIfIn("needs-rebalancing", r["results"])
+            self.assertThat(r["results"], Not(Contains("needs-rebalancing",)))
             self.failUnless(r["results"]["recoverable"])
         d.addCallback(_got_json_good)

         d.addCallback(self.CHECK, "small", "t=check")
         def _got_html_small(res):
-            self.failUnlessIn("Literal files are always healthy", res)
-            self.failIfIn("Not Healthy", res)
+            self.assertThat(res, Contains("Literal files are always healthy"))
+            self.assertThat(res, Not(Contains("Not Healthy")))
         d.addCallback(_got_html_small)
         d.addCallback(self.CHECK, "small", "t=check&return_to=somewhere")
         def _got_html_small_return_to(res):
-            self.failUnlessIn("Literal files are always healthy", res)
-            self.failIfIn("Not Healthy", res)
-            self.failUnlessIn('<a href="somewhere">Return to file', res)
+            self.assertThat(res, Contains("Literal files are always healthy"))
+            self.assertThat(res, Not(Contains("Not Healthy")))
+            self.assertThat(res, Contains('<a href="somewhere">Return to file'))
         d.addCallback(_got_html_small_return_to)
         d.addCallback(self.CHECK, "small", "t=check&output=json")
         def _got_json_small(res):
@@ -158,8 +172,8 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi

         d.addCallback(self.CHECK, "smalldir", "t=check")
         def _got_html_smalldir(res):
-            self.failUnlessIn("Literal files are always healthy", res)
-            self.failIfIn("Not Healthy", res)
+            self.assertThat(res, Contains("Literal files are always healthy"))
+            self.assertThat(res, Not(Contains("Not Healthy")))
         d.addCallback(_got_html_smalldir)
         d.addCallback(self.CHECK, "smalldir", "t=check&output=json")
         def _got_json_smalldir(res):
@@ -170,43 +184,43 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi

         d.addCallback(self.CHECK, "sick", "t=check")
         def _got_html_sick(res):
-            self.failUnlessIn("Not Healthy", res)
+            self.assertThat(res, Contains("Not Healthy"))
         d.addCallback(_got_html_sick)
         d.addCallback(self.CHECK, "sick", "t=check&output=json")
         def _got_json_sick(res):
             r = json.loads(res)
             self.failUnlessEqual(r["summary"],
                                  "Not Healthy: 9 shares (enc 3-of-10)")
-            self.failIf(r["results"]["healthy"])
+            self.assertThat(r["results"]["healthy"], Equals(False))
             self.failUnless(r["results"]["recoverable"])
-            self.failIfIn("needs-rebalancing", r["results"])
+            self.assertThat(r["results"], Not(Contains("needs-rebalancing")))
         d.addCallback(_got_json_sick)

         d.addCallback(self.CHECK, "dead", "t=check")
         def _got_html_dead(res):
-            self.failUnlessIn("Not Healthy", res)
+            self.assertThat(res, Contains("Not Healthy"))
         d.addCallback(_got_html_dead)
         d.addCallback(self.CHECK, "dead", "t=check&output=json")
         def _got_json_dead(res):
             r = json.loads(res)
             self.failUnlessEqual(r["summary"],
                                  "Not Healthy: 1 shares (enc 3-of-10)")
-            self.failIf(r["results"]["healthy"])
-            self.failIf(r["results"]["recoverable"])
-            self.failIfIn("needs-rebalancing", r["results"])
+            self.assertThat(r["results"]["healthy"], Equals(False))
+            self.assertThat(r["results"]["recoverable"], Equals(False))
+            self.assertThat(r["results"], Not(Contains("needs-rebalancing")))
         d.addCallback(_got_json_dead)

         d.addCallback(self.CHECK, "corrupt", "t=check&verify=true")
         def _got_html_corrupt(res):
-            self.failUnlessIn("Not Healthy! : Unhealthy", res)
+            self.assertThat(res, Contains("Not Healthy! : Unhealthy"))
         d.addCallback(_got_html_corrupt)
         d.addCallback(self.CHECK, "corrupt", "t=check&verify=true&output=json")
         def _got_json_corrupt(res):
             r = json.loads(res)
-            self.failUnlessIn("Unhealthy: 9 shares (enc 3-of-10)", r["summary"])
-            self.failIf(r["results"]["healthy"])
+            self.assertThat(r["summary"], Contains("Unhealthy: 9 shares (enc 3-of-10)"))
+            self.assertThat(r["results"]["healthy"], Equals(False))
             self.failUnless(r["results"]["recoverable"])
-            self.failIfIn("needs-rebalancing", r["results"])
+            self.assertThat(r["results"], Not(Contains("needs-rebalancing")))
             self.failUnlessReallyEqual(r["results"]["count-happiness"], 9)
             self.failUnlessReallyEqual(r["results"]["count-shares-good"], 9)
             self.failUnlessReallyEqual(r["results"]["count-corrupt-shares"], 1)
@@ -261,9 +275,9 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi

         d.addCallback(self.CHECK, "good", "t=check&repair=true")
         def _got_html_good(res):
-            self.failUnlessIn("Healthy", res)
-            self.failIfIn("Not Healthy", res)
-            self.failUnlessIn("No repair necessary", res)
+            self.assertThat(res, Contains("Healthy"))
+            self.assertThat(res, Not(Contains("Not Healthy")))
+            self.assertThat(res, Contains("No repair necessary", ))
             soup = BeautifulSoup(res, 'html5lib')
             assert_soup_has_favicon(self, soup)

@@ -271,9 +285,9 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi

         d.addCallback(self.CHECK, "sick", "t=check&repair=true")
         def _got_html_sick(res):
-            self.failUnlessIn("Healthy : healthy", res)
-            self.failIfIn("Not Healthy", res)
-            self.failUnlessIn("Repair successful", res)
+            self.assertThat(res, Contains("Healthy : healthy"))
+            self.assertThat(res, Not(Contains("Not Healthy")))
+            self.assertThat(res, Contains("Repair successful"))
         d.addCallback(_got_html_sick)

         # repair of a dead file will fail, of course, but it isn't yet
@@ -290,9 +304,9 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi

         d.addCallback(self.CHECK, "corrupt", "t=check&verify=true&repair=true")
         def _got_html_corrupt(res):
-            self.failUnlessIn("Healthy : Healthy", res)
-            self.failIfIn("Not Healthy", res)
-            self.failUnlessIn("Repair successful", res)
+            self.assertThat(res, Contains("Healthy : Healthy"))
+            self.assertThat(res, Not(Contains("Not Healthy")))
+            self.assertThat(res, Contains("Repair successful"))
         d.addCallback(_got_html_corrupt)

         d.addErrback(self.explain_web_error)
@@ -392,31 +406,31 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
             if expect_rw_uri:
                 self.failUnlessReallyEqual(to_bytes(f[1]["rw_uri"]), unknown_rwcap, data)
             else:
-                self.failIfIn("rw_uri", f[1])
+                self.assertThat(f[1], Not(Contains("rw_uri")))
             if immutable:
                 self.failUnlessReallyEqual(to_bytes(f[1]["ro_uri"]), unknown_immcap, data)
             else:
                 self.failUnlessReallyEqual(to_bytes(f[1]["ro_uri"]), unknown_rocap, data)
-            self.failUnlessIn("metadata", f[1])
+            self.assertThat(f[1], Contains("metadata"))
         d.addCallback(_check_directory_json, expect_rw_uri=not immutable)

         def _check_info(res, expect_rw_uri, expect_ro_uri):
             if expect_rw_uri:
-                self.failUnlessIn(unknown_rwcap, res)
+                self.assertThat(res, Contains(unknown_rwcap))
             if expect_ro_uri:
                 if immutable:
-                    self.failUnlessIn(unknown_immcap, res)
+                    self.assertThat(res, Contains(unknown_immcap))
                 else:
-                    self.failUnlessIn(unknown_rocap, res)
+                    self.assertThat(res, Contains(unknown_rocap))
             else:
-                self.failIfIn(unknown_rocap, res)
+                self.assertThat(res, Not(Contains(unknown_rocap)))
             res = str(res, "utf-8")
-            self.failUnlessIn("Object Type: <span>unknown</span>", res)
-            self.failIfIn("Raw data as", res)
-            self.failIfIn("Directory writecap", res)
-            self.failIfIn("Checker Operations", res)
-            self.failIfIn("Mutable File Operations", res)
-            self.failIfIn("Directory Operations", res)
+            self.assertThat(res, Contains("Object Type: <span>unknown</span>"))
+            self.assertThat(res, Not(Contains("Raw data as")))
+            self.assertThat(res, Not(Contains("Directory writecap")))
+            self.assertThat(res, Not(Contains("Checker Operations")))
+            self.assertThat(res, Not(Contains("Mutable File Operations")))
+            self.assertThat(res, Not(Contains("Directory Operations")))

         # FIXME: these should have expect_rw_uri=not immutable; I don't know
         # why they fail. Possibly related to ticket #922.
@@ -432,7 +446,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
             if expect_rw_uri:
                 self.failUnlessReallyEqual(to_bytes(data[1]["rw_uri"]), unknown_rwcap, data)
             else:
-                self.failIfIn("rw_uri", data[1])
+                self.assertThat(data[1], Not(Contains("rw_uri")))

             if immutable:
                 self.failUnlessReallyEqual(to_bytes(data[1]["ro_uri"]), unknown_immcap, data)
@@ -442,10 +456,10 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
                 self.failUnlessReallyEqual(data[1]["mutable"], True)
             else:
                 self.failUnlessReallyEqual(to_bytes(data[1]["ro_uri"]), unknown_rocap, data)
-                self.failIfIn("mutable", data[1])
+                self.assertThat(data[1], Not(Contains("mutable")))

             # TODO: check metadata contents
-            self.failUnlessIn("metadata", data[1])
+            self.assertThat(data[1], Contains("metadata"))

         d.addCallback(lambda ign: self.GET("%s/%s?t=json" % (self.rooturl, str(name))))
         d.addCallback(_check_json, expect_rw_uri=not immutable)
@@ -519,14 +533,14 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi

         def _created(dn):
             self.failUnless(isinstance(dn, dirnode.DirectoryNode))
-            self.failIf(dn.is_mutable())
+            self.assertThat(dn.is_mutable(), Equals(False))
             self.failUnless(dn.is_readonly())
             # This checks that if we somehow ended up calling dn._decrypt_rwcapdata, it would fail.
-            self.failIf(hasattr(dn._node, 'get_writekey'))
+            self.assertThat(hasattr(dn._node, 'get_writekey'), Equals(False))
             rep = str(dn)
-            self.failUnlessIn("RO-IMM", rep)
+            self.assertThat(rep, Contains("RO-IMM"))
             cap = dn.get_cap()
-            self.failUnlessIn(b"CHK", cap.to_string())
+            self.assertThat(cap.to_string(), Contains(b"CHK"))
             self.cap = cap
             self.rootnode = dn
             self.rooturl = "uri/" + url_quote(dn.get_uri())
@@ -546,7 +560,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
             (name_utf8, ro_uri, rwcapdata, metadata_s), subpos = split_netstring(entry, 4)
             name = name_utf8.decode("utf-8")
             self.failUnlessEqual(rwcapdata, b"")
-            self.failUnlessIn(name, kids)
+            self.assertThat(kids, Contains(name))
             (expected_child, ign) = kids[name]
             self.failUnlessReallyEqual(ro_uri, expected_child.get_readonly_uri())
             numkids += 1
@@ -572,27 +586,27 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
         d.addCallback(lambda ign: self.GET(self.rooturl))
         def _check_html(res):
             soup = BeautifulSoup(res, 'html5lib')
-            self.failIfIn(b"URI:SSK", res)
+            self.assertThat(res, Not(Contains(b"URI:SSK")))
             found = False
             for td in soup.find_all(u"td"):
                 if td.text != u"FILE":
                     continue
                 a = td.findNextSibling()(u"a")[0]
-                self.assertIn(url_quote(lonely_uri), a[u"href"])
-                self.assertEqual(u"lonely", a.text)
-                self.assertEqual(a[u"rel"], [u"noreferrer"])
-                self.assertEqual(u"{}".format(len("one")), td.findNextSibling().findNextSibling().text)
+                self.assertThat(a[u"href"], Contains(url_quote(lonely_uri)))
+                self.assertThat(a.text, Equals(u"lonely"))
+                self.assertThat(a[u"rel"], Equals([u"noreferrer"]))
+                self.assertThat(td.findNextSibling().findNextSibling().text, Equals(u"{}".format(len("one"))))
                 found = True
                 break
-            self.assertTrue(found)
+            self.assertThat(found, Equals(True))

             infos = list(
                 a[u"href"]
                 for a in soup.find_all(u"a")
                 if a.text == u"More Info"
             )
-            self.assertEqual(1, len(infos))
-            self.assertTrue(infos[0].endswith(url_quote(lonely_uri) + "?t=info"))
+            self.assertThat(infos, HasLength(1))
+            self.assertThat(infos[0], EndsWith(url_quote(lonely_uri) + "?t=info"))
         d.addCallback(_check_html)

         # ... and in JSON.
@@ -604,7 +618,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
             self.failUnlessReallyEqual(sorted(listed_children.keys()), [u"lonely"])
             ll_type, ll_data = listed_children[u"lonely"]
             self.failUnlessEqual(ll_type, "filenode")
-            self.failIfIn("rw_uri", ll_data)
+            self.assertThat(ll_data, Not(Contains("rw_uri")))
             self.failUnlessReallyEqual(to_bytes(ll_data["ro_uri"]), lonely_uri)
         d.addCallback(_check_json)
         return d
@@ -744,8 +758,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
             error_line = lines[first_error]
             error_msg = lines[first_error+1:]
             error_msg_s = "\n".join(error_msg) + "\n"
-            self.failUnlessIn("ERROR: UnrecoverableFileError(no recoverable versions)",
-                              error_line)
+            self.assertThat(error_line, Contains("ERROR: UnrecoverableFileError(no recoverable versions)"))
             self.failUnless(len(error_msg) > 2, error_msg_s) # some traceback
             units = [json.loads(line) for line in lines[:first_error]]
             self.failUnlessReallyEqual(len(units), 6) # includes subdir
@@ -765,8 +778,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
             error_line = lines[first_error]
             error_msg = lines[first_error+1:]
             error_msg_s = "\n".join(error_msg) + "\n"
-            self.failUnlessIn("ERROR: UnrecoverableFileError(no recoverable versions)",
-                              error_line)
+            self.assertThat(error_line, Contains("ERROR: UnrecoverableFileError(no recoverable versions)"))
             self.failUnless(len(error_msg) > 2, error_msg_s) # some traceback
             units = [json.loads(line) for line in lines[:first_error]]
             self.failUnlessReallyEqual(len(units), 6) # includes subdir
@@ -936,8 +948,8 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi

         d.addCallback(self.CHECK, "one", "t=check") # no add-lease
         def _got_html_good(res):
-            self.failUnlessIn("Healthy", res)
-            self.failIfIn("Not Healthy", res)
+            self.assertThat(res, Contains("Healthy"))
+            self.assertThat(res, Not(Contains("Not Healthy")))
         d.addCallback(_got_html_good)

         d.addCallback(self._count_leases, "one")
@@ -1111,7 +1123,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
                                       self.GET, self.fileurls["0shares"]))
         def _check_zero_shares(body):
             body = str(body, "utf-8")
-            self.failIfIn("<html>", body)
+            self.assertThat(body, Not(Contains("<html>")))
             body = " ".join(body.strip().split())
             exp = ("NoSharesError: no shares could be found. "
                    "Zero shares usually indicates a corrupt URI, or that "
@@ -1129,7 +1141,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
                                       self.GET, self.fileurls["1share"]))
         def _check_one_share(body):
             body = str(body, "utf-8")
-            self.failIfIn("<html>", body)
+            self.assertThat(body, Not(Contains("<html>")))
             body = " ".join(body.strip().split())
             msgbase = ("NotEnoughSharesError: This indicates that some "
                        "servers were unavailable, or that shares have been "
@@ -1154,17 +1166,16 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
                                       self.GET, self.fileurls["imaginary"]))
         def _missing_child(body):
             body = str(body, "utf-8")
-            self.failUnlessIn("No such child: imaginary", body)
+            self.assertThat(body, Contains("No such child: imaginary"))
         d.addCallback(_missing_child)

         d.addCallback(lambda ignored: self.GET_unicode(self.fileurls["dir-0share"]))
         def _check_0shares_dir_html(body):
-            self.failUnlessIn(DIR_HTML_TAG, body)
+            self.assertThat(body, Contains(DIR_HTML_TAG))
             # we should see the regular page, but without the child table or
             # the dirops forms
             body = " ".join(body.strip().split())
-            self.failUnlessIn('href="?t=info">More info on this directory',
-                              body)
+            self.assertThat(body, Contains('href="?t=info">More info on this directory'))
             exp = ("UnrecoverableFileError: the directory (or mutable file) "
                    "could not be retrieved, because there were insufficient "
                    "good shares. This might indicate that no servers were "
@@ -1172,8 +1183,8 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
                    "was corrupt, or that shares have been lost due to server "
                    "departure, hard drive failure, or disk corruption. You "
                    "should perform a filecheck on this object to learn more.")
-            self.failUnlessIn(exp, body)
-            self.failUnlessIn("No upload forms: directory is unreadable", body)
+            self.assertThat(body, Contains(exp))
+            self.assertThat(body, Contains("No upload forms: directory is unreadable"))
         d.addCallback(_check_0shares_dir_html)

         d.addCallback(lambda ignored: self.GET_unicode(self.fileurls["dir-1share"]))
@@ -1182,10 +1193,9 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
             # and some-shares like we did for immutable files (since there
             # are different sorts of advice to offer in each case). For now,
             # they present the same way.
-            self.failUnlessIn(DIR_HTML_TAG, body)
+            self.assertThat(body, Contains(DIR_HTML_TAG))
             body = " ".join(body.strip().split())
-            self.failUnlessIn('href="?t=info">More info on this directory',
-                              body)
+            self.assertThat(body, Contains('href="?t=info">More info on this directory'))
             exp = ("UnrecoverableFileError: the directory (or mutable file) "
                    "could not be retrieved, because there were insufficient "
                    "good shares. This might indicate that no servers were "
@@ -1193,8 +1203,8 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
                    "was corrupt, or that shares have been lost due to server "
                    "departure, hard drive failure, or disk corruption. You "
                    "should perform a filecheck on this object to learn more.")
-            self.failUnlessIn(exp, body)
-            self.failUnlessIn("No upload forms: directory is unreadable", body)
+            self.assertThat(body, Contains(exp))
+            self.assertThat(body, Contains("No upload forms: directory is unreadable"))
         d.addCallback(_check_1shares_dir_html)

         d.addCallback(lambda ignored:
@@ -1204,7 +1214,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
                                       self.fileurls["dir-0share-json"]))
         def _check_unrecoverable_file(body):
             body = str(body, "utf-8")
-            self.failIfIn("<html>", body)
+            self.assertThat(body, Not(Contains("<html>")))
             body = " ".join(body.strip().split())
             exp = ("UnrecoverableFileError: the directory (or mutable file) "
                    "could not be retrieved, because there were insufficient "
@@ -1213,7 +1223,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
                    "was corrupt, or that shares have been lost due to server "
                    "departure, hard drive failure, or disk corruption. You "
                    "should perform a filecheck on this object to learn more.")
-            self.failUnlessIn(exp, body)
+            self.assertThat(body, Contains(exp))
         d.addCallback(_check_unrecoverable_file)

         d.addCallback(lambda ignored:
@@ -1245,7 +1255,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
                                       headers={"accept": "*/*"}))
         def _internal_error_html1(body):
             body = str(body, "utf-8")
-            self.failUnlessIn("<html>", "expected HTML, not '%s'" % body)
+            self.assertThat("expected HTML, not '%s'" % body, Contains("<html>"))
         d.addCallback(_internal_error_html1)

         d.addCallback(lambda ignored:
@@ -1255,8 +1265,9 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
                                       headers={"accept": "text/plain"}))
         def _internal_error_text2(body):
             body = str(body, "utf-8")
-            self.failIfIn("<html>", body)
+            self.assertThat(body, Not(Contains("<html>")))
             self.failUnless(body.startswith("Traceback "), body)
+
         d.addCallback(_internal_error_text2)

         CLI_accepts = "text/plain, application/octet-stream"
@@ -1267,7 +1278,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
                                       headers={"accept": CLI_accepts}))
         def _internal_error_text3(body):
             body = str(body, "utf-8")
-            self.failIfIn("<html>", body)
+            self.assertThat(body, Not(Contains("<html>")))
             self.failUnless(body.startswith("Traceback "), body)
         d.addCallback(_internal_error_text3)

@@ -1276,12 +1287,12 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
                                            500, "Internal Server Error", None,
                                            self.GET, "ERRORBOOM"))
         def _internal_error_html4(body):
-            self.failUnlessIn(b"<html>", body)
+            self.assertThat(body, Contains(b"<html>"))
         d.addCallback(_internal_error_html4)

         def _flush_errors(res):
             # Trial: please ignore the CompletelyUnhandledError in the logs
-            self.flushLoggedErrors(CompletelyUnhandledError)
+            flush_logged_errors(CompletelyUnhandledError)
             return res
         d.addBoth(_flush_errors)

@@ -1312,8 +1323,8 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
         d.addCallback(_stash_dir)
         d.addCallback(lambda ign: self.GET_unicode(self.dir_url, followRedirect=True))
         def _check_dir_html(body):
-            self.failUnlessIn(DIR_HTML_TAG, body)
-            self.failUnlessIn("blacklisted.txt</a>", body)
+            self.assertThat(body, Contains(DIR_HTML_TAG))
+            self.assertThat(body, Contains("blacklisted.txt</a>"))
         d.addCallback(_check_dir_html)
         d.addCallback(lambda ign: self.GET(self.url))
         d.addCallback(lambda body: self.failUnlessEqual(DATA, body))
@@ -1336,8 +1347,8 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
         # We should still be able to list the parent directory, in HTML...
         d.addCallback(lambda ign: self.GET_unicode(self.dir_url, followRedirect=True))
         def _check_dir_html2(body):
-            self.failUnlessIn(DIR_HTML_TAG, body)
-            self.failUnlessIn("blacklisted.txt</strike>", body)
+            self.assertThat(body, Contains(DIR_HTML_TAG))
+            self.assertThat(body, Contains("blacklisted.txt</strike>"))
         d.addCallback(_check_dir_html2)

         # ... and in JSON (used by CLI).
@@ -1347,8 +1358,8 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
             self.failUnless(isinstance(data, list), data)
             self.failUnlessEqual(data[0], "dirnode")
             self.failUnless(isinstance(data[1], dict), data)
-            self.failUnlessIn("children", data[1])
-            self.failUnlessIn("blacklisted.txt", data[1]["children"])
+            self.assertThat(data[1], Contains("children"))
+            self.assertThat(data[1]["children"], Contains("blacklisted.txt"))
             childdata = data[1]["children"]["blacklisted.txt"]
             self.failUnless(isinstance(childdata, list), data)
             self.failUnlessEqual(childdata[0], "filenode")
@@ -1387,7 +1398,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
             self.child_url = b"uri/"+dn.get_readonly_uri()+b"/child"
         d.addCallback(_get_dircap)
         d.addCallback(lambda ign: self.GET(self.dir_url_base, followRedirect=True))
-        d.addCallback(lambda body: self.failUnlessIn(DIR_HTML_TAG, str(body, "utf-8")))
+        d.addCallback(lambda body: self.assertThat(str(body, "utf-8"), Contains(DIR_HTML_TAG)))
         d.addCallback(lambda ign: self.GET(self.dir_url_json1))
         d.addCallback(lambda res: json.loads(res)) # just check it decodes
         d.addCallback(lambda ign: self.GET(self.dir_url_json2))
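Nearly all of the changes in the section above are one mechanical translation from trial-style assertions to testtools matchers; the recurring pattern, extracted for reference:

    # Before (twisted.trial style):
    self.failUnlessIn("Healthy", res)
    self.failIfIn("Not Healthy", res)

    # After (testtools matcher style):
    self.assertThat(res, Contains("Healthy"))
    self.assertThat(res, Not(Contains("Not Healthy")))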
@@ -17,10 +17,8 @@ if PY2:

 import json

-from twisted.trial import unittest
 from twisted.internet.defer import inlineCallbacks

-from eliot import log_call
-
 from autobahn.twisted.testing import create_memory_agent, MemoryReactorClockResolver, create_pumper

@@ -48,6 +46,7 @@ from .matchers import (

 from ..common import (
     SyncTestCase,
+    AsyncTestCase,
 )

 from ...web.logs import (
@@ -55,6 +54,8 @@ from ...web.logs import (
     TokenAuthenticatedWebSocketServerProtocol,
 )

+from eliot import log_call
+
 class StreamingEliotLogsTests(SyncTestCase):
     """
     Tests for the log streaming resources created by ``create_log_resources``.
@@ -75,18 +76,20 @@ class StreamingEliotLogsTests(SyncTestCase):
     )


-class TestStreamingLogs(unittest.TestCase):
+class TestStreamingLogs(AsyncTestCase):
     """
     Test websocket streaming of logs
     """

     def setUp(self):
+        super(TestStreamingLogs, self).setUp()
         self.reactor = MemoryReactorClockResolver()
         self.pumper = create_pumper()
         self.agent = create_memory_agent(self.reactor, self.pumper, TokenAuthenticatedWebSocketServerProtocol)
         return self.pumper.start()

     def tearDown(self):
+        super(TestStreamingLogs, self).tearDown()
         return self.pumper.stop()

     @inlineCallbacks
@@ -114,10 +117,10 @@ class TestStreamingLogs(unittest.TestCase):
         proto.transport.loseConnection()
         yield proto.is_closed

-        self.assertEqual(len(messages), 2)
-        self.assertEqual(messages[0]["action_type"], "test:cli:some-exciting-action")
-        self.assertEqual(messages[0]["arguments"],
-                         ["hello", "good-\\xff-day", 123, {"a": 35}, [None]])
-        self.assertEqual(messages[1]["action_type"], "test:cli:some-exciting-action")
-        self.assertEqual("started", messages[0]["action_status"])
-        self.assertEqual("succeeded", messages[1]["action_status"])
+        self.assertThat(len(messages), Equals(3))
+        self.assertThat(messages[0]["action_type"], Equals("test:cli:some-exciting-action"))
+        self.assertThat(messages[0]["arguments"],
+                        Equals(["hello", "good-\\xff-day", 123, {"a": 35}, [None]]))
+        self.assertThat(messages[1]["action_type"], Equals("test:cli:some-exciting-action"))
+        self.assertThat("started", Equals(messages[0]["action_status"]))
+        self.assertThat("succeeded", Equals(messages[1]["action_status"]))
@@ -20,10 +20,11 @@ from bs4 import (
     BeautifulSoup,
 )

-from twisted.trial import unittest
 from twisted.web.template import Tag
 from twisted.web.test.requesthelper import DummyRequest
 from twisted.application import service
+from testtools.twistedsupport import succeeded
+from twisted.internet.defer import inlineCallbacks

 from ...storage_client import (
     NativeStorageServer,
@@ -44,7 +45,17 @@ from ..common import (
     EMPTY_CLIENT_CONFIG,
 )

-class RenderSlashUri(unittest.TestCase):
+from ..common import (
+    SyncTestCase,
+)
+
+from testtools.matchers import (
+    Equals,
+    Contains,
+    AfterPreprocessing,
+)
+
+class RenderSlashUri(SyncTestCase):
     """
     Ensure that URIs starting with /uri?uri= only accept valid
     capabilities
@@ -53,7 +64,9 @@ class RenderSlashUri(unittest.TestCase):
     def setUp(self):
         self.client = object()
         self.res = URIHandler(self.client)
+        super(RenderSlashUri, self).setUp()

+    @inlineCallbacks
     def test_valid_query_redirect(self):
         """
         A syntactically valid capability given in the ``uri`` query argument
@@ -64,9 +77,7 @@ class RenderSlashUri(unittest.TestCase):
             b"mukesarwdjxiyqsjinbfiiro6q7kgmmekocxfjcngh23oxwyxtzq:2:5:5874882"
         )
         query_args = {b"uri": [cap]}
-        response_body = self.successResultOf(
-            render(self.res, query_args),
-        )
+        response_body = yield render(self.res, query_args)
         soup = BeautifulSoup(response_body, 'html5lib')
         tag = assert_soup_has_tag_with_attributes(
             self,
@@ -74,9 +85,9 @@ class RenderSlashUri(unittest.TestCase):
             u"meta",
             {u"http-equiv": "refresh"},
         )
-        self.assertIn(
-            quote(cap, safe=""),
+        self.assertThat(
             tag.attrs.get(u"content"),
+            Contains(quote(cap, safe="")),
         )

     def test_invalid(self):
@@ -84,16 +95,14 @@ class RenderSlashUri(unittest.TestCase):
         A syntactically invalid capability results in an error.
         """
         query_args = {b"uri": [b"not a capability"]}
-        response_body = self.successResultOf(
-            render(self.res, query_args),
-        )
-        self.assertEqual(
+        response_body = render(self.res, query_args)
+        self.assertThat(
             response_body,
-            b"Invalid capability",
+            succeeded(AfterPreprocessing(bytes, Equals(b"Invalid capability"))),
         )


-class RenderServiceRow(unittest.TestCase):
+class RenderServiceRow(SyncTestCase):
     def test_missing(self):
         """
         minimally-defined static servers just need anonymous-storage-FURL
@@ -127,5 +136,5 @@ class RenderServiceRow(unittest.TestCase):
         # Coerce `items` to list and pick the first item from it.
         item = list(items)[0]

-        self.assertEqual(item.slotData.get("version"), "")
-        self.assertEqual(item.slotData.get("nickname"), "")
+        self.assertThat(item.slotData.get("version"), Equals(""))
+        self.assertThat(item.slotData.get("nickname"), Equals(""))
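The rewritten test_invalid assertion above combines three testtools matchers; the same code from the hunk, unpacked with comments (query_args inlined for reading):

    # succeeded(...) matches a Deferred that has already fired;
    # AfterPreprocessing applies a function (here bytes) to the result
    # before handing it to the inner matcher.
    self.assertThat(
        render(self.res, {b"uri": [b"not a capability"]}),
        succeeded(AfterPreprocessing(bytes, Equals(b"Invalid capability"))),
    )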
src/allmydata/util/_eliot_updates.py              (new file, 195 lines)

@@ -0,0 +1,195 @@
+"""
+Bring in some Eliot updates from newer versions of Eliot than we can
+depend on in Python 2.  The implementations are copied from Eliot 1.14 and
+only changed enough to add Python 2 compatibility.
+
+Every API in this module (except ``eliot_json_encoder``) should be obsolete as
+soon as we depend on Eliot 1.14 or newer.
+
+When that happens:
+
+* replace ``capture_logging``
+  with ``partial(eliot.testing.capture_logging, encoder_=eliot_json_encoder)``
+* replace ``validateLogging``
+  with ``partial(eliot.testing.validateLogging, encoder_=eliot_json_encoder)``
+* replace ``MemoryLogger``
+  with ``partial(eliot.MemoryLogger, encoder=eliot_json_encoder)``
+
+Ported to Python 3.
+"""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
+import json as pyjson
+from functools import wraps, partial
+
+from eliot import (
+    MemoryLogger as _MemoryLogger,
+)
+
+from eliot.testing import (
+    check_for_errors,
+    swap_logger,
+)
+
+from .jsonbytes import AnyBytesJSONEncoder
+
+# There are currently a number of log messages that include non-UTF-8 bytes.
+# Allow these, at least for now.  Later when the whole test suite has been
+# converted to our SyncTestCase or AsyncTestCase it will be easier to turn
+# this off and then attribute log failures to specific codepaths so they can
+# be fixed (and then not regressed later) because those instances will result
+# in test failures instead of only garbage being written to the eliot log.
+eliot_json_encoder = AnyBytesJSONEncoder
+
+class _CustomEncoderMemoryLogger(_MemoryLogger):
+    """
+    Override message validation from the Eliot-supplied ``MemoryLogger`` to
+    use our chosen JSON encoder.
+
+    This is only necessary on Python 2 where we use an old version of Eliot
+    that does not parameterize the encoder.
+    """
+    def __init__(self, encoder=eliot_json_encoder):
+        """
+        @param encoder: A JSONEncoder subclass to use when encoding JSON.
+        """
+        self._encoder = encoder
+        super(_CustomEncoderMemoryLogger, self).__init__()
+
+    def _validate_message(self, dictionary, serializer):
+        """Validate an individual message.
+
+        As a side-effect, the message is replaced with its serialized contents.
+
+        @param dictionary: A message C{dict} to be validated.  Might be mutated
+            by the serializer!
+
+        @param serializer: C{None} or a serializer.
+
+        @raises TypeError: If a field name is not unicode, or the dictionary
+            fails to serialize to JSON.
+
+        @raises eliot.ValidationError: If serializer was given and validation
+            failed.
+        """
+        if serializer is not None:
+            serializer.validate(dictionary)
+        for key in dictionary:
+            if not isinstance(key, str):
+                if isinstance(key, bytes):
+                    key.decode("utf-8")
+                else:
+                    raise TypeError(dictionary, "%r is not unicode" % (key,))
+        if serializer is not None:
+            serializer.serialize(dictionary)
+
+        try:
+            pyjson.dumps(dictionary, cls=self._encoder)
+        except Exception as e:
+            raise TypeError("Message %s doesn't encode to JSON: %s" % (dictionary, e))
+
+if PY2:
+    MemoryLogger = partial(_CustomEncoderMemoryLogger, encoder=eliot_json_encoder)
+else:
+    MemoryLogger = partial(_MemoryLogger, encoder=eliot_json_encoder)
+
+def validateLogging(
+    assertion, *assertionArgs, **assertionKwargs
+):
+    """
+    Decorator factory for L{unittest.TestCase} methods to add logging
+    validation.
+
+    1. The decorated test method gets a C{logger} keyword argument, a
+       L{MemoryLogger}.
+    2. All messages logged to this logger will be validated at the end of
+       the test.
+    3. Any unflushed logged tracebacks will cause the test to fail.
+
+    For example:
+
+        from unittest import TestCase
+        from eliot.testing import assertContainsFields, validateLogging
+
+        class MyTests(TestCase):
+            def assertFooLogging(self, logger):
+                assertContainsFields(self, logger.messages[0], {"key": 123})
+
+    @param assertion: A callable that will be called with the
+        L{unittest.TestCase} instance, the logger and C{assertionArgs} and
+        C{assertionKwargs} once the actual test has run, allowing for extra
+        logging-related assertions on the effects of the test.  Use L{None} if
+        you want the cleanup assertions registered but no custom assertions.
+
+    @param assertionArgs: Additional positional arguments to pass to
+        C{assertion}.
+
+    @param assertionKwargs: Additional keyword arguments to pass to
+        C{assertion}.
+
+    @param encoder_: C{json.JSONEncoder} subclass to use when validating JSON.
+    """
+    encoder_ = assertionKwargs.pop("encoder_", eliot_json_encoder)
+    def decorator(function):
+        @wraps(function)
+        def wrapper(self, *args, **kwargs):
+            skipped = False
+
+            kwargs["logger"] = logger = MemoryLogger(encoder=encoder_)
+            self.addCleanup(check_for_errors, logger)
+            # TestCase runs cleanups in reverse order, and we want this to
+            # run *before* tracebacks are checked:
+            if assertion is not None:
+                self.addCleanup(
+                    lambda: skipped
+                    or assertion(self, logger, *assertionArgs, **assertionKwargs)
+                )
+            try:
+                return function(self, *args, **kwargs)
+            except self.skipException:
+                skipped = True
+                raise
+
+        return wrapper
+
+    return decorator
+
+# PEP 8 variant:
+validate_logging = validateLogging
+
+def capture_logging(
+    assertion, *assertionArgs, **assertionKwargs
+):
+    """
+    Capture and validate all logging that doesn't specify a L{Logger}.
+
+    See L{validate_logging} for details on the rest of its behavior.
+    """
+    encoder_ = assertionKwargs.pop("encoder_", eliot_json_encoder)
+    def decorator(function):
+        @validate_logging(
+            assertion, *assertionArgs, encoder_=encoder_, **assertionKwargs
+        )
+        @wraps(function)
+        def wrapper(self, *args, **kwargs):
+            logger = kwargs["logger"]
+            previous_logger = swap_logger(logger)
+
+            def cleanup():
+                swap_logger(previous_logger)
+
+            self.addCleanup(cleanup)
+            return function(self, *args, **kwargs)
+
+        return wrapper
+
+    return decorator
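A short usage sketch of the backported helpers (ExampleTests is hypothetical; MemoryLogger here is the partial defined above, so the tolerant encoder is already bound):

    from testtools import TestCase
    from allmydata.util._eliot_updates import MemoryLogger, capture_logging

    logger = MemoryLogger()
    logger.write({"message_type": "example", "payload": b"\xff"})
    logger.validate()  # passes: eliot_json_encoder tolerates the bytes field

    class ExampleTests(TestCase):
        @capture_logging(None)  # None: only the cleanup-time validation runs
        def test_logs_something(self, logger):
            # the decorator injects the validating MemoryLogger as `logger`
            pass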
@@ -16,12 +16,14 @@ from __future__ import (
 )

 __all__ = [
+    "MemoryLogger",
     "inline_callbacks",
     "eliot_logging_service",
     "opt_eliot_destination",
     "opt_help_eliot_destinations",
     "validateInstanceOf",
     "validateSetMembership",
+    "capture_logging",
 ]

 from future.utils import PY2
@@ -32,7 +34,7 @@ from six import ensure_text
 from sys import (
     stdout,
 )
-from functools import wraps, partial
+from functools import wraps
 from logging import (
     INFO,
     Handler,
@@ -66,8 +68,6 @@ from eliot.twisted import (
     DeferredContext,
     inline_callbacks,
 )
-from eliot.testing import capture_logging as eliot_capture_logging
-
 from twisted.python.usage import (
     UsageError,
 )
@@ -87,8 +87,11 @@ from twisted.internet.defer import (
 )
 from twisted.application.service import Service

-from .jsonbytes import AnyBytesJSONEncoder
+from ._eliot_updates import (
+    MemoryLogger,
+    eliot_json_encoder,
+    capture_logging,
+)

 def validateInstanceOf(t):
     """
@@ -306,7 +309,7 @@ class _DestinationParser(object):
             rotateLength=rotate_length,
             maxRotatedFiles=max_rotated_files,
         )
-        return lambda reactor: FileDestination(get_file(), AnyBytesJSONEncoder)
+        return lambda reactor: FileDestination(get_file(), eliot_json_encoder)


 _parse_destination_description = _DestinationParser().parse
@@ -327,10 +330,3 @@ def log_call_deferred(action_type):
             return DeferredContext(d).addActionFinish()
         return logged_f
     return decorate_log_call_deferred
-
-# On Python 3, encoding bytes to JSON doesn't work, so we have a custom JSON
-# encoder we want to use when validating messages.
-if PY2:
-    capture_logging = eliot_capture_logging
-else:
-    capture_logging = partial(eliot_capture_logging, encoder_=AnyBytesJSONEncoder)
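For reference, a hedged sketch of driving the parser wired above (the comma-separated option syntax is an assumption based on the rotate_length/max_rotated_files names in the hunk):

    # "file:-" selects stdout; a path with rotation options selects a
    # size-rotated log file. Either way the returned factory builds a
    # FileDestination that encodes with eliot_json_encoder.
    get_destination = _parse_destination_description(
        "file:eliot.log,rotate_length=1000000,max_rotated_files=4")
    destination = get_destination(reactor)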