Merge pull request #556 from tahoe-lafs/2977.more-magic-folder-eliot

Convert the rest of Magic-Folder logging to Eliot. Fixes: ticket:2977

Commit: d9b7dcae57
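Note: the pattern this branch applies across the integration fixtures is to install a file destination with to_file(), decorate setup functions with @log_call so each call is recorded as an action, and wrap individual steps in start_action() so they nest under the enclosing action. A minimal standalone sketch of that pattern (not part of this diff; the action types, function, and file name below are made up):

    from eliot import to_file, log_call, start_action

    # Every Eliot message goes to this JSON-lines file (illustrative name).
    to_file(open("example.eliot.json", "w"))

    @log_call(action_type=u"demo:build", include_args=["name"], include_result=False)
    def build(name, secret):
        # "secret" is not listed in include_args, so it never reaches the log.
        with start_action(action_type=u"demo:build:paint") as action:
            colour = "blue"
            action.add_success_fields(colour=colour)
        return {"name": name, "colour": colour}

    build("widget", secret="s3kr3t")

include_args limits which arguments get serialized into the log, and include_result=False keeps return values out of it, presumably because the fixtures below return objects that are not JSON-serializable.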
@@ -8,6 +8,12 @@ from os.path import join, exists
 from tempfile import mkdtemp, mktemp
 from functools import partial

+from eliot import (
+    to_file,
+    log_call,
+    start_action,
+)
+
 from twisted.python.procutils import which
 from twisted.internet.error import (
     ProcessExitedAlready,
@@ -36,14 +42,21 @@ def pytest_addoption(parser):
         help="Keep the tmpdir with the client directories (introducer, etc)",
     )

+@pytest.fixture(autouse=True, scope='session')
+def eliot_logging():
+    with open("integration.eliot.json", "w") as f:
+        to_file(f)
+        yield
+

 # I've mostly defined these fixtures from "easiest" to "most
 # complicated", and the dependencies basically go "down the
 # page". They're all session-scoped which has the "pro" that we only
 # set up the grid once, but the "con" that each test has to be a
 # little careful they're not stepping on toes etc :/


 @pytest.fixture(scope='session')
+@log_call(action_type=u"integration:reactor", include_result=False)
 def reactor():
     # this is a fixture in case we might want to try different
     # reactors for some reason.
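The eliot_logging fixture above routes every Eliot message for the whole test session into integration.eliot.json, one JSON object per line (that is how eliot's to_file destination serializes messages). A sketch, under that assumption, of summarizing which actions completed; action_type and action_status are standard Eliot message fields, not something this branch defines:

    import json
    from collections import Counter

    # Count completed actions per action_type in the session log.
    succeeded = Counter()
    with open("integration.eliot.json") as f:
        for line in f:
            message = json.loads(line)
            # @log_call / start_action emit paired "started" and
            # "succeeded" / "failed" messages; count the completions.
            if message.get("action_status") == "succeeded":
                succeeded[message["action_type"]] += 1

    for action_type, count in succeeded.most_common():
        print("{:5d}  {}".format(count, action_type))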
@@ -52,6 +65,7 @@ def reactor():


 @pytest.fixture(scope='session')
+@log_call(action_type=u"integration:temp_dir", include_args=[])
 def temp_dir(request):
     """
     Invoke like 'py.test --keep-tempdir ...' to avoid deleting the temp-dir
@@ -76,11 +90,13 @@ def temp_dir(request):


 @pytest.fixture(scope='session')
+@log_call(action_type=u"integration:flog_binary", include_args=[])
 def flog_binary():
     return which('flogtool')[0]


 @pytest.fixture(scope='session')
+@log_call(action_type=u"integration:flog_gatherer", include_args=[])
 def flog_gatherer(reactor, temp_dir, flog_binary, request):
     out_protocol = _CollectOutputProtocol()
     gather_dir = join(temp_dir, 'flog_gather')
@@ -139,6 +155,11 @@ def flog_gatherer(reactor, temp_dir, flog_binary, request):


 @pytest.fixture(scope='session')
+@log_call(
+    action_type=u"integration:introducer",
+    include_args=["temp_dir", "flog_gatherer"],
+    include_result=False,
+)
 def introducer(reactor, temp_dir, flog_gatherer, request):
     config = '''
 [node]
@@ -190,6 +211,7 @@ log_gatherer.furl = {log_furl}


 @pytest.fixture(scope='session')
+@log_call(action_type=u"integration:introducer:furl", include_args=["temp_dir"])
 def introducer_furl(introducer, temp_dir):
     furl_fname = join(temp_dir, 'introducer', 'private', 'introducer.furl')
     while not exists(furl_fname):
@@ -200,6 +222,11 @@ def introducer_furl(introducer, temp_dir):


 @pytest.fixture(scope='session')
+@log_call(
+    action_type=u"integration:tor:introducer",
+    include_args=["temp_dir", "flog_gatherer"],
+    include_result=False,
+)
 def tor_introducer(reactor, temp_dir, flog_gatherer, request):
     config = '''
 [node]
@@ -268,6 +295,11 @@ def tor_introducer_furl(tor_introducer, temp_dir):


 @pytest.fixture(scope='session')
+@log_call(
+    action_type=u"integration:storage_nodes",
+    include_args=["temp_dir", "introducer_furl", "flog_gatherer"],
+    include_result=False,
+)
 def storage_nodes(reactor, temp_dir, introducer, introducer_furl, flog_gatherer, request):
     nodes = []
     # start all 5 nodes in parallel
@@ -287,6 +319,7 @@ def storage_nodes(reactor, temp_dir, introducer, introducer_furl, flog_gatherer,


 @pytest.fixture(scope='session')
+@log_call(action_type=u"integration:alice", include_args=[], include_result=False)
 def alice(reactor, temp_dir, introducer_furl, flog_gatherer, storage_nodes, request):
     try:
         mkdir(join(temp_dir, 'magic-alice'))
@@ -304,6 +337,7 @@ def alice(reactor, temp_dir, introducer_furl, flog_gatherer, storage_nodes, requ


 @pytest.fixture(scope='session')
+@log_call(action_type=u"integration:bob", include_args=[], include_result=False)
 def bob(reactor, temp_dir, introducer_furl, flog_gatherer, storage_nodes, request):
     try:
         mkdir(join(temp_dir, 'magic-bob'))
@@ -321,54 +355,63 @@ def bob(reactor, temp_dir, introducer_furl, flog_gatherer, storage_nodes, reques


 @pytest.fixture(scope='session')
+@log_call(action_type=u"integration:alice:invite", include_args=["temp_dir"])
 def alice_invite(reactor, alice, temp_dir, request):
     node_dir = join(temp_dir, 'alice')

-    # FIXME XXX by the time we see "client running" in the logs, the
-    # storage servers aren't "really" ready to roll yet (uploads
-    # fairly consistently fail if we don't hack in this pause...)
-    import time ; time.sleep(5)
-    proto = _CollectOutputProtocol()
-    reactor.spawnProcess(
-        proto,
-        sys.executable,
-        [
-            sys.executable, '-m', 'allmydata.scripts.runner',
-            'magic-folder', 'create',
-            '--poll-interval', '2',
-            '--basedir', node_dir, 'magik:', 'alice',
-            join(temp_dir, 'magic-alice'),
-        ]
-    )
-    pytest_twisted.blockon(proto.done)
+    with start_action(action_type=u"integration:alice:magic_folder:create"):
+        # FIXME XXX by the time we see "client running" in the logs, the
+        # storage servers aren't "really" ready to roll yet (uploads fairly
+        # consistently fail if we don't hack in this pause...)
+        import time ; time.sleep(5)
+        proto = _CollectOutputProtocol()
+        reactor.spawnProcess(
+            proto,
+            sys.executable,
+            [
+                sys.executable, '-m', 'allmydata.scripts.runner',
+                'magic-folder', 'create',
+                '--poll-interval', '2',
+                '--basedir', node_dir, 'magik:', 'alice',
+                join(temp_dir, 'magic-alice'),
+            ]
+        )
+        pytest_twisted.blockon(proto.done)

-    proto = _CollectOutputProtocol()
-    reactor.spawnProcess(
-        proto,
-        sys.executable,
-        [
-            sys.executable, '-m', 'allmydata.scripts.runner',
-            'magic-folder', 'invite',
-            '--basedir', node_dir, 'magik:', 'bob',
-        ]
-    )
-    pytest_twisted.blockon(proto.done)
-    invite = proto.output.getvalue()
-    print("invite from alice", invite)
+    with start_action(action_type=u"integration:alice:magic_folder:invite") as a:
+        proto = _CollectOutputProtocol()
+        reactor.spawnProcess(
+            proto,
+            sys.executable,
+            [
+                sys.executable, '-m', 'allmydata.scripts.runner',
+                'magic-folder', 'invite',
+                '--basedir', node_dir, 'magik:', 'bob',
+            ]
+        )
+        pytest_twisted.blockon(proto.done)
+        invite = proto.output.getvalue()
+        a.add_success_fields(invite=invite)

-    # before magic-folder works, we have to stop and restart (this is
-    # crappy for the tests -- can we fix it in magic-folder?)
-    try:
-        alice.signalProcess('TERM')
-        pytest_twisted.blockon(alice.exited)
-    except ProcessExitedAlready:
-        pass
-    magic_text = 'Completed initial Magic Folder scan successfully'
-    pytest_twisted.blockon(_run_node(reactor, node_dir, request, magic_text))
+    with start_action(action_type=u"integration:alice:magic_folder:restart"):
+        # before magic-folder works, we have to stop and restart (this is
+        # crappy for the tests -- can we fix it in magic-folder?)
+        try:
+            alice.signalProcess('TERM')
+            pytest_twisted.blockon(alice.exited)
+        except ProcessExitedAlready:
+            pass
+    with start_action(action_type=u"integration:alice:magic_folder:magic-text"):
+        magic_text = 'Completed initial Magic Folder scan successfully'
+        pytest_twisted.blockon(_run_node(reactor, node_dir, request, magic_text))
     return invite


 @pytest.fixture(scope='session')
+@log_call(
+    action_type=u"integration:magic_folder",
+    include_args=["alice_invite", "temp_dir"],
+)
 def magic_folder(reactor, alice_invite, alice, bob, temp_dir, request):
     print("pairing magic-folder")
     bob_dir = join(temp_dir, 'bob')
@@ -139,6 +139,7 @@ def _run_node(reactor, node_dir, request, magic_text):
         sys.executable,
         (
             sys.executable, '-m', 'allmydata.scripts.runner',
+            '--eliot-destination', 'file:{}/logs/eliot.json'.format(node_dir),
             'run',
             node_dir,
         ),
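With the line added above, each node also writes its own Eliot log to logs/eliot.json under its base directory, in the same JSON-lines format as integration.eliot.json. A sketch, under that assumption, of grouping such a file into per-task trees using the standard task_uuid and task_level fields (the standalone eliot-tree tool renders a similar view, if it is available):

    import json
    from collections import defaultdict

    # Group messages by task, then order each task by its task_level path.
    tasks = defaultdict(list)
    with open("logs/eliot.json") as f:      # path is illustrative
        for line in f:
            message = json.loads(line)
            tasks[message["task_uuid"]].append(message)

    for task_uuid, messages in sorted(tasks.items()):
        print("task {}".format(task_uuid))
        # task_level is a list of integers; sorting restores nesting order.
        for message in sorted(messages, key=lambda m: m["task_level"]):
            indent = "  " * len(message["task_level"])
            label = message.get("action_type") or message.get("message_type", "<message>")
            status = message.get("action_status", "")
            print("{}{} {}".format(indent, label, status).rstrip())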
newsfragments/2977.other (new file)
@@ -0,0 +1 @@
+The Magic-Folder frontend has had additional logging improvements.
(File diff suppressed because it is too large.)
@@ -57,3 +57,6 @@ import sys
 if sys.platform == "win32":
     from allmydata.windows.fixups import initialize
     initialize()
+
+from eliot import to_file
+to_file(open("eliot.log", "w"))
@@ -71,6 +71,10 @@ def eliot_logged_test(f):
     def run(self, logger):
         # Record the MemoryLogger for later message extraction.
         storage.logger = logger
+        # Give the test access to the logger as well. It would be just
+        # fine to pass this as a keyword argument to `f` but implementing
+        # that now will give me conflict headaches so I'm not doing it.
+        self.eliot_logger = logger
         return f(self, *a, **kw)

     # Arrange for all messages written to the memory logger that
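The lines added above expose the per-test MemoryLogger as self.eliot_logger; the test hunks further down use it to flush tracebacks they expect, since unflushed tracebacks fail the validation that eliot_logged_test performs. A self-contained sketch of the same idea using eliot's testing helpers directly (the test case and exception here are invented, not from this branch):

    from unittest import TestCase, main

    from eliot import write_traceback
    from eliot.testing import capture_logging

    class MyError(Exception):
        pass

    class FlushTracebackTests(TestCase):
        @capture_logging(None)
        def test_expected_traceback_is_flushed(self, logger):
            try:
                raise MyError("expected")
            except MyError:
                # Log the traceback to the MemoryLogger the decorator provides.
                write_traceback(logger)
            # Flushing marks the traceback as expected and returns its messages.
            logged = logger.flushTracebacks(MyError)
            self.assertEqual(1, len(logged))

    if __name__ == "__main__":
        main()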
@@ -9,7 +9,10 @@ from twisted.internet import defer, task, reactor

 from eliot.twisted import DeferredContext

-from allmydata.interfaces import IDirectoryNode
+from allmydata.interfaces import (
+    IDirectoryNode,
+    NoSharesError,
+)
 from allmydata.util.assertutil import precondition

 from allmydata.util import fake_inotify, fileutil, configutil, yamlutil
@@ -29,6 +32,9 @@ from allmydata import magicfolderdb, magicpath
 from allmydata.util.fileutil import get_pathinfo
 from allmydata.util.fileutil import abspath_expanduser_unicode
 from allmydata.immutable.upload import Data
+from allmydata.mutable.common import (
+    UnrecoverableFileError,
+)

 from .eliotutil import (
     eliot_logged_test,
@@ -928,6 +934,14 @@ class MagicFolderAliceBobTestMixin(MagicFolderCLITestMixin, ShouldFailMixin, Rea
         # now let bob try to do the download
         yield iterate(self.bob_magicfolder)

+        self.eliot_logger.flushTracebacks(UnrecoverableFileError)
+        logged = self.eliot_logger.flushTracebacks(NoSharesError)
+        self.assertEqual(
+            1,
+            len(logged),
+            "Got other than expected single NoSharesError: {}".format(logged),
+        )
+
         # ...however Bob shouldn't have downloaded anything
         self._check_version_in_local_db(self.bob_magicfolder, u"blam", 0)
         # bob should *not* have downloaded anything, as we failed all the servers
@@ -1482,11 +1496,6 @@ class SingleMagicFolderTestMixin(MagicFolderCLITestMixin, ShouldFailMixin, Reall

     def tearDown(self):
         d = super(SingleMagicFolderTestMixin, self).tearDown()
-        def _disable_debugging(res):
-            if self.magicfolder:
-                self.magicfolder.enable_debug_log(False)
-            return res
-        d.addBoth(_disable_debugging)
         d.addCallback(self.cleanup)
         shutil.rmtree(self.basedir, ignore_errors=True)
         return d
@@ -1593,7 +1602,6 @@ class SingleMagicFolderTestMixin(MagicFolderCLITestMixin, ShouldFailMixin, Reall
         into the magic folder, so we upload the file and record the
         directory. (XXX split to separate test)
         """
-        self.magicfolder.enable_debug_log()
         empty_tree_name = self.unicode_or_fallback(u"empty_tr\u00EAe", u"empty_tree")
         empty_tree_dir = abspath_expanduser_unicode(empty_tree_name, base=self.basedir)
         new_empty_tree_dir = abspath_expanduser_unicode(empty_tree_name, base=self.local_dir)
@@ -1785,7 +1793,8 @@ class SingleMagicFolderTestMixin(MagicFolderCLITestMixin, ShouldFailMixin, Reall
         # pending callbacks including the exception are processed
         # before we flush the errors.
         yield task.deferLater(reactor, 0, lambda: None)
-        errors = self.flushLoggedErrors(BadStuff)
+
+        errors = self.eliot_logger.flushTracebacks(BadStuff)
         # it seems on Windows the "RealTest" variant only produces 1
         # notification for some reason..
         self.assertTrue(len(errors) >= 1)
@@ -1963,8 +1972,8 @@ class MockTest(SingleMagicFolderTestMixin, TestCase):
             self.magicfolder.uploader._clock.advance(self.magicfolder.uploader._periodic_full_scan_duration + 1)
             # this will have now done the full scan, so we have to do
             # an iteration to process anything from it
-            iterate_uploader(self.magicfolder)
-            return processed_d
+            iterate_d = iterate_uploader(self.magicfolder)
+            return processed_d.addCallback(lambda ignored: iterate_d)
         d.addCallback(_create_file_without_event)
         def _advance_clock(res):
             processed_d = self.magicfolder.uploader.set_hook('processed')