mirror of https://github.com/tahoe-lafs/tahoe-lafs.git
hierarchical logging: add numbered messages and parent= args
commit cc1612aee5 (parent 725621aded)
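In short: the new allmydata.util.log module (added at the bottom of this diff) gives every msg()/err() call a sequential number, returns that number, and accepts parent= and number= keywords, so related log entries can be chained into a hierarchy. A minimal usage sketch based on the module and the test added in this commit; the message text is illustrative only:

    from allmydata.util import log

    # msg() assigns the event a sequential number and returns it
    num = log.msg("Retrieve(abc123): starting")

    # a later event can name that number as its parent, forming a hierarchy
    log.msg("Retrieve(abc123): got a share", parent=num)

    # err() behaves the same way for Failures; an explicit number= overrides the counter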
@@ -3,10 +3,10 @@ import os, struct
 from itertools import islice
 from zope.interface import implements
 from twisted.internet import defer
-from twisted.python import failure, log
+from twisted.python import failure
 from foolscap.eventual import eventually
 from allmydata.interfaces import IMutableFileNode, IMutableFileURI
-from allmydata.util import hashutil, mathutil, idlib
+from allmydata.util import hashutil, mathutil, idlib, log
 from allmydata.uri import WriteableSSKFileURI
 from allmydata.Crypto.Cipher import AES
 from allmydata import hashtree, codec
@@ -205,13 +205,20 @@ class Retrieve:
         self._storage_index = filenode.get_storage_index()
         self._readkey = filenode.get_readkey()
         self._last_failure = None
+        self._log_number = None
+        self._log_prefix = prefix = idlib.b2a(self._storage_index)[:6]
+        num = self._node._client.log("Retrieve(%s): starting" % prefix)
+        self._log_number = num

     def log(self, msg):
-        prefix = idlib.b2a(self._node.get_storage_index())[:6]
-        self._node._client.log("Retrieve(%s): %s" % (prefix, msg))
+        prefix = self._log_prefix
+        num = self._node._client.log("Retrieve(%s): %s" % (prefix, msg),
+                                     parent=self._log_number)
+        return num

     def log_err(self, f):
-        log.err(f)
+        num = log.err(f, parent=self._log_number)
+        return num

     def retrieve(self):
         """Retrieve the filenode's current contents. Returns a Deferred that
@@ -252,8 +259,6 @@ class Retrieve:
         # 7: if we discover corrupt shares during the reconstruction process,
         # remove that share from the sharemap. and start step#6 again.

-        self.log("starting retrieval")
-
         initial_query_count = 5
         self._read_size = 2000

@@ -703,13 +708,20 @@ class Publish:

     def __init__(self, filenode):
         self._node = filenode
+        self._storage_index = self._node.get_storage_index()
+        self._log_prefix = prefix = idlib.b2a(self._storage_index)[:6]
+        num = self._node._client.log("Publish(%s): starting")
+        self._log_number = num

     def log(self, msg):
-        prefix = idlib.b2a(self._node.get_storage_index())[:6]
-        self._node._client.log("Publish(%s): %s" % (prefix, msg))
+        prefix = self._log_prefix
+        num = self._node._client.log("Publish(%s): %s" % (prefix, msg),
+                                     parent=self._log_number)
+        return num

     def log_err(self, f):
-        log.err(f)
+        num = log.err(f, parent=self._log_number)
+        return num

     def publish(self, newdata, wait_for_numpeers=None):
         """Publish the filenode's current contents. Returns a Deferred that
@@ -745,9 +757,8 @@ class Publish:
         # 4a: may need to run recovery algorithm
         # 5: when enough responses are back, we're done

-        self.log("got enough peers, datalen is %s" % len(newdata))
+        self.log("starting publish, data is %r" % (newdata,))

-        self._storage_index = self._node.get_storage_index()
         self._writekey = self._node.get_writekey()
         assert self._writekey, "need write capability to publish"

@@ -7,6 +7,7 @@ from twisted.python import log
 from twisted.application import service
 from twisted.internet import defer, reactor
 from foolscap import Tub, eventual
+from allmydata.util import log as tahoe_log
 from allmydata.util import iputil, observer, humanreadable
 from allmydata.util.assertutil import precondition
 from allmydata.logpublisher import LogPublisher
@@ -240,7 +241,7 @@ class Node(service.MultiService):
         ob.formatTime = newmeth
         # TODO: twisted >2.5.0 offers maxRotatedFiles=50

-    def log(self, msg, src="", args=()):
+    def log(self, msg, src="", args=(), **kw):
         if src:
             logsrc = src
         else:
@@ -250,10 +251,9 @@ class Node(service.MultiService):
                 msg = msg % tuple(map(humanreadable.hr, args))
             except TypeError, e:
                 msg = "ERROR: output string '%s' contained invalid %% expansion, error: %s, args: %s\n" % (`msg`, e, `args`)
-        log.callWithContext({"system":logsrc},
-                            log.msg,
-                            (self.short_nodeid + ": " + humanreadable.hr(msg)))
+        msg = self.short_nodeid + ": " + humanreadable.hr(msg)
+        return log.callWithContext({"system":logsrc},
+                                   tahoe_log.msg, msg, **kw)

     def _setup_tub(self, local_addresses):
         # we can't get a dynamically-assigned portnum until our Tub is
@@ -16,8 +16,8 @@ class MyNode(Referenceable):
     pass

 class LoggingMultiService(service.MultiService):
-    def log(self, msg):
-        log.msg(msg)
+    def log(self, msg, **kw):
+        log.msg(msg, **kw)

 class TestIntroducerNode(testutil.SignalMixin, unittest.TestCase):
     def test_loadable(self):
@@ -111,7 +111,6 @@ class FakeClient:
         self._num_peers = num_peers
         self._peerids = [tagged_hash("peerid", "%d" % i)[:20]
                          for i in range(self._num_peers)]
-        self.introducer_client = FakeIntroducerClient()
     def log(self, msg):
         log.msg(msg)

@@ -14,7 +14,7 @@ from allmydata.util import testutil, fileutil
 from allmydata import logpublisher

 class LoggingMultiService(service.MultiService):
-    def log(self, msg):
+    def log(self, msg, **kw):
         pass

 class TestNode(Node):
@@ -4,8 +4,10 @@ def foo(): pass # keep the line number constant
 import os
 from twisted.trial import unittest

+from twisted.python import failure
+from twisted.python import log as twisted_log
 from allmydata.util import bencode, idlib, humanreadable, mathutil
-from allmydata.util import assertutil, fileutil, testutil
+from allmydata.util import assertutil, fileutil, testutil, log


 class IDLib(unittest.TestCase):
@@ -390,3 +392,55 @@ class PollMixinTests(unittest.TestCase):
         d = self.pm.poll(check_f=i.next,
                          pollinterval=0.1)
         return self._check(d)
+
+class SampleError(Exception):
+    pass
+
+class Log(unittest.TestCase):
+    def setUp(self):
+        self.catcher = []
+        twisted_log.addObserver(self.catcher.append)
+    def tearDown(self):
+        twisted_log.removeObserver(self.catcher.append)
+
+    def test_log(self):
+        num = log.msg("this is a message")
+        self.failUnless(isinstance(num, int))
+        log.msg("sub message", parent=num)
+        log.msg("numbered message", number=47)
+
+        f = failure.Failure(SampleError())
+        num2 = log.err(f)
+        log.err(f, parent=num2)
+        log.err(f, number=48)
+
+        logs = self.catcher[:]
+        self.flushLoggedErrors(SampleError)
+
+        self.failUnlessEqual(logs[0]['message'], ("this is a message",))
+        self.failUnlessEqual(logs[0]['number'], num)
+        self.failUnlessEqual(logs[0]['parent'], None)
+
+        self.failUnlessEqual(logs[1]['message'], ("sub message",))
+        self.failUnlessEqual(logs[1]['number'], num+1)
+        self.failUnlessEqual(logs[1]['parent'], num)
+
+        self.failUnlessEqual(logs[2]['message'], ("numbered message",))
+        self.failUnlessEqual(logs[2]['number'], 47)
+        self.failUnlessEqual(logs[2]['parent'], None)
+
+        self.failUnlessEqual(logs[3]['message'], ())
+        self.failUnlessEqual(logs[3]['failure'], f)
+        self.failUnlessEqual(logs[3]['number'], num2)
+        self.failUnlessEqual(logs[3]['parent'], None)
+
+        self.failUnlessEqual(logs[4]['message'], ())
+        self.failUnlessEqual(logs[4]['failure'], f)
+        self.failUnlessEqual(logs[4]['number'], num2+1)
+        self.failUnlessEqual(logs[4]['parent'], num2)
+
+        self.failUnlessEqual(logs[5]['message'], ())
+        self.failUnlessEqual(logs[5]['failure'], f)
+        self.failUnlessEqual(logs[5]['number'], 48)
+        self.failUnlessEqual(logs[5]['parent'], None)
+
src/allmydata/util/log.py (new file, 27 lines added)
@@ -0,0 +1,27 @@
+
+import itertools
+from twisted.python import log
+
+counter = itertools.count()
+
+def msg(*message, **kw):
+    if 'number' not in kw:
+        number = counter.next()
+        kw['number'] = number
+    else:
+        number = kw['number']
+    if 'parent' not in kw:
+        kw['parent'] = None
+    log.msg(*message, **kw)
+    return number
+
+def err(*args, **kw):
+    if 'number' not in kw:
+        number = counter.next()
+        kw['number'] = number
+    else:
+        number = kw['number']
+    if 'parent' not in kw:
+        kw['parent'] = None
+    log.err(*args, **kw)
+    return number
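Because msg() and err() simply pass the extra keywords through to twisted.python.log, any registered observer sees number and parent in the event dict, which is exactly what the new test above checks. A hedged illustration (the observer setup mirrors the test; variable names are mine):

    from twisted.python import log as twisted_log
    from allmydata.util import log

    events = []
    twisted_log.addObserver(events.append)

    first = log.msg("parent event")
    log.msg("child event", parent=first)

    # each observed event dict carries 'number' and 'parent';
    # this assumes nothing else was logged in between, as in the test above
    assert events[1]['parent'] == first
    assert events[1]['number'] == first + 1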