Mirror of https://github.com/tahoe-lafs/tahoe-lafs.git (synced 2024-12-24 07:06:41 +00:00)
remove pickle-based stats output altogether

* update munin plugin to read JSON, not pickle
* update docs

This commit is contained in:
  parent e1dba4abeb
  commit ea473cdc76

Changed paths: docs, misc/operations_helpers/munin, src/allmydata, topfiles
@@ -309,15 +309,14 @@ and the next time the gatherer is started, it will start listening on the
 given port. The portnum file is actually a "strports specification string",
 as described in :doc:`configuration`.
 
-Once running, the stats gatherer will create a standard python "pickle" file
-in ``$BASEDIR/stats.pickle``, and a standard JSON file in
+Once running, the stats gatherer will create a standard JSON file in
 ``$BASEDIR/stats.json``. Once a minute, the gatherer will pull stats
-information from every connected node and write them into the pickle. The
-pickle will contain a dictionary, in which node identifiers (known as "tubid"
-strings) are the keys, and the values are a dict with 'timestamp', 'nickname',
-and 'stats' keys. d[tubid][stats] will contain the stats dictionary as made
-available at http://localhost:3456/statistics?t=json . The pickle file will
-only contain the most recent update from each node.
+information from every connected node and write them into the file. The file
+will contain a dictionary, in which node identifiers (known as "tubid"
+strings) are the keys, and the values are a dict with 'timestamp',
+'nickname', and 'stats' keys. d[tubid][stats] will contain the stats
+dictionary as made available at http://localhost:3456/statistics?t=json . The
+file will only contain the most recent update from each node.
 
 Other tools can be built to examine these stats and render them into
 something useful. For example, a tool could sum the
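
For illustration (not part of this commit), a minimal Python sketch of a consumer of the stats.json layout described above; the file path is an example, and the fields follow the 'timestamp'/'nickname'/'stats' structure documented in the hunk:

    import json

    # stats.json maps tubid -> {'timestamp': ..., 'nickname': ..., 'stats': {...}}
    with open("stats.json") as f:   # example path; normally $BASEDIR/stats.json
        gathered = json.load(f)

    for tubid, entry in sorted(gathered.items()):
        print("%s [%s]: %d stats keys, updated at %s"
              % (entry["nickname"], tubid, len(entry["stats"]), entry["timestamp"]))
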
@@ -336,7 +335,7 @@ last year).
 
 Most of the plugins are designed to pull stats from a single Tahoe node, and
 are configured with the e.g. http://localhost:3456/statistics?t=json URL. The
-"tahoe_stats" plugin is designed to read from the pickle file created by the
+"tahoe_stats" plugin is designed to read from the JSON file created by the
 stats-gatherer. Some plugins are to be used with the disk watcher, and a few
 (like tahoe_nodememory) are designed to watch the node processes directly
 (and must therefore run on the same host as the target node).
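
As a sketch of how a plugin might poll that per-node URL directly (assuming the node's default web port 3456 is reachable; this is not the actual plugin code):

    import json
    try:
        from urllib.request import urlopen   # Python 3
    except ImportError:
        from urllib2 import urlopen          # Python 2

    url = "http://localhost:3456/statistics?t=json"
    node_stats = json.loads(urlopen(url).read().decode("utf-8"))
    print(sorted(node_stats.keys()))
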
@@ -1,12 +1,12 @@
 [tahoe_storage_allocated]
-env.statsfile /home/robk/trees/tahoe/stats_gatherer/stats.pickle
+env.statsfile /home/robk/trees/tahoe/stats_gatherer/stats.json
 [tahoe_storage_consumed]
-env.statsfile /home/robk/trees/tahoe/stats_gatherer/stats.pickle
+env.statsfile /home/robk/trees/tahoe/stats_gatherer/stats.json
 [tahoe_runtime_load_avg]
-env.statsfile /home/robk/trees/tahoe/stats_gatherer/stats.pickle
+env.statsfile /home/robk/trees/tahoe/stats_gatherer/stats.json
 [tahoe_runtime_load_peak]
-env.statsfile /home/robk/trees/tahoe/stats_gatherer/stats.pickle
+env.statsfile /home/robk/trees/tahoe/stats_gatherer/stats.json
 [tahoe_storage_bytes_added]
-env.statsfile /home/robk/trees/tahoe/stats_gatherer/stats.pickle
+env.statsfile /home/robk/trees/tahoe/stats_gatherer/stats.json
 [tahoe_storage_bytes_freed]
-env.statsfile /home/robk/trees/tahoe/stats_gatherer/stats.pickle
+env.statsfile /home/robk/trees/tahoe/stats_gatherer/stats.json
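
Each "env.statsfile" line above relies on munin's usual convention of exporting "env.NAME value" from the plugin config as the environment variable NAME in the plugin process. A minimal sketch of the plugin side of that handshake (variable handling only, not the real tahoe_stats plugin):

    import json
    import os

    # munin config: env.statsfile /home/robk/trees/tahoe/stats_gatherer/stats.json
    statsfile = os.environ.get("statsfile", "stats.json")
    with open(statsfile) as f:
        gathered = json.load(f)
    print("nodes reporting: %d" % len(gathered))
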
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 import os
-import pickle
+import json
 import re
 import sys
 import time
@@ -428,7 +428,7 @@ def smash_name(name):
 
 def open_stats(fname):
     f = open(fname, 'rb')
-    stats = pickle.load(f)
+    stats = json.load(f)
     f.close()
     return stats
 
@@ -1,7 +1,6 @@
 
 import json
 import os
-import pickle
 import pprint
 import time
 from collections import deque
@@ -241,23 +240,23 @@ class StdOutStatsGatherer(StatsGatherer):
         print '"%s" [%s]:' % (nickname, tubid)
         pprint.pprint(stats)
 
-class PickleStatsGatherer(StdOutStatsGatherer):
+class JSONStatsGatherer(StdOutStatsGatherer):
     # inherit from StdOutStatsGatherer for connect/disconnect notifications
 
     def __init__(self, basedir=u".", verbose=True):
         self.verbose = verbose
         StatsGatherer.__init__(self, basedir)
-        self.picklefile = os.path.join(basedir, "stats.pickle")
+        self.jsonfile = os.path.join(basedir, "stats.json")
 
-        if os.path.exists(self.picklefile):
-            f = open(self.picklefile, 'rb')
+        if os.path.exists(self.jsonfile):
+            f = open(self.jsonfile, 'rb')
             try:
-                self.gathered_stats = pickle.load(f)
+                self.gathered_stats = json.load(f)
             except Exception:
-                print ("Error while attempting to load pickle file %s.\n"
-                       "You may need to restore this file from a backup, or delete it if no backup is available.\n" %
-                       quote_local_unicode_path(self.picklefile))
+                print ("Error while attempting to load stats file %s.\n"
+                       "You may need to restore this file from a backup,"
+                       " or delete it if no backup is available.\n" %
+                       quote_local_unicode_path(self.jsonfile))
                 raise
             f.close()
         else:
@@ -268,20 +267,9 @@ class PickleStatsGatherer(StdOutStatsGatherer):
         s['timestamp'] = time.time()
         s['nickname'] = nickname
         s['stats'] = stats
-        self.dump_pickle()
+        self.dump_json()
 
-    def dump_pickle(self):
-        tmp = "%s.tmp" % (self.picklefile,)
-        f = open(tmp, 'wb')
-        pickle.dump(self.gathered_stats, f)
-        f.close()
-        if os.path.exists(self.picklefile):
-            os.unlink(self.picklefile)
-        os.rename(tmp, self.picklefile)
-
     def dump_json(self):
-        # Same logic as pickle, but using JSON instead.
         tmp = "%s.tmp" % (self.jsonfile,)
         f = open(tmp, 'wb')
         json.dump(self.gathered_stats, f)
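
The removed "# Same logic as pickle" comment indicates dump_json follows the same write-to-a-temporary-file-then-rename pattern that dump_pickle used, so a crash mid-write leaves the previous stats.json intact. A standalone sketch of that pattern (the helper name is illustrative, not code from this commit):

    import json
    import os

    def atomic_dump_json(obj, path):
        # Write the whole document to a sibling temp file first ...
        tmp = "%s.tmp" % (path,)
        f = open(tmp, "w")
        json.dump(obj, f)
        f.close()
        # ... then move it into place, so readers see either the old file
        # or the complete new one, never a partial write.
        if os.path.exists(path):
            os.unlink(path)   # rename() cannot overwrite on some platforms
        os.rename(tmp, path)
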
@@ -303,7 +291,7 @@ class StatsGathererService(service.MultiService):
         self.tub.setOption("logRemoteFailures", True)
         self.tub.setOption("expose-remote-exception-types", False)
 
-        self.stats_gatherer = PickleStatsGatherer(self.basedir, verbose)
+        self.stats_gatherer = JSONStatsGatherer(self.basedir, verbose)
         self.stats_gatherer.setServiceParent(self)
 
         portnumfile = os.path.join(self.basedir, "portnum")
topfiles/PR242.docs (new file, 4 lines)

@@ -0,0 +1,4 @@
+The "stats gatherer" (created with 'tahoe create-stats-gatherer') now updates
+a JSON file named "stats.json"; previously it used Pickle and "stats.pickle".
+The munin plugins in misc/operations_helpers/munin/ have been updated to
+match, and must be re-installed and re-configured if you use munin.
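
As a usage sketch tying the pieces together, a tool in the spirit of the tahoe_storage_allocated plugin could sum one counter across every node recorded in the gatherer's stats.json. The counter key below is an assumption for illustration, not something this commit defines:

    import json

    with open("stats.json") as f:   # example path; normally $BASEDIR/stats.json
        gathered = json.load(f)

    COUNTER = "storage_server.allocated"   # assumed key name, for illustration only
    total = sum(entry["stats"].get(COUNTER, 0)
                for entry in gathered.values())
    print("%s summed over %d nodes: %d" % (COUNTER, len(gathered), total))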