mirror of
https://github.com/tahoe-lafs/tahoe-lafs.git
synced 2025-02-21 02:01:31 +00:00
Add a 'tahoe status' command
This required also fixing the JSON output from the "recent and active operations" page.
This commit is contained in:
parent
2ceea6df0d
commit
4f1dd95963
@ -89,6 +89,8 @@ install_requires = [
|
||||
# <https://tahoe-lafs.org/trac/tahoe-lafs/ticket/2474>.
|
||||
"pyOpenSSL >= 0.14",
|
||||
"PyYAML >= 3.11",
|
||||
|
||||
"six >= 1.10.0",
|
||||
]
|
||||
|
||||
# Includes some indirect dependencies, but does not include allmydata.
|
||||
|
@ -3,6 +3,7 @@ from twisted.python import usage
|
||||
from allmydata.scripts.common import get_aliases, get_default_nodedir, \
|
||||
DEFAULT_ALIAS, BaseOptions
|
||||
from allmydata.util.encodingutil import argv_to_unicode, argv_to_abspath, quote_local_unicode_path
|
||||
from .tahoe_status import TahoeStatusCommand
|
||||
|
||||
NODEURL_RE=re.compile("http(s?)://([^:]*)(:([1-9][0-9]*))?")
|
||||
|
||||
@ -472,6 +473,7 @@ subCommands = [
|
||||
["stats", None, StatsOptions, "Print statistics about all files/directories in a subtree."],
|
||||
["check", None, CheckOptions, "Check a single file or directory."],
|
||||
["deep-check", None, DeepCheckOptions, "Check all files/directories reachable from a starting point."],
|
||||
["status", None, TahoeStatusCommand, "Various status information."],
|
||||
]
|
||||
|
||||
def mkdir(options):
|
||||
@ -571,6 +573,10 @@ def deepcheck(options):
|
||||
rc = tahoe_check.deepcheck(options)
|
||||
return rc
|
||||
|
||||
def status(options):
|
||||
from allmydata.scripts import tahoe_status
|
||||
return tahoe_status.do_status(options)
|
||||
|
||||
dispatch = {
|
||||
"mkdir": mkdir,
|
||||
"add-alias": add_alias,
|
||||
@ -590,4 +596,5 @@ dispatch = {
|
||||
"stats": stats,
|
||||
"check": check,
|
||||
"deep-check": deepcheck,
|
||||
"status": status,
|
||||
}
|
||||
|
@ -1,6 +1,6 @@
|
||||
|
||||
import os, sys
|
||||
from cStringIO import StringIO
|
||||
from six.moves import StringIO
|
||||
|
||||
from twisted.python import usage
|
||||
from twisted.internet import defer, task, threads
|
||||
|
236
src/allmydata/scripts/tahoe_status.py
Normal file
236
src/allmydata/scripts/tahoe_status.py
Normal file
@ -0,0 +1,236 @@
|
||||
from __future__ import print_function
|
||||
|
||||
import os
|
||||
import urllib
|
||||
|
||||
import json
|
||||
|
||||
from .common import BaseOptions
|
||||
from allmydata.scripts.common import get_default_nodedir
|
||||
from allmydata.scripts.common_http import do_http, BadResponse
|
||||
from allmydata.util.abbreviate import abbreviate_space, abbreviate_time
|
||||
from allmydata.util.encodingutil import argv_to_abspath
|
||||
|
||||
|
||||
def _get_json_for_fragment(options, fragment, method='GET', post_args=None):
|
||||
"""
|
||||
returns the JSON for a particular URI-fragment (to which is
|
||||
pre-pended the node's URL)
|
||||
"""
|
||||
nodeurl = options['node-url']
|
||||
if nodeurl.endswith('/'):
|
||||
nodeurl = nodeurl[:-1]
|
||||
|
||||
url = u'%s/%s' % (nodeurl, fragment)
|
||||
if method == 'POST':
|
||||
if post_args is None:
|
||||
raise ValueError("Must pass post_args= for POST method")
|
||||
body = urllib.urlencode(post_args)
|
||||
else:
|
||||
body = ''
|
||||
if post_args is not None:
|
||||
raise ValueError("post_args= only valid for POST method")
|
||||
resp = do_http(method, url, body=body)
|
||||
if isinstance(resp, BadResponse):
|
||||
# specifically NOT using format_http_error() here because the
|
||||
# URL is pretty sensitive (we're doing /uri/<key>).
|
||||
raise RuntimeError(
|
||||
"Failed to get json from '%s': %s" % (nodeurl, resp.error)
|
||||
)
|
||||
|
||||
data = resp.read()
|
||||
parsed = json.loads(data)
|
||||
if parsed is None:
|
||||
raise RuntimeError("No data from '%s'" % (nodeurl,))
|
||||
return parsed
|
||||
|
||||
|
||||
def _get_json_for_cap(options, cap):
    """
    Fetch JSON status for a single capability-string via the node's
    ``uri/<cap>?t=json`` endpoint (the cap is URL-quoted first).
    """
    # quote moved to urllib.parse on Python 3; fall back for Python 2
    try:
        from urllib.parse import quote
    except ImportError:
        from urllib import quote
    return _get_json_for_fragment(
        options,
        'uri/%s?t=json' % quote(cap),
    )
|
||||
|
||||
def pretty_progress(percent, size=10, ascii=False):
    """
    Displays a unicode or ascii based progress bar of a certain
    length. Should we just depend on a library instead?

    (Originally from txtorcon)

    :param percent: completion, in the range 0.0 - 100.0
    :param size: total width of the bar in character cells
    :param ascii: if True, render with ASCII characters only
    :returns: a string of width ``size`` (full cells, one partial
        cell, then padding spaces; no partial cell at >= 100%)
    """
    # number of completely-filled cells, plus the fractional remainder
    curr = int(percent / 100.0 * size)
    part = (percent / (100.0 / size)) - curr

    if ascii:
        # four ASCII "shades" for the partial cell
        part = int(part * 4)
        part = '.oO%'[part]
        block_chr = '#'

    else:
        block_chr = u'\u2588'
        # there are 8 unicode characters for vertical-bars/horiz-bars
        part = int(part * 8)

        # unicode 0x2581 -> 2589 are vertical bar chunks, like rainbarf uses
        # and following are narrow -> wider bars
        # NOTE: unichr() exists only on Python 2; chr() handles
        # code-points > 0xFF on Python 3.
        try:
            part = unichr(0x258f - part)  # for smooth bar
        except NameError:
            part = chr(0x258f - part)  # for smooth bar
        # part = unichr(0x2581 + part) # for neater-looking thing

    # hack for 100+ full so we don't print extra really-narrow/high bar
    if percent >= 100.0:
        part = ''
    curr = int(curr)
    return '%s%s%s' % ((block_chr * curr), part, (' ' * (size - curr - 1)))
|
||||
|
||||
|
||||
def do_status(options):
    """
    Implement the 'tahoe status' command: print node statistics plus
    tables of any active and recent upload/download operations.

    :param options: a TahoeStatusCommand-like mapping with
        'node-directory' and 'verbose' keys and .stdout/.stderr streams
    :returns: 0 on success, 2 if the node could not be contacted
    """
    nodedir = options["node-directory"]
    # the web-API auth token and the node's URL both live inside the
    # node directory
    with open(os.path.join(nodedir, u'private', u'api_auth_token'), 'r') as f:
        token = f.read().strip()
    with open(os.path.join(nodedir, u'node.url'), 'r') as f:
        options['node-url'] = f.read().strip()

    # do *all* our data-retrievals first in case there's an error
    try:
        # both endpoints require a POST with the auth token
        status_data = _get_json_for_fragment(
            options,
            'status?t=json',
            method='POST',
            post_args=dict(
                t='json',
                token=token,
            )
        )
        statistics_data = _get_json_for_fragment(
            options,
            'statistics?t=json',
            method='POST',
            post_args=dict(
                t='json',
                token=token,
            )
        )

    except Exception as e:
        print(u"failed to retrieve data: %s" % str(e), file=options.stderr)
        return 2

    # counters may be absent on a freshly-started node; default to 0
    downloaded_bytes = statistics_data['counters'].get('downloader.bytes_downloaded', 0)
    downloaded_files = statistics_data['counters'].get('downloader.files_downloaded', 0)
    uploaded_bytes = statistics_data['counters'].get('uploader.bytes_uploaded', 0)
    uploaded_files = statistics_data['counters'].get('uploader.files_uploaded', 0)
    print(u"Statistics (for last {}):".format(abbreviate_time(statistics_data['stats']['node.uptime'])), file=options.stdout)
    print(u" uploaded {} in {} files".format(abbreviate_space(uploaded_bytes), uploaded_files), file=options.stdout)
    print(u" downloaded {} in {} files".format(abbreviate_space(downloaded_bytes), downloaded_files), file=options.stdout)
    print(u"", file=options.stdout)

    if status_data.get('active', None):
        # render a box-drawing table of in-progress operations
        print(u"Active operations:", file=options.stdout)
        print(
            u"\u2553 {:<5} \u2565 {:<26} \u2565 {:<22} \u2565 {}".format(
                "type",
                "storage index",
                "progress",
                "status message",
            ), file=options.stdout
        )
        print(u"\u255f\u2500{}\u2500\u256b\u2500{}\u2500\u256b\u2500{}\u2500\u256b\u2500{}".format(u'\u2500' * 5, u'\u2500' * 26, u'\u2500' * 22, u'\u2500' * 20), file=options.stdout)
        for op in status_data['active']:
            op_type = 'UKN '
            # uploads report three separate progress fractions; downloads
            # report a single 'progress' value
            if 'progress-hash' in op:
                op_type = ' put '
                total = (op['progress-hash'] + op['progress-ciphertext'] + op['progress-encode-push']) / 3.0
                progress_bar = u"{}".format(pretty_progress(total * 100.0, size=15))
            else:
                op_type = ' get '
                total = op['progress']
                progress_bar = u"{}".format(pretty_progress(op['progress'] * 100.0, size=15))
            # **op supplies storage-index-string and status
            print(
                u"\u2551 {op_type} \u2551 {storage-index-string} \u2551 {progress_bar} ({total:3}%) \u2551 {status}".format(
                    op_type=op_type,
                    progress_bar=progress_bar,
                    total=int(total * 100.0),
                    **op
                ), file=options.stdout
            )

        print(u"\u2559\u2500{}\u2500\u2568\u2500{}\u2500\u2568\u2500{}\u2500\u2568\u2500{}".format(u'\u2500' * 5, u'\u2500' * 26, u'\u2500' * 22, u'\u2500' * 20), file=options.stdout)
    else:
        print(u"No active operations.", file=options.stdout)

    if status_data.get('recent', None):
        # without --verbose only plain upload/download ops are shown
        non_verbose_ops = ('upload', 'download')
        recent = [op for op in status_data['recent'] if op['type'] in non_verbose_ops]
        print(u"\nRecent operations:", file=options.stdout)
        if len(recent) or options['verbose']:
            print(
                u"\u2553 {:<5} \u2565 {:<26} \u2565 {:<10} \u2565 {}".format(
                    "type",
                    "storage index",
                    "size",
                    "status message",
                ), file=options.stdout
            )

        # map the JSON 'type' value to a fixed-width label
        op_map = {
            'upload': ' put ',
            'download': ' get ',
            'retrieve': 'retr ',
            'publish': ' pub ',
            'mapupdate': 'mapup',
        }

        ops_to_show = status_data['recent'] if options['verbose'] else recent
        for op in ops_to_show:
            op_type = op_map[op.get('type', None)]
            # mapupdate ops have no size; show their mode instead
            if op['type'] == 'mapupdate':
                nice_size = op['mode']
            else:
                nice_size = abbreviate_space(op['total-size'])
            print(
                u"\u2551 {op_type} \u2551 {storage-index-string} \u2551 {nice_size:<10} \u2551 {status}".format(
                    op_type=op_type,
                    nice_size=nice_size,
                    **op
                ), file=options.stdout
            )

        if len(recent) or options['verbose']:
            print(u"\u2559\u2500{}\u2500\u2568\u2500{}\u2500\u2568\u2500{}\u2500\u2568\u2500{}".format(u'\u2500' * 5, u'\u2500' * 26, u'\u2500' * 10, u'\u2500' * 20), file=options.stdout)
        skipped = len(status_data['recent']) - len(ops_to_show)
        if not options['verbose'] and skipped:
            print(u"  Skipped {} non-upload/download operations; use --verbose to see".format(skipped), file=options.stdout)
    else:
        print(u"No recent operations.", file=options.stdout)

    # open question: should we return non-zero if there were no
    # operations at all to display?
    return 0
|
||||
|
||||
|
||||
class TahoeStatusCommand(BaseOptions):
    """
    Option parser for the 'tahoe status' sub-command.
    """

    optFlags = [
        ["verbose", "v", "Include publish, retrieve, mapupdate in ops"],
    ]

    def postOptions(self):
        # resolve our node directory: an explicit global option wins,
        # otherwise fall back to the platform default location
        nodedir = self.parent['node-directory']
        if not nodedir:
            self['node-directory'] = get_default_nodedir()
        else:
            self['node-directory'] = argv_to_abspath(nodedir)

    def getSynopsis(self):
        return "Usage: tahoe [global-options] status [options]"

    def getUsage(self, width=None):
        base = BaseOptions.getUsage(self, width)
        return base + "Various status information"
|
||||
|
||||
|
||||
# register "tahoe status" with the top-level CLI dispatcher
subCommands = [
    ["status", None, TahoeStatusCommand,
     "Status."],
]
|
248
src/allmydata/test/cli/test_status.py
Normal file
248
src/allmydata/test/cli/test_status.py
Normal file
@ -0,0 +1,248 @@
|
||||
import os
|
||||
import mock
|
||||
import json
|
||||
import tempfile
|
||||
from StringIO import StringIO
|
||||
from os.path import join
|
||||
from UserDict import UserDict
|
||||
|
||||
from twisted.trial import unittest
|
||||
from twisted.internet import defer
|
||||
from twisted.web.client import getPage
|
||||
|
||||
from allmydata.mutable.publish import MutableData
|
||||
from allmydata.scripts.common_http import BadResponse
|
||||
from allmydata.scripts.tahoe_status import _get_json_for_fragment
|
||||
from allmydata.scripts.tahoe_status import _get_json_for_cap
|
||||
from allmydata.scripts.tahoe_status import pretty_progress
|
||||
from allmydata.scripts.tahoe_status import do_status
|
||||
|
||||
from ..no_network import GridTestMixin
|
||||
from .common import CLITestMixin
|
||||
|
||||
|
||||
class ProgressBar(unittest.TestCase):
    """
    Pin the exact rendering of pretty_progress() for both the ASCII
    and the unicode block-character styles.
    """

    def test_ascii0(self):
        # 8 full cells, an empty partial cell, one pad space
        prog = pretty_progress(80.0, size=10, ascii=True)
        self.assertEqual('########. ', prog)

    def test_ascii1(self):
        prog = pretty_progress(10.0, size=10, ascii=True)
        self.assertEqual('#.        ', prog)

    def test_ascii2(self):
        # the 0.3 remainder selects the 'o' partial glyph
        prog = pretty_progress(13.0, size=10, ascii=True)
        self.assertEqual('#o        ', prog)

    def test_ascii3(self):
        prog = pretty_progress(90.0, size=10, ascii=True)
        self.assertEqual('#########.', prog)

    def test_unicode0(self):
        self.assertEqual(
            pretty_progress(82.0, size=10, ascii=False),
            u'\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258e ',
        )

    def test_unicode1(self):
        # at exactly 100% the partial cell is suppressed entirely
        self.assertEqual(
            pretty_progress(100.0, size=10, ascii=False),
            u'\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588',
        )
|
||||
|
||||
|
||||
class _FakeOptions(UserDict, object):
    """
    A stand-in for the options object do_status() expects: a dict-like
    of option values plus .stdout/.stderr streams, backed by a real
    temporary node directory containing the api_auth_token and
    node.url files that do_status() reads.
    """
    def __init__(self):
        super(_FakeOptions, self).__init__()
        # build a minimal on-disk node directory
        self._tmp = tempfile.mkdtemp()
        os.mkdir(join(self._tmp, 'private'), 0o777)
        with open(join(self._tmp, 'private', 'api_auth_token'), 'w') as f:
            f.write('a' * 32)
        with open(join(self._tmp, 'node.url'), 'w') as f:
            f.write('localhost:9000')

        self['node-directory'] = self._tmp
        self['verbose'] = True
        # captured output streams, inspectable by the tests
        self.stdout = StringIO()
        self.stderr = StringIO()
|
||||
|
||||
|
||||
class Integration(GridTestMixin, CLITestMixin, unittest.TestCase):
    """
    End-to-end tests that run 'tahoe status' against a real
    no-network grid with one uploaded mutable file.
    """

    @defer.inlineCallbacks
    def setUp(self):
        yield super(Integration, self).setUp()
        self.basedir = "cli/status"
        self.set_up_grid()

        # upload something
        c0 = self.g.clients[0]
        data = MutableData("data" * 100)
        filenode = yield c0.create_mutable_file(data)
        self.uri = filenode.get_uri()

        # make sure our web-port is actually answering
        yield getPage('http://127.0.0.1:{}/status?t=json'.format(self.client_webports[0]))

    def test_simple(self):
        # run without --verbose: the mutable-file ops should be skipped
        d = self.do_cli('status')# '--verbose')

        def _check(ign):
            code, stdout, stdin = ign
            self.assertEqual(code, 0)
            # the mutable upload from setUp is a non-upload/download op
            self.assertTrue('Skipped 1' in stdout)
        d.addCallback(_check)
        return d

    @mock.patch('sys.stdout')
    def test_help(self, fake):
        # exercise the --help renderer; output goes to the mocked stdout
        return self.do_cli('status', '--help')
|
||||
|
||||
|
||||
class CommandStatus(unittest.TestCase):
    """
    These tests just exercise the renderers and ensure they don't
    catastrophically fail.

    They could be enhanced to look for "some" magic strings in the
    results and assert they're in the output.
    """

    def setUp(self):
        self.options = _FakeOptions()

    @mock.patch('allmydata.scripts.tahoe_status.do_http')
    @mock.patch('sys.stdout', StringIO())
    def test_no_operations(self, http):
        # canned responses: first the /status JSON, then /statistics
        values = [
            StringIO(json.dumps({
                "active": [],
                "recent": [],
            })),
            StringIO(json.dumps({
                "counters": {
                    "bytes_downloaded": 0,
                },
                "stats": {
                    "node.uptime": 0,
                }
            })),
        ]
        # hand back the canned responses in order, one per do_http call
        http.side_effect = lambda *args, **kw: values.pop(0)
        do_status(self.options)

    @mock.patch('allmydata.scripts.tahoe_status.do_http')
    @mock.patch('sys.stdout', StringIO())
    def test_simple(self, http):
        # one active download (bare 'progress') and one active upload
        # (the three 'progress-*' keys), plus one recent download
        values = [
            StringIO(json.dumps({
                "active": [
                    {
                        "progress": 0.5,
                        "storage-index-string": "index0",
                        "status": "foo",
                    },
                    {
                        "progress-hash": 1.0,
                        "progress-ciphertext": 1.0,
                        "progress-encode-push": 0.5,
                        "storage-index-string": "index1",
                        "status": "bar",
                    }
                ],
                "recent": [
                    {
                        "type": "download",
                        "total-size": 12345,
                        "storage-index-string": "index1",
                        "status": "bar",
                    },
                ]
            })),
            StringIO(json.dumps({
                "counters": {
                    "bytes_downloaded": 0,
                },
                "stats": {
                    "node.uptime": 0,
                }
            })),
        ]
        http.side_effect = lambda *args, **kw: values.pop(0)
        do_status(self.options)

    @mock.patch('allmydata.scripts.tahoe_status.do_http')
    def test_fetch_error(self, http):
        # a failing fetch must be reported, not raised out of do_status

        def boom(*args, **kw):
            raise RuntimeError("boom")
        http.side_effect = boom
        do_status(self.options)
|
||||
|
||||
|
||||
class JsonHelpers(unittest.TestCase):
    """
    Unit-tests for _get_json_for_fragment() and _get_json_for_cap(),
    with do_http mocked out.
    """

    @mock.patch('allmydata.scripts.tahoe_status.do_http')
    def test_bad_response(self, http):
        # a BadResponse is turned into a RuntimeError (without leaking
        # the sensitive URL into the message)
        http.return_value = BadResponse('the url', 'some err')
        with self.assertRaises(RuntimeError) as ctx:
            _get_json_for_fragment({'node-url': 'http://localhost:1234'}, '/fragment')
        self.assertTrue(
            "Failed to get" in str(ctx.exception)
        )

    @mock.patch('allmydata.scripts.tahoe_status.do_http')
    def test_happy_path(self, http):
        http.return_value = StringIO('{"some": "json"}')
        resp = _get_json_for_fragment({'node-url': 'http://localhost:1234/'}, '/fragment/')
        self.assertEqual(resp, dict(some='json'))

    @mock.patch('allmydata.scripts.tahoe_status.do_http')
    def test_happy_path_post(self, http):
        http.return_value = StringIO('{"some": "json"}')
        resp = _get_json_for_fragment(
            {'node-url': 'http://localhost:1234/'},
            '/fragment/',
            method='POST',
            post_args={'foo': 'bar'}
        )
        self.assertEqual(resp, dict(some='json'))

    @mock.patch('allmydata.scripts.tahoe_status.do_http')
    def test_happy_path_for_cap(self, http):
        http.return_value = StringIO('{"some": "json"}')
        resp = _get_json_for_cap({'node-url': 'http://localhost:1234'}, 'fake cap')
        self.assertEqual(resp, dict(some='json'))

    @mock.patch('allmydata.scripts.tahoe_status.do_http')
    def test_no_data_returned(self, http):
        # JSON 'null' counts as "no data" and must be rejected
        http.return_value = StringIO('null')

        with self.assertRaises(RuntimeError) as ctx:
            _get_json_for_cap({'node-url': 'http://localhost:1234'}, 'fake cap')
        self.assertTrue('No data from' in str(ctx.exception))

    def test_no_post_args(self):
        # POST without post_args is a caller error
        with self.assertRaises(ValueError) as ctx:
            _get_json_for_fragment(
                {'node-url': 'http://localhost:1234'},
                '/fragment',
                method='POST',
                post_args=None,
            )
        self.assertTrue(
            "Must pass post_args" in str(ctx.exception)
        )

    def test_post_args_for_get(self):
        # post_args with GET is a caller error
        with self.assertRaises(ValueError) as ctx:
            _get_json_for_fragment(
                {'node-url': 'http://localhost:1234'},
                '/fragment',
                method='GET',
                post_args={'foo': 'bar'}
            )
        self.assertTrue(
            "only valid for POST" in str(ctx.exception)
        )
|
@ -1,6 +1,6 @@
|
||||
import os, signal, sys, time
|
||||
from random import randrange
|
||||
from cStringIO import StringIO
|
||||
from six.moves import StringIO
|
||||
|
||||
from twisted.internet import reactor, defer
|
||||
from twisted.python import failure
|
||||
@ -28,7 +28,8 @@ def run_cli(verb, *args, **kwargs):
|
||||
nodeargs = kwargs.get("nodeargs", [])
|
||||
argv = nodeargs + [verb] + list(args)
|
||||
stdin = kwargs.get("stdin", "")
|
||||
stdout, stderr = StringIO(), StringIO()
|
||||
stdout = StringIO()
|
||||
stderr = StringIO()
|
||||
d = defer.succeed(argv)
|
||||
d.addCallback(runner.parse_or_exit_with_explanation, stdout=stdout)
|
||||
d.addCallback(runner.dispatch,
|
||||
|
@ -177,6 +177,10 @@ class NoNetworkStorageBroker(object):
|
||||
return None
|
||||
def when_connected_enough(self, threshold):
|
||||
return defer.Deferred()
|
||||
def get_all_serverids(self):
|
||||
return [] # FIXME?
|
||||
def get_known_servers(self):
|
||||
return [] # FIXME?
|
||||
|
||||
class NoNetworkClient(Client):
|
||||
|
||||
|
@ -966,30 +966,53 @@ class Status(rend.Page):
|
||||
return rend.Page.renderHTTP(self, ctx)
|
||||
|
||||
    def json(self, req):
        """
        Render the recent-and-active-operations page as JSON: a dict
        with 'active' and 'recent' lists, each entry marshalled by
        _marshal_json() below.
        """
        # modern browsers now render this instead of forcing downloads
        req.setHeader("content-type", "application/json")
        data = {}
        data["active"] = active = []
        data["recent"] = recent = []

        def _marshal_json(s):
            # common item data
            item = {
                "storage-index-string": base32.b2a_or_none(s.get_storage_index()),
                "total-size": s.get_size(),
                "status": s.get_status(),
            }

            # type-specific item date
            if IUploadStatus.providedBy(s):
                # uploads expose three progress fractions
                h, c, e = s.get_progress()
                item["type"] = "upload"
                item["progress-hash"] = h
                item["progress-ciphertext"] = c
                item["progress-encode-push"] = e

            elif IDownloadStatus.providedBy(s):
                item["type"] = "download"
                item["progress"] = s.get_progress()

            elif IPublishStatus.providedBy(s):
                item["type"] = "publish"

            elif IRetrieveStatus.providedBy(s):
                item["type"] = "retrieve"

            elif IServermapUpdaterStatus.providedBy(s):
                item["type"] = "mapupdate"
                item["mode"] = s.get_mode()

            else:
                # fall back to the class name so unknown status types
                # are still identifiable in the output
                item["type"] = "unknown"
                item["class"] = s.__class__.__name__

            return item

        for s in self._get_active_operations():
            active.append(_marshal_json(s))

        for s in self._get_recent_operations():
            recent.append(_marshal_json(s))

        return json.dumps(data, indent=1) + "\n"
|
||||
|
||||
@ -1010,6 +1033,8 @@ class Status(rend.Page):
|
||||
active = [s
|
||||
for s in self._get_all_statuses()
|
||||
if s.get_active()]
|
||||
active.sort(lambda a, b: cmp(a.get_started(), b.get_started()))
|
||||
active.reverse()
|
||||
return active
|
||||
|
||||
def data_recent_operations(self, ctx, data):
|
||||
@ -1019,7 +1044,7 @@ class Status(rend.Page):
|
||||
recent = [s
|
||||
for s in self._get_all_statuses()
|
||||
if not s.get_active()]
|
||||
recent.sort(lambda a,b: cmp(a.get_started(), b.get_started()))
|
||||
recent.sort(lambda a, b: cmp(a.get_started(), b.get_started()))
|
||||
recent.reverse()
|
||||
return recent
|
||||
|
||||
|
Loading…
x
Reference in New Issue
Block a user