mirror of
https://github.com/tahoe-lafs/tahoe-lafs.git
synced 2025-03-10 22:43:52 +00:00
Improve error-reporting from JSON APIs, and use it better in CLI
Improve error-handling for directories if you ask for JSON from the /uri endpoint, but an error occurs (you get a proper HTTP status code and a valid JSON object). For 'tahoe magic-folder status' we now retrieve *all* the remote data required in the CLI before doing anything else so that errors can be shown immediately. Use the improved JSON endpoints to print better errors.
This commit is contained in:
parent
af1e2a1e2f
commit
a6920d9799
@ -241,11 +241,7 @@ def _get_json_for_fragment(options, fragment, method='GET', post_args=None):
|
||||
)
|
||||
|
||||
data = resp.read()
|
||||
try:
|
||||
parsed = simplejson.loads(data)
|
||||
except Exception:
|
||||
print "Failed to parse reply:\n%s" % (data,)
|
||||
return []
|
||||
parsed = simplejson.loads(data)
|
||||
if parsed is None:
|
||||
raise RuntimeError("No data from '%s'" % (nodeurl,))
|
||||
return parsed
|
||||
@ -297,15 +293,36 @@ def status(options):
|
||||
dmd_cap = f.read().strip()
|
||||
with open(os.path.join(nodedir, u"private", u"collective_dircap")) as f:
|
||||
collective_readcap = f.read().strip()
|
||||
with open(os.path.join(nodedir, u'private', u'api_auth_token'), 'rb') as f:
|
||||
token = f.read()
|
||||
|
||||
# do *all* our data-retrievals first in case there's an error
|
||||
try:
|
||||
captype, dmd = _get_json_for_cap(options, dmd_cap)
|
||||
if captype != 'dirnode':
|
||||
print >>stderr, "magic_folder_dircap isn't a directory capability"
|
||||
return 2
|
||||
except RuntimeError as e:
|
||||
print >>stderr, str(e)
|
||||
return 1
|
||||
dmd_data = _get_json_for_cap(options, dmd_cap)
|
||||
remote_data = _get_json_for_cap(options, collective_readcap)
|
||||
magic_data = _get_json_for_fragment(
|
||||
options,
|
||||
'magic_folder?t=json',
|
||||
method='POST',
|
||||
post_args=dict(
|
||||
t='json',
|
||||
token=token,
|
||||
)
|
||||
)
|
||||
except Exception as e:
|
||||
print >>stderr, "failed to retrieve data: %s" % str(e)
|
||||
return 2
|
||||
|
||||
for d in [dmd_data, remote_data, magic_data]:
|
||||
if isinstance(d, dict) and 'error' in d:
|
||||
print >>stderr, "Error from server: %s" % d['error']
|
||||
print >>stderr, "This means we can't retrieve the remote shared directory."
|
||||
return 3
|
||||
|
||||
captype, dmd = dmd_data
|
||||
if captype != 'dirnode':
|
||||
print >>stderr, "magic_folder_dircap isn't a directory capability"
|
||||
return 2
|
||||
|
||||
now = datetime.now()
|
||||
|
||||
@ -323,14 +340,18 @@ def status(options):
|
||||
continue
|
||||
print " %s (%s): %s, version=%s, created %s" % (name, nice_size, status, version, nice_created)
|
||||
|
||||
captype, collective = _get_json_for_cap(options, collective_readcap)
|
||||
print
|
||||
print "Remote files:"
|
||||
|
||||
captype, collective = remote_data
|
||||
for (name, data) in collective['children'].items():
|
||||
if data[0] != 'dirnode':
|
||||
print "Error: '%s': expected a dirnode, not '%s'" % (name, data[0])
|
||||
print " %s's remote:" % name
|
||||
dmd = _get_json_for_cap(options, data[1]['ro_uri'])
|
||||
if isinstance(dmd, dict) and 'error' in dmd:
|
||||
print(" Error: could not retrieve directory")
|
||||
continue
|
||||
if dmd[0] != 'dirnode':
|
||||
print "Error: should be a dirnode"
|
||||
continue
|
||||
@ -347,21 +368,10 @@ def status(options):
|
||||
nice_created = abbreviate_time(now - created)
|
||||
print " %s (%s): %s, version=%s, created %s" % (n, nice_size, status, version, nice_created)
|
||||
|
||||
with open(os.path.join(nodedir, u'private', u'api_auth_token'), 'rb') as f:
|
||||
token = f.read()
|
||||
magicdata = _get_json_for_fragment(
|
||||
options,
|
||||
'magic_folder?t=json',
|
||||
method='POST',
|
||||
post_args=dict(
|
||||
t='json',
|
||||
token=token,
|
||||
)
|
||||
)
|
||||
if len(magicdata):
|
||||
uploads = [item for item in magicdata if item['kind'] == 'upload']
|
||||
downloads = [item for item in magicdata if item['kind'] == 'download']
|
||||
longest = max([len(item['path']) for item in magicdata])
|
||||
if len(magic_data):
|
||||
uploads = [item for item in magic_data if item['kind'] == 'upload']
|
||||
downloads = [item for item in magic_data if item['kind'] == 'download']
|
||||
longest = max([len(item['path']) for item in magic_data])
|
||||
|
||||
if True: # maybe --show-completed option or something?
|
||||
uploads = [item for item in uploads if item['status'] != 'success']
|
||||
@ -379,7 +389,7 @@ def status(options):
|
||||
for item in downloads:
|
||||
_print_item_status(item, now, longest)
|
||||
|
||||
for item in magicdata:
|
||||
for item in magic_data:
|
||||
if item['status'] == 'failure':
|
||||
print "Failed:", item
|
||||
|
||||
|
@ -1162,7 +1162,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
|
||||
"was corrupt, or that shares have been lost due to server "
|
||||
"departure, hard drive failure, or disk corruption. You "
|
||||
"should perform a filecheck on this object to learn more.")
|
||||
self.failUnlessReallyEqual(exp, body)
|
||||
self.failUnlessIn(exp, body)
|
||||
d.addCallback(_check_unrecoverable_file)
|
||||
|
||||
d.addCallback(lambda ignored:
|
||||
|
@ -4,6 +4,7 @@ import simplejson
|
||||
|
||||
from twisted.web import http, server, resource
|
||||
from twisted.python import log
|
||||
from twisted.python.failure import Failure
|
||||
from zope.interface import Interface
|
||||
from nevow import loaders, appserver
|
||||
from nevow.inevow import IRequest
|
||||
@ -426,6 +427,11 @@ class TokenOnlyWebApi(resource.Resource):
|
||||
if not t:
|
||||
raise WebError("Must provide 't=' argument")
|
||||
if t == u'json':
|
||||
return self.post_json(req)
|
||||
try:
|
||||
return self.post_json(req)
|
||||
except Exception:
|
||||
message, code = humanize_failure(Failure())
|
||||
req.setResponseCode(code)
|
||||
return simplejson.dumps({"error": message})
|
||||
else:
|
||||
raise WebError("'%s' invalid type for 't' arg" % (t,), http.BAD_REQUEST)
|
||||
|
@ -911,6 +911,15 @@ def DirectoryJSONMetadata(ctx, dirnode):
|
||||
return json
|
||||
d.addCallback(_got)
|
||||
d.addCallback(text_plain, ctx)
|
||||
|
||||
def error(f):
|
||||
message, code = humanize_failure(f)
|
||||
req = IRequest(ctx)
|
||||
req.setResponseCode(code)
|
||||
return simplejson.dumps({
|
||||
"error": message,
|
||||
})
|
||||
d.addErrback(error)
|
||||
return d
|
||||
|
||||
|
||||
|
@ -16,26 +16,31 @@ class MagicFolderWebApi(TokenOnlyWebApi):
|
||||
req.setHeader("content-type", "application/json")
|
||||
|
||||
data = []
|
||||
for item in self.client._magic_folder.uploader.get_status():
|
||||
d = dict(
|
||||
path=item.relpath_u,
|
||||
status=item.status_history()[-1][0],
|
||||
kind='upload',
|
||||
)
|
||||
for (status, ts) in item.status_history():
|
||||
d[status + '_at'] = ts
|
||||
d['percent_done'] = item.progress.progress
|
||||
data.append(d)
|
||||
try:
|
||||
for item in self.client._magic_folder.uploader.get_status():
|
||||
d = dict(
|
||||
path=item.relpath_u,
|
||||
status=item.status_history()[-1][0],
|
||||
kind='upload',
|
||||
)
|
||||
for (status, ts) in item.status_history():
|
||||
d[status + '_at'] = ts
|
||||
d['percent_done'] = item.progress.progress
|
||||
data.append(d)
|
||||
|
||||
for item in self.client._magic_folder.downloader.get_status():
|
||||
d = dict(
|
||||
path=item.relpath_u,
|
||||
status=item.status_history()[-1][0],
|
||||
kind='download',
|
||||
)
|
||||
for (status, ts) in item.status_history():
|
||||
d[status + '_at'] = ts
|
||||
d['percent_done'] = item.progress.progress
|
||||
data.append(d)
|
||||
for item in self.client._magic_folder.downloader.get_status():
|
||||
d = dict(
|
||||
path=item.relpath_u,
|
||||
status=item.status_history()[-1][0],
|
||||
kind='download',
|
||||
)
|
||||
for (status, ts) in item.status_history():
|
||||
d[status + '_at'] = ts
|
||||
d['percent_done'] = item.progress.progress
|
||||
data.append(d)
|
||||
except Exception as e:
|
||||
data.append({
|
||||
"error": str(e),
|
||||
})
|
||||
|
||||
return simplejson.dumps(data)
|
||||
|
Loading…
x
Reference in New Issue
Block a user