import simplejson
import urllib
import time

from twisted.internet import defer
from twisted.python.failure import Failure
from twisted.web import http, html
from nevow import url, rend, inevow, tags as T
from nevow.inevow import IRequest

from foolscap.eventual import fireEventually

from allmydata.util import base32
from allmydata.uri import from_string_dirnode
from allmydata.interfaces import IDirectoryNode, IFileNode, IMutableFileNode, \
     ExistingChildError, NoSuchChildError
from allmydata.monitor import Monitor
from allmydata import dirnode
from allmydata.web.common import text_plain, WebError, \
     IClient, IOpHandleTable, NeedOperationHandleError, \
     boolean_of_arg, get_arg, get_root, \
     should_create_intermediate_directories, \
     getxmlfile, RenderMixin
from allmydata.web.filenode import ReplaceMeMixin, \
     FileNodeHandler, PlaceHolderNodeHandler
from allmydata.web.check_results import CheckResults, \
     CheckAndRepairResults, DeepCheckResults, DeepCheckAndRepairResults
from allmydata.web.info import MoreInfo
from allmydata.web.operations import ReloadMixin

class BlockingFileError(Exception):
    # TODO: catch and transform
    """We cannot auto-create a parent directory, because there is a file in
    the way"""

def make_handler_for(node, parentnode=None, name=None):
    if parentnode:
        assert IDirectoryNode.providedBy(parentnode)
    if IMutableFileNode.providedBy(node):
        return FileNodeHandler(node, parentnode, name)
    if IFileNode.providedBy(node):
        return FileNodeHandler(node, parentnode, name)
    if IDirectoryNode.providedBy(node):
        return DirectoryNodeHandler(node, parentnode, name)
    raise WebError("Cannot provide handler for '%s'" % node)

class DirectoryNodeHandler(RenderMixin, rend.Page, ReplaceMeMixin):
    addSlash = True

    def __init__(self, node, parentnode=None, name=None):
        rend.Page.__init__(self)
        assert node
        self.node = node
        self.parentnode = parentnode
        self.name = name

    def childFactory(self, ctx, name):
        req = IRequest(ctx)
        name = name.decode("utf-8")
        d = self.node.get(name)
        d.addBoth(self.got_child, ctx, name)
        # got_child returns a handler resource: FileNodeHandler or
        # DirectoryNodeHandler
        return d

    def got_child(self, node_or_failure, ctx, name):
        DEBUG = False
        if DEBUG: print "GOT_CHILD", name, node_or_failure
        req = IRequest(ctx)
        method = req.method
        nonterminal = len(req.postpath) > 1
        t = get_arg(req, "t", "").strip()
        if isinstance(node_or_failure, Failure):
            f = node_or_failure
            f.trap(NoSuchChildError)
            # No child by this name. What should we do about it?
            if DEBUG: print "no child", name
            if DEBUG: print "postpath", req.postpath
            if nonterminal:
                if DEBUG: print " intermediate"
                if should_create_intermediate_directories(req):
                    # create intermediate directories
                    if DEBUG: print " making intermediate directory"
                    d = self.node.create_empty_directory(name)
                    d.addCallback(make_handler_for, self.node, name)
                    return d
            else:
                if DEBUG: print " terminal"
                # terminal node
                if (method,t) in [ ("POST","mkdir"), ("PUT","mkdir") ]:
                    if DEBUG: print " making final directory"
                    # final directory
                    d = self.node.create_empty_directory(name)
                    d.addCallback(make_handler_for, self.node, name)
                    return d
                if (method,t) in ( ("PUT",""), ("PUT","uri"), ):
                    if DEBUG: print " PUT, making leaf placeholder"
                    # we were trying to find the leaf filenode (to put a new
                    # file in its place), and it didn't exist. That's ok,
                    # since that's the leaf node that we're about to create.
                    # We make a dummy one, which will respond to the PUT
                    # request by replacing itself.
                    return PlaceHolderNodeHandler(self.node, name)
            if DEBUG: print " 404"
            # otherwise, we just return a no-such-child error
            return rend.FourOhFour()

        node = node_or_failure
        if nonterminal and should_create_intermediate_directories(req):
            if not IDirectoryNode.providedBy(node):
                # we would have put a new directory here, but there was a
                # file in the way.
                if DEBUG: print "blocking"
                raise WebError("Unable to create directory '%s': "
                               "a file was in the way" % name,
                               http.CONFLICT)
        if DEBUG: print "good child"
        return make_handler_for(node, self.node, name)

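    # Note added for clarity (not from the original source): got_child() is
    # what lets a single request like "PUT /uri/$DIRCAP/subdir/newfile"
    # succeed when "subdir" does not exist yet. A missing nonterminal
    # segment may become an intermediate directory, a missing terminal
    # segment gets a PlaceHolderNodeHandler (for PUT) or a new directory
    # (for t=mkdir), and anything else falls through to a 404.
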
    def render_DELETE(self, ctx):
        assert self.parentnode and self.name
        d = self.parentnode.delete(self.name)
        d.addCallback(lambda res: self.node.get_uri())
        return d

    def render_GET(self, ctx):
        client = IClient(ctx)
        req = IRequest(ctx)
        # This is where all of the directory-related ?t=* code goes.
        t = get_arg(req, "t", "").strip()
        if not t:
            # render the directory as HTML, using the docFactory and Nevow's
            # whole templating thing.
            return DirectoryAsHTML(self.node)

        if t == "json":
            return DirectoryJSONMetadata(ctx, self.node)
        if t == "info":
            return MoreInfo(self.node)
        if t == "uri":
            return DirectoryURI(ctx, self.node)
        if t == "readonly-uri":
            return DirectoryReadonlyURI(ctx, self.node)
        if t == 'rename-form':
            return RenameForm(self.node)

        raise WebError("GET directory: bad t=%s" % t)

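    # Illustrative GET queries dispatched above (added example; not from the
    # original source):
    #   GET /uri/$DIRCAP/?t=json          -> machine-readable child listing
    #   GET /uri/$DIRCAP/?t=uri           -> this directory's URI
    #   GET /uri/$DIRCAP/?t=readonly-uri  -> the read-only variant
    #   GET /uri/$DIRCAP/?t=rename-form   -> HTML form that POSTs t=rename
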
    def render_PUT(self, ctx):
        req = IRequest(ctx)
        t = get_arg(req, "t", "").strip()
        replace = boolean_of_arg(get_arg(req, "replace", "true"))
        if t == "mkdir":
            # our job was done by the traversal/create-intermediate-directory
            # process that got us here.
            return text_plain(self.node.get_uri(), ctx) # TODO: urlencode
        if t == "uri":
            if not replace:
                # they're trying to set_uri and that name is already occupied
                # (by us).
                raise ExistingChildError()
            d = self.replace_me_with_a_childcap(ctx, replace)
            # TODO: results
            return d

        raise WebError("PUT to a directory")

    def render_POST(self, ctx):
        req = IRequest(ctx)
        t = get_arg(req, "t", "").strip()

        if t == "mkdir":
            d = self._POST_mkdir(req)
        elif t == "mkdir-p":
            # TODO: docs, tests
            d = self._POST_mkdir_p(req)
        elif t == "upload":
            d = self._POST_upload(ctx) # this one needs the context
        elif t == "uri":
            d = self._POST_uri(req)
        elif t == "delete":
            d = self._POST_delete(req)
        elif t == "rename":
            d = self._POST_rename(req)
        elif t == "check":
            d = self._POST_check(req)
        elif t == "start-deep-check":
            d = self._POST_start_deep_check(ctx)
        elif t == "start-manifest":
            d = self._POST_start_manifest(ctx)
        elif t == "start-deep-size":
            d = self._POST_start_deep_size(ctx)
        elif t == "start-deep-stats":
            d = self._POST_start_deep_stats(ctx)
        elif t == "stream-manifest":
            d = self._POST_stream_manifest(ctx)
        elif t == "set_children":
            # TODO: docs
            d = self._POST_set_children(req)
        else:
            raise WebError("POST to a directory with bad t=%s" % t)

        when_done = get_arg(req, "when_done", None)
        if when_done:
            d.addCallback(lambda res: url.URL.fromString(when_done))
        return d

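    # Illustrative note (added; not from the original source): a browser form
    # that POSTs t=mkdir with when_done="." will create the child directory
    # and then be redirected back to this directory page, because the
    # when_done= argument above is converted into a nevow URL redirect.
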
    def _POST_mkdir(self, req):
        name = get_arg(req, "name", "")
        if not name:
            # our job is done, it was handled by the code in got_child
            # which created the final directory (i.e. us)
            return defer.succeed(self.node.get_uri()) # TODO: urlencode
        name = name.decode("utf-8")
        replace = boolean_of_arg(get_arg(req, "replace", "true"))
        d = self.node.create_empty_directory(name, overwrite=replace)
        d.addCallback(lambda child: child.get_uri()) # TODO: urlencode
        return d

    def _POST_mkdir_p(self, req):
        path = get_arg(req, "path")
        if not path:
            raise WebError("mkdir-p requires a path")
        path_ = tuple([seg.decode("utf-8") for seg in path.split('/') if seg ])
        # TODO: replace
        d = self._get_or_create_directories(self.node, path_)
        d.addCallback(lambda node: node.get_uri())
        return d

    def _get_or_create_directories(self, node, path):
        if not IDirectoryNode.providedBy(node):
            # unfortunately it is too late to provide the name of the
            # blocking directory in the error message.
            raise BlockingFileError("cannot create directory because there "
                                    "is a file in the way")
        if not path:
            return defer.succeed(node)
        d = node.get(path[0])
        def _maybe_create(f):
            f.trap(NoSuchChildError)
            return node.create_empty_directory(path[0])
        d.addErrback(_maybe_create)
        d.addCallback(self._get_or_create_directories, path[1:])
        return d

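    # Illustrative walkthrough (added; not from the original source): for
    # "POST ?t=mkdir-p&path=a/b/c", _POST_mkdir_p passes (u"a", u"b", u"c")
    # to this method, which recurses once per segment, creating an empty
    # directory whenever the child lookup fails with NoSuchChildError, and
    # finally returns the deepest directory node.
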
    def _POST_upload(self, ctx):
        req = IRequest(ctx)
        charset = get_arg(req, "_charset", "utf-8")
        contents = req.fields["file"]
        assert contents.filename is None or isinstance(contents.filename, str)
        name = get_arg(req, "name")
        name = name or contents.filename
        if name is not None:
            name = name.strip()
        if not name:
            # this prohibits empty, missing, and all-whitespace filenames
            raise WebError("upload requires a name")
        assert isinstance(name, str)
        name = name.decode(charset)
        if "/" in name:
            raise WebError("name= may not contain a slash", http.BAD_REQUEST)
        assert isinstance(name, unicode)

        # since POST /uri/path/file?t=upload is equivalent to
        # POST /uri/path/dir?t=upload&name=foo, just do the same thing that
        # childFactory would do. Things are cleaner if we only do a subset of
        # them, though, so we don't do: d = self.childFactory(ctx, name)

        d = self.node.get(name)
        def _maybe_got_node(node_or_failure):
            if isinstance(node_or_failure, Failure):
                f = node_or_failure
                f.trap(NoSuchChildError)
                # create a placeholder which will see POST t=upload
                return PlaceHolderNodeHandler(self.node, name)
            else:
                node = node_or_failure
                return make_handler_for(node, self.node, name)
        d.addBoth(_maybe_got_node)
        # now we have a placeholder or a filenodehandler, and we can just
        # delegate to it. We could return the resource back out of
        # DirectoryNodeHandler.renderHTTP, and nevow would recurse into it,
        # but the addCallback() that handles when_done= would break.
        d.addCallback(lambda child: child.renderHTTP(ctx))
        return d

    def _POST_uri(self, req):
        childcap = get_arg(req, "uri")
        if not childcap:
            raise WebError("set-uri requires a uri")
        name = get_arg(req, "name")
        if not name:
            raise WebError("set-uri requires a name")
        charset = get_arg(req, "_charset", "utf-8")
        name = name.decode(charset)
        replace = boolean_of_arg(get_arg(req, "replace", "true"))
        d = self.node.set_uri(name, childcap, overwrite=replace)
        d.addCallback(lambda res: childcap)
        return d

    def _POST_delete(self, req):
        name = get_arg(req, "name")
        if name is None:
            # apparently an <input type="hidden" name="name" value="">
            # won't show up in the resulting encoded form: the 'name'
            # field is completely missing. So to allow deletion of an
            # empty file, we have to pretend that None means ''. The only
            # downside of this is a slightly confusing error message if
            # someone does a POST without a name= field. For our own HTML
            # this isn't a big deal, because we create the 'delete' POST
            # buttons ourselves.
            name = ''
        charset = get_arg(req, "_charset", "utf-8")
        name = name.decode(charset)
        d = self.node.delete(name)
        d.addCallback(lambda res: "thing deleted")
        return d

    def _POST_rename(self, req):
        charset = get_arg(req, "_charset", "utf-8")
        from_name = get_arg(req, "from_name")
        if from_name is not None:
            from_name = from_name.strip()
            from_name = from_name.decode(charset)
            assert isinstance(from_name, unicode)
        to_name = get_arg(req, "to_name")
        if to_name is not None:
            to_name = to_name.strip()
            to_name = to_name.decode(charset)
            assert isinstance(to_name, unicode)
        if not from_name or not to_name:
            raise WebError("rename requires from_name and to_name")
        if from_name == to_name:
            return defer.succeed("redundant rename")

        # allow from_name to contain slashes, so they can fix names that were
        # accidentally created with them. But disallow them in to_name, to
        # discourage the practice.
        if "/" in to_name:
            raise WebError("to_name= may not contain a slash", http.BAD_REQUEST)

        replace = boolean_of_arg(get_arg(req, "replace", "true"))
        d = self.node.move_child_to(from_name, self.node, to_name, replace)
        d.addCallback(lambda res: "thing renamed")
        return d

    def _POST_check(self, req):
        # check this directory
        verify = boolean_of_arg(get_arg(req, "verify", "false"))
        repair = boolean_of_arg(get_arg(req, "repair", "false"))
        if repair:
            d = self.node.check_and_repair(Monitor(), verify)
            d.addCallback(lambda res: CheckAndRepairResults(res))
        else:
            d = self.node.check(Monitor(), verify)
            d.addCallback(lambda res: CheckResults(res))
        return d

    def _start_operation(self, monitor, renderer, ctx):
        table = IOpHandleTable(ctx)
        table.add_monitor(ctx, monitor, renderer)
        return table.redirect_to(ctx)

    def _POST_start_deep_check(self, ctx):
        # check this directory and everything reachable from it
        if not get_arg(ctx, "ophandle"):
            raise NeedOperationHandleError("slow operation requires ophandle=")
        verify = boolean_of_arg(get_arg(ctx, "verify", "false"))
        repair = boolean_of_arg(get_arg(ctx, "repair", "false"))
        if repair:
            monitor = self.node.start_deep_check_and_repair(verify)
            renderer = DeepCheckAndRepairResults(monitor)
        else:
            monitor = self.node.start_deep_check(verify)
            renderer = DeepCheckResults(monitor)
        return self._start_operation(monitor, renderer, ctx)

    def _POST_start_manifest(self, ctx):
        if not get_arg(ctx, "ophandle"):
            raise NeedOperationHandleError("slow operation requires ophandle=")
        monitor = self.node.build_manifest()
        renderer = ManifestResults(monitor)
        return self._start_operation(monitor, renderer, ctx)

    def _POST_start_deep_size(self, ctx):
        if not get_arg(ctx, "ophandle"):
            raise NeedOperationHandleError("slow operation requires ophandle=")
        monitor = self.node.start_deep_stats()
        renderer = DeepSizeResults(monitor)
        return self._start_operation(monitor, renderer, ctx)

    def _POST_start_deep_stats(self, ctx):
        if not get_arg(ctx, "ophandle"):
            raise NeedOperationHandleError("slow operation requires ophandle=")
        monitor = self.node.start_deep_stats()
        renderer = DeepStatsResults(monitor)
        return self._start_operation(monitor, renderer, ctx)

    def _POST_stream_manifest(self, ctx):
        walker = ManifestStreamer(ctx, self.node)
        monitor = self.node.deep_traverse(walker)
        return monitor.when_done()

    def _POST_set_children(self, req):
        replace = boolean_of_arg(get_arg(req, "replace", "true"))
        req.content.seek(0)
        body = req.content.read()
        try:
            children = simplejson.loads(body)
        except ValueError, le:
            le.args = tuple(le.args + (body,))
            # TODO test handling of bad JSON
            raise
        cs = []
        for name, (file_or_dir, mddict) in children.iteritems():
            name = unicode(name) # simplejson-2.0.1 returns str *or* unicode
            cap = str(mddict.get('rw_uri') or mddict.get('ro_uri'))
            cs.append((name, cap, mddict.get('metadata')))
        d = self.node.set_children(cs, replace)
        d.addCallback(lambda res: "Okay so I did it.")
        # TODO: results
        return d

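# Illustrative request body for t=set_children (assumed example; not from the
# original source). The loop above expects a JSON dict mapping each child name
# to a (type, data) pair and reads rw_uri/ro_uri plus optional metadata:
#   { "foo.txt": ["filenode", {"ro_uri": "URI:CHK:...", "metadata": {}}],
#     "subdir":  ["dirnode",  {"rw_uri": "URI:DIR2:..."}] }
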
def abbreviated_dirnode(dirnode):
    u = from_string_dirnode(dirnode.get_uri())
    return u.abbrev()

class DirectoryAsHTML(rend.Page):
    # The remainder of this class is to render the directory into
    # human+browser -oriented HTML.
    docFactory = getxmlfile("directory.xhtml")
    addSlash = True

    def __init__(self, node):
        rend.Page.__init__(self)
        self.node = node

    def render_title(self, ctx, data):
        si_s = abbreviated_dirnode(self.node)
        header = ["Directory SI=%s" % si_s]
        return ctx.tag[header]

    def render_header(self, ctx, data):
        si_s = abbreviated_dirnode(self.node)
        header = ["Directory SI=%s" % si_s]
        if self.node.is_readonly():
            header.append(" (readonly)")
        return ctx.tag[header]

    def render_welcome(self, ctx, data):
        link = get_root(ctx)
        return T.div[T.a(href=link)["Return to Welcome page"]]

    def data_children(self, ctx, data):
        d = self.node.list()
        d.addCallback(lambda dict: sorted(dict.items()))
        def _stall_some(items):
            # Deferreds don't optimize out tail recursion, and the way
            # Nevow's flattener handles Deferreds doesn't take this into
            # account. As a result, large lists of Deferreds that fire in the
            # same turn (i.e. the output of defer.succeed) will cause a stack
            # overflow. To work around this, we insert a turn break after
            # every 100 items, using foolscap's fireEventually(). This gives
            # the stack a chance to be popped. It would also work to put
            # every item in its own turn, but that'd be a lot more
            # inefficient. This addresses ticket #237, for which I was never
            # able to create a failing unit test.
            output = []
            for i,item in enumerate(items):
                if i % 100 == 0:
                    output.append(fireEventually(item))
                else:
                    output.append(item)
            return output
        d.addCallback(_stall_some)
        return d

    def render_row(self, ctx, data):
        name, (target, metadata) = data
        name = name.encode("utf-8")
        assert not isinstance(name, unicode)
        nameurl = urllib.quote(name, safe="") # encode any slashes too

        root = get_root(ctx)
        here = "%s/uri/%s/" % (root, urllib.quote(self.node.get_uri()))
        if self.node.is_readonly():
            delete = "-"
            rename = "-"
        else:
            # this creates a button which will cause our child__delete method
            # to be invoked, which deletes the file and then redirects the
            # browser back to this directory
            delete = T.form(action=here, method="post")[
                T.input(type='hidden', name='t', value='delete'),
                T.input(type='hidden', name='name', value=name),
                T.input(type='hidden', name='when_done', value="."),
                T.input(type='submit', value='del', name="del"),
                ]

            rename = T.form(action=here, method="get")[
                T.input(type='hidden', name='t', value='rename-form'),
                T.input(type='hidden', name='name', value=name),
                T.input(type='hidden', name='when_done', value="."),
                T.input(type='submit', value='rename', name="rename"),
                ]

        ctx.fillSlots("delete", delete)
        ctx.fillSlots("rename", rename)

        times = []
        TIME_FORMAT = "%H:%M:%S %d-%b-%Y"
        if "ctime" in metadata:
            ctime = time.strftime(TIME_FORMAT,
                                  time.localtime(metadata["ctime"]))
            times.append("c: " + ctime)
        if "mtime" in metadata:
            mtime = time.strftime(TIME_FORMAT,
                                  time.localtime(metadata["mtime"]))
            if times:
                times.append(T.br())
            times.append("m: " + mtime)
        ctx.fillSlots("times", times)

        assert (IFileNode.providedBy(target)
                or IDirectoryNode.providedBy(target)
                or IMutableFileNode.providedBy(target)), target

        quoted_uri = urllib.quote(target.get_uri())

        if IMutableFileNode.providedBy(target):
            # to prevent javascript in displayed .html files from stealing a
            # secret directory URI from the URL, send the browser to a
            # URI-based page that doesn't know about the directory at all
            dlurl = "%s/file/%s/@@named=/%s" % (root, quoted_uri, nameurl)

            ctx.fillSlots("filename",
                          T.a(href=dlurl)[html.escape(name)])
            ctx.fillSlots("type", "SSK")

            ctx.fillSlots("size", "?")

            info_link = "%s/uri/%s?t=info" % (root, quoted_uri)

        elif IFileNode.providedBy(target):
            dlurl = "%s/file/%s/@@named=/%s" % (root, quoted_uri, nameurl)

            ctx.fillSlots("filename",
                          T.a(href=dlurl)[html.escape(name)])
            ctx.fillSlots("type", "FILE")

            ctx.fillSlots("size", target.get_size())

            info_link = "%s/uri/%s?t=info" % (root, quoted_uri)

        elif IDirectoryNode.providedBy(target):
            # directory
            uri_link = "%s/uri/%s/" % (root, urllib.quote(target.get_uri()))
            ctx.fillSlots("filename",
                          T.a(href=uri_link)[html.escape(name)])
            if target.is_readonly():
                dirtype = "DIR-RO"
            else:
                dirtype = "DIR"
            ctx.fillSlots("type", dirtype)
            ctx.fillSlots("size", "-")
            info_link = "%s/uri/%s/?t=info" % (root, quoted_uri)

        ctx.fillSlots("info", T.a(href=info_link)["More Info"])

        return ctx.tag

    def render_forms(self, ctx, data):
        forms = []

        if self.node.is_readonly():
            forms.append(T.div["No upload forms: directory is read-only"])
            return forms

        mkdir = T.form(action=".", method="post",
                       enctype="multipart/form-data")[
            T.fieldset[
            T.input(type="hidden", name="t", value="mkdir"),
            T.input(type="hidden", name="when_done", value="."),
            T.legend(class_="freeform-form-label")["Create a new directory"],
            "New directory name: ",
            T.input(type="text", name="name"), " ",
            T.input(type="submit", value="Create"),
            ]]
        forms.append(T.div(class_="freeform-form")[mkdir])

        upload = T.form(action=".", method="post",
                        enctype="multipart/form-data")[
            T.fieldset[
            T.input(type="hidden", name="t", value="upload"),
            T.input(type="hidden", name="when_done", value="."),
            T.legend(class_="freeform-form-label")["Upload a file to this directory"],
            "Choose a file to upload: ",
            T.input(type="file", name="file", class_="freeform-input-file"),
            " ",
            T.input(type="submit", value="Upload"),
            " Mutable?:",
            T.input(type="checkbox", name="mutable"),
            ]]
        forms.append(T.div(class_="freeform-form")[upload])

        mount = T.form(action=".", method="post",
                       enctype="multipart/form-data")[
            T.fieldset[
            T.input(type="hidden", name="t", value="uri"),
            T.input(type="hidden", name="when_done", value="."),
            T.legend(class_="freeform-form-label")["Attach a file or directory"
                                                   " (by URI) to this"
                                                   " directory"],
            "New child name: ",
            T.input(type="text", name="name"), " ",
            "URI of new child: ",
            T.input(type="text", name="uri"), " ",
            T.input(type="submit", value="Attach"),
            ]]
        forms.append(T.div(class_="freeform-form")[mount])
        return forms

    def render_results(self, ctx, data):
        req = IRequest(ctx)
        return get_arg(req, "results", "")

def DirectoryJSONMetadata(ctx, dirnode):
    d = dirnode.list()
    def _got(children):
        kids = {}
        for name, (childnode, metadata) in children.iteritems():
            if childnode.is_readonly():
                rw_uri = None
                ro_uri = childnode.get_uri()
            else:
                rw_uri = childnode.get_uri()
                ro_uri = childnode.get_readonly_uri()
            if IFileNode.providedBy(childnode):
                kiddata = ("filenode", {'size': childnode.get_size(),
                                        'metadata': metadata,
                                        })
            else:
                assert IDirectoryNode.providedBy(childnode), (childnode,
                                                              children,)
                kiddata = ("dirnode", {'metadata': metadata})
            if ro_uri:
                kiddata[1]["ro_uri"] = ro_uri
            if rw_uri:
                kiddata[1]["rw_uri"] = rw_uri
            kiddata[1]['mutable'] = childnode.is_mutable()
            kids[name] = kiddata
        if dirnode.is_readonly():
            drw_uri = None
            dro_uri = dirnode.get_uri()
        else:
            drw_uri = dirnode.get_uri()
            dro_uri = dirnode.get_readonly_uri()
        contents = { 'children': kids }
        if dro_uri:
            contents['ro_uri'] = dro_uri
        if drw_uri:
            contents['rw_uri'] = drw_uri
        contents['mutable'] = dirnode.is_mutable()
        data = ("dirnode", contents)
        return simplejson.dumps(data, indent=1) + "\n"
    d.addCallback(_got)
    d.addCallback(text_plain, ctx)
    return d

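# Example shape of the t=json response produced above (illustrative; not from
# the original source):
#   ["dirnode", {"rw_uri": "URI:DIR2:...", "ro_uri": "URI:DIR2-RO:...",
#                "mutable": true,
#                "children": {"foo.txt": ["filenode", {"size": 123,
#                                                      "ro_uri": "URI:CHK:...",
#                                                      "mutable": false,
#                                                      "metadata": {}}]}}]
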
def DirectoryURI(ctx, dirnode):
    return text_plain(dirnode.get_uri(), ctx)

def DirectoryReadonlyURI(ctx, dirnode):
    return text_plain(dirnode.get_readonly_uri(), ctx)

class RenameForm(rend.Page):
    addSlash = True
    docFactory = getxmlfile("rename-form.xhtml")

    def render_title(self, ctx, data):
        return ctx.tag["Directory SI=%s" % abbreviated_dirnode(self.original)]

    def render_header(self, ctx, data):
        header = ["Rename "
                  "in directory SI=%s" % abbreviated_dirnode(self.original),
                  ]

        if self.original.is_readonly():
            header.append(" (readonly!)")
        header.append(":")
        return ctx.tag[header]

    def render_when_done(self, ctx, data):
        return T.input(type="hidden", name="when_done", value=".")

    def render_get_name(self, ctx, data):
        req = IRequest(ctx)
        name = get_arg(req, "name", "")
        ctx.tag.attributes['value'] = name
        return ctx.tag

class ManifestResults(rend.Page, ReloadMixin):
    docFactory = getxmlfile("manifest.xhtml")

    def __init__(self, monitor):
        self.monitor = monitor

    def renderHTTP(self, ctx):
        output = get_arg(inevow.IRequest(ctx), "output", "html").lower()
        if output == "text":
            return self.text(ctx)
        if output == "json":
            return self.json(ctx)
        return rend.Page.renderHTTP(self, ctx)

    def slashify_path(self, path):
        if not path:
            return ""
        return "/".join([p.encode("utf-8") for p in path])

    def text(self, ctx):
        inevow.IRequest(ctx).setHeader("content-type", "text/plain")
        lines = []
        is_finished = self.monitor.is_finished()
        lines.append("finished: " + {True: "yes", False: "no"}[is_finished])
        for (path, cap) in self.monitor.get_status()["manifest"]:
            lines.append(self.slashify_path(path) + " " + cap)
        return "\n".join(lines) + "\n"

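    # Example of the output=text form produced above (illustrative; not from
    # the original source):
    #   finished: yes
    #   subdir/file.txt URI:CHK:...
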
    def json(self, ctx):
        inevow.IRequest(ctx).setHeader("content-type", "text/plain")
        m = self.monitor
        s = m.get_status()

        status = { "stats": s["stats"],
                   "finished": m.is_finished(),
                   "origin": base32.b2a(m.origin_si),
                   }
        if m.is_finished():
            # don't return manifest/verifycaps/SIs unless the operation is
            # done, to save on CPU/memory (both here and in the HTTP client
            # who has to unpack the JSON). Tests show that the ManifestWalker
            # needs about 1092 bytes per item, the JSON we generate here
            # requires about 503 bytes per item, and some internal overhead
            # (perhaps transport-layer buffers in twisted.web?) requires an
            # additional 1047 bytes per item.
            status.update({ "manifest": s["manifest"],
                            "verifycaps": [i for i in s["verifycaps"]],
                            "storage-index": [i for i in s["storage-index"]],
                            })
            # simplejson doesn't know how to serialize a set. We use a
            # generator that walks the set rather than list(setofthing) to
            # save a small amount of memory (4B*len) and a moderate amount of
            # CPU.
        return simplejson.dumps(status, indent=1)

    def _si_abbrev(self):
        return base32.b2a(self.monitor.origin_si)[:6]

    def render_title(self, ctx):
        return T.title["Manifest of SI=%s" % self._si_abbrev()]

    def render_header(self, ctx):
        return T.p["Manifest of SI=%s" % self._si_abbrev()]

    def data_items(self, ctx, data):
        return self.monitor.get_status()["manifest"]

    def render_row(self, ctx, (path, cap)):
        ctx.fillSlots("path", self.slashify_path(path))
        root = get_root(ctx)
        # TODO: we need a clean consistent way to get the type of a cap string
        if cap.startswith("URI:CHK") or cap.startswith("URI:SSK"):
            nameurl = urllib.quote(path[-1].encode("utf-8"))
            uri_link = "%s/file/%s/@@named=/%s" % (root, urllib.quote(cap),
                                                   nameurl)
        else:
            uri_link = "%s/uri/%s" % (root, urllib.quote(cap))
        ctx.fillSlots("cap", T.a(href=uri_link)[cap])
        return ctx.tag

class DeepSizeResults(rend.Page):
    def __init__(self, monitor):
        self.monitor = monitor

    def renderHTTP(self, ctx):
        output = get_arg(inevow.IRequest(ctx), "output", "html").lower()
        inevow.IRequest(ctx).setHeader("content-type", "text/plain")
        if output == "json":
            return self.json(ctx)
        # plain text
        is_finished = self.monitor.is_finished()
        output = "finished: " + {True: "yes", False: "no"}[is_finished] + "\n"
        if is_finished:
            stats = self.monitor.get_status()
            total = (stats.get("size-immutable-files", 0)
                     + stats.get("size-mutable-files", 0)
                     + stats.get("size-directories", 0))
            output += "size: %d\n" % total
        return output

    def json(self, ctx):
        status = {"finished": self.monitor.is_finished(),
                  "size": self.monitor.get_status(),
                  }
        return simplejson.dumps(status)

class DeepStatsResults(rend.Page):
    def __init__(self, monitor):
        self.monitor = monitor

    def renderHTTP(self, ctx):
        # JSON only
        inevow.IRequest(ctx).setHeader("content-type", "text/plain")
        s = self.monitor.get_status().copy()
        s["finished"] = self.monitor.is_finished()
        return simplejson.dumps(s, indent=1)

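# Example of the plain-text deep-size output produced by DeepSizeResults above
# (illustrative; not from the original source):
#   finished: yes
#   size: 123456
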
class ManifestStreamer(dirnode.DeepStats):

    def __init__(self, ctx, origin):
        dirnode.DeepStats.__init__(self, origin)
        self.req = IRequest(ctx)

    def add_node(self, node, path):
        dirnode.DeepStats.add_node(self, node, path)
        d = {"path": path,
             "cap": node.get_uri()}

        if IDirectoryNode.providedBy(node):
            d["type"] = "directory"
        else:
            d["type"] = "file"

        v = node.get_verify_cap()
        if v:
            v = v.to_string()
        d["verifycap"] = v

        r = node.get_repair_cap()
        if r:
            r = r.to_string()
        d["repaircap"] = r

        si = node.get_storage_index()
        if si:
            si = base32.b2a(si)
        d["storage-index"] = si

        j = simplejson.dumps(d, ensure_ascii=True)
        assert "\n" not in j
        self.req.write(j+"\n")

    def finish(self):
        stats = dirnode.DeepStats.get_results(self)
        d = {"type": "stats",
             "stats": stats,
             }
        j = simplejson.dumps(d, ensure_ascii=True)
        assert "\n" not in j
        self.req.write(j+"\n")
        return ""

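# Example of the t=stream-manifest output written by ManifestStreamer above
# (illustrative; not from the original source): one JSON object per line for
# each visited node, followed by a final "stats" line:
#   {"path": ["subdir", "file.txt"], "cap": "URI:CHK:...", "type": "file",
#    "verifycap": "URI:...", "repaircap": "URI:...", "storage-index": "..."}
#   {"type": "stats", "stats": {...}}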