import json
import urllib

from datetime import timedelta

from zope.interface import implementer
from twisted.internet import defer
from twisted.internet.interfaces import IPushProducer
from twisted.python.failure import Failure
from twisted.web import http
from twisted.web.template import (
    Element,
    XMLFile,
    renderElement,
    renderer,
    tags,
)
from twisted.python.filepath import FilePath
from nevow import url, rend, inevow, tags as T
from nevow.inevow import IRequest

from foolscap.api import fireEventually

from allmydata.util import base32
from allmydata.util.encodingutil import to_str
from allmydata.uri import from_string_dirnode
from allmydata.interfaces import IDirectoryNode, IFileNode, IFilesystemNode, \
     IImmutableFileNode, IMutableFileNode, ExistingChildError, \
     NoSuchChildError, EmptyPathnameComponentError, SDMF_VERSION, MDMF_VERSION
from allmydata.blacklist import ProhibitedNode
from allmydata.monitor import Monitor, OperationCancelledError
from allmydata import dirnode
from allmydata.web.common import (
    text_plain,
    WebError,
    NeedOperationHandleError,
    boolean_of_arg,
    get_arg,
    get_root,
    parse_replace_arg,
    should_create_intermediate_directories,
    getxmlfile,
    RenderMixin,
    humanize_failure,
    convert_children_json,
    get_format,
    get_mutable_type,
    get_filenode_metadata,
    render_time,
    MultiFormatPage,
    MultiFormatResource,
    SlotsSequenceElement,
)
from allmydata.web.filenode import ReplaceMeMixin, \
     FileNodeHandler, PlaceHolderNodeHandler
from allmydata.web.check_results import CheckResultsRenderer, \
     CheckAndRepairResultsRenderer, DeepCheckResultsRenderer, \
     DeepCheckAndRepairResultsRenderer, LiteralCheckResultsRenderer
from allmydata.web.info import MoreInfo
from allmydata.web.operations import ReloadMixin
from allmydata.web.check_results import json_check_results, \
     json_check_and_repair_results


class BlockingFileError(Exception):
    # TODO: catch and transform
    """We cannot auto-create a parent directory, because there is a file in
    the way"""


def make_handler_for(node, client, parentnode=None, name=None):
    if parentnode:
        assert IDirectoryNode.providedBy(parentnode)
    if IFileNode.providedBy(node):
        return FileNodeHandler(client, node, parentnode, name)
    if IDirectoryNode.providedBy(node):
        return DirectoryNodeHandler(client, node, parentnode, name)
    return UnknownNodeHandler(client, node, parentnode, name)


class DirectoryNodeHandler(RenderMixin, rend.Page, ReplaceMeMixin):
    addSlash = True

    def __init__(self, client, node, parentnode=None, name=None):
        rend.Page.__init__(self)
        self.client = client
        assert node
        self.node = node
        self.parentnode = parentnode
        self.name = name
        self._operations = client.get_web_service().get_operations()

    def childFactory(self, ctx, name):
        name = name.decode("utf-8")
        if not name:
            raise EmptyPathnameComponentError()
        d = self.node.get(name)
        d.addBoth(self.got_child, ctx, name)
        # got_child returns a handler resource: FileNodeHandler or
        # DirectoryNodeHandler
        return d

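    # For example, a request for /uri/$DIRCAP/subdir/file.txt (where $DIRCAP
    # is a placeholder for a real directory capability) reaches childFactory
    # once per remaining path component; each hop returns the handler for the
    # next node, or a placeholder/error from got_child below.
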
    def got_child(self, node_or_failure, ctx, name):
        req = IRequest(ctx)
        method = req.method
        nonterminal = len(req.postpath) > 1
        t = get_arg(req, "t", "").strip()
        if isinstance(node_or_failure, Failure):
            f = node_or_failure
            f.trap(NoSuchChildError)
            # No child by this name. What should we do about it?
            if nonterminal:
                if should_create_intermediate_directories(req):
                    # create intermediate directories
                    d = self.node.create_subdirectory(name)
                    d.addCallback(make_handler_for,
                                  self.client, self.node, name)
                    return d
            else:
                # terminal node
                if (method,t) in [ ("POST","mkdir"), ("PUT","mkdir"),
                                   ("POST", "mkdir-with-children"),
                                   ("POST", "mkdir-immutable") ]:
                    # final directory
                    kids = {}
                    if t in ("mkdir-with-children", "mkdir-immutable"):
                        req.content.seek(0)
                        kids_json = req.content.read()
                        kids = convert_children_json(self.client.nodemaker,
                                                     kids_json)
                    file_format = get_format(req, None)
                    mutable = True
                    mt = get_mutable_type(file_format)
                    if t == "mkdir-immutable":
                        mutable = False

                    d = self.node.create_subdirectory(name, kids,
                                                      mutable=mutable,
                                                      mutable_version=mt)
                    d.addCallback(make_handler_for,
                                  self.client, self.node, name)
                    return d
                if (method,t) in ( ("PUT",""), ("PUT","uri"), ):
                    # we were trying to find the leaf filenode (to put a new
                    # file in its place), and it didn't exist. That's ok,
                    # since that's the leaf node that we're about to create.
                    # We make a dummy one, which will respond to the PUT
                    # request by replacing itself.
                    return PlaceHolderNodeHandler(self.client, self.node, name)
            # otherwise, we just return a no-such-child error
            return f

        node = node_or_failure
        if nonterminal and should_create_intermediate_directories(req):
            if not IDirectoryNode.providedBy(node):
                # we would have put a new directory here, but there was a
                # file in the way.
                raise WebError("Unable to create directory '%s': "
                               "a file was in the way" % name,
                               http.CONFLICT)
        return make_handler_for(node, self.client, self.node, name)

    def render_DELETE(self, ctx):
        assert self.parentnode and self.name
        d = self.parentnode.delete(self.name)
        d.addCallback(lambda res: self.node.get_uri())
        return d

    def render_GET(self, ctx):
        req = IRequest(ctx)
        # This is where all of the directory-related ?t=* code goes.
        t = get_arg(req, "t", "").strip()

        # t=info contains variable ophandles, t=rename-form contains the name
        # of the child being renamed. Neither is allowed an ETag.
        FIXED_OUTPUT_TYPES = ["", "json", "uri", "readonly-uri"]
        if not self.node.is_mutable() and t in FIXED_OUTPUT_TYPES:
            si = self.node.get_storage_index()
            if si and req.setETag('DIR:%s-%s' % (base32.b2a(si), t or "")):
                return ""

        if not t:
            # render the directory as HTML, using the docFactory and Nevow's
            # whole templating thing.
            return DirectoryAsHTML(self.node,
                                   self.client.mutable_file_default)

        if t == "json":
            return DirectoryJSONMetadata(ctx, self.node)
        if t == "info":
            return MoreInfo(self.node)
        if t == "uri":
            return DirectoryURI(ctx, self.node)
        if t == "readonly-uri":
            return DirectoryReadonlyURI(ctx, self.node)
        if t == 'rename-form':
            return RenameForm(self.node)

        raise WebError("GET directory: bad t=%s" % t)

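    # A sketch of the GET forms dispatched above ($DIRCAP is a placeholder,
    # not a real capability string):
    #
    #   GET /uri/$DIRCAP/                 -> HTML listing (DirectoryAsHTML)
    #   GET /uri/$DIRCAP/?t=json          -> machine-readable child metadata
    #   GET /uri/$DIRCAP/?t=uri           -> this directory's URI
    #   GET /uri/$DIRCAP/?t=readonly-uri  -> this directory's read-only URI
    #   GET /uri/$DIRCAP/?t=info          -> the "More Info" page
    #   GET /uri/$DIRCAP/?t=rename-form   -> the rename/relink form
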
    def render_PUT(self, ctx):
        req = IRequest(ctx)
        t = get_arg(req, "t", "").strip()
        replace = parse_replace_arg(get_arg(req, "replace", "true"))

        if t == "mkdir":
            # our job was done by the traversal/create-intermediate-directory
            # process that got us here.
            return text_plain(self.node.get_uri(), ctx) # TODO: urlencode
        if t == "uri":
            if not replace:
                # they're trying to set_uri and that name is already occupied
                # (by us).
                raise ExistingChildError()
            d = self.replace_me_with_a_childcap(req, self.client, replace)
            # TODO: results
            return d

        raise WebError("PUT to a directory")

    def render_POST(self, ctx):
        req = IRequest(ctx)
        t = get_arg(req, "t", "").strip()

        if t == "mkdir":
            d = self._POST_mkdir(req)
        elif t == "mkdir-with-children":
            d = self._POST_mkdir_with_children(req)
        elif t == "mkdir-immutable":
            d = self._POST_mkdir_immutable(req)
        elif t == "upload":
            d = self._POST_upload(ctx) # this one needs the context
        elif t == "uri":
            d = self._POST_uri(req)
        elif t == "delete" or t == "unlink":
            d = self._POST_unlink(req)
        elif t == "rename":
            d = self._POST_rename(req)
        elif t == "relink":
            d = self._POST_relink(req)
        elif t == "check":
            d = self._POST_check(req)
        elif t == "start-deep-check":
            d = self._POST_start_deep_check(ctx)
        elif t == "stream-deep-check":
            d = self._POST_stream_deep_check(ctx)
        elif t == "start-manifest":
            d = self._POST_start_manifest(ctx)
        elif t == "start-deep-size":
            d = self._POST_start_deep_size(ctx)
        elif t == "start-deep-stats":
            d = self._POST_start_deep_stats(ctx)
        elif t == "stream-manifest":
            d = self._POST_stream_manifest(ctx)
        elif t == "set_children" or t == "set-children":
            d = self._POST_set_children(req)
        else:
            raise WebError("POST to a directory with bad t=%s" % t)

        when_done = get_arg(req, "when_done", None)
        if when_done:
            d.addCallback(lambda res: url.URL.fromString(when_done))
        return d

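    # For illustration, the same dispatch driven from the command line with
    # curl (the gateway address/port and $DIRCAP are placeholder assumptions):
    #
    #   curl -X POST "http://127.0.0.1:3456/uri/$DIRCAP/?t=mkdir&name=subdir"
    #   curl -X POST "http://127.0.0.1:3456/uri/$DIRCAP/?t=unlink&name=old.txt"
    #   curl -X POST "http://127.0.0.1:3456/uri/$DIRCAP/?t=rename&from_name=a&to_name=b"
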
    def _POST_mkdir(self, req):
        name = get_arg(req, "name", "")
        if not name:
            # our job is done, it was handled by the code in got_child
            # which created the final directory (i.e. us)
            return defer.succeed(self.node.get_uri()) # TODO: urlencode
        name = name.decode("utf-8")
        replace = boolean_of_arg(get_arg(req, "replace", "true"))
        kids = {}
        mt = get_mutable_type(get_format(req, None))
        d = self.node.create_subdirectory(name, kids, overwrite=replace,
                                          mutable_version=mt)
        d.addCallback(lambda child: child.get_uri()) # TODO: urlencode
        return d

    def _POST_mkdir_with_children(self, req):
        name = get_arg(req, "name", "")
        if not name:
            # our job is done, it was handled by the code in got_child
            # which created the final directory (i.e. us)
            return defer.succeed(self.node.get_uri()) # TODO: urlencode
        name = name.decode("utf-8")
        # TODO: decide on replace= behavior, see #903
        #replace = boolean_of_arg(get_arg(req, "replace", "false"))
        req.content.seek(0)
        kids_json = req.content.read()
        kids = convert_children_json(self.client.nodemaker, kids_json)
        mt = get_mutable_type(get_format(req, None))
        d = self.node.create_subdirectory(name, kids, overwrite=False,
                                          mutable_version=mt)
        d.addCallback(lambda child: child.get_uri()) # TODO: urlencode
        return d

    def _POST_mkdir_immutable(self, req):
        name = get_arg(req, "name", "")
        if not name:
            # our job is done, it was handled by the code in got_child
            # which created the final directory (i.e. us)
            return defer.succeed(self.node.get_uri()) # TODO: urlencode
        name = name.decode("utf-8")
        # TODO: decide on replace= behavior, see #903
        #replace = boolean_of_arg(get_arg(req, "replace", "false"))
        req.content.seek(0)
        kids_json = req.content.read()
        kids = convert_children_json(self.client.nodemaker, kids_json)
        d = self.node.create_subdirectory(name, kids, overwrite=False, mutable=False)
        d.addCallback(lambda child: child.get_uri()) # TODO: urlencode
        return d

    def _POST_upload(self, ctx):
        req = IRequest(ctx)
        charset = get_arg(req, "_charset", "utf-8")
        contents = req.fields["file"]
        assert contents.filename is None or isinstance(contents.filename, str)
        name = get_arg(req, "name")
        name = name or contents.filename
        if name is not None:
            name = name.strip()
        if not name:
            # this prohibits empty, missing, and all-whitespace filenames
            raise WebError("upload requires a name")
        assert isinstance(name, str)
        name = name.decode(charset)
        if "/" in name:
            raise WebError("name= may not contain a slash", http.BAD_REQUEST)
        assert isinstance(name, unicode)

        # since POST /uri/path/file?t=upload is equivalent to
        # POST /uri/path/dir?t=upload&name=foo, just do the same thing that
        # childFactory would do. Things are cleaner if we only do a subset of
        # them, though, so we don't do: d = self.childFactory(ctx, name)

        d = self.node.get(name)
        def _maybe_got_node(node_or_failure):
            if isinstance(node_or_failure, Failure):
                f = node_or_failure
                f.trap(NoSuchChildError)
                # create a placeholder which will see POST t=upload
                return PlaceHolderNodeHandler(self.client, self.node, name)
            else:
                node = node_or_failure
                return make_handler_for(node, self.client, self.node, name)
        d.addBoth(_maybe_got_node)
        # now we have a placeholder or a filenodehandler, and we can just
        # delegate to it. We could return the resource back out of
        # DirectoryNodeHandler.renderHTTP, and nevow would recurse into it,
        # but the addCallback() that handles when_done= would break.
        d.addCallback(lambda child: child.renderHTTP(ctx))
        return d

    def _POST_uri(self, req):
        childcap = get_arg(req, "uri")
        if not childcap:
            raise WebError("set-uri requires a uri")
        name = get_arg(req, "name")
        if not name:
            raise WebError("set-uri requires a name")
        charset = get_arg(req, "_charset", "utf-8")
        name = name.decode(charset)
        replace = parse_replace_arg(get_arg(req, "replace", "true"))

        # We mustn't pass childcap for the readcap argument because we don't
        # know whether it is a read cap. Passing a read cap as the writecap
        # argument will work (it ends up calling NodeMaker.create_from_cap,
        # which derives a readcap if necessary and possible).
        d = self.node.set_uri(name, childcap, None, overwrite=replace)
        d.addCallback(lambda res: childcap)
        return d

    def _POST_unlink(self, req):
        name = get_arg(req, "name")
        if name is None:
            # apparently an <input type="hidden" name="name" value="">
            # won't show up in the resulting encoded form: the 'name'
            # field is completely missing. So to allow unlinking of a
            # child with a name that is the empty string, we have to
            # pretend that None means ''. The only downside of this is
            # a slightly confusing error message if someone does a POST
            # without a name= field. For our own HTML this isn't a big
            # deal, because we create the 'unlink' POST buttons ourselves.
            name = ''
        charset = get_arg(req, "_charset", "utf-8")
        name = name.decode(charset)
        d = self.node.delete(name)
        d.addCallback(lambda res: "thing unlinked")
        return d

    def _POST_rename(self, req):
        # rename is identical to relink, but to_dir is not allowed
        # and to_name is required.
        if get_arg(req, "to_dir") is not None:
            raise WebError("to_dir= is not valid for rename")
        if get_arg(req, "to_name") is None:
            raise WebError("to_name= is required for rename")
        return self._POST_relink(req)

    def _POST_relink(self, req):
        charset = get_arg(req, "_charset", "utf-8")
        replace = parse_replace_arg(get_arg(req, "replace", "true"))

        from_name = get_arg(req, "from_name")
        if from_name is not None:
            from_name = from_name.strip()
            from_name = from_name.decode(charset)
            assert isinstance(from_name, unicode)
        else:
            raise WebError("from_name= is required")

        to_name = get_arg(req, "to_name")
        if to_name is not None:
            to_name = to_name.strip()
            to_name = to_name.decode(charset)
            assert isinstance(to_name, unicode)
        else:
            to_name = from_name

        # Disallow slashes in both from_name and to_name, that would only
        # cause confusion.
        if "/" in from_name:
            raise WebError("from_name= may not contain a slash",
                           http.BAD_REQUEST)
        if "/" in to_name:
            raise WebError("to_name= may not contain a slash",
                           http.BAD_REQUEST)

        to_dir = get_arg(req, "to_dir")
        if to_dir is not None and to_dir != self.node.get_write_uri():
            to_dir = to_dir.strip()
            to_dir = to_dir.decode(charset)
            assert isinstance(to_dir, unicode)
            to_path = to_dir.split(u"/")
            to_root = self.client.nodemaker.create_from_cap(to_str(to_path[0]))
            if not IDirectoryNode.providedBy(to_root):
                raise WebError("to_dir is not a directory", http.BAD_REQUEST)
            d = to_root.get_child_at_path(to_path[1:])
        else:
            d = defer.succeed(self.node)

        def _got_new_parent(new_parent):
            if not IDirectoryNode.providedBy(new_parent):
                raise WebError("to_dir is not a directory", http.BAD_REQUEST)

            return self.node.move_child_to(from_name, new_parent,
                                           to_name, replace)
        d.addCallback(_got_new_parent)
        d.addCallback(lambda res: "thing moved")
        return d

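    # For example, t=relink&from_name=a&to_name=b&to_dir=$OTHER_DIRCAP (the
    # cap is a placeholder) moves child "a" of this directory to child "b" of
    # the target directory, while t=rename (above) only renames within this
    # directory.
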
    def _maybe_literal(self, res, Results_Class):
        if res:
            return Results_Class(self.client, res)
        return LiteralCheckResultsRenderer(self.client)

    def _POST_check(self, req):
        # check this directory
        verify = boolean_of_arg(get_arg(req, "verify", "false"))
        repair = boolean_of_arg(get_arg(req, "repair", "false"))
        add_lease = boolean_of_arg(get_arg(req, "add-lease", "false"))
        if repair:
            d = self.node.check_and_repair(Monitor(), verify, add_lease)
            d.addCallback(self._maybe_literal, CheckAndRepairResultsRenderer)
        else:
            d = self.node.check(Monitor(), verify, add_lease)
            d.addCallback(self._maybe_literal, CheckResultsRenderer)
        return d

    def _start_operation(self, monitor, renderer, ctx):
        self._operations.add_monitor(ctx, monitor, renderer)
        return self._operations.redirect_to(ctx)

    def _POST_start_deep_check(self, ctx):
        # check this directory and everything reachable from it
        if not get_arg(ctx, "ophandle"):
            raise NeedOperationHandleError("slow operation requires ophandle=")
        verify = boolean_of_arg(get_arg(ctx, "verify", "false"))
        repair = boolean_of_arg(get_arg(ctx, "repair", "false"))
        add_lease = boolean_of_arg(get_arg(ctx, "add-lease", "false"))
        if repair:
            monitor = self.node.start_deep_check_and_repair(verify, add_lease)
            renderer = DeepCheckAndRepairResultsRenderer(self.client, monitor)
        else:
            monitor = self.node.start_deep_check(verify, add_lease)
            renderer = DeepCheckResultsRenderer(self.client, monitor)
        return self._start_operation(monitor, renderer, ctx)

    def _POST_stream_deep_check(self, ctx):
        verify = boolean_of_arg(get_arg(ctx, "verify", "false"))
        repair = boolean_of_arg(get_arg(ctx, "repair", "false"))
        add_lease = boolean_of_arg(get_arg(ctx, "add-lease", "false"))
        walker = DeepCheckStreamer(ctx, self.node, verify, repair, add_lease)
        monitor = self.node.deep_traverse(walker)
        walker.setMonitor(monitor)
        # register to hear stopProducing. The walker ignores pauseProducing.
        IRequest(ctx).registerProducer(walker, True)
        d = monitor.when_done()
        def _done(res):
            IRequest(ctx).unregisterProducer()
            return res
        d.addBoth(_done)
        def _cancelled(f):
            f.trap(OperationCancelledError)
            return "Operation Cancelled"
        d.addErrback(_cancelled)
        def _error(f):
            # signal the error as a non-JSON "ERROR:" line, plus exception
            msg = "ERROR: %s(%s)\n" % (f.value.__class__.__name__,
                                       ", ".join([str(a) for a in f.value.args]))
            msg += str(f)
            return msg
        d.addErrback(_error)
        return d

    def _POST_start_manifest(self, ctx):
        if not get_arg(ctx, "ophandle"):
            raise NeedOperationHandleError("slow operation requires ophandle=")
        monitor = self.node.build_manifest()
        renderer = ManifestResults(self.client, monitor)
        return self._start_operation(monitor, renderer, ctx)

    def _POST_start_deep_size(self, ctx):
        if not get_arg(ctx, "ophandle"):
            raise NeedOperationHandleError("slow operation requires ophandle=")
        monitor = self.node.start_deep_stats()
        renderer = DeepSizeResults(self.client, monitor)
        return self._start_operation(monitor, renderer, ctx)

    def _POST_start_deep_stats(self, ctx):
        if not get_arg(ctx, "ophandle"):
            raise NeedOperationHandleError("slow operation requires ophandle=")
        monitor = self.node.start_deep_stats()
        renderer = DeepStatsResults(self.client, monitor)
        return self._start_operation(monitor, renderer, ctx)

    def _POST_stream_manifest(self, ctx):
        walker = ManifestStreamer(ctx, self.node)
        monitor = self.node.deep_traverse(walker)
        walker.setMonitor(monitor)
        # register to hear stopProducing. The walker ignores pauseProducing.
        IRequest(ctx).registerProducer(walker, True)
        d = monitor.when_done()
        def _done(res):
            IRequest(ctx).unregisterProducer()
            return res
        d.addBoth(_done)
        def _cancelled(f):
            f.trap(OperationCancelledError)
            return "Operation Cancelled"
        d.addErrback(_cancelled)
        def _error(f):
            # signal the error as a non-JSON "ERROR:" line, plus exception
            msg = "ERROR: %s(%s)\n" % (f.value.__class__.__name__,
                                       ", ".join([str(a) for a in f.value.args]))
            msg += str(f)
            return msg
        d.addErrback(_error)
        return d

    def _POST_set_children(self, req):
        replace = parse_replace_arg(get_arg(req, "replace", "true"))
        req.content.seek(0)
        body = req.content.read()
        try:
            children = json.loads(body)
        except ValueError as le:
            le.args = tuple(le.args + (body,))
            # TODO test handling of bad JSON
            raise
        cs = {}
        for name, (file_or_dir, mddict) in children.iteritems():
            name = unicode(name) # json returns str *or* unicode
            writecap = mddict.get('rw_uri')
            if writecap is not None:
                writecap = str(writecap)
            readcap = mddict.get('ro_uri')
            if readcap is not None:
                readcap = str(readcap)
            cs[name] = (writecap, readcap, mddict.get('metadata'))
        d = self.node.set_children(cs, replace)
        d.addCallback(lambda res: "Okay so I did it.")
        # TODO: results
        return d

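    # The JSON body parsed by _POST_set_children above has roughly this shape
    # (capability strings abbreviated; a real request carries full caps):
    #
    #   { "childname": [ "filenode",
    #                    { "rw_uri": "URI:...", "ro_uri": "URI:...",
    #                      "metadata": { ... } } ] }

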
def abbreviated_dirnode(dirnode):
    u = from_string_dirnode(dirnode.get_uri())
    return u.abbrev_si()

SPACE = u"\u00A0"*2


class DirectoryAsHTML(rend.Page):
    # The remainder of this class is to render the directory into
    # human- and browser-oriented HTML.
    docFactory = getxmlfile("directory.xhtml")
    addSlash = True

    def __init__(self, node, default_mutable_format):
        rend.Page.__init__(self)
        self.node = node

        assert default_mutable_format in (MDMF_VERSION, SDMF_VERSION)
        self.default_mutable_format = default_mutable_format

    def beforeRender(self, ctx):
        # attempt to get the dirnode's children, stashing them (or the
        # failure that results) for later use
        d = self.node.list()
        def _good(children):
            # Deferreds don't optimize out tail recursion, and the way
            # Nevow's flattener handles Deferreds doesn't take this into
            # account. As a result, large lists of Deferreds that fire in the
            # same turn (i.e. the output of defer.succeed) will cause a stack
            # overflow. To work around this, we insert a turn break after
            # every 100 items, using foolscap's fireEventually(). This gives
            # the stack a chance to be popped. It would also work to put
            # every item in its own turn, but that'd be a lot more
            # inefficient. This addresses ticket #237, for which I was never
            # able to create a failing unit test.
            output = []
            for i,item in enumerate(sorted(children.items())):
                if i % 100 == 0:
                    output.append(fireEventually(item))
                else:
                    output.append(item)
            self.dirnode_children = output
            return ctx
        def _bad(f):
            text, code = humanize_failure(f)
            self.dirnode_children = None
            self.dirnode_children_error = text
            return ctx
        d.addCallbacks(_good, _bad)
        return d

    def render_title(self, ctx, data):
        si_s = abbreviated_dirnode(self.node)
        header = ["Tahoe-LAFS - Directory SI=%s" % si_s]
        if self.node.is_unknown():
            header.append(" (unknown)")
        elif not self.node.is_mutable():
            header.append(" (immutable)")
        elif self.node.is_readonly():
            header.append(" (read-only)")
        else:
            header.append(" (modifiable)")
        return ctx.tag[header]

    def render_header(self, ctx, data):
        si_s = abbreviated_dirnode(self.node)
        header = ["Tahoe-LAFS Directory SI=", T.span(class_="data-chars")[si_s]]
        if self.node.is_unknown():
            header.append(" (unknown)")
        elif not self.node.is_mutable():
            header.append(" (immutable)")
        elif self.node.is_readonly():
            header.append(" (read-only)")
        return ctx.tag[header]

    def render_welcome(self, ctx, data):
        link = get_root(ctx)
        return ctx.tag[T.a(href=link)["Return to Welcome page"]]

    def render_show_readonly(self, ctx, data):
        if self.node.is_unknown() or self.node.is_readonly():
            return ""
        rocap = self.node.get_readonly_uri()
        root = get_root(ctx)
        uri_link = "%s/uri/%s/" % (root, urllib.quote(rocap))
        return ctx.tag[T.a(href=uri_link)["Read-Only Version"]]

    def render_try_children(self, ctx, data):
        # if the dirnode can be retrieved, render a table of children.
        # Otherwise, render an apologetic error message.
        if self.dirnode_children is not None:
            return ctx.tag
        else:
            return T.div[T.p["Error reading directory:"],
                         T.p[self.dirnode_children_error]]

    def data_children(self, ctx, data):
        return self.dirnode_children

    def render_row(self, ctx, data):
        name, (target, metadata) = data
        name = name.encode("utf-8")
        assert not isinstance(name, unicode)
        nameurl = urllib.quote(name, safe="") # encode any slashes too

        root = get_root(ctx)
        here = "%s/uri/%s/" % (root, urllib.quote(self.node.get_uri()))
        if self.node.is_unknown() or self.node.is_readonly():
            unlink = "-"
            rename = "-"
        else:
            # this creates a button which will cause our _POST_unlink method
            # to be invoked, which unlinks the file and then redirects the
            # browser back to this directory
            unlink = T.form(action=here, method="post")[
                T.input(type='hidden', name='t', value='unlink'),
                T.input(type='hidden', name='name', value=name),
                T.input(type='hidden', name='when_done', value="."),
                T.input(type='submit', _class='btn', value='unlink', name="unlink"),
                ]

            rename = T.form(action=here, method="get")[
                T.input(type='hidden', name='t', value='rename-form'),
                T.input(type='hidden', name='name', value=name),
                T.input(type='hidden', name='when_done', value="."),
                T.input(type='submit', _class='btn', value='rename/relink', name="rename"),
                ]

        ctx.fillSlots("unlink", unlink)
        ctx.fillSlots("rename", rename)

        times = []
        linkcrtime = metadata.get('tahoe', {}).get("linkcrtime")
        if linkcrtime is not None:
            times.append("lcr: " + render_time(linkcrtime))
        else:
            # For backwards-compatibility with links last modified by Tahoe < 1.4.0:
            if "ctime" in metadata:
                ctime = render_time(metadata["ctime"])
                times.append("c: " + ctime)
        linkmotime = metadata.get('tahoe', {}).get("linkmotime")
        if linkmotime is not None:
            if times:
                times.append(T.br())
            times.append("lmo: " + render_time(linkmotime))
        else:
            # For backwards-compatibility with links last modified by Tahoe < 1.4.0:
            if "mtime" in metadata:
                mtime = render_time(metadata["mtime"])
                if times:
                    times.append(T.br())
                times.append("m: " + mtime)
        ctx.fillSlots("times", times)

        assert IFilesystemNode.providedBy(target), target
        target_uri = target.get_uri() or ""
        quoted_uri = urllib.quote(target_uri, safe="") # escape slashes too

        if IMutableFileNode.providedBy(target):
            # to prevent javascript in displayed .html files from stealing a
            # secret directory URI from the URL, send the browser to a URI-based
            # page that doesn't know about the directory at all
            dlurl = "%s/file/%s/@@named=/%s" % (root, quoted_uri, nameurl)

            ctx.fillSlots("filename", T.a(href=dlurl, rel="noreferrer")[name])
            ctx.fillSlots("type", "SSK")

            ctx.fillSlots("size", "?")

            info_link = "%s/uri/%s?t=info" % (root, quoted_uri)

        elif IImmutableFileNode.providedBy(target):
            dlurl = "%s/file/%s/@@named=/%s" % (root, quoted_uri, nameurl)

            ctx.fillSlots("filename", T.a(href=dlurl, rel="noreferrer")[name])
            ctx.fillSlots("type", "FILE")

            ctx.fillSlots("size", target.get_size())

            info_link = "%s/uri/%s?t=info" % (root, quoted_uri)

        elif IDirectoryNode.providedBy(target):
            # directory
            uri_link = "%s/uri/%s/" % (root, urllib.quote(target_uri))
            ctx.fillSlots("filename", T.a(href=uri_link)[name])
            if not target.is_mutable():
                dirtype = "DIR-IMM"
            elif target.is_readonly():
                dirtype = "DIR-RO"
            else:
                dirtype = "DIR"
            ctx.fillSlots("type", dirtype)
            ctx.fillSlots("size", "-")
            info_link = "%s/uri/%s/?t=info" % (root, quoted_uri)

        elif isinstance(target, ProhibitedNode):
            ctx.fillSlots("filename", T.strike[name])
            if IDirectoryNode.providedBy(target.wrapped_node):
                blacklisted_type = "DIR-BLACKLISTED"
            else:
                blacklisted_type = "BLACKLISTED"
            ctx.fillSlots("type", blacklisted_type)
            ctx.fillSlots("size", "-")
            info_link = None
            ctx.fillSlots("info", ["Access Prohibited:", T.br, target.reason])

        else:
            # unknown
            ctx.fillSlots("filename", name)
            if target.get_write_uri() is not None:
                unknowntype = "?"
            elif not self.node.is_mutable() or target.is_alleged_immutable():
                unknowntype = "?-IMM"
            else:
                unknowntype = "?-RO"
            ctx.fillSlots("type", unknowntype)
            ctx.fillSlots("size", "-")
            # use a directory-relative info link, so we can extract both the
            # writecap and the readcap
            info_link = "%s?t=info" % urllib.quote(name)

        if info_link:
            ctx.fillSlots("info", T.a(href=info_link)["More Info"])

        return ctx.tag

    # XXX: similar to render_upload_form and render_mkdir_form in root.py.
    def render_forms(self, ctx, data):
        forms = []

        if self.node.is_readonly():
            return T.div["No upload forms: directory is read-only"]
        if self.dirnode_children is None:
            return T.div["No upload forms: directory is unreadable"]

        mkdir_sdmf = T.input(type='radio', name='format',
                             value='sdmf', id='mkdir-sdmf',
                             checked='checked')
        mkdir_mdmf = T.input(type='radio', name='format',
                             value='mdmf', id='mkdir-mdmf')

        mkdir_form = T.form(action=".", method="post",
                            enctype="multipart/form-data")[
            T.fieldset[
                T.input(type="hidden", name="t", value="mkdir"),
                T.input(type="hidden", name="when_done", value="."),
                T.legend(class_="freeform-form-label")["Create a new directory in this directory"],
                "New directory name:"+SPACE, T.br,
                T.input(type="text", name="name"), SPACE,
                T.div(class_="form-inline")[
                    mkdir_sdmf, T.label(for_='mutable-directory-sdmf')[SPACE, "SDMF"], SPACE*2,
                    mkdir_mdmf, T.label(for_='mutable-directory-mdmf')[SPACE, "MDMF (experimental)"]
                ],
                T.input(type="submit", class_="btn", value="Create")
            ]]
        forms.append(T.div(class_="freeform-form")[mkdir_form])

        upload_chk = T.input(type='radio', name='format',
                             value='chk', id='upload-chk',
                             checked='checked')
        upload_sdmf = T.input(type='radio', name='format',
                              value='sdmf', id='upload-sdmf')
        upload_mdmf = T.input(type='radio', name='format',
                              value='mdmf', id='upload-mdmf')

        upload_form = T.form(action=".", method="post",
                             enctype="multipart/form-data")[
            T.fieldset[
                T.input(type="hidden", name="t", value="upload"),
                T.input(type="hidden", name="when_done", value="."),
                T.legend(class_="freeform-form-label")["Upload a file to this directory"],
                "Choose a file to upload:"+SPACE,
                T.input(type="file", name="file", class_="freeform-input-file"), SPACE,
                T.div(class_="form-inline")[
                    upload_chk, T.label(for_="upload-chk")[SPACE, "Immutable"], SPACE*2,
                    upload_sdmf, T.label(for_="upload-sdmf")[SPACE, "SDMF"], SPACE*2,
                    upload_mdmf, T.label(for_="upload-mdmf")[SPACE, "MDMF (experimental)"]
                ],
                T.input(type="submit", class_="btn", value="Upload"), SPACE*2,
            ]]
        forms.append(T.div(class_="freeform-form")[upload_form])

        attach_form = T.form(action=".", method="post",
                             enctype="multipart/form-data")[
            T.fieldset[ T.div(class_="form-inline")[
                T.input(type="hidden", name="t", value="uri"),
                T.input(type="hidden", name="when_done", value="."),
                T.legend(class_="freeform-form-label")["Add a link to a file or directory which is already in Tahoe-LAFS."],
                "New child name:"+SPACE,
                T.input(type="text", name="name"), SPACE*2, T.br,
                "URI of new child:"+SPACE,
                T.input(type="text", name="uri"), SPACE,
                T.input(type="submit", class_="btn", value="Attach"),
            ]]]
        forms.append(T.div(class_="freeform-form")[attach_form])
        return forms

    def render_results(self, ctx, data):
        req = IRequest(ctx)
        return get_arg(req, "results", "")


def DirectoryJSONMetadata(ctx, dirnode):
    d = dirnode.list()
    def _got(children):
        kids = {}
        for name, (childnode, metadata) in children.iteritems():
            assert IFilesystemNode.providedBy(childnode), childnode
            rw_uri = childnode.get_write_uri()
            ro_uri = childnode.get_readonly_uri()
            if IFileNode.providedBy(childnode):
                kiddata = ("filenode", get_filenode_metadata(childnode))
            elif IDirectoryNode.providedBy(childnode):
                kiddata = ("dirnode", {'mutable': childnode.is_mutable()})
            else:
                kiddata = ("unknown", {})

            kiddata[1]["metadata"] = metadata
            if rw_uri:
                kiddata[1]["rw_uri"] = rw_uri
            if ro_uri:
                kiddata[1]["ro_uri"] = ro_uri
            verifycap = childnode.get_verify_cap()
            if verifycap:
                kiddata[1]['verify_uri'] = verifycap.to_string()

            kids[name] = kiddata

        drw_uri = dirnode.get_write_uri()
        dro_uri = dirnode.get_readonly_uri()
        contents = { 'children': kids }
        if dro_uri:
            contents['ro_uri'] = dro_uri
        if drw_uri:
            contents['rw_uri'] = drw_uri
        verifycap = dirnode.get_verify_cap()
        if verifycap:
            contents['verify_uri'] = verifycap.to_string()
        contents['mutable'] = dirnode.is_mutable()
        data = ("dirnode", contents)
        return json.dumps(data, indent=1) + "\n"
    d.addCallback(_got)
    d.addCallback(text_plain, ctx)

    def error(f):
        message, code = humanize_failure(f)
        req = IRequest(ctx)
        req.setResponseCode(code)
        return json.dumps({
            "error": message,
        })
    d.addErrback(error)
    return d

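# Roughly, the t=json document produced by _got() above looks like this
# (values elided or abbreviated):
#
#   [ "dirnode", { "rw_uri": "...", "ro_uri": "...", "verify_uri": "...",
#                  "mutable": true,
#                  "children": { "name": [ "filenode", { ... } ], ... } } ]
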
def DirectoryURI(ctx, dirnode):
    return text_plain(dirnode.get_uri(), ctx)

def DirectoryReadonlyURI(ctx, dirnode):
    return text_plain(dirnode.get_readonly_uri(), ctx)

class RenameForm(rend.Page):
    addSlash = True
    docFactory = getxmlfile("rename-form.xhtml")

    def render_title(self, ctx, data):
        return ctx.tag["Directory SI=%s" % abbreviated_dirnode(self.original)]

    def render_header(self, ctx, data):
        header = ["Rename "
                  "in directory SI=%s" % abbreviated_dirnode(self.original),
                  ]

        if self.original.is_readonly():
            header.append(" (readonly!)")
        header.append(":")
        return ctx.tag[header]

    def render_when_done(self, ctx, data):
        return T.input(type="hidden", name="when_done", value=".")

    def render_get_name(self, ctx, data):
        req = IRequest(ctx)
        name = get_arg(req, "name", "")
        ctx.tag.attributes['value'] = name
        return ctx.tag


class ReloadableMonitorElement(Element, object):
    """
    Like 'ReloadMixin', but for twisted.web.template style. This
    provides renderers for "reload" and "refresh" and a self.monitor
    attribute.
    """
    refresh_time = timedelta(seconds=60)

    def __init__(self, monitor):
        self.monitor = monitor
        super(ReloadableMonitorElement, self).__init__()

    @renderer
    def refresh(self, req, tag):
        if self.monitor.is_finished():
            return ""
        tag.attributes["http-equiv"] = "refresh"
        tag.attributes["content"] = str(self.refresh_time)
        return tag

    @renderer
    def reload(self, req, tag):
        if self.monitor.is_finished():
            return ""
        # url.gethere would break a proxy, so the correct thing to do is
        # req.path[-1] + queryargs
        ophandle = req.prepath[-1]
        reload_target = ophandle + "?output=html"
        cancel_target = ophandle + "?t=cancel"
        cancel_button = tags.form(
            [
                tags.input(type="submit", value="Cancel"),
            ],
            action=cancel_target,
            method="POST",
            enctype="multipart/form-data",
        )

        return tag([
            "Operation still running: ",
            tags.a("Reload", href=reload_target),
            cancel_button,
        ])


def _slashify_path(path):
    """
    Converts a tuple from a 'manifest' path into a string with slashes
    in it
    """
    if not path:
        return ""
    return "/".join([p.encode("utf-8") for p in path])


def _cap_to_link(root, path, cap):
    """
    Turns a capability-string into a WebAPI link tag

    :param root: the root piece of the URI

    :param cap: the capability-string

    :returns: tags.a instance
    """
    # TODO: we need a clean consistent way to get the type of a cap string
    if cap:
        if cap.startswith("URI:CHK") or cap.startswith("URI:SSK"):
            nameurl = urllib.quote(path[-1].encode("utf-8"))
            uri_link = "%s/file/%s/@@named=/%s" % (root, urllib.quote(cap),
                                                   nameurl)
        else:
            uri_link = "%s/uri/%s" % (root, urllib.quote(cap, safe=""))
        return tags.a(cap, href=uri_link)
    else:
        return ""

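# For example, _slashify_path((u"subdir", u"file.txt")) yields
# "subdir/file.txt", and _cap_to_link() wraps a CHK/SSK cap in a link to the
# /file/... download URL, or any other cap in a link to the generic /uri/...
# page.

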
class ManifestElement(ReloadableMonitorElement):
    loader = XMLFile(FilePath(__file__).sibling("manifest.xhtml"))

    def _si_abbrev(self):
        si = self.monitor.origin_si
        if not si:
            return "<LIT>"
        return base32.b2a(si)[:6]

    @renderer
    def title(self, req, tag):
        return tag(
            "Manifest of SI={}".format(self._si_abbrev())
        )

    @renderer
    def header(self, req, tag):
        return tag(
            "Manifest of SI={}".format(self._si_abbrev())
        )

    @renderer
    def items(self, req, tag):
        manifest = self.monitor.get_status()["manifest"]
        root = get_root(req)
        rows = [
            {
                "path": _slashify_path(path),
                "cap": _cap_to_link(root, path, cap),
            }
            for path, cap in manifest
        ]
        return SlotsSequenceElement(tag, rows)


class ManifestResults(MultiFormatResource, ReloadMixin):

    # Control MultiFormatPage
    formatArgument = "output"
    formatDefault = "html"

    def __init__(self, client, monitor):
        self.client = client
        self.monitor = monitor

    # The default format is HTML but the HTML renderer is just renderHTTP.
    render_HTML = None

    def render_HTML(self, req):
        return renderElement(
            req,
            ManifestElement(self.monitor)
        )

    def render_TEXT(self, req):
        req.setHeader("content-type", "text/plain")
        lines = []
        is_finished = self.monitor.is_finished()
        lines.append("finished: " + {True: "yes", False: "no"}[is_finished])
        for path, cap in self.monitor.get_status()["manifest"]:
            lines.append(_slashify_path(path) + " " + cap)
        return "\n".join(lines) + "\n"

    def render_JSON(self, req):
        req.setHeader("content-type", "text/plain")
        m = self.monitor
        s = m.get_status()

        if m.origin_si:
            origin_base32 = base32.b2a(m.origin_si)
        else:
            origin_base32 = ""
        status = { "stats": s["stats"],
                   "finished": m.is_finished(),
                   "origin": origin_base32,
                   }
        if m.is_finished():
            # don't return manifest/verifycaps/SIs unless the operation is
            # done, to save on CPU/memory (both here and in the HTTP client
            # who has to unpack the JSON). Tests show that the ManifestWalker
            # needs about 1092 bytes per item, the JSON we generate here
            # requires about 503 bytes per item, and some internal overhead
            # (perhaps transport-layer buffers in twisted.web?) requires an
            # additional 1047 bytes per item.
            status.update({ "manifest": s["manifest"],
                            "verifycaps": [i for i in s["verifycaps"]],
                            "storage-index": [i for i in s["storage-index"]],
                            })
            # simplejson doesn't know how to serialize a set. We use a
            # generator that walks the set rather than list(setofthing) to
            # save a small amount of memory (4B*len) and a moderate amount of
            # CPU.
        return json.dumps(status, indent=1)

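# A finished t=start-manifest operation rendered with output=JSON (above)
# therefore looks roughly like this (values abbreviated):
#
#   { "finished": true, "origin": "<base32 storage index>",
#     "stats": { ... },
#     "manifest": [ [ ["subdir", "file.txt"], "URI:CHK:..." ], ... ],
#     "verifycaps": [ ... ], "storage-index": [ ... ] }

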
class DeepSizeResults(MultiFormatPage):
|
|
|
|
# Control MultiFormatPage
|
|
|
|
formatArgument = "output"
|
|
|
|
formatDefault = "html"
|
|
|
|
|
2009-02-20 19:15:54 +00:00
|
|
|
def __init__(self, client, monitor):
|
|
|
|
self.client = client
|
2008-10-22 00:03:07 +00:00
|
|
|
self.monitor = monitor
|
2008-05-19 19:57:04 +00:00
|
|
|
|
2017-07-25 15:15:17 +00:00
|
|
|
def render_HTML(self, req):
|
2008-10-22 00:52:56 +00:00
|
|
|
is_finished = self.monitor.is_finished()
|
|
|
|
output = "finished: " + {True: "yes", False: "no"}[is_finished] + "\n"
|
|
|
|
if is_finished:
|
2008-10-22 00:03:07 +00:00
|
|
|
stats = self.monitor.get_status()
|
|
|
|
total = (stats.get("size-immutable-files", 0)
|
|
|
|
+ stats.get("size-mutable-files", 0)
|
|
|
|
+ stats.get("size-directories", 0))
|
|
|
|
output += "size: %d\n" % total
|
|
|
|
return output
|
2017-07-25 15:15:17 +00:00
|
|
|
render_TEXT = render_HTML

    def render_JSON(self, req):
        req.setHeader("content-type", "text/plain")
        status = {"finished": self.monitor.is_finished(),
                  "size": self.monitor.get_status(),
                  }
        return json.dumps(status)

class DeepStatsResults(rend.Page):
    def __init__(self, client, monitor):
        self.client = client
        self.monitor = monitor

    def renderHTTP(self, ctx):
        # JSON only
        inevow.IRequest(ctx).setHeader("content-type", "text/plain")
        s = self.monitor.get_status().copy()
        s["finished"] = self.monitor.is_finished()
        return json.dumps(s, indent=1)
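
    # The body produced above is the operation's stats dictionary (the same
    # counters DeepSizeResults sums, e.g. "size-immutable-files") with a
    # "finished" key added, encoded as JSON.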

@implementer(IPushProducer)
class ManifestStreamer(dirnode.DeepStats):

    def __init__(self, ctx, origin):
        dirnode.DeepStats.__init__(self, origin)
        self.req = IRequest(ctx)

    def setMonitor(self, monitor):
        self.monitor = monitor
    def pauseProducing(self):
        pass
    def resumeProducing(self):
        pass
    def stopProducing(self):
        self.monitor.cancel()

    def add_node(self, node, path):
        dirnode.DeepStats.add_node(self, node, path)
        d = {"path": path,
             "cap": node.get_uri()}

        if IDirectoryNode.providedBy(node):
            d["type"] = "directory"
        elif IFileNode.providedBy(node):
            d["type"] = "file"
        else:
            d["type"] = "unknown"

        v = node.get_verify_cap()
        if v:
            v = v.to_string()
        d["verifycap"] = v or ""

        r = node.get_repair_cap()
        if r:
            r = r.to_string()
        d["repaircap"] = r or ""

        si = node.get_storage_index()
        if si:
            si = base32.b2a(si)
        d["storage-index"] = si or ""

        j = json.dumps(d, ensure_ascii=True)
        assert "\n" not in j
        self.req.write(j+"\n")

    def finish(self):
        stats = dirnode.DeepStats.get_results(self)
        d = {"type": "stats",
             "stats": stats,
             }
        j = json.dumps(d, ensure_ascii=True)
        assert "\n" not in j
        self.req.write(j+"\n")
        return ""
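
# A minimal sketch of how a client might consume the output of
# ManifestStreamer: add_node() and finish() each write exactly one JSON
# object per line, so the response body can be treated as newline-delimited
# JSON. This helper is purely illustrative and is not used by the handlers
# in this module.
def _example_parse_manifest_stream(body):
    """Yield one decoded dict per non-empty line of a streamed manifest.

    The final dict has "type": "stats"; the others describe individual
    manifest entries (path, cap, verifycap, repaircap, storage-index).
    """
    for line in body.splitlines():
        if line.strip():
            yield json.loads(line)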

@implementer(IPushProducer)
class DeepCheckStreamer(dirnode.DeepStats):

    def __init__(self, ctx, origin, verify, repair, add_lease):
        dirnode.DeepStats.__init__(self, origin)
        self.req = IRequest(ctx)
        self.verify = verify
        self.repair = repair
        self.add_lease = add_lease

    def setMonitor(self, monitor):
        self.monitor = monitor
    def pauseProducing(self):
        pass
    def resumeProducing(self):
        pass
    def stopProducing(self):
        self.monitor.cancel()

    def add_node(self, node, path):
        dirnode.DeepStats.add_node(self, node, path)
        data = {"path": path,
                "cap": node.get_uri()}

        if IDirectoryNode.providedBy(node):
            data["type"] = "directory"
        elif IFileNode.providedBy(node):
            data["type"] = "file"
        else:
            data["type"] = "unknown"

        v = node.get_verify_cap()
        if v:
            v = v.to_string()
        data["verifycap"] = v or ""

        r = node.get_repair_cap()
        if r:
            r = r.to_string()
        data["repaircap"] = r or ""

        si = node.get_storage_index()
        if si:
            si = base32.b2a(si)
        data["storage-index"] = si or ""

        if self.repair:
            d = node.check_and_repair(self.monitor, self.verify, self.add_lease)
            d.addCallback(self.add_check_and_repair, data)
        else:
            d = node.check(self.monitor, self.verify, self.add_lease)
            d.addCallback(self.add_check, data)
        d.addCallback(self.write_line)
        return d

    def add_check_and_repair(self, crr, data):
        data["check-and-repair-results"] = json_check_and_repair_results(crr)
        return data

    def add_check(self, cr, data):
        data["check-results"] = json_check_results(cr)
        return data

    def write_line(self, data):
        j = json.dumps(data, ensure_ascii=True)
        assert "\n" not in j
        self.req.write(j+"\n")

    def finish(self):
        stats = dirnode.DeepStats.get_results(self)
        d = {"type": "stats",
             "stats": stats,
             }
        j = json.dumps(d, ensure_ascii=True)
        assert "\n" not in j
        self.req.write(j+"\n")
        return ""
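
# The stream written by DeepCheckStreamer uses the same one-JSON-object-per-
# line framing as ManifestStreamer (so the _example_parse_manifest_stream()
# sketch above applies here too): each per-node line additionally carries
# either a "check-results" or a "check-and-repair-results" member, depending
# on whether repair was requested, and the final line is the
# {"type": "stats"} summary.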

class UnknownNodeHandler(RenderMixin, rend.Page):
    def __init__(self, client, node, parentnode=None, name=None):
        rend.Page.__init__(self)
        assert node
        self.node = node
        self.parentnode = parentnode
        self.name = name

    def render_GET(self, ctx):
        req = IRequest(ctx)
        t = get_arg(req, "t", "").strip()
        if t == "info":
            return MoreInfo(self.node)
        if t == "json":
            is_parent_known_immutable = self.parentnode and not self.parentnode.is_mutable()
            if self.parentnode and self.name:
                d = self.parentnode.get_metadata_for(self.name)
            else:
                d = defer.succeed(None)
            d.addCallback(lambda md: UnknownJSONMetadata(ctx, self.node, md, is_parent_known_immutable))
            return d
        raise WebError("GET unknown URI type: can only do t=info and t=json, not t=%s.\n"
                       "Using a webapi server that supports a later version of Tahoe "
                       "may help." % t)

def UnknownJSONMetadata(ctx, node, edge_metadata, is_parent_known_immutable):
    rw_uri = node.get_write_uri()
    ro_uri = node.get_readonly_uri()
    data = ("unknown", {})
    if ro_uri:
        data[1]['ro_uri'] = ro_uri
    if rw_uri:
        data[1]['rw_uri'] = rw_uri
        data[1]['mutable'] = True
    elif is_parent_known_immutable or node.is_alleged_immutable():
        data[1]['mutable'] = False
    # else we don't know whether it is mutable.

    if edge_metadata is not None:
        data[1]['metadata'] = edge_metadata
    return text_plain(json.dumps(data, indent=1) + "\n", ctx)
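
# For example, the JSON produced by UnknownJSONMetadata() for a read-only
# unknown node is roughly (values illustrative):
#
#   ["unknown", {"ro_uri": "URI:...", "mutable": false}]
#
# "rw_uri" and "mutable" appear only when they are known, and "metadata" only
# when edge metadata was available from the parent directory.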