import json
import urllib
from datetime import timedelta

from zope.interface import implementer
from twisted.internet import defer
from twisted.internet.interfaces import IPushProducer
from twisted.python.failure import Failure
from twisted.web import http
from twisted.web.resource import Resource
from twisted.web.template import (
    Element,
    XMLFile,
    renderElement,
    renderer,
    tags,
)
from hyperlink import URL
from twisted.python.filepath import FilePath
from nevow import url, rend, inevow, tags as T
from nevow.inevow import IRequest

from foolscap.api import fireEventually

from allmydata.util import base32
from allmydata.util.encodingutil import to_str
from allmydata.uri import (
    from_string_dirnode,
    from_string,
    CHKFileURI,
    WriteableSSKFileURI,
    ReadonlySSKFileURI,
)
from allmydata.interfaces import IDirectoryNode, IFileNode, IFilesystemNode, \
     IImmutableFileNode, IMutableFileNode, ExistingChildError, \
     NoSuchChildError, EmptyPathnameComponentError, SDMF_VERSION, MDMF_VERSION
from allmydata.blacklist import ProhibitedNode
from allmydata.monitor import Monitor, OperationCancelledError
from allmydata import dirnode
from allmydata.web.common import (
    text_plain,
    WebError,
    NeedOperationHandleError,
    boolean_of_arg,
    get_arg,
    get_root,
    parse_replace_arg,
    should_create_intermediate_directories,
    getxmlfile,
    RenderMixin,
    humanize_failure,
    convert_children_json,
    get_format,
    get_mutable_type,
    get_filenode_metadata,
    render_time,
    MultiFormatPage,
    MultiFormatResource,
    SlotsSequenceElement,
)
from allmydata.web.filenode import ReplaceMeMixin, \
     FileNodeHandler, PlaceHolderNodeHandler
from allmydata.web.check_results import CheckResultsRenderer, \
     CheckAndRepairResultsRenderer, DeepCheckResultsRenderer, \
     DeepCheckAndRepairResultsRenderer, LiteralCheckResultsRenderer
from allmydata.web.info import MoreInfo
from allmydata.web.operations import ReloadMixin
from allmydata.web.check_results import json_check_results, \
     json_check_and_repair_results


class BlockingFileError(Exception):
    # TODO: catch and transform
    """We cannot auto-create a parent directory, because there is a file in
    the way"""


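# Dispatch helper: pick a handler class based on the interface the node
# provides.  UnknownNodeHandler (defined further down in this module) is the
# fallback for caps this gateway cannot interpret.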
def make_handler_for(node, client, parentnode=None, name=None):
    if parentnode:
        assert IDirectoryNode.providedBy(parentnode)
    if IFileNode.providedBy(node):
        return FileNodeHandler(client, node, parentnode, name)
    if IDirectoryNode.providedBy(node):
        return DirectoryNodeHandler(client, node, parentnode, name)
    return UnknownNodeHandler(client, node, parentnode, name)


# did inherit from: RenderMixin, rend.Page, ReplaceMeMixin
# XXX is MultiFormatResource appropriate? this probably *should*
# support ?t=json but I don't know that all the variants already *did*
# support that..
class DirectoryNodeHandler(RenderMixin, ReplaceMeMixin, Resource, object):
    addSlash = True

    def __init__(self, client, node, parentnode=None, name=None):
        super(DirectoryNodeHandler, self).__init__()
        self.client = client
        assert node
        self.node = node
        self.parentnode = parentnode
        self.name = name
        self._operations = client.get_web_service().get_operations()

    def getChild(self, name, req):
        """
        Dynamically create a child for the given request and name
        """
        # XXX can we do this with putChild() instead? (i.e. does it
        # HAVE to be dynamic?)
        d = self.node.get(name.decode('utf8'))
        d.addBoth(self._got_child, req, name)
        return d

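    # _got_child handles both outcomes of self.node.get(): on success it
    # wraps the child in the right handler; on NoSuchChildError it may
    # create intermediate directories, hand back a PlaceHolderNodeHandler
    # for a pending PUT/mkdir, or simply return the failure.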
    def _got_child(self, node_or_failure, req, name):
        """
        Callback when self.node.get has returned
        """
        import six
        name = six.u(name)
        method = req.method
        nonterminal = len(req.postpath) > 1
        t = get_arg(req, "t", "").strip()  # XXX looking like MultiFormatResource..
        if isinstance(node_or_failure, Failure):
            f = node_or_failure
            f.trap(NoSuchChildError)
            # No child by this name. What should we do about it?
            if nonterminal:
                if should_create_intermediate_directories(req):
                    # create intermediate directories
                    d = self.node.create_subdirectory(name)
                    d.addCallback(make_handler_for,
                                  self.client, self.node, name)
                    return d
            else:
                # terminal node
                terminal_requests = (
                    ("POST", "mkdir"),
                    ("PUT", "mkdir"),
                    ("POST", "mkdir-with-children"),
                    ("POST", "mkdir-immutable")
                )
                if (method, t) in terminal_requests:
                    # final directory
                    kids = {}
                    if t in ("mkdir-with-children", "mkdir-immutable"):
                        req.content.seek(0)
                        kids_json = req.content.read()
                        kids = convert_children_json(
                            self.client.nodemaker,
                            kids_json,
                        )
                    file_format = get_format(req, None)
                    mutable = True
                    mt = get_mutable_type(file_format)
                    if t == "mkdir-immutable":
                        mutable = False

                    d = self.node.create_subdirectory(
                        name, kids,
                        mutable=mutable,
                        mutable_version=mt,
                    )
                    d.addCallback(
                        make_handler_for,
                        self.client, self.node, name,
                    )
                    return d
                leaf_requests = (
                    ("PUT",""),
                    ("PUT","uri"),
                )
                if (method, t) in leaf_requests:
                    # we were trying to find the leaf filenode (to put a new
                    # file in its place), and it didn't exist. That's ok,
                    # since that's the leaf node that we're about to create.
                    # We make a dummy one, which will respond to the PUT
                    # request by replacing itself.
                    return PlaceHolderNodeHandler(self.client, self.node, name)
            # otherwise, we just return a no-such-child error
            return f

        node = node_or_failure
        if nonterminal and should_create_intermediate_directories(req):
            if not IDirectoryNode.providedBy(node):
                # we would have put a new directory here, but there was a
                # file in the way.
                raise WebError("Unable to create directory '%s': "
                               "a file was in the way" % name,
                               http.CONFLICT)
        return make_handler_for(node, self.client, self.node, name)

    def render_DELETE(self, req):
        assert self.parentnode and self.name
        d = self.parentnode.delete(self.name)
        d.addCallback(lambda res: self.node.get_uri())
        return d

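    # GET ?t= dispatch: "" (HTML listing), "json", "info", "uri",
    # "readonly-uri" and "rename-form" are the recognized values; anything
    # else raises WebError.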
    def render_GET(self, req):
        # This is where all of the directory-related ?t=* code goes.
        t = get_arg(req, "t", "").strip()

        # t=info contains variable ophandles, t=rename-form contains the name
        # of the child being renamed. Neither is allowed an ETag.
        FIXED_OUTPUT_TYPES = ["", "json", "uri", "readonly-uri"]
        if not self.node.is_mutable() and t in FIXED_OUTPUT_TYPES:
            si = self.node.get_storage_index()
            if si and req.setETag('DIR:%s-%s' % (base32.b2a(si), t or "")):
                return ""

        if not t:
            # render the directory as HTML, using the docFactory and Nevow's
            # whole templating thing.
            return DirectoryAsHTML(self.node,
                                   self.client.mutable_file_default)

        if t == "json":
            return DirectoryJSONMetadata(req, self.node)
        if t == "info":
            return MoreInfo(self.node)
        if t == "uri":
            return DirectoryURI(req, self.node)
        if t == "readonly-uri":
            return DirectoryReadonlyURI(req, self.node)
        if t == 'rename-form':
            return RenameForm(self.node)

        raise WebError("GET directory: bad t=%s" % t)

    def render_PUT(self, req):
        t = get_arg(req, "t", "").strip()
        replace = parse_replace_arg(get_arg(req, "replace", "true"))

        if t == "mkdir":
            # our job was done by the traversal/create-intermediate-directory
            # process that got us here.
            return text_plain(self.node.get_uri(), req) # TODO: urlencode
        if t == "uri":
            if not replace:
                # they're trying to set_uri and that name is already occupied
                # (by us).
                raise ExistingChildError()
            d = self.replace_me_with_a_childcap(req, self.client, replace)
            # TODO: results
            return d

        raise WebError("PUT to a directory")

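    # POST ?t= dispatch; each branch delegates to a _POST_* helper below.
    # If a when_done= argument was supplied, the result is replaced with
    # that URL.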
    def render_POST(self, req):
        t = get_arg(req, "t", "").strip()

        if t == "mkdir":
            d = self._POST_mkdir(req)
        elif t == "mkdir-with-children":
            d = self._POST_mkdir_with_children(req)
        elif t == "mkdir-immutable":
            d = self._POST_mkdir_immutable(req)
        elif t == "upload":
            d = self._POST_upload(req) # this one needs the context
        elif t == "uri":
            d = self._POST_uri(req)
        elif t == "delete" or t == "unlink":
            d = self._POST_unlink(req)
        elif t == "rename":
            d = self._POST_rename(req)
        elif t == "relink":
            d = self._POST_relink(req)
        elif t == "check":
            d = self._POST_check(req)
        elif t == "start-deep-check":
            d = self._POST_start_deep_check(req)
        elif t == "stream-deep-check":
            d = self._POST_stream_deep_check(req)
        elif t == "start-manifest":
            d = self._POST_start_manifest(req)
        elif t == "start-deep-size":
            d = self._POST_start_deep_size(req)
        elif t == "start-deep-stats":
            d = self._POST_start_deep_stats(req)
        elif t == "stream-manifest":
            d = self._POST_stream_manifest(req)
        elif t == "set_children" or t == "set-children":
            d = self._POST_set_children(req)
        else:
            raise WebError("POST to a directory with bad t=%s" % t)

        when_done = get_arg(req, "when_done", None)
        if when_done:
            d.addCallback(lambda res: url.URL.fromString(when_done))
        return d

    def _POST_mkdir(self, req):
        name = get_arg(req, "name", "")
        if not name:
            # our job is done, it was handled by the code in got_child
            # which created the final directory (i.e. us)
            return defer.succeed(self.node.get_uri()) # TODO: urlencode
        name = name.decode("utf-8")
        replace = boolean_of_arg(get_arg(req, "replace", "true"))
        kids = {}
        mt = get_mutable_type(get_format(req, None))
        d = self.node.create_subdirectory(name, kids, overwrite=replace,
                                          mutable_version=mt)
        d.addCallback(lambda child: child.get_uri()) # TODO: urlencode
        return d

    def _POST_mkdir_with_children(self, req):
        name = get_arg(req, "name", "")
        if not name:
            # our job is done, it was handled by the code in got_child
            # which created the final directory (i.e. us)
            return defer.succeed(self.node.get_uri()) # TODO: urlencode
        name = name.decode("utf-8")
        # TODO: decide on replace= behavior, see #903
        #replace = boolean_of_arg(get_arg(req, "replace", "false"))
        req.content.seek(0)
        kids_json = req.content.read()
        kids = convert_children_json(self.client.nodemaker, kids_json)
        mt = get_mutable_type(get_format(req, None))
        d = self.node.create_subdirectory(name, kids, overwrite=False,
                                          mutable_version=mt)
        d.addCallback(lambda child: child.get_uri()) # TODO: urlencode
        return d

    def _POST_mkdir_immutable(self, req):
        name = get_arg(req, "name", "")
        if not name:
            # our job is done, it was handled by the code in got_child
            # which created the final directory (i.e. us)
            return defer.succeed(self.node.get_uri()) # TODO: urlencode
        name = name.decode("utf-8")
        # TODO: decide on replace= behavior, see #903
        #replace = boolean_of_arg(get_arg(req, "replace", "false"))
        req.content.seek(0)
        kids_json = req.content.read()
        kids = convert_children_json(self.client.nodemaker, kids_json)
        d = self.node.create_subdirectory(name, kids, overwrite=False, mutable=False)
        d.addCallback(lambda child: child.get_uri()) # TODO: urlencode
        return d

    def _POST_upload(self, req):
        charset = get_arg(req, "_charset", "utf-8")
        contents = req.fields["file"]
        assert contents.filename is None or isinstance(contents.filename, str)
        name = get_arg(req, "name")
        name = name or contents.filename
        if name is not None:
            name = name.strip()
        if not name:
            # this prohibits empty, missing, and all-whitespace filenames
            raise WebError("upload requires a name")
        assert isinstance(name, str)
        name = name.decode(charset)
        if "/" in name:
            raise WebError("name= may not contain a slash", http.BAD_REQUEST)
        assert isinstance(name, unicode)

        # since POST /uri/path/file?t=upload is equivalent to
        # POST /uri/path/dir?t=upload&name=foo, just do the same thing that
        # childFactory would do. Things are cleaner if we only do a subset of
        # them, though, so we don't do: d = self.childFactory(req, name)

        d = self.node.get(name)
        def _maybe_got_node(node_or_failure):
            if isinstance(node_or_failure, Failure):
                f = node_or_failure
                f.trap(NoSuchChildError)
                # create a placeholder which will see POST t=upload
                return PlaceHolderNodeHandler(self.client, self.node, name)
            else:
                node = node_or_failure
                return make_handler_for(node, self.client, self.node, name)
        d.addBoth(_maybe_got_node)
        # now we have a placeholder or a filenodehandler, and we can just
        # delegate to it. We could return the resource back out of
        # DirectoryNodeHandler.renderHTTP, and nevow would recurse into it,
        # but the addCallback() that handles when_done= would break.
        d.addCallback(lambda child: child.renderHTTP(req))
        return d

    def _POST_uri(self, req):
        childcap = get_arg(req, "uri")
        if not childcap:
            raise WebError("set-uri requires a uri")
        name = get_arg(req, "name")
        if not name:
            raise WebError("set-uri requires a name")
        charset = get_arg(req, "_charset", "utf-8")
        name = name.decode(charset)
        replace = parse_replace_arg(get_arg(req, "replace", "true"))

        # We mustn't pass childcap for the readcap argument because we don't
        # know whether it is a read cap. Passing a read cap as the writecap
        # argument will work (it ends up calling NodeMaker.create_from_cap,
        # which derives a readcap if necessary and possible).
        d = self.node.set_uri(name, childcap, None, overwrite=replace)
        d.addCallback(lambda res: childcap)
        return d

    def _POST_unlink(self, req):
        name = get_arg(req, "name")
        if name is None:
            # apparently an <input type="hidden" name="name" value="">
            # won't show up in the resulting encoded form.. the 'name'
            # field is completely missing. So to allow unlinking of a
            # child with a name that is the empty string, we have to
            # pretend that None means ''. The only downside of this is
            # a slightly confusing error message if someone does a POST
            # without a name= field. For our own HTML this isn't a big
            # deal, because we create the 'unlink' POST buttons ourselves.
            name = ''
        charset = get_arg(req, "_charset", "utf-8")
        name = name.decode(charset)
        d = self.node.delete(name)
        d.addCallback(lambda res: "thing unlinked")
        return d

    def _POST_rename(self, req):
        # rename is identical to relink, but to_dir is not allowed
        # and to_name is required.
        if get_arg(req, "to_dir") is not None:
            raise WebError("to_dir= is not valid for rename")
        if get_arg(req, "to_name") is None:
            raise WebError("to_name= is required for rename")
        return self._POST_relink(req)

    def _POST_relink(self, req):
        charset = get_arg(req, "_charset", "utf-8")
        replace = parse_replace_arg(get_arg(req, "replace", "true"))

        from_name = get_arg(req, "from_name")
        if from_name is not None:
            from_name = from_name.strip()
            from_name = from_name.decode(charset)
            assert isinstance(from_name, unicode)
        else:
            raise WebError("from_name= is required")

        to_name = get_arg(req, "to_name")
        if to_name is not None:
            to_name = to_name.strip()
            to_name = to_name.decode(charset)
            assert isinstance(to_name, unicode)
        else:
            to_name = from_name

        # Disallow slashes in both from_name and to_name, that would only
        # cause confusion.
        if "/" in from_name:
            raise WebError("from_name= may not contain a slash",
                           http.BAD_REQUEST)
        if "/" in to_name:
            raise WebError("to_name= may not contain a slash",
                           http.BAD_REQUEST)

        to_dir = get_arg(req, "to_dir")
        if to_dir is not None and to_dir != self.node.get_write_uri():
            to_dir = to_dir.strip()
            to_dir = to_dir.decode(charset)
            assert isinstance(to_dir, unicode)
            to_path = to_dir.split(u"/")
            to_root = self.client.nodemaker.create_from_cap(to_str(to_path[0]))
            if not IDirectoryNode.providedBy(to_root):
                raise WebError("to_dir is not a directory", http.BAD_REQUEST)
            d = to_root.get_child_at_path(to_path[1:])
        else:
            d = defer.succeed(self.node)

        def _got_new_parent(new_parent):
            if not IDirectoryNode.providedBy(new_parent):
                raise WebError("to_dir is not a directory", http.BAD_REQUEST)

            return self.node.move_child_to(from_name, new_parent,
                                           to_name, replace)
        d.addCallback(_got_new_parent)
        d.addCallback(lambda res: "thing moved")
        return d

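    # Check/repair results can be empty (e.g. for literal nodes, which have
    # nothing stored on servers); fall back to LiteralCheckResultsRenderer
    # when there is nothing to render.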
    def _maybe_literal(self, res, Results_Class):
        if res:
            return Results_Class(self.client, res)
        return LiteralCheckResultsRenderer(self.client)

    def _POST_check(self, req):
        # check this directory
        verify = boolean_of_arg(get_arg(req, "verify", "false"))
        repair = boolean_of_arg(get_arg(req, "repair", "false"))
        add_lease = boolean_of_arg(get_arg(req, "add-lease", "false"))
        if repair:
            d = self.node.check_and_repair(Monitor(), verify, add_lease)
            d.addCallback(self._maybe_literal, CheckAndRepairResultsRenderer)
        else:
            d = self.node.check(Monitor(), verify, add_lease)
            d.addCallback(self._maybe_literal, CheckResultsRenderer)
        return d

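    # Register a long-running operation (monitor + renderer) under its
    # ophandle and redirect the client to the page that tracks it.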
    def _start_operation(self, monitor, renderer, ctx):
        self._operations.add_monitor(ctx, monitor, renderer)
        return self._operations.redirect_to(ctx)

    def _POST_start_deep_check(self, req):
        # check this directory and everything reachable from it
        if not get_arg(req, "ophandle"):
            raise NeedOperationHandleError("slow operation requires ophandle=")
        verify = boolean_of_arg(get_arg(req, "verify", "false"))
        repair = boolean_of_arg(get_arg(req, "repair", "false"))
        add_lease = boolean_of_arg(get_arg(req, "add-lease", "false"))
        if repair:
            monitor = self.node.start_deep_check_and_repair(verify, add_lease)
            renderer = DeepCheckAndRepairResultsRenderer(self.client, monitor)
        else:
            monitor = self.node.start_deep_check(verify, add_lease)
            renderer = DeepCheckResultsRenderer(self.client, monitor)
        return self._start_operation(monitor, renderer, req)

    def _POST_stream_deep_check(self, req):
        verify = boolean_of_arg(get_arg(req, "verify", "false"))
        repair = boolean_of_arg(get_arg(req, "repair", "false"))
        add_lease = boolean_of_arg(get_arg(req, "add-lease", "false"))
        walker = DeepCheckStreamer(req, self.node, verify, repair, add_lease)
        monitor = self.node.deep_traverse(walker)
        walker.setMonitor(monitor)
        # register to hear stopProducing. The walker ignores pauseProducing.
        req.registerProducer(walker, True)
        d = monitor.when_done()
        def _done(res):
            req.unregisterProducer()
            return res
        d.addBoth(_done)
        def _cancelled(f):
            f.trap(OperationCancelledError)
            return "Operation Cancelled"
        d.addErrback(_cancelled)
        def _error(f):
            # signal the error as a non-JSON "ERROR:" line, plus exception
            msg = "ERROR: %s(%s)\n" % (f.value.__class__.__name__,
                                       ", ".join([str(a) for a in f.value.args]))
            msg += str(f)
            return msg
        d.addErrback(_error)
        return d

    def _POST_start_manifest(self, req):
        if not get_arg(req, "ophandle"):
            raise NeedOperationHandleError("slow operation requires ophandle=")
        monitor = self.node.build_manifest()
        renderer = ManifestResults(self.client, monitor)
        return self._start_operation(monitor, renderer, req)

    def _POST_start_deep_size(self, req):
        if not get_arg(req, "ophandle"):
            raise NeedOperationHandleError("slow operation requires ophandle=")
        monitor = self.node.start_deep_stats()
        renderer = DeepSizeResults(self.client, monitor)
        return self._start_operation(monitor, renderer, req)

    def _POST_start_deep_stats(self, req):
        if not get_arg(req, "ophandle"):
            raise NeedOperationHandleError("slow operation requires ophandle=")
        monitor = self.node.start_deep_stats()
        renderer = DeepStatsResults(self.client, monitor)
        return self._start_operation(monitor, renderer, req)

    def _POST_stream_manifest(self, req):
        walker = ManifestStreamer(req, self.node)
        monitor = self.node.deep_traverse(walker)
        walker.setMonitor(monitor)
        # register to hear stopProducing. The walker ignores pauseProducing.
        req.registerProducer(walker, True)
        d = monitor.when_done()
        def _done(res):
            req.unregisterProducer()
            return res
        d.addBoth(_done)
        def _cancelled(f):
            f.trap(OperationCancelledError)
            return "Operation Cancelled"
        d.addErrback(_cancelled)
        def _error(f):
            # signal the error as a non-JSON "ERROR:" line, plus exception
            msg = "ERROR: %s(%s)\n" % (f.value.__class__.__name__,
                                       ", ".join([str(a) for a in f.value.args]))
            msg += str(f)
            return msg
        d.addErrback(_error)
        return d

    def _POST_set_children(self, req):
        replace = parse_replace_arg(get_arg(req, "replace", "true"))
        req.content.seek(0)
        body = req.content.read()
        try:
            children = json.loads(body)
        except ValueError as le:
            le.args = tuple(le.args + (body,))
            # TODO test handling of bad JSON
            raise
        cs = {}
        for name, (file_or_dir, mddict) in children.iteritems():
            name = unicode(name) # json returns str *or* unicode
            writecap = mddict.get('rw_uri')
            if writecap is not None:
                writecap = str(writecap)
            readcap = mddict.get('ro_uri')
            if readcap is not None:
                readcap = str(readcap)
            cs[name] = (writecap, readcap, mddict.get('metadata'))
        d = self.node.set_children(cs, replace)
        d.addCallback(lambda res: "Okay so I did it.")
        # TODO: results
        return d


def abbreviated_dirnode(dirnode):
    u = from_string_dirnode(dirnode.get_uri())
    return u.abbrev_si()

SPACE = u"\u00A0"*2


class DirectoryAsHTML(Element):
    # The remainder of this class is to render the directory into
    # human+browser -oriented HTML.
    loader = XMLFile(FilePath(__file__).sibling("directory.xhtml"))

    def __init__(self, node, default_mutable_format):
        super(DirectoryAsHTML, self).__init__()
        self.node = node
        if default_mutable_format not in (MDMF_VERSION, SDMF_VERSION):
            raise ValueError(
                "Unknown mutable format '{}'".format(default_mutable_format)
            )
        self.default_mutable_format = default_mutable_format

    def render(self, request):
        """
        Override Element.render .. we have async work to do before we flatten our template
        """
        # XXX should this be a helper like MultiFormatResource etc?
        # i.e. AsyncElement or something?
        template = Element.render(self, request)

        # XXX what's the -> twisted.web.template version of this.

    # beforeRender is literally just a (possibly-deferred) thing
    # that's called before render .. that is, in renderHTTP .. so I
    # think we can just call it "_gather_children" or something and
    # call it in our render?
    def beforeRender(self, req):
        # attempt to get the dirnode's children, stashing them (or the
        # failure that results) for later use
        d = self.node.list()
        def _good(children):
            # Deferreds don't optimize out tail recursion, and the way
            # Nevow's flattener handles Deferreds doesn't take this into
            # account. As a result, large lists of Deferreds that fire in the
            # same turn (i.e. the output of defer.succeed) will cause a stack
            # overflow. To work around this, we insert a turn break after
            # every 100 items, using foolscap's fireEventually(). This gives
            # the stack a chance to be popped. It would also work to put
            # every item in its own turn, but that'd be a lot more
            # inefficient. This addresses ticket #237, for which I was never
            # able to create a failing unit test.
            output = []
            for i,item in enumerate(sorted(children.items())):
                if i % 100 == 0:
                    output.append(fireEventually(item))
                else:
                    output.append(item)
            self.dirnode_children = output
            return req
        def _bad(f):
            text, code = humanize_failure(f)
            self.dirnode_children = None
            self.dirnode_children_error = text
            return req
        d.addCallbacks(_good, _bad)
        return d

    def render_title(self, req, data):
        si_s = abbreviated_dirnode(self.node)
        header = ["Tahoe-LAFS - Directory SI=%s" % si_s]
        if self.node.is_unknown():
            header.append(" (unknown)")
        elif not self.node.is_mutable():
            header.append(" (immutable)")
        elif self.node.is_readonly():
            header.append(" (read-only)")
        else:
            header.append(" (modifiable)")
        return req.tag[header]

    def render_header(self, req, data):
        si_s = abbreviated_dirnode(self.node)
        header = ["Tahoe-LAFS Directory SI=", T.span(class_="data-chars")[si_s]]
        if self.node.is_unknown():
            header.append(" (unknown)")
        elif not self.node.is_mutable():
            header.append(" (immutable)")
        elif self.node.is_readonly():
            header.append(" (read-only)")
        return req.tag[header]

    def render_welcome(self, req, data):
        link = get_root(req)
        return req.tag[T.a(href=link)["Return to Welcome page"]]

    def render_show_readonly(self, req, data):
        if self.node.is_unknown() or self.node.is_readonly():
            return ""
        rocap = self.node.get_readonly_uri()
        root = get_root(req)
        uri_link = "%s/uri/%s/" % (root, urllib.quote(rocap))
        return req.tag[T.a(href=uri_link)["Read-Only Version"]]

    def render_try_children(self, req, data):
        # if the dirnode can be retrieved, render a table of children.
        # Otherwise, render an apologetic error message.
        if self.dirnode_children is not None:
            return req.tag
        else:
            return T.div[T.p["Error reading directory:"],
                         T.p[self.dirnode_children_error]]

    def data_children(self, req, data):
        return self.dirnode_children

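    # Fills one table row of directory.xhtml: the unlink/rename forms,
    # timestamps, and the filename/type/size/info slots, branching on
    # whether the child is a mutable file, immutable file, directory,
    # blacklisted (ProhibitedNode) or unknown node.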
    def render_row(self, req, data):
        name, (target, metadata) = data
        name = name.encode("utf-8")
        assert not isinstance(name, unicode)
        nameurl = urllib.quote(name, safe="") # encode any slashes too

        root = get_root(req)
        here = "%s/uri/%s/" % (root, urllib.quote(self.node.get_uri()))
        if self.node.is_unknown() or self.node.is_readonly():
            unlink = "-"
            rename = "-"
        else:
            # this creates a button which will cause our _POST_unlink method
            # to be invoked, which unlinks the file and then redirects the
            # browser back to this directory
            unlink = T.form(action=here, method="post")[
                T.input(type='hidden', name='t', value='unlink'),
                T.input(type='hidden', name='name', value=name),
                T.input(type='hidden', name='when_done', value="."),
                T.input(type='submit', _class='btn', value='unlink', name="unlink"),
                ]

            rename = T.form(action=here, method="get")[
                T.input(type='hidden', name='t', value='rename-form'),
                T.input(type='hidden', name='name', value=name),
                T.input(type='hidden', name='when_done', value="."),
                T.input(type='submit', _class='btn', value='rename/relink', name="rename"),
                ]

        req.fillSlots("unlink", unlink)
        req.fillSlots("rename", rename)

        times = []
        linkcrtime = metadata.get('tahoe', {}).get("linkcrtime")
        if linkcrtime is not None:
            times.append("lcr: " + render_time(linkcrtime))
        else:
            # For backwards-compatibility with links last modified by Tahoe < 1.4.0:
            if "ctime" in metadata:
                ctime = render_time(metadata["ctime"])
                times.append("c: " + ctime)
        linkmotime = metadata.get('tahoe', {}).get("linkmotime")
        if linkmotime is not None:
            if times:
                times.append(T.br())
            times.append("lmo: " + render_time(linkmotime))
        else:
            # For backwards-compatibility with links last modified by Tahoe < 1.4.0:
            if "mtime" in metadata:
                mtime = render_time(metadata["mtime"])
                if times:
                    times.append(T.br())
                times.append("m: " + mtime)
        req.fillSlots("times", times)

        assert IFilesystemNode.providedBy(target), target
        target_uri = target.get_uri() or ""
        quoted_uri = urllib.quote(target_uri, safe="") # escape slashes too

        if IMutableFileNode.providedBy(target):
            # to prevent javascript in displayed .html files from stealing a
            # secret directory URI from the URL, send the browser to a URI-based
            # page that doesn't know about the directory at all
            dlurl = "%s/file/%s/@@named=/%s" % (root, quoted_uri, nameurl)

            req.fillSlots("filename", T.a(href=dlurl, rel="noreferrer")[name])
            req.fillSlots("type", "SSK")

            req.fillSlots("size", "?")

            info_link = "%s/uri/%s?t=info" % (root, quoted_uri)

        elif IImmutableFileNode.providedBy(target):
            dlurl = "%s/file/%s/@@named=/%s" % (root, quoted_uri, nameurl)

            req.fillSlots("filename", T.a(href=dlurl, rel="noreferrer")[name])
            req.fillSlots("type", "FILE")

            req.fillSlots("size", target.get_size())

            info_link = "%s/uri/%s?t=info" % (root, quoted_uri)

        elif IDirectoryNode.providedBy(target):
            # directory
            uri_link = "%s/uri/%s/" % (root, urllib.quote(target_uri))
            req.fillSlots("filename", T.a(href=uri_link)[name])
            if not target.is_mutable():
                dirtype = "DIR-IMM"
            elif target.is_readonly():
                dirtype = "DIR-RO"
            else:
                dirtype = "DIR"
            req.fillSlots("type", dirtype)
            req.fillSlots("size", "-")
            info_link = "%s/uri/%s/?t=info" % (root, quoted_uri)

        elif isinstance(target, ProhibitedNode):
            req.fillSlots("filename", T.strike[name])
            if IDirectoryNode.providedBy(target.wrapped_node):
                blacklisted_type = "DIR-BLACKLISTED"
            else:
                blacklisted_type = "BLACKLISTED"
            req.fillSlots("type", blacklisted_type)
            req.fillSlots("size", "-")
            info_link = None
            req.fillSlots("info", ["Access Prohibited:", T.br, target.reason])

        else:
            # unknown
            req.fillSlots("filename", name)
            if target.get_write_uri() is not None:
                unknowntype = "?"
            elif not self.node.is_mutable() or target.is_alleged_immutable():
                unknowntype = "?-IMM"
            else:
                unknowntype = "?-RO"
            req.fillSlots("type", unknowntype)
            req.fillSlots("size", "-")
            # use a directory-relative info link, so we can extract both the
            # writecap and the readcap
            info_link = "%s?t=info" % urllib.quote(name)

        if info_link:
            req.fillSlots("info", T.a(href=info_link)["More Info"])

        return req.tag

    # XXX: similar to render_upload_form and render_mkdir_form in root.py.
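    # Builds the mkdir / upload / "attach by URI" forms shown under the
    # listing; read-only or unreadable directories get a short message
    # instead.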
    def render_forms(self, req, data):
        forms = []

        if self.node.is_readonly():
            return T.div["No upload forms: directory is read-only"]
        if self.dirnode_children is None:
            return T.div["No upload forms: directory is unreadable"]

        mkdir_sdmf = T.input(type='radio', name='format',
                             value='sdmf', id='mkdir-sdmf',
                             checked='checked')
        mkdir_mdmf = T.input(type='radio', name='format',
                             value='mdmf', id='mkdir-mdmf')

        mkdir_form = T.form(action=".", method="post",
                            enctype="multipart/form-data")[
            T.fieldset[
                T.input(type="hidden", name="t", value="mkdir"),
                T.input(type="hidden", name="when_done", value="."),
                T.legend(class_="freeform-form-label")["Create a new directory in this directory"],
                "New directory name:"+SPACE, T.br,
                T.input(type="text", name="name"), SPACE,
                T.div(class_="form-inline")[
                    mkdir_sdmf, T.label(for_='mutable-directory-sdmf')[SPACE, "SDMF"], SPACE*2,
                    mkdir_mdmf, T.label(for_='mutable-directory-mdmf')[SPACE, "MDMF (experimental)"]
                ],
                T.input(type="submit", class_="btn", value="Create")
            ]]
        forms.append(T.div(class_="freeform-form")[mkdir_form])

        upload_chk = T.input(type='radio', name='format',
                             value='chk', id='upload-chk',
                             checked='checked')
        upload_sdmf = T.input(type='radio', name='format',
                              value='sdmf', id='upload-sdmf')
        upload_mdmf = T.input(type='radio', name='format',
                              value='mdmf', id='upload-mdmf')

        upload_form = T.form(action=".", method="post",
                             enctype="multipart/form-data")[
            T.fieldset[
                T.input(type="hidden", name="t", value="upload"),
                T.input(type="hidden", name="when_done", value="."),
                T.legend(class_="freeform-form-label")["Upload a file to this directory"],
                "Choose a file to upload:"+SPACE,
                T.input(type="file", name="file", class_="freeform-input-file"), SPACE,
                T.div(class_="form-inline")[
                    upload_chk, T.label(for_="upload-chk") [SPACE, "Immutable"], SPACE*2,
                    upload_sdmf, T.label(for_="upload-sdmf")[SPACE, "SDMF"], SPACE*2,
                    upload_mdmf, T.label(for_="upload-mdmf")[SPACE, "MDMF (experimental)"]
                ],
                T.input(type="submit", class_="btn", value="Upload"), SPACE*2,
            ]]
        forms.append(T.div(class_="freeform-form")[upload_form])

        attach_form = T.form(action=".", method="post",
                             enctype="multipart/form-data")[
            T.fieldset[ T.div(class_="form-inline")[
                T.input(type="hidden", name="t", value="uri"),
                T.input(type="hidden", name="when_done", value="."),
                T.legend(class_="freeform-form-label")["Add a link to a file or directory which is already in Tahoe-LAFS."],
                "New child name:"+SPACE,
                T.input(type="text", name="name"), SPACE*2, T.br,
                "URI of new child:"+SPACE,
                T.input(type="text", name="uri"), SPACE,
                T.input(type="submit", class_="btn", value="Attach"),
            ]]]
        forms.append(T.div(class_="freeform-form")[attach_form])
        return forms

    def render_results(self, req, data):
        return get_arg(req, "results", "")


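# Renders the ?t=json view.  The reply, sketched from the code below, is
# shaped roughly like:
#   [ "dirnode", { "rw_uri": ..., "ro_uri": ..., "verify_uri": ...,
#                  "mutable": ...,
#                  "children": { name: [ "filenode" | "dirnode" | "unknown",
#                                        { "ro_uri": ..., "rw_uri": ...,
#                                          "verify_uri": ...,
#                                          "metadata": ... } ] } } ]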
def DirectoryJSONMetadata(req, dirnode):
    d = dirnode.list()
    def _got(children):
        kids = {}
        for name, (childnode, metadata) in children.iteritems():
            assert IFilesystemNode.providedBy(childnode), childnode
            rw_uri = childnode.get_write_uri()
            ro_uri = childnode.get_readonly_uri()
            if IFileNode.providedBy(childnode):
                kiddata = ("filenode", get_filenode_metadata(childnode))
            elif IDirectoryNode.providedBy(childnode):
                kiddata = ("dirnode", {'mutable': childnode.is_mutable()})
            else:
                kiddata = ("unknown", {})

            kiddata[1]["metadata"] = metadata
            if rw_uri:
                kiddata[1]["rw_uri"] = rw_uri
            if ro_uri:
                kiddata[1]["ro_uri"] = ro_uri
            verifycap = childnode.get_verify_cap()
            if verifycap:
                kiddata[1]['verify_uri'] = verifycap.to_string()

            kids[name] = kiddata

        drw_uri = dirnode.get_write_uri()
        dro_uri = dirnode.get_readonly_uri()
        contents = { 'children': kids }
        if dro_uri:
            contents['ro_uri'] = dro_uri
        if drw_uri:
            contents['rw_uri'] = drw_uri
        verifycap = dirnode.get_verify_cap()
        if verifycap:
            contents['verify_uri'] = verifycap.to_string()
        contents['mutable'] = dirnode.is_mutable()
        data = ("dirnode", contents)
        return json.dumps(data, indent=1) + "\n"
    d.addCallback(_got)
    d.addCallback(text_plain, req)

    def error(f):
        message, code = humanize_failure(f)
        req.setResponseCode(code)
        return json.dumps({
            "error": message,
        })
    d.addErrback(error)
    return d


def DirectoryURI(req, dirnode):
    return text_plain(dirnode.get_uri(), req)


def DirectoryReadonlyURI(req, dirnode):
    return text_plain(dirnode.get_readonly_uri(), req)


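# Note: RenameForm still carries nevow-era attributes (addSlash, docFactory
# via getxmlfile) and its renderers expect a `self.original` dirnode to be
# supplied by the caller; no __init__ here sets it.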
class RenameForm(Element):
|
2008-05-19 19:57:04 +00:00
|
|
|
addSlash = True
|
|
|
|
docFactory = getxmlfile("rename-form.xhtml")
|
|
|
|
|
2019-08-23 19:31:23 +00:00
|
|
|
def render_title(self, req, data):
|
|
|
|
return req.tag["Directory SI=%s" % abbreviated_dirnode(self.original)]
|
2008-05-19 19:57:04 +00:00
|
|
|
|
2019-08-23 19:31:23 +00:00
|
|
|
def render_header(self, req, data):
|
2008-05-19 19:57:04 +00:00
|
|
|
header = ["Rename "
|
|
|
|
"in directory SI=%s" % abbreviated_dirnode(self.original),
|
|
|
|
]
|
|
|
|
|
|
|
|
if self.original.is_readonly():
|
|
|
|
header.append(" (readonly!)")
|
|
|
|
header.append(":")
|
2019-08-23 19:31:23 +00:00
|
|
|
return req.tag[header]
|
2008-05-19 19:57:04 +00:00
|
|
|
|
2019-08-23 19:31:23 +00:00
|
|
|
def render_when_done(self, req, data):
|
2008-06-18 02:49:40 +00:00
|
|
|
return T.input(type="hidden", name="when_done", value=".")
|
2008-05-19 19:57:04 +00:00
|
|
|
|
2019-08-23 19:31:23 +00:00
|
|
|
def render_get_name(self, req, data):
|
|
|
|
|
2008-05-19 19:57:04 +00:00
|
|
|
name = get_arg(req, "name", "")
|
2019-08-23 19:31:23 +00:00
|
|
|
req.tag.attributes['value'] = name
|
|
|
|
return req.tag
|
2008-05-19 19:57:04 +00:00
|
|
|
|
2019-11-05 06:38:43 +00:00
|
|
|
|
2019-11-17 02:23:58 +00:00
|
|
|
class ReloadableMonitorElement(Element):
|
2019-11-05 06:38:43 +00:00
|
|
|
"""
|
|
|
|
Like 'ReloadMixin', but for twisted.web.template style. This
|
|
|
|
provides renderers for "reload" and "refesh" and a self.monitor
|
2019-11-17 02:23:58 +00:00
|
|
|
attribute (which is an instance of IMonitor)
|
2019-11-05 06:38:43 +00:00
|
|
|
"""
|
2019-11-17 02:23:20 +00:00
|
|
|
refresh_time = timedelta(seconds=60)
|
2019-11-05 06:38:43 +00:00
|
|
|
|
|
|
|
def __init__(self, monitor):
|
|
|
|
self.monitor = monitor
|
|
|
|
super(ReloadableMonitorElement, self).__init__()
|
|
|
|
|
|
|
|
@renderer
|
|
|
|
def refresh(self, req, tag):
|
|
|
|
if self.monitor.is_finished():
|
2019-11-17 02:23:44 +00:00
|
|
|
return u""
|
|
|
|
tag.attributes[u"http-equiv"] = u"refresh"
|
|
|
|
tag.attributes[u"content"] = u"{}".format(self.refresh_time.seconds)
|
2019-11-05 06:38:43 +00:00
|
|
|
return tag
|
|
|
|
|
|
|
|
@renderer
|
|
|
|
def reload(self, req, tag):
|
|
|
|
if self.monitor.is_finished():
|
2019-11-17 02:23:44 +00:00
|
|
|
return u""
|
|
|
|
reload_url = URL.from_text(u"{}".format(req.path))
|
2019-11-05 06:38:43 +00:00
|
|
|
cancel_button = tags.form(
|
|
|
|
[
|
2019-11-17 02:23:44 +00:00
|
|
|
tags.input(type=u"submit", value=u"Cancel"),
|
2019-11-05 06:38:43 +00:00
|
|
|
],
|
2019-11-17 02:23:44 +00:00
|
|
|
action=reload_url.replace(query={u"t": u"cancel"}).to_uri().to_text(),
|
|
|
|
method=u"POST",
|
|
|
|
enctype=u"multipart/form-data",
|
2019-11-05 06:38:43 +00:00
|
|
|
)
|
|
|
|
|
|
|
|
return tag([
|
2019-11-17 02:23:44 +00:00
|
|
|
u"Operation still running: ",
|
|
|
|
tags.a(
|
|
|
|
u"Reload",
|
|
|
|
href=reload_url.replace(query={u"output": u"html"}).to_uri().to_text(),
|
|
|
|
),
|
2019-11-05 06:38:43 +00:00
|
|
|
cancel_button,
|
|
|
|
])
|
|
|
|
|
|
|
|
|
|
|
|
def _slashify_path(path):
|
|
|
|
"""
|
|
|
|
Converts a tuple from a 'manifest' path into a string with slashes
|
|
|
|
in it
|
|
|
|
"""
|
|
|
|
if not path:
|
|
|
|
return ""
|
|
|
|
return "/".join([p.encode("utf-8") for p in path])
|
|
|
|
|
|
|
|
|
|
|
|
def _cap_to_link(root, path, cap):
|
|
|
|
"""
|
|
|
|
Turns a capability-string into a WebAPI link tag
|
|
|
|
|
2019-11-17 02:23:44 +00:00
|
|
|
:param text root: the root piece of the URI
|
2019-11-05 06:38:43 +00:00
|
|
|
|
2019-11-17 02:23:44 +00:00
|
|
|
:param text cap: the capability-string
|
2019-11-05 06:38:43 +00:00
|
|
|
|
2019-11-17 02:23:44 +00:00
|
|
|
:returns: something suitable for `IRenderable`, specifically
|
|
|
|
either a valid local link (tags.a instance) to the capability
|
|
|
|
or an empty string.
|
2019-11-05 06:38:43 +00:00
|
|
|
"""
|
|
|
|
if cap:
|
2019-11-17 02:23:44 +00:00
|
|
|
root_url = URL.from_text(u"{}".format(root))
|
2019-12-21 23:01:23 +00:00
|
|
|
cap_obj = from_string(cap)
|
|
|
|
if isinstance(cap_obj, (CHKFileURI, WriteableSSKFileURI, ReadonlySSKFileURI)):
|
2019-11-17 02:23:44 +00:00
|
|
|
uri_link = root_url.child(
|
|
|
|
u"file",
|
|
|
|
u"{}".format(urllib.quote(cap)),
|
|
|
|
u"{}".format(urllib.quote(path[-1])),
|
|
|
|
)
|
2019-11-05 06:38:43 +00:00
|
|
|
else:
|
2019-11-17 02:23:44 +00:00
|
|
|
uri_link = root_url.child(
|
|
|
|
u"uri",
|
|
|
|
u"{}".format(urllib.quote(cap, safe="")),
|
|
|
|
)
|
|
|
|
return tags.a(cap, href=uri_link.to_text())
|
2019-11-05 06:38:43 +00:00
|
|
|
else:
|
2019-11-17 02:23:44 +00:00
|
|
|
return u""
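# A sketch of the hrefs this produces (hypothetical root and caps):
#
#     file-caps (CHK/SSK):  <root>/file/<quoted cap>/<quoted last path component>
#     anything else:        <root>/uri/<quoted cap>
#
# In both cases the capability string itself is used as the link text.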
|
2019-11-05 06:38:43 +00:00
|
|
|
|
|
|
|
|
|
|
|
class ManifestElement(ReloadableMonitorElement):
|
|
|
|
loader = XMLFile(FilePath(__file__).sibling("manifest.xhtml"))
|
|
|
|
|
|
|
|
def _si_abbrev(self):
|
|
|
|
si = self.monitor.origin_si
|
|
|
|
if not si:
|
|
|
|
return "<LIT>"
|
|
|
|
return base32.b2a(si)[:6]
|
|
|
|
|
|
|
|
@renderer
|
|
|
|
def title(self, req, tag):
|
|
|
|
return tag(
|
|
|
|
"Manifest of SI={}".format(self._si_abbrev())
|
|
|
|
)
|
|
|
|
|
|
|
|
@renderer
|
|
|
|
def header(self, req, tag):
|
|
|
|
return tag(
|
|
|
|
"Manifest of SI={}".format(self._si_abbrev())
|
|
|
|
)
|
|
|
|
|
|
|
|
@renderer
|
|
|
|
def items(self, req, tag):
|
|
|
|
manifest = self.monitor.get_status()["manifest"]
|
|
|
|
root = get_root(req)
|
|
|
|
rows = [
|
|
|
|
{
|
|
|
|
"path": _slashify_path(path),
|
|
|
|
"cap": _cap_to_link(root, path, cap),
|
|
|
|
}
|
|
|
|
for path, cap in manifest
|
|
|
|
]
|
|
|
|
return SlotsSequenceElement(tag, rows)
|
|
|
|
|
|
|
|
|
|
|
|
class ManifestResults(MultiFormatResource, ReloadMixin):
|
2008-05-19 19:57:04 +00:00
|
|
|
|
2017-07-25 15:15:17 +00:00
|
|
|
# Control MultiFormatResource
|
|
|
|
formatArgument = "output"
|
|
|
|
formatDefault = "html"
|
|
|
|
|
2009-02-20 19:15:54 +00:00
|
|
|
def __init__(self, client, monitor):
|
|
|
|
self.client = client
|
2008-10-22 00:03:07 +00:00
|
|
|
self.monitor = monitor
|
|
|
|
|
2019-11-05 06:38:43 +00:00
|
|
|
def render_HTML(self, req):
|
|
|
|
return renderElement(
|
|
|
|
req,
|
|
|
|
ManifestElement(self.monitor)
|
|
|
|
)
|
2008-10-07 04:36:18 +00:00
|
|
|
|
2017-07-25 15:15:17 +00:00
|
|
|
def render_TEXT(self, req):
|
2009-02-20 19:15:54 +00:00
|
|
|
req.setHeader("content-type", "text/plain")
|
2008-10-22 00:03:07 +00:00
|
|
|
lines = []
|
2008-10-22 00:52:56 +00:00
|
|
|
is_finished = self.monitor.is_finished()
|
|
|
|
lines.append("finished: " + {True: "yes", False: "no"}[is_finished])
|
2019-11-05 06:38:43 +00:00
|
|
|
for path, cap in self.monitor.get_status()["manifest"]:
|
|
|
|
lines.append(_slashify_path(path) + " " + cap)
|
2008-10-22 00:03:07 +00:00
|
|
|
return "\n".join(lines) + "\n"
|
2008-10-07 04:36:18 +00:00
|
|
|
|
2017-07-25 15:15:17 +00:00
|
|
|
def render_JSON(self, req):
|
2009-02-20 19:15:54 +00:00
|
|
|
req.setHeader("content-type", "text/plain")
|
2008-10-22 00:03:07 +00:00
|
|
|
m = self.monitor
|
2008-11-24 21:40:46 +00:00
|
|
|
s = m.get_status()
|
2009-01-09 02:59:32 +00:00
|
|
|
|
2010-02-27 06:55:51 +00:00
|
|
|
if m.origin_si:
|
|
|
|
origin_base32 = base32.b2a(m.origin_si)
|
|
|
|
else:
|
|
|
|
origin_base32 = ""
|
2009-01-09 02:59:32 +00:00
|
|
|
status = { "stats": s["stats"],
|
|
|
|
"finished": m.is_finished(),
|
2010-02-27 06:55:51 +00:00
|
|
|
"origin": origin_base32,
|
2009-01-09 02:59:32 +00:00
|
|
|
}
|
|
|
|
if m.is_finished():
|
|
|
|
# don't return manifest/verifycaps/SIs unless the operation is
|
|
|
|
# done, to save on CPU/memory (both here and in the HTTP client
|
|
|
|
# who has to unpack the JSON). Tests show that the ManifestWalker
|
|
|
|
# needs about 1092 bytes per item, the JSON we generate here
|
|
|
|
# requires about 503 bytes per item, and some internal overhead
|
|
|
|
# (perhaps transport-layer buffers in twisted.web?) requires an
|
|
|
|
# additional 1047 bytes per item.
|
|
|
|
status.update({ "manifest": s["manifest"],
|
|
|
|
"verifycaps": [i for i in s["verifycaps"]],
|
|
|
|
"storage-index": [i for i in s["storage-index"]],
|
|
|
|
})
|
|
|
|
# json doesn't know how to serialize a set, so the verifycaps and
# storage-index sets are converted to lists (via the list
# comprehensions above) before serializing.
|
2017-01-19 22:39:53 +00:00
|
|
|
return json.dumps(status, indent=1)
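# A sketch of the JSON this returns (values are illustrative):
#
#     {
#      "stats": {...},
#      "finished": true,
#      "origin": "<base32 of the origin SI, or an empty string>",
#      "manifest": [[["subdir", "file.txt"], "URI:CHK:..."], ...],
#      "verifycaps": ["URI:CHK-Verifier:...", ...],
#      "storage-index": ["<base32>", ...]
#     }
#
# The manifest/verifycaps/storage-index keys appear only once the
# operation has finished, as described above.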
|
2008-10-22 00:03:07 +00:00
|
|
|
|
2008-05-19 19:57:04 +00:00
|
|
|
|
2017-07-25 15:15:17 +00:00
|
|
|
class DeepSizeResults(MultiFormatPage):
|
|
|
|
# Control MultiFormatPage
|
|
|
|
formatArgument = "output"
|
|
|
|
formatDefault = "html"
|
|
|
|
|
2009-02-20 19:15:54 +00:00
|
|
|
def __init__(self, client, monitor):
|
|
|
|
self.client = client
|
2008-10-22 00:03:07 +00:00
|
|
|
self.monitor = monitor
|
2008-05-19 19:57:04 +00:00
|
|
|
|
2017-07-25 15:15:17 +00:00
|
|
|
def render_HTML(self, req):
|
2008-10-22 00:52:56 +00:00
|
|
|
is_finished = self.monitor.is_finished()
|
|
|
|
output = "finished: " + {True: "yes", False: "no"}[is_finished] + "\n"
|
|
|
|
if is_finished:
|
2008-10-22 00:03:07 +00:00
|
|
|
stats = self.monitor.get_status()
|
|
|
|
total = (stats.get("size-immutable-files", 0)
|
|
|
|
+ stats.get("size-mutable-files", 0)
|
|
|
|
+ stats.get("size-directories", 0))
|
|
|
|
output += "size: %d\n" % total
|
|
|
|
return output
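# The output is deliberately plain text even for the HTML format, e.g.
# (illustrative numbers):
#
#     finished: yes
#     size: 12345
#
# where "size" is the sum of the immutable-file, mutable-file and
# directory sizes and appears only once the traversal has finished.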
|
2017-07-25 15:15:17 +00:00
|
|
|
render_TEXT = render_HTML
|
2008-10-22 00:03:07 +00:00
|
|
|
|
2017-07-25 15:15:17 +00:00
|
|
|
def render_JSON(self, req):
|
|
|
|
req.setHeader("content-type", "text/plain")
|
2008-10-22 00:03:07 +00:00
|
|
|
status = {"finished": self.monitor.is_finished(),
|
|
|
|
"size": self.monitor.get_status(),
|
|
|
|
}
|
2017-01-19 22:39:53 +00:00
|
|
|
return json.dumps(status)
|
2008-10-22 00:03:07 +00:00
|
|
|
|
2019-08-23 19:31:23 +00:00
|
|
|
class DeepStatsResults(Resource):
|
2009-02-20 19:15:54 +00:00
|
|
|
def __init__(self, client, monitor):
|
|
|
|
self.client = client
|
2008-10-22 00:03:07 +00:00
|
|
|
self.monitor = monitor
|
|
|
|
|
2019-08-23 19:31:23 +00:00
|
|
|
def renderHTTP(self, req):
|
2008-10-22 00:03:07 +00:00
|
|
|
# JSON only
|
2019-08-23 19:31:23 +00:00
|
|
|
req.setHeader("content-type", "text/plain")
|
2008-10-22 00:03:07 +00:00
|
|
|
s = self.monitor.get_status().copy()
|
|
|
|
s["finished"] = self.monitor.is_finished()
|
2017-01-19 22:39:53 +00:00
|
|
|
return json.dumps(s, indent=1)
|
2009-01-23 05:01:36 +00:00
|
|
|
|
2017-02-27 17:56:49 +00:00
|
|
|
@implementer(IPushProducer)
|
2009-01-23 05:01:36 +00:00
|
|
|
class ManifestStreamer(dirnode.DeepStats):
|
|
|
|
|
2019-08-23 19:31:23 +00:00
|
|
|
def __init__(self, req, origin):
|
2009-01-23 05:01:36 +00:00
|
|
|
dirnode.DeepStats.__init__(self, origin)
|
2019-08-23 19:31:23 +00:00
|
|
|
self.req = req
|
2009-01-23 05:01:36 +00:00
|
|
|
|
2009-01-24 02:39:08 +00:00
|
|
|
def setMonitor(self, monitor):
|
|
|
|
self.monitor = monitor
|
|
|
|
def pauseProducing(self):
|
|
|
|
pass
|
|
|
|
def resumeProducing(self):
|
|
|
|
pass
|
|
|
|
def stopProducing(self):
|
|
|
|
self.monitor.cancel()
|
|
|
|
|
2009-01-23 05:01:36 +00:00
|
|
|
def add_node(self, node, path):
|
|
|
|
dirnode.DeepStats.add_node(self, node, path)
|
|
|
|
d = {"path": path,
|
|
|
|
"cap": node.get_uri()}
|
|
|
|
|
|
|
|
if IDirectoryNode.providedBy(node):
|
|
|
|
d["type"] = "directory"
|
2009-07-03 01:07:49 +00:00
|
|
|
elif IFileNode.providedBy(node):
|
2009-01-23 05:01:36 +00:00
|
|
|
d["type"] = "file"
|
2009-07-03 01:07:49 +00:00
|
|
|
else:
|
|
|
|
d["type"] = "unknown"
|
2009-01-23 05:01:36 +00:00
|
|
|
|
|
|
|
v = node.get_verify_cap()
|
|
|
|
if v:
|
|
|
|
v = v.to_string()
|
2010-02-24 08:02:20 +00:00
|
|
|
d["verifycap"] = v or ""
|
2009-01-23 05:01:36 +00:00
|
|
|
|
|
|
|
r = node.get_repair_cap()
|
|
|
|
if r:
|
|
|
|
r = r.to_string()
|
2010-02-24 08:02:20 +00:00
|
|
|
d["repaircap"] = r or ""
|
2009-01-23 05:01:36 +00:00
|
|
|
|
|
|
|
si = node.get_storage_index()
|
|
|
|
if si:
|
|
|
|
si = base32.b2a(si)
|
2010-02-24 08:02:20 +00:00
|
|
|
d["storage-index"] = si or ""
|
2009-01-23 05:01:36 +00:00
|
|
|
|
2017-01-19 22:39:53 +00:00
|
|
|
j = json.dumps(d, ensure_ascii=True)
|
2009-01-23 05:01:36 +00:00
|
|
|
assert "\n" not in j
|
|
|
|
self.req.write(j+"\n")
|
|
|
|
|
|
|
|
def finish(self):
|
|
|
|
stats = dirnode.DeepStats.get_results(self)
|
|
|
|
d = {"type": "stats",
|
|
|
|
"stats": stats,
|
|
|
|
}
|
2017-01-19 22:39:53 +00:00
|
|
|
j = json.dumps(d, ensure_ascii=True)
|
2009-01-23 05:01:36 +00:00
|
|
|
assert "\n" not in j
|
|
|
|
self.req.write(j+"\n")
|
|
|
|
return ""
|
2009-02-17 06:35:53 +00:00
|
|
|
|
2017-02-27 17:56:49 +00:00
|
|
|
@implementer(IPushProducer)
|
2009-02-17 06:35:53 +00:00
|
|
|
class DeepCheckStreamer(dirnode.DeepStats):
|
|
|
|
|
2019-08-23 19:31:23 +00:00
|
|
|
def __init__(self, req, origin, verify, repair, add_lease):
|
2009-02-17 06:35:53 +00:00
|
|
|
dirnode.DeepStats.__init__(self, origin)
|
2019-08-23 19:31:23 +00:00
|
|
|
self.req = req
|
2009-02-17 06:35:53 +00:00
|
|
|
self.verify = verify
|
|
|
|
self.repair = repair
|
2009-02-18 02:32:43 +00:00
|
|
|
self.add_lease = add_lease
|
2009-02-17 06:35:53 +00:00
|
|
|
|
|
|
|
def setMonitor(self, monitor):
|
|
|
|
self.monitor = monitor
|
|
|
|
def pauseProducing(self):
|
|
|
|
pass
|
|
|
|
def resumeProducing(self):
|
|
|
|
pass
|
|
|
|
def stopProducing(self):
|
|
|
|
self.monitor.cancel()
|
|
|
|
|
|
|
|
def add_node(self, node, path):
|
|
|
|
dirnode.DeepStats.add_node(self, node, path)
|
|
|
|
data = {"path": path,
|
|
|
|
"cap": node.get_uri()}
|
|
|
|
|
|
|
|
if IDirectoryNode.providedBy(node):
|
|
|
|
data["type"] = "directory"
|
2010-01-29 03:14:24 +00:00
|
|
|
elif IFileNode.providedBy(node):
|
2009-02-17 06:35:53 +00:00
|
|
|
data["type"] = "file"
|
2010-01-29 03:14:24 +00:00
|
|
|
else:
|
|
|
|
data["type"] = "unknown"
|
2009-02-17 06:35:53 +00:00
|
|
|
|
|
|
|
v = node.get_verify_cap()
|
|
|
|
if v:
|
|
|
|
v = v.to_string()
|
2010-02-24 08:02:20 +00:00
|
|
|
data["verifycap"] = v or ""
|
2009-02-17 06:35:53 +00:00
|
|
|
|
|
|
|
r = node.get_repair_cap()
|
|
|
|
if r:
|
|
|
|
r = r.to_string()
|
2010-02-24 08:02:20 +00:00
|
|
|
data["repaircap"] = r or ""
|
2009-02-17 06:35:53 +00:00
|
|
|
|
|
|
|
si = node.get_storage_index()
|
|
|
|
if si:
|
|
|
|
si = base32.b2a(si)
|
2010-02-24 08:02:20 +00:00
|
|
|
data["storage-index"] = si or ""
|
2009-02-17 06:35:53 +00:00
|
|
|
|
|
|
|
if self.repair:
|
2009-02-18 02:32:43 +00:00
|
|
|
d = node.check_and_repair(self.monitor, self.verify, self.add_lease)
|
2009-02-17 06:35:53 +00:00
|
|
|
d.addCallback(self.add_check_and_repair, data)
|
|
|
|
else:
|
2009-02-18 02:32:43 +00:00
|
|
|
d = node.check(self.monitor, self.verify, self.add_lease)
|
2009-02-17 06:35:53 +00:00
|
|
|
d.addCallback(self.add_check, data)
|
|
|
|
d.addCallback(self.write_line)
|
|
|
|
return d
|
|
|
|
|
|
|
|
def add_check_and_repair(self, crr, data):
|
|
|
|
data["check-and-repair-results"] = json_check_and_repair_results(crr)
|
|
|
|
return data
|
|
|
|
|
|
|
|
def add_check(self, cr, data):
|
|
|
|
data["check-results"] = json_check_results(cr)
|
|
|
|
return data
|
|
|
|
|
|
|
|
def write_line(self, data):
|
2017-01-19 22:39:53 +00:00
|
|
|
j = json.dumps(data, ensure_ascii=True)
|
2009-02-17 06:35:53 +00:00
|
|
|
assert "\n" not in j
|
|
|
|
self.req.write(j+"\n")
|
|
|
|
|
|
|
|
def finish(self):
|
|
|
|
stats = dirnode.DeepStats.get_results(self)
|
|
|
|
d = {"type": "stats",
|
|
|
|
"stats": stats,
|
|
|
|
}
|
2017-01-19 22:39:53 +00:00
|
|
|
j = json.dumps(d, ensure_ascii=True)
|
2009-02-17 06:35:53 +00:00
|
|
|
assert "\n" not in j
|
|
|
|
self.req.write(j+"\n")
|
|
|
|
return ""
|
2009-07-03 01:07:49 +00:00
|
|
|
|
|
|
|
|
2019-08-23 19:31:23 +00:00
|
|
|
class UnknownNodeHandler(RenderMixin, Resource):
|
2009-07-03 01:07:49 +00:00
|
|
|
def __init__(self, client, node, parentnode=None, name=None):
|
2019-08-23 19:31:23 +00:00
|
|
|
super(UnknownNodeHandler, self).__init__()
|
2009-07-03 01:07:49 +00:00
|
|
|
assert node
|
|
|
|
self.node = node
|
2010-01-27 06:44:30 +00:00
|
|
|
self.parentnode = parentnode
|
|
|
|
self.name = name
|
2009-07-03 01:07:49 +00:00
|
|
|
|
2019-08-23 19:31:23 +00:00
|
|
|
def render_GET(self, req):
|
2009-07-03 01:07:49 +00:00
|
|
|
t = get_arg(req, "t", "").strip()
|
|
|
|
if t == "info":
|
|
|
|
return MoreInfo(self.node)
|
2010-01-27 06:44:30 +00:00
|
|
|
if t == "json":
|
2010-01-29 03:14:24 +00:00
|
|
|
is_parent_known_immutable = self.parentnode and not self.parentnode.is_mutable()
|
2010-01-27 06:44:30 +00:00
|
|
|
if self.parentnode and self.name:
|
|
|
|
d = self.parentnode.get_metadata_for(self.name)
|
|
|
|
else:
|
|
|
|
d = defer.succeed(None)
|
2019-08-23 19:31:23 +00:00
|
|
|
d.addCallback(lambda md: UnknownJSONMetadata(req, self.node, md, is_parent_known_immutable))
|
2010-01-27 06:44:30 +00:00
|
|
|
return d
|
|
|
|
raise WebError("GET unknown URI type: can only do t=info and t=json, not t=%s.\n"
|
|
|
|
"Using a webapi server that supports a later version of Tahoe "
|
|
|
|
"may help." % t)
|
|
|
|
|
2019-08-23 19:31:23 +00:00
|
|
|
def UnknownJSONMetadata(req, node, edge_metadata, is_parent_known_immutable):
|
2010-01-29 03:14:24 +00:00
|
|
|
rw_uri = node.get_write_uri()
|
|
|
|
ro_uri = node.get_readonly_uri()
|
2010-01-27 06:44:30 +00:00
|
|
|
data = ("unknown", {})
|
|
|
|
if ro_uri:
|
|
|
|
data[1]['ro_uri'] = ro_uri
|
|
|
|
if rw_uri:
|
|
|
|
data[1]['rw_uri'] = rw_uri
|
2010-01-29 03:14:24 +00:00
|
|
|
data[1]['mutable'] = True
|
|
|
|
elif is_parent_known_immutable or node.is_alleged_immutable():
|
|
|
|
data[1]['mutable'] = False
|
|
|
|
# else we don't know whether it is mutable.
|
|
|
|
|
2010-01-27 06:44:30 +00:00
|
|
|
if edge_metadata is not None:
|
|
|
|
data[1]['metadata'] = edge_metadata
|
2019-08-23 19:31:23 +00:00
|
|
|
return text_plain(json.dumps(data, indent=1) + "\n", req)
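# A sketch of the JSON produced for an unknown node (illustrative values):
#
#     ["unknown", {"ro_uri": "ro.URI:...", "mutable": false,
#                  "metadata": {...}}]
#
# "rw_uri" is included (and "mutable" set to true) only when the node has
# a writeable URI; "mutable" is omitted entirely when we cannot tell.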
|