Mirror of https://github.com/tahoe-lafs/tahoe-lafs.git (synced 2025-04-13 22:03:04 +00:00)
update many dirnode interfaces to accept dict-of-nodes instead of dict-of-caps
interfaces.py: define INodeMaker, document argument values, change create_new_mutable_directory() to take dict-of-nodes. Change dirnode.set_nodes() and dirnode.create_subdirectory() too.
nodemaker.py: use INodeMaker, update create_new_mutable_directory()
client.py: have create_dirnode() delegate initial_children= to nodemaker
dirnode.py (Adder): take dict-of-nodes instead of list-of-nodes, which updates set_nodes() and create_subdirectory()
web/common.py (convert_initial_children_json): create dict-of-nodes
web/directory.py: same
web/unlinked.py: same
test_dirnode.py: update tests to match
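For illustration only (not part of the commit): a minimal sketch of how a caller's use of set_nodes() changes, assuming an existing mutable dirnode and two already-created child nodes; the names below are placeholders.

def attach_children(dirnode, filenode, other_node):
    # old API (before this commit): a list of (name, node[, metadata]) tuples
    #   dirnode.set_nodes([(u"backup.txt", filenode),
    #                      (u"notes.txt", other_node, {"tag": "x"})])
    # new API: a dict mapping unicode name -> (node, metadata);
    # metadata=None leaves any existing metadata for that child untouched
    d = dirnode.set_nodes({u"backup.txt": (filenode, None),
                           u"notes.txt": (other_node, {"tag": "x"})})
    return d  # Deferred that fires with the dirnode itself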
parent 8a7c980e37
commit b4ec86c95a
client.py
@@ -458,9 +458,7 @@ class Client(node.Node, pollmixin.PollMixin):
         return self.nodemaker.create_from_cap(writecap, readcap)
 
     def create_dirnode(self, initial_children={}):
-        d = self.nodemaker.create_new_mutable_directory()
-        if initial_children:
-            d.addCallback(lambda n: n.set_children(initial_children))
+        d = self.nodemaker.create_new_mutable_directory(initial_children)
         return d
 
     def create_mutable_file(self, contents=None, keysize=None):
dirnode.py
@@ -16,7 +16,7 @@ from allmydata.check_results import DeepCheckResults, \
      DeepCheckAndRepairResults
 from allmydata.monitor import Monitor
 from allmydata.util import hashutil, mathutil, base32, log
-from allmydata.util.assertutil import _assert, precondition
+from allmydata.util.assertutil import precondition
 from allmydata.util.netstring import netstring, split_netstring
 from allmydata.uri import DirectoryURI, ReadonlyDirectoryURI, \
      LiteralFileURI, from_string
@@ -59,27 +59,22 @@ class Adder:
     def __init__(self, node, entries=None, overwrite=True):
         self.node = node
         if entries is None:
-            entries = []
+            entries = {}
+        precondition(isinstance(entries, dict), entries)
         self.entries = entries
         self.overwrite = overwrite
 
     def set_node(self, name, node, metadata):
         precondition(isinstance(name, unicode), name)
         precondition(IFilesystemNode.providedBy(node), node)
-        self.entries.append( [name, node, metadata] )
+        self.entries[name] = (node, metadata)
 
     def modify(self, old_contents, servermap, first_time):
         children = self.node._unpack_contents(old_contents)
         now = time.time()
-        for e in self.entries:
-            if len(e) == 2:
-                name, child = e
-                new_metadata = None
-            else:
-                assert len(e) == 3
-                name, child, new_metadata = e
-            assert _assert(IFilesystemNode.providedBy(child), child)
-            assert isinstance(name, unicode)
+        for (name, (child, new_metadata)) in self.entries.iteritems():
+            precondition(isinstance(name, unicode), name)
+            precondition(IFilesystemNode.providedBy(child), child)
             if name in children:
                 if not self.overwrite:
                     raise ExistingChildError("child '%s' already exists" % name)
@@ -437,6 +432,7 @@ class DirectoryNode:
         return d
 
     def set_nodes(self, entries, overwrite=True):
+        precondition(isinstance(entries, dict), entries)
         if self.is_readonly():
             return defer.fail(NotMutableError())
         a = Adder(self, entries, overwrite=overwrite)
@@ -477,7 +473,7 @@ class DirectoryNode:
             return defer.fail(NotMutableError())
         d = self._nodemaker.create_new_mutable_directory(initial_children)
         def _created(child):
-            entries = [(name, child, None)]
+            entries = {name: (child, None)}
             a = Adder(self, entries, overwrite=overwrite)
             d = self._node.modify(a.modify)
             d.addCallback(lambda res: child)
interfaces.py
@@ -924,12 +924,12 @@ class IDirectoryNode(IMutableFilesystemNode):
         NotMutableError."""
 
     def set_nodes(entries, overwrite=True):
-        """Add multiple (name, child_node) pairs (or (name, child_node,
-        metadata) triples) to a directory node. Returns a Deferred that fires
-        (with this dirnode) when the operation finishes. This is equivalent
-        to calling set_node() multiple times, but is much more efficient. All
-        child names must be unicode strings."""
+        """Add multiple children to a directory node. Takes a dict mapping
+        unicode childname to (child_node, metadata) tuples. If metadata=None,
+        the original metadata is left unmodified. Returns a Deferred that
+        fires (with this dirnode) when the operation finishes. This is
+        equivalent to calling set_node() multiple times, but is much more
+        efficient."""
 
     def add_file(name, uploadable, metadata=None, overwrite=True):
         """I upload a file (using the given IUploadable), then attach the
@@ -950,9 +950,9 @@ class IDirectoryNode(IMutableFilesystemNode):
         """I create and attach a directory at the given name. The new
         directory can be empty, or it can be populated with children
         according to 'initial_children', which takes a dictionary in the same
-        format as set_children (i.e. mapping unicode child name to (writecap,
-        readcap, metadata) triples). The child name must be a unicode string.
-        I return a Deferred that fires (with the new directory node) when the
+        format as set_nodes (i.e. mapping unicode child name to (childnode,
+        metadata) tuples). The child name must be a unicode string. I return
+        a Deferred that fires (with the new directory node) when the
         operation finishes."""
 
     def move_child_to(current_child_name, new_parent, new_child_name=None,
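For illustration, a hedged sketch of the create_subdirectory() call shape described above; 'parent' and 'kid_node' stand in for an existing IDirectoryNode and IFilesystemNode and are not part of the diff.

def make_populated_subdir(parent, kid_node):
    # initial_children uses the same dict-of-nodes format as set_nodes()
    initial_children = {u"readme.txt": (kid_node, {"ctime": 1234567890.0})}
    d = parent.create_subdirectory(u"subdir", initial_children)
    return d  # fires with the new child IDirectoryNode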
@@ -2028,7 +2028,7 @@ class IClient(Interface):
         """Create a new unattached dirnode, possibly with initial children.
 
         @param initial_children: dict with keys that are unicode child names,
-        and values that are (child_writecap, child_readcap, metadata) tuples.
+        and values that are (childnode, metadata) tuples.
 
         @return: a Deferred that fires with the new IDirectoryNode instance.
         """
@@ -2051,6 +2051,39 @@ class IClient(Interface):
         IDirectoryNode-providing instances, like DirectoryNode.
         """
 
+class INodeMaker(Interface):
+    """The NodeMaker is used to create IFilesystemNode instances. It can
+    accept a filecap/dircap string and return the node right away. It can
+    also create new nodes (i.e. upload a file, or create a mutable file)
+    asynchronously. Once you have one of these nodes, you can use other
+    methods to determine whether it is a file or directory, and to download
+    or modify its contents.
+
+    The NodeMaker encapsulates all the authorities that these
+    IFilesystemNodes require (like references to the StorageFarmBroker). Each
+    Tahoe process will typically have a single NodeMaker, but unit tests may
+    create simplified/mocked forms for testing purposes.
+    """
+    def create_from_cap(writecap, readcap=None):
+        """I create an IFilesystemNode from the given writecap/readcap. I can
+        only provide nodes for existing file/directory objects: use my other
+        methods to create new objects. I return synchronously."""
+
+    def create_mutable_file(contents=None, keysize=None):
+        """I create a new mutable file, and return a Deferred which will fire
+        with the IMutableFileNode instance when it is ready. If contents= is
+        provided (a bytestring), it will be used as the initial contents of
+        the new file, otherwise the file will contain zero bytes. keysize= is
+        for use by unit tests, to create mutable files that are smaller than
+        usual."""
+
+    def create_new_mutable_directory(initial_children={}):
+        """I create a new mutable directory, and return a Deferred which will
+        fire with the IDirectoryNode instance when it is ready. If
+        initial_children= is provided (a dict mapping unicode child name to
+        (childnode, metadata_dict) tuples), the directory will be populated
+        with those children, otherwise it will be empty."""
+
 class IClientStatus(Interface):
     def list_all_uploads():
         """Return a list of uploader objects, one for each upload which
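For illustration, a sketch of how the INodeMaker methods above compose; 'nodemaker' is assumed to be a NodeMaker instance and the cap strings to refer to an existing object.

def dir_from_existing_cap(nodemaker, writecap, readcap=None):
    child = nodemaker.create_from_cap(writecap, readcap)   # synchronous
    kids = {u"existing-file": (child, None)}
    d = nodemaker.create_new_mutable_directory(initial_children=kids)
    return d  # fires with the populated IDirectoryNode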
nodemaker.py
@@ -1,25 +1,15 @@
 import weakref
+from zope.interface import implements
+from allmydata.interfaces import INodeMaker
 from allmydata.immutable.filenode import FileNode, LiteralFileNode
 from allmydata.mutable.filenode import MutableFileNode
 from allmydata.dirnode import DirectoryNode
 from allmydata.unknown import UnknownNode
 from allmydata.uri import DirectoryURI, ReadonlyDirectoryURI
 
-# the "node maker" is a two-argument callable (really a 'create' method on a
-# NodeMaker instance) which accepts a URI string (and an optional readcap
-# string, for use by dirnode.copy) and returns an object which (at the very
-# least) provides IFilesystemNode. That interface has other methods that can
-# be used to determine if the node represents a file or directory, in which
-# case other methods are available (like download() or modify()). Each Tahoe
-# process will typically have a single NodeMaker, but unit tests may create
-# simplified/mocked forms for test purposes.
-
-# any authorities which fsnodes will need (like a reference to the
-# StorageFarmBroker, to access storage servers for publish/retrieve/download)
-# will be retained as attributes inside the NodeMaker and passed to fsnodes
-# as necessary.
-
 class NodeMaker:
+    implements(INodeMaker)
+
     def __init__(self, storage_broker, secret_holder, history,
                  uploader, downloader, download_cache_dirman,
                  default_encoding_parameters, key_generator):
@@ -92,5 +82,5 @@ class NodeMaker:
         d = self.create_mutable_file()
         d.addCallback(self._create_dirnode)
         if initial_children:
-            d.addCallback(lambda n: n.set_children(initial_children))
+            d.addCallback(lambda n: n.set_nodes(initial_children))
         return d
test_dirnode.py
@@ -176,7 +176,7 @@ class Dirnode(GridTestMixin, unittest.TestCase,
             self.shouldFail(dirnode.NotMutableError, "set_uri ro", None,
                             ro_dn.set_node, u"newchild", filenode)
             self.shouldFail(dirnode.NotMutableError, "set_nodes ro", None,
-                            ro_dn.set_nodes, [ (u"newchild", filenode) ])
+                            ro_dn.set_nodes, { u"newchild": (filenode, None) })
             self.shouldFail(dirnode.NotMutableError, "set_uri ro", None,
                             ro_dn.add_file, u"newchild", uploadable)
             self.shouldFail(dirnode.NotMutableError, "set_uri ro", None,
@@ -486,18 +486,17 @@ class Dirnode(GridTestMixin, unittest.TestCase,
         d.addCallback(lambda res: n.delete(u"e3"))
 
         # metadata through set_nodes()
-        d.addCallback(lambda res: n.set_nodes([ (u"f1", n),
-                                                (u"f2", n, {}),
-                                                (u"f3", n,
-                                                 {"key": "value"}),
-                                                ]))
+        d.addCallback(lambda res:
+                      n.set_nodes({ u"f1": (n, None),
+                                    u"f2": (n, {}),
+                                    u"f3": (n, {"key": "value"}),
+                                    }))
         d.addCallback(lambda n2: self.failUnlessIdentical(n2, n))
         d.addCallback(lambda res:
                       self.shouldFail(ExistingChildError, "set_nodes-no",
                                       "child 'f1' already exists",
-                                      n.set_nodes,
-                                      [ (u"f1", n),
-                                        (u"new", n), ],
+                                      n.set_nodes, { u"f1": (n, None),
+                                                     u"new": (n, None), },
                                       overwrite=False))
         # and 'new' should not have been created
         d.addCallback(lambda res: n.list())
@@ -686,6 +685,7 @@ class Dirnode(GridTestMixin, unittest.TestCase,
         self.basedir = "dirnode/Dirnode/test_create_subdirectory"
         self.set_up_grid()
         c = self.g.clients[0]
+        nm = c.nodemaker
 
         d = c.create_dirnode()
         def _then(n):
@@ -694,8 +694,8 @@ class Dirnode(GridTestMixin, unittest.TestCase,
             fake_file_uri = make_mutable_file_uri()
             other_file_uri = make_mutable_file_uri()
             md = {"metakey": "metavalue"}
-            kids = {u"kid1": (fake_file_uri, fake_file_uri),
-                    u"kid2": (other_file_uri, other_file_uri, md),
+            kids = {u"kid1": (nm.create_from_cap(fake_file_uri), None),
+                    u"kid2": (nm.create_from_cap(other_file_uri), md),
                     }
             d = n.create_subdirectory(u"subdir", kids)
             def _check(sub):
web/common.py
@@ -54,7 +54,10 @@ def get_arg(ctx_or_req, argname, default=None, multiple=False):
         return results[0]
     return default
 
-def convert_initial_children_json(initial_children_json):
+def convert_initial_children_json(nodemaker, initial_children_json):
+    """I convert the JSON output of GET?t=json into the dict-of-nodes input
+    to both dirnode.create_subdirectory() and
+    client.create_directory(initial_children=)."""
    initial_children = {}
    if initial_children_json:
        data = simplejson.loads(initial_children_json)
@@ -67,7 +70,8 @@ def convert_initial_children_json(initial_children_json):
             if readcap is not None:
                 readcap = str(readcap)
             metadata = propdict.get("metadata", {})
-            initial_children[name] = (writecap, readcap, metadata)
+            childnode = nodemaker.create_from_cap(writecap, readcap)
+            initial_children[name] = (childnode, metadata)
     return initial_children
 
 def abbreviate_time(data):
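A sketch of the conversion this helper now performs. The exact t=json child format (the "rw_uri" key and the [type, propdict] pairing) is an assumption here; the diff itself only shows that a writecap, readcap, and metadata are pulled out of each propdict and turned into a node via create_from_cap().

import simplejson

kids_json = simplejson.dumps({
    u"kid1": ["filenode", {"rw_uri": "URI:SSK:...",   # hypothetical cap
                           "metadata": {"tag": "x"}}],
})
# kids = convert_initial_children_json(client.nodemaker, kids_json)
# => {u"kid1": (<node built by nodemaker.create_from_cap(...)>, {"tag": "x"})}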
web/directory.py
@@ -101,8 +101,9 @@ class DirectoryNodeHandler(RenderMixin, rend.Page, ReplaceMeMixin):
         else:
             req.content.seek(0)
             kids_json = req.content.read()
-            initial_children = convert_initial_children_json(kids_json)
-            d = self.node.create_subdirectory(name, initial_children)
+            kids = convert_initial_children_json(self.client.nodemaker,
+                                                 kids_json)
+            d = self.node.create_subdirectory(name, kids)
             d.addCallback(make_handler_for,
                           self.client, self.node, name)
         return d
@@ -227,10 +228,9 @@ class DirectoryNodeHandler(RenderMixin, rend.Page, ReplaceMeMixin):
             return defer.succeed(self.node.get_uri()) # TODO: urlencode
         name = name.decode("utf-8")
         replace = boolean_of_arg(get_arg(req, "replace", "true"))
-        children_json = get_arg(req, "children", "")
-        initial_children = convert_initial_children_json(children_json)
-        d = self.node.create_subdirectory(name, initial_children,
-                                          overwrite=replace)
+        kids_json = get_arg(req, "children", "")
+        kids = convert_initial_children_json(self.client.nodemaker, kids_json)
+        d = self.node.create_subdirectory(name, kids, overwrite=replace)
         d.addCallback(lambda child: child.get_uri()) # TODO: urlencode
         return d
 
web/unlinked.py
@@ -27,9 +27,9 @@ def PUTUnlinkedSSK(req, client):
 def PUTUnlinkedCreateDirectory(req, client):
     # "PUT /uri?t=mkdir", to create an unlinked directory.
     req.content.seek(0)
-    initial_children_json = req.content.read()
-    initial_children = convert_initial_children_json(initial_children_json)
-    d = client.create_dirnode(initial_children=initial_children)
+    kids_json = req.content.read()
+    kids = convert_initial_children_json(client.nodemaker, kids_json)
+    d = client.create_dirnode(initial_children=kids)
     d.addCallback(lambda dirnode: dirnode.get_uri())
     # XXX add redirect_to_result
     return d
@@ -94,9 +94,9 @@ def POSTUnlinkedSSK(req, client):
 
 def POSTUnlinkedCreateDirectory(req, client):
     # "POST /uri?t=mkdir", to create an unlinked directory.
-    initial_json = get_arg(req, "children", "")
-    initial_children = convert_initial_children_json(initial_json)
-    d = client.create_dirnode(initial_children=initial_children)
+    kids_json = get_arg(req, "children", "")
+    kids = convert_initial_children_json(client.nodemaker, kids_json)
+    d = client.create_dirnode(initial_children=kids)
     redirect = get_arg(req, "redirect_to_result", "false")
     if boolean_of_arg(redirect):
         def _then_redir(res):