Mirror of https://github.com/tahoe-lafs/tahoe-lafs.git, synced 2025-02-20 09:46:18 +00:00

remove old filetree code

commit 4a0682874b (parent fb02488a8e)

setup.py: 2 changed lines
@@ -79,7 +79,7 @@ majority of the nodes are no longer available.""",
       url='http://allmydata.org/',
       license='GNU GPL',
       packages=["allmydata", "allmydata.test", "allmydata.util",
-                "allmydata.filetree", "allmydata.scripts",],
+                "allmydata.scripts",],
       package_dir={ "allmydata": "src/allmydata",},
       scripts = ["bin/allmydata-tahoe"],
       package_data={ 'allmydata': ['web/*.xhtml', 'web/*.css'] },
@@ -1,21 +0,0 @@

from zope.interface import implements
from allmydata.filetree.interfaces import INode

class BaseDataNode(object):
    implements(INode)
    prefix = None # must be set by subclass

    def new(self, data):
        self.set_base_data(data)
        return self

    def get_base_data(self):
        raise NotImplementedError # must be provided by subclass
    def set_base_data(self, data):
        raise NotImplementedError # must be provided by subclass
    def serialize_node(self):
        return "%s:%s" % (self.prefix, self.get_base_data())
    def populate_node(self, body, node_maker):
        self.set_base_data(body)

@@ -1,439 +0,0 @@

from zope.interface import implements
from twisted.internet import defer
from cStringIO import StringIO
from allmydata.filetree.interfaces import (
    INode, INodeMaker, IDirectoryNode, ISubTree,
    ICHKDirectoryNode, ISSKDirectoryNode,
    NoSuchChildError,
    )
from allmydata.filetree.basenode import BaseDataNode
from allmydata import download
from allmydata.util import bencode

# interesting feature ideas:
# pubsub for MutableDirectoryNode: get rapid notification of changes
# caused by someone else
#
# bind a local physical directory to the MutableDirectoryNode contents:
# each time the vdrive changes, update the local drive to match, and
# vice versa.

from itertools import islice, izip
def in_pairs(iterable):
    "s -> (s0,s1), (s2,s3), (s4,s5), ..."
    a = islice(iterable, 0, None, 2)
    b = islice(iterable, 1, None, 2)
    return izip(a, b)


class SubTreeNode:
    implements(INode, IDirectoryNode)

    def __init__(self, tree):
        self.enclosing_tree = tree
        self.children = {}
        # # subdirectory_node_children maps child name to another SubTreeNode
        # # instance. This is only for internal directory nodes. All other
        # # nodes are listed in child_specifications instead.
        # self.subdirectory_node_children = {}
        # # child_specifications maps child name to a specification tuple which
        # # describes how to obtain the actual child. For example, if "foo.jpg"
        # # in this node represents a CHK-encoded FILE with a uri of "fooURI",
        # # then self.child_specifications["foo.jpg"] = ("CHKFILE","fooURI")
        # self.child_specifications = {}

    def list(self):
        return self.children

    def get(self, childname):
        if childname in self.children:
            return self.children[childname]
        else:
            raise NoSuchChildError("no child named '%s'" % (childname,))

    def get_subtree(self):
        return self.enclosing_tree

    def delete(self, childname):
        assert self.enclosing_tree.is_mutable()
        if childname in self.children:
            del self.children[childname]
        else:
            raise NoSuchChildError("no child named '%s'" % (childname,))

    def add_subdir(self, childname):
        assert childname not in self.children
        newnode = SubTreeNode(self.enclosing_tree)
        self.children[childname] = newnode
        return newnode

    def add(self, childname, node):
        assert childname not in self.children
        assert INode(node)
        self.children[childname] = node
        return self

    def serialize_node(self):
        # note: this is a one-pass recursive serialization that will result
        # in the whole file table being held in memory. This is only
        # appropriate for directories with fewer than, say, 10k nodes. If we
        # support larger directories, we should turn this into some kind of
        # generator instead, and write the serialized data directly to a
        # tempfile.
        #
        # [name1, child1, name2, child2..]
        #
        # child1 is either a list for subdirs, or a string for non-subdirs

        data = []
        for name in sorted(self.children.keys()):
            data.append(name)
            data.append(self.children[name].serialize_node())
        return data

    def populate_dirnode(self, data, node_maker):
        assert INodeMaker(node_maker)
        assert len(data) % 2 == 0
        for (name, child_data) in in_pairs(data):
            if isinstance(child_data, (list, tuple)):
                child = SubTreeNode(self.enclosing_tree)
                child.populate_dirnode(child_data, node_maker)
            else:
                assert isinstance(child_data, str)
                child = node_maker.make_node_from_serialized(child_data)
            self.children[name] = child

    def is_leaf_subtree(self):
        return False


class _DirectorySubTree(object):
    """I represent a set of connected directories that all share the same
    access control: any given person can read or write anything in this tree
    as a group, and it is not possible to give access to some pieces of this
    tree and not to others. Read-only access to individual files can be
    granted independently, of course, but through an unnamed URI, not as a
    subdirectory.

    Each internal directory is represented by a separate Node.

    This is an abstract base class. Individual subclasses will implement
    various forms of serialization, persistence, and mutability.

    """
    implements(ISubTree)


    def new(self):
        # create a new, empty directory
        self.root = SubTreeNode(self)
        self.mutable = True # sure, why not
        return self

    def populate_from_node(self, node, parent_is_mutable, node_maker, downloader):
        # self.populate_from_node must be defined by the subclass (CHK or
        # SSK), since it controls how the spec is interpreted. It will
        # probably use the contents of the node to figure out what to
        # download from the grid, then pass this downloaded serialized data
        # to populate_from_data()
        raise NotImplementedError

    def _populate_from_data(self, data, node_maker):
        self.root = SubTreeNode(self)
        self.root.populate_dirnode(bencode.bdecode(data), node_maker)
        return self

    def serialize_subtree_to_file(self, f):
        sexprs = self.root.serialize_node()
        bencode.bwrite(sexprs, f)

    def is_mutable(self):
        return self.mutable

    def get_node_for_path(self, path):
        # this is restricted to traversing our own subtree. Returns
        # (found_path, node, remaining_path)
        found_path = []
        remaining_path = path[:]
        node = self.root
        while remaining_path:
            name = remaining_path[0]
            try:
                childnode = node.get(name)
            except NoSuchChildError:
                # The node *would* be in this subtree if it existed, but it
                # doesn't. Leave found_path and remaining_path alone, and
                # node points at the last parent node that was on the path.
                break
            if IDirectoryNode.providedBy(childnode):
                # recurse
                node = childnode
                found_path.append(name)
                remaining_path.pop(0)
                continue
            else:
                # the path takes us out of this subtree and into another
                node = childnode # next subtree node
                found_path.append(name)
                remaining_path.pop(0)
                break
        return (found_path, node, remaining_path)

    def put_node_at_path(self, path, new_node):
        assert len(path) > 0
        child_name = path[-1]

        # first step: get (or create) the parent directory
        node = self.root
        for subdir_name in path[:-1]:
            # TODO: peeking at private attributes is naughty, but using
            # node.get() and catching NoSuchChildError would be slightly
            # ugly. Reconsider the IDirectoryNode.get() API.
            childnode = node.children.get(subdir_name)
            if childnode:
                assert IDirectoryNode.providedBy(childnode)
            else:
                # we have to create new directories until the parent exists
                childnode = node.add_subdir(subdir_name)
            node = childnode

        # 'node' is now pointing at the parent directory
        if child_name in node.children:
            # oops, there's already something there. We can replace it.
            # TODO: How do we free the subtree that was just orphaned?
            node.delete(child_name)

        # now we can finally add the new node
        node.add(child_name, new_node)

    def delete_node_at_path(self, path):
        assert len(path) > 0
        child_name = path[-1]

        # first step: get the parent directory
        node = self.root
        for subdir_name in path[:-1]:
            subdir_node = node.get(subdir_name) # may raise NoSuchChildError
            node = subdir_node

        # 'node' is now pointing at the parent directory. Let's make sure the
        # path they want to delete actually exists. We don't really care what
        # the child *is*, just that it exists.
        node.get(child_name) # may raise NoSuchChildError

        # now delete it
        # TODO: How do we free the subtree that was just orphaned?
        node.delete(child_name)


class LocalFileSubTreeNode(BaseDataNode):
    prefix = "LocalFileDirectory"

    def new(self, filename):
        self.filename = filename
        return self

    def get_base_data(self):
        return self.filename
    def set_base_data(self, data):
        self.filename = data

    def is_leaf_subtree(self):
        return False

class LocalFileSubTree(_DirectorySubTree):
    node_class = LocalFileSubTreeNode

    def new(self, filename):
        self.filename = filename
        return _DirectorySubTree.new(self)

    def populate_from_node(self, node, parent_is_mutable, node_maker, downloader):
        self.mutable = True # probably
        self.filename = node.filename
        f = open(self.filename, "rb")
        data = f.read()
        f.close()
        d = defer.succeed(data)
        d.addCallback(self._populate_from_data, node_maker)
        return d

    def mutation_modifies_parent(self):
        return False

    def create_node_now(self):
        return LocalFileSubTreeNode().new(self.filename)

    def _update(self):
        f = open(self.filename, "wb")
        self.serialize_subtree_to_file(f)
        f.close()

    def update_now(self, uploader):
        self._update()
        return self.create_node_now()

    def update(self, work_queue):
        # TODO: this may suffer from the same execute-too-early problem as
        # redirect.LocalFileRedirection
        self._update()
        return None


class CHKDirectorySubTreeNode(BaseDataNode):
    implements(ICHKDirectoryNode)
    prefix = "CHKDirectory"

    def get_base_data(self):
        return self.uri
    def set_base_data(self, data):
        self.uri = data

    def get_uri(self):
        return self.uri

    def is_leaf_subtree(self):
        return False


class CHKDirectorySubTree(_DirectorySubTree):
    # maybe mutable, maybe not
    node_class = CHKDirectorySubTreeNode

    def new(self):
        self.uri = None
        return _DirectorySubTree.new(self)

    def set_uri(self, uri):
        self.uri = uri

    def populate_from_node(self, node, parent_is_mutable, node_maker, downloader):
        assert ICHKDirectoryNode(node)
        self.mutable = parent_is_mutable
        d = downloader.download(node.get_uri(), download.Data())
        d.addCallback(self._populate_from_data, node_maker)
        def _populated(res):
            self.uri = node.get_uri()
            return self
        d.addCallback(_populated)
        return d

    def mutation_modifies_parent(self):
        return True

    def create_node_now(self):
        return CHKDirectorySubTreeNode().new(self.uri)

    def update_now(self, uploader):
        f = StringIO()
        self.serialize_subtree_to_file(f)
        data = f.getvalue()
        d = uploader.upload_data(data)
        def _uploaded(uri):
            self.uri = uri
            return self.create_node_now()
        d.addCallback(_uploaded)
        return d

    def update(self, workqueue):
        # this is the CHK form
        old_uri = self.uri
        f, filename = workqueue.create_tempfile(".chkdir")
        self.serialize_subtree_to_file(f)
        f.close()
        boxname = workqueue.create_boxname()
        workqueue.add_upload_chk(filename, boxname)
        workqueue.add_delete_tempfile(filename)
        workqueue.add_retain_uri_from_box(boxname)
        workqueue.add_delete_box(boxname)
        if old_uri:
            workqueue.add_unlink_uri(old_uri)
        # TODO: think about how self.old_uri will get updated. I *think* that
        # this whole instance will get replaced, so it ought to be ok. But
        # this needs investigation.

        # mutation affects our parent, so we return a boxname for them
        return boxname


class SSKDirectorySubTreeNode(object):
    implements(INode, ISSKDirectoryNode)
    prefix = "SSKDirectory"

    def serialize_node(self):
        data = (self.read_cap, self.write_cap)
        return "%s:%s" % (self.prefix, bencode.bencode(data))
    def populate_node(self, body, node_maker):
        self.read_cap, self.write_cap = bencode.bdecode(body)

    def get_read_capability(self):
        return self.read_cap
    def get_write_capability(self):
        return self.write_cap
    def set_read_capability(self, read_cap):
        self.read_cap = read_cap
    def set_write_capability(self, write_cap):
        self.write_cap = write_cap

    def is_leaf_subtree(self):
        return False


class SSKDirectorySubTree(_DirectorySubTree):
    node_class = SSKDirectorySubTreeNode

    def new(self):
        _DirectorySubTree.new(self)
        self.version = 0
        # TODO: populate
        return self

    def populate_from_node(self, node, parent_is_mutable, node_maker, downloader):
        node = ISSKDirectoryNode(node)
        self.read_capability = node.get_read_capability()
        self.write_capability = node.get_write_capability()
        self.mutable = bool(self.write_capability)
        d = downloader.download_ssk(self.read_capability, download.Data())
        d.addCallback(self._populate_from_data, node_maker)
        return d

    def set_version(self, version):
        self.version = version

    def mutation_modifies_parent(self):
        return False

    def create_node_now(self):
        node = SSKDirectorySubTreeNode()
        node.set_read_capability(self.read_capability)
        node.set_write_capability(self.write_capability)
        return node

    def update_now(self, uploader):
        if not self.write_capability:
            raise RuntimeError("This SSKDirectorySubTree is not mutable")

        f = StringIO()
        self.serialize_subtree_to_file(f)
        data = f.getvalue()

        self.version += 1
        d = uploader.upload_ssk_data(self.write_capability, self.version, data)
        d.addCallback(lambda ignored: self.create_node_now())
        return d

    def update(self, workqueue):
        # this is the SSK form
        f, filename = workqueue.create_tempfile(".sskdir")
        self.serialize_subtree_to_file(f)
        f.close()

        oldversion = self.version
        self.version = self.version + 1

        workqueue.add_upload_ssk(self.write_capability, oldversion, filename)
        workqueue.add_delete_tempfile(filename)
        workqueue.add_retain_ssk(self.read_capability)
        # mutation does not affect our parent
        return None
@@ -1,51 +0,0 @@

from zope.interface import implements
from allmydata.filetree.interfaces import INode, IFileNode
from allmydata.filetree.basenode import BaseDataNode
from allmydata.util import bencode

class CHKFileNode(BaseDataNode):
    implements(IFileNode)
    prefix = "CHKFile"

    def new(self, uri):
        self.uri = uri
        return self

    def put_node_at_path(self, path, node):
        raise RuntimeError

    def get_base_data(self):
        return self.uri
    def set_base_data(self, data):
        self.uri = data

    def get_uri(self):
        return self.uri

    def is_leaf_subtree(self):
        return True

class SSKFileNode(object):
    implements(INode, IFileNode)
    prefix = "SSKFile"

    def put_node_at_path(self, path, node):
        raise RuntimeError

    def serialize_node(self):
        data = (self.read_cap, self.write_cap)
        return "%s:%s" % (self.prefix, bencode.bencode(data))
    def populate_node(self, data, node_maker):
        assert data.startswith(self.prefix + ":")
        capdata = data[len(self.prefix)+1:]
        self.read_cap, self.write_cap = bencode.bdecode(capdata)

    def get_read_capability(self):
        return self.read_cap
    def get_write_capability(self):
        return self.write_cap

    def is_leaf_subtree(self):
        return True

@@ -1,338 +0,0 @@

from zope.interface import Interface

class INode(Interface):
    """This is some sort of retrievable node. All objects which implement
    other I*Node interfaces also implement this one."""

    # the INode-implementing class must have an attribute named .prefix which
    # contains a string.

    def serialize_node():
        """Return a data structure which contains enough information to build
        this node again in the future (by calling
        INodeMaker.make_node_from_serialized(). For IDirectoryNodes, this
        will be a list. For all other nodes this will be a string of the form
        'prefix:body', where 'prefix' must be the same as the class attribute
        .prefix ."""
    def populate_node(body, node_maker):
        """INodeMaker.make_node_from_serialized() will first use the prefix
        from the .prefix attribute to decide what kind of Node to create.
        They will then call this populate_node() method with the body to
        populate the new Node. 'node_maker' provides INodeMaker, which
        provides that same make_node_from_serialized function to create any
        internal child nodes that might be necessary."""

    def is_leaf_subtree():
        """Return True if this node does not refer to a traversable
        subtree. When searching for the node that describes a path, the
        search will stop at the first leaf node found. IFileNodes should
        return True here.
        """
# TODO: there is a slightly confusing mixture of IDirectoryNodes and all
# other kinds of nodes. It is convenient to mix them because that way list()
# can point at nodes of all sorts, but IDirectoryNodes are very different
# than the rest (because they to not represent distinct subtrees). There
# might be a better way to factor this.

# TODO: 'node' is a problematic term here. It refers to nodes in the graph of
# connected subtrees. It also refers to nodes in the graph of directories and
# links within a single subtree. And the interface named INode is
# unfortunately a homonym with "inode", the data structure we previously used
# to represent information about an uploaded file which was too large to keep
# locally (the list of blockids), which meant the inode itself was uploaded.
# We no longer use inodes, but using a word that sounds just like it may
# cause confusion.

class IFileNode(Interface):
    """This is a file which can be retrieved."""
    # TODO: not sure which of these to provide.. should URIs contain "CHK" or
    # "SSK" in them? Or should that be a detail of IDownloader?
    def get_uri():
        """Return the URI of the target file. This URI can be passed
        to an IDownloader to retrieve the data."""
    def download(downloader, target):
        """Download the file to the given target (using the provided
        downloader). Return a deferred that fires (with 'target') when the
        download is complete."""

class IDirectoryNode(Interface):
    """This is a directory which can be listed."""
    # these calls do not modify the subtree
    def list():
        """Return a dictionary mapping each childname to a node. These nodes
        implement various I*Node interfaces depending upon what they can do."""
    def get(childname):
        """Return a child node. Raises NoSuchChildError if there is no
        child of that name."""
    def get_subtree():
        """Return the ISubTree which contains this node."""

    # the following calls modify the subtree. After calling them, you must
    # tell the enclosing subtree to serialize and upload itself. They can
    # only be called if this directory node is associated with a mutable
    # subtree.
    def delete(childname):
        """Delete any child referenced by this name."""
    def add_subdir(childname):
        """Create a new directory node, and return it."""
    def add(childname, node):
        """Add a new node to this path. Returns self."""

class ISubTree(Interface):
    """A subtree is a collection of Nodes: files, directories, other trees.

    A subtree represents a set of connected directories and files that all
    share the same access control: any given person can read or write
    anything in this tree as a group, and it is not possible to give access
    to some pieces of this tree and not to others. Read-only access to
    individual files can be granted independently, of course, but through an
    unnamed URI, not as a subdirectory.

    Each internal directory is represented by a separate Node. This might be
    a DirectoryNode, or it might be a FileNode.
    """

    # All ISubTree-providing instances must have a class-level attribute
    # named .node_class which references the matching INode-providing class.
    # This is used by the ISubTreeMaker to turn nodes into subtrees.

    def populate_from_node(node, parent_is_mutable, node_maker, downloader):
        """Subtrees are created by ISubTreeMaker.open() being called with an
        INode which describes both the kind of subtree to be created and a
        way to obtain its contents. open() uses the node to create a new
        instance of the appropriate subtree type, then calls this
        populate_from_node() method.

        Each subtree's populate_from_node() method is expected to use the
        downloader to obtain a file with the subtree's serialized contents
        (probably by pulling data from some source, like the grid, the vdrive
        server, an HTTP server, or somewhere on the local filesystem), then
        unserialize them and populate the subtree's state.

        Return a Deferred that will fire (with self) when this subtree is
        ready for use (specifically when it is ready for get() and add()
        calls).
        """

    def is_mutable():
        """This returns True if we have the ability to modify this subtree.
        If this returns True, this reference may be adapted to
        IMutableSubTree to actually exercise these mutation rights.
        """

    def mutation_modifies_parent():
        """This returns True if any modification to this subtree will result
        in it getting a new identity, and thus requiring its parent be
        notified. This is True for CHKDirectorySubTree, but False for
        SSKDirectorySubTree and all redirections.
        """

    def get_node_for_path(path):
        """Ask this subtree to follow the path through its internal nodes.

        Returns a tuple of (found_path, node, remaining_path). This method
        operations synchronously, and does not return a Deferred.

        (found_path=path, found_node, [])
        If the path terminates within this subtree, found_path=path and
        remaining_path=[], and the node will be an internal IDirectoryNode.

        (found_path, last_node, remaining_path)
        If the path does not terminate within this subtree but neither does
        it exit this subtree, the last internal IDirectoryNode that *was* on
        the path will be returned in 'node'. The path components that led to
        this node will be in found_path, and the remaining components will be
        in remaining_path. If you want to create the target node, loop over
        remaining_path as follows::

         while remaining_path:
             node = node.add_subdir(remaining_path.pop(0))

        (found_path, exit_node, remaining_path)
        If the path leaves this subtree, 'node' will be a different kind of
        INode (probably one that points at a child directory of some sort),
        found_path will be the components that led to this point, and
        remaining_path will be the remaining components. If you still wish to
        locate the target, use 'node' to open a new subtree, then provide
        'remaining_path' to the new subtree's get_node_for_path() method.

        """

    def put_node_at_path(path, node):
        """Add the given node to this subtree, at 'path'.

        This may create internal directory subnodes as necessary. This must
        run synchronously, and returns None.
        """

    def delete_node_at_path(path):
        """Delete the node at the the given path.

        This must run synchronously, and returns None.
        """

    def serialize_subtree_to_file(f):
        """Create a string which describes my structure and write it to the
        given filehandle (using only .write()). This string should be
        suitable for uploading to the grid or storing in a local file."""

    def update_now(uploader):
        """Perform whatever work is necessary to record this subtree to
        persistent storage.

        This returns an INode, or a Deferred that fires (with an INode) when
        the subtree has been persisted.

        For directory subtrees, this will cause the subtree to serialize
        itself to a file, then upload this file to the grid, then create an
        INode-providing instance which describes where the file wound up. For
        redirections, this will cause the subtree to modify the redirection's
        persistent storage, then return the (unmodified) INode that describes
        the redirection.

        This form does not use the workqueue. If the node is shut down before
        the Deferred fires, a redirection or SSK subtree might be left in its
        previous state, or it might have been updated.
        """

    def update(workqueue):
        """Perform and schedule whatever work is necessary to record this
        subtree to persistent storage.

        Returns a boxname or None, synchronously. This function does not
        return a Deferred.

        If the parent subtree needs to be modified with the new identity of
        this subtree (i.e. for CHKDirectorySubTree instances), this will
        return a boxname in which the serialized INode will be placed once
        the added workqueue steps have completed. The caller should add
        'addpath' steps to the workqueue using this boxname (which will
        eventually cause recursion on other subtrees, until some subtree is
        updated which does not require notifying the parent). update() will
        add steps to delete the box at the end of the workqueue.

        If the parent subtree does not need to be modified (i.e. for
        SSKDirectorySubTree instances, or redirections), this will return
        None.

        This is like update_now(), but uses the workqueue to insure
        consistency in the face of node shutdowns. Once our intentions have
        been recorded in the workqueue, if the node is shut down before the
        upload steps have completed, the update will eventually complete the
        next time the node is started.
        """

    def create_node_now():
        # TODO: this is no longer just for testing.. vdrive.addpath needs it
        """FOR TESTING ONLY. Immediately create and return an INode which
        describes the current state of this subtree. This does not perform
        any upload or persistence work, and thus depends upon any internal
        state having been previously set correctly. In general this will
        return the correct value for subtrees which have just been created
        (and not yet mutated). It will also return the correct value for
        subtrees which do not change their identity when they are mutated
        (SSKDirectorySubTrees and redirections).
        """

class INodeMaker(Interface):
    def make_node_from_serialized(serialized):
        """Turn a string into an INode, which contains information about the
        file or directory (like a URI), but does not contain the actual
        contents. An ISubTreeMaker can be used later to retrieve the contents
        (which means downloading the file if this is an IFileNode, or perhaps
        creating a new subtree from the contents)."""

class ISubTreeMaker(Interface):
    def make_subtree_from_node(node, parent_is_mutable):
        """Turn an INode into an ISubTree.

        I accept an INode-providing specification of a subtree, and return a
        Deferred that fires with an ISubTree-providing instance. I will
        perform network IO and download the serialized data that the INode
        references, if necessary, or ask the vdrive server (or other provider)
        for a pointer, or read it from local disk.
        """


class IVirtualDrive(Interface):

    def __init__(workqueue, downloader, root_node):
        pass

    # commands to manipulate files

    def list(path):
        """List the contents of the directory at the given path.

        'path' is a list of strings (empty to refer to the root directory)
        and must refer to a DIRECTORY node. This method returns a Deferred
        that fires with a dictionary that maps strings to filetypes. The
        strings are useful as path name components. The filetypes are
        Interfaces: either IDirectoryNode if path+[childname] can be used in
        a 'list' method, or IFileNode if path+[childname] can be used in a
        'download' method.

        The Deferred will errback (with NoSuchDirectoryError) if the path
        does not point to an actual directory.
        """

    def download(path, target):
        """Download the file at the given path to 'target'.

        'path' must refer to a FILE. 'target' must implement IDownloadTarget.
        This returns a Deferred that fires (with 'target') when the download
        is complete.
        """

    def upload_data(path, data):
        """Upload a string to the given path. The path must not already exist.

        path[:-1] must refer to a writable DIRECTORY node.

        This uses the workqueue, and returns None.
        """

    def upload(path, filename):
        """Upload a file from disk to the given path.

        This uses the workqueue, and returns None.
        """

    def delete(path):
        """Delete the file or directory at the given path.

        Returns a Deferred that fires (with self) when the delete is
        complete.
        """

    def add_node(path, node):
        """Add a node to the given path. Use the workqueue.
        """

    # commands to manipulate subtrees

    # ... detach subtree, merge subtree, etc


# TODO

class ICHKDirectoryNode(Interface):
    def get_uri():
        pass
class ISSKDirectoryNode(Interface):
    def get_read_capability():
        pass
    def get_write_capability():
        pass



class NoSuchChildError(Exception):
    pass
class NoSuchDirectoryError(Exception):
    pass
class PathAlreadyExistsError(Exception):
    pass
class PathDoesNotExistError(Exception):
    pass
@@ -1,41 +0,0 @@

from zope.interface import implements
from allmydata.filetree import directory, file, redirect
from allmydata.filetree.interfaces import INodeMaker

# this list is used by NodeMaker to convert node specification strings (found
# inside the serialized form of subtrees) into Nodes (which live in the
# in-RAM form of subtrees).
all_node_types = [
    directory.LocalFileSubTreeNode,
    directory.CHKDirectorySubTreeNode,
    directory.SSKDirectorySubTreeNode,
    file.CHKFileNode,
    file.SSKFileNode,
    redirect.LocalFileRedirectionNode,
    redirect.VdriveRedirectionNode,
    redirect.HTTPRedirectionNode,
    redirect.VdriveOrLocalFileRedirectionNode,
    ]

class NodeMaker(object):
    implements(INodeMaker)

    def make_node_from_serialized(self, serialized):
        # this turns a string into an INode, which contains information about
        # the file or directory (like a URI), but does not contain the actual
        # contents. An ISubTreeMaker can be used later to retrieve the
        # contents (which means downloading the file if this is an IFileNode,
        # or perhaps creating a new subtree from the contents)

        # maybe include parent_is_mutable?
        assert isinstance(serialized, str)
        prefix, body = serialized.split(":", 2)

        for node_class in all_node_types:
            if prefix == node_class.prefix:
                node = node_class()
                node.populate_node(body, self)
                return node
        raise RuntimeError("unable to handle node type '%s'" % prefix)

@@ -1,273 +0,0 @@

from cStringIO import StringIO
from zope.interface import implements
from twisted.internet import defer

from allmydata.filetree.interfaces import ISubTree, INodeMaker
from allmydata.filetree.basenode import BaseDataNode
from allmydata.util import bencode

class LocalFileRedirectionNode(BaseDataNode):
    prefix = "LocalFileRedirection"

    def new(self, handle):
        self.handle = handle
        return self

    def get_base_data(self):
        return self.handle
    def set_base_data(self, data):
        self.handle = data

    def is_leaf_subtree(self):
        return False

class _BaseRedirection(object):
    implements(ISubTree)

    def new(self, child_node):
        self.child_node = child_node
        return self

    def mutation_modifies_parent(self):
        return False

    def get_node_for_path(self, path):
        return ([], self.child_node, path)

    def put_node_at_path(self, path, node):
        assert path == []
        self.child_node = node

    def serialize_subtree_to_file(self, f):
        f.write(self.child_node.serialize_node())

    def _populate_from_data(self, data, node_maker):
        assert INodeMaker(node_maker)
        self.child_node = node_maker.make_node_from_serialized(data)
        return self


class LocalFileRedirection(_BaseRedirection):
    node_class = LocalFileRedirectionNode

    def new(self, handle, child_node):
        self.filename = handle
        return _BaseRedirection.new(self, child_node)

    def populate_from_node(self, node, parent_is_mutable, node_maker, downloader):
        # return a Deferred that fires (with self) when this node is ready
        # for use

        assert isinstance(node, LocalFileRedirectionNode)
        self.filename = node.handle
        # there is a local file which contains a bencoded serialized subtree
        # specification.

        # TODO: will this enable outsiders to cause us to read from arbitrary
        # files? Think about this. It is probably a good idea to restrict the
        # filename to be a single component, and to always put them in a
        # well-known directory that contains nothing else, and maybe make
        # them unguessable.
        f = open(self.filename, "rb")
        data = f.read()
        f.close()
        # note: we don't cache the contents of the file. TODO: consider
        # doing this based upon mtime. It is important that we be able to
        # notice if the file has been changed.
        d = defer.succeed(data)
        d.addCallback(self._populate_from_data, node_maker)
        return d

    def is_mutable(self):
        return True

    def create_node_now(self):
        return LocalFileRedirectionNode().new(self.filename)

    def _update(self):
        f = open(self.filename, "wb")
        self.serialize_subtree_to_file(f)
        f.close()

    def update_now(self, uploader):
        self._update()
        return self.create_node_now()

    def update(self, workqueue):
        # TODO: this happens too early, before earlier items in the workqueue
        # have been executed. This might not be a problem, if our update()
        # method isn't actually called until everything earlier has been
        # executed anyways. Need to ponder this.
        self._update()
        return None

class VdriveRedirectionNode(LocalFileRedirectionNode):
    prefix = "VdriveRedirection"

class VdriveRedirection(_BaseRedirection):
    node_class = VdriveRedirectionNode

    def new(self, handle):
        self.handle = handle
        return self

    def populate_from_node(self, node, parent_is_mutable, node_maker, downloader):
        # this specifies a handle for which the Vdrive maintains a serialized
        # subtree specification.
        assert isinstance(node, VdriveRedirectionNode)
        self.handle = node.handle

        # TODO: vdrive?
        d = self._vdrive.callRemote("lookup_handle", self.handle)
        d.addCallback(self._populate_from_data, node_maker)
        return d

    def is_mutable(self):
        return True # TODO: maybe, maybe not

    def create_node_now(self):
        return VdriveRedirectionNode().new(self.handle)

    def update_now(self, uploader):
        f = StringIO()
        self.serialize_subtree_to_file(f)
        d = self._vdrive.callRemote("set_handle", self.handle, f.getvalue())
        def _done(res):
            return self.create_node_now()
        d.addCallback(_done)
        return d

    def update(self, workqueue):
        f, filename = workqueue.create_tempfile(".tovdrive")
        self.serialize_subtree_to_file(f)
        f.close()
        workqueue.add_vdrive_update_handle(self.handle, filename)
        workqueue.add_delete_tempfile(filename)
        return None

class VdriveOrLocalFileRedirectionNode(LocalFileRedirectionNode):
    prefix = "VdriveOrLocalFileRedirection"

class VdriveOrLocalFileRedirection(_BaseRedirection):
    node_class = VdriveOrLocalFileRedirectionNode

    def new(self, handle, child_node):
        self.handle = handle
        self.version = 0
        self.child_node = child_node
        # TODO
        return self

    def populate_from_node(self, node, parent_is_mutable, node_maker, downloader):
        # there is a local file which contains a bencoded serialized
        # subtree specification. The vdrive also has a copy. Whomever has
        # the higher version number wins.
        assert isinstance(node, VdriveOrLocalFileRedirectionNode)
        self.filename = self.handle = node.handle

        f = open(self.filename, "rb")
        #local_version, local_data = bencode.bdecode(f.read())
        local_version_and_data = f.read()
        f.close()

        # TODO: vdrive?
        # TODO: pubsub so we can cache the vdrive's results
        d = self._vdrive.callRemote("lookup_handle", self.handle)
        d.addCallback(self._choose_winner, local_version_and_data)
        d.addCallback(self._populate_from_data, node_maker)
        return d

    def _choose_winner(self, vdrive_version_and_data, local_version_and_data):
        vdrive_version, vdrive_data = bencode.bdecode(vdrive_version_and_data)
        local_version, local_data = bencode.bdecode(local_version_and_data)
        if vdrive_version > local_version:
            data = vdrive_data
            self.version = vdrive_version
        else:
            data = local_data
            self.version = local_version
        # NOTE: two layers of bencoding here, TODO
        return data

    def is_mutable(self):
        return True

    def create_node_now(self):
        return VdriveOrLocalFileRedirectionNode().new(self.handle)

    def _update(self):
        self.version += 1
        f = StringIO()
        self.serialize_subtree_to_file(f)
        version_and_data = bencode.bencode((self.version, f.getvalue()))
        return version_and_data

    def update_now(self, uploader):
        version_and_data = self._update()
        f = open(self.filename, "wb")
        f.write(version_and_data)
        f.close()

        d = self._vdrive.callRemote("set_handle", self.handle, version_and_data)
        def _done(res):
            return self.create_node_now()
        d.addCallback(_done)
        return d

    def update(self, workqueue):
        version_and_data = self._update()
        # TODO: this may have the same problem as LocalFileRedirection.update
        f = open(self.filename, "wb")
        f.write(version_and_data)
        f.close()

        f, filename = workqueue.create_tempfile(".tovdrive")
        self.serialize_subtree_to_file(f)
        f.close()
        workqueue.add_vdrive_update_handle(self.handle, filename)
        workqueue.add_delete_tempfile(filename)
        return None

class HTTPRedirectionNode(BaseDataNode):
    prefix = "HTTPRedirection"

    def new(self, url):
        self.url = url
        return self

    def get_base_data(self):
        return self.url
    def set_base_data(self, data):
        self.url = data

    def is_leaf_subtree(self):
        return False

class HTTPRedirection(_BaseRedirection):
    node_class = HTTPRedirectionNode

    def new(self, url):
        self.url = url

    def populate_from_node(self, node, parent_is_mutable, node_maker, downloader):
        # this specifies a URL at which there is a bencoded serialized
        # subtree specification.
        self.url = node.url
        assert isinstance(node, HTTPRedirectionNode)
        from twisted.web import client
        d = client.getPage(self.url)
        d.addCallback(self._populate_from_data, node_maker)
        return d

    def is_mutable(self):
        return False

    def create_node_now(self):
        return HTTPRedirectionNode().new(self.url)

    def update_now(self, uploader):
        raise RuntimeError("HTTPRedirection is not mutable")

    def update(self, workqueue):
        raise RuntimeError("HTTPRedirection is not mutable")
@ -1,352 +0,0 @@
|
||||
|
||||
import os.path
|
||||
from zope.interface import implements
|
||||
from twisted.internet import defer
|
||||
from allmydata.filetree import directory, redirect
|
||||
from allmydata.filetree.interfaces import (
|
||||
IVirtualDrive, ISubTreeMaker,
|
||||
INodeMaker, INode, ISubTree, IFileNode, IDirectoryNode,
|
||||
NoSuchDirectoryError, NoSuchChildError, PathAlreadyExistsError,
|
||||
PathDoesNotExistError,
|
||||
)
|
||||
from allmydata.interfaces import IDownloader, IUploader, IWorkQueue
|
||||
|
||||
from allmydata.filetree.nodemaker import NodeMaker
|
||||
|
||||
all_openable_subtree_types = [
|
||||
directory.LocalFileSubTree,
|
||||
directory.CHKDirectorySubTree,
|
||||
directory.SSKDirectorySubTree,
|
||||
redirect.LocalFileRedirection,
|
||||
redirect.VdriveRedirection,
|
||||
redirect.VdriveOrLocalFileRedirection,
|
||||
redirect.HTTPRedirection,
|
||||
]
|
||||
|
||||
class SubTreeMaker(object):
|
||||
implements(ISubTreeMaker)
|
||||
|
||||
def __init__(self, downloader):
|
||||
# this is created with everything it might need to download and
|
||||
# create subtrees. That means a Downloader and in the future (?) a
|
||||
# reference to the vdrive.
|
||||
assert IDownloader(downloader)
|
||||
self._downloader = downloader
|
||||
self._node_maker = NodeMaker()
|
||||
self._cache = {}
|
||||
|
||||
def _create(self, node, parent_is_mutable):
|
||||
assert INode(node)
|
||||
assert INodeMaker(self._node_maker)
|
||||
for subtree_class in all_openable_subtree_types:
|
||||
if isinstance(node, subtree_class.node_class):
|
||||
subtree = subtree_class()
|
||||
d = subtree.populate_from_node(node,
|
||||
parent_is_mutable,
|
||||
self._node_maker,
|
||||
self._downloader)
|
||||
return d
|
||||
raise RuntimeError("unable to handle subtree specification '%s'"
|
||||
% (node,))
|
||||
|
||||
def make_subtree_from_node(self, node, parent_is_mutable):
|
||||
assert INode(node)
|
||||
assert not isinstance(node, IDirectoryNode)
|
||||
|
||||
# is it in cache? To check this we need to use the node's serialized
|
||||
# form, since nodes are instances and don't compare by value
|
||||
node_s = node.serialize_node()
|
||||
if node_s in self._cache:
|
||||
return defer.succeed(self._cache[node_s])
|
||||
|
||||
d = defer.maybeDeferred(self._create, node, parent_is_mutable)
|
||||
d.addCallback(self._add_to_cache, node_s)
|
||||
return d
|
||||
|
||||
def _add_to_cache(self, subtree, node_s):
|
||||
self._cache[node_s] = subtree
|
||||
# TODO: remove things from the cache eventually
|
||||
return subtree
|
||||
|
||||
|
||||
|
||||
class VirtualDrive(object):
|
||||
implements(IVirtualDrive)
|
||||
debug = False
|
||||
|
||||
def __init__(self, workqueue, downloader, uploader, root_node):
|
||||
assert IWorkQueue(workqueue)
|
||||
assert IDownloader(downloader)
|
||||
assert IUploader(uploader)
|
||||
assert INode(root_node)
|
||||
self.workqueue = workqueue
|
||||
workqueue.set_vdrive(self)
|
||||
workqueue.set_uploader(uploader)
|
||||
self._downloader = downloader
|
||||
self._uploader = uploader
|
||||
self.root_node = root_node
|
||||
self.subtree_maker = SubTreeMaker(downloader)
|
||||
|
||||
# these methods are used to walk through our subtrees
|
||||
|
||||
def _get_root(self):
|
||||
return self.subtree_maker.make_subtree_from_node(self.root_node, False)
|
||||
|
||||
def _get_node(self, path):
|
||||
d = self._get_closest_node(path)
|
||||
def _got_node((node, remaining_path)):
|
||||
if remaining_path:
|
||||
return None
|
||||
return node
|
||||
d.addCallback(_got_node)
|
||||
return d
|
||||
|
||||
def _get_closest_node(self, path):
|
||||
"""Find the closest directory node parent for the desired path.
|
||||
Return a Deferred that fires with (node, remaining_path).
|
||||
"""
|
||||
d = self._get_root()
|
||||
d.addCallback(self._get_closest_node_1, path)
|
||||
return d
|
||||
|
||||
def _get_closest_node_1(self, subtree, path):
|
||||
(found_path, node, remaining_path) = subtree.get_node_for_path(path)
|
||||
parent_is_mutable = subtree.is_mutable()
|
||||
if IDirectoryNode.providedBy(node) or node.is_leaf_subtree():
|
||||
# traversal done
|
||||
return (node, remaining_path)
|
||||
# otherwise, we must open and recurse into a new subtree
|
||||
d = self.subtree_maker.make_subtree_from_node(node, parent_is_mutable)
|
||||
def _opened(next_subtree):
|
||||
next_subtree = ISubTree(next_subtree)
|
||||
return self._get_closest_node_1(next_subtree, remaining_path)
|
||||
d.addCallback(_opened)
|
||||
return d
|
||||
|
||||
def _get_directory(self, path):
|
||||
"""Return a Deferred that fires with the IDirectoryNode at the given
|
||||
path, or raise NoSuchDirectoryError if there is no such node. This
|
||||
will traverse subtrees as necessary."""
|
||||
d = self._get_node(path)
|
||||
def _got_directory(node):
|
||||
if not node:
|
||||
raise NoSuchDirectoryError
|
||||
assert IDirectoryNode(node)
|
||||
return node
|
||||
d.addCallback(_got_directory)
|
||||
return d
|
||||
|
||||
def _get_file(self, path):
|
||||
"""Return a Deferred that files with an IFileNode at the given path,
|
||||
or raises a NoSuchDirectoryError or NoSuchChildError, or some other
|
||||
error if the path refers to something other than a file."""
|
||||
d = self._get_node(path)
|
||||
def _got_node(node):
|
||||
if not node:
|
||||
raise NoSuchChildError
|
||||
return IFileNode(node)
|
||||
d.addCallback(_got_node)
|
||||
return d
|
||||
|
||||
def _get_file_uri(self, path):
|
||||
d = self._get_file(path)
|
||||
d.addCallback(lambda filenode: filenode.get_uri())
|
||||
return d
|
||||
|
||||
def _child_should_not_exist(self, path):
|
||||
d = self._get_node(path)
|
||||
def _got_node(node):
|
||||
if node is not None:
|
||||
raise PathAlreadyExistsError
|
||||
d.addCallback(_got_node)
|
||||
return d
|
||||
|
||||
def _child_should_exist(self, path):
|
||||
d = self._get_node(path)
|
||||
def _got_node(node):
|
||||
if node is None:
|
||||
raise PathDoesNotExistError
|
||||
d.addCallback(_got_node)
|
||||
return d
|
||||
|
||||
def _get_closest_node_and_prepath(self, path):
|
||||
d = self._get_closest_node(path)
|
||||
def _got_closest((node, remaining_path)):
|
||||
prepath_len = len(path) - len(remaining_path)
|
||||
prepath = path[:prepath_len]
|
||||
assert path[prepath_len:] == remaining_path, "um, path=%s, prepath=%s, prepath_len=%d, remaining_path=%s" % (path, prepath, prepath_len, remaining_path)
|
||||
return (prepath, node, remaining_path)
|
||||
d.addCallback(_got_closest)
|
||||
return d
|
||||
|
||||
def get_subtrees_for_path(self, path):
|
||||
# compute a list of [(subtree1, subpath1), ...], which represents
|
||||
# which parts of 'path' traverse which subtrees. This can be used to
|
||||
# present the virtual drive to the user in a form that includes
|
||||
# redirection nodes (which do not consume path segments), or to
|
||||
# figure out which subtrees need to be updated when the identity of a
|
||||
# lower subtree (i.e. CHK) is changed.
|
||||
|
||||
# TODO: it might be useful to add some items to the return value.
|
||||
# Like if there is a node already present at that path, to return it.
|
||||
d = self._get_root()
|
||||
results = []
|
||||
d.addCallback(self._get_subtrees_for_path_1, results, path)
|
||||
return d
|
||||
|
||||
def _get_subtrees_for_path_1(self, subtree, results, path):
|
||||
(found_path, node, remaining_path) = subtree.get_node_for_path(path)
|
||||
if IDirectoryNode.providedBy(node):
|
||||
# traversal done. We are looking at the final subtree, and the
|
||||
# entire path (found_path + remaining_path) will live in here.
|
||||
r = (subtree, (found_path + remaining_path))
|
||||
results.append(r)
|
||||
return results
|
||||
if node.is_leaf_subtree():
|
||||
# for this assert to fail, we found a File or something where we
|
||||
# were expecting to find another subdirectory.
|
||||
assert len(remaining_path) == 0
|
||||
results.append((subtree, found_path))
|
||||
return results
|
||||
# otherwise we must open and recurse into a new subtree
|
||||
results.append((subtree, found_path))
|
||||
parent_is_mutable = subtree.is_mutable()
|
||||
d = self.subtree_maker.make_subtree_from_node(node, parent_is_mutable)
|
||||
def _opened(next_subtree):
|
||||
next_subtree = ISubTree(next_subtree)
|
||||
return self._get_subtrees_for_path_1(next_subtree, results,
|
||||
remaining_path)
|
||||
d.addCallback(_opened)
|
||||
return d
|
||||
|
||||
|
||||
# these are called by the workqueue
|
||||
|
||||
def addpath_with_node(self, path, new_node):
|
||||
new_node_boxname = self.workqueue.create_boxname(new_node)
|
||||
self.workqueue.add_delete_box(new_node_boxname)
|
||||
return self.addpath(path, new_node_boxname)
|
||||
|
||||
def addpath(self, path, new_node_boxname):
|
||||
# this adds a block of steps to the workqueue which, when complete,
|
||||
# will result in the new_node existing in the virtual drive at
|
||||
# 'path'.
|
||||
|
||||
# First we figure out which subtrees are involved
|
||||
d = self.get_subtrees_for_path(path)
|
||||
|
||||
        # then we walk through them from the bottom, arranging to modify them
        # as necessary
        def _got_subtrees(subtrees, new_node_boxname):
            for (subtree, subpath) in reversed(subtrees):
                if self.debug:
                    print "SUBTREE", subtree, subpath
                assert subtree.is_mutable()
                must_update = subtree.mutation_modifies_parent()
                subtree_node = subtree.create_node_now()
                new_subtree_boxname = None
                if must_update:
                    new_subtree_boxname = self.workqueue.create_boxname()
                    self.workqueue.add_delete_box(new_subtree_boxname)
                    self.workqueue.add_modify_subtree(subtree_node, subpath,
                                                      new_node_boxname,
                                                      new_subtree_boxname)
                    # the box filled by the modify_subtree will be propagated
                    # upwards
                    new_node_boxname = new_subtree_boxname
                else:
                    # the buck stops here
                    self.workqueue.add_modify_subtree(subtree_node, subpath,
                                                      new_node_boxname)
                    return
        d.addCallback(_got_subtrees, new_node_boxname)
        return d

    def deletepath(self, path):
        if self.debug:
            print "DELETEPATH(%s)" % (path,)
        return self.addpath(path, None)

    def modify_subtree(self, subtree_node, localpath, new_node,
                       new_subtree_boxname=None):
        # TODO: I'm lying here, we don't know who the parent is, so we can't
        # really say whether they're mutable or not. But we're pretty sure
        # that the new subtree is supposed to be mutable, because we asserted
        # that earlier (although I suppose perhaps someone could change a
        # VdriveRedirection or an SSK file while we're offline in the middle
        # of our workqueue..). Tell the new subtree that their parent is
        # mutable so we can be sure it will believe that it itself is
        # mutable.
        parent_is_mutable = True
        d = self.subtree_maker.make_subtree_from_node(subtree_node,
                                                      parent_is_mutable)
        def _got_subtree(subtree):
            assert subtree.is_mutable()
            if new_node:
                subtree.put_node_at_path(localpath, new_node)
            else:
                subtree.delete_node_at_path(localpath)
            return subtree.update_now(self._uploader)
        d.addCallback(_got_subtree)
        if new_subtree_boxname:
            d.addCallback(lambda new_subtree_node:
                          self.workqueue.write_to_box(new_subtree_boxname,
                                                      new_subtree_node))
        return d


    # these are user-visible

    def list(self, path):
        assert isinstance(path, list)
        d = self._get_directory(path)
        d.addCallback(lambda node: node.list())
        return d

    def download(self, path, target):
        # TODO: does this mean download it right now? or schedule it in the
        # workqueue for eventual download? should we add download steps to
        # the workqueue?
        assert isinstance(path, list)
        d = self._get_file_uri(path)
        d.addCallback(lambda uri: self._downloader.download(uri, target))
        return d

    def download_as_data(self, path):
        # TODO: this is kind of goofy.. think of a better download API that
        # is appropriate for this class
        from allmydata import download
        target = download.Data()
        return self.download(path, target)

    def upload_data(self, path, data):
        assert isinstance(path, list)
        f, tempfilename = self.workqueue.create_tempfile()
        f.write(data)
        f.close()
        boxname = self.workqueue.create_boxname()
        self.workqueue.add_upload_chk(tempfilename, boxname)
        self.workqueue.add_addpath(boxname, path)
        self.workqueue.add_delete_box(boxname)
        self.workqueue.add_delete_tempfile(tempfilename)

    def upload(self, path, filename):
        assert isinstance(path, list)
        filename = os.path.abspath(filename)
        boxname = self.workqueue.create_boxname()
        self.workqueue.add_upload_chk(filename, boxname)
        self.workqueue.add_addpath(boxname, path)
        self.workqueue.add_delete_box(boxname)

    def delete(self, path):
        assert isinstance(path, list)
        self.workqueue.add_deletepath(path)

    def add_node(self, path, node):
        assert isinstance(path, list)
        assert INode(node)
        assert not IDirectoryNode.providedBy(node)
        boxname = self.workqueue.create_boxname(node)
        self.workqueue.add_addpath(boxname, path)
        self.workqueue.add_delete_box(boxname)
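
A minimal usage sketch of the user-visible methods above, assuming a VirtualDrive
instance `v` wired to a workqueue, uploader, and downloader the way the tests
further below construct one. Note that these calls only queue workqueue steps;
nothing touches the grid until the queue is flushed.

    # queue an upload of literal data under a two-level path
    v.upload_data(["docs", "hello.txt"], "hello world\n")
    # flush() dispatches the queued upload_chk / addpath / delete_box steps
    d = v.workqueue.flush()
    # afterwards the new entry is visible and downloadable
    d.addCallback(lambda res: v.list(["docs"]))       # dict with key "hello.txt"
    d.addCallback(lambda res: v.download_as_data(["docs", "hello.txt"]))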

@ -1,667 +0,0 @@

from zope.interface import implements
from twisted.trial import unittest
from twisted.internet import defer
from allmydata.interfaces import IDownloader, IUploader
#from allmydata.filetree.directory import (ImmutableDirectorySubTree,
#                                          SubTreeNode,
#                                          CHKDirectorySubTree)
#from allmydata.filetree.specification import (CHKFileSpecification,
#                                              CHKDirectorySpecification)
from allmydata import workqueue
from cStringIO import StringIO

class FakeGrid(object):
    implements(IDownloader, IUploader)

"""
class FakeOpener(object):
    implements(IOpener)
    def __init__(self, objects={}):
        self.objects = objects
    def open(self, subtree_specification, parent_is_mutable):
        #print "open", subtree_specification, subtree_specification.serialize(), parent_is_mutable
        return defer.succeed(self.objects[subtree_specification.serialize()])


class FakeWorkQueue(object):
    implements(workqueue.IWorkQueue)
    def __init__(self):
        self.first_commands = []
        self.last_commands = []
        self.tempfile_number = 0
        self.boxname_number = 0
    def dump_commands(self):
        return self.first_commands + self.last_commands
    def clear_commands(self):
        self.first_commands = []
        self.last_commands = []

    def create_tempfile(self, suffix=""):
        self.tempfile_number += 1
        self.first_commands.append("create_tempfile-%d" % self.tempfile_number)
        return (StringIO(), "dummy_filename-%d" % self.tempfile_number)
    def create_boxname(self):
        self.boxname_number += 1
        self.first_commands.append("create_boxname-%d" % self.boxname_number)
        return "dummy_boxname-%d" % self.boxname_number
    def add_upload_chk(self, source_filename, stash_uri_in_boxname):
        self.first_commands.append(("upload_chk", source_filename,
                                    stash_uri_in_boxname))
    def add_upload_ssk(self, source_filename, write_capability,
                       previous_version):
        self.first_commands.append(("upload_ssk", source_filename,
                                    write_capability, previous_version))
    def add_retain_ssk(self, read_capability):
        self.last_commands.append(("retain_ssk", read_capability))
    def add_unlink_ssk(self, write_capability):
        self.last_commands.append(("unlink_ssk", write_capability))
    def add_retain_uri_from_box(self, boxname):
        self.last_commands.append(("retain_uri_from_box", boxname))
    def add_addpath(self, boxname, path):
        self.first_commands.append(("addpath", boxname, path))
    def add_unlink_uri(self, uri):
        self.last_commands.append(("unlink_uri", uri))
    def add_delete_tempfile(self, filename):
        self.first_commands.append(("delete_tempfile", filename))
    def add_delete_box(self, boxname):
        self.last_commands.append(("delete_box", boxname))


class OneSubTree(unittest.TestCase):
    def test_create_empty_immutable(self):
        st = ImmutableDirectorySubTree()
        st.new()
        self.failIf(st.is_mutable())
        d = st.get([], FakeOpener())
        def _got_root(root):
            self.failUnless(IDirectoryNode.providedBy(root))
            self.failUnlessEqual(root.list(), [])
        d.addCallback(_got_root)
        return d

    def test_immutable_1(self):
        st = ImmutableDirectorySubTree()
        st.new()
        # now populate it (by modifying the internal data structures) with
        # some internal directories
        one = SubTreeNode(st)
        two = SubTreeNode(st)
        three = SubTreeNode(st)
        st.root.node_children["one"] = one
        st.root.node_children["two"] = two
        two.node_children["three"] = three

        # now examine it
        self.failIf(st.is_mutable())
        o = FakeOpener()
        d = st.get([], o)
        def _got_root(root):
            self.failUnless(IDirectoryNode.providedBy(root))
            self.failUnlessEqual(root.list(), ["one", "two"])
        d.addCallback(_got_root)
        d.addCallback(lambda res: st.get(["one"], o))
        def _got_one(_one):
            self.failUnlessIdentical(one, _one)
            self.failUnless(IDirectoryNode.providedBy(_one))
            self.failUnlessEqual(_one.list(), [])
        d.addCallback(_got_one)
        d.addCallback(lambda res: st.get(["two"], o))
        def _got_two(_two):
            self.failUnlessIdentical(two, _two)
            self.failUnless(IDirectoryNode.providedBy(_two))
            self.failUnlessEqual(_two.list(), ["three"])
        d.addCallback(_got_two)
        d.addCallback(lambda res: st.get(["two", "three"], o))
        def _got_three(_three):
            self.failUnlessIdentical(three, _three)
            self.failUnless(IDirectoryNode.providedBy(_three))
            self.failUnlessEqual(_three.list(), [])
        d.addCallback(_got_three)
        d.addCallback(lambda res: st.get(["missing"], o))
        d.addCallback(self.failUnlessEqual, None)
        return d

    def test_mutable_1(self):
        o = FakeOpener()
        wq = FakeWorkQueue()
        st = MutableCHKDirectorySubTree()
        st.new()
        st.set_uri(None)
        self.failUnless(st.is_mutable())
        d = st.get([], o)
        def _got_root(root):
            self.failUnless(IDirectoryNode.providedBy(root))
            self.failUnlessEqual(root.list(), [])
        d.addCallback(_got_root)
        file_three = CHKFileSpecification()
        file_three.set_uri("file_three_uri")
        d.addCallback(lambda res: st.add(["one", "two", "three"], file_three,
                                         o, wq))
        d.addCallback(lambda res: st.get(["one"], o))
        def _got_one(one):
            self.failUnless(IDirectoryNode.providedBy(one))
            self.failUnlessEqual(one.list(), ["two"])
        d.addCallback(_got_one)
        d.addCallback(lambda res: st.get(["one", "two"], o))
        def _got_two(two):
            self.failUnless(IDirectoryNode.providedBy(two))
            self.failUnlessEqual(two.list(), ["three"])
            self.failUnlessIdentical(two.child_specifications["three"],
                                     file_three)
        d.addCallback(_got_two)
        return d

    def test_addpath(self):
        o = FakeOpener()
        wq = FakeWorkQueue()
        st = MutableCHKDirectorySubTree()
        st.new()
        st.set_uri(None)
        file_three = CHKFileSpecification()
        file_three.set_uri("file_three_uri")
        d = st.add(["one", "two", "three"], file_three, o, wq)
        def _done(res):
            expected = [
                "create_tempfile-1",
                "create_boxname-1",
                ('upload_chk', 'dummy_filename-1', 'dummy_boxname-1'),
                ('delete_tempfile', 'dummy_filename-1'),
                ('addpath', 'dummy_boxname-1', []),
                ('retain_uri_from_box', 'dummy_boxname-1'),
                ('delete_box', 'dummy_boxname-1'),
                ('unlink_uri', None),
                ]
            self.failUnlessEqual(wq.dump_commands(), expected)
            #print
            #for c in wq.dump_commands():
            #    print c
        d.addCallback(_done)
        return d

    def test_serialize(self):
        st = ImmutableDirectorySubTree()
        st.new()
        one = SubTreeNode(st)
        two = SubTreeNode(st)
        three = SubTreeNode(st)
        st.root.node_children["one"] = one
        st.root.node_children["two"] = two
        two.node_children["three"] = three
        file_four = CHKFileSpecification()
        file_four.set_uri("file_four_uri")
        two.child_specifications["four"] = file_four
        data = st.serialize()
        st_new = ImmutableDirectorySubTree()
        st_new.unserialize(data)

        st_four = ImmutableDirectorySubTree()
        st_four.new()
        st_four.root.node_children["five"] = SubTreeNode(st_four)

        o = FakeOpener({("CHK-File", "file_four_uri"): st_four})
        d = st.get([], o)
        def _got_root(root):
            self.failUnless(IDirectoryNode.providedBy(root))
            self.failUnlessEqual(root.list(), ["one", "two"])
        d.addCallback(_got_root)
        d.addCallback(lambda res: st.get(["two"], o))
        def _got_two(_two):
            self.failUnless(IDirectoryNode.providedBy(_two))
            self.failUnlessEqual(_two.list(), ["four", "three"])
        d.addCallback(_got_two)

        d.addCallback(lambda res: st.get(["two", "four"], o))
        def _got_four(_four):
            self.failUnless(IDirectoryNode.providedBy(_four))
            self.failUnlessEqual(_four.list(), ["five"])
        d.addCallback(_got_four)

class MultipleSubTrees(unittest.TestCase):

    def test_open(self):
        st = ImmutableDirectorySubTree()
        st.new()
        # populate it with some internal directories and child links and see
        # if we can follow them
        one = SubTreeNode(st)
        two = SubTreeNode(st)
        three = SubTreeNode(st)
        st.root.node_children["one"] = one
        st.root.node_children["two"] = two
        two.node_children["three"] = three

    def test_addpath(self):
        wq = FakeWorkQueue()
        st1 = MutableCHKDirectorySubTree()
        st1.new()
        st1.set_uri(None)
        one = SubTreeNode(st1)
        two = SubTreeNode(st1)
        st1.root.node_children["one"] = one
        one.node_children["two"] = two
        three = CHKDirectorySpecification()
        three.set_uri("dir_three_uri")
        two.child_specifications["three"] = three

        st2 = MutableCHKDirectorySubTree()
        st2.new()
        st2.set_uri(None)
        four = SubTreeNode(st2)
        five = SubTreeNode(st2)
        st2.root.node_children["four"] = four
        four.node_children["five"] = five

        file_six = CHKFileSpecification()
        file_six.set_uri("file_six_uri")

        o = FakeOpener({("CHK-Directory", "dir_three_uri"): st2})

        d = defer.succeed(None)
        d.addCallback(lambda res:
                      st1.get(["one", "two", "three", "four", "five"], o))
        def _got_five(res):
            self.failUnless(IDirectoryNode.providedBy(res))
            self.failUnlessIdentical(res, five)
        d.addCallback(_got_five)

        d.addCallback(lambda res:
                      st1.add(["one", "two", "six"],
                              file_six, o, wq))
        def _done(res):
            expected = [
                "create_tempfile-1",
                "create_boxname-1",
                ('upload_chk', 'dummy_filename-1', 'dummy_boxname-1'),
                ('delete_tempfile', 'dummy_filename-1'),
                # one/two/six only modifies the top-most CHKDirectory, so
                # the addpath that gets scheduled is targeted at the root
                ('addpath', 'dummy_boxname-1', []),
                ('retain_uri_from_box', 'dummy_boxname-1'),
                ('delete_box', 'dummy_boxname-1'),
                ('unlink_uri', None),
                ]
            self.failUnlessEqual(wq.dump_commands(), expected)
            wq.clear_commands()
        d.addCallback(_done)

        d.addCallback(lambda res:
                      st1.add(["one", "two", "three", "four", "six"],
                              file_six, o, wq))
        def _done2(res):
            expected = [
                "create_tempfile-2",
                "create_boxname-2",
                ('upload_chk', 'dummy_filename-2', 'dummy_boxname-2'),
                ('delete_tempfile', 'dummy_filename-2'),
                # one/two/three/four/six modifies the lower CHKDirectory, so
                # we schedule an addpath of the link that points from the
                # upper CHKDirectory to the lower one (at one/two/three).
                ('addpath', 'dummy_boxname-2', ["one", "two", "three"]),
                ('retain_uri_from_box', 'dummy_boxname-2'),
                ('delete_box', 'dummy_boxname-2'),
                ('unlink_uri', None),
                ]
            self.failUnlessEqual(wq.dump_commands(), expected)
        d.addCallback(_done2)

        return d

del OneSubTree
del MultipleSubTrees

class Redirect(unittest.TestCase):
    pass
"""

import os.path
from twisted.python.failure import Failure
from allmydata.filetree import directory, redirect, vdrive
from allmydata.filetree.interfaces import (ISubTree, INode, IDirectoryNode,
                                           IFileNode, NoSuchDirectoryError,
                                           NoSuchChildError)
from allmydata.filetree.file import CHKFileNode
from allmydata import upload
from allmydata.interfaces import IDownloader
from allmydata.util import bencode

class Utils(unittest.TestCase):
    def test_in_pairs(self):
        l = range(8)
        pairs = list(directory.in_pairs(l))
        self.failUnlessEqual(pairs, [(0,1), (2,3), (4,5), (6,7)])

class FakeGrid(object):
    implements(IDownloader, IUploader)
    debug = False

    def __init__(self):
        self.files = {}

    def upload(self, uploadable):
        uri = "stub-uri-%d" % len(self.files)
        if self.debug:
            print "FakeGrid.upload -> %s" % uri
        assert upload.IUploadable.providedBy(uploadable)
        f = uploadable.get_filehandle()
        data = f.read()
        uploadable.close_filehandle(f)
        self.files[uri] = data
        return defer.succeed(uri)

    def upload_filename(self, filename):
        if self.debug:
            print "FakeGrid.upload_filename(%s)" % filename
        return self.upload(upload.FileName(filename))

    def upload_data(self, data):
        if self.debug:
            print "FakeGrid.upload_data(%s)" % data
        return self.upload(upload.Data(data))

    def download(self, uri, target):
        if self.debug:
            print "FakeGrid.download(%s)" % uri
        target.open()
        target.write(self.files[uri])
        target.close()
        return defer.maybeDeferred(target.finish)
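
A short sketch of the round-trip the tests below rely on, assuming
allmydata.download is imported so that download.Data() can serve as the
in-memory download target:

    grid = FakeGrid()
    d = grid.upload_data("some bytes")        # fires with a stub URI
    def _fetch(uri):
        target = download.Data()
        # download() replays the stored bytes into the target; the returned
        # Deferred fires with target.finish(), i.e. the downloaded data
        return grid.download(uri, target)
    d.addCallback(_fetch)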


class VDrive(unittest.TestCase):

    def makeVirtualDrive(self, basedir, root_node=None, grid=None):
        wq = workqueue.WorkQueue(os.path.join("test_filetree",
                                              "VDrive",
                                              basedir, "1.workqueue"))
        if grid:
            assert IUploader.providedBy(grid)
            assert IDownloader.providedBy(grid)
            dl = ul = grid
        else:
            dl = ul = FakeGrid()
        if not root_node:
            root_node = directory.LocalFileSubTreeNode()
            root_node.new("rootdirtree.save")
        v = vdrive.VirtualDrive(wq, dl, ul, root_node)
        return v

    def makeLocalTree(self, basename):
        # create a LocalFileRedirection pointing at a LocalFileSubTree.
        # Returns a VirtualDrive instance.
        topdir = directory.LocalFileSubTree().new("%s-dirtree.save" % basename)
        topdir.update_now(None)
        root = redirect.LocalFileRedirection().new("%s-root" % basename,
                                                   topdir.create_node_now())
        root.update_now(None)
        v = self.makeVirtualDrive("%s-vdrive" % basename,
                                  root.create_node_now())
        return v

    def makeCHKTree(self, basename):
        # create a LocalFileRedirection pointing at a CHKDirectorySubTree.
        # Returns a VirtualDrive instance.
        grid = FakeGrid()
        topdir = directory.CHKDirectorySubTree().new()
        d = topdir.update_now(grid)
        def _updated(topnode):
            root = redirect.LocalFileRedirection()
            root.new("%s-root" % basename, topnode)
            return root.update_now(grid)
        d.addCallback(_updated)
        d.addCallback(lambda rootnode:
                      self.makeVirtualDrive("%s-vdrive" % basename,
                                            rootnode, grid))
        return d

    def failUnlessListsAreEqual(self, list1, list2):
        self.failUnlessEqual(sorted(list1), sorted(list2))

    def failUnlessContentsAreEqual(self, c1, c2):
        c1a = dict([(k,v.serialize_node()) for k,v in c1.items()])
        c2a = dict([(k,v.serialize_node()) for k,v in c2.items()])
        self.failUnlessEqual(c1a, c2a)

    def testDirectory(self):
        stm = vdrive.SubTreeMaker(FakeGrid())

        # create an empty directory (stored locally)
        subtree = directory.LocalFileSubTree()
        subtree.new("dirtree.save")
        self.failUnless(ISubTree.providedBy(subtree))

        # get the root IDirectoryNode (which is still empty) and examine it
        (found_path, root, remaining_path) = subtree.get_node_for_path([])
        self.failUnlessEqual(found_path, [])
        self.failUnlessEqual(remaining_path, [])
        self.failUnless(INode.providedBy(root))
        self.failUnless(IDirectoryNode.providedBy(root))
        self.failUnlessListsAreEqual(root.list().keys(), [])
        self.failUnlessIdentical(root.get_subtree(), subtree)

        # now add some children to it
        subdir1 = root.add_subdir("subdir1")
        file1 = CHKFileNode()
        file1.new("uri1")
        root.add("foo.txt", file1)
        self.failUnlessListsAreEqual(root.list().keys(),
                                     ["foo.txt", "subdir1"])
        self.failUnlessIdentical(root.get("foo.txt"), file1)
        subdir1a = root.get("subdir1")
        self.failUnlessIdentical(subdir1, subdir1a)
        del subdir1a
        self.failUnless(IDirectoryNode.providedBy(subdir1))
        self.failUnlessListsAreEqual(subdir1.list().keys(), [])
        self.failUnlessIdentical(subdir1.get_subtree(), subtree)

        subdir2 = subdir1.add_subdir("subdir2")
        subdir3 = subdir2.add_subdir("subdir3")
        subdir4 = subdir2.add_subdir("subdir4")

        subdir2.delete("subdir4")
        self.failUnlessListsAreEqual(subdir2.list().keys(), ["subdir3"])

        del root, subdir1, subdir2, subdir3, subdir4
        # leaving file1 for later use

        # now serialize it and examine the results
        f = StringIO()
        subtree.serialize_subtree_to_file(f)
        data = f.getvalue()
        #print data
        unpacked = bencode.bdecode(data)
        #print unpacked
        del f, data, unpacked

        node = subtree.create_node_now()
        self.failUnless(isinstance(node, directory.LocalFileSubTreeNode))
        node_s = node.serialize_node()
        self.failUnless(isinstance(node_s, str))
        self.failUnless(node_s.startswith("LocalFileDirectory:"))
        self.failUnless("dirtree.save" in node_s)
        del node, node_s

        d = defer.maybeDeferred(subtree.update_now, None)
        def _updated(node):
            # now reconstruct it
            return stm.make_subtree_from_node(node, False)
        d.addCallback(_updated)

        def _opened(new_subtree):
            res = new_subtree.get_node_for_path([])
            (found_path, root, remaining_path) = res
            self.failUnlessEqual(found_path, [])
            self.failUnlessEqual(remaining_path, [])
            self.failUnless(INode.providedBy(root))
            self.failUnless(IDirectoryNode.providedBy(root))
            self.failUnlessListsAreEqual(root.list().keys(),
                                         ["foo.txt", "subdir1"])
            file1a = root.get("foo.txt")
            self.failUnless(INode(file1a))
            self.failUnless(isinstance(file1a, CHKFileNode))
            self.failUnless(IFileNode(file1a))
            self.failUnlessEqual(file1a.get_uri(), "uri1")
            subdir1 = root.get("subdir1")
            subdir2 = subdir1.get("subdir2")
            self.failUnlessListsAreEqual(subdir2.list().keys(), ["subdir3"])
            subdir2.delete("subdir3")
            self.failUnlessListsAreEqual(subdir2.list().keys(), [])
        d.addCallback(_opened)
        return d

    def shouldFail(self, res, expected_failure, which):
        if isinstance(res, Failure):
            res.trap(expected_failure)
        else:
            self.fail("%s was supposed to raise %s, not get '%s'" %
                      (which, expected_failure, res))

    def testVdrive(self):
        v = self.makeLocalTree("vdrive")

        d = v.list([])
        def _listed(contents):
            self.failUnlessEqual(contents, {})
        d.addCallback(_listed)

        child1 = CHKFileNode().new("uri1")
        d.addCallback(lambda res: v.add_node(["a"], child1))
        d.addCallback(lambda res: v.workqueue.flush())
        d.addCallback(lambda res: v.list([]))
        def _listed2(contents):
            self.failUnlessListsAreEqual(contents.keys(), ["a"])
            self.failUnlessContentsAreEqual(contents, {"a": child1})
        d.addCallback(_listed2)
        child2 = CHKFileNode().new("uri2")
        child3 = CHKFileNode().new("uri3")
        d.addCallback(lambda res: v.add_node(["b","c"], child2))
        d.addCallback(lambda res: v.add_node(["b","d"], child3))
        d.addCallback(lambda res: v.workqueue.flush())
        d.addCallback(lambda res: v.list([]))
        def _listed3(contents):
            self.failUnlessListsAreEqual(contents.keys(), ["a","b"])
        d.addCallback(_listed3)
        d.addCallback(lambda res: v.list(["b"]))
        def _listed4(contents):
            self.failUnlessListsAreEqual(contents.keys(), ["c","d"])
            self.failUnlessContentsAreEqual(contents,
                                            {"c": child2, "d": child3})
        d.addCallback(_listed4)

        d.addCallback(lambda res: v._get_file_uri(["b","c"]))
        d.addCallback(self.failUnlessEqual, "uri2")

        d.addCallback(lambda res: v.list(["bogus"]))
        d.addBoth(self.shouldFail, NoSuchDirectoryError, "list(bogus)")

        d.addCallback(lambda res: v._get_file_uri(["b", "bogus"]))
        d.addBoth(self.shouldFail, NoSuchChildError, "_get_file_uri(b/bogus)")

        return d

    def testUpload(self):
        v = self.makeLocalTree("upload")
        filename = "upload1"
        DATA = "here is some data\n"
        f = open(filename, "wb")
        f.write(DATA)
        f.close()

        rc = v.upload(["a","b","upload1"], filename)
        self.failUnlessIdentical(rc, None)

        d = v.workqueue.flush()

        d.addCallback(lambda res: v.list([]))
        d.addCallback(lambda contents:
                      self.failUnlessListsAreEqual(contents.keys(), ["a"]))
        d.addCallback(lambda res: v.list(["a"]))
        d.addCallback(lambda contents:
                      self.failUnlessListsAreEqual(contents.keys(), ["b"]))
        d.addCallback(lambda res: v.list(["a","b"]))
        d.addCallback(lambda contents:
                      self.failUnlessListsAreEqual(contents.keys(),
                                                   ["upload1"]))
        d.addCallback(lambda res: v.download_as_data(["a","b","upload1"]))
        d.addCallback(self.failUnlessEqual, DATA)

        return d

    def testCHKDirUpload(self):
        DATA = "here is some data\n"
        filename = "upload1"
        f = open(filename, "wb")
        f.write(DATA)
        f.close()

        d = defer.maybeDeferred(self.makeCHKTree, "chk-upload")
        def _made(v):
            self.v = v

            rc = v.upload(["a","b","upload1"], filename)
            self.failUnlessIdentical(rc, None)

            return v.workqueue.flush()
        d.addCallback(_made)

        d.addCallback(lambda res: self.v.list([]))
        d.addCallback(lambda contents:
                      self.failUnlessListsAreEqual(contents.keys(), ["a"]))
        d.addCallback(lambda res: self.v.list(["a"]))
        d.addCallback(lambda contents:
                      self.failUnlessListsAreEqual(contents.keys(), ["b"]))
        d.addCallback(lambda res: self.v.list(["a","b"]))
        d.addCallback(lambda contents:
                      self.failUnlessListsAreEqual(contents.keys(),
                                                   ["upload1"]))
        d.addCallback(lambda res: self.v.download_as_data(["a","b","upload1"]))
        d.addCallback(self.failUnlessEqual, DATA)

        return d

    def testCHKDirDelete(self):
        DATA = "here is some data\n"
        filename = "upload1"
        f = open(filename, "wb")
        f.write(DATA)
        f.close()

        d = defer.maybeDeferred(self.makeCHKTree, "chk-delete")
        def _made(v):
            self.v = v
        d.addCallback(_made)

        d.addCallback(lambda r:
                      self.v.upload(["a","b","upload1"], filename))
        d.addCallback(lambda r:
                      self.v.upload_data(["a","b","upload2"], DATA))
        d.addCallback(lambda r:
                      self.v.upload(["a","c","upload3"], filename))
        d.addCallback(lambda r:
                      self.v.workqueue.flush())

        d.addCallback(lambda r: self.v.list([]))
        d.addCallback(lambda contents:
                      self.failUnlessListsAreEqual(contents.keys(), ["a"]))
        d.addCallback(lambda r: self.v.list(["a"]))
        d.addCallback(lambda contents:
                      self.failUnlessListsAreEqual(contents.keys(), ["b","c"]))
        d.addCallback(lambda r: self.v.list(["a","b"]))
        d.addCallback(lambda contents:
                      self.failUnlessListsAreEqual(contents.keys(),
                                                   ["upload1", "upload2"]))
        #d.addCallback(lambda r: self.v.download_as_data(["a","b","upload1"]))
        #d.addCallback(self.failUnlessEqual, DATA)

        # now delete it
        d.addCallback(lambda r: self.v.delete(["a","b","upload2"]))
        d.addCallback(lambda r: self.v.workqueue.flush())
        d.addCallback(lambda r: self.v.list(["a","b"]))
        d.addCallback(lambda contents:
                      self.failUnlessListsAreEqual(contents.keys(),
                                                   ["upload1"]))

        return d

@ -255,61 +255,3 @@ class Test(unittest.TestCase):
                             sorted(expected_keys))
        return res


"""
class Traverse(unittest.TestCase):
    def make_tree(self, basedir):
        os.makedirs(basedir)
        root = LocalDirNode(basedir)
        self.d1 = d1 = root.add_directory("d1")
        self.d2 = d2 = root.add_directory("d2")
        root.add_file("a", "a")
        root.add_file("b", "b")
        d1.add_file("1.a", "1.a")
        d1.add_file("1.b", "1.b")
        d2.add_file("2.a", "2.a")
        d2.add_file("2.b", "2.b")
        return root

    def test_one(self):
        basedir = "test_vdrive/one"
        root = self.make_tree(basedir)
        v = vdrive.VDrive()
        v.set_root(root)

        d = v.get_dir("")
        d.addCallback(lambda dir: self.failUnlessEqual(dir, root))
        d.addCallback(lambda res: v.get_dir("/d1"))
        def _check(dir):
            self.failUnless(isinstance(dir, LocalDirNode))
            self.failUnlessEqual(dir._basedir, self.d1._basedir)
        d.addCallback(_check)

        d.addCallback(lambda res: v.listdir(""))
        d.addCallback(lambda files:
                      self.failUnlessEqual(sorted(files),
                                           ["a", "b", "d1", "d2"]))
        d.addCallback(lambda res: v.listdir("/"))
        d.addCallback(lambda files:
                      self.failUnlessEqual(sorted(files),
                                           ["a", "b", "d1", "d2"]))
        d.addCallback(lambda res: v.listdir("d1"))
        d.addCallback(lambda files:
                      self.failUnlessEqual(sorted(files),
                                           ["1.a", "1.b"]))

        d.addCallback(lambda res: v.make_directory("", "d3"))
        d.addCallback(lambda res: v.listdir(""))
        d.addCallback(lambda files:
                      self.failUnlessEqual(sorted(files),
                                           ["a", "b", "d1", "d2", "d3"]))

        d.addCallback(lambda res: v.make_directory("d2", "d2.1"))
        d.addCallback(lambda res: v.listdir("/d2"))
        d.addCallback(lambda files:
                      self.failUnlessEqual(sorted(files),
                                           ["2.a", "2.b", "d2.1"]))
        return d
del Traverse
"""
@ -1,166 +0,0 @@

import os
from twisted.trial import unittest
from twisted.internet import defer
from allmydata import workqueue
from allmydata.util import idlib
from allmydata.filetree.file import CHKFileNode

class FakeWorkQueue(workqueue.WorkQueue):

    def __init__(self, basedir):
        workqueue.WorkQueue.__init__(self, basedir)
        self.dispatched_steps = []

    def dispatch_step(self, steptype, lines):
        self.dispatched_steps.append((steptype, lines))
        return defer.succeed(None)

class Reuse(unittest.TestCase):
    def wq(self, testname):
        return FakeWorkQueue("test_workqueue/Reuse/%s/workqueue" % testname)

    def testOne(self):
        wq = self.wq("testOne")
        # steps must be retained from one session to the next
        wq.add_upload_chk("source_filename", "box1")
        wq.add_unlink_uri("someuri")
        # files in the tmpdir are not: these are either in the process of
        # being added or in the process of being removed.
        tmpfile = os.path.join(wq.tmpdir, "foo")
        f = open(tmpfile, "wb")
        f.write("foo")
        f.close()
        # files created with create_tempfile *are* retained, however
        f, filename = wq.create_tempfile()
        filename = os.path.join(wq.filesdir, filename)
        f.write("bar")
        f.close()

        del wq
        wq2 = self.wq("testOne")
        steps = wq2.get_all_steps()
        self.failUnlessEqual(steps[0], ("upload_chk",
                                        ["source_filename", "box1"]))
        self.failUnlessEqual(steps[1], ("unlink_uri", ["someuri"]))
        self.failIf(os.path.exists(tmpfile))
        self.failUnless(os.path.exists(filename))


class Items(unittest.TestCase):
    def wq(self, testname):
        return FakeWorkQueue("test_workqueue/Items/%s/workqueue" % testname)

    def testTempfile(self):
        wq = self.wq("testTempfile")
        (f, filename) = wq.create_tempfile(".chkdir")
        self.failUnless(filename.endswith(".chkdir"))
        data = "this is some random data: %s\n" % idlib.b2a(os.urandom(15))
        f.write(data)
        f.close()
        f2 = wq.open_tempfile(filename)
        data2 = f2.read()
        f2.close()
        self.failUnlessEqual(data, data2)

    def testBox(self):
        wq = self.wq("testBox")
        boxname = wq.create_boxname()
        wq.write_to_box(boxname, CHKFileNode().new("uri goes here"))
        out = wq.read_from_box(boxname)
        self.failUnless(isinstance(out, CHKFileNode))
        self.failUnlessEqual(out.get_uri(), "uri goes here")

    def testCHK(self):
        wq = self.wq("testCHK")
        wq.add_upload_chk("source_filename", "box1")
        wq.add_retain_uri_from_box("box1")
        wq.add_addpath("box1", ["home", "warner", "foo.txt"])
        wq.add_delete_box("box1")
        wq.add_unlink_uri("olduri")

        self.failUnlessEqual(wq.count_pending_steps(), 5)
        stepname, steptype, lines = wq.get_next_step()
        self.failUnlessEqual(steptype, "upload_chk")
        steps = wq.get_all_steps()
        self.failUnlessEqual(steps[0], ("upload_chk",
                                        ["source_filename", "box1"]))
        self.failUnlessEqual(steps[1], ("retain_uri_from_box",
                                        ["box1"]))
        self.failUnlessEqual(steps[2], ("addpath",
                                        ["box1", "home", "warner", "foo.txt"]))
        self.failUnlessEqual(steps[3], ("delete_box",
                                        ["box1"]))
        self.failUnlessEqual(steps[4], ("unlink_uri",
                                        ["olduri"]))

    def testCHK2(self):
        wq = self.wq("testCHK2")
        wq.add_upload_chk("source_filename", "box1")
        wq.add_retain_uri_from_box("box1")
        wq.add_addpath("box1", ["home", "warner", "foo.txt"])
        wq.add_delete_box("box1")
        wq.add_unlink_uri("olduri")

        # then this batch happens a bit later
        (f, tmpfilename) = wq.create_tempfile(".chkdir")
        f.write("some data")
        f.close()
        wq.add_upload_chk(os.path.join(wq.filesdir, tmpfilename), "box2")
        wq.add_delete_tempfile(tmpfilename)
        wq.add_retain_uri_from_box("box2")
        wq.add_delete_box("box2")
        wq.add_unlink_uri("oldchk")

        self.failUnlessEqual(wq.count_pending_steps(), 10)
        steps = wq.get_all_steps()

        self.failUnlessEqual(steps[0], ("upload_chk",
                                        ["source_filename", "box1"]))
        self.failUnlessEqual(steps[1], ("retain_uri_from_box",
                                        ["box1"]))
        self.failUnlessEqual(steps[2], ("addpath",
                                        ["box1", "home", "warner", "foo.txt"]))
        self.failUnlessEqual(steps[3],
                             ("upload_chk",
                              [os.path.join(wq.filesdir, tmpfilename),
                               "box2"]))
        self.failUnlessEqual(steps[4],
                             ("retain_uri_from_box", ["box2"]))
        self.failUnlessEqual(steps[5], ("delete_box",
                                        ["box1"]))
        self.failUnlessEqual(steps[6], ("unlink_uri",
                                        ["olduri"]))
        self.failUnlessEqual(steps[7],
                             ("delete_tempfile", [tmpfilename]))
        self.failUnlessEqual(steps[8], ("delete_box", ["box2"]))
        self.failUnlessEqual(steps[9], ("unlink_uri", ["oldchk"]))

    def testRun(self):
        wq = self.wq("testRun")
        wq.add_upload_chk("source_filename", "box1")
        wq.add_retain_uri_from_box("box1")
        wq.add_addpath("box1", ["home", "warner", "foo.txt"])
        wq.add_delete_box("box1")
        wq.add_unlink_uri("olduri")

        # this tempfile should be deleted after the last step completes
        (f, tmpfilename) = wq.create_tempfile(".dummy")
        tmpfilename = os.path.join(wq.filesdir, tmpfilename)
        f.write("stuff")
        f.close()
        self.failUnless(os.path.exists(tmpfilename))
        # likewise this unreferenced box should get deleted
        boxname = wq.create_boxname()
        wq.write_to_box(boxname, CHKFileNode().new("uri here"))
        boxfile = os.path.join(wq.boxesdir, boxname)
        self.failUnless(os.path.exists(boxfile))

        d = wq.flush()
        def _check(res):
            self.failUnlessEqual(len(wq.dispatched_steps), 5)
            self.failUnlessEqual(wq.dispatched_steps[0][0], "upload_chk")
            self.failIf(os.path.exists(tmpfilename))
            self.failIf(os.path.exists(boxfile))
        d.addCallback(_check)
        return d
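
The pattern these tests exercise, as a compact sketch (the workqueue directory
path is illustrative): a box is a named slot that carries the URI produced by
one step into the steps that follow, and flush() dispatches the queue in order.

    wq = workqueue.WorkQueue("some/workqueue/dir")      # persistent step queue
    box = wq.create_boxname()
    wq.add_upload_chk("local_file", box)                # upload, stash URI in box
    wq.add_retain_uri_from_box(box)                     # keep a reference to the URI
    wq.add_addpath(box, ["home", "warner", "foo.txt"])  # link it into the vdrive
    wq.add_delete_box(box)                              # clean up the box afterwards
    d = wq.flush()                                      # run the queued steps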

@ -3,10 +3,8 @@

import os.path
from zope.interface import implements
from twisted.application import service
from twisted.internet import defer
from twisted.python import log
from allmydata import upload, download, uri
from allmydata import uri
from allmydata.Crypto.Cipher import AES
from allmydata.util import hashutil, idlib
from allmydata.interfaces import IDirectoryNode, IFileNode
@ -14,179 +12,6 @@ from allmydata.interfaces import IDirectoryNode, IFileNode
class NotMutableError(Exception):
    pass

class VDrive(service.MultiService):
    name = "vdrive"

    def set_server(self, vdrive_server):
        self.gvd_server = vdrive_server
    def set_root(self, root):
        self.gvd_root = root

    def dirpath(self, dir_or_path):
        if isinstance(dir_or_path, str):
            return self.get_dir(dir_or_path)
        return defer.succeed(dir_or_path)

    def get_dir(self, path):
        """Return a Deferred that fires with a RemoteReference to a
        MutableDirectoryNode at the given /-delimited path."""
        d = defer.succeed(self.gvd_root)
        if path.startswith("/"):
            path = path[1:]
        if path == "":
            return d
        for piece in path.split("/"):
            d.addCallback(lambda parent: parent.callRemote("list"))
            def _find(table, subdir):
                for name,target in table:
                    if name == subdir:
                        return target
                else:
                    raise KeyError("no such directory '%s' in '%s'" %
                                   (subdir, [t[0] for t in table]))
            d.addCallback(_find, piece)
            def _check(subdir):
                assert not isinstance(subdir, str), "Hey, %s shouldn't be a string" % subdir
                return subdir
            d.addCallback(_check)
        return d
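
A small sketch of how a caller walks the tree with this method (it assumes `vd`
is a VDrive service whose root has already been set with set_root()):

    d = vd.get_dir("/home/warner")     # Deferred -> directory RemoteReference
    d.addCallback(lambda dirnode: dirnode.callRemote("list"))
    # the remote "list" returns a table of (name, target) pairs
    d.addCallback(lambda table: [name for (name, target) in table])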

    def get_uri_from_parent(self, parent, filename):
        assert not isinstance(parent, str), "'%s' isn't a directory node" % (parent,)
        d = parent.callRemote("list")
        def _find(table):
            for name,target in table:
                if name == filename:
                    assert isinstance(target, str), "Hey, %s isn't a file" % filename
                    return target
            else:
                raise KeyError("no such file '%s' in '%s'" %
                               (filename, [t[0] for t in table]))
        d.addCallback(_find)
        return d

    def get_root(self):
        return self.gvd_root

    def listdir(self, dir_or_path):
        d = self.dirpath(dir_or_path)
        d.addCallback(lambda parent: parent.callRemote("list"))
        def _list(table):
            return [t[0] for t in table]
        d.addCallback(_list)
        return d

    def put_file(self, dir_or_path, name, uploadable):
        """Upload an IUploadable and add it to the virtual drive (as an entry
        called 'name', in 'dir_or_path') 'dir_or_path' must either be a
        string like 'root/subdir1/subdir2', or a directory node (either the
        root directory node returned by get_root(), or a subdirectory
        returned by list() ).

        The uploadable can be an instance of allmydata.upload.Data,
        FileHandle, or FileName.

        I return a deferred that will fire when the operation is complete.
        """

        log.msg("putting file to '%s'" % name)
        ul = self.parent.getServiceNamed("uploader")
        d = self.dirpath(dir_or_path)
        def _got_dir(dirnode):
            d1 = ul.upload(uploadable)
            def _add(uri):
                d2 = dirnode.callRemote("add_file", name, uri)
                d2.addCallback(lambda res: uri)
                return d2
            d1.addCallback(_add)
            return d1
        d.addCallback(_got_dir)
        def _done(res):
            log.msg("finished putting file to '%s'" % name)
            return res
        d.addCallback(_done)
        return d

    def put_file_by_filename(self, dir_or_path, name, filename):
        return self.put_file(dir_or_path, name, upload.FileName(filename))
    def put_file_by_data(self, dir_or_path, name, data):
        return self.put_file(dir_or_path, name, upload.Data(data))
    def put_file_by_filehandle(self, dir_or_path, name, filehandle):
        return self.put_file(dir_or_path, name, upload.FileHandle(filehandle))
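
A usage sketch for the three convenience wrappers, assuming the same `vd`
instance running inside a node that provides an "uploader" service; the file
names and local path are illustrative. Each call returns a Deferred that fires
with the new file's URI once the upload and the directory update have finished.

    d = vd.put_file_by_data("home/warner", "greeting.txt", "hello\n")
    d.addCallback(lambda uri:
                  vd.put_file_by_filename("home/warner", "photo.jpg",
                                          "/tmp/photo.jpg"))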

    def make_directory(self, dir_or_path, name):
        d = self.dirpath(dir_or_path)
        d.addCallback(lambda parent: parent.callRemote("add_directory", name))
        return d

    def remove(self, parent, name):
        assert not isinstance(parent, str)
        log.msg("vdrive removing %s" % name)
        # first find the uri
        d = self.get_uri_from_parent(parent, name)
        def _got_uri(vid):
            # TODO: delete the file's shares using this
            pass
        d.addCallback(_got_uri)
        def _delete_from_parent(res):
            return parent.callRemote("remove", name)
        d.addCallback(_delete_from_parent)
        def _done(res):
            log.msg("vdrive done removing %s" % name)
        d.addCallback(_done)
        return d


    def get_file(self, dir_and_name_or_path, download_target):
        """Retrieve a file from the virtual drive and put it somewhere.

        The file to be retrieved may either be specified as a (dir, name)
        tuple or as a full /-delimited pathname. In the former case, 'dir'
        can be either a DirectoryNode or a pathname.

        The download target must be an IDownloadTarget instance like
        allmydata.download.Data, .FileName, or .FileHandle .
        """

        log.msg("getting file from %s" % (dir_and_name_or_path,))
        dl = self.parent.getServiceNamed("downloader")

        if isinstance(dir_and_name_or_path, tuple):
            dir_or_path, name = dir_and_name_or_path
            d = self.dirpath(dir_or_path)
            def _got_dir(dirnode):
                return self.get_uri_from_parent(dirnode, name)
            d.addCallback(_got_dir)
        else:
            rslash = dir_and_name_or_path.rfind("/")
            if rslash == -1:
                # we're looking for a file in the root directory
                dir = self.gvd_root
                name = dir_and_name_or_path
                d = self.get_uri_from_parent(dir, name)
            else:
                dirpath = dir_and_name_or_path[:rslash]
                name = dir_and_name_or_path[rslash+1:]
                d = self.dirpath(dirpath)
                d.addCallback(lambda dir:
                              self.get_uri_from_parent(dir, name))

        def _got_uri(uri):
            return dl.download(uri, download_target)
        d.addCallback(_got_uri)
        def _done(res):
            log.msg("finished getting file")
            return res
        d.addCallback(_done)
        return d

    def get_file_to_filename(self, from_where, filename):
        return self.get_file(from_where, download.FileName(filename))
    def get_file_to_data(self, from_where):
        return self.get_file(from_where, download.Data())
    def get_file_to_filehandle(self, from_where, filehandle):
        return self.get_file(from_where, download.FileHandle(filehandle))
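
And the matching retrieval sketch, with the same assumed `vd` instance and a
node that provides a "downloader" service; file names are again illustrative.
The source may be a /-delimited path or a (dir, name) tuple, as the docstring
above describes.

    d = vd.get_file_to_data("home/warner/greeting.txt")   # fires with the bytes
    d.addCallback(lambda data:
                  vd.get_file_to_filename(("home/warner", "photo.jpg"),
                                          "photo-copy.jpg"))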


def create_directory_node(client, diruri):
    assert uri.is_dirnode_uri(diruri)