Mirror of https://github.com/tahoe-lafs/tahoe-lafs.git (synced 2025-01-26 14:19:32 +00:00)
more filetree, it's actually starting to make sense now
This commit is contained in:
parent 48216ce272
commit b61a4ff371
@@ -6,6 +6,10 @@ class BaseDataNode(object):
    implements(INode)
    prefix = None # must be set by subclass

    def new(self, data):
        self.set_base_data(data)
        return self

    def get_base_data(self):
        raise NotImplementedError # must be provided by subclass
    def set_base_data(self, data):

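For orientation, a hedged usage sketch of the contract BaseDataNode sets up, built from the LocalFileSubTreeNode subclass and the test assertions that appear later in this commit; the prefix-plus-colon serialized form is inferred from those assertions, not shown in this hunk.

    from allmydata.filetree.directory import LocalFileSubTreeNode

    node = LocalFileSubTreeNode().new("dirtree.save")   # new() stores the data and returns self
    assert node.get_base_data() == "dirtree.save"       # this subclass's base datum is its filename
    node_s = node.serialize_node()
    assert node_s.startswith("LocalFileDirectory:")     # class-level prefix, per the tests below
    assert "dirtree.save" in node_s                     # base data embedded in the serialized form
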
@@ -1,5 +1,7 @@

from zope.interface import implements
from twisted.internet import defer
from cStringIO import StringIO
from allmydata.filetree.interfaces import (
    INode, IDirectoryNode, ISubTree,
    ICHKDirectoryNode, ISSKDirectoryNode,

@@ -127,6 +129,7 @@ class _DirectorySubTree(object):
        # create a new, empty directory
        self.root = SubTreeNode(self)
        self.mutable = True # sure, why not
        return self

    def populate_from_node(self, node, parent_is_mutable, node_maker, downloader):
        # self.populate_from_node must be defined by the subclass (CHK or

@@ -182,6 +185,7 @@ class LocalFileSubTreeNode(BaseDataNode):

    def new(self, filename):
        self.filename = filename
        return self

    def get_base_data(self):
        return self.filename

@@ -189,9 +193,11 @@ class LocalFileSubTreeNode(BaseDataNode):
        self.filename = data

class LocalFileSubTree(_DirectorySubTree):
    node_class = LocalFileSubTreeNode

    def new(self, filename):
        self.filename = filename
        _DirectorySubTree.new(self)
        return _DirectorySubTree.new(self)

    def populate_from_node(self, node, parent_is_mutable, node_maker, downloader):
        self.mutable = True # probably

@@ -201,11 +207,25 @@ class LocalFileSubTree(_DirectorySubTree):
        f.close()
        return defer.succeed(self._populate_from_data(node_maker))

    def update(self, prepath, work_queue):
    def create_node_now(self):
        return LocalFileSubTreeNode().new(self.filename)

    def _update(self):
        f = open(self.filename, "wb")
        self.serialize_to_file(f)
        f.close()

    def update_now(self, uploader):
        self._update()
        return self.create_node_now()

    def update(self, work_queue):
        # TODO: this may suffer from the same execute-too-early problem as
        # redirect.LocalFileRedirection
        self._update()
        return None


class CHKDirectorySubTreeNode(BaseDataNode):
    implements(ICHKDirectoryNode)
    prefix = "CHKDirectory"

@@ -221,9 +241,10 @@ class CHKDirectorySubTreeNode(BaseDataNode):

class CHKDirectorySubTree(_DirectorySubTree):
    # maybe mutable, maybe not
    node_class = CHKDirectorySubTreeNode

    def set_uri(self, uri):
        self.old_uri = uri
        self.uri = uri

    def populate_from_node(self, node, parent_is_mutable, node_maker, downloader):
        assert ICHKDirectoryNode(node)

@@ -232,22 +253,37 @@ class CHKDirectorySubTree(_DirectorySubTree):
        d.addCallback(self._populate_from_data, node_maker)
        return d

    def update(self, prepath, work_queue):
    def create_node_now(self):
        return CHKDirectorySubTreeNode().new(self.uri)

    def update_now(self, uploader):
        f = StringIO()
        self.serialize_to_file(f)
        data = f.getvalue()
        d = uploader.upload_data(data)
        def _uploaded(uri):
            self.uri = uri
            return self.create_node_now()
        d.addCallback(_uploaded)
        return d

    def update(self, workqueue):
        # this is the CHK form
        f, filename = work_queue.create_tempfile(".chkdir")
        old_uri = self.uri
        f, filename = workqueue.create_tempfile(".chkdir")
        self.serialize_to_file(f)
        f.close()
        boxname = work_queue.create_boxname()
        # mutation affects our parent
        work_queue.add_upload_chk(filename, boxname)
        work_queue.add_delete_tempfile(filename)
        work_queue.add_retain_uri_from_box(boxname)
        work_queue.add_delete_box(boxname)
        work_queue.add_addpath(boxname, prepath)
        work_queue.add_unlink_uri(self.old_uri)
        boxname = workqueue.create_boxname()
        workqueue.add_upload_chk(filename, boxname)
        workqueue.add_delete_tempfile(filename)
        workqueue.add_retain_uri_from_box(boxname)
        workqueue.add_delete_box(boxname)
        workqueue.add_unlink_uri(old_uri)
        # TODO: think about how self.old_uri will get updated. I *think* that
        # this whole instance will get replaced, so it ought to be ok. But
        # this needs investigation.

        # mutation affects our parent, so we return a boxname for them
        return boxname

@@ -265,14 +301,20 @@ class SSKDirectorySubTreeNode(object):
        return self.read_cap
    def get_write_capability(self):
        return self.write_cap
    def set_read_capability(self, read_cap):
        self.read_cap = read_cap
    def set_write_capability(self, write_cap):
        self.write_cap = write_cap


class SSKDirectorySubTree(_DirectorySubTree):
    node_class = SSKDirectorySubTreeNode

    def new(self):
        _DirectorySubTree.new(self)
        self.version = 0
        # TODO: populate
        return self

    def populate_from_node(self, node, parent_is_mutable, node_maker, downloader):
        node = ISSKDirectoryNode(node)

@@ -286,15 +328,36 @@ class SSKDirectorySubTree(_DirectorySubTree):
    def set_version(self, version):
        self.version = version

    def upload_my_serialized_form(self, work_queue):
    def create_node_now(self):
        node = SSKDirectorySubTreeNode()
        node.set_read_capability(self.read_capability)
        node.set_write_capability(self.write_capability)
        return node

    def update_now(self, uploader):
        if not self.write_capability:
            raise RuntimeError("This SSKDirectorySubTree is not mutable")

        f = StringIO()
        self.serialize_to_file(f)
        data = f.getvalue()

        self.version += 1
        d = uploader.upload_ssk_data(self.write_capability, self.version, data)
        d.addCallback(lambda ignored: self.create_node_now())
        return d

    def update(self, workqueue):
        # this is the SSK form
        f, filename = work_queue.create_tempfile(".sskdir")
        f, filename = workqueue.create_tempfile(".sskdir")
        self.serialize_to_file(f)
        f.close()
        # mutation does not affect our parent
        work_queue.add_upload_ssk(filename, self.write_capability,
                                  self.version)
        self.version = self.version + 1
        work_queue.add_delete_tempfile(filename)
        work_queue.add_retain_ssk(self.read_capability)

        oldversion = self.version
        self.version = self.version + 1

        workqueue.add_upload_ssk(self.write_capability, oldversion, filename)
        workqueue.add_delete_tempfile(filename)
        workqueue.add_retain_ssk(self.read_capability)
        # mutation does not affect our parent
        return None

@@ -69,6 +69,10 @@ class ISubTree(Interface):
        a DirectoryNode, or it might be a FileNode.
        """

    # All ISubTree-providing instances must have a class-level attribute
    # named .node_class which references the matching INode-providing class.
    # This is used by the Opener to turn nodes into subtrees.

    def populate_from_node(node, parent_is_mutable, node_maker, downloader):
        """Subtrees are created by opener.open() being called with an INode
        which describes both the kind of subtree to be created and a way to

@@ -129,18 +133,62 @@ class ISubTree(Interface):
        given filehandle (using only .write()). This string should be
        suitable for uploading to the mesh or storing in a local file."""

    def update(prepath, workqueue):
        """Perform and schedule whatever work is necessary to record this
        subtree to persistent storage and update the parent at 'prepath'
        with a new child specification.
    def update_now(uploader):
        """Perform whatever work is necessary to record this subtree to
        persistent storage.

        This returns an INode, or a Deferred that fires (with an INode) when
        the subtree has been persisted.

        For directory subtrees, this will cause the subtree to serialize
        itself to a file, then add instructions to the workqueue to first
        upload this file to the mesh, then add the file's URI to the parent's
        subtree. The second instruction will possibly cause recursion, until
        some subtree is updated which does not require notifying the parent.
        itself to a file, then upload this file to the mesh, then create an
        INode-providing instance which describes where the file wound up. For
        redirections, this will cause the subtree to modify the redirection's
        persistent storage, then return the (unmodified) INode that describes
        the redirection.

        This form does not use the workqueue. If the node is shut down before
        the Deferred fires, a redirection or SSK subtree might be left in its
        previous state, or it might have been updated.
        """

    def update(workqueue):
        """Perform and schedule whatever work is necessary to record this
        subtree to persistent storage.

        Returns a boxname or None, synchronously. This function does not
        return a Deferred.

        If the parent subtree needs to be modified with the new identity of
        this subtree (i.e. for CHKDirectorySubTree instances), this will
        return a boxname in which the serialized INode will be placed once
        the added workqueue steps have completed. The caller should add
        'addpath' steps to the workqueue using this boxname (which will
        eventually cause recursion on other subtrees, until some subtree is
        updated which does not require notifying the parent). update() will
        add steps to delete the box at the end of the workqueue.

        If the parent subtree does not need to be modified (i.e. for
        SSKDirectorySubTree instances, or redirections), this will return
        None.

        This is like update_now(), but uses the workqueue to ensure
        consistency in the face of node shutdowns. Once our intentions have
        been recorded in the workqueue, if the node is shut down before the
        upload steps have completed, the update will eventually complete the
        next time the node is started.
        """

    def create_node_now():
        """FOR TESTING ONLY. Immediately create and return an INode which
        describes the current state of this subtree. This does not perform
        any upload or persistence work, and thus depends upon any internal
        state having been previously set correctly. In general this will
        return the correct value for subtrees which have just been created
        (and not yet mutated). It will also return the correct value for
        subtrees which do not change their identity when they are mutated
        (SSKDirectorySubTrees and redirections).
        """

#class IMutableSubTree(Interface):
#    def mutation_affects_parent():

@@ -176,13 +224,12 @@ class ISubTree(Interface):
        # mesh or in a file."""

class IOpener(Interface):
    def open(subtree_specification, parent_is_mutable):
        """I can take an ISubTreeSpecification-providing specification of a
        subtree and return a Deferred which fires with an instance that
        provides ISubTree (and maybe even IMutableSubTree). I probably do
        this by performing network IO: reading a file from the mesh, or from
        local disk, or asking some central-service node for the current
        value."""
    def open(subtree_node, parent_is_mutable, node_maker):
        """I can take an INode-providing specification of a subtree and
        return a Deferred which fires with an instance that provides ISubTree
        (and maybe even IMutableSubTree). I probably do this by performing
        network IO: reading a file from the mesh, or from local disk, or
        asking some central-service node for the current value."""


class IVirtualDrive(Interface):

@@ -196,6 +243,13 @@ class IVirtualDrive(Interface):
        """Given a string produced by original_node.serialize_node(), produce
        an equivalent node.
        """
    def make_subtree_from_node(node, parent_is_mutable):
        """Given an INode, create an ISubTree.

        This returns a Deferred that fires (with the new subtree) when the
        subtree is ready for use. This uses an IOpener to download the
        subtree data, if necessary.
        """

    # commands to manipulate files

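A hedged round-trip sketch for make_subtree_from_node(), following the testDirectory code later in this commit; 'vdrive' is assumed to be an already-constructed VirtualDrive with its workqueue and downloader wired up.

    from allmydata.filetree import directory

    subtree = directory.LocalFileSubTree().new("dirtree.save")
    node = subtree.create_node_now()            # INode describing this subtree
    d = vdrive.make_subtree_from_node(node, False)
    def _opened(new_subtree):
        # the reconstructed subtree can be walked just like the original
        return new_subtree.get_node_for_path([])
    d.addCallback(_opened)
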
@@ -3,15 +3,16 @@ from zope.interface import implements
from twisted.internet import defer
from allmydata.filetree import interfaces, directory, redirect
#from allmydata.filetree.file import CHKFile, MutableSSKFile, ImmutableSSKFile
#from allmydata.filetree.specification import unserialize_subtree_specification
from allmydata.filetree.interfaces import INode, IDirectoryNode

all_openable_subtree_types = [
    directory.LocalFileSubTree,
    directory.CHKDirectorySubTree,
    directory.SSKDirectorySubTree,
    redirect.LocalFileRedirection,
    redirect.QueenRedirection,
    redirect.HTTPRedirection,
    redirect.QueenOrLocalFileRedirection,
    redirect.HTTPRedirection,
    ]

# the Opener can turn an INode (which describes a subtree, like a directory

@@ -24,31 +25,36 @@ class Opener(object):
        self._downloader = downloader
        self._cache = {}

    def _create(self, spec, parent_is_mutable):
        assert isinstance(spec, tuple)
    def _create(self, node, parent_is_mutable, node_maker):
        assert INode(node)
        for subtree_class in all_openable_subtree_types:
            if spec[0] == subtree_class.stype:
            if isinstance(node, subtree_class.node_class):
                subtree = subtree_class()
                d = subtree.populate_from_specification(spec,
                                                        parent_is_mutable,
                                                        self._downloader)
                d = subtree.populate_from_node(node,
                                               parent_is_mutable,
                                               node_maker,
                                               self._downloader)
                return d
        raise RuntimeError("unable to handle subtree specification '%s'"
                           % (spec,))
                           % (node,))

    def open(self, subtree_specification, parent_is_mutable):
        spec = interfaces.ISubTreeSpecification(subtree_specification)
    def open(self, node, parent_is_mutable, node_maker):
        assert INode(node)
        assert not isinstance(node, IDirectoryNode)

        # is it in cache?
        if spec in self._cache:
            return defer.succeed(self._cache[spec])
        # is it in cache? To check this we need to use the node's serialized
        # form, since nodes are instances and don't compare by value
        node_s = node.serialize_node()
        if node_s in self._cache:
            return defer.succeed(self._cache[node_s])

        d = defer.maybeDeferred(self._create, spec, parent_is_mutable)
        d.addCallback(self._add_to_cache, spec)
        d = defer.maybeDeferred(self._create,
                                node, parent_is_mutable, node_maker)
        d.addCallback(self._add_to_cache, node_s)
        return d

    def _add_to_cache(self, subtree, spec):
        self._cache[spec] = subtree
    def _add_to_cache(self, subtree, node_s):
        self._cache[node_s] = subtree
        # TODO: remove things from the cache eventually
        return subtree

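A hedged illustration of the caching rule in open() above: two INode instances describing the same subtree are distinct objects, so the cache is keyed on node.serialize_node() rather than on the node itself (class and filename borrowed from elsewhere in this commit).

    from allmydata.filetree.directory import LocalFileSubTreeNode

    node_a = LocalFileSubTreeNode().new("dirtree.save")
    node_b = LocalFileSubTreeNode().new("dirtree.save")
    assert node_a is not node_b                          # instances don't compare by value
    key_a, key_b = node_a.serialize_node(), node_b.serialize_node()
    assert key_a == key_b                                # serialized forms do
    cache = {key_a: "<the opened subtree>"}
    assert key_b in cache                                # so a later open() is a cache hit
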
@@ -12,6 +12,7 @@ class LocalFileRedirectionNode(BaseDataNode):

    def new(self, handle):
        self.handle = handle
        return self

    def get_base_data(self):
        return self.handle

@@ -23,6 +24,7 @@ class _BaseRedirection(object):

    def new(self, child_node):
        self.child_node = child_node
        return self

    def get_node_for_path(self, path):
        return ([], self.child_node, path)

@@ -34,12 +36,13 @@ class _BaseRedirection(object):
        self.child_node = node_maker(data)
        return self


class LocalFileRedirection(_BaseRedirection):
    stype = "LocalFileRedirection"
    node_class = LocalFileRedirectionNode

    def new(self, handle, child_node):
        self.filename = handle
        _BaseRedirection.new(self, child_node)
        return _BaseRedirection.new(self, child_node)

    def populate_from_node(self, node, parent_is_mutable, node_maker, downloader):
        # return a Deferred that fires (with self) when this node is ready

@@ -66,20 +69,35 @@ class LocalFileRedirection(_BaseRedirection):
    def is_mutable(self):
        return True

    def update(self, prepath, workqueue):
    def create_node_now(self):
        return LocalFileRedirectionNode().new(self.filename)

    def _update(self):
        f = open(self.filename, "wb")
        self.serialize_subtree_to_file(f)
        f.close()

    def update_now(self, uploader):
        self._update()
        return self.create_node_now()

    def update(self, workqueue):
        # TODO: this happens too early, before earlier items in the workqueue
        # have been executed. This might not be a problem, if our update()
        # method isn't actually called until everything earlier has been
        # executed anyways. Need to ponder this.
        self._update()
        return None

class QueenRedirectionNode(LocalFileRedirectionNode):
    prefix = "QueenRedirection"

class QueenRedirection(_BaseRedirection):
    style = "QueenRedirection"
    node_class = QueenRedirectionNode

    def new(self, handle):
        self.handle = handle
        return self

    def populate_from_node(self, node, parent_is_mutable, node_maker, downloader):
        # this specifies a handle for which the Queen maintains a serialized

@@ -95,23 +113,38 @@ class QueenRedirection(_BaseRedirection):
    def is_mutable(self):
        return True # TODO: maybe, maybe not

    def update(self, prepath, workqueue):
    def create_node_now(self):
        return QueenRedirectionNode().new(self.handle)

    def update_now(self, uploader):
        f = StringIO()
        self.serialize_subtree_to_file(f)
        d = self._queen.callRemote("set_handle", self.handle, f.getvalue())
        def _done(res):
            return self.create_node_now()
        d.addCallback(_done)
        return d

    def update(self, workqueue):
        f, filename = workqueue.create_tempfile(".toqueen")
        self.serialize_subtree_to_file(f)
        f.close()
        workqueue.add_queen_update_handle(self.handle, filename)
        workqueue.add_delete_tempfile(filename)
        return None

class QueenOrLocalFileRedirectionNode(LocalFileRedirectionNode):
    prefix = "QueenOrLocalFileRedirection"

class QueenOrLocalFileRedirection(_BaseRedirection):
    stype = "QueenOrLocalFileRedirection"
    node_class = QueenOrLocalFileRedirectionNode

    def new(self, handle, child_node):
        self.handle = handle
        self.version = 0
        self.child_node = child_node
        # TODO
        return self

    def populate_from_node(self, node, parent_is_mutable, node_maker, downloader):
        # there is a local file which contains a bencoded serialized

@@ -146,22 +179,48 @@ class QueenOrLocalFileRedirection(_BaseRedirection):
    def is_mutable(self):
        return True

    def update(self, prepath, workqueue):
    def create_node_now(self):
        return QueenOrLocalFileRedirectionNode().new(self.handle)

    def _update(self):
        self.version += 1
        f = StringIO()
        self.serialize_subtree_to_file(f)
        version_and_data = bencode.bencode((self.version, f.getvalue()))
        return version_and_data

    def update_now(self, uploader):
        version_and_data = self._update()
        f = open(self.filename, "wb")
        f.write(version_and_data)
        f.close()

        d = self._queen.callRemote("set_handle", self.handle, version_and_data)
        def _done(res):
            return self.create_node_now()
        d.addCallback(_done)
        return d

    def update(self, workqueue):
        version_and_data = self._update()
        # TODO: this may have the same problem as LocalFileRedirection.update
        f = open(self.filename, "wb")
        f.write(version_and_data)
        f.close()

        f, filename = workqueue.create_tempfile(".toqueen")
        self.serialize_subtree_to_file(f)
        f.close()
        workqueue.add_queen_update_handle(self.handle, filename)
        workqueue.add_delete_tempfile(filename)
        return None

class HTTPRedirectionNode(BaseDataNode):
    prefix = "HTTPRedirection"

    def new(self, url):
        self.url = url
        return self

    def get_base_data(self):
        return self.url

@@ -169,19 +228,29 @@ class HTTPRedirectionNode(BaseDataNode):
        self.url = data

class HTTPRedirection(_BaseRedirection):
    stype = "HTTPRedirection"
    node_class = HTTPRedirectionNode

    def new(self, url):
        self.url = url

    def populate_from_node(self, node, parent_is_mutable, node_maker, downloader):
        # this specifies a URL at which there is a bencoded serialized
        # subtree specification.
        self.url = node.url
        assert isinstance(node, HTTPRedirectionNode)
        from twisted.web import client
        d = client.getPage(node.url)
        d = client.getPage(self.url)
        d.addCallback(self._populate_from_data, node_maker)
        return d

    def is_mutable(self):
        return False

    def update(self, prepath, workqueue):
    def create_node_now(self):
        return HTTPRedirectionNode().new(self.url)

    def update_now(self, uploader):
        raise RuntimeError("HTTPRedirection is not mutable")

    def update(self, workqueue):
        raise RuntimeError("HTTPRedirection is not mutable")

@@ -1,64 +0,0 @@

"""
from zope.interface import implements
from allmydata.filetree.interfaces import ISubTreeSpecification

class CHKFileSpecification(object):
    implements(ISubTreeSpecification)
    stype = "CHK-File"
    def set_uri(self, uri):
        self.uri = uri
    def serialize(self):
        return (self.stype, self.uri)
    def unserialize(self, data):
        assert data[0] == self.stype
        self.uri = data[1]

class ImmutableSSKFileSpecification(object):
    implements(ISubTreeSpecification)
    stype = "SSK-Readonly-File"
    def set_read_capability(self, read_cap):
        self.read_cap = read_cap
    def get_read_capability(self):
        return self.read_cap
    def serialize(self):
        return (self.stype, self.read_cap)
    def unserialize(self, data):
        assert data[0] == self.stype
        self.read_cap = data[1]

class MutableSSKFileSpecification(ImmutableSSKFileSpecification):
    implements(ISubTreeSpecification)
    stype = "SSK-ReadWrite-File"
    def set_write_capability(self, write_cap):
        self.write_cap = write_cap
    def get_write_capability(self):
        return self.write_cap
    def serialize(self):
        return (self.stype, self.read_cap, self.write_cap)
    def unserialize(self, data):
        assert data[0] == self.stype
        self.read_cap = data[1]
        self.write_cap = data[2]


def unserialize_subtree_specification(serialized_spec):
    assert isinstance(serialized_spec, tuple)
    for stype in [CHKDirectorySpecification,
                  ImmutableSSKDirectorySpecification,
                  MutableSSKDirectorySpecification,

                  LocalFileRedirection,
                  QueenRedirection,
                  HTTPRedirection,
                  QueenOrLocalFileRedirection,
                  ]:
        if tuple[0] == stype:
            spec = stype()
            spec.unserialize(serialized_spec)
            return spec
    raise RuntimeError("unable to unserialize subtree specification '%s'" %
                       (serialized_spec,))
"""

@@ -51,10 +51,15 @@ class VirtualDrive(object):
            return node
        raise RuntimeError("unable to handle subtree type '%s'" % prefix)

    def make_subtree_from_node(self, node, parent_is_mutable):
        assert INode(node)
        return self.opener.open(node, parent_is_mutable,
                                self.make_subtree_from_node)

    # these methods are used to walk through our subtrees

    def _get_root(self):
        return self.opener.open(self.root_node, False)
        return self.make_subtree_from_node(self.root_node, False)

    def _get_node(self, path):
        d = self._get_closest_node(path)

@@ -84,7 +89,7 @@ class VirtualDrive(object):
            # traversal done
            return (node, remaining_path)
        # otherwise, we must open and recurse into a new subtree
        d = self.opener.open(node, parent_is_mutable)
        d = self.make_subtree_from_node(node, parent_is_mutable)
        def _opened(next_subtree):
            next_subtree = ISubTree(next_subtree)
            return self._get_closest_node_1(next_subtree, remaining_path)

@@ -147,6 +152,15 @@ class VirtualDrive(object):
        d.addCallback(_got_closest)
        return d

    def _get_subtree_path(self, path):
        # compute a list of [(subtree1, subpath1), ...], which represents
        # which parts of 'path' traverse which subtrees. This can be used to
        # present the virtual drive to the user in a form that includes
        # redirection nodes (which do not consume path segments), or to
        # figure out which subtrees need to be updated when the identity of a
        # lower subtree (i.e. CHK) is changed.
        pass # TODO

    # these are called by the workqueue

    def add(self, path, new_node):

@@ -164,9 +178,13 @@ class VirtualDrive(object):
            node.add(new_node_path, new_node)
            subtree = node.get_subtree()
            # now, tell the subtree to serialize and upload itself, using the
            # workqueue. The subtree will also queue a step to notify its
            # parent (using 'prepath'), if necessary.
            return subtree.update(prepath, self.workqueue)
            # workqueue.
            boxname = subtree.update(self.workqueue)
            if boxname:
                # the parent needs to be notified, so queue a step to notify
                # them (using 'prepath')
                self.workqueue.add_addpath(boxname, prepath)
            return self # TODO: what would be the most useful?
        d.addCallback(_add_new_node)
        return d

@@ -210,7 +228,10 @@ class VirtualDrive(object):
            node.delete(orphan_path)
            # now serialize and upload
            subtree = node.get_subtree()
            return subtree.update(prepath, self.workqueue)
            boxname = subtree.update(self.workqueue)
            if boxname:
                self.workqueue.add_addpath(boxname, prepath)
            return self
        d.addCallback(_got_parent)
        return d

@@ -1,11 +1,11 @@

from zope.interface import implements
#from zope.interface import implements
from twisted.trial import unittest
from twisted.internet import defer
from allmydata.filetree.interfaces import IOpener, IDirectoryNode
from allmydata.filetree.directory import (#ImmutableDirectorySubTree,
                                          SubTreeNode,
                                          CHKDirectorySubTree)
#from twisted.internet import defer
#from allmydata.filetree.interfaces import IOpener, IDirectoryNode
#from allmydata.filetree.directory import (ImmutableDirectorySubTree,
#                                          SubTreeNode,
#                                          CHKDirectorySubTree)
#from allmydata.filetree.specification import (CHKFileSpecification,
#                                              CHKDirectorySpecification)
from allmydata import workqueue

@@ -324,11 +324,16 @@ class Stuff(unittest.TestCase):
        dl = None
        if not root_node:
            root_node = directory.LocalFileSubTreeNode()
            root_node.new("dirtree.save")
            root_node.new("rootdirtree.save")
        v = vdrive.VirtualDrive(wq, dl, root_node)
        return v

    def testDirectory(self):
        # TODO: we only need this VirtualDrive for the opener. Perhaps
        # make_subtree_from_node should move out of that class and into a
        # module-level function.
        v = self.makeVirtualDrive("test_filetree_new/testDirectory")

        # create an empty directory (stored locally)
        subtree = directory.LocalFileSubTree()
        subtree.new("dirtree.save")

@@ -352,6 +357,7 @@ class Stuff(unittest.TestCase):
        self.failUnlessIdentical(root.get("foo.txt"), file1)
        subdir1a = root.get("subdir1")
        self.failUnlessIdentical(subdir1, subdir1a)
        del subdir1a
        self.failUnless(IDirectoryNode.providedBy(subdir1))
        self.failUnlessEqual(subdir1.list(), [])
        self.failUnlessIdentical(subdir1.get_subtree(), subtree)

@@ -363,16 +369,43 @@ class Stuff(unittest.TestCase):
        subdir2.delete("subdir4")
        self.failUnlessEqual(subdir2.list(), ["subdir3"])

        del root, subdir1, subdir2, subdir3, subdir4
        # leaving file1 for later use

        # now serialize it and reconstruct it
        f = StringIO()
        subtree.serialize_subtree_to_file(f)
        data = f.getvalue()
        #print data

        # hrm, something is missing here.. subtree to ??? to node to subtree
        node = subtree.create_node_now()
        self.failUnless(isinstance(node, directory.LocalFileSubTreeNode))
        node_s = node.serialize_node()
        self.failUnless(isinstance(node_s, str))
        self.failUnless(node_s.startswith("LocalFileDirectory:"))
        self.failUnless("dirtree.save" in node_s)

        v = self.makeVirtualDrive("test_filetree_new/testDirectory")
        #node = v.make_node_from_serialized(data)
        # now reconstruct it
        d = v.make_subtree_from_node(node, False)
        def _opened(new_subtree):
            res = new_subtree.get_node_for_path([])
            (found_path, root, remaining_path) = res
            self.failUnlessEqual(found_path, [])
            self.failUnlessEqual(remaining_path, [])
            self.failUnless(INode.providedBy(root))
            self.failUnless(IDirectoryNode.providedBy(root))
            self.failUnlessEqual(root.list(), ["foo.txt", "subdir1"])
            file1a = root.get("foo.txt")
            self.failUnless(isinstance(CHKFileNode, file1a))
            self.failUnlessEqual(file1a.get_uri(), "uri1")
            subdir1 = root.get("subdir1")
            subdir2 = subdir1.get("subdir2")
            self.failUnlessEqual(subdir2.list(), ["subdir3"])
            subdir2.delete("subdir3")
            self.failUnlessEqual(subdir2.list(), [])
        d.addCallback(_opened)
        return d
    testDirectory.todo = "not working yet"

    def testVdrive(self):
        # create some stuff, see if we can import everything

@@ -389,6 +422,6 @@ class Stuff(unittest.TestCase):

def start():
    root_node = redirect.LocalFileRedirectionNode()
    root_node.new("handle", dirtree)
    # root_node.new("handle", dirtree)
    root = redirect.LocalFileRedirection()
    # wow, bootstrapping is hard

@@ -284,6 +284,9 @@ class IUploader(Interface):
        """Upload the file. 'uploadable' must implement IUploadable. This
        returns a Deferred which fires with the URI of the file."""

    def upload_ssk(write_capability, new_version, uploadable):
        pass # TODO

class Uploader(service.MultiService):
    """I am a service that allows file uploading.
    """

@@ -25,7 +25,11 @@ class IWorkQueue(Interface):
        """

    def create_tempfile(suffix=""):
        """Return (f, filename)."""
        """Return (f, filename), where 'f' is an open filehandle, and
        'filename' is a string that can be passed to other workqueue steps to
        refer to that same file later. NOTE: 'filename' is not an absolute
        path, rather it will be interpreted relative to some directory known
        only by the workqueue."""
    def create_boxname(contents=None):
        """Return a unique box name (as a string)."""

@@ -40,12 +44,17 @@
        'add_subpath' step will reference this boxname and retrieve the URI.
        """

    def add_upload_ssk(source_filename, write_capability, previous_version):
    def add_upload_ssk(write_capability, previous_version, source_filename):
        """This step uploads a file to the mesh in a way that replaces the
        previous version and does not require a change to the ID referenced
        by the parent.
        """

    def add_queen_update_handle(handle, source_filename):
        """Arrange for a central queen to be notified that the given handle
        has been updated with the contents of the given tempfile. This will
        send a set_handle() message to the queen."""

    def add_retain_ssk(read_capability):
        """Arrange for the given SSK to be kept alive."""

@@ -59,8 +68,8 @@
        file."""

    def add_addpath(boxname, path):
        """When executed, this step will retrieve the URI from the given box
        and call root.add(path, URIishthingyTODO, etc).
        """When executed, this step will retrieve the serialized INode from
        the given box and call vdrive.add(path, node) .
        """

    def add_unlink_uri(uri):
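
A hedged sketch of how these IWorkQueue steps chain together for a CHK directory, following CHKDirectorySubTree.update() and VirtualDrive.add() earlier in this commit; workqueue, subtree, and prepath are assumed to already exist.

    f, filename = workqueue.create_tempfile(".chkdir")
    subtree.serialize_to_file(f)
    f.close()
    old_uri = subtree.uri                        # captured before the new upload
    boxname = workqueue.create_boxname()
    workqueue.add_upload_chk(filename, boxname)  # upload; the new URI lands in the box
    workqueue.add_delete_tempfile(filename)
    workqueue.add_retain_uri_from_box(boxname)
    workqueue.add_delete_box(boxname)
    workqueue.add_unlink_uri(old_uri)            # stop retaining the directory's old URI
    # the caller then propagates the new identity into the parent:
    workqueue.add_addpath(boxname, prepath)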