Add some direct tests for NodeMaker.create_from_uri
parent 4ca45aaa93
commit 2ac4af7fb4
src/allmydata/test/strategies.py  (new file, +111)
@@ -0,0 +1,111 @@
"""
Hypothesis strategies used for testing Tahoe-LAFS.
"""

from hypothesis.strategies import (
    one_of,
    builds,
    binary,
)

from ..uri import (
    WriteableSSKFileURI,
    WriteableMDMFFileURI,
    DirectoryURI,
    MDMFDirectoryURI,
)

def write_capabilities():
    """
    Build ``IURI`` providers representing all kinds of write capabilities.
    """
    return one_of([
        ssk_capabilities(),
        mdmf_capabilities(),
        dir2_capabilities(),
        dir2_mdmf_capabilities(),
    ])


def ssk_capabilities():
    """
    Build ``WriteableSSKFileURI`` instances.
    """
    return builds(
        WriteableSSKFileURI,
        ssk_writekeys(),
        ssk_fingerprints(),
    )


def _writekeys(size=16):
    """
    Build ``bytes`` representing write keys.
    """
    return binary(min_size=size, max_size=size)


def ssk_writekeys():
    """
    Build ``bytes`` representing SSK write keys.
    """
    return _writekeys()


def _fingerprints(size=32):
    """
    Build ``bytes`` representing fingerprints.
    """
    return binary(min_size=size, max_size=size)


def ssk_fingerprints():
    """
    Build ``bytes`` representing SSK fingerprints.
    """
    return _fingerprints()


def mdmf_capabilities():
    """
    Build ``WriteableMDMFFileURI`` instances.
    """
    return builds(
        WriteableMDMFFileURI,
        mdmf_writekeys(),
        mdmf_fingerprints(),
    )


def mdmf_writekeys():
    """
    Build ``bytes`` representing MDMF write keys.
    """
    return _writekeys()


def mdmf_fingerprints():
    """
    Build ``bytes`` representing MDMF fingerprints.
    """
    return _fingerprints()


def dir2_capabilities():
    """
    Build ``DirectoryURI`` instances.
    """
    return builds(
        DirectoryURI,
        ssk_capabilities(),
    )


def dir2_mdmf_capabilities():
    """
    Build ``MDMFDirectoryURI`` instances.
    """
    return builds(
        MDMFDirectoryURI,
        mdmf_capabilities(),
    )
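Aside (not part of this diff): a minimal sketch of how the strategies above might drive a property-based test with Hypothesis's @given. The test function and its assertions are hypothetical; write_capabilities(), is_mutable(), and get_readonly() are the names used by the code in this commit.

# Hypothetical usage sketch -- not part of this commit.
from hypothesis import given

from allmydata.test.strategies import write_capabilities


@given(cap=write_capabilities())
def test_generated_write_caps(cap):
    # Every strategy above builds a writeable capability, so each generated
    # cap should be mutable and should expose a read-only counterpart.
    assert cap.is_mutable()
    assert cap.get_readonly().to_string()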
@@ -12,6 +12,15 @@ from fixtures import (
    Fixture,
    TempDir,
)

from hypothesis import (
    given,
)
from hypothesis.strategies import (
    sampled_from,
    booleans,
)

from eliot.testing import (
    capture_logging,
    assertHasAction,
@@ -39,6 +48,9 @@ from testtools.twistedsupport import (
import allmydata
import allmydata.util.log

from allmydata.nodemaker import (
    NodeMaker,
)
from allmydata.node import OldConfigError, UnescapedHashError, create_node_dir
from allmydata.frontends.auth import NeedRootcapLookupScheme
from allmydata import client
@@ -60,7 +72,9 @@ import allmydata.test.common_util as testutil
from .common import (
    EMPTY_CLIENT_CONFIG,
    SyncTestCase,
    AsyncBrokenTestCase,
    UseTestPlugins,
    UseNode,
    MemoryIntroducerClient,
    get_published_announcements,
)
@@ -69,6 +83,9 @@ from .matchers import (
    matches_storage_announcement,
    matches_furl,
)
from .strategies import (
    write_capabilities,
)

SOME_FURL = b"pb://abcde@nowhere/fake"

@@ -987,7 +1004,98 @@ class Run(unittest.TestCase, testutil.StallMixin):
        c2.setServiceParent(self.sparent)
        yield c2.disownServiceParent()

class NodeMaker(testutil.ReallyEqualMixin, unittest.TestCase):
class NodeMakerTests(testutil.ReallyEqualMixin, AsyncBrokenTestCase):

    def _make_node_maker(self, mode, writecap, deep_immutable):
        """
        Create a callable which can create an ``IFilesystemNode`` provider for the
        given cap.

        :param unicode mode: The read/write combination to pass to
            ``NodeMaker.create_from_cap``. If it contains ``u"r"`` then a
            readcap will be passed in. If it contains ``u"w"`` then a
            writecap will be passed in.

        :param IURI writecap: The capability for which to create a node.

        :param bool deep_immutable: Whether to request a "deep immutable" node
            which forces the result to be an immutable ``IFilesystemNode`` (I
            think -exarkun).
        """
        if writecap.is_mutable():
            # It's just not a valid combination to have a mutable alongside
            # deep_immutable = True. It's easier to fix deep_immutable than
            # writecap to clear up this conflict.
            deep_immutable = False

        if "r" in mode:
            readcap = writecap.get_readonly().to_string()
        else:
            readcap = None
        if "w" in mode:
            writecap = writecap.to_string()
        else:
            writecap = None

        nm = NodeMaker(
            storage_broker=None,
            secret_holder=None,
            history=None,
            uploader=None,
            terminator=None,
            default_encoding_parameters={u"k": 1, u"n": 1},
            mutable_file_default=None,
            key_generator=None,
            blacklist=None,
        )
        return partial(
            nm.create_from_cap,
            writecap,
            readcap,
            deep_immutable,
        )

    @given(
        mode=sampled_from(["w", "r", "rw"]),
        writecap=write_capabilities(),
        deep_immutable=booleans(),
    )
    def test_cached_result(self, mode, writecap, deep_immutable):
        """
        ``NodeMaker.create_from_cap`` returns the same object when called with the
        same arguments.
        """
        make_node = self._make_node_maker(mode, writecap, deep_immutable)
        original = make_node()
        additional = make_node()

        self.assertThat(
            original,
            Is(additional),
        )

    @given(
        mode=sampled_from(["w", "r", "rw"]),
        writecap=write_capabilities(),
        deep_immutable=booleans(),
    )
    def test_cache_expired(self, mode, writecap, deep_immutable):
        """
        After the node object returned by an earlier call to
        ``NodeMaker.create_from_cap`` has been garbage collected, a new call
        to ``NodeMaker.create_from_cap`` returns a node object, maybe even a
        new one although we can't really prove it.
        """
        make_node = self._make_node_maker(mode, writecap, deep_immutable)
        make_node()
        additional = make_node()
        self.assertThat(
            additional,
            AfterPreprocessing(
                lambda node: node.get_readonly_uri(),
                Equals(writecap.get_readonly().to_string()),
            ),
        )

    @defer.inlineCallbacks
    def test_maker(self):
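The two property-based tests above exercise the node cache behind create_from_cap: while the node object for a given cap is still referenced, repeated calls return the same object, and once it has been garbage collected a later call builds a fresh node. A standalone sketch of that general pattern follows; it is illustrative only, the class and attribute names are invented here, and Tahoe's actual NodeMaker may differ in detail.

# Illustrative sketch (not Tahoe-LAFS code) of the weak-value caching pattern
# that test_cached_result and test_cache_expired describe.
import weakref


class CachingNodeMaker(object):
    def __init__(self, build_node):
        self._build_node = build_node
        # Values are held weakly: once no caller references a node any more,
        # it can be garbage collected and a later lookup rebuilds it.
        self._cache = weakref.WeakValueDictionary()

    def create_from_cap(self, capstring):
        try:
            # Same argument while the node is still alive -> same object.
            return self._cache[capstring]
        except KeyError:
            node = self._build_node(capstring)
            self._cache[capstring] = node
            return node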